diff --git a/.github/workflows/e2e-sql.yml b/.github/workflows/e2e-sql.yml index fcf2352a120e5..971f45e4db585 100644 --- a/.github/workflows/e2e-sql.yml +++ b/.github/workflows/e2e-sql.yml @@ -107,7 +107,7 @@ jobs: matrix: adapter: [ proxy, jdbc ] mode: [ Standalone, Cluster ] - database: [ MySQL, PostgreSQL, openGauss ] + database: [ MySQL, PostgreSQL ] # Fix me #25051 #scenario: [ dbtbl_with_readwrite_splitting, dbtbl_with_readwrite_splitting_and_encrypt, sharding_and_encrypt, encrypt_and_readwrite_splitting, encrypt_shadow, readwrite_splitting_and_shadow, sharding_and_shadow, sharding_encrypt_shadow, mask_encrypt, mask_sharding, mask_encrypt_sharding ] scenario: [ empty_rules, rdl_empty_rules, passthrough, tbl, encrypt, readwrite_splitting, shadow, mask, dbtbl_with_readwrite_splitting_and_encrypt, sharding_and_encrypt, encrypt_and_readwrite_splitting, encrypt_shadow, readwrite_splitting_and_shadow, sharding_and_shadow, sharding_encrypt_shadow, mask_encrypt, mask_sharding, mask_encrypt_sharding ] diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index bb8bf40ac1ea0..2d1883578eb56 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -119,11 +119,16 @@ jobs: ref: ${{ inputs.commit-id }} - uses: graalvm/setup-graalvm@v1 with: - version: '22.3.1' - java-version: '17' - components: 'espresso,native-image' + java-version: '17.0.8' + distribution: 'graalvm-community' github-token: ${{ secrets.GITHUB_TOKEN }} - cache: 'maven' + - uses: actions/cache@v3 + with: + path: ~/.m2/repository + key: ${{ needs.global-environment.outputs.GLOBAL_CACHE_PREFIX }}-maven-third-party-cache-${{ github.sha }} + restore-keys: | + ${{ needs.global-environment.outputs.GLOBAL_CACHE_PREFIX }}-maven-third-party-cache- + ${{ needs.global-environment.outputs.GLOBAL_CACHE_PREFIX }}-maven-third-party- - uses: docker/login-action@v2 with: registry: ${{ env.HUB }} @@ -217,17 +222,17 @@ jobs: - name: Build with Maven run: 
./mvnw -B -T1C -ntp clean install -DskipITs -DskipTests - name: Generate Examples - run: ./mvnw -B clean install -f examples/shardingsphere-example-generator/pom.xml -Pexample-generator -Dproducts=${{ matrix.product }} -Dmodes=${{ matrix.mode }} -Dtransactions=${{ matrix.transaction }} -Dfeatures=${{ matrix.feature }} -Dframeworks=${{ matrix.framework }} + run: ./mvnw -B clean install -f examples/shardingsphere-jdbc-example-generator/pom.xml -Pexample-generator -Dproducts=${{ matrix.product }} -Dmodes=${{ matrix.mode }} -Dtransactions=${{ matrix.transaction }} -Dfeatures=${{ matrix.feature }} -Dframeworks=${{ matrix.framework }} - name: Test Examples - run : ./mvnw -B test -f examples/shardingsphere-example-generator/target/generated-sources/shardingsphere-jdbc-sample/${{ matrix.feature }}--${{ matrix.framework }}--${{ matrix.mode }}--${{ matrix.transaction }}/pom.xml -Pexample-generator -Dexec.cleanupDaemonThreads=false + run : ./mvnw -B test -f examples/shardingsphere-jdbc-example-generator/target/generated-sources/shardingsphere-jdbc-sample/${{ matrix.feature }}--${{ matrix.framework }}--${{ matrix.mode }}--${{ matrix.transaction }}/pom.xml -Pexample-generator -Dexec.cleanupDaemonThreads=false - name: Package Examples run: | - cd examples/shardingsphere-example-generator/target/generated-sources/shardingsphere-jdbc-sample/ + cd examples/shardingsphere-jdbc-example-generator/target/generated-sources/shardingsphere-jdbc-sample/ tar -czvf ${{ matrix.feature }}--${{ matrix.framework }}--${{ matrix.mode }}--${{ matrix.transaction }}.tar.gz ${{ matrix.feature }}--${{ matrix.framework }}--${{ matrix.mode }}--${{ matrix.transaction }} - uses: burnett01/rsync-deployments@5.2 with: switches: -avzr - path: examples/shardingsphere-example-generator/target/generated-sources/shardingsphere-jdbc-sample/${{ matrix.feature }}--${{ matrix.framework }}--${{ matrix.mode }}--${{ matrix.transaction }}.tar.gz + path: 
examples/shardingsphere-jdbc-example-generator/target/generated-sources/shardingsphere-jdbc-sample/${{ matrix.feature }}--${{ matrix.framework }}--${{ matrix.mode }}--${{ matrix.transaction }}.tar.gz remote_path: ${{ secrets.NIGHTLIES_RSYNC_PATH }}/shardingsphere/examples remote_host: ${{ secrets.NIGHTLIES_RSYNC_HOST }} remote_port: ${{ secrets.NIGHTLIES_RSYNC_PORT }} diff --git a/.github/workflows/nightly-check.yml b/.github/workflows/nightly-check.yml index da99ae7bf5add..ac30213735c63 100644 --- a/.github/workflows/nightly-check.yml +++ b/.github/workflows/nightly-check.yml @@ -143,7 +143,7 @@ jobs: name: Check - CodeQL needs: global-environment runs-on: ubuntu-latest - timeout-minutes: 45 + timeout-minutes: 100 permissions: actions: read contents: read diff --git a/README.md b/README.md index 0ac3f81bf814f..aae761982ebe5 100644 --- a/README.md +++ b/README.md @@ -39,8 +39,6 @@ The concepts at the core of the project are `Connect`, `Enhance` and `Pluggable` - `Enhance:` Capture database access entry to provide additional features transparently, such as: redirect (sharding, readwrite-splitting and shadow), transform (data encrypt and mask), authentication (security, audit and authority), governance (circuit breaker and access limitation and analyze, QoS and observability). - `Pluggable:` Leveraging the micro kernel and 3 layers pluggable mode, features and database ecosystem can be embedded flexibly. Developers can customize their ShardingSphere just like building with LEGO blocks. -Virtually all databases are [supported](https://shardingsphere.apache.org/document/current/en/dev-manual/data-source/) including [MySQL](https://www.mysql.com), [PostgreSQL](https://www.postgresql.org), [SQL Server](https://www.microsoft.com/en-us/sql-server/sql-server-downloads), [Oracle Database](https://www.oracle.com/database/), [MariaDB](https://mariadb.org) or any other SQL-92 database. 
- ShardingSphere became an [Apache](https://apache.org/index.html#projects-list) Top-Level Project on April 16, 2020. So far, ShardingSphere has been used by over [10,000 projects on GitHub](https://github.com/search?l=Maven+POM&q=shardingsphere+language%3A%22Maven+POM%22&type=Code). @@ -54,7 +52,7 @@ So far, ShardingSphere has been used by over [10,000 projects on GitHub](https:/ For full documentation & more details, visit: [Docs](https://shardingsphere.apache.org/document/current/en/overview/) -### CONTRIBUTION🚀🧑‍💻 +### CONTRIBUTION🚀🧑💻
diff --git a/README_ZH.md b/README_ZH.md index d4408006f6b56..6e3b124750943 100644 --- a/README_ZH.md +++ b/README_ZH.md @@ -48,7 +48,7 @@ ShardingSphere 已于 2020 年 4 月 16 日成为 [Apache 软件基金会](https 更多信息请参考:[https://shardingsphere.apache.org/document/current/cn/overview/](https://shardingsphere.apache.org/document/current/cn/overview/) -### 参与贡献🚀🧑‍💻 +### 参与贡献🚀🧑💻
diff --git a/agent/core/pom.xml b/agent/core/pom.xml index a6deb243d4548..014a57a65ea3c 100644 --- a/agent/core/pom.xml +++ b/agent/core/pom.xml @@ -57,11 +57,6 @@ org.yaml snakeyaml - - ch.qos.logback - logback-classic - compile - @@ -88,10 +83,6 @@ - - com.google - ${shade.package}.com.google - net.bytebuddy ${shade.package}.net.bytebuddy @@ -100,18 +91,6 @@ org.yaml ${shade.package}.org.yaml - - org.slf4j - ${shade.package}.org.slf4j - - - ch.qos.logback - ${shade.package}.ch.qos.logback - - - org.apache.commons.logging - ${shade.package}.org.apache.commons.logging - diff --git a/agent/core/src/main/java/org/apache/shardingsphere/agent/core/log/config/LogbackConfiguration.java b/agent/core/src/main/java/org/apache/shardingsphere/agent/core/log/config/LogbackConfiguration.java deleted file mode 100644 index be82187b47bd8..0000000000000 --- a/agent/core/src/main/java/org/apache/shardingsphere/agent/core/log/config/LogbackConfiguration.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.agent.core.log.config; - -import ch.qos.logback.classic.BasicConfigurator; -import ch.qos.logback.classic.Level; -import ch.qos.logback.classic.Logger; -import ch.qos.logback.classic.LoggerContext; -import ch.qos.logback.classic.PatternLayout; -import ch.qos.logback.classic.spi.ILoggingEvent; -import ch.qos.logback.core.Appender; -import ch.qos.logback.core.FileAppender; -import ch.qos.logback.core.encoder.LayoutWrappingEncoder; -import org.apache.shardingsphere.agent.core.path.AgentPath; - -import java.io.File; - -/** - * Logback configuration. - */ -public final class LogbackConfiguration extends BasicConfigurator { - - public static final String DEFAULT_PATTERN = "[%-5level] %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %logger{36} - %msg%n"; - - public static final String SHARDINGSPHERE_LOGGER_NAME = "org.apache.shardingsphere.agent"; - - @Override - public void configure(final LoggerContext loggerContext) { - Appender appender = createFileAppender(loggerContext); - Logger rootLogger = loggerContext.getLogger(Logger.ROOT_LOGGER_NAME); - rootLogger.setLevel(Level.INFO); - rootLogger.addAppender(appender); - Logger logger = loggerContext.getLogger(SHARDINGSPHERE_LOGGER_NAME); - logger.setLevel(Level.INFO); - logger.setAdditive(false); - logger.addAppender(appender); - } - - private FileAppender createFileAppender(final LoggerContext loggerContext) { - FileAppender result = new FileAppender<>(); - result.setContext(loggerContext); - result.setName("fileAppender"); - result.setFile(getLogFile()); - LayoutWrappingEncoder encoder = createEncoder(loggerContext); - result.setEncoder(encoder); - result.start(); - return result; - } - - private String getLogFile() { - return String.join(File.separator, AgentPath.getRootPath().getPath(), "logs", "stdout.log"); - } - - private LayoutWrappingEncoder createEncoder(final LoggerContext loggerContext) { - LayoutWrappingEncoder result = new LayoutWrappingEncoder<>(); - 
result.setContext(loggerContext); - PatternLayout layout = createPatternLayout(loggerContext); - result.setLayout(layout); - return result; - } - - private PatternLayout createPatternLayout(final LoggerContext loggerContext) { - PatternLayout result = new PatternLayout(); - result.setPattern(DEFAULT_PATTERN); - result.setContext(loggerContext); - result.start(); - return result; - } -} diff --git a/agent/core/src/main/java/org/apache/shardingsphere/agent/core/path/AgentPath.java b/agent/core/src/main/java/org/apache/shardingsphere/agent/core/path/AgentPath.java index dc09886229bd1..6bf9a13a4c969 100644 --- a/agent/core/src/main/java/org/apache/shardingsphere/agent/core/path/AgentPath.java +++ b/agent/core/src/main/java/org/apache/shardingsphere/agent/core/path/AgentPath.java @@ -17,9 +17,9 @@ package org.apache.shardingsphere.agent.core.path; -import com.google.common.base.Preconditions; import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.agent.core.util.AgentPreconditions; import java.io.File; import java.net.MalformedURLException; @@ -47,7 +47,7 @@ public static File getRootPath() { private static File getJarFile(final String url) { try { File result = new File(new URL(url.substring(url.indexOf("file:"), url.indexOf('!'))).toURI()); - Preconditions.checkState(result.exists(), "Can not locate agent jar file by URL `%s`.", url); + AgentPreconditions.checkState(result.exists(), String.format("Can not locate agent jar file by URL `%s`.", url)); return result; } catch (final MalformedURLException | URISyntaxException ex) { throw new IllegalStateException(String.format("Can not locate agent jar file by URL `%s`.", url), ex); diff --git a/agent/core/src/main/java/org/apache/shardingsphere/agent/core/plugin/classloader/AgentPluginClassLoader.java b/agent/core/src/main/java/org/apache/shardingsphere/agent/core/plugin/classloader/AgentPluginClassLoader.java index 58c9ea2a54ae4..deed99b8139e3 100644 --- 
a/agent/core/src/main/java/org/apache/shardingsphere/agent/core/plugin/classloader/AgentPluginClassLoader.java +++ b/agent/core/src/main/java/org/apache/shardingsphere/agent/core/plugin/classloader/AgentPluginClassLoader.java @@ -17,9 +17,9 @@ package org.apache.shardingsphere.agent.core.plugin.classloader; -import com.google.common.io.ByteStreams; - +import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.Collection; @@ -94,10 +94,26 @@ private void definePackage(final String name, final Manifest manifest) { } private Class defineClass(final String name, final JarFile extraJar, final ZipEntry entry) throws IOException { - byte[] data = ByteStreams.toByteArray(extraJar.getInputStream(entry)); + byte[] data = toByteArray(extraJar.getInputStream(entry)); return defineClass(name, data, 0, data.length); } + private static byte[] toByteArray(final InputStream inStream) throws IOException { + int buffSize = 2048; + ByteArrayOutputStream result = new ByteArrayOutputStream(); + try { + byte[] buffer = new byte[buffSize]; + int len = -1; + while ((len = inStream.read(buffer)) != -1) { + result.write(buffer, 0, len); + } + } finally { + result.close(); + inStream.close(); + } + return result.toByteArray(); + } + @Override protected Enumeration findResources(final String name) { Collection result = new LinkedList<>(); diff --git a/agent/core/src/main/java/org/apache/shardingsphere/agent/core/plugin/config/yaml/swapper/YamlPluginsConfigurationSwapper.java b/agent/core/src/main/java/org/apache/shardingsphere/agent/core/plugin/config/yaml/swapper/YamlPluginsConfigurationSwapper.java index 4f3b535e827e4..b63fae473135e 100644 --- a/agent/core/src/main/java/org/apache/shardingsphere/agent/core/plugin/config/yaml/swapper/YamlPluginsConfigurationSwapper.java +++ 
b/agent/core/src/main/java/org/apache/shardingsphere/agent/core/plugin/config/yaml/swapper/YamlPluginsConfigurationSwapper.java @@ -28,6 +28,7 @@ import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; +import java.util.Properties; import java.util.stream.Collectors; /** @@ -57,10 +58,11 @@ public static Map swap(final YamlAgentConfiguration private static Map swap(final Map yamlConfigs) { return null == yamlConfigs ? Collections.emptyMap() - : yamlConfigs.entrySet().stream().collect(Collectors.toMap(Entry::getKey, entry -> swap(entry.getValue()), (key, value) -> value, LinkedHashMap::new)); + : yamlConfigs.entrySet().stream().collect(Collectors.toMap(Entry::getKey, entry -> swap(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); } private static PluginConfiguration swap(final YamlPluginConfiguration yamlConfig) { - return new PluginConfiguration(yamlConfig.getHost(), yamlConfig.getPort(), yamlConfig.getPassword(), yamlConfig.getProps()); + return null == yamlConfig ? 
new PluginConfiguration(null, 0, null, new Properties()) + : new PluginConfiguration(yamlConfig.getHost(), yamlConfig.getPort(), yamlConfig.getPassword(), yamlConfig.getProps()); } } diff --git a/agent/core/src/main/java/org/apache/shardingsphere/agent/core/spi/AgentServiceLoader.java b/agent/core/src/main/java/org/apache/shardingsphere/agent/core/spi/AgentServiceLoader.java index 2aa5d5f46f05c..03eb3de6e3f32 100644 --- a/agent/core/src/main/java/org/apache/shardingsphere/agent/core/spi/AgentServiceLoader.java +++ b/agent/core/src/main/java/org/apache/shardingsphere/agent/core/spi/AgentServiceLoader.java @@ -17,8 +17,8 @@ package org.apache.shardingsphere.agent.core.spi; -import com.google.common.base.Preconditions; import lombok.Getter; +import org.apache.shardingsphere.agent.core.util.AgentPreconditions; import java.util.Collection; import java.util.LinkedList; @@ -44,8 +44,8 @@ private AgentServiceLoader(final Class service) { } private void validate(final Class service) { - Preconditions.checkNotNull(service, "SPI class is null."); - Preconditions.checkArgument(service.isInterface(), "SPI class `%s` is not interface.", service); + AgentPreconditions.checkNotNull(service, "SPI class is null."); + AgentPreconditions.checkArgument(service.isInterface(), String.format("SPI class `%s` is not interface.", service)); } private Collection load(final Class service) { diff --git a/agent/core/src/main/java/org/apache/shardingsphere/agent/core/util/AgentPreconditions.java b/agent/core/src/main/java/org/apache/shardingsphere/agent/core/util/AgentPreconditions.java new file mode 100644 index 0000000000000..f6fe11baf5967 --- /dev/null +++ b/agent/core/src/main/java/org/apache/shardingsphere/agent/core/util/AgentPreconditions.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.agent.core.util; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +/** + * Agent preconditions. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class AgentPreconditions { + + /** + * Check state. + * + * @param state state + * @param errorMessage error message + * @throws IllegalStateException illegal state exception + */ + public static void checkState(final boolean state, final String errorMessage) { + if (!state) { + throw new IllegalStateException(errorMessage); + } + } + + /** + * Check not null. + * + * @param reference reference + * @param errorMessage error message + * @throws NullPointerException null pointer exception + */ + public static void checkNotNull(final Object reference, final String errorMessage) { + if (null == reference) { + throw new NullPointerException(errorMessage); + } + } + + /** + * Check argument. 
+ * + * @param condition condition + * @param errorMessage error message + * @throws IllegalArgumentException illegal argument exception + */ + public static void checkArgument(final boolean condition, final String errorMessage) { + if (!condition) { + throw new IllegalArgumentException(errorMessage); + } + } +} diff --git a/agent/core/src/main/java/org/apache/shardingsphere/agent/core/yaml/AgentYamlConstructor.java b/agent/core/src/main/java/org/apache/shardingsphere/agent/core/yaml/AgentYamlConstructor.java index 9338b3b69545b..dec92288623d8 100644 --- a/agent/core/src/main/java/org/apache/shardingsphere/agent/core/yaml/AgentYamlConstructor.java +++ b/agent/core/src/main/java/org/apache/shardingsphere/agent/core/yaml/AgentYamlConstructor.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.agent.core.yaml; -import com.google.common.base.Preconditions; +import org.apache.shardingsphere.agent.core.util.AgentPreconditions; import org.yaml.snakeyaml.constructor.Constructor; /** @@ -34,7 +34,7 @@ public AgentYamlConstructor(final Class rootClass) { @Override protected Class getClassForName(final String className) throws ClassNotFoundException { - Preconditions.checkArgument(className.equals(rootClass.getName()), "Class `%s` is not accepted", className); + AgentPreconditions.checkState(className.equals(rootClass.getName()), String.format("Class `%s` is not accepted", className)); return super.getClassForName(className); } } diff --git a/agent/plugins/core/pom.xml b/agent/plugins/core/pom.xml index 8a6aef8abb695..c638df4dd593f 100644 --- a/agent/plugins/core/pom.xml +++ b/agent/plugins/core/pom.xml @@ -57,14 +57,6 @@ shade package - - - - com.google.common - ${shade.package}.com.google.common - - - diff --git a/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/config/validator/PluginConfigurationValidator.java b/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/config/validator/PluginConfigurationValidator.java 
index acdc9f2c611f5..331bbda370f79 100644 --- a/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/config/validator/PluginConfigurationValidator.java +++ b/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/config/validator/PluginConfigurationValidator.java @@ -17,11 +17,10 @@ package org.apache.shardingsphere.agent.plugin.core.config.validator; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; import lombok.AccessLevel; import lombok.NoArgsConstructor; import org.apache.shardingsphere.agent.api.PluginConfiguration; +import org.apache.shardingsphere.agent.plugin.core.util.PluginPreconditions; /** * Remote plugin configuration validator. @@ -47,7 +46,7 @@ public static void validateHostAndPort(final String type, final PluginConfigurat * @param pluginConfig to be validated plugin configuration */ public static void validateHost(final String type, final PluginConfiguration pluginConfig) { - Preconditions.checkArgument(!Strings.isNullOrEmpty(pluginConfig.getHost()), "Hostname of %s is required.", type); + PluginPreconditions.checkArgument(!(null == pluginConfig.getHost() || pluginConfig.getHost().isEmpty()), String.format("Hostname of %s is required.", type)); } /** @@ -57,6 +56,6 @@ public static void validateHost(final String type, final PluginConfiguration plu * @param pluginConfig to be validated plugin configuration */ public static void validatePort(final String type, final PluginConfiguration pluginConfig) { - Preconditions.checkArgument(pluginConfig.getPort() > 0, "Port `%s` of %s must be a positive number.", pluginConfig.getPort(), type); + PluginPreconditions.checkArgument(pluginConfig.getPort() > 0, String.format("Port `%s` of %s must be a positive number.", pluginConfig.getPort(), type)); } } diff --git a/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/spi/PluginServiceLoader.java 
b/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/spi/PluginServiceLoader.java index d9711e7ac6182..867d65101976a 100644 --- a/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/spi/PluginServiceLoader.java +++ b/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/spi/PluginServiceLoader.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.agent.plugin.core.spi; -import com.google.common.base.Preconditions; +import org.apache.shardingsphere.agent.plugin.core.util.PluginPreconditions; import java.util.Collection; import java.util.LinkedList; @@ -42,8 +42,8 @@ private PluginServiceLoader(final Class service) { } private void validate(final Class service) { - Preconditions.checkNotNull(service, "SPI class is null."); - Preconditions.checkArgument(service.isInterface(), "SPI class `%s` is not interface.", service); + PluginPreconditions.checkNotNull(service, "SPI class is null."); + PluginPreconditions.checkArgument(service.isInterface(), String.format("SPI class `%s` is not interface.", service)); } private Collection load(final Class service) { diff --git a/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/util/PluginPreconditions.java b/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/util/PluginPreconditions.java new file mode 100644 index 0000000000000..3b0a83cb45ab5 --- /dev/null +++ b/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/util/PluginPreconditions.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.agent.plugin.core.util; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +/** + * Plugin preconditions. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class PluginPreconditions { + + /** + * Check state. + * + * @param state state + * @param errorMessage error message + * @throws IllegalStateException illegal state exception + */ + public static void checkState(final boolean state, final String errorMessage) { + if (!state) { + throw new IllegalStateException(errorMessage); + } + } + + /** + * Check not null. + * + * @param reference reference + * @param errorMessage error message + * @throws NullPointerException null pointer exception + */ + public static void checkNotNull(final Object reference, final String errorMessage) { + if (null == reference) { + throw new NullPointerException(errorMessage); + } + } + + /** + * Check argument. 
+ * + * @param condition condition + * @param errorMessage error message + * @throws IllegalArgumentException illegal argument exception + */ + public static void checkArgument(final boolean condition, final String errorMessage) { + if (!condition) { + throw new IllegalArgumentException(errorMessage); + } + } +} diff --git a/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/util/SQLStatementUtil.java b/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/util/SQLStatementUtils.java similarity index 98% rename from agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/util/SQLStatementUtil.java rename to agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/util/SQLStatementUtils.java index fc0d417d70d9f..5f943f67bbf5d 100644 --- a/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/util/SQLStatementUtil.java +++ b/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/util/SQLStatementUtils.java @@ -37,10 +37,10 @@ import org.apache.shardingsphere.sql.parser.sql.common.statement.tcl.TCLStatement; /** - * SQL statement util. + * SQL statement utils. */ @NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class SQLStatementUtil { +public final class SQLStatementUtils { /** * Get SQL type. 
diff --git a/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/util/ShardingSphereDriverUtil.java b/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/util/ShardingSphereDriverUtils.java similarity index 95% rename from agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/util/ShardingSphereDriverUtil.java rename to agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/util/ShardingSphereDriverUtils.java index d3832bdb48b06..2be08e87a0117 100644 --- a/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/util/ShardingSphereDriverUtil.java +++ b/agent/plugins/core/src/main/java/org/apache/shardingsphere/agent/plugin/core/util/ShardingSphereDriverUtils.java @@ -27,10 +27,10 @@ import java.util.Optional; /** - * ShardingSphere driver util. + * ShardingSphere driver utils. */ @NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class ShardingSphereDriverUtil { +public final class ShardingSphereDriverUtils { /** * Get sharding sphere driver. 
diff --git a/agent/plugins/metrics/core/pom.xml b/agent/plugins/metrics/core/pom.xml index 2d6163ea8e34a..05dd1208a987d 100644 --- a/agent/plugins/metrics/core/pom.xml +++ b/agent/plugins/metrics/core/pom.xml @@ -118,10 +118,6 @@ package - - com.google.common - ${shade.package}.com.google.common - net.bytebuddy ${shade.package}.net.bytebuddy diff --git a/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/advice/SQLParseCountAdvice.java b/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/advice/SQLParseCountAdvice.java index dfd92600a8edd..554c9463ecd88 100644 --- a/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/advice/SQLParseCountAdvice.java +++ b/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/advice/SQLParseCountAdvice.java @@ -19,7 +19,7 @@ import org.apache.shardingsphere.agent.api.advice.TargetAdviceObject; import org.apache.shardingsphere.agent.api.advice.type.InstanceMethodAdvice; -import org.apache.shardingsphere.agent.plugin.core.util.SQLStatementUtil; +import org.apache.shardingsphere.agent.plugin.core.util.SQLStatementUtils; import org.apache.shardingsphere.agent.plugin.metrics.core.collector.MetricsCollectorRegistry; import org.apache.shardingsphere.agent.plugin.metrics.core.collector.type.CounterMetricsCollector; import org.apache.shardingsphere.agent.plugin.metrics.core.config.MetricCollectorType; @@ -44,6 +44,6 @@ public void afterMethod(final TargetAdviceObject target, final Method method, fi } private Optional getSQLType(final SQLStatement sqlStatement) { - return Optional.of(SQLStatementUtil.getType(sqlStatement).name()); + return Optional.of(SQLStatementUtils.getType(sqlStatement).name()); } } diff --git a/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/advice/SQLRouteCountAdvice.java 
b/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/advice/SQLRouteCountAdvice.java index 3d483c83ca6d5..885d015aa8ce2 100644 --- a/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/advice/SQLRouteCountAdvice.java +++ b/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/advice/SQLRouteCountAdvice.java @@ -19,7 +19,7 @@ import org.apache.shardingsphere.agent.api.advice.TargetAdviceObject; import org.apache.shardingsphere.agent.api.advice.type.InstanceMethodAdvice; -import org.apache.shardingsphere.agent.plugin.core.util.SQLStatementUtil; +import org.apache.shardingsphere.agent.plugin.core.util.SQLStatementUtils; import org.apache.shardingsphere.agent.plugin.metrics.core.collector.MetricsCollectorRegistry; import org.apache.shardingsphere.agent.plugin.metrics.core.collector.type.CounterMetricsCollector; import org.apache.shardingsphere.agent.plugin.metrics.core.config.MetricCollectorType; @@ -47,6 +47,6 @@ public void beforeMethod(final TargetAdviceObject target, final Method method, f } private Optional getSQLType(final SQLStatement sqlStatement) { - return Optional.of(SQLStatementUtil.getType(sqlStatement).name()); + return Optional.of(SQLStatementUtils.getType(sqlStatement).name()); } } diff --git a/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/jdbc/JDBCMetaDataInfoExporter.java b/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/jdbc/JDBCMetaDataInfoExporter.java index 069b58353c029..ed249b6955f7e 100644 --- a/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/jdbc/JDBCMetaDataInfoExporter.java +++ b/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/jdbc/JDBCMetaDataInfoExporter.java @@ -18,7 +18,7 @@ package 
org.apache.shardingsphere.agent.plugin.metrics.core.exporter.impl.jdbc; import org.apache.shardingsphere.agent.plugin.core.util.AgentReflectionUtils; -import org.apache.shardingsphere.agent.plugin.core.util.ShardingSphereDriverUtil; +import org.apache.shardingsphere.agent.plugin.core.util.ShardingSphereDriverUtils; import org.apache.shardingsphere.agent.plugin.metrics.core.collector.MetricsCollectorRegistry; import org.apache.shardingsphere.agent.plugin.metrics.core.collector.type.GaugeMetricFamilyMetricsCollector; import org.apache.shardingsphere.agent.plugin.metrics.core.config.MetricCollectorType; @@ -47,7 +47,7 @@ public final class JDBCMetaDataInfoExporter implements MetricsExporter { @Override public Optional export(final String pluginType) { - Optional driver = ShardingSphereDriverUtil.getShardingSphereDriver(); + Optional driver = ShardingSphereDriverUtils.getShardingSphereDriver(); if (!driver.isPresent()) { return Optional.empty(); } @@ -59,7 +59,7 @@ public Optional export(final String pluginTyp ShardingSphereDataSource dataSource = (ShardingSphereDataSource) entry.getValue(); String databaseName = AgentReflectionUtils.getFieldValue(dataSource, "databaseName"); ContextManager contextManager = AgentReflectionUtils.getFieldValue(dataSource, "contextManager"); - result.addMetric(Arrays.asList(databaseName, "storage_unit_count"), contextManager.getDataSourceMap(databaseName).size()); + result.addMetric(Arrays.asList(databaseName, "storage_unit_count"), contextManager.getStorageUnits(databaseName).size()); } return Optional.of(result); } diff --git a/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/jdbc/JDBCStateExporter.java b/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/jdbc/JDBCStateExporter.java index 1314796bb5637..16c5866d7c4ba 100644 --- 
a/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/jdbc/JDBCStateExporter.java +++ b/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/jdbc/JDBCStateExporter.java @@ -18,7 +18,7 @@ package org.apache.shardingsphere.agent.plugin.metrics.core.exporter.impl.jdbc; import org.apache.shardingsphere.agent.plugin.core.util.AgentReflectionUtils; -import org.apache.shardingsphere.agent.plugin.core.util.ShardingSphereDriverUtil; +import org.apache.shardingsphere.agent.plugin.core.util.ShardingSphereDriverUtils; import org.apache.shardingsphere.agent.plugin.metrics.core.collector.MetricsCollectorRegistry; import org.apache.shardingsphere.agent.plugin.metrics.core.collector.type.GaugeMetricFamilyMetricsCollector; import org.apache.shardingsphere.agent.plugin.metrics.core.config.MetricCollectorType; @@ -44,7 +44,7 @@ public final class JDBCStateExporter implements MetricsExporter { @Override public Optional export(final String pluginType) { - Optional driver = ShardingSphereDriverUtil.getShardingSphereDriver(); + Optional driver = ShardingSphereDriverUtils.getShardingSphereDriver(); if (!driver.isPresent()) { return Optional.empty(); } diff --git a/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/proxy/ProxyMetaDataInfoExporter.java b/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/proxy/ProxyMetaDataInfoExporter.java index 0df0998371b36..19bd583fe30c0 100644 --- a/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/proxy/ProxyMetaDataInfoExporter.java +++ b/agent/plugins/metrics/core/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/proxy/ProxyMetaDataInfoExporter.java @@ -53,6 +53,6 @@ public Optional export(final String pluginTyp } private int 
getStorageUnitCount(final MetaDataContexts metaDataContexts) { - return metaDataContexts.getMetaData().getDatabases().values().stream().map(each -> each.getResourceMetaData().getDataSources().size()).reduce(0, Integer::sum); + return metaDataContexts.getMetaData().getDatabases().values().stream().map(each -> each.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().size()).reduce(0, Integer::sum); } } diff --git a/agent/plugins/metrics/core/src/test/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/proxy/ProxyMetaDataInfoExporterTest.java b/agent/plugins/metrics/core/src/test/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/proxy/ProxyMetaDataInfoExporterTest.java index c62991a522771..f26162ba87d1d 100644 --- a/agent/plugins/metrics/core/src/test/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/proxy/ProxyMetaDataInfoExporterTest.java +++ b/agent/plugins/metrics/core/src/test/java/org/apache/shardingsphere/agent/plugin/metrics/core/exporter/impl/proxy/ProxyMetaDataInfoExporterTest.java @@ -25,6 +25,7 @@ import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.metadata.persist.MetaDataPersistService; import org.apache.shardingsphere.mode.manager.ContextManager; @@ -36,7 +37,6 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import javax.sql.DataSource; import java.util.Collections; import java.util.Optional; @@ -75,7 +75,7 @@ void assertExportWithContextManager() { private ContextManager mockContextManager() { ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); 
- when(database.getResourceMetaData().getDataSources()).thenReturn(Collections.singletonMap("ds_0", mock(DataSource.class))); + when(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("ds_0", mock(StorageUnit.class))); when(database.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); ShardingSphereMetaData metaData = mock(ShardingSphereMetaData.class); when(metaData.getDatabases()).thenReturn(Collections.singletonMap("sharding_db", database)); diff --git a/agent/plugins/metrics/core/src/test/java/org/apache/shardingsphere/agent/plugin/metrics/core/fixture/collector/MetricsCollectorFixture.java b/agent/plugins/metrics/core/src/test/java/org/apache/shardingsphere/agent/plugin/metrics/core/fixture/collector/MetricsCollectorFixture.java index aa8089e43b605..2b2761d7a2f89 100644 --- a/agent/plugins/metrics/core/src/test/java/org/apache/shardingsphere/agent/plugin/metrics/core/fixture/collector/MetricsCollectorFixture.java +++ b/agent/plugins/metrics/core/src/test/java/org/apache/shardingsphere/agent/plugin/metrics/core/fixture/collector/MetricsCollectorFixture.java @@ -76,7 +76,7 @@ public Object getRawMetricFamilyObject() { @Override public String toString() { - return labeledValues.isEmpty() ? value + "" : String.join(", ", getLabeledContents()); + return labeledValues.isEmpty() ? 
String.valueOf(value) : String.join(", ", getLabeledContents()); } private Collection getLabeledContents() { diff --git a/agent/plugins/metrics/core/src/test/resources/logback-test.xml b/agent/plugins/metrics/core/src/test/resources/logback-test.xml new file mode 100644 index 0000000000000..d90fc4a715683 --- /dev/null +++ b/agent/plugins/metrics/core/src/test/resources/logback-test.xml @@ -0,0 +1,33 @@ + + + + + + + [%-5level] %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %logger{36} - %msg%n + + + + + + + + + + + diff --git a/agent/plugins/metrics/type/prometheus/pom.xml b/agent/plugins/metrics/type/prometheus/pom.xml index a6fce529cf590..501d6d4ad28a4 100644 --- a/agent/plugins/metrics/type/prometheus/pom.xml +++ b/agent/plugins/metrics/type/prometheus/pom.xml @@ -113,10 +113,6 @@ package - - com.google.common - ${shade.package}.com.google.common - net.bytebuddy ${shade.package}.net.bytebuddy @@ -129,14 +125,6 @@ io.prometheus.client ${shade.package}.io.prometheus.client - - org.checkerframework - ${shade.package}.org.checkerframework - - - com.google.errorprone.annotations - ${shade.package}.com.google.errorprone.annotations - diff --git a/agent/plugins/metrics/type/prometheus/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/prometheus/PrometheusPluginLifecycleService.java b/agent/plugins/metrics/type/prometheus/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/prometheus/PrometheusPluginLifecycleService.java index 017b6129f509c..f5ad8f8dfb86d 100644 --- a/agent/plugins/metrics/type/prometheus/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/prometheus/PrometheusPluginLifecycleService.java +++ b/agent/plugins/metrics/type/prometheus/src/main/java/org/apache/shardingsphere/agent/plugin/metrics/prometheus/PrometheusPluginLifecycleService.java @@ -17,7 +17,6 @@ package org.apache.shardingsphere.agent.plugin.metrics.prometheus; -import com.google.common.base.Strings; import io.prometheus.client.CollectorRegistry; import 
io.prometheus.client.exporter.HTTPServer; import io.prometheus.client.hotspot.DefaultExports; @@ -85,7 +84,11 @@ private void registerCollectorForJDBC() { } private InetSocketAddress getSocketAddress(final PluginConfiguration pluginConfig) { - return Strings.isNullOrEmpty(pluginConfig.getHost()) ? new InetSocketAddress(pluginConfig.getPort()) : new InetSocketAddress(pluginConfig.getHost(), pluginConfig.getPort()); + return isNullOrEmpty(pluginConfig.getHost()) ? new InetSocketAddress(pluginConfig.getPort()) : new InetSocketAddress(pluginConfig.getHost(), pluginConfig.getPort()); + } + + private boolean isNullOrEmpty(final String string) { + return null == string || string.isEmpty(); } @Override diff --git a/agent/plugins/pom.xml b/agent/plugins/pom.xml index 2d135f68020ad..6449bb4be73d9 100644 --- a/agent/plugins/pom.xml +++ b/agent/plugins/pom.xml @@ -42,11 +42,6 @@ provided - - com.google.guava - guava - provided - net.bytebuddy byte-buddy diff --git a/agent/plugins/tracing/type/opentelemetry/pom.xml b/agent/plugins/tracing/type/opentelemetry/pom.xml index 0080590e432bf..d41f68072be6e 100644 --- a/agent/plugins/tracing/type/opentelemetry/pom.xml +++ b/agent/plugins/tracing/type/opentelemetry/pom.xml @@ -103,10 +103,6 @@ net.bytebuddy ${shade.package}.net.bytebuddy - - com.google - ${shade.package}.com.google - io.opentelemetry ${shade.package}.io.opentelemetry diff --git a/agent/pom.xml b/agent/pom.xml index 7c746a2f100a1..7c8129d7bc470 100644 --- a/agent/pom.xml +++ b/agent/pom.xml @@ -57,17 +57,19 @@ com.google.guava guava - - - com.google.code.findbugs - jsr305 - - - org.checkerframework - checker-qual - - + test + + org.apache.commons + commons-lang3 + test + + + org.apache.commons + commons-collections4 + test + + org.apache.shardingsphere shardingsphere-test-util diff --git a/db-protocol/core/src/main/java/org/apache/shardingsphere/db/protocol/codec/PacketCodec.java 
b/db-protocol/core/src/main/java/org/apache/shardingsphere/db/protocol/codec/PacketCodec.java index 95eb2f469d02d..b69f31fe037df 100644 --- a/db-protocol/core/src/main/java/org/apache/shardingsphere/db/protocol/codec/PacketCodec.java +++ b/db-protocol/core/src/main/java/org/apache/shardingsphere/db/protocol/codec/PacketCodec.java @@ -34,10 +34,8 @@ @Slf4j public final class PacketCodec extends ByteToMessageCodec { - @SuppressWarnings("rawtypes") private final DatabasePacketCodecEngine databasePacketCodecEngine; - @SuppressWarnings("unchecked") @Override protected void decode(final ChannelHandlerContext context, final ByteBuf in, final List out) { int readableBytes = in.readableBytes(); @@ -50,7 +48,6 @@ protected void decode(final ChannelHandlerContext context, final ByteBuf in, fin databasePacketCodecEngine.decode(context, in, out); } - @SuppressWarnings("unchecked") @Override protected void encode(final ChannelHandlerContext context, final DatabasePacket message, final ByteBuf out) { databasePacketCodecEngine.encode(context, message, out); diff --git a/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/codec/MySQLPacketCodecEngine.java b/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/codec/MySQLPacketCodecEngine.java index 57d54ae826940..9adb3d73dff38 100644 --- a/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/codec/MySQLPacketCodecEngine.java +++ b/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/codec/MySQLPacketCodecEngine.java @@ -29,7 +29,6 @@ import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnknownSQLException; import java.nio.charset.Charset; -import java.sql.SQLException; import java.util.Iterator; import java.util.LinkedList; import java.util.List; @@ -95,8 +94,7 @@ public void encode(final ChannelHandlerContext context, final DatabasePacket mes } catch (final RuntimeException ex) { // CHECKSTYLE:ON 
out.resetWriterIndex(); - SQLException unknownSQLException = new UnknownSQLException(ex).toSQLException(); - new MySQLErrPacket(unknownSQLException.getErrorCode(), unknownSQLException.getSQLState(), unknownSQLException.getMessage()).write(payload); + new MySQLErrPacket(new UnknownSQLException(ex).toSQLException()).write(payload); } finally { if (out.readableBytes() - PAYLOAD_LENGTH - SEQUENCE_LENGTH < MAX_PACKET_LENGTH) { updateMessageHeader(out, context.channel().attr(MySQLConstants.MYSQL_SEQUENCE_ID).get().getAndIncrement()); diff --git a/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLBinaryProtocolValueFactory.java b/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLBinaryProtocolValueFactory.java index 60e6aa885eab9..d27b28ba46adb 100644 --- a/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLBinaryProtocolValueFactory.java +++ b/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLBinaryProtocolValueFactory.java @@ -36,6 +36,7 @@ public final class MySQLBinaryProtocolValueFactory { static { setStringLenencBinaryProtocolValue(); + setByteLenencBinaryProtocolValue(); setInt8BinaryProtocolValue(); setInt4BinaryProtocolValue(); setInt2BinaryProtocolValue(); @@ -49,21 +50,25 @@ public final class MySQLBinaryProtocolValueFactory { private static void setStringLenencBinaryProtocolValue() { MySQLStringLenencBinaryProtocolValue binaryProtocolValue = new MySQLStringLenencBinaryProtocolValue(); - BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.STRING, binaryProtocolValue); BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.VARCHAR, binaryProtocolValue); BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.VAR_STRING, binaryProtocolValue); 
BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.ENUM, binaryProtocolValue); BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.SET, binaryProtocolValue); - BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.LONG_BLOB, binaryProtocolValue); - BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.MEDIUM_BLOB, binaryProtocolValue); - BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.BLOB, binaryProtocolValue); - BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.TINY_BLOB, binaryProtocolValue); BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.GEOMETRY, binaryProtocolValue); BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.BIT, binaryProtocolValue); BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.DECIMAL, binaryProtocolValue); BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.NEWDECIMAL, binaryProtocolValue); } + private static void setByteLenencBinaryProtocolValue() { + MySQLByteLenencBinaryProtocolValue binaryProtocolValue = new MySQLByteLenencBinaryProtocolValue(); + BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.STRING, binaryProtocolValue); + BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.LONG_BLOB, binaryProtocolValue); + BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.MEDIUM_BLOB, binaryProtocolValue); + BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.BLOB, binaryProtocolValue); + BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.TINY_BLOB, binaryProtocolValue); + } + private static void setInt8BinaryProtocolValue() { MySQLInt8BinaryProtocolValue binaryProtocolValue = new MySQLInt8BinaryProtocolValue(); BINARY_PROTOCOL_VALUES.put(MySQLBinaryColumnType.LONGLONG, binaryProtocolValue); diff --git a/test/fixture/infra/src/main/java/org/apache/shardingsphere/test/fixture/infra/datasource/pool/metadata/MockedDataSourcePoolFieldMetaData.java b/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLByteLenencBinaryProtocolValue.java similarity index 51% rename from 
test/fixture/infra/src/main/java/org/apache/shardingsphere/test/fixture/infra/datasource/pool/metadata/MockedDataSourcePoolFieldMetaData.java rename to db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLByteLenencBinaryProtocolValue.java index fa195221c639a..84003a99d92e2 100644 --- a/test/fixture/infra/src/main/java/org/apache/shardingsphere/test/fixture/infra/datasource/pool/metadata/MockedDataSourcePoolFieldMetaData.java +++ b/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLByteLenencBinaryProtocolValue.java @@ -15,32 +15,26 @@ * limitations under the License. */ -package org.apache.shardingsphere.test.fixture.infra.datasource.pool.metadata; +package org.apache.shardingsphere.db.protocol.mysql.packet.command.query.binary.execute.protocol; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolFieldMetaData; +import org.apache.shardingsphere.db.protocol.mysql.payload.MySQLPacketPayload; /** - * Mocked data source pool field meta data. + * Binary protocol value for byte lenenc for MySQL. Actually this is string lenenc, but converting to {@link String} may corrupt the raw bytes. 
*/ -public final class MockedDataSourcePoolFieldMetaData implements DataSourcePoolFieldMetaData { +public final class MySQLByteLenencBinaryProtocolValue implements MySQLBinaryProtocolValue { @Override - public String getUsernameFieldName() { - return "username"; + public Object read(final MySQLPacketPayload payload, final boolean unsigned) { + return payload.readStringLenencByBytes(); } @Override - public String getPasswordFieldName() { - return "password"; - } - - @Override - public String getJdbcUrlFieldName() { - return "url"; - } - - @Override - public String getJdbcUrlPropertiesFieldName() { - return "jdbcUrlProperties"; + public void write(final MySQLPacketPayload payload, final Object value) { + if (value instanceof byte[]) { + payload.writeBytesLenenc((byte[]) value); + } else { + payload.writeStringLenenc(value.toString()); + } } } diff --git a/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLDateBinaryProtocolValue.java b/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLDateBinaryProtocolValue.java index 3e5304ea700b8..aa29c132d5285 100644 --- a/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLDateBinaryProtocolValue.java +++ b/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLDateBinaryProtocolValue.java @@ -60,15 +60,14 @@ private Timestamp getTimestampForDatetime(final MySQLPacketPayload payload) { @Override public void write(final MySQLPacketPayload payload, final Object value) { - Timestamp timestamp = new Timestamp(((Date) value).getTime()); - LocalDateTime dateTime = timestamp.toLocalDateTime(); + LocalDateTime dateTime = value instanceof LocalDateTime ? 
(LocalDateTime) value : new Timestamp(((Date) value).getTime()).toLocalDateTime(); int year = dateTime.getYear(); int month = dateTime.getMonthValue(); int dayOfMonth = dateTime.getDayOfMonth(); int hours = dateTime.getHour(); int minutes = dateTime.getMinute(); int seconds = dateTime.getSecond(); - int nanos = timestamp.getNanos(); + int nanos = dateTime.getNano(); boolean isTimeAbsent = 0 == hours && 0 == minutes && 0 == seconds; boolean isNanosAbsent = 0 == nanos; if (isTimeAbsent && isNanosAbsent) { diff --git a/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/packet/generic/MySQLErrPacket.java b/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/packet/generic/MySQLErrPacket.java index 3ae0749bf4b81..5ed84acdac542 100644 --- a/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/packet/generic/MySQLErrPacket.java +++ b/db-protocol/mysql/src/main/java/org/apache/shardingsphere/db/protocol/mysql/packet/generic/MySQLErrPacket.java @@ -19,17 +19,17 @@ import com.google.common.base.Preconditions; import lombok.Getter; -import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.db.protocol.mysql.packet.MySQLPacket; import org.apache.shardingsphere.db.protocol.mysql.payload.MySQLPacketPayload; import org.apache.shardingsphere.infra.exception.core.external.sql.vendor.VendorError; +import java.sql.SQLException; + /** * ERR packet protocol for MySQL. * * @see ERR Packet */ -@RequiredArgsConstructor @Getter public final class MySQLErrPacket extends MySQLPacket { @@ -46,8 +46,16 @@ public final class MySQLErrPacket extends MySQLPacket { private final String errorMessage; + public MySQLErrPacket(final SQLException exception) { + errorCode = exception.getErrorCode(); + sqlState = exception.getSQLState(); + errorMessage = exception.getMessage(); + } + public MySQLErrPacket(final VendorError vendorError, final Object... 
errorMessageArgs) { - this(vendorError.getVendorCode(), vendorError.getSqlState().getValue(), String.format(vendorError.getReason(), errorMessageArgs)); + errorCode = vendorError.getVendorCode(); + sqlState = vendorError.getSqlState().getValue(); + errorMessage = String.format(vendorError.getReason(), errorMessageArgs); } public MySQLErrPacket(final MySQLPacketPayload payload) { diff --git a/db-protocol/mysql/src/test/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLBinaryProtocolValueFactoryTest.java b/db-protocol/mysql/src/test/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLBinaryProtocolValueFactoryTest.java index fb4801aa44e57..1ddf0b8fade1d 100644 --- a/db-protocol/mysql/src/test/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLBinaryProtocolValueFactoryTest.java +++ b/db-protocol/mysql/src/test/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLBinaryProtocolValueFactoryTest.java @@ -29,7 +29,7 @@ class MySQLBinaryProtocolValueFactoryTest { @Test void assertGetBinaryProtocolValueWithMySQLTypeString() { - assertThat(MySQLBinaryProtocolValueFactory.getBinaryProtocolValue(MySQLBinaryColumnType.STRING), instanceOf(MySQLStringLenencBinaryProtocolValue.class)); + assertThat(MySQLBinaryProtocolValueFactory.getBinaryProtocolValue(MySQLBinaryColumnType.STRING), instanceOf(MySQLByteLenencBinaryProtocolValue.class)); } @Test @@ -54,22 +54,22 @@ void assertGetBinaryProtocolValueWithMySQLTypeSet() { @Test void assertGetBinaryProtocolValueWithMySQLTypeLongBlob() { - assertThat(MySQLBinaryProtocolValueFactory.getBinaryProtocolValue(MySQLBinaryColumnType.LONG_BLOB), instanceOf(MySQLStringLenencBinaryProtocolValue.class)); + assertThat(MySQLBinaryProtocolValueFactory.getBinaryProtocolValue(MySQLBinaryColumnType.LONG_BLOB), instanceOf(MySQLByteLenencBinaryProtocolValue.class)); } 
@Test void assertGetBinaryProtocolValueWithMySQLTypeMediumBlob() { - assertThat(MySQLBinaryProtocolValueFactory.getBinaryProtocolValue(MySQLBinaryColumnType.MEDIUM_BLOB), instanceOf(MySQLStringLenencBinaryProtocolValue.class)); + assertThat(MySQLBinaryProtocolValueFactory.getBinaryProtocolValue(MySQLBinaryColumnType.MEDIUM_BLOB), instanceOf(MySQLByteLenencBinaryProtocolValue.class)); } @Test void assertGetBinaryProtocolValueWithMySQLTypeBlob() { - assertThat(MySQLBinaryProtocolValueFactory.getBinaryProtocolValue(MySQLBinaryColumnType.BLOB), instanceOf(MySQLStringLenencBinaryProtocolValue.class)); + assertThat(MySQLBinaryProtocolValueFactory.getBinaryProtocolValue(MySQLBinaryColumnType.BLOB), instanceOf(MySQLByteLenencBinaryProtocolValue.class)); } @Test void assertGetBinaryProtocolValueWithMySQLTypeTinyBlob() { - assertThat(MySQLBinaryProtocolValueFactory.getBinaryProtocolValue(MySQLBinaryColumnType.TINY_BLOB), instanceOf(MySQLStringLenencBinaryProtocolValue.class)); + assertThat(MySQLBinaryProtocolValueFactory.getBinaryProtocolValue(MySQLBinaryColumnType.TINY_BLOB), instanceOf(MySQLByteLenencBinaryProtocolValue.class)); } @Test diff --git a/db-protocol/mysql/src/test/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLByteLenencBinaryProtocolValueTest.java b/db-protocol/mysql/src/test/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLByteLenencBinaryProtocolValueTest.java new file mode 100644 index 0000000000000..6ee5d588ebfe3 --- /dev/null +++ b/db-protocol/mysql/src/test/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLByteLenencBinaryProtocolValueTest.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.db.protocol.mysql.packet.command.query.binary.execute.protocol; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.buffer.Unpooled; +import org.apache.shardingsphere.db.protocol.mysql.payload.MySQLPacketPayload; +import org.junit.jupiter.api.Test; + +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; + +class MySQLByteLenencBinaryProtocolValueTest { + + @Test + void assertRead() { + byte[] input = {0x0d, 0x0a, 0x33, 0x18, 0x01, 0x4a, 0x08, 0x0a, (byte) 0x9a, 0x01, 0x18, 0x01, 0x4a, 0x6f}; + byte[] expected = {0x0a, 0x33, 0x18, 0x01, 0x4a, 0x08, 0x0a, (byte) 0x9a, 0x01, 0x18, 0x01, 0x4a, 0x6f}; + ByteBuf byteBuf = Unpooled.wrappedBuffer(input); + MySQLPacketPayload payload = new MySQLPacketPayload(byteBuf, StandardCharsets.UTF_8); + byte[] actual = (byte[]) new MySQLByteLenencBinaryProtocolValue().read(payload, false); + assertThat(actual, is(expected)); + } + + @Test + void assertWrite() { + byte[] input = {0x0a, 0x33, 0x18, 0x01, 0x4a, 0x08, 0x0a, (byte) 0x9a, 0x01, 0x18, 0x01, 0x4a, 0x6f}; + byte[] expected = {0x0d, 0x0a, 0x33, 0x18, 0x01, 0x4a, 0x08, 0x0a, (byte) 0x9a, 0x01, 0x18, 0x01, 0x4a, 0x6f}; + ByteBuf actual = Unpooled.wrappedBuffer(new byte[expected.length]).writerIndex(0); + MySQLPacketPayload payload = new 
MySQLPacketPayload(actual, StandardCharsets.UTF_8); + new MySQLByteLenencBinaryProtocolValue().write(payload, input); + assertThat(ByteBufUtil.getBytes(actual), is(expected)); + } +} diff --git a/db-protocol/mysql/src/test/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLDateBinaryProtocolValueTest.java b/db-protocol/mysql/src/test/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLDateBinaryProtocolValueTest.java index 7e7769d5df44f..1d0bc9a0ece5f 100644 --- a/db-protocol/mysql/src/test/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLDateBinaryProtocolValueTest.java +++ b/db-protocol/mysql/src/test/java/org/apache/shardingsphere/db/protocol/mysql/packet/command/query/binary/execute/protocol/MySQLDateBinaryProtocolValueTest.java @@ -91,6 +91,29 @@ void assertReadWithIllegalArgument() { assertThrows(SQLFeatureNotSupportedException.class, () -> new MySQLDateBinaryProtocolValue().read(payload, false)); } + @Test + void assertWriteLocalDateTimeTypeFourBytes() { + MySQLDateBinaryProtocolValue actual = new MySQLDateBinaryProtocolValue(); + actual.write(payload, LocalDateTime.of(1970, 1, 14, 0, 0, 0)); + verify(payload).writeInt1(4); + verify(payload).writeInt2(1970); + verify(payload).writeInt1(1); + verify(payload).writeInt1(14); + } + + @Test + void assertWriteLocalDateTimeTypeSevenBytes() { + MySQLDateBinaryProtocolValue actual = new MySQLDateBinaryProtocolValue(); + actual.write(payload, LocalDateTime.of(1970, 1, 14, 12, 10, 30)); + verify(payload).writeInt1(7); + verify(payload).writeInt2(1970); + verify(payload).writeInt1(1); + verify(payload).writeInt1(14); + verify(payload).writeInt1(12); + verify(payload).writeInt1(10); + verify(payload).writeInt1(30); + } + @Test void assertWriteWithFourBytes() { MySQLDateBinaryProtocolValue actual = new MySQLDateBinaryProtocolValue(); diff --git 
a/db-protocol/mysql/src/test/java/org/apache/shardingsphere/db/protocol/mysql/packet/generic/MySQLErrPacketTest.java b/db-protocol/mysql/src/test/java/org/apache/shardingsphere/db/protocol/mysql/packet/generic/MySQLErrPacketTest.java index 83b17d126de47..5074e7aab56f9 100644 --- a/db-protocol/mysql/src/test/java/org/apache/shardingsphere/db/protocol/mysql/packet/generic/MySQLErrPacketTest.java +++ b/db-protocol/mysql/src/test/java/org/apache/shardingsphere/db/protocol/mysql/packet/generic/MySQLErrPacketTest.java @@ -17,13 +17,16 @@ package org.apache.shardingsphere.db.protocol.mysql.packet.generic; -import org.apache.shardingsphere.infra.exception.mysql.vendor.MySQLVendorError; import org.apache.shardingsphere.db.protocol.mysql.payload.MySQLPacketPayload; +import org.apache.shardingsphere.infra.exception.core.external.sql.sqlstate.XOpenSQLState; +import org.apache.shardingsphere.infra.exception.mysql.vendor.MySQLVendorError; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; +import java.sql.SQLException; + import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.mockito.Mockito.verify; @@ -36,11 +39,19 @@ class MySQLErrPacketTest { private MySQLPacketPayload payload; @Test - void assertNewErrPacketWithServerErrorCode() { - MySQLErrPacket actual = new MySQLErrPacket(MySQLVendorError.ER_ACCESS_DENIED_ERROR, "root", "localhost", "root"); - assertThat(actual.getErrorCode(), is(MySQLVendorError.ER_ACCESS_DENIED_ERROR.getVendorCode())); - assertThat(actual.getSqlState(), is(MySQLVendorError.ER_ACCESS_DENIED_ERROR.getSqlState().getValue())); - assertThat(actual.getErrorMessage(), is(String.format(MySQLVendorError.ER_ACCESS_DENIED_ERROR.getReason(), "root", "localhost", "root"))); + void assertNewErrPacketWithSQLException() { + MySQLErrPacket actual = new MySQLErrPacket(new SQLException("No reason", 
"FOO_STATE", 1)); + assertThat(actual.getErrorCode(), is(1)); + assertThat(actual.getSqlState(), is("FOO_STATE")); + assertThat(actual.getErrorMessage(), is("No reason")); + } + + @Test + void assertNewErrPacketWithVendorError() { + MySQLErrPacket actual = new MySQLErrPacket(MySQLVendorError.ER_INTERNAL_ERROR, "No reason"); + assertThat(actual.getErrorCode(), is(1815)); + assertThat(actual.getSqlState(), is(XOpenSQLState.GENERAL_ERROR.getValue())); + assertThat(actual.getErrorMessage(), is("Internal error: No reason")); } @Test @@ -58,7 +69,8 @@ void assertNewErrPacketWithPayload() { @Test void assertWrite() { - new MySQLErrPacket(MySQLVendorError.ER_NO_DB_ERROR).write(payload); + new MySQLErrPacket(new SQLException(MySQLVendorError.ER_NO_DB_ERROR.getReason(), + MySQLVendorError.ER_NO_DB_ERROR.getSqlState().getValue(), MySQLVendorError.ER_NO_DB_ERROR.getVendorCode())).write(payload); verify(payload).writeInt1(MySQLErrPacket.HEADER); verify(payload).writeInt2(1046); verify(payload).writeStringFix("#"); diff --git a/db-protocol/postgresql/src/main/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/extended/bind/protocol/text/impl/PostgreSQLBitValueParser.java b/db-protocol/postgresql/src/main/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/extended/bind/protocol/text/impl/PostgreSQLBitValueParser.java index 9db7afc6c8c8d..b60d225b79bb1 100644 --- a/db-protocol/postgresql/src/main/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/extended/bind/protocol/text/impl/PostgreSQLBitValueParser.java +++ b/db-protocol/postgresql/src/main/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/extended/bind/protocol/text/impl/PostgreSQLBitValueParser.java @@ -35,7 +35,7 @@ public PGobject parse(final String value) { result.setType("bit"); result.setValue(value); return result; - } catch (SQLException ex) { + } catch (final SQLException ex) { throw new SQLWrapperException(ex); } } diff 
--git a/db-protocol/postgresql/src/test/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/PostgreSQLDataRowPacketTest.java b/db-protocol/postgresql/src/test/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/PostgreSQLDataRowPacketTest.java index b03f99b72c2f7..012a0ddf4751d 100644 --- a/db-protocol/postgresql/src/test/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/PostgreSQLDataRowPacketTest.java +++ b/db-protocol/postgresql/src/test/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/PostgreSQLDataRowPacketTest.java @@ -59,14 +59,14 @@ void setup() { @Test void assertWriteWithNull() { - PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singletonList(null)); + PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singleton(null)); actual.write(payload); verify(payload).writeInt4(0xFFFFFFFF); } @Test void assertWriteWithBytes() { - PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singletonList(new byte[]{'a'})); + PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singleton(new byte[]{'a'})); actual.write(payload); verify(payload).writeInt4(new byte[]{'a'}.length); verify(payload).writeBytes(new byte[]{'a'}); @@ -75,7 +75,7 @@ void assertWriteWithBytes() { @Test void assertWriteWithSQLXML() throws SQLException { when(sqlxml.getString()).thenReturn("value"); - PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singletonList(sqlxml)); + PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singleton(sqlxml)); actual.write(payload); byte[] valueBytes = "value".getBytes(StandardCharsets.UTF_8); verify(payload).writeInt4(valueBytes.length); @@ -84,8 +84,8 @@ void assertWriteWithSQLXML() throws SQLException { @Test void assertWriteWithString() { - PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singletonList("value")); - 
assertThat(actual.getData(), is(Collections.singletonList("value"))); + PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singleton("value")); + assertThat(actual.getData(), is(Collections.singleton("value"))); actual.write(payload); byte[] valueBytes = "value".getBytes(StandardCharsets.UTF_8); verify(payload).writeInt4(valueBytes.length); @@ -95,14 +95,14 @@ void assertWriteWithString() { @Test void assertWriteWithSQLXML4Error() throws SQLException { when(sqlxml.getString()).thenThrow(new SQLException("mock")); - PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singletonList(sqlxml)); + PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singleton(sqlxml)); assertThrows(RuntimeException.class, () -> actual.write(payload)); verify(payload, times(0)).writeStringEOF(any()); } @Test void assertWriteBinaryNull() { - PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singletonList(new BinaryCell(PostgreSQLColumnType.INT4, null))); + PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singleton(new BinaryCell(PostgreSQLColumnType.INT4, null))); actual.write(payload); verify(payload).writeInt2(1); verify(payload).writeInt4(0xFFFFFFFF); @@ -111,7 +111,7 @@ void assertWriteBinaryNull() { @Test void assertWriteBinaryInt4() { final int value = 12345678; - PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singletonList(new BinaryCell(PostgreSQLColumnType.INT4, value))); + PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singleton(new BinaryCell(PostgreSQLColumnType.INT4, value))); actual.write(payload); verify(payload).writeInt2(1); verify(payload).writeInt4(4); diff --git a/db-protocol/postgresql/src/test/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/PostgreSQLRowDescriptionPacketTest.java 
b/db-protocol/postgresql/src/test/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/PostgreSQLRowDescriptionPacketTest.java index e11ce624ac057..85647e8186469 100644 --- a/db-protocol/postgresql/src/test/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/PostgreSQLRowDescriptionPacketTest.java +++ b/db-protocol/postgresql/src/test/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/PostgreSQLRowDescriptionPacketTest.java @@ -41,7 +41,7 @@ class PostgreSQLRowDescriptionPacketTest { @Test void assertWrite() { PostgreSQLColumnDescription description = new PostgreSQLColumnDescription("name", 1, Types.VARCHAR, 4, null); - PostgreSQLRowDescriptionPacket packet = new PostgreSQLRowDescriptionPacket(Collections.singletonList(description)); + PostgreSQLRowDescriptionPacket packet = new PostgreSQLRowDescriptionPacket(Collections.singleton(description)); packet.write(payload); verify(payload, times(2)).writeInt2(1); verify(payload).writeStringNul("name"); diff --git a/distribution/agent/src/main/release-docs/README.txt b/distribution/agent/src/main/release-docs/README.txt index ac17466e43a0a..99c47867db3b2 100644 --- a/distribution/agent/src/main/release-docs/README.txt +++ b/distribution/agent/src/main/release-docs/README.txt @@ -1,7 +1,7 @@ Welcome to Apache ShardingSphere Agent =============================================================================== -Apache ShardingSphere Agent is used to collect relevant monitoring data generated when Apache ShardingSphere is running, including Loggin, Metrics, and Tracing. +Apache ShardingSphere Agent is used to collect relevant monitoring data generated when Apache ShardingSphere is running, including Logging, Metrics, and Tracing. - `Logging` The log can be easily expanded through the agent to provide more information for analyzing the system running status. 
- `Metrics` System statistical indicators are collected through probes for display by third-party applications. diff --git a/distribution/agent/src/main/resources/conf/agent.yaml b/distribution/agent/src/main/resources/conf/agent.yaml index 6c71cf58852e6..926a8a019619c 100644 --- a/distribution/agent/src/main/resources/conf/agent.yaml +++ b/distribution/agent/src/main/resources/conf/agent.yaml @@ -18,8 +18,6 @@ plugins: # logging: # File: -# props: -# level: "INFO" # metrics: # Prometheus: # host: "localhost" diff --git a/distribution/proxy-native/Dockerfile b/distribution/proxy-native/Dockerfile index 4578fd982d1ea..a65a1b219d32f 100644 --- a/distribution/proxy-native/Dockerfile +++ b/distribution/proxy-native/Dockerfile @@ -27,13 +27,6 @@ MAINTAINER ShardingSphere "dev@shardingsphere.apache.org" ARG NATIVE_IMAGE_NAME ENV LOCAL_PATH /opt/shardingsphere-proxy-native -ENV JAVA_HOME "/opt/graalvm-ce-java17-22.3.1" -ENV PATH "$JAVA_HOME/bin:$PATH" - -RUN microdnf install gzip -y && \ - bash <(curl -sL https://get.graalvm.org/jdk) --to "/opt" -c espresso graalvm-ce-java17-22.3.1 && \ - $JAVA_HOME/bin/gu remove native-image && \ - microdnf clean all COPY --from=prepare ${LOCAL_PATH} ${LOCAL_PATH} ENTRYPOINT ${LOCAL_PATH}/${NATIVE_IMAGE_NAME} 3307 ${LOCAL_PATH}/conf "0.0.0.0" false diff --git a/distribution/proxy-native/access-filter.json b/distribution/proxy-native/access-filter.json index 64bb113557fa1..a5c58ae341dcd 100644 --- a/distribution/proxy-native/access-filter.json +++ b/distribution/proxy-native/access-filter.json @@ -1,14 +1,17 @@ { "rules": [ {"includeClasses": "**"}, - {"excludeClasses": "org.junit.platform.launcher.**"}, - {"excludeClasses": "org.mariadb.jdbc.**"}, + {"excludeClasses": "com.atomikos.jdbc.**"}, + {"excludeClasses": "com.mchange.v2.c3p0.**"}, {"excludeClasses": "com.mysql.jdbc.**"}, + {"excludeClasses": "com.zaxxer.hikari.**"}, {"excludeClasses": "org.apache.commons.dbcp2.**"}, - {"excludeClasses": 
"org.apache.shardingsphere.test.fixture.jdbc.**"}, - {"excludeClasses": "org.apache.shardingsphere.data.pipeline.core.sqlbuilder.H2PipelineSQLBuilder"} + {"excludeClasses": "org.junit.platform.launcher.**"}, + {"excludeClasses": "org.mariadb.jdbc.**"}, + {"excludeClasses": "org.apache.shardingsphere.sharding.UndefinedClass"} ], "regexRules": [ - {"excludeClasses": ".*Fixture*.*"} + {"excludeClasses": ".*Fixture*.*"}, + {"excludeClasses": ".*Mocked*.*"} ] } diff --git a/distribution/proxy-native/pom.xml b/distribution/proxy-native/pom.xml index 9ef6fa01c7fef..1820bec16247c 100644 --- a/distribution/proxy-native/pom.xml +++ b/distribution/proxy-native/pom.xml @@ -37,6 +37,12 @@ org.apache.shardingsphere shardingsphere-proxy-bootstrap ${project.version} + + + org.apache.shardingsphere + shardingsphere-cluster-mode-repository-etcd + + @@ -62,13 +68,17 @@ logback-classic runtime + + com.h2database + h2 + runtime + release.native - 22.3.1 true true true @@ -85,7 +95,6 @@ 17 17 - --enable-preview @@ -97,12 +106,9 @@ false true - --language:java --report-unsupported-elements-at-runtime + -J-Xmx7g - - --enable-preview - true diff --git a/distribution/proxy-native/src/main/release-docs/LICENSE b/distribution/proxy-native/src/main/release-docs/LICENSE index 42c4b5ada830b..f41740785cd91 100644 --- a/distribution/proxy-native/src/main/release-docs/LICENSE +++ b/distribution/proxy-native/src/main/release-docs/LICENSE @@ -218,37 +218,35 @@ The text of each license is the standard Apache 2.0 license. 
accessors-smart 2.4.7: https://www.minidev.net/, Apache 2.0 aggdesigner-algorithm 6.0: Apache 2.0 apiguardian-api 1.1.2: https://github.com/apiguardian-team/apiguardian, Apache 2.0 - audience-annotations 0.5.0: https://github.com/apache/yetus, Apache 2.0 - avatica-core 1.22.0: https://calcite.apache.org/avatica, Apache 2.0 - avatica-metrics 1.22.0: https://calcite.apache.org/avatica, Apache 2.0 - auto-service-annotations 1.0: https://github.com/google/auto/tree/master/service, Apache 2.0 + audience-annotations 0.12.0: https://github.com/apache/yetus, Apache 2.0 + avatica-core 1.23.0: https://calcite.apache.org/avatica, Apache 2.0 + avatica-metrics 1.23.0: https://calcite.apache.org/avatica, Apache 2.0 caffeine 2.9.3: https://github.com/ben-manes/caffeine, Apache 2.0 - calcite-core 1.32.0: https://calcite.apache.org, Apache 2.0 - calcite-linq4j 1.32.0: https://calcite.apache.org, Apache 2.0 + calcite-core 1.35.0: https://calcite.apache.org, Apache 2.0 + calcite-linq4j 1.35.0: https://calcite.apache.org, Apache 2.0 commons-codec 1.15: https://github.com/apache/commons-codec, Apache 2.0 commons-collections4 4.4: https://github.com/apache/commons-collections, Apache 2.0 commons-exec 1.3: https://github.com/apache/commons-exec, Apache 2.0 commons-io 2.11.0: https://github.com/apache/commons-io, Apache 2.0 - commons-lang 2.6: https://github.com/apache/commons-lang, Apache 2.0 + commons-lang 2.4: https://github.com/apache/commons-lang, Apache 2.0 commons-lang3 3.12.0: https://github.com/apache/commons-lang, Apache 2.0 commons-logging 1.1.3: https://github.com/apache/commons-logging, Apache 2.0 - curator-client 5.3.0: https://github.com/apache/curator, Apache 2.0 - curator-framework 5.3.0: https://github.com/apache/curator, Apache 2.0 - curator-recipes 5.3.0: https://github.com/apache/curator, Apache 2.0 - cosid-core 1.14.4: https://github.com/Ahoo-Wang/CosId, Apache 2.0 - error_prone_annotations 2.3.4: https://github.com/google/error-prone, Apache 2.0 - failsafe 2.4.1: 
https://github.com/jhalterman/failsafe, Apache 2.0 - failureaccess 1.0.1: https://github.com/google/guava, Apache 2.0 + curator-client 5.4.0: https://github.com/apache/curator, Apache 2.0 + curator-framework 5.4.0: https://github.com/apache/curator, Apache 2.0 + curator-recipes 5.4.0: https://github.com/apache/curator, Apache 2.0 + error_prone_annotations 2.11.0: https://github.com/google/error-prone, Apache 2.0 + failsafe 2.4.4: https://github.com/jhalterman/failsafe, Apache 2.0 + failureaccess 1.0.1: https://github.com/google/guava, Apache 2.0 freemarker 2.3.31: https://freemarker.apache.org/, Apache 2.0 groovy 4.0.10: https://groovy.apache.org/, Apache 2.0 - grpc-api 1.48.0: https://github.com/grpc/grpc-java, Apache 2.0 - grpc-context 1.48.0: https://github.com/grpc/grpc-java, Apache 2.0 - grpc-core 1.48.0: https://github.com/grpc/grpc-java, Apache 2.0 - grpc-grpclb 1.48.0: https://github.com/grpc/grpc-java, Apache 2.0 - grpc-netty 1.48.0: https://github.com/grpc/grpc-java, Apache 2.0 - grpc-protobuf 1.48.0: https://github.com/grpc/grpc-java, Apache 2.0 - grpc-protobuf-lite 1.48.0: https://github.com/grpc/grpc-java, Apache 2.0 - grpc-stub 1.48.0: https://github.com/grpc/grpc-java, Apache 2.0 + grpc-api 1.51.0: https://github.com/grpc/grpc-java, Apache 2.0 + grpc-context 1.51.0: https://github.com/grpc/grpc-java, Apache 2.0 + grpc-core 1.51.0: https://github.com/grpc/grpc-java, Apache 2.0 + grpc-grpclb 1.51.0: https://github.com/grpc/grpc-java, Apache 2.0 + grpc-netty 1.51.0: https://github.com/grpc/grpc-java, Apache 2.0 + grpc-protobuf 1.51.0: https://github.com/grpc/grpc-java, Apache 2.0 + grpc-protobuf-lite 1.51.0: https://github.com/grpc/grpc-java, Apache 2.0 + grpc-stub 1.51.0: https://github.com/grpc/grpc-java, Apache 2.0 gson 2.9.1: https://github.com/google/gson, Apache 2.0 guava 30.0-jre: https://github.com/google/guava, Apache 2.0 HikariCP 4.0.3: https://github.com/brettwooldridge/HikariCP, Apache 2.0 @@ -259,6 +257,7 @@ The text of each license is the 
standard Apache 2.0 license. jackson-core 2.14.0: http://github.com/FasterXML/jackson, Apache 2.0 jackson-databind 2.14.0: http://github.com/FasterXML/jackson, Apache 2.0 jackson-dataformat-yaml 2.14.0: http://github.com/FasterXML/jackson, Apache 2.0 + jackson-datatype-jsr310 2.14.0: http://github.com/FasterXML/jackson, Apache 2.0 jcl-over-slf4j 1.7.36: https://github.com/qos-ch/slf4j, Apache 2.0 jetcd-api 0.7.5: https://github.com/etcd-io/jetcd, Apache 2.0 jetcd-common 0.7.5: https://github.com/etcd-io/jetcd, Apache 2.0 @@ -266,9 +265,8 @@ The text of each license is the standard Apache 2.0 license. jetcd-grpc 0.7.5: https://github.com/etcd-io/jetcd, Apache 2.0 json-path 2.7.0: https://github.com/jayway/JsonPath, Apache 2.0 json-smart 2.4.7: https://www.minidev.net/, Apache 2.0 - json-simple 1.1.1: https://code.google.com/archive/p/json-simple/, Apache 2.0 + json-simple 1.1.1: https://code.google.com/archive/p/json-simple/, Apache 2.0 jsr305 3.0.2: http://findbugs.sourceforge.net/, Apache 2.0 - log4j 1.2.17: http://logging.apache.org/log4j/1.2/, Apache 2.0 memory 0.9.0, Apache 2.0 netty-buffer 4.1.90.Final: https://github.com/netty, Apache 2.0 netty-codec 4.1.90.Final: https://github.com/netty, Apache 2.0 @@ -279,13 +277,20 @@ The text of each license is the standard Apache 2.0 license. 
netty-handler 4.1.90.Final: https://github.com/netty, Apache 2.0 netty-handler-proxy 4.1.90.Final: https://github.com/netty, Apache 2.0 netty-resolver 4.1.90.Final: https://github.com/netty, Apache 2.0 + netty-tcnative-boringssl-static 2.0.59.Final: https://github.com/netty/netty-tcnative, Apache 2.0 + netty-tcnative-boringssl-static 2.0.59.Final-linux-aarch_64: https://github.com/netty/netty-tcnative, Apache 2.0 + netty-tcnative-boringssl-static 2.0.59.Final-linux-x86_64: https://github.com/netty/netty-tcnative, Apache 2.0 + netty-tcnative-boringssl-static 2.0.59.Final-osx-aarch_64: https://github.com/netty/netty-tcnative, Apache 2.0 + netty-tcnative-boringssl-static 2.0.59.Final-osx-x86_64: https://github.com/netty/netty-tcnative, Apache 2.0 + netty-tcnative-boringssl-static 2.0.59.Final-windows-x86_64: https://github.com/netty/netty-tcnative, Apache 2.0 + netty-tcnative-classes 2.0.59.Final: https://github.com/netty/netty-tcnative, Apache 2.0 netty-transport 4.1.90.Final: https://github.com/netty, Apache 2.0 netty-transport-classes-epoll 4.1.90.Final: https://github.com/netty, Apache 2.0 netty-transport-native-epoll 4.1.90.Final-linux-aarch_64: https://github.com/netty, Apache 2.0 netty-transport-native-epoll 4.1.90.Final-linux-x86_64: https://github.com/netty, Apache 2.0 netty-transport-native-unix-common 4.1.90.Final: https://github.com/netty, Apache 2.0 - perfmark-api 0.23.0: https://github.com/perfmark/perfmark, Apache 2.0 - proto-google-common-protos 2.0.1: https://github.com/googleapis/common-protos-java, Apache 2.0 + perfmark-api 0.25.0: https://github.com/perfmark/perfmark, Apache 2.0 + proto-google-common-protos 2.9.0: https://github.com/googleapis/common-protos-java, Apache 2.0 proj4j 1.1.5: https://github.com/locationtech/proj4j, Apache 2.0 quartz 2.3.2: https://github.com/quartz-scheduler/quartz, Apache 2.0 sketches-core 0.9.0, Apache 2.0 @@ -307,7 +312,7 @@ The text of each license is also included at licenses/LICENSE-[project].txt. 
transactions-api 6.0.0: https://www.atomikos.com, Apache 2.0 transactions-jdbc 6.0.0: https://www.atomikos.com, Apache 2.0 transactions-jta 6.0.0: https://www.atomikos.com, Apache 2.0 - + ======================================================================== BSD licenses ======================================================================== @@ -318,7 +323,7 @@ The text of each license is also included at licenses/LICENSE-[project].txt. antlr4-runtime 4.10.1: https://github.com/antlr/antlr4, BSD-3-Clause asm 9.1: https://github.com/llbit/ow2-asm, BSD-3-Clause commons-compiler 3.1.8: https://github.com/janino-compiler/janino, BSD-3-Clause - janino 3.1.8: https://github.com/janino-compiler/janino, BSD-3-Clause + janino 3.1.9: https://github.com/janino-compiler/janino, BSD-3-Clause opengauss-jdbc 3.1.0-og: https://gitee.com/opengauss/openGauss-connector-jdbc, BSD-2-Clause postgresql 42.4.1: https://github.com/pgjdbc/pgjdbc, BSD-2-Clause protobuf-java 3.21.12: https://github.com/protocolbuffers/protobuf/blob/master/java, BSD-3-Clause @@ -333,7 +338,11 @@ CDDL licenses The following components are provided under the CDDL License. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt. + javax.activation-api 1.2.0: https://github.com/javaee/javax.annotation, CDDL jta 1.1: http://jta-spec.java.net, CDDL + jaxb-api 2.3.0: http://www.oracle.com, CDDL + jaxb-core 2.3.0: http://www.oracle.com, CDDL + jaxb-impl 2.3.0: http://www.oracle.com, CDDL ======================================================================== EPL licenses @@ -354,18 +363,10 @@ MIT licenses The following components are provided under the MIT License. See project link for details. The text of each license is also included at licenses/LICENSE-[project].txt.
- animal-sniffer-annotations 1.19: https://github.com/mojohaus/animal-sniffer, MIT + bcpkix-jdk15on 1.70: https://www.bouncycastle.org, MIT bcprov-jdk15on 1.70: https://www.bouncycastle.org, MIT + bctls-jdk15on 1.70: https://www.bouncycastle.org, MIT + bcutil-jdk15on 1.70: https://www.bouncycastle.org, MIT checker-qual 3.5.0: https://github.com/typetools/checker-framework/blob/master/checker-qual, MIT jul-to-slf4j 1.7.36: https://www.slf4j.org, MIT slf4j-api 1.7.36: https://www.slf4j.org, MIT - jnanoid 2.0.0: https://github.com/aventrix/jnanoid, MIT - -======================================================================== -UPL licenses -======================================================================== - -The following components are provided under the UPL License. See project link for details. -The text of each license is also included at licenses/LICENSE-[project].txt. - - truffle-api 22.3.1: http://www.graalvm.org/, UPL diff --git a/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-animal-sniffer.txt b/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-animal-sniffer.txt deleted file mode 100644 index fa89490e3d6db..0000000000000 --- a/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-animal-sniffer.txt +++ /dev/null @@ -1,24 +0,0 @@ -Animal-Sniffer License ---------------- - -The MIT License - -Copyright (c) 2009 codehaus.org. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-bcprov-jdk15on.txt b/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-bcprov-jdk15on.txt deleted file mode 100644 index d14347eced436..0000000000000 --- a/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-bcprov-jdk15on.txt +++ /dev/null @@ -1,24 +0,0 @@ -Bcprov-jdk15on License ---------------- - -Copyright (c) 2000 - 2021 The Legion of the Bouncy Castle Inc. (https://www.bouncycastle.org) - -MIT License: - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-bouncy-castle.txt b/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-bouncy-castle.txt new file mode 100644 index 0000000000000..d9faf7181dd15 --- /dev/null +++ b/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-bouncy-castle.txt @@ -0,0 +1,15 @@ +Bouncy Castle License +--------------- + +Please note this should be read in the same way as the MIT license. + +Please also note this licensing model is made possible through funding from donations and the sale of support contracts. + +LICENSE +Copyright (c) 2000 - 2023 The Legion of the Bouncy Castle Inc. (https://www.bouncycastle.org) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-javax.annotation-api.txt b/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-javax.annotation-api.txt new file mode 100644 index 0000000000000..b1c74f95ede8b --- /dev/null +++ b/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-javax.annotation-api.txt @@ -0,0 +1,759 @@ +COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.1 + +1. Definitions. + + 1.1. "Contributor" means each individual or entity that creates or + contributes to the creation of Modifications. + + 1.2. "Contributor Version" means the combination of the Original + Software, prior Modifications used by a Contributor (if any), and + the Modifications made by that particular Contributor. + + 1.3. "Covered Software" means (a) the Original Software, or (b) + Modifications, or (c) the combination of files containing Original + Software with files containing Modifications, in each case including + portions thereof. + + 1.4. "Executable" means the Covered Software in any form other than + Source Code. + + 1.5. "Initial Developer" means the individual or entity that first + makes Original Software available under this License. + + 1.6. "Larger Work" means a work which combines Covered Software or + portions thereof with code not governed by the terms of this License. + + 1.7. "License" means this document. + + 1.8. "Licensable" means having the right to grant, to the maximum + extent possible, whether at the time of the initial grant or + subsequently acquired, any and all of the rights conveyed herein. + + 1.9. "Modifications" means the Source Code and Executable form of + any of the following: + + A. Any file that results from an addition to, deletion from or + modification of the contents of a file containing Original Software + or previous Modifications; + + B. Any new file that contains any part of the Original Software or + previous Modification; or + + C. 
Any new file that is contributed or otherwise made available + under the terms of this License. + + 1.10. "Original Software" means the Source Code and Executable form + of computer software code that is originally released under this + License. + + 1.11. "Patent Claims" means any patent claim(s), now owned or + hereafter acquired, including without limitation, method, process, + and apparatus claims, in any patent Licensable by grantor. + + 1.12. "Source Code" means (a) the common form of computer software + code in which modifications are made and (b) associated + documentation included in or with such code. + + 1.13. "You" (or "Your") means an individual or a legal entity + exercising rights under, and complying with all of the terms of, + this License. For legal entities, "You" includes any entity which + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants. + + 2.1. The Initial Developer Grant. + + Conditioned upon Your compliance with Section 3.1 below and subject + to third party intellectual property claims, the Initial Developer + hereby grants You a world-wide, royalty-free, non-exclusive license: + + (a) under intellectual property rights (other than patent or + trademark) Licensable by Initial Developer, to use, reproduce, + modify, display, perform, sublicense and distribute the Original + Software (or portions thereof), with or without Modifications, + and/or as part of a Larger Work; and + + (b) under Patent Claims infringed by the making, using or selling of + Original Software, to make, have made, use, practice, sell, and + offer for sale, and/or otherwise dispose of the Original Software + (or portions thereof). 
+ + (c) The licenses granted in Sections 2.1(a) and (b) are effective on + the date Initial Developer first distributes or otherwise makes the + Original Software available to a third party under the terms of this + License. + + (d) Notwithstanding Section 2.1(b) above, no patent license is + granted: (1) for code that You delete from the Original Software, or + (2) for infringements caused by: (i) the modification of the + Original Software, or (ii) the combination of the Original Software + with other software or devices. + + 2.2. Contributor Grant. + + Conditioned upon Your compliance with Section 3.1 below and subject + to third party intellectual property claims, each Contributor hereby + grants You a world-wide, royalty-free, non-exclusive license: + + (a) under intellectual property rights (other than patent or + trademark) Licensable by Contributor to use, reproduce, modify, + display, perform, sublicense and distribute the Modifications + created by such Contributor (or portions thereof), either on an + unmodified basis, with other Modifications, as Covered Software + and/or as part of a Larger Work; and + + (b) under Patent Claims infringed by the making, using, or selling + of Modifications made by that Contributor either alone and/or in + combination with its Contributor Version (or portions of such + combination), to make, use, sell, offer for sale, have made, and/or + otherwise dispose of: (1) Modifications made by that Contributor (or + portions thereof); and (2) the combination of Modifications made by + that Contributor with its Contributor Version (or portions of such + combination). + + (c) The licenses granted in Sections 2.2(a) and 2.2(b) are effective + on the date Contributor first distributes or otherwise makes the + Modifications available to a third party. 
+ + (d) Notwithstanding Section 2.2(b) above, no patent license is + granted: (1) for any code that Contributor has deleted from the + Contributor Version; (2) for infringements caused by: (i) third + party modifications of Contributor Version, or (ii) the combination + of Modifications made by that Contributor with other software + (except as part of the Contributor Version) or other devices; or (3) + under Patent Claims infringed by Covered Software in the absence of + Modifications made by that Contributor. + +3. Distribution Obligations. + + 3.1. Availability of Source Code. + + Any Covered Software that You distribute or otherwise make available + in Executable form must also be made available in Source Code form + and that Source Code form must be distributed only under the terms + of this License. You must include a copy of this License with every + copy of the Source Code form of the Covered Software You distribute + or otherwise make available. You must inform recipients of any such + Covered Software in Executable form as to how they can obtain such + Covered Software in Source Code form in a reasonable manner on or + through a medium customarily used for software exchange. + + 3.2. Modifications. + + The Modifications that You create or to which You contribute are + governed by the terms of this License. You represent that You + believe Your Modifications are Your original creation(s) and/or You + have sufficient rights to grant the rights conveyed by this License. + + 3.3. Required Notices. + + You must include a notice in each of Your Modifications that + identifies You as the Contributor of the Modification. You may not + remove or alter any copyright, patent or trademark notices contained + within the Covered Software, or any notices of licensing or any + descriptive text giving attribution to any Contributor or the + Initial Developer. + + 3.4. Application of Additional Terms. 
+ + You may not offer or impose any terms on any Covered Software in + Source Code form that alters or restricts the applicable version of + this License or the recipients' rights hereunder. You may choose to + offer, and to charge a fee for, warranty, support, indemnity or + liability obligations to one or more recipients of Covered Software. + However, you may do so only on Your own behalf, and not on behalf of + the Initial Developer or any Contributor. You must make it + absolutely clear that any such warranty, support, indemnity or + liability obligation is offered by You alone, and You hereby agree + to indemnify the Initial Developer and every Contributor for any + liability incurred by the Initial Developer or such Contributor as a + result of warranty, support, indemnity or liability terms You offer. + + 3.5. Distribution of Executable Versions. + + You may distribute the Executable form of the Covered Software under + the terms of this License or under the terms of a license of Your + choice, which may contain terms different from this License, + provided that You are in compliance with the terms of this License + and that the license for the Executable form does not attempt to + limit or alter the recipient's rights in the Source Code form from + the rights set forth in this License. If You distribute the Covered + Software in Executable form under a different license, You must make + it absolutely clear that any terms which differ from this License + are offered by You alone, not by the Initial Developer or + Contributor. You hereby agree to indemnify the Initial Developer and + every Contributor for any liability incurred by the Initial + Developer or such Contributor as a result of any such terms You offer. + + 3.6. Larger Works. + + You may create a Larger Work by combining Covered Software with + other code not governed by the terms of this License and distribute + the Larger Work as a single product. 
In such a case, You must make + sure the requirements of this License are fulfilled for the Covered + Software. + +4. Versions of the License. + + 4.1. New Versions. + + Oracle is the initial license steward and may publish revised and/or + new versions of this License from time to time. Each version will be + given a distinguishing version number. Except as provided in Section + 4.3, no one other than the license steward has the right to modify + this License. + + 4.2. Effect of New Versions. + + You may always continue to use, distribute or otherwise make the + Covered Software available under the terms of the version of the + License under which You originally received the Covered Software. If + the Initial Developer includes a notice in the Original Software + prohibiting it from being distributed or otherwise made available + under any subsequent version of the License, You must distribute and + make the Covered Software available under the terms of the version + of the License under which You originally received the Covered + Software. Otherwise, You may also choose to use, distribute or + otherwise make the Covered Software available under the terms of any + subsequent version of the License published by the license steward. + + 4.3. Modified Versions. + + When You are an Initial Developer and You want to create a new + license for Your Original Software, You may create and use a + modified version of this License if You: (a) rename the license and + remove any references to the name of the license steward (except to + note that the license differs from this License); and (b) otherwise + make it clear that the license contains terms which differ from this + License. + +5. DISCLAIMER OF WARRANTY. 
+ + COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS, + WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, + INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE + IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR + NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF + THE COVERED SOFTWARE IS WITH YOU. SHOULD ANY COVERED SOFTWARE PROVE + DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY + OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, + REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN + ESSENTIAL PART OF THIS LICENSE. NO USE OF ANY COVERED SOFTWARE IS + AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. + +6. TERMINATION. + + 6.1. This License and the rights granted hereunder will terminate + automatically if You fail to comply with terms herein and fail to + cure such breach within 30 days of becoming aware of the breach. + Provisions which, by their nature, must remain in effect beyond the + termination of this License shall survive. + + 6.2. 
If You assert a patent infringement claim (excluding + declaratory judgment actions) against Initial Developer or a + Contributor (the Initial Developer or Contributor against whom You + assert such claim is referred to as "Participant") alleging that the + Participant Software (meaning the Contributor Version where the + Participant is a Contributor or the Original Software where the + Participant is the Initial Developer) directly or indirectly + infringes any patent, then any and all rights granted directly or + indirectly to You by such Participant, the Initial Developer (if the + Initial Developer is not the Participant) and all Contributors under + Sections 2.1 and/or 2.2 of this License shall, upon 60 days notice + from Participant terminate prospectively and automatically at the + expiration of such 60 day notice period, unless if within such 60 + day period You withdraw Your claim with respect to the Participant + Software against such Participant either unilaterally or pursuant to + a written agreement with Participant. + + 6.3. If You assert a patent infringement claim against Participant + alleging that the Participant Software directly or indirectly + infringes any patent where such claim is resolved (such as by + license or settlement) prior to the initiation of patent + infringement litigation, then the reasonable value of the licenses + granted by such Participant under Sections 2.1 or 2.2 shall be taken + into account in determining the amount or value of any payment or + license. + + 6.4. In the event of termination under Sections 6.1 or 6.2 above, + all end user licenses that have been validly granted by You or any + distributor hereunder prior to termination (excluding licenses + granted to You by any distributor) shall survive termination. + +7. LIMITATION OF LIABILITY. 
+ + UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT + (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE + INITIAL DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF + COVERED SOFTWARE, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE + TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR + CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT + LIMITATION, DAMAGES FOR LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER + FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR + LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE + POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF LIABILITY SHALL NOT + APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY RESULTING FROM SUCH + PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW PROHIBITS SUCH + LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR + LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION + AND LIMITATION MAY NOT APPLY TO YOU. + +8. U.S. GOVERNMENT END USERS. + + The Covered Software is a "commercial item," as that term is defined + in 48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer + software" (as that term is defined at 48 C.F.R. § + 252.227-7014(a)(1)) and "commercial computer software documentation" + as such terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent + with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 + (June 1995), all U.S. Government End Users acquire Covered Software + with only those rights set forth herein. This U.S. Government Rights + clause is in lieu of, and supersedes, any other FAR, DFAR, or other + clause or provision that addresses Government rights in computer + software under this License. + +9. MISCELLANEOUS. + + This License represents the complete agreement concerning subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. 
This License shall be governed by + the law of the jurisdiction specified in a notice contained within + the Original Software (except to the extent applicable law, if any, + provides otherwise), excluding such jurisdiction's conflict-of-law + provisions. Any litigation relating to this License shall be subject + to the jurisdiction of the courts located in the jurisdiction and + venue specified in a notice contained within the Original Software, + with the losing party responsible for costs, including, without + limitation, court costs and reasonable attorneys' fees and expenses. + The application of the United Nations Convention on Contracts for + the International Sale of Goods is expressly excluded. Any law or + regulation which provides that the language of a contract shall be + construed against the drafter shall not apply to this License. You + agree that You alone are responsible for compliance with the United + States export administration regulations (and the export control + laws and regulation of any other countries) when You use, distribute + or otherwise make available any Covered Software. + +10. RESPONSIBILITY FOR CLAIMS. + + As between Initial Developer and the Contributors, each party is + responsible for claims and damages arising, directly or indirectly, + out of its utilization of rights under this License and You agree to + work with Initial Developer and Contributors to distribute such + responsibility on an equitable basis. Nothing herein is intended or + shall be deemed to constitute any admission of liability. + +------------------------------------------------------------------------ + +NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND DISTRIBUTION +LICENSE (CDDL) + +The code released under the CDDL shall be governed by the laws of the +State of California (excluding conflict-of-law provisions). 
Any +litigation relating to this License shall be subject to the jurisdiction +of the Federal Courts of the Northern District of California and the +state courts of the State of California, with venue lying in Santa Clara +County, California. + + + + The GNU General Public License (GPL) Version 2, June 1991 + +Copyright (C) 1989, 1991 Free Software Foundation, Inc. +51 Franklin Street, Fifth Floor +Boston, MA 02110-1335 +USA + +Everyone is permitted to copy and distribute verbatim copies +of this license document, but changing it is not allowed. + +Preamble + +The licenses for most software are designed to take away your freedom to +share and change it. By contrast, the GNU General Public License is +intended to guarantee your freedom to share and change free software--to +make sure the software is free for all its users. This General Public +License applies to most of the Free Software Foundation's software and +to any other program whose authors commit to using it. (Some other Free +Software Foundation software is covered by the GNU Library General +Public License instead.) You can apply it to your programs, too. + +When we speak of free software, we are referring to freedom, not price. +Our General Public Licenses are designed to make sure that you have the +freedom to distribute copies of free software (and charge for this +service if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs; and that you know you can do these things. + +To protect your rights, we need to make restrictions that forbid anyone +to deny you these rights or to ask you to surrender the rights. These +restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + +For example, if you distribute copies of such a program, whether gratis +or for a fee, you must give the recipients all the rights that you have. 
+You must make sure that they, too, receive or can get the source code. +And you must show them these terms so they know their rights. + +We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + +Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + +Finally, any free program is threatened constantly by software patents. +We wish to avoid the danger that redistributors of a free program will +individually obtain patent licenses, in effect making the program +proprietary. To prevent this, we have made it clear that any patent must +be licensed for everyone's free use or not licensed at all. + +The precise terms and conditions for copying, distribution and +modification follow. + +TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + +0. This License applies to any program or other work which contains a +notice placed by the copyright holder saying it may be distributed under +the terms of this General Public License. The "Program", below, refers +to any such program or work, and a "work based on the Program" means +either the Program or any derivative work under copyright law: that is +to say, a work containing the Program or a portion of it, either +verbatim or with modifications and/or translated into another language. +(Hereinafter, translation is included without limitation in the term +"modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. 
The act of running +the Program is not restricted, and the output from the Program is +covered only if its contents constitute a work based on the Program +(independent of having been made by running the Program). Whether that +is true depends on what the Program does. + +1. You may copy and distribute verbatim copies of the Program's source +code as you receive it, in any medium, provided that you conspicuously +and appropriately publish on each copy an appropriate copyright notice +and disclaimer of warranty; keep intact all the notices that refer to +this License and to the absence of any warranty; and give any other +recipients of the Program a copy of this License along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + +2. You may modify your copy or copies of the Program or any portion of +it, thus forming a work based on the Program, and copy and distribute +such modifications or work under the terms of Section 1 above, provided +that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any part + thereof, to be licensed as a whole at no charge to all third parties + under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a notice + that there is no warranty (or else, saying that you provide a + warranty) and that users may redistribute the program under these + conditions, and telling the user how to view a copy of this License. 
+ (Exception: if the Program itself is interactive but does not + normally print such an announcement, your work based on the Program + is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, and +can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based on +the Program, the distribution of the whole must be on the terms of this +License, whose permissions for other licensees extend to the entire +whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of a +storage or distribution medium does not bring the other work under the +scope of this License. + +3. 
You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections 1 + and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your cost + of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer to + distribute corresponding source code. (This alternative is allowed + only for noncommercial distribution and only if you received the + program in object code or executable form with such an offer, in + accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source code +means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to control +compilation and installation of the executable. However, as a special +exception, the source code distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies the +executable. 
+ +If distribution of executable or object code is made by offering access +to copy from a designated place, then offering equivalent access to copy +the source code from the same place counts as distribution of the source +code, even though third parties are not compelled to copy the source +along with the object code. + +4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt otherwise +to copy, modify, sublicense or distribute the Program is void, and will +automatically terminate your rights under this License. However, parties +who have received copies, or rights, from you under this License will +not have their licenses terminated so long as such parties remain in +full compliance. + +5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and all +its terms and conditions for copying, distributing or modifying the +Program or works based on it. + +6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further restrictions +on the recipients' exercise of the rights granted herein. You are not +responsible for enforcing compliance by third parties to this License. + +7. 
If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot distribute +so as to satisfy simultaneously your obligations under this License and +any other pertinent obligations, then as a consequence you may not +distribute the Program at all. For example, if a patent license would +not permit royalty-free redistribution of the Program by all those who +receive copies directly or indirectly through you, then the only way you +could satisfy both it and this License would be to refrain entirely from +distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is implemented +by public license practices. Many people have made generous +contributions to the wide range of software distributed through that +system in reliance on consistent application of that system; it is up to +the author/donor to decide if he or she is willing to distribute +software through any other system and a licensee cannot impose that choice. + +This section is intended to make thoroughly clear what is believed to be +a consequence of the rest of this License. + +8. 
If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License may +add an explicit geographical distribution limitation excluding those +countries, so that distribution is permitted only in or among countries +not thus excluded. In such case, this License incorporates the +limitation as if written in the body of this License. + +9. The Free Software Foundation may publish revised and/or new +versions of the General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Program does not specify a version +number of this License, you may choose any version ever published by the +Free Software Foundation. + +10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the +author to ask for permission. For software which is copyrighted by the +Free Software Foundation, write to the Free Software Foundation; we +sometimes make exceptions for this. Our decision will be guided by the +two goals of preserving the free status of all derivatives of our free +software and of promoting the sharing and reuse of software generally. + +NO WARRANTY + +11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, +EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE +ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH +YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL +NECESSARY SERVICING, REPAIR OR CORRECTION. + +12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR +DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL +DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM +(INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED +INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF +THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR +OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +END OF TERMS AND CONDITIONS + +How to Apply These Terms to Your New Programs + +If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + +To do so, attach the following notices to the program. It is safest to +attach them to the start of each source file to most effectively convey +the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. + + One line to give the program's name and a brief idea of what it does. 
+ Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335 USA + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type + `show w'. This is free software, and you are welcome to redistribute + it under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the +appropriate parts of the General Public License. Of course, the commands +you use may be called something other than `show w' and `show c'; they +could even be mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + program `Gnomovision' (which makes passes at compilers) written by + James Hacker. + + signature of Ty Coon, 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program +into proprietary programs. 
If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications +with the library. If this is what you want to do, use the GNU Library +General Public License instead of this License. + +# + +Certain source files distributed by Oracle America, Inc. and/or its +affiliates are subject to the following clarification and special +exception to the GPLv2, based on the GNU Project exception for its +Classpath libraries, known as the GNU Classpath Exception, but only +where Oracle has expressly included in the particular source file's +header the words "Oracle designates this particular file as subject to +the "Classpath" exception as provided by Oracle in the LICENSE file +that accompanied this code." + +You should also note that Oracle includes multiple, independent +programs in this software package. Some of those programs are provided +under licenses deemed incompatible with the GPLv2 by the Free Software +Foundation and others. For example, the package includes programs +licensed under the Apache License, Version 2.0. Such programs are +licensed to you under their original licenses. + +Oracle facilitates your further distribution of this package by adding +the Classpath Exception to the necessary parts of its GPLv2 code, which +permits you to use that code in combination with other independent +modules not licensed under the GPLv2. However, note that this would +not permit you to commingle code under an incompatible license with +Oracle's GPLv2 licensed code by, for example, cutting and pasting such +code into a file also containing Oracle's GPLv2 licensed code and then +distributing the result. 
Additionally, if you were to remove the +Classpath Exception from any of the files to which it applies and +distribute the result, you would likely be required to license some or +all of the other code in that distribution under the GPLv2 as well, and +since the GPLv2 is incompatible with the license terms of some items +included in the distribution by Oracle, removing the Classpath +Exception could therefore effectively compromise your ability to +further distribute the package. + +Proceed with caution and we recommend that you obtain the advice of a +lawyer skilled in open source matters before removing the Classpath +Exception or making modifications to this package which may +subsequently be redistributed and/or involve the use of third party +software. + +CLASSPATH EXCEPTION +Linking this library statically or dynamically with other modules is +making a combined work based on this library. Thus, the terms and +conditions of the GNU General Public License version 2 cover the whole +combination. + +As a special exception, the copyright holders of this library give you +permission to link this library with independent modules to produce an +executable, regardless of the license terms of these independent +modules, and to copy and distribute the resulting executable under +terms of your choice, provided that you also meet, for each linked +independent module, the terms and conditions of the license of that +module. An independent module is a module which is not derived from or +based on this library. If you modify this library, you may extend this +exception to your version of the library, but you are not obligated to +do so. If you do not wish to do so, delete this exception statement +from your version. 
diff --git a/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-jnanoid.txt b/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-jnanoid.txt deleted file mode 100644 index 65014f7b50223..0000000000000 --- a/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-jnanoid.txt +++ /dev/null @@ -1,26 +0,0 @@ -jnanoid License ---------------- - -MIT License - -Copyright (c) 2017 The JNanoID Authors -Copyright (c) 2017 Aventrix LLC -Copyright (c) 2017 Andrey Sitnik - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-truffle-api.txt b/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-truffle-api.txt deleted file mode 100644 index d656a31cb15d7..0000000000000 --- a/distribution/proxy-native/src/main/release-docs/licenses/LICENSE-truffle-api.txt +++ /dev/null @@ -1,35 +0,0 @@ -The Universal Permissive License (UPL), Version 1.0 - -Subject to the condition set forth below, permission is hereby granted to any -person obtaining a copy of this software, associated documentation and/or -data (collectively the "Software"), free of charge and under any and all -copyright rights in the Software, and any and all patent rights owned or -freely licensable by each licensor hereunder covering either (i) the -unmodified Software as contributed to or provided by such licensor, or (ii) -the Larger Works (as defined below), to deal in both - -(a) the Software, and - -(b) any piece of software and/or hardware listed in the lrgrwrks.txt file if -one is included with the Software each a "Larger Work" to which the Software -is contributed by such licensors), - -without restriction, including without limitation the rights to copy, create -derivative works of, display, perform, and distribute the Software and make, -use, sell, offer for sale, import, export, have made, and have sold the -Software and the Larger Work(s), and to sublicense the foregoing rights on -either these or other terms. - -This license is subject to the following condition: - -The above copyright notice and either this complete permission notice or at a -minimum a reference to the UPL must be included in all copies or substantial -portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/distribution/proxy/src/main/release-docs/LICENSE b/distribution/proxy/src/main/release-docs/LICENSE index 5a979f8383efa..f41740785cd91 100644 --- a/distribution/proxy/src/main/release-docs/LICENSE +++ b/distribution/proxy/src/main/release-docs/LICENSE @@ -219,11 +219,11 @@ The text of each license is the standard Apache 2.0 license. aggdesigner-algorithm 6.0: Apache 2.0 apiguardian-api 1.1.2: https://github.com/apiguardian-team/apiguardian, Apache 2.0 audience-annotations 0.12.0: https://github.com/apache/yetus, Apache 2.0 - avatica-core 1.22.0: https://calcite.apache.org/avatica, Apache 2.0 - avatica-metrics 1.22.0: https://calcite.apache.org/avatica, Apache 2.0 + avatica-core 1.23.0: https://calcite.apache.org/avatica, Apache 2.0 + avatica-metrics 1.23.0: https://calcite.apache.org/avatica, Apache 2.0 caffeine 2.9.3: https://github.com/ben-manes/caffeine, Apache 2.0 - calcite-core 1.32.0: https://calcite.apache.org, Apache 2.0 - calcite-linq4j 1.32.0: https://calcite.apache.org, Apache 2.0 + calcite-core 1.35.0: https://calcite.apache.org, Apache 2.0 + calcite-linq4j 1.35.0: https://calcite.apache.org, Apache 2.0 commons-codec 1.15: https://github.com/apache/commons-codec, Apache 2.0 commons-collections4 4.4: https://github.com/apache/commons-collections, Apache 2.0 commons-exec 1.3: https://github.com/apache/commons-exec, Apache 2.0 @@ -234,7 +234,6 @@ The text of each license is the standard Apache 2.0 license. 
curator-client 5.4.0: https://github.com/apache/curator, Apache 2.0 curator-framework 5.4.0: https://github.com/apache/curator, Apache 2.0 curator-recipes 5.4.0: https://github.com/apache/curator, Apache 2.0 - cosid-core 1.18.5: https://github.com/Ahoo-Wang/CosId, Apache 2.0 error_prone_annotations 2.11.0: https://github.com/google/error-prone, Apache 2.0 failsafe 2.4.4: https://github.com/jhalterman/failsafe, Apache 2.0 failureaccess 1.0.1: https://github.com/google/guava, Apache 2.0 @@ -324,7 +323,7 @@ The text of each license is also included at licenses/LICENSE-[project].txt. antlr4-runtime 4.10.1: https://github.com/antlr/antlr4, BSD-3-Clause asm 9.1: https://github.com/llbit/ow2-asm, BSD-3-Clause commons-compiler 3.1.8: https://github.com/janino-compiler/janino, BSD-3-Clause - janino 3.1.8: https://github.com/janino-compiler/janino, BSD-3-Clause + janino 3.1.9: https://github.com/janino-compiler/janino, BSD-3-Clause opengauss-jdbc 3.1.0-og: https://gitee.com/opengauss/openGauss-connector-jdbc, BSD-2-Clause postgresql 42.4.1: https://github.com/pgjdbc/pgjdbc, BSD-2-Clause protobuf-java 3.21.12: https://github.com/protocolbuffers/protobuf/blob/master/java, BSD-3-Clause @@ -371,14 +370,3 @@ The text of each license is also included at licenses/LICENSE-[project].txt. checker-qual 3.5.0: https://github.com/typetools/checker-framework/blob/master/checker-qual, MIT jul-to-slf4j 1.7.36: https://www.slf4j.org, MIT slf4j-api 1.7.36: https://www.slf4j.org, MIT - jnanoid 2.0.0: https://github.com/aventrix/jnanoid, MIT - -======================================================================== -UPL licenses -======================================================================== - -The following components are provided under the UPL License. See project link for details. -The text of each license is also included at licenses/LICENSE-[project].txt. 
- - graal-sdk 21.2.0: https://github.com/oracle/graal/tree/master/sdk, UPL 1.0 - truffle-api 21.2.0: https://github.com/oracle/graal/tree/master/truffle, UPL 1.0 diff --git a/distribution/proxy/src/main/release-docs/licenses/LICENSE-jnanoid.txt b/distribution/proxy/src/main/release-docs/licenses/LICENSE-jnanoid.txt deleted file mode 100644 index 65014f7b50223..0000000000000 --- a/distribution/proxy/src/main/release-docs/licenses/LICENSE-jnanoid.txt +++ /dev/null @@ -1,26 +0,0 @@ -jnanoid License ---------------- - -MIT License - -Copyright (c) 2017 The JNanoID Authors -Copyright (c) 2017 Aventrix LLC -Copyright (c) 2017 Andrey Sitnik - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/distribution/proxy/src/main/release-docs/licenses/LICENSE-truffle-api.txt b/distribution/proxy/src/main/release-docs/licenses/LICENSE-truffle-api.txt deleted file mode 100644 index d656a31cb15d7..0000000000000 --- a/distribution/proxy/src/main/release-docs/licenses/LICENSE-truffle-api.txt +++ /dev/null @@ -1,35 +0,0 @@ -The Universal Permissive License (UPL), Version 1.0 - -Subject to the condition set forth below, permission is hereby granted to any -person obtaining a copy of this software, associated documentation and/or -data (collectively the "Software"), free of charge and under any and all -copyright rights in the Software, and any and all patent rights owned or -freely licensable by each licensor hereunder covering either (i) the -unmodified Software as contributed to or provided by such licensor, or (ii) -the Larger Works (as defined below), to deal in both - -(a) the Software, and - -(b) any piece of software and/or hardware listed in the lrgrwrks.txt file if -one is included with the Software each a "Larger Work" to which the Software -is contributed by such licensors), - -without restriction, including without limitation the rights to copy, create -derivative works of, display, perform, and distribute the Software and make, -use, sell, offer for sale, import, export, have made, and have sold the -Software and the Larger Work(s), and to sublicense the foregoing rights on -either these or other terms. - -This license is subject to the following condition: - -The above copyright notice and either this complete permission notice or at a -minimum a reference to the UPL must be included in all copies or substantial -portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/distribution/proxy/src/main/resources/bin/start.sh b/distribution/proxy/src/main/resources/bin/start.sh index 88f2c86c643f1..b75111486f90d 100644 --- a/distribution/proxy/src/main/resources/bin/start.sh +++ b/distribution/proxy/src/main/resources/bin/start.sh @@ -111,7 +111,7 @@ print_usage() { echo " port: proxy listen port, default is 3307" echo " config_dir: proxy config directory, default is 'conf'" echo "" - echo "start.sh [-a addresses] [-p port] [-c /path/to/conf]" + echo "start.sh [-a addresses] [-p port] [-c /path/to/conf] [-f] [-g] [-s /path/to/socket]" echo "The options are unordered." echo "-a Bind addresses, can be IPv4, IPv6, hostname. In" echo " case more than one address is specified in a" @@ -173,7 +173,7 @@ if [ $# == 0 ]; then fi if [[ $1 == -a ]] || [[ $1 == -p ]] || [[ $1 == -c ]] || [[ $1 == -f ]] || [[ $1 == -s ]]; then - while getopts ":a:p:c:f:s:" opt + while getopts ":a:p:c:fs:" opt do case $opt in a) diff --git "a/docs/blog/content/material/2023_05_10_Simplifying_Learning_ShardingSphere\342\200\231s_Template_Engine_for_Streamlined_Examples.en.md" "b/docs/blog/content/material/2023_05_10_Simplifying_Learning_ShardingSphere\342\200\231s_Template_Engine_for_Streamlined_Examples.en.md" index bfd8c02d51756..18d9db030c715 100644 --- "a/docs/blog/content/material/2023_05_10_Simplifying_Learning_ShardingSphere\342\200\231s_Template_Engine_for_Streamlined_Examples.en.md" +++ "b/docs/blog/content/material/2023_05_10_Simplifying_Learning_ShardingSphere\342\200\231s_Template_Engine_for_Streamlined_Examples.en.md" @@ -89,14 +89,14 @@ Each module generated within this framework is a standalone project, allowing fo ## 1. 
Locate the Example Generation Module -Navigate to the [ShardingSphere project directory](https://github.com/apache/shardingsphere) and find the `shardingsphere-example-generator` module within the corresponding example module. This module contains all the template files and generation logic required for generating examples. +Navigate to the [ShardingSphere project directory](https://github.com/apache/shardingsphere) and find the `shardingsphere-jdbc-example-generator` module within the corresponding example module. This module contains all the template files and generation logic required for generating examples. ![img](https://shardingsphere.apache.org/blog/img/2023_04_27_How_South_Korea’s_Yogiyo_Improved_Scalability_and_Performance_with_Apache_ShardingSphere.en.md6.jpeg)Figure 5 — Project structure of the generation engine ## 2. Configure the Generation Engine Parameters -The `shardingsphere-example-generator` module is a standard Java project. The project's configuration file can be found at `resources/config.yaml`. This file allows users to declare various parameters supported by the generation engine. In addition to the template parameters for specific functions, we have also provided convenient configurations that users can customize according to their needs. The specific parameters and their meanings are outlined in Table 2: +The `shardingsphere-jdbc-example-generator` module is a standard Java project. The project's configuration file can be found at `resources/config.yaml`. This file allows users to declare various parameters supported by the generation engine. In addition to the template parameters for specific functions, we have also provided convenient configurations that users can customize according to their needs. 
The specific parameters and their meanings are outlined in Table 2: ![img](https://shardingsphere.apache.org/blog/img/2023_04_27_How_South_Korea’s_Yogiyo_Improved_Scalability_and_Performance_with_Apache_ShardingSphere.en.md7.jpeg) @@ -104,21 +104,21 @@ The `shardingsphere-example-generator` module is a standard Java project. The pr After configuring the parameters, it’s time to generate the corresponding configuration module. This can be done in two ways: -- **Running the Main Class:** Locate the `ExampleGeneratorMain` class under the `shardingsphere-example-generator` module and run the main method. This will generate the example in the configured output directory. +- **Running the Main Class:** Locate the `ExampleGeneratorMain` class under the `shardingsphere-jdbc-example-generator` module and run the main method. This will generate the example in the configured output directory. - **Using Maven Command Line:** Alternatively, you can use the Maven command line to trigger the generation process. The command should be executed as follows: ``` // generate configuration based on config.yaml -./mvnw -B clean install -f examples/shardingsphere-example-generator/pom.xml -Pexample-generator +./mvnw -B clean install -f examples/shardingsphere-jdbc-example-generator/pom.xml -Pexample-generator // generation configuration based on command parameters -./mvnw -B clean install -f examples/shardingsphere-example-generator/pom.xml -Pexample-generator -Dproducts=jdbc -Dmodes=cluster-zookeeper -Dtransactions=local -Dfeatures=shadow -Dframeworks=jdbc +./mvnw -B clean install -f examples/shardingsphere-jdbc-example-generator/pom.xml -Pexample-generator -Dproducts=jdbc -Dmodes=cluster-zookeeper -Dtransactions=local -Dfeatures=shadow -Dframeworks=jdbc ``` ## 4. View and Utilize the Examples -Once the generation process is triggered, the associated example code will be generated in the configured output directory. 
Typically, this would be in the `shardingsphere-example-generator/target/generated-sources/ directory`. The generated directory structure can be seen in Figure 6 below: +Once the generation process is triggered, the associated example code will be generated in the configured output directory. Typically, this would be in the `shardingsphere-jdbc-example-generator/target/generated-sources/ directory`. The generated directory structure can be seen in Figure 6 below: ![img](https://shardingsphere.apache.org/blog/img/2023_04_27_How_South_Korea’s_Yogiyo_Improved_Scalability_and_Performance_with_Apache_ShardingSphere.en.md8.jpeg)Figure 6 — Directory structure for generating examples diff --git a/docs/community/content/team/_index.cn.md b/docs/community/content/team/_index.cn.md index e0c4187475622..b489131011098 100644 --- a/docs/community/content/team/_index.cn.md +++ b/docs/community/content/team/_index.cn.md @@ -156,6 +156,11 @@ chapter = true + + + + + @@ -173,6 +178,9 @@ chapter = true 钟红胜 + + 王光远 + @@ -180,11 +188,6 @@ chapter = true - - diff --git a/docs/community/content/team/_index.en.md b/docs/community/content/team/_index.en.md index 93bd69c14bc8b..395c2f352d1fa 100644 --- a/docs/community/content/team/_index.en.md +++ b/docs/community/content/team/_index.en.md @@ -156,6 +156,11 @@ chapter = true + +
- - - - @@ -217,9 +220,6 @@ chapter = true
- 王小满 - 孙海生 + + + +
@@ -173,6 +178,9 @@ chapter = true Hongsheng Zhong + Guangyuan Wang +
@@ -180,11 +188,6 @@ chapter = true - - diff --git a/docs/document/content/dev-manual/mask.cn.md b/docs/document/content/dev-manual/mask.cn.md index f6f5215f50469..8d88ccce78768 100644 --- a/docs/document/content/dev-manual/mask.cn.md +++ b/docs/document/content/dev-manual/mask.cn.md @@ -26,9 +26,4 @@ chapter = true | MASK_FROM_X_TO_Y | 遮盖自 x 至 y 数据脱敏算法 | [`org.apache.shardingsphere.mask.algorithm.cover.MaskFromXToYMaskAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/cover/MaskFromXToYMaskAlgorithm.java) | | MASK_BEFORE_SPECIAL_CHARS | 特殊字符前遮盖数据脱敏算法 | [`org.apache.shardingsphere.mask.algorithm.cover.MaskBeforeSpecialCharsAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/cover/MaskBeforeSpecialCharsAlgorithm.java) | | MASK_AFTER_SPECIAL_CHARS | 特殊字符后遮盖数据脱敏算法 | [`org.apache.shardingsphere.mask.algorithm.cover.MaskAfterSpecialCharsAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/cover/MaskAfterSpecialCharsAlgorithm.java) | -| PERSONAL_IDENTITY_NUMBER_RANDOM_REPLACE | 身份证号随机替换数据脱敏算法 | [`org.apache.shardingsphere.mask.algorithm.replace.PersonalIdentityNumberRandomReplaceAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/PersonalIdentityNumberRandomReplaceAlgorithm.java) | -| MILITARY_IDENTITY_NUMBER_RANDOM_REPLACE | 军官证随机替换数据脱敏算法 | [`org.apache.shardingsphere.mask.algorithm.replace.MilitaryIdentityNumberRandomReplaceAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/MilitaryIdentityNumberRandomReplaceAlgorithm.java) | -| TELEPHONE_RANDOM_REPLACE | ⼿机号随机替换数据脱敏算法 | 
[`org.apache.shardingsphere.mask.algorithm.replace.TelephoneRandomReplaceAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/TelephoneRandomReplaceAlgorithm.java) | -| LANDLINE_NUMBER_RANDOM_REPLACE | 座机号码随机替换 | [`org.apache.shardingsphere.mask.algorithm.replace.LandlineNumberRandomAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/LandlineNumberRandomAlgorithm.java) | | GENERIC_TABLE_RANDOM_REPLACE | 通⽤表格随机替换 | [`org.apache.shardingsphere.mask.algorithm.replace.GenericTableRandomReplaceAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/GenericTableRandomReplaceAlgorithm.java) | -| UNIFIED_CREDIT_CODE_RANDOM_REPLACE | 统⼀信⽤码随机替换 | [`org.apache.shardingsphere.mask.algorithm.replace.UnifiedCreditCodeRandomReplaceAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/UnifiedCreditCodeRandomReplaceAlgorithm.java) | diff --git a/docs/document/content/dev-manual/mask.en.md b/docs/document/content/dev-manual/mask.en.md index dc77ef2926ba1..67580110eea07 100644 --- a/docs/document/content/dev-manual/mask.en.md +++ b/docs/document/content/dev-manual/mask.en.md @@ -26,9 +26,4 @@ Data masking algorithm definition | MASK_FROM_X_TO_Y | Mask from x to y data masking algorithm | [`org.apache.shardingsphere.mask.algorithm.cover.MaskFromXToYMaskAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/cover/MaskFromXToYMaskAlgorithm.java) | | MASK_BEFORE_SPECIAL_CHARS | Mask before special chars data masking algorithm | 
[`org.apache.shardingsphere.mask.algorithm.cover.MaskBeforeSpecialCharsAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/cover/MaskBeforeSpecialCharsAlgorithm.java) | | MASK_AFTER_SPECIAL_CHARS | Mask after special chars data masking algorithm | [`org.apache.shardingsphere.mask.algorithm.cover.MaskAfterSpecialCharsAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/cover/MaskAfterSpecialCharsAlgorithm.java) | -| PERSONAL_IDENTITY_NUMBER_RANDOM_REPLACE | Personal identity number random replace data masking algorithm | [`org.apache.shardingsphere.mask.algorithm.replace.PersonalIdentityNumberRandomReplaceAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/PersonalIdentityNumberRandomReplaceAlgorithm.java) | -| MILITARY_IDENTITY_NUMBER_RANDOM_REPLACE | Military identity number random replace data masking algorithm | [`org.apache.shardingsphere.mask.algorithm.replace.MilitaryIdentityNumberRandomReplaceAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/MilitaryIdentityNumberRandomReplaceAlgorithm.java) | -| TELEPHONE_RANDOM_REPLACE | Telephone random replace data masking algorithm | [`org.apache.shardingsphere.mask.algorithm.replace.TelephoneRandomReplaceAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/TelephoneRandomReplaceAlgorithm.java) | -| LANDLINE_NUMBER_RANDOM_REPLACE | Landline number random replace data masking algorithm | 
[`org.apache.shardingsphere.mask.algorithm.replace.LandlineNumberRandomAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/LandlineNumberRandomAlgorithm.java) | | GENERIC_TABLE_RANDOM_REPLACE | Generic table random replace algorithm | [`org.apache.shardingsphere.mask.algorithm.replace.GenericTableRandomReplaceAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/GenericTableRandomReplaceAlgorithm.java) | -| UNIFIED_CREDIT_CODE_RANDOM_REPLACE | Unified credit code random replace algorithm | [`org.apache.shardingsphere.mask.algorithm.replace.UnifiedCreditCodeRandomReplaceAlgorithm`](https://github.com/apache/shardingsphere/blob/master/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/UnifiedCreditCodeRandomReplaceAlgorithm.java) | diff --git a/docs/document/content/faq/_index.cn.md b/docs/document/content/faq/_index.cn.md index 7438133330834..d5e3865a03ae7 100644 --- a/docs/document/content/faq/_index.cn.md +++ b/docs/document/content/faq/_index.cn.md @@ -7,6 +7,10 @@ chapter = true ## MODE +### [MODE] 单机模式 `Standalone` 和 `Compatible_Standalone` 区别? + +在 5.4.0 版本中调整了元数据存储结构,`Standalone` 代表新版本的元数据结构,`Compatible_Standalone` 则代表 5.4.0 之前版本的元数据结构。 + ### [MODE] 集群模式 `Cluster` 和 `Compatible_Cluster` 区别? 回答: diff --git a/docs/document/content/faq/_index.en.md b/docs/document/content/faq/_index.en.md index 96f8a8ed0675f..f6e7d01b2383b 100644 --- a/docs/document/content/faq/_index.en.md +++ b/docs/document/content/faq/_index.en.md @@ -7,6 +7,13 @@ chapter = true ## MODE +### [MODE] What is the difference between standalone mode `Standalone` and `Compatible_Standalone`? 
+ +Answer: + +The metadata structure was adjusted in version 5.4.0, `Standalone` represents the metadata structure of the new version, +and `Compatible_Standalone` represents the metadata structure of versions before 5.4.0. + ### [MODE] What is the difference between cluster mode `Cluster` and `Compatible_Cluster`? Answer: diff --git a/docs/document/content/reference/mask/_index.cn.md b/docs/document/content/reference/mask/_index.cn.md index 3d51efa76ae4c..8dea432389a81 100644 --- a/docs/document/content/reference/mask/_index.cn.md +++ b/docs/document/content/reference/mask/_index.cn.md @@ -22,7 +22,7 @@ Apache ShardingSphere 通过对用户查询的 SQL 进行解析,并依据用 **数据源配置**:指数据源配置。 -**脱敏算法配置**:指使用什么脱敏算法。目前 ShardingSphere 内置了多种脱敏算法:MD5、KEEP_FIRST_N_LAST_M、KEEP_FROM_X_TO_Y 、MASK_FIRST_N_LAST_M、MASK_FROM_X_TO_Y、MASK_BEFORE_SPECIAL_CHARS、MASK_AFTER_SPECIAL_CHARS、PERSONAL_IDENTITY_NUMBER_RANDOM_REPLACE、MILITARY_IDENTITY_NUMBER_RANDOM_REPLACE 和 TELEPHONE_RANDOM_REPLACE。用户还可以通过实现 ShardingSphere 提供的接口,自行实现一套脱敏算法。 +**脱敏算法配置**:指使用什么脱敏算法。目前 ShardingSphere 内置了多种脱敏算法:MD5、KEEP_FIRST_N_LAST_M、KEEP_FROM_X_TO_Y 、MASK_FIRST_N_LAST_M、MASK_FROM_X_TO_Y、MASK_BEFORE_SPECIAL_CHARS、MASK_AFTER_SPECIAL_CHARS 和 GENERIC_TABLE_RANDOM_REPLACE。用户还可以通过实现 ShardingSphere 提供的接口,自行实现一套脱敏算法。 **脱敏表配置**:用于告诉 ShardingSphere 数据表里哪个列用于数据脱敏、使用什么算法脱敏。 diff --git a/docs/document/content/reference/mask/_index.en.md b/docs/document/content/reference/mask/_index.en.md index 1a869e21061c1..491100654332d 100644 --- a/docs/document/content/reference/mask/_index.en.md +++ b/docs/document/content/reference/mask/_index.en.md @@ -22,7 +22,7 @@ Desensitization configuration is mainly divided into three parts: data source co **Data source configuration**: the configuration of the data source. 
-**Mask algorithm configuration**: currently, ShardingSphere has a variety of built-in desensitization algorithms: MD5, KEEP_FIRST_N_LAST_M, KEEP_FROM_X_TO_Y , MASK_FIRST_N_LAST_M, MASK_FROM_X_TO_Y, MASK_BEFORE_SPECIAL_CHARS, MASK_AFTER_SPECIAL_CHARS, PERSONAL_IDENTITY_NUMBER_RANDOM_REPLACE, MILITARY_IDENTITY_NUMBER_RANDOM_REPLACE, and TELEPHONE_RANDOM_REPLACE. +**Mask algorithm configuration**: currently, ShardingSphere has a variety of built-in desensitization algorithms: MD5, KEEP_FIRST_N_LAST_M, KEEP_FROM_X_TO_Y , MASK_FIRST_N_LAST_M, MASK_FROM_X_TO_Y, MASK_BEFORE_SPECIAL_CHARS, MASK_AFTER_SPECIAL_CHARS and GENERIC_TABLE_RANDOM_REPLACE. Users can also implement a set of desensitization algorithms by implementing the interface provided by ShardingSphere. diff --git a/docs/document/content/user-manual/common-config/builtin-algorithm/mask.cn.md b/docs/document/content/user-manual/common-config/builtin-algorithm/mask.cn.md index 2aa745c565991..7eb51f1532125 100644 --- a/docs/document/content/user-manual/common-config/builtin-algorithm/mask.cn.md +++ b/docs/document/content/user-manual/common-config/builtin-algorithm/mask.cn.md @@ -95,46 +95,6 @@ weight = 9 ### 替换脱敏算法 -#### 身份证随机替换脱敏算法 - -类型:PERSONAL_IDENTITY_NUMBER_RANDOM_REPLACE - -可配置属性: - -| *名称* | *数据类型* | *说明* | -|-----------------------------|--------|-----------------------| -| alpha-two-country-area-code | String | 两位字母国家/地区编码(可选,默认:CN) | - -#### 军官证随机替换脱敏算法 - -类型:MILITARY_IDENTITY_NUMBER_RANDOM_REPLACE - -可配置属性: - -| *名称* | *数据类型* | *说明* | -|------------|--------|-------------------------------| -| type-codes | String | 军官证种类编码(以英文逗号分隔,例如:军,人,士,文,职) | - -#### 手机号随机替换脱敏算法 - -类型:TELEPHONE_RANDOM_REPLACE - -可配置属性: - -| *名称* | *数据类型* | *说明* | -|-----------------|--------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| network-numbers | String | 
⽹号(以英文逗号分隔,默认值:130,131,132,133,134,135,136,137,138,139,150,151,152,153,155,156,157,158,159,166,170,176,177,178,180,181,182,183,184,185,186,187,188,189,191,198,199) | - -#### 座机号码随机替换 - -类型:LANDLINE_NUMBER_RANDOM_REPLACE - -可配置属性: - -| *名称* | *数据类型* | *说明* | -|------------------|--------|---------------| -| landline-numbers | String | 座机号码(以英文逗号分隔) | - #### 通⽤表格随机替换 类型:GENERIC_TABLE_RANDOM_REPLACE @@ -148,18 +108,6 @@ weight = 9 | digital-codes | String | 数字码表(以英文逗号分隔,默认值:0,1,2,3,4,5,6,7,8,9) | | special-codes | String | 特殊字符码表(以英文逗号分隔,默认值:~,!,@,#,$,%,^,&,*,:,<,>,¦) | -#### 统⼀信⽤码随机替换 - -类型:UNIFIED_CREDIT_CODE_RANDOM_REPLACE - -可配置属性: - -| *名称* | *数据类型* | *说明* | -|-------------------------------|--------|-------------------| -| registration-department-codes | String | 登记管理部门代码(以英文逗号分隔) | -| category-codes | String | 机构类别代码(以英文逗号分隔) | -| administrative-division-codes | String | 行政区划随机码表(以英文逗号分隔) | - ## 操作步骤 1. 在脱敏规则中配置脱敏算法; 2. 为脱敏算法指定脱敏算法类型。 diff --git a/docs/document/content/user-manual/common-config/builtin-algorithm/mask.en.md b/docs/document/content/user-manual/common-config/builtin-algorithm/mask.en.md index 8b493cb4eaaaf..d29b40d51caba 100644 --- a/docs/document/content/user-manual/common-config/builtin-algorithm/mask.en.md +++ b/docs/document/content/user-manual/common-config/builtin-algorithm/mask.en.md @@ -95,46 +95,6 @@ Attributes: ### Replace Data Masking Algorithm -#### Personal Identity Number Random Replace Data Masking Algorithm - -Type: PERSONAL_IDENTITY_NUMBER_RANDOM_REPLACE - -Attributes: - -| *Name* | *DataType* | *Description* | -|-----------------------------|------------|-----------------------------------------------------------| -| alpha-two-country-area-code | String | alpha two country area code (Optional, default value: CN) | - -#### Military Identity Number Random Replace Data Masking Algorithm - -类型:MILITARY_IDENTITY_NUMBER_RANDOM_REPLACE - -可配置属性: - -| *Name* | *DataType* | *Description* | 
-|-------------------------------|------------|------------------------------------------------------------| -| type-codes | String | military identity number type codes (separate with comma) | - -#### Telephone Random Replace Data Masking Algorithm - -Type: TELEPHONE_RANDOM_REPLACE - -Attributes: - -| *Name* | *DataType* | *Description* | -|-----------------|------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| network-numbers | String | Network numbers (separate with comma, default value: 130,131,132,133,134,135,136,137,138,139,150,151,152,153,155,156,157,158,159,166,170,176,177,178,180,181,182,183,184,185,186,187,188,189,191,198,199) | - -#### Landline Number Random Replace Data Masking Algorithm - -Type: LANDLINE_NUMBER_RANDOM_REPLACE - -Attributes: - -| *Name* | *DataType* | *Description* | -|------------------|------------|----------------------------------------| -| landline-numbers | String | Landline numbers (separate with comma) | - #### Generic table random replace algorithm. Type: GENERIC_TABLE_RANDOM_REPLACE @@ -148,18 +108,6 @@ Attributes: | digital-random-codes | String | Numbers (separate with comma, default value: 0,1,2,3,4,5,6,7,8,9) | | special-codes | String | Special codes (separate with comma, default value: ~,!,@,#,$,%,^,&,*,:,<,>,¦) | -#### Unified credit code random replace algorithm - -Type: UNIFIED_CREDIT_CODE_RANDOM_REPLACE - -Attributes: - -| *Name* | *DataType* | *Description* | -|-------------------------------|------------|----------------------------------------------------| -| registration-department-codes | String | Registration department code (separate with comma) | -| category-codes | String | Category code (separate with comma) | -| administrative-division-codes | String | Administrative division code (separate with comma) | - ## Operating Procedure 1. 
Configure maskAlgorithms in a mask rule. 2. Use relevant algorithm types in maskAlgorithms. diff --git a/docs/document/content/user-manual/error-code/sql-error-code.cn.md b/docs/document/content/user-manual/error-code/sql-error-code.cn.md index 6d05733aa0570..779f387141d15 100644 --- a/docs/document/content/user-manual/error-code/sql-error-code.cn.md +++ b/docs/document/content/user-manual/error-code/sql-error-code.cn.md @@ -19,8 +19,9 @@ SQL 错误码以标准的 SQL State,Vendor Code 和详细错误信息提供, | 42000 | 10002 | Can not support 3-tier structure for actual data node \`%s\` with JDBC \`%s\`. | | HY004 | 10003 | Invalid format for actual data node \`%s\`. | | 42000 | 10004 | Unsupported SQL node conversion for SQL statement \`%s\`. | -| HY000 | 10005 | Column '%s' in field list is ambiguous. | -| 42S02 | 10006 | Unknown column '%s' in 'field list'. | +| HY000 | 10005 | Column '%s' in %s is ambiguous. | +| 42S02 | 10006 | Unknown column '%s' in '%s'. | +| 42S02 | 10007 | Table or view \`%s\` does not exist. | | 42000 | 10010 | Rule does not exist. | | 42S02 | 10020 | Schema \`%s\` does not exist. | | 42S02 | 10021 | Single table \`%s\` does not exist. | @@ -38,16 +39,16 @@ SQL 错误码以标准的 SQL State,Vendor Code 和详细错误信息提供, ### 语法 -| SQL State | Vendor Code | 错误信息 | -|-----------|-------------|----------------------------------------------| -| 42000 | 12000 | You have an error in your SQL syntax: %s | -| 42000 | 12001 | Can not accept SQL type \`%s\`. | -| 42000 | 12002 | SQL String can not be NULL or empty. | -| 42000 | 12010 | Can not support variable \`%s\`. | -| 42S02 | 12011 | Can not find column label \`%s\`. | -| 42S02 | 12012 | Can not find driver url provider for \`%s`\. | -| HV008 | 12020 | Column index \`%d\` is out of range. | -| 0A000 | 12100 | DROP TABLE ... CASCADE is not supported. 
| +| SQL State | Vendor Code | 错误信息 | +|-----------|-------------|---------------------------------------------| +| 42000 | 12000 | You have an error in your SQL syntax: %s | +| 42000 | 12001 | Can not accept SQL type \`%s\`. | +| 42000 | 12002 | SQL String can not be NULL or empty. | +| 42000 | 12010 | Can not support variable \`%s\`. | +| 42S02 | 12011 | Can not find column label \`%s\`. | +| 42S02 | 12012 | Can not find url provider for \`%s`\. | +| HV008 | 12020 | Column index \`%d\` is out of range. | +| 0A000 | 12100 | DROP TABLE ... CASCADE is not supported. | ### 连接 @@ -70,7 +71,6 @@ SQL 错误码以标准的 SQL State,Vendor Code 和详细错误信息提供, | 25000 | 14000 | Switch transaction type failed, please terminate the current transaction. | | 25000 | 14001 | Can not find transaction manager of \`%s\`. | | 25000 | 14002 | Transaction timeout should more than 0s. | -| 25000 | 14100 | JDBC does not support operations across multiple logical databases in transaction. | | 25000 | 14200 | Can not start new XA transaction in a active transaction. | | 25000 | 14201 | Failed to create \`%s\` XA data source. | | 25000 | 14202 | Max length of xa unique resource name \`%s\` exceeded: should be less than 45. | diff --git a/docs/document/content/user-manual/error-code/sql-error-code.en.md b/docs/document/content/user-manual/error-code/sql-error-code.en.md index 705b09c015b76..bd7f6229a5649 100644 --- a/docs/document/content/user-manual/error-code/sql-error-code.en.md +++ b/docs/document/content/user-manual/error-code/sql-error-code.en.md @@ -19,8 +19,9 @@ SQL error codes provide by standard `SQL State`, `Vendor Code` and `Reason`, whi | 42000 | 10002 | Can not support 3-tier structure for actual data node \`%s\` with JDBC \`%s\`. | | HY004 | 10003 | Invalid format for actual data node \`%s\`. | | 42000 | 10004 | Unsupported SQL node conversion for SQL statement \`%s\`. | -| HY000 | 10005 | Column '%s' in field list is ambiguous. | -| 42S02 | 10006 | Unknown column '%s' in 'field list'. 
| +| HY000 | 10005 | Column '%s' in %s is ambiguous. | +| 42S02 | 10006 | Unknown column '%s' in '%s'. | +| 42S02 | 10007 | Table or view \`%s\` does not exist. | | 42000 | 10010 | Rule does not exist. | | 42S02 | 10020 | Schema \`%s\` does not exist. | | 42S02 | 10021 | Single table \`%s\` does not exist. | @@ -38,16 +39,16 @@ SQL error codes provide by standard `SQL State`, `Vendor Code` and `Reason`, whi ### Syntax -| SQL State | Vendor Code | Reason | -|-----------|-------------|----------------------------------------------| -| 42000 | 12000 | You have an error in your SQL syntax: %s | -| 42000 | 12001 | Can not accept SQL type \`%s\`. | -| 42000 | 12002 | SQL String can not be NULL or empty. | -| 42000 | 12010 | Can not support variable \`%s\`. | -| 42S02 | 12011 | Can not find column label \`%s\`. | -| 42S02 | 12012 | Can not find driver url provider for \`%s`\. | -| HV008 | 12020 | Column index \`%d\` is out of range. | -| 0A000 | 12100 | DROP TABLE ... CASCADE is not supported. | +| SQL State | Vendor Code | Reason | +|-----------|-------------|---------------------------------------------| +| 42000 | 12000 | You have an error in your SQL syntax: %s | +| 42000 | 12001 | Can not accept SQL type \`%s\`. | +| 42000 | 12002 | SQL String can not be NULL or empty. | +| 42000 | 12010 | Can not support variable \`%s\`. | +| 42S02 | 12011 | Can not find column label \`%s\`. | +| 42S02 | 12012 | Can not find url provider for \`%s`\. | +| HV008 | 12020 | Column index \`%d\` is out of range. | +| 0A000 | 12100 | DROP TABLE ... CASCADE is not supported. | ### Connection @@ -70,7 +71,6 @@ SQL error codes provide by standard `SQL State`, `Vendor Code` and `Reason`, whi | 25000 | 14000 | Switch transaction type failed, please terminate the current transaction. | | 25000 | 14001 | Can not find transaction manager of \`%s\`. | | 25000 | 14002 | Transaction timeout should more than 0s. 
| -| 25000 | 14100 | JDBC does not support operations across multiple logical databases in transaction. | | 25000 | 14200 | Can not start new XA transaction in a active transaction. | | 25000 | 14201 | Failed to create \`%s\` XA data source. | | 25000 | 14202 | Max length of xa unique resource name \`%s\` exceeded: should be less than 45. | diff --git a/docs/document/content/user-manual/shardingsphere-jdbc/_index.cn.md b/docs/document/content/user-manual/shardingsphere-jdbc/_index.cn.md index 9cc8f54921b26..c7d777fb048f9 100644 --- a/docs/document/content/user-manual/shardingsphere-jdbc/_index.cn.md +++ b/docs/document/content/user-manual/shardingsphere-jdbc/_index.cn.md @@ -18,4 +18,4 @@ ShardingSphere-JDBC 提供了 2 种配置方式,用于不同的使用场景。 如果前一个规则是面向数据源聚合的,下一个规则在配置数据源时,则需要使用前一个规则配置的聚合后的逻辑数据源名称; 同理,如果前一个规则是面向表聚合的,下一个规则在配置表时,则需要使用前一个规则配置的聚合后的逻辑表名称。 -更多使用细节请参见[使用示例](https://github.com/apache/shardingsphere/tree/master/examples/shardingsphere-example-generator)。 +更多使用细节请参见[使用示例](https://github.com/apache/shardingsphere/tree/master/examples/shardingsphere-jdbc-example-generator)。 diff --git a/docs/document/content/user-manual/shardingsphere-jdbc/_index.en.md b/docs/document/content/user-manual/shardingsphere-jdbc/_index.en.md index ae71d0115590c..e7d4fc9b10b75 100644 --- a/docs/document/content/user-manual/shardingsphere-jdbc/_index.en.md +++ b/docs/document/content/user-manual/shardingsphere-jdbc/_index.en.md @@ -19,4 +19,4 @@ It should be noted that the superposition between rules are data source and tabl If the previous rule is data source oriented aggregation, the next rule needs to use the aggregated logical data source name configured by the previous rule when configuring the data source; Similarly, if the previous rule is table oriented aggregation, the next rule needs to use the aggregated logical table name configured by the previous rule when configuring the table. 
-Please refer to [Example](https://github.com/apache/shardingsphere/tree/master/examples/shardingsphere-example-generator) for more details. +Please refer to [Example](https://github.com/apache/shardingsphere/tree/master/examples/shardingsphere-jdbc-example-generator) for more details. diff --git a/docs/document/content/user-manual/shardingsphere-jdbc/yaml-config/jdbc-driver/spring-boot/_index.cn.md b/docs/document/content/user-manual/shardingsphere-jdbc/yaml-config/jdbc-driver/spring-boot/_index.cn.md index 443b945b95403..d3e5fd7e12cb2 100644 --- a/docs/document/content/user-manual/shardingsphere-jdbc/yaml-config/jdbc-driver/spring-boot/_index.cn.md +++ b/docs/document/content/user-manual/shardingsphere-jdbc/yaml-config/jdbc-driver/spring-boot/_index.cn.md @@ -29,6 +29,92 @@ spring.datasource.driver-class-name=org.apache.shardingsphere.driver.ShardingSph spring.datasource.url=jdbc:shardingsphere:classpath:xxx.yaml ``` +`spring.datasource.url` 中的 YAML 配置文件当前支持通过三种方式获取,绝对路径 `absolutepath:`、Apollo 配置中心 `apollo:` 以及 CLASSPATH `classpath:`,具体可参考 `org.apache.shardingsphere.driver.jdbc.core.driver.ShardingSphereURLProvider` 的实现。 + ### 使用数据源 直接使用该数据源;或者将 ShardingSphereDataSource 配置在 JPA、Hibernate、MyBatis 等 ORM 框架中配合使用。 + +## 针对 Spring Boot OSS 3 的特殊处理 + +Spring Boot OSS 3 对 Jakarta EE 和 Java 17 进行了 “大爆炸” 升级,涉及大量复杂情况。 + +对于正在使用 Java EE 8 API 及其实现的 ShardingSphere JDBC 而言,如果用户希望在 Spring Boot OSS 3 等基于 Jakarta EE 9+ API 的 Web +Framework 上使用 ShardingSphere JDBC,则需要引入 Java EE 8 的 JAXB 的实现,并指定一个特定的 SnakeYAML 版本。 + +这在 Maven 的 `pom.xml` 体现为如下内容。你也可以使用其他的 JAXB API 的实现。此配置同样适用于其他基于 Jakarta EE 的 Web Framework,如 +Quarkus 3,Micronaut Framework 4 和 Helidon 3。 + +```xml + + + + org.apache.shardingsphere + shardingsphere-jdbc-core + ${shardingsphere.version} + + + org.yaml + snakeyaml + 1.33 + + + org.glassfish.jaxb + jaxb-runtime + 2.3.8 + + + +``` + +如果用户是通过 https://start.spring.io/ 创建了 Spring Boot 项目,或者在 `dependencyManagement` 的 XML 标签导入了 
+`org.springframework.boot:spring-boot-dependencies` 的 POM 文件,则可通过如下内容来简化配置。 + +```xml + + + 1.33 + + + + + org.apache.shardingsphere + shardingsphere-jdbc-core + ${shardingsphere.version} + + + org.glassfish.jaxb + jaxb-runtime + 2.3.8 + + + +``` + +此外,ShardingSphere 的 XA 分布式事务尚未在 Spring Boot OSS 3 上就绪。 + +## 针对低版本的 Spring Boot OSS 2 的特殊处理 + +ShardingSphere 的所有特性均可在 Spring Boot OSS 2 上使用,但低版本的 Spring Boot OSS 可能需要手动指定 SnakeYAML 的版本为 1.33 。 +这在 Maven 的 `pom.xml` 体现为如下内容。 + +```xml + + + + org.apache.shardingsphere + shardingsphere-jdbc-core + ${shardingsphere.version} + + + org.yaml + snakeyaml + 1.33 + + + +``` + +如果用户是通过 https://start.spring.io/ 创建了 Spring Boot 项目,或者在 `dependencyManagement` 的 XML 标签导入了 +`org.springframework.boot:spring-boot-dependencies`的 POM 文件,同样可以选择通过配置 `snakeyaml.version` 的 `properties` +来简化内容。 diff --git a/docs/document/content/user-manual/shardingsphere-jdbc/yaml-config/jdbc-driver/spring-boot/_index.en.md b/docs/document/content/user-manual/shardingsphere-jdbc/yaml-config/jdbc-driver/spring-boot/_index.en.md index 2520d84a48f2c..30c796166788e 100644 --- a/docs/document/content/user-manual/shardingsphere-jdbc/yaml-config/jdbc-driver/spring-boot/_index.en.md +++ b/docs/document/content/user-manual/shardingsphere-jdbc/yaml-config/jdbc-driver/spring-boot/_index.en.md @@ -29,6 +29,94 @@ spring.datasource.driver-class-name=org.apache.shardingsphere.driver.ShardingSph spring.datasource.url=jdbc:shardingsphere:classpath:xxx.yaml ``` +The YAML configuration file in 'spring.datasource.url' currently support in three ways, the absolute path 'absolutepath:', Apollo configuration center 'apollo:', and CLASSPATH 'classpath:', which can be referred to `org.apache.shardingsphere.driver.jdbc.core.driver.ShardingSphereURLProvider`'s implementation for details. + ### Use Data Source Use this data source directly; or configure ShardingSphereDataSource to be used in conjunction with ORM frameworks such as JPA, Hibernate, and MyBatis. 
+ +## Special handling for Spring Boot OSS 3 + +Spring Boot OSS 3 has made a "big bang" upgrade to Jakarta EE and Java 17, with all complications involved. + +For ShardingSphere JDBC that is using the Java EE 8 API and its implementation, if you want to use ShardingSphere JDBC +on a Jakarta EE 9+ API-based web framework such as Spring Boot OSS 3, you need to introduce a JAXB implementation of +Java EE 8 and specify a specific version of SnakeYAML. + +This is reflected in Maven's `pom.xml` as follows. You can also use other JAXB API implementations. This configuration +also applies to other Jakarta EE-based Web Frameworks, such as Quarkus 3, Micronaut Framework 4 and Helidon 3. + +```xml + + + + org.apache.shardingsphere + shardingsphere-jdbc-core + ${shardingsphere.version} + + + org.yaml + snakeyaml + 1.33 + + + org.glassfish.jaxb + jaxb-runtime + 2.3.8 + + + +``` + +If the user created the Spring Boot project from https://start.spring.io/, or the `dependencyManagement` XML tag was +imported POM file for `org.springframework.boot:spring-boot-dependencies`, users can simplify configuration by +following things. + +```xml + + + 1.33 + + + + + org.apache.shardingsphere + shardingsphere-jdbc-core + ${shardingsphere.version} + + + org.glassfish.jaxb + jaxb-runtime + 2.3.8 + + + +``` + +In addition, ShardingSphere's XA distributed transactions are not yet ready on Spring Boot OSS 3. + +## Special handling for earlier versions of Spring Boot OSS 2 + +All features of ShardingSphere are available on Spring Boot OSS 2, but earlier versions of Spring Boot OSS may require +manually specifying version 1.33 for SnakeYAML. +This is reflected in Maven's `pom.xml` as follows. 
+ +```xml + + + + org.apache.shardingsphere + shardingsphere-jdbc-core + ${shardingsphere.version} + + + org.yaml + snakeyaml + 1.33 + + + +``` +If the user created the Spring Boot project from https://start.spring.io/, or the `dependencyManagement` XML tag was +imported POM file for `org.springframework.boot:spring-boot-dependencies`, users can also choose to simplify the content +by configuring `properties` for `snakeyaml.version`. diff --git a/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/alter-encrypt-rule.cn.md b/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/alter-encrypt-rule.cn.md index ab6a3d1bd3f67..5983b73914cb9 100644 --- a/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/alter-encrypt-rule.cn.md +++ b/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/alter-encrypt-rule.cn.md @@ -77,7 +77,7 @@ value ::= ALTER ENCRYPT RULE t_encrypt ( COLUMNS( (NAME=user_id,CIPHER=user_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='AES',PROPERTIES('aes-key-value'='123456abc')))), -(NAME=order_id,CIPHER=order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='MD5'))) +(NAME=order_id,CIPHER=order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='RC4',PROPERTIES('rc4-key-value'='123456abc')))) )); ``` @@ -88,4 +88,4 @@ COLUMNS( ### 相关链接 - [保留字](/cn/user-manual/shardingsphere-proxy/distsql/syntax/reserved-word/) -- [加密算法](/cn/user-manual/common-config/builtin-algorithm/encrypt/) \ No newline at end of file +- [加密算法](/cn/user-manual/common-config/builtin-algorithm/encrypt/) diff --git a/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/alter-encrypt-rule.en.md b/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/alter-encrypt-rule.en.md index 10b1eb6efa49c..89f22f3cf7a58 100644 --- 
a/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/alter-encrypt-rule.en.md +++ b/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/alter-encrypt-rule.en.md @@ -76,7 +76,7 @@ value ::= ALTER ENCRYPT RULE t_encrypt ( COLUMNS( (NAME=user_id,CIPHER=user_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='AES',PROPERTIES('aes-key-value'='123456abc')))), -(NAME=order_id,CIPHER=order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='MD5'))) +(NAME=order_id,CIPHER=order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='RC4',PROPERTIES('rc4-key-value'='123456abc')))) )); ``` @@ -87,4 +87,4 @@ COLUMNS( ### Related links - [Reserved word](/en/user-manual/shardingsphere-proxy/distsql/syntax/reserved-word/) -- [Encryption Algorithm](/en/user-manual/common-config/builtin-algorithm/encrypt/) \ No newline at end of file +- [Encryption Algorithm](/en/user-manual/common-config/builtin-algorithm/encrypt/) diff --git a/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/create-encrypt-rule.cn.md b/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/create-encrypt-rule.cn.md index 579b7d96ef611..837c220105632 100644 --- a/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/create-encrypt-rule.cn.md +++ b/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/create-encrypt-rule.cn.md @@ -81,12 +81,12 @@ value ::= CREATE ENCRYPT RULE t_encrypt ( COLUMNS( (NAME=user_id,CIPHER=user_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='AES',PROPERTIES('aes-key-value'='123456abc')))), -(NAME=order_id, CIPHER =order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='MD5'))) +(NAME=order_id, CIPHER =order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='RC4',PROPERTIES('rc4-key-value'='123456abc')))) )), t_encrypt_2 ( COLUMNS( 
(NAME=user_id,CIPHER=user_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='AES',PROPERTIES('aes-key-value'='123456abc')))), -(NAME=order_id, CIPHER=order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='MD5'))) +(NAME=order_id, CIPHER=order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='RC4',PROPERTIES('rc4-key-value'='123456abc')))) )); ``` @@ -96,12 +96,12 @@ COLUMNS( CREATE ENCRYPT RULE IF NOT EXISTS t_encrypt ( COLUMNS( (NAME=user_id,CIPHER=user_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='AES',PROPERTIES('aes-key-value'='123456abc')))), -(NAME=order_id, CIPHER =order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='MD5'))) +(NAME=order_id, CIPHER =order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='RC4',PROPERTIES('rc4-key-value'='123456abc')))) )), t_encrypt_2 ( COLUMNS( (NAME=user_id,CIPHER=user_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='AES',PROPERTIES('aes-key-value'='123456abc')))), -(NAME=order_id, CIPHER=order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='MD5'))) +(NAME=order_id, CIPHER=order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='RC4',PROPERTIES('rc4-key-value'='123456abc')))) )); ``` @@ -112,4 +112,4 @@ COLUMNS( ### 相关链接 - [保留字](/cn/user-manual/shardingsphere-proxy/distsql/syntax/reserved-word/) -- [加密算法](/cn/user-manual/common-config/builtin-algorithm/encrypt/) \ No newline at end of file +- [加密算法](/cn/user-manual/common-config/builtin-algorithm/encrypt/) diff --git a/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/create-encrypt-rule.en.md b/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/create-encrypt-rule.en.md index 429cd76cb7d98..4128ed78ea4bb 100644 --- a/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/create-encrypt-rule.en.md +++ b/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/encrypt/create-encrypt-rule.en.md @@ -81,12 +81,12 @@ value ::= CREATE ENCRYPT RULE t_encrypt ( COLUMNS( 
(NAME=user_id,CIPHER=user_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='AES',PROPERTIES('aes-key-value'='123456abc')))), -(NAME=order_id, CIPHER =order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='MD5'))) +(NAME=order_id, CIPHER =order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='RC4',PROPERTIES('rc4-key-value'='123456abc')))) )), t_encrypt_2 ( COLUMNS( (NAME=user_id,CIPHER=user_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='AES',PROPERTIES('aes-key-value'='123456abc')))), -(NAME=order_id, CIPHER=order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='MD5'))) +(NAME=order_id, CIPHER=order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='RC4',PROPERTIES('rc4-key-value'='123456abc')))) )); ``` @@ -96,12 +96,12 @@ COLUMNS( CREATE ENCRYPT RULE IF NOT EXISTS t_encrypt ( COLUMNS( (NAME=user_id,CIPHER=user_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='AES',PROPERTIES('aes-key-value'='123456abc')))), -(NAME=order_id, CIPHER =order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='MD5'))) +(NAME=order_id, CIPHER =order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='RC4',PROPERTIES('rc4-key-value'='123456abc')))) )), t_encrypt_2 ( COLUMNS( (NAME=user_id,CIPHER=user_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='AES',PROPERTIES('aes-key-value'='123456abc')))), -(NAME=order_id, CIPHER=order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='MD5'))) +(NAME=order_id, CIPHER=order_cipher,ENCRYPT_ALGORITHM(TYPE(NAME='RC4',PROPERTIES('rc4-key-value'='123456abc')))) )); ``` diff --git a/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/sharding/create-sharding-table-rule.cn.md b/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/sharding/create-sharding-table-rule.cn.md index 572a7e6677b40..0afb8a6894613 100644 --- a/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/sharding/create-sharding-table-rule.cn.md +++ b/docs/document/content/user-manual/shardingsphere-proxy/distsql/syntax/rdl/rule-definition/sharding/create-sharding-table-rule.cn.md @@ -83,7 +83,7 @@ strategyType ::= - 
只能使用自动分片算法,可参考[自动分片算法](/cn/user-manual/common-config/builtin-algorithm/sharding/#自动分片算法)。 - `algorithmType` 为分片算法类型,分片算法类型请参考[分片算法](/cn/user-manual/common-config/builtin-algorithm/sharding/); - 自动生成的算法命名规则为 `tableName` _ `strategyType` _ `algorithmType`; -- 自动生成的主键策略命名规则为 `tableName` _ `strategyType; +- 自动生成的主键策略命名规则为 `tableName` _ `strategyType`; - `KEY_GENERATE_STRATEGY` 用于指定主键生成策略,为可选项,关于主键生成策略可参考[分布式主键](/cn/user-manual/common-config/builtin-algorithm/keygen/); - `AUDIT_STRATEGY` diff --git a/docs/document/content/user-manual/shardingsphere-proxy/startup/graalvm-native-image.cn.md b/docs/document/content/user-manual/shardingsphere-proxy/startup/graalvm-native-image.cn.md index c0e5f29ef3e0d..973e550be6911 100644 --- a/docs/document/content/user-manual/shardingsphere-proxy/startup/graalvm-native-image.cn.md +++ b/docs/document/content/user-manual/shardingsphere-proxy/startup/graalvm-native-image.cn.md @@ -5,14 +5,13 @@ weight = 2 ## 背景信息 -本节主要介绍如何通过 `GraalVM` 的 `native-image` 组件构建 ShardingSphere-Proxy 的 `Native Image` 和对应的 `Docker Image` -。 +本节主要介绍如何通过 `GraalVM` 的 `native-image` 组件构建 ShardingSphere-Proxy 的 `Native Image` 和对应的 `Docker Image`。 ## 注意事项 -- ShardingSphere Proxy 尚未准备好与 GraalVM Native Image 集成。 - 其在 https://github.com/apache/shardingsphere/pkgs/container/shardingsphere-proxy-native 存在每夜构建。 - 假设存在包含`server.yaml` 的 `conf` 文件夹为 `./custom/conf`,你可通过如下的 `docker-compose.yml` 文件进行测试。 +- ShardingSphere Proxy 尚未准备好与 GraalVM Native Image 集成。 Proxy 的 Native Image 产物在 + https://github.com/apache/shardingsphere/pkgs/container/shardingsphere-proxy-native 存在每夜构建。假设存在包 + 含`server.yaml` 的 `conf` 文件夹为 `./custom/conf`,你可通过如下的 `docker-compose.yml` 文件进行测试。 ```yaml version: "3.8" @@ -30,37 +29,40 @@ services: 应当在 https://github.com/oracle/graalvm-reachability-metadata 打开新的 issue , 并提交包含 ShardingSphere 自身或依赖的第三方库缺失的 GraalVM Reachability Metadata 的 PR。 -- ShardingSphere 的 master 分支尚未准备好处理 Native Image 中的单元测试, - 需要等待 Junit 5 Platform 的集成,你总是需要在构建 GraalVM Native Image 的过程中, +- 
ShardingSphere 的 master 分支尚未准备好处理 Native Image 中的单元测试 , 你总是需要在构建 GraalVM Native Image 的过程中, 加上特定于 `GraalVM Native Build Tools` 的 `-DskipNativeTests` 或 `-DskipTests` 参数跳过 Native Image 中的单元测试。 -- 如下 3 个算法类由于涉及到 GraalVM Truffle Espresso 不方便在 host JVM 和 guest JVM 之间交互的 `groovy.lang.Closure` - 类,暂未可在 GraalVM Native Image 下使用。 +- 如下的算法类由于涉及到 https://github.com/oracle/graal/issues/5522 , 暂未可在 GraalVM Native Image 下使用。 - `org.apache.shardingsphere.sharding.algorithm.sharding.inline.InlineShardingAlgorithm` - `org.apache.shardingsphere.sharding.algorithm.sharding.inline.ComplexInlineShardingAlgorithm` - `org.apache.shardingsphere.sharding.algorithm.sharding.hint.HintInlineShardingAlgorithm` -- 当前阶段,GraalVM Native Image 形态的 ShardingSphere Proxy 处于混合 AOT ( GraalVM Native Image ) 和 JIT ( GraalVM - Truffle Espresso ) 运行的阶段。由于 https://github.com/oracle/graal/issues/4555 尚未关闭,GraalVM Truffle Espresso - 运行需要的 `.so` 文件并不会进入 GraalVM Native Image 内。因此如果你需要在 Docker Image 外运行 ShardingSphere Proxy - Native 的二进制文件,你需要确保系统环境变量 `JAVA_HOME` 指向 GraalVM 的 `bin` 目录,并且此 GraalVM - 实例已经通过 `GraalVM Updater` 安装了 `espresso` 组件。 +- 当前阶段,GraalVM Native Image 形态的 ShardingSphere Proxy 不支持使用带 Groovy + 语法的 `行表达式`, 这首先导致 `数据分片` 功能的`actualDataNodes`属性只能使用纯列表来配置, 例如 `ds_0.t_order_0, ds_0.t_order_1` + 或 `ds_0.t_user_0, ds_15.t_user_1023`。此问题在 https://github.com/oracle/graal/issues/5522 追踪。 - 本节假定处于 Linux(amd64,aarch64), MacOS(amd64)或 Windows(amd64)环境。 如果你位于 MacOS(aarch64/M1) 环境,你需要关注尚未关闭的 https://github.com/oracle/graal/issues/2666 。 +- `org.apache.shardingsphere:shardingsphere-cluster-mode-repository-etcd` 受 + https://github.com/micronaut-projects/micronaut-gcp/issues/532 影响,不可使用。 + ## 前提条件 -1. 根据 https://www.graalvm.org/downloads/ 要求安装和配置 JDK 17 对应的 `GraalVM CE` 或 `GraalVM EE`。 - 同时可以通过 `SDKMAN!` 安装 JDK 17 对应的 `GraalVM CE`。 +1. 
根据 https://www.graalvm.org/downloads/ 要求安装和配置 JDK 17 对应的 `GraalVM Community Edition` 或 `Oracle GraalVM` + 。或者使用 `SDKMAN!` + 。如果你希望使用携带了 [GraalVM Free Terms and Conditions license](https://www.oracle.com/downloads/licenses/graal-free-license.html) + 的 `Oracle GraalVM`,下面的命令应更改为 `sdk install java 17.0.8-graal`。 -2. 通过 `GraalVM Updater` 工具安装 `native-image` 和 `espresso` 组件。 +```shell +sdk install java 17.0.8-graalce +``` -3. 根据 https://www.graalvm.org/22.3/reference-manual/native-image/#prerequisites 的要求安装本地工具链。 +2. 根据 https://www.graalvm.org/latest/reference-manual/native-image/#prerequisites 的要求安装本地工具链。 -4. 如果需要构建 Docker Image, 确保 `docker-ce` 已安装。 +3. 如果需要构建 Docker Image, 确保 `docker-ce` 已安装。 -5. 首先需要在项目的根目录下,执行如下命令以为所有子模块采集 Standard 形态的 GraalVM 可达性元数据。 +4. 首先需要在项目的根目录下,执行如下命令以为所有子模块采集 Standard 形态的 GraalVM 可达性元数据。 ```shell ./mvnw -PgenerateStandardMetadata -DskipNativeTests -B -T1C clean test @@ -94,12 +96,12 @@ services: com.mysql mysql-connector-j - 8.0.32 + 8.1.0 org.apache.shardingsphere shardingsphere-sql-translator-jooq-provider - 5.3.1 + 5.4.0 ``` @@ -142,31 +144,28 @@ services: - 如果你不对 Git Source 做任何更改, 上文提及的命令将使用 `oraclelinux:9-slim` 作为 Base Docker Image。 但如果你希望使用 `busybox:glic`,`gcr.io/distroless/base` 或 `scratch` 等更小体积的 Docker Image 作为 Base Docker - Image,你需要根据 https://www.graalvm.org/22.3/reference-manual/native-image/guides/build-static-executables/ 的要求, + Image,你需要根据 https://www.graalvm.org/latest/reference-manual/native-image/guides/build-static-executables/ 的要求, 做为 `pom.xml`的 `native profile` 添加 `-H:+StaticExecutableWithDynamicLibC` 的 `jvmArgs` 等操作。 另请注意,某些第三方依赖将需要在 `Dockerfile` 安装更多系统库,例如 `libdl`。 - 因此请确保根据你的使用情况调整 `distribution/proxy-native` - 下的 `pom.xml` 和 `Dockerfile` 的内容。 + 因此请确保根据你的使用情况调整 `distribution/proxy-native` 下的 `pom.xml` 和 `Dockerfile` 的内容。 # 可观察性 -- 针对 GraalVM Native Image 形态的 ShardingSphere - Proxy,其提供的可观察性的能力与 https://shardingsphere.apache.org/document/current/cn/user-manual/shardingsphere-proxy/observability/ - 并不一致。 +- 针对 GraalVM 
Native Image 形态的 ShardingSphere Proxy,其提供的可观察性的能力与 + https://shardingsphere.apache.org/document/current/cn/user-manual/shardingsphere-proxy/observability/ 并不一致。 -- 你可以使用 https://www.graalvm.org/22.3/tools/ 提供的一系列命令行工具或可视化工具观察 GraalVM Native Image - 的内部行为,并根据其要求使用 VSCode 完成调试工作。 - 如果你正在使用 IntelliJ IDEA 并且希望调试生成的 GraalVM Native - Image,你可以关注 https://blog.jetbrains.com/idea/2022/06/intellij-idea-2022-2-eap-5/#Experimental_GraalVM_Native_Debugger_for_Java - 及其后继。如果你使用的不是 Linux,则无法对 GraalVM Native Image 进行 - Debug,请关注尚未关闭的 https://github.com/oracle/graal/issues/5648 。 +- 你可以使用 https://www.graalvm.org/latest/tools/ 提供的一系列命令行工具或可视化工具观察 GraalVM Native Image 的内部行为, + 并根据其要求使用 VSCode 完成调试工作。如果你正在使用 IntelliJ IDEA 并且希望调试生成的 GraalVM Native Image,你可以关注 + https://blog.jetbrains.com/idea/2022/06/intellij-idea-2022-2-eap-5/#Experimental_GraalVM_Native_Debugger_for_Java + 及其后继。如果你使用的不是 Linux,则无法对 GraalVM Native Image 进行 Debug,请关注尚未关闭的 + https://github.com/oracle/graal/issues/5648 。 - 对于使用 `ShardingSphere Agent` 等 APM Java Agent 的情形, GraalVM 的 `native-image` 组件尚未完全支持在构建 Native Image 时使用 javaagent,你需要关注尚未关闭的 https://github.com/oracle/graal/issues/1065。 - 以下部分采用 `Apache SkyWalking Java Agent` 作为示例,可用于跟踪 GraalVM 社区的对应 issue。 -1. 下载 https://archive.apache.org/dist/skywalking/java-agent/8.12.0/apache-skywalking-java-agent-8.12.0.tgz , +1. 下载 https://archive.apache.org/dist/skywalking/java-agent/8.16.0/apache-skywalking-java-agent-8.16.0.tgz , 并解压到 ShardingSphere Git Source 的 `distribution/proxy-native`。 2. 
修改 `distribution/proxy-native/pom.xml` 的 `native profile`, diff --git a/docs/document/content/user-manual/shardingsphere-proxy/startup/graalvm-native-image.en.md b/docs/document/content/user-manual/shardingsphere-proxy/startup/graalvm-native-image.en.md index 5d37f04ac1fbb..993d9dcd48263 100644 --- a/docs/document/content/user-manual/shardingsphere-proxy/startup/graalvm-native-image.en.md +++ b/docs/document/content/user-manual/shardingsphere-proxy/startup/graalvm-native-image.en.md @@ -10,9 +10,8 @@ corresponding `Docker Image` through the `native-image` component of `GraalVM`. ## Notice -- ShardingSphere Proxy is not yet ready to integrate with GraalVM Native Image. - Fixes documentation for building GraalVM Native Image It exists nightly builds - at https://github.com/apache/shardingsphere/pkgs/container/shardingsphere-proxy-native. +- ShardingSphere Proxy is not yet ready to integrate with GraalVM Native Image. Proxy's Native Image artifacts are + built nightly at https://github.com/apache/shardingsphere/pkgs/container/shardingsphere-proxy-native . Assuming there is a `conf` folder containing `server.yaml` as `./custom/conf`, you can test it with the following `docker-compose.yml` file. @@ -28,46 +27,49 @@ services: - "3307:3307" ```` -- If you find that the build process has missing GraalVM Reachability Metadata, - A new issue should be opened at https://github.com/oracle/graalvm-reachability-metadata, - And submit a PR containing GraalVM Reachability Metadata missing from ShardingSphere itself or dependent third-party - libraries. +- If you find that the build process has missing GraalVM Reachability Metadata, a new issue should be opened + at https://github.com/oracle/graalvm-reachability-metadata, and submit a PR containing GraalVM Reachability Metadata + missing from ShardingSphere itself or dependent third-party libraries. 
- The master branch of ShardingSphere is not yet ready to handle unit tests in Native Image, - Need to wait for the integration of Junit 5 Platform, you always need to build GraalVM Native Image in the process, + you always need to build GraalVM Native Image in the process, Plus `-DskipNativeTests` or `-DskipTests` parameter specific to `GraalVM Native Build Tools` to skip unit tests in Native Image. -- The following three algorithm classes are not available under GraalVM Native Image because they involve - the `groovy.lang.Closure` class that is inconvenient for GraalVM Truffle Espresso to interact between the host JVM and - the guest JVM. +- The following algorithm classes are not available under GraalVM Native Image due + to https://github.com/oracle/graal/issues/5522 involved. - `org.apache.shardingsphere.sharding.algorithm.sharding.inline.InlineShardingAlgorithm` - `org.apache.shardingsphere.sharding.algorithm.sharding.inline.ComplexInlineShardingAlgorithm` - `org.apache.shardingsphere.sharding.algorithm.sharding.hint.HintInlineShardingAlgorithm` -- At the current stage, ShardingSphere Proxy in GraalVM Native Image is in the stage of mixed AOT ( GraalVM - Native Image ) and JIT ( GraalVM Truffle Espresso ) operation. Since https://github.com/oracle/graal/issues/4555 has - not been closed, the `.so` file required for GraalVM Truffle Espresso to run does not enter the GraalVM Native Image. - So if you need to run ShardingSphere Proxy Native binary files outside the Docker Image, you need to ensure - that the system environment variable `JAVA_HOME` points to the `bin` directory of GraalVM, and this - GraalVM instance already has the `espresso` component installed via the `GraalVM Updater`. 
+- At this stage, ShardingSphere Proxy in the form of GraalVM Native Image does not support the use + of `Row Value Expressions` with Groovy syntax, which first results in the `actualDataNodes` property of the `Sharding` + feature being only configurable using a pure list, such as `ds_0.t_order_0, ds_0.t_order_1` + or `ds_0.t_user_0, ds_15.t_user_1023`. This issue is tracked in https://github.com/oracle/graal/issues/5522 . - This section assumes a Linux (amd64, aarch64), MacOS (amd64) or Windows (amd64) environment. If you are on MacOS (aarch64/M1) environment, you need to follow https://github.com/oracle/graal/issues/2666 which is not closed yet. +- 'org.apache.shardingsphere:shardingsphere-cluster-mode-repository-etcd' is affected by + https://github.com/micronaut-projects/micronaut-gcp/issues/532 and cannot be used. + ## Premise -1. Install and configure `GraalVM CE` or `GraalVM EE` for JDK 17 according to https://www.graalvm.org/downloads/. - `GraalVM CE` for JDK 17 can also be installed via `SDKMAN!`. +1. Install and configure `GraalVM Community Edition` or `Oracle GraalVM` for JDK 17 according + to https://www.graalvm.org/downloads/. Or use `SDKMAN!`. If you wish to use `Oracle GraalVM` + with [GraalVM Free Terms and Conditions license](https://www.oracle.com/downloads/licenses/graal-free-license.html), + the following command should be changed to `sdk install java 17.0.8-graal`. -2. Install the `native-image` and `espresso` component via the `GraalVM Updater` tool. +```shell +sdk install java 17.0.8-graalce +``` -3. Install the local toolchain as required by https://www.graalvm.org/22.3/reference-manual/native-image/#prerequisites. +2. Install the local toolchain as required by https://www.graalvm.org/latest/reference-manual/native-image/#prerequisites. -4. If you need to build a Docker Image, make sure `docker-ce` is installed. +3. If you need to build a Docker Image, make sure `docker-ce` is installed. -5. 
First, you need to execute the following command in the root directory of the project to collect the GraalVM +4. First, you need to execute the following command in the root directory of the project to collect the GraalVM Reachability Metadata of the Standard form for all submodules. ```shell @@ -104,12 +106,12 @@ services: com.mysql mysql-connector-j - 8.0.32 + 8.1.0 org.apache.shardingsphere shardingsphere-sql-translator-jooq-provider - 5.3.1 + 5.4.0 ``` @@ -120,12 +122,11 @@ services: ./mvnw -am -pl distribution/proxy-native -B -T1C -Prelease.native -DskipTests clean package ``` -3. To start Native Image through the command line, you need to bring 4 parameters. - The first parameter is the port used by ShardingSphere Proxy, the second parameter is the `/conf` folder - containing `server.yaml` written by you, the third parameter is the Address of the bound port, and the fourth parameter is - Force Start, if it is true, it will ensure that ShardingSphere Proxy Native can start normally no matter whether it - is connected or not. - Assuming the folder `./custom/conf` already exists, the example is +3. To start Native Image through the command line, you need to bring 4 parameters. The first parameter is the `Port` + used by ShardingSphere Proxy, the second parameter is the `/conf` folder containing `server.yaml` written by you, the + third parameter is the `Address` of the bound port, and the fourth parameter is `Force Start`, if it is true, it will + ensure that ShardingSphere Proxy Native can start normally no matter whether it is connected or not. Assuming the + folder `./custom/conf` already exists, the example is ```bash ./apache-shardingsphere-proxy-native 3307 ./custom/conf "0.0.0.0" false @@ -154,26 +155,24 @@ services: ``` - If you don't make any changes to the Git Source, the commands mentioned above will use `oraclelinux:9-slim` as the - Base Docker Image. 
- But if you want to use a smaller Docker Image like `busybox:glic`, `gcr.io/distroless/base` or `scratch` as the Base - Docker Image, you need according - to https://www.graalvm.org/22.3/reference-manual/native-image/guides/build-static-executables/, - Add operations such as `-H:+StaticExecutableWithDynamicLibC` to `jvmArgs` as the `native profile` of `pom.xml`. - Also note that some 3rd party dependencies will require more system libraries such as `libdl` to be installed in - the `Dockerfile`. - So make sure to tune `distribution/proxy-native` according to your usage - `pom.xml` and `Dockerfile` below. + Base Docker Image. But if you want to use a smaller Docker Image like `busybox:glic`, `gcr.io/distroless/base` or + `scratch` as the Base Docker Image, you need according + to https://www.graalvm.org/latest/reference-manual/native-image/guides/build-static-executables/, + add operations such as `-H:+StaticExecutableWithDynamicLibC` to `jvmArgs` as the `native profile` of `pom.xml`. + Also note that some 3rd-party dependencies will require more system libraries such as `libdl` to be installed in + the `Dockerfile`. So make sure to tune `distribution/proxy-native` according to your usage `pom.xml` and `Dockerfile` + below. # Observability - ShardingSphere for GraalVM Native Image form Proxy, which provides observability capabilities with https://shardingsphere.apache.org/document/current/cn/user-manual/shardingsphere-proxy/observability/ - Not consistent. + not consistent. - You can observe GraalVM Native Image using a series of command line tools or visualization tools available - at https://www.graalvm.org/22.3/tools/, and use VSCode to debug it according to its requirements. 
- If you are using IntelliJ IDEA and want to debug the generated GraalVM Native Image, You can - follow https://blog.jetbrains.com/idea/2022/06/intellij-idea-2022-2-eap-5/#Experimental_GraalVM_Native_Debugger_for_Java + at https://www.graalvm.org/latest/tools/, and use VSCode to debug it according to its requirements. + If you are using IntelliJ IDEA and want to debug the generated GraalVM Native Image, you can follow + https://blog.jetbrains.com/idea/2022/06/intellij-idea-2022-2-eap-5/#Experimental_GraalVM_Native_Debugger_for_Java and its successors. If you are not using Linux, you cannot debug GraalVM Native Image, please pay attention to https://github.com/oracle/graal/issues/5648 which has not been closed yet. @@ -184,7 +183,7 @@ services: - The following sections use the `Apache SkyWalking Java Agent` as an example, which can be used to track corresponding issues from the GraalVM community. -1. Download https://archive.apache.org/dist/skywalking/java-agent/8.12.0/apache-skywalking-java-agent-8.12.0.tgz and `untar` it +1. Download https://archive.apache.org/dist/skywalking/java-agent/8.16.0/apache-skywalking-java-agent-8.16.0.tgz and `untar` it to `distribution/proxy-native` in ShardingSphere Git Source. 2.
Modify the `native profile` of `distribution/proxy-native/pom.xml`, diff --git a/docs/document/content/user-manual/shardingsphere-proxy/yaml-config/authority.cn.md b/docs/document/content/user-manual/shardingsphere-proxy/yaml-config/authority.cn.md index ad5dd02b2683c..79b3227be9124 100644 --- a/docs/document/content/user-manual/shardingsphere-proxy/yaml-config/authority.cn.md +++ b/docs/document/content/user-manual/shardingsphere-proxy/yaml-config/authority.cn.md @@ -118,4 +118,4 @@ authority: ## 相关参考 -权限提供者具体实现可以参考 [权限提供者](/cn/dev-manual/proxy)。 +权限提供者具体实现可以参考 [权限提供者](/cn/user-manual/shardingsphere-proxy/yaml-config/authority/)。 diff --git a/docs/document/content/user-manual/shardingsphere-proxy/yaml-config/authority.en.md b/docs/document/content/user-manual/shardingsphere-proxy/yaml-config/authority.en.md index c3226d22a0950..307b119c6513d 100644 --- a/docs/document/content/user-manual/shardingsphere-proxy/yaml-config/authority.en.md +++ b/docs/document/content/user-manual/shardingsphere-proxy/yaml-config/authority.en.md @@ -119,4 +119,4 @@ Explanation: ## Related References -Please refer to [Authority Provider](/en/dev-manual/proxy) for the specific implementation of authority provider. +Please refer to [Authority Provider](/en/user-manual/shardingsphere-proxy/yaml-config/authority/) for the specific implementation of authority provider. 
diff --git a/examples/README.md b/examples/README.md index 1b40916951ac1..6d1fa83bf5b63 100644 --- a/examples/README.md +++ b/examples/README.md @@ -40,7 +40,7 @@ shardingsphere-example │   ├── example-raw-jdbc │   ├── example-spring-jpa │   └── example-spring-mybatis - ├── shardingsphere-example-generator + ├── shardingsphere-jdbc-example-generator ├── shardingsphere-parser-example ├── shardingsphere-proxy-example │   ├── shardingsphere-proxy-boot-mybatis-example @@ -54,7 +54,7 @@ shardingsphere-example | Example | Description | |------------------------------------------------------------------------------|--------------------------------------------------------------------------------| -| [ShardingSphere-JDBC Examples](shardingsphere-example-generator/README.md) | Generate the examples by configuration and show how to use ShardingSphere-JDBC | +| [ShardingSphere-JDBC Examples](shardingsphere-jdbc-example-generator/README.md) | Generate the examples by configuration and show how to use ShardingSphere-JDBC | | [DistSQL](shardingsphere-proxy-example/shardingsphere-proxy-distsql-example) | show how to use DistSQL in ShardingSphere-Proxy | | APM(Pending) | show how to use APM in ShardingSphere | | proxy(Pending) | show how to use ShardingSphere-Proxy | diff --git a/examples/README_ZH.md b/examples/README_ZH.md index 0589f2529d59a..d77a88f01e949 100644 --- a/examples/README_ZH.md +++ b/examples/README_ZH.md @@ -6,7 +6,7 @@ **注意事项** -- *`shardingsphere-example-generator`模块是一个全新的示例体验模块* +- *`shardingsphere-jdbc-example-generator`模块是一个全新的示例体验模块* - *如果采用手动模式,请在首次运行示例之前执行[初始化脚本](https://github.com/apache/shardingsphere/blob/master/examples/src/resources/manual_schema.sql)。* @@ -41,7 +41,7 @@ shardingsphere-example │   ├── example-raw-jdbc │   ├── example-spring-jpa │   └── example-spring-mybatis - ├── shardingsphere-example-generator + ├── shardingsphere-jdbc-example-generator ├── shardingsphere-parser-example ├── shardingsphere-proxy-example │   ├── 
shardingsphere-proxy-boot-mybatis-example @@ -55,7 +55,7 @@ shardingsphere-example | 例子 | 描述 | |------------------------------------------------------------------------------|--------------------------------------| -| [ShardingSphere-JDBC示例](shardingsphere-example-generator/README_ZH.md) | 通过配置生成ShardingSphere-JDBC的演示示例 | +| [ShardingSphere-JDBC示例](shardingsphere-jdbc-example-generator/README.md) | 通过配置生成ShardingSphere-JDBC的演示示例 | | [DistSQL](shardingsphere-proxy-example/shardingsphere-proxy-distsql-example) | 演示在 ShardingSphere-Proxy 中使用 DistSQL | | APM 监控(Pending) | 演示在 ShardingSphere 中使用 APM 监控 | | proxy(Pending) | 演示使用 ShardingSphere-Proxy | diff --git a/examples/example-core/config-utility/pom.xml b/examples/example-core/config-utility/pom.xml deleted file mode 100644 index 3d71b69a1f356..0000000000000 --- a/examples/example-core/config-utility/pom.xml +++ /dev/null @@ -1,41 +0,0 @@ - - - - - 4.0.0 - - org.apache.shardingsphere.example - example-core - ${revision} - - config-utility - ${project.artifactId} - - - - org.apache.shardingsphere - shardingsphere-sharding-core - - - org.apache.shardingsphere - shardingsphere-encrypt-core - - - diff --git a/examples/example-core/config-utility/src/main/java/org/apache/shardingsphere/example/algorithm/StandardModuloShardingDatabaseAlgorithm.java b/examples/example-core/config-utility/src/main/java/org/apache/shardingsphere/example/algorithm/StandardModuloShardingDatabaseAlgorithm.java deleted file mode 100644 index 8cf45e1cfe15a..0000000000000 --- a/examples/example-core/config-utility/src/main/java/org/apache/shardingsphere/example/algorithm/StandardModuloShardingDatabaseAlgorithm.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.algorithm; - -import com.google.common.collect.Range; -import org.apache.shardingsphere.sharding.api.sharding.standard.PreciseShardingValue; -import org.apache.shardingsphere.sharding.api.sharding.standard.RangeShardingValue; -import org.apache.shardingsphere.sharding.api.sharding.standard.StandardShardingAlgorithm; - -import java.util.Collection; -import java.util.LinkedHashSet; -import java.util.Set; - -public final class StandardModuloShardingDatabaseAlgorithm implements StandardShardingAlgorithm { - - @Override - public String doSharding(final Collection databaseNames, final PreciseShardingValue shardingValue) { - for (String each : databaseNames) { - if (each.endsWith(String.valueOf(shardingValue.getValue() % 2))) { - return each; - } - } - throw new UnsupportedOperationException(""); - } - - @Override - public Collection doSharding(final Collection databaseNames, final RangeShardingValue shardingValueRange) { - Set result = new LinkedHashSet<>(); - if (Range.closed(1, 5).encloses(shardingValueRange.getValueRange())) { - for (String each : databaseNames) { - if (each.endsWith("0")) { - result.add(each); - } - } - } else if (Range.closed(6, 10).encloses(shardingValueRange.getValueRange())) { - for (String each : databaseNames) { - if (each.endsWith("1")) { - result.add(each); - } - } - } else if (Range.closed(1, 
10).encloses(shardingValueRange.getValueRange())) { - result.addAll(databaseNames); - } else { - throw new UnsupportedOperationException(""); - } - return result; - } - - @Override - public String getType() { - return "STANDARD_TEST_DB"; - } -} diff --git a/examples/example-core/config-utility/src/main/java/org/apache/shardingsphere/example/algorithm/StandardModuloShardingTableAlgorithm.java b/examples/example-core/config-utility/src/main/java/org/apache/shardingsphere/example/algorithm/StandardModuloShardingTableAlgorithm.java deleted file mode 100644 index ee99a459bd187..0000000000000 --- a/examples/example-core/config-utility/src/main/java/org/apache/shardingsphere/example/algorithm/StandardModuloShardingTableAlgorithm.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.algorithm; - -import com.google.common.collect.Range; -import org.apache.shardingsphere.sharding.api.sharding.standard.PreciseShardingValue; -import org.apache.shardingsphere.sharding.api.sharding.standard.RangeShardingValue; -import org.apache.shardingsphere.sharding.api.sharding.standard.StandardShardingAlgorithm; - -import java.util.Collection; -import java.util.LinkedHashSet; -import java.util.Set; - -public final class StandardModuloShardingTableAlgorithm implements StandardShardingAlgorithm { - - @Override - public String doSharding(final Collection tableNames, final PreciseShardingValue shardingValue) { - for (String each : tableNames) { - if (each.endsWith(String.valueOf(shardingValue.getValue() % 2))) { - return each; - } - } - throw new UnsupportedOperationException(""); - } - - @Override - public Collection doSharding(final Collection tableNames, final RangeShardingValue shardingValue) { - Set result = new LinkedHashSet<>(); - if (Range.closed(200000000000000000L, 400000000000000000L).encloses(shardingValue.getValueRange())) { - for (String each : tableNames) { - if (each.endsWith("0")) { - result.add(each); - } - } - } else { - throw new UnsupportedOperationException(""); - } - return result; - } - - @Override - public String getType() { - return "STANDARD_TEST_TBL"; - } -} diff --git a/examples/example-core/config-utility/src/main/java/org/apache/shardingsphere/example/fixture/TestQueryAssistedShardingEncryptAlgorithm.java b/examples/example-core/config-utility/src/main/java/org/apache/shardingsphere/example/fixture/TestQueryAssistedShardingEncryptAlgorithm.java deleted file mode 100644 index 0ddab4cd49fd3..0000000000000 --- a/examples/example-core/config-utility/src/main/java/org/apache/shardingsphere/example/fixture/TestQueryAssistedShardingEncryptAlgorithm.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.fixture; - -import org.apache.shardingsphere.encrypt.api.context.EncryptContext; -import org.apache.shardingsphere.encrypt.api.encrypt.assisted.AssistedEncryptAlgorithm; - -public final class TestQueryAssistedShardingEncryptAlgorithm implements AssistedEncryptAlgorithm { - - @Override - public String encrypt(final Object plainValue, final EncryptContext encryptContext) { - return "assistedEncryptValue"; - } - - @Override - public String getType() { - return "assistedTest"; - } -} diff --git a/examples/example-core/config-utility/src/main/java/org/apache/shardingsphere/example/type/ShardingType.java b/examples/example-core/config-utility/src/main/java/org/apache/shardingsphere/example/type/ShardingType.java deleted file mode 100644 index ac96eb5087087..0000000000000 --- a/examples/example-core/config-utility/src/main/java/org/apache/shardingsphere/example/type/ShardingType.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.type; - -public enum ShardingType { - - SHARDING_DATABASES, - - SHARDING_TABLES, - - SHARDING_DATABASES_AND_TABLES, - - SHARDING_SHADOW_DATABASES, - - ENCRYPT_SHADOW, - - READWRITE_SPLITTING, - - READWRITE_SPLITTING_SHADOW, - - SHARDING_READWRITE_SPLITTING, - - ENCRYPT, - - SHADOW, - - SHADOW_DEFAULT_ALGORITHM, - - SHARDING_AUTO_TABLES, - - SHARDING_HINT_DATABASES_ONLY, - - SHARDING_HINT_DATABASES_TABLES, - - READWRITE_SPLITTING_HINT, - - SHARDING_DATABASES_INTERVAL -} diff --git a/examples/example-core/config-utility/src/main/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.ShardingAlgorithm b/examples/example-core/config-utility/src/main/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.ShardingAlgorithm deleted file mode 100644 index 82f134be5bdfc..0000000000000 --- a/examples/example-core/config-utility/src/main/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.ShardingAlgorithm +++ /dev/null @@ -1,19 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -org.apache.shardingsphere.example.algorithm.StandardModuloShardingTableAlgorithm -org.apache.shardingsphere.example.algorithm.StandardModuloShardingDatabaseAlgorithm diff --git a/examples/example-core/example-api/pom.xml b/examples/example-core/example-api/pom.xml deleted file mode 100644 index 94ed6c54b4aef..0000000000000 --- a/examples/example-core/example-api/pom.xml +++ /dev/null @@ -1,30 +0,0 @@ - - - - - 4.0.0 - - org.apache.shardingsphere.example - example-core - ${revision} - - example-api - ${project.artifactId} - diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/DataSourceUtils.java b/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/DataSourceUtils.java deleted file mode 100644 index d1f5329a2634a..0000000000000 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/DataSourceUtils.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.api; - -import com.zaxxer.hikari.HikariDataSource; - -import javax.sql.DataSource; - -public final class DataSourceUtils { - - private static final String HOST = "localhost"; - - private static final int PORT = 3306; - - private static final String USER_NAME = "root"; - - private static final String PASSWORD = ""; - - public static DataSource createDataSource(final String dataSourceName) { - HikariDataSource result = new HikariDataSource(); - result.setDriverClassName("com.mysql.jdbc.Driver"); - result.setJdbcUrl(String.format("jdbc:mysql://%s:%s/%s?serverTimezone=UTC&useSSL=false&useUnicode=true&characterEncoding=UTF-8", HOST, PORT, dataSourceName)); - result.setUsername(USER_NAME); - result.setPassword(PASSWORD); - return result; - } -} diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/ExampleExecuteTemplate.java b/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/ExampleExecuteTemplate.java deleted file mode 100644 index 93efe772cd1a5..0000000000000 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/ExampleExecuteTemplate.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.api; - -import org.apache.shardingsphere.example.core.api.service.ExampleService; - -import java.sql.SQLException; - -public final class ExampleExecuteTemplate { - - public static void run(final ExampleService exampleService) throws SQLException { - try { - exampleService.initEnvironment(); - exampleService.processSuccess(); - } finally { - exampleService.cleanEnvironment(); - } - } - - public static void runFailure(final ExampleService exampleService) throws SQLException { - try { - exampleService.initEnvironment(); - exampleService.processFailure(); - } finally { - exampleService.cleanEnvironment(); - } - } -} diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/ShadowUser.java b/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/ShadowUser.java deleted file mode 100644 index 9ef34bc091e0e..0000000000000 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/ShadowUser.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.api.entity; - -import java.io.Serializable; - -public class ShadowUser implements Serializable { - - private static final long serialVersionUID = -6711618386636677067L; - - private int userId; - - private int userType; - - private String username; - - private String usernamePlain; - - private String pwd; - - private String assistedQueryPwd; - - public int getUserId() { - return userId; - } - - public void setUserId(int userId) { - this.userId = userId; - } - - public int getUserType() { - return userType; - } - - public void setUserType(int userType) { - this.userType = userType; - } - - public String getUsername() { - return username; - } - - public void setUsername(String username) { - this.username = username; - } - - public String getUsernamePlain() { - return usernamePlain; - } - - public void setUsernamePlain(String usernamePlain) { - this.usernamePlain = usernamePlain; - } - - public String getPwd() { - return pwd; - } - - public void setPwd(String pwd) { - this.pwd = pwd; - } - - public String getAssistedQueryPwd() { - return assistedQueryPwd; - } - - public void setAssistedQueryPwd(String assistedQueryPwd) { - this.assistedQueryPwd = assistedQueryPwd; - } - - @Override - public String toString() { - return String.format("user_id: %d, user_type: %d, username: %s, username_plain: %s, pwd: %s, assisted_query_pwd: %s", userId, 
userType, username, usernamePlain, pwd, - assistedQueryPwd); - } -} diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/AccountRepository.java b/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/AccountRepository.java deleted file mode 100644 index 30964911f58f0..0000000000000 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/AccountRepository.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.api.repository; - -import org.apache.shardingsphere.example.core.api.entity.Account; - -public interface AccountRepository extends CommonRepository { -} diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/AddressRepository.java b/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/AddressRepository.java deleted file mode 100644 index 2f7edcc7d9506..0000000000000 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/AddressRepository.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.api.repository; - -import org.apache.shardingsphere.example.core.api.entity.Address; - -public interface AddressRepository extends CommonRepository { -} diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/CommonRepository.java b/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/CommonRepository.java deleted file mode 100644 index 1bb4bff47a470..0000000000000 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/CommonRepository.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.api.repository; - -import java.sql.SQLException; -import java.util.List; - -public interface CommonRepository { - - /** - * Create table if not exist. - * - * @throws SQLException SQL exception - */ - void createTableIfNotExists() throws SQLException; - - /** - * Drop table. - * - * @throws SQLException SQL exception - */ - void dropTable() throws SQLException; - - /** - * Truncate table. 
- * - * @throws SQLException SQL exception - */ - void truncateTable() throws SQLException; - - /** - * insert data. - * - * @param entity entity - * @return generated primary key - * @throws SQLException SQL exception - */ - P insert(T entity) throws SQLException; - - /** - * Delete data. - * - * @param primaryKey primaryKey - * @throws SQLException SQL exception - */ - void delete(P primaryKey) throws SQLException; - - /** - * Select all data. - * - * @return all data - * @throws SQLException SQL exception - */ - List selectAll() throws SQLException; -} diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/OrderItemRepository.java b/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/OrderItemRepository.java deleted file mode 100644 index f84c6eab669c8..0000000000000 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/OrderItemRepository.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.api.repository; - -import org.apache.shardingsphere.example.core.api.entity.OrderItem; - -public interface OrderItemRepository extends CommonRepository { -} diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/OrderRepository.java b/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/OrderRepository.java deleted file mode 100644 index 255cf73c7b50e..0000000000000 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/OrderRepository.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.api.repository; - -import org.apache.shardingsphere.example.core.api.entity.Order; - -public interface OrderRepository extends CommonRepository { -} diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/OrderStatisticsInfoRepository.java b/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/OrderStatisticsInfoRepository.java deleted file mode 100644 index 7a044e72d92b8..0000000000000 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/OrderStatisticsInfoRepository.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.api.repository; - -import org.apache.shardingsphere.example.core.api.entity.OrderStatisticsInfo; - -public interface OrderStatisticsInfoRepository extends CommonRepository { -} diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/ShadowUserRepository.java b/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/ShadowUserRepository.java deleted file mode 100644 index 926801a80c156..0000000000000 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/ShadowUserRepository.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.api.repository; - -import org.apache.shardingsphere.example.core.api.entity.ShadowUser; - -public interface ShadowUserRepository extends CommonRepository { -} diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/UserRepository.java b/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/UserRepository.java deleted file mode 100644 index dc762f6fc740d..0000000000000 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/repository/UserRepository.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.api.repository; - -import org.apache.shardingsphere.example.core.api.entity.User; - -public interface UserRepository extends CommonRepository { -} diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/service/ExampleService.java b/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/service/ExampleService.java deleted file mode 100644 index b876523c864a2..0000000000000 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/service/ExampleService.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.api.service; - -import java.sql.SQLException; - -public interface ExampleService { - - /** - * Initialize environment. - * - * @throws SQLException SQL exception - */ - void initEnvironment() throws SQLException; - - /** - * Clean environment. - * - * @throws SQLException SQL exception - */ - void cleanEnvironment() throws SQLException; - - /** - * Process success. 
- * - * @throws SQLException SQL exception - */ - void processSuccess() throws SQLException; - - /** - * Process failure. - * - * @throws SQLException SQL exception - */ - void processFailure() throws SQLException; - - /** - * Print data. - * - * @throws SQLException SQL exception - */ - void printData() throws SQLException; -} diff --git a/examples/example-core/example-raw-jdbc/pom.xml b/examples/example-core/example-raw-jdbc/pom.xml deleted file mode 100644 index 4c5b287c84854..0000000000000 --- a/examples/example-core/example-raw-jdbc/pom.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - - 4.0.0 - - org.apache.shardingsphere.example - example-core - ${revision} - - example-raw-jdbc - ${project.artifactId} - - - - org.apache.shardingsphere.example - config-utility - ${project.parent.version} - - - org.apache.shardingsphere.example - example-api - ${project.parent.version} - - - diff --git a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/AccountRepositoryImpl.java b/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/AccountRepositoryImpl.java deleted file mode 100644 index 563741f442700..0000000000000 --- a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/AccountRepositoryImpl.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.jdbc.repository; - -import org.apache.shardingsphere.example.core.api.entity.Account; -import org.apache.shardingsphere.example.core.api.repository.AccountRepository; - -import javax.sql.DataSource; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.LinkedList; -import java.util.List; - -public class AccountRepositoryImpl implements AccountRepository { - - private final DataSource dataSource; - - public AccountRepositoryImpl(final DataSource dataSource) { - this.dataSource = dataSource; - } - - @Override - public void createTableIfNotExists() throws SQLException { - String sql = "CREATE TABLE IF NOT EXISTS t_account (account_id BIGINT NOT NULL AUTO_INCREMENT, user_id INT NOT NULL, status VARCHAR(50), PRIMARY KEY (account_id))"; - try (Connection connection = dataSource.getConnection(); - Statement statement = connection.createStatement()) { - statement.executeUpdate(sql); - } - } - - @Override - public void dropTable() throws SQLException { - String sql = "DROP TABLE t_account"; - try (Connection connection = dataSource.getConnection(); - Statement statement = connection.createStatement()) { - statement.executeUpdate(sql); - } - } - - @Override - public void truncateTable() throws SQLException { - String sql = "TRUNCATE TABLE t_account"; - try (Connection connection = dataSource.getConnection(); - Statement statement = connection.createStatement()) { - statement.executeUpdate(sql); 
- } - } - - @Override - public Long insert(final Account account) throws SQLException { - String sql = "INSERT INTO t_account (user_id, status) VALUES (?, ?)"; - try (Connection connection = dataSource.getConnection(); - PreparedStatement preparedStatement = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS)) { - preparedStatement.setInt(1, account.getUserId()); - preparedStatement.setString(2, account.getStatus()); - preparedStatement.executeUpdate(); - try (ResultSet resultSet = preparedStatement.getGeneratedKeys()) { - if (resultSet.next()) { - account.setAccountId(resultSet.getLong(1)); - } - } - } - return account.getAccountId(); - } - - @Override - public void delete(final Long accountId) throws SQLException { - String sql = "DELETE FROM t_account WHERE account_id=?"; - try (Connection connection = dataSource.getConnection(); - PreparedStatement preparedStatement = connection.prepareStatement(sql)) { - preparedStatement.setLong(1, accountId); - preparedStatement.executeUpdate(); - } - } - - @Override - public List selectAll() throws SQLException { - String sql = "SELECT * FROM t_account"; - return getAccounts(sql); - } - - protected List getAccounts(final String sql) throws SQLException { - List result = new LinkedList<>(); - try (Connection connection = dataSource.getConnection(); - PreparedStatement preparedStatement = connection.prepareStatement(sql); - ResultSet resultSet = preparedStatement.executeQuery()) { - while (resultSet.next()) { - Account account = new Account(); - account.setAccountId(resultSet.getLong(1)); - account.setUserId(resultSet.getInt(2)); - account.setStatus(resultSet.getString(3)); - result.add(account); - } - } - return result; - } -} diff --git a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/OrderStatisticsInfoRepositoryImpl.java 
b/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/OrderStatisticsInfoRepositoryImpl.java deleted file mode 100644 index 72797ce657323..0000000000000 --- a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/OrderStatisticsInfoRepositoryImpl.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.jdbc.repository; - -import org.apache.shardingsphere.example.core.api.entity.OrderStatisticsInfo; -import org.apache.shardingsphere.example.core.api.repository.OrderStatisticsInfoRepository; - -import javax.sql.DataSource; -import java.sql.Connection; -import java.sql.Date; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.LinkedList; -import java.util.List; - -public class OrderStatisticsInfoRepositoryImpl implements OrderStatisticsInfoRepository { - - private final DataSource dataSource; - - public OrderStatisticsInfoRepositoryImpl(final DataSource dataSource) { - this.dataSource = dataSource; - } - - @Override - public void createTableIfNotExists() throws SQLException { - String sql = "CREATE TABLE IF NOT EXISTS order_statistics_info (id BIGINT NOT NULL AUTO_INCREMENT, user_id BIGINT NOT NULL, order_date DATE NOT NULL, order_num INT, PRIMARY KEY (id))"; - try (Connection connection = dataSource.getConnection(); - Statement statement = connection.createStatement()) { - statement.executeUpdate(sql); - } - } - - @Override - public void dropTable() throws SQLException { - String sql = "DROP TABLE order_statistics_info"; - try (Connection connection = dataSource.getConnection(); - Statement statement = connection.createStatement()) { - statement.executeUpdate(sql); - } - } - - @Override - public void truncateTable() throws SQLException { - String sql = "TRUNCATE TABLE order_statistics_info"; - try (Connection connection = dataSource.getConnection(); - Statement statement = connection.createStatement()) { - statement.executeUpdate(sql); - } - } - - @Override - public Long insert(final OrderStatisticsInfo orderStatisticsInfo) throws SQLException { - String sql = "INSERT INTO order_statistics_info (user_id, order_date, order_num) VALUES (?, ?, ?)"; - try (Connection connection = dataSource.getConnection(); - PreparedStatement 
preparedStatement = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS)) { - preparedStatement.setLong(1, orderStatisticsInfo.getUserId()); - preparedStatement.setDate(2, Date.valueOf(orderStatisticsInfo.getOrderDate())); - preparedStatement.setInt(3, orderStatisticsInfo.getOrderNum()); - preparedStatement.executeUpdate(); - try (ResultSet resultSet = preparedStatement.getGeneratedKeys()) { - if (resultSet.next()) { - orderStatisticsInfo.setId(resultSet.getLong(1)); - } - } - } - return orderStatisticsInfo.getId(); - } - - @Override - public void delete(final Long id) throws SQLException { - String sql = "DELETE FROM order_statistics_info WHERE id=?"; - try (Connection connection = dataSource.getConnection(); - PreparedStatement preparedStatement = connection.prepareStatement(sql)) { - preparedStatement.setLong(1, id); - preparedStatement.executeUpdate(); - } - } - - @Override - public List selectAll() throws SQLException { - String sql = "SELECT * FROM order_statistics_info"; - return getOrderStatisticsInfos(sql); - } - - protected List getOrderStatisticsInfos(final String sql) throws SQLException { - List result = new LinkedList<>(); - try (Connection connection = dataSource.getConnection(); - PreparedStatement preparedStatement = connection.prepareStatement(sql); - ResultSet resultSet = preparedStatement.executeQuery()) { - while (resultSet.next()) { - OrderStatisticsInfo orderStatisticsInfo = new OrderStatisticsInfo(); - orderStatisticsInfo.setId(resultSet.getLong(1)); - orderStatisticsInfo.setUserId(resultSet.getLong(2)); - orderStatisticsInfo.setOrderDate(resultSet.getDate(3).toLocalDate()); - orderStatisticsInfo.setOrderNum(resultSet.getInt(4)); - result.add(orderStatisticsInfo); - } - } - return result; - } -} diff --git a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/ShadowUserRepositoryImpl.java 
b/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/ShadowUserRepositoryImpl.java deleted file mode 100644 index 996b03b6786f5..0000000000000 --- a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/ShadowUserRepositoryImpl.java +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.jdbc.repository; - -import org.apache.shardingsphere.example.core.api.entity.ShadowUser; -import org.apache.shardingsphere.example.core.api.repository.ShadowUserRepository; - -import javax.sql.DataSource; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.LinkedList; -import java.util.List; - -public final class ShadowUserRepositoryImpl implements ShadowUserRepository { - - private static final String SQL_NOTE = "/*shadow:true,foo:bar*/"; - - private final DataSource dataSource; - - public ShadowUserRepositoryImpl(final DataSource dataSource) { - this.dataSource = dataSource; - } - - @Override - public void createTableIfNotExists() throws SQLException { - String sql = "CREATE TABLE IF NOT EXISTS t_user (user_id INT NOT NULL AUTO_INCREMENT, user_type INT(11), username VARCHAR(200), pwd VARCHAR(200), PRIMARY KEY (user_id))"; - createTableIfNotExistsShadow(sql); - createTableIfNotExistsNative(sql); - } - - private void createTableIfNotExistsNative(final String sql) throws SQLException { - try (Connection connection = dataSource.getConnection(); - Statement statement = connection.createStatement()) { - statement.executeUpdate(sql); - } - } - - private void createTableIfNotExistsShadow(final String sql) throws SQLException { - try (Connection connection = dataSource.getConnection(); - Statement statement = connection.createStatement()) { - statement.executeUpdate(sql + SQL_NOTE); - } - } - - @Override - public void dropTable() throws SQLException { - String sql = "DROP TABLE IF EXISTS t_user;"; - dropTableShadow(sql); - dropTableNative(sql); - } - - private void dropTableNative(final String sql) throws SQLException { - try (Connection connection = dataSource.getConnection(); - Statement statement = connection.createStatement()) { - statement.executeUpdate(sql); - } - } - - private void 
dropTableShadow(final String sql) throws SQLException { - try (Connection connection = dataSource.getConnection(); - Statement statement = connection.createStatement()) { - statement.executeUpdate(sql + SQL_NOTE); - } - } - - @Override - public void truncateTable() throws SQLException { - String sql = "TRUNCATE TABLE t_user"; - truncateTableShadow(sql); - truncateTableNative(sql); - } - - private void truncateTableNative(final String sql) throws SQLException { - try (Connection connection = dataSource.getConnection(); - Statement statement = connection.createStatement()) { - statement.executeUpdate(sql); - } - } - - private void truncateTableShadow(final String sql) throws SQLException { - try (Connection connection = dataSource.getConnection(); - Statement statement = connection.createStatement()) { - statement.executeUpdate(sql + SQL_NOTE); - } - } - - @Override - public Long insert(final ShadowUser entity) throws SQLException { - String sql = "INSERT INTO t_user (user_id, user_type, username, pwd) VALUES (?, ?, ?, ?)"; - try (Connection connection = dataSource.getConnection(); - PreparedStatement preparedStatement = connection.prepareStatement(sql)) { - preparedStatement.setInt(1, entity.getUserId()); - preparedStatement.setInt(2, entity.getUserType()); - preparedStatement.setString(3, entity.getUsername()); - preparedStatement.setString(4, entity.getPwd()); - preparedStatement.executeUpdate(); - } - return (long) entity.getUserId(); - } - - @Override - public void delete(final Long id) throws SQLException { - String sql = "DELETE FROM t_user WHERE user_id = ? 
AND user_type = ?"; - deleteUser(sql, id, (int) (id % 2)); - deleteUser(sql, id, (int) (id % 2)); - } - - private void deleteUser(final String sql, final Long id, final int userType) throws SQLException { - try (Connection connection = dataSource.getConnection(); - PreparedStatement preparedStatement = connection.prepareStatement(sql)) { - preparedStatement.setLong(1, id); - preparedStatement.setInt(2, userType); - preparedStatement.executeUpdate(); - } - } - - @Override - public List selectAll() throws SQLException { - String sql = "SELECT * FROM t_user where user_type = ?"; - List users = getUsers(sql, 1); - users.addAll(getUsers(sql, 0)); - return users; - } - - private List getUsers(final String sql, final int userType) throws SQLException { - List result = new LinkedList<>(); - try (Connection connection = dataSource.getConnection(); - PreparedStatement preparedStatement = connection.prepareStatement(sql)) { - preparedStatement.setInt(1, userType); - ResultSet resultSet = preparedStatement.executeQuery(); - while (resultSet.next()) { - ShadowUser user = new ShadowUser(); - user.setUserId(resultSet.getInt("user_id")); - user.setUserType(resultSet.getInt("user_type")); - user.setUsername(resultSet.getString("username")); - user.setPwd(resultSet.getString("pwd")); - result.add(user); - } - } - return result; - } -} diff --git a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/UserRepositoryImpl.java b/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/UserRepositoryImpl.java deleted file mode 100644 index 52d6eb31ce6f0..0000000000000 --- a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/UserRepositoryImpl.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.jdbc.repository; - -import org.apache.shardingsphere.example.core.api.entity.User; -import org.apache.shardingsphere.example.core.api.repository.UserRepository; - -import javax.sql.DataSource; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.LinkedList; -import java.util.List; - -public final class UserRepositoryImpl implements UserRepository { - - private final DataSource dataSource; - - public UserRepositoryImpl(final DataSource dataSource) { - this.dataSource = dataSource; - } - - @Override - public void createTableIfNotExists() throws SQLException { - String sql = "CREATE TABLE IF NOT EXISTS t_user " - + "(user_id INT NOT NULL AUTO_INCREMENT, username VARCHAR(200), pwd VARCHAR(200), PRIMARY KEY (user_id))"; - try (Connection connection = dataSource.getConnection(); - Statement statement = connection.createStatement()) { - statement.executeUpdate(sql); - } - } - - @Override - public void dropTable() throws SQLException { - String sql = "DROP TABLE t_user"; - try (Connection connection = dataSource.getConnection(); - Statement statement = connection.createStatement()) { - statement.executeUpdate(sql); - } - 
} - - @Override - public void truncateTable() throws SQLException { - String sql = "TRUNCATE TABLE t_user"; - try (Connection connection = dataSource.getConnection(); - Statement statement = connection.createStatement()) { - statement.executeUpdate(sql); - } - } - - @Override - public Long insert(final User entity) throws SQLException { - String sql = "INSERT INTO t_user (user_id, username, pwd) VALUES (?, ?, ?)"; - try (Connection connection = dataSource.getConnection(); - PreparedStatement preparedStatement = connection.prepareStatement(sql)) { - preparedStatement.setInt(1, entity.getUserId()); - preparedStatement.setString(2, entity.getUsername()); - preparedStatement.setString(3, entity.getPwd()); - preparedStatement.executeUpdate(); - } - return (long) entity.getUserId(); - } - - @Override - public void delete(final Long id) throws SQLException { - String sql = "DELETE FROM t_user WHERE user_id=?"; - try (Connection connection = dataSource.getConnection(); - PreparedStatement preparedStatement = connection.prepareStatement(sql)) { - preparedStatement.setLong(1, id); - preparedStatement.executeUpdate(); - } - } - - @Override - public List selectAll() throws SQLException { - String sql = "SELECT * FROM t_user"; - return getUsers(sql); - } - - private List getUsers(final String sql) throws SQLException { - List result = new LinkedList<>(); - try (Connection connection = dataSource.getConnection(); - PreparedStatement preparedStatement = connection.prepareStatement(sql); - ResultSet resultSet = preparedStatement.executeQuery()) { - while (resultSet.next()) { - User user = new User(); - user.setUserId(resultSet.getInt("user_id")); - user.setUsername(resultSet.getString("username")); - user.setPwd(resultSet.getString("pwd")); - result.add(user); - } - } - return result; - } -} diff --git a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/service/AccountServiceImpl.java 
b/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/service/AccountServiceImpl.java deleted file mode 100644 index b862737f988be..0000000000000 --- a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/service/AccountServiceImpl.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.jdbc.service; - -import org.apache.shardingsphere.example.core.api.entity.Account; -import org.apache.shardingsphere.example.core.api.repository.AccountRepository; -import org.apache.shardingsphere.example.core.api.service.ExampleService; -import org.apache.shardingsphere.example.core.jdbc.repository.AccountRepositoryImpl; - -import javax.sql.DataSource; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; - -public final class AccountServiceImpl implements ExampleService { - - private final AccountRepository accountRepository; - - public AccountServiceImpl(final DataSource dataSource) { - accountRepository = new AccountRepositoryImpl(dataSource); - } - - public AccountServiceImpl(final AccountRepository accountRepository) { - this.accountRepository = accountRepository; - } - - @Override - public void initEnvironment() throws SQLException { - accountRepository.createTableIfNotExists(); - accountRepository.truncateTable(); - } - - @Override - public void cleanEnvironment() throws SQLException { - accountRepository.dropTable(); - } - - @Override - public void processSuccess() throws SQLException { - System.out.println("-------------- Process Success Begin ---------------"); - List accountIds = insertData(); - printData(); - deleteData(accountIds); - printData(); - System.out.println("-------------- Process Success Finish --------------"); - } - - @Override - public void processFailure() throws SQLException { - System.out.println("-------------- Process Failure Begin ---------------"); - insertData(); - System.out.println("-------------- Process Failure Finish --------------"); - throw new RuntimeException("Exception occur for transaction test."); - } - - private List insertData() throws SQLException { - System.out.println("---------------------------- Insert Data ----------------------------"); - List result = new ArrayList<>(10); - for (int i = 1; i <= 10; i++) { - Account account 
= insertAccounts(i); - result.add(account.getAccountId()); - } - return result; - } - - private Account insertAccounts(final int i) throws SQLException { - Account account = new Account(); - account.setUserId(i); - account.setStatus("INSERT_TEST"); - accountRepository.insert(account); - return account; - } - - private void deleteData(final List accountIds) throws SQLException { - System.out.println("---------------------------- Delete Data ----------------------------"); - for (Long each : accountIds) { - accountRepository.delete(each); - } - } - - @Override - public void printData() throws SQLException { - System.out.println("---------------------------- Print Account Data -----------------------"); - for (Object each : accountRepository.selectAll()) { - System.out.println(each); - } - } -} diff --git a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/service/OrderStatisticsInfoServiceImpl.java b/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/service/OrderStatisticsInfoServiceImpl.java deleted file mode 100644 index 0d95739166d0c..0000000000000 --- a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/service/OrderStatisticsInfoServiceImpl.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.jdbc.service; - -import org.apache.shardingsphere.example.core.api.entity.OrderStatisticsInfo; -import org.apache.shardingsphere.example.core.api.repository.OrderStatisticsInfoRepository; -import org.apache.shardingsphere.example.core.api.service.ExampleService; -import org.apache.shardingsphere.example.core.jdbc.repository.OrderStatisticsInfoRepositoryImpl; - -import javax.sql.DataSource; -import java.sql.SQLException; -import java.time.LocalDate; -import java.util.ArrayList; -import java.util.Collection; - -public final class OrderStatisticsInfoServiceImpl implements ExampleService { - - private final OrderStatisticsInfoRepository orderStatisticsInfoRepository; - - public OrderStatisticsInfoServiceImpl(final DataSource dataSource) { - orderStatisticsInfoRepository = new OrderStatisticsInfoRepositoryImpl(dataSource); - } - - @Override - public void initEnvironment() throws SQLException { - orderStatisticsInfoRepository.createTableIfNotExists(); - orderStatisticsInfoRepository.truncateTable(); - } - - @Override - public void cleanEnvironment() throws SQLException { - orderStatisticsInfoRepository.dropTable(); - } - - @Override - public void processSuccess() throws SQLException { - System.out.println("-------------- Process Success Begin ---------------"); - Collection ids = insertData(); - printData(); - deleteData(ids); - printData(); - System.out.println("-------------- Process Success Finish --------------"); - } - - @Override - public void processFailure() throws SQLException { - 
System.out.println("-------------- Process Failure Begin ---------------"); - insertData(); - System.out.println("-------------- Process Failure Finish --------------"); - throw new RuntimeException("Exception occur for transaction test."); - } - - private Collection insertData() throws SQLException { - System.out.println("------------------- Insert Data --------------------"); - Collection result = new ArrayList<>(10); - for (int i = 1; i <= 10; i++) { - OrderStatisticsInfo orderStatisticsInfo = insertOrderStatisticsInfo(i); - result.add(orderStatisticsInfo.getId()); - } - return result; - } - - private OrderStatisticsInfo insertOrderStatisticsInfo(final int i) throws SQLException { - OrderStatisticsInfo result = new OrderStatisticsInfo(); - result.setUserId((long) i); - if (0 == i % 2) { - result.setOrderDate(LocalDate.now().plusYears(-1)); - } else { - result.setOrderDate(LocalDate.now()); - } - result.setOrderNum(i * 10); - orderStatisticsInfoRepository.insert(result); - return result; - } - - private void deleteData(final Collection ids) throws SQLException { - System.out.println("-------------------- Delete Data -------------------"); - for (Long each : ids) { - orderStatisticsInfoRepository.delete(each); - } - } - - @Override - public void printData() throws SQLException { - System.out.println("---------------- Print Order Data ------------------"); - for (Object each : orderStatisticsInfoRepository.selectAll()) { - System.out.println(each); - } - } -} diff --git a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/service/ShadowUserServiceImpl.java b/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/service/ShadowUserServiceImpl.java deleted file mode 100644 index b1059ddab9d49..0000000000000 --- a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/service/ShadowUserServiceImpl.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * 
Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.jdbc.service; - -import org.apache.shardingsphere.example.core.api.entity.ShadowUser; -import org.apache.shardingsphere.example.core.api.repository.ShadowUserRepository; -import org.apache.shardingsphere.example.core.api.service.ExampleService; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; - -public final class ShadowUserServiceImpl implements ExampleService { - - private final ShadowUserRepository userRepository; - - public ShadowUserServiceImpl(final ShadowUserRepository userRepository) { - this.userRepository = userRepository; - } - - @Override - public void initEnvironment() throws SQLException { - userRepository.createTableIfNotExists(); - userRepository.truncateTable(); - } - - @Override - public void cleanEnvironment() throws SQLException { - userRepository.dropTable(); - } - - @Override - public void processSuccess() throws SQLException { - System.out.println("-------------- Process Success Begin ---------------"); - List userIds = insertData(); - printData(); - deleteData(userIds); - printData(); - System.out.println("-------------- Process Success Finish --------------"); - } - 
- @Override - public void processFailure() throws SQLException { - System.out.println("-------------- Process Failure Begin ---------------"); - insertData(); - System.out.println("-------------- Process Failure Finish --------------"); - throw new RuntimeException("Exception occur for transaction test."); - } - - private List insertData() throws SQLException { - System.out.println("---------------------------- Insert Data ----------------------------"); - List result = new ArrayList<>(10); - for (int i = 1; i <= 10; i++) { - ShadowUser user = new ShadowUser(); - user.setUserId(i); - user.setUserType(i % 2); - user.setUsername("test_" + i); - user.setPwd("pwd" + i); - userRepository.insert(user); - result.add((long) user.getUserId()); - } - return result; - } - - private void deleteData(final List userIds) throws SQLException { - System.out.println("---------------------------- Delete Data ----------------------------"); - for (Long each : userIds) { - userRepository.delete(each); - } - } - - @Override - public void printData() throws SQLException { - System.out.println("---------------------------- Print User Data -----------------------"); - for (Object each : userRepository.selectAll()) { - System.out.println(each); - } - } -} diff --git a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/service/UserServiceImpl.java b/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/service/UserServiceImpl.java deleted file mode 100644 index beb485543b57e..0000000000000 --- a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/service/UserServiceImpl.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.jdbc.service; - -import org.apache.shardingsphere.example.core.api.service.ExampleService; -import org.apache.shardingsphere.example.core.api.entity.User; -import org.apache.shardingsphere.example.core.api.repository.UserRepository; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; - -public final class UserServiceImpl implements ExampleService { - - private final UserRepository userRepository; - - public UserServiceImpl(final UserRepository userRepository) { - this.userRepository = userRepository; - } - - @Override - public void initEnvironment() throws SQLException { - userRepository.createTableIfNotExists(); - userRepository.truncateTable(); - } - - @Override - public void cleanEnvironment() throws SQLException { - userRepository.dropTable(); - } - - @Override - public void processSuccess() throws SQLException { - System.out.println("-------------- Process Success Begin ---------------"); - List userIds = insertData(); - printData(); - deleteData(userIds); - printData(); - System.out.println("-------------- Process Success Finish --------------"); - } - - @Override - public void processFailure() throws SQLException { - System.out.println("-------------- Process Failure Begin ---------------"); - insertData(); - System.out.println("-------------- Process Failure Finish --------------"); - throw new 
RuntimeException("Exception occur for transaction test."); - } - - private List insertData() throws SQLException { - System.out.println("---------------------------- Insert Data ----------------------------"); - List result = new ArrayList<>(10); - for (int i = 1; i <= 10; i++) { - User user = new User(); - user.setUserId(i); - user.setUsername("test_" + i); - user.setPwd("pwd" + i); - userRepository.insert(user); - result.add((long) user.getUserId()); - } - return result; - } - - private void deleteData(final List userIds) throws SQLException { - System.out.println("---------------------------- Delete Data ----------------------------"); - for (Long each : userIds) { - userRepository.delete(each); - } - } - - @Override - public void printData() throws SQLException { - System.out.println("---------------------------- Print User Data -----------------------"); - for (Object each : userRepository.selectAll()) { - System.out.println(each); - } - } -} diff --git a/examples/example-core/example-spring-jpa/pom.xml b/examples/example-core/example-spring-jpa/pom.xml deleted file mode 100644 index aefcd4167db55..0000000000000 --- a/examples/example-core/example-spring-jpa/pom.xml +++ /dev/null @@ -1,64 +0,0 @@ - - - - - 4.0.0 - - org.apache.shardingsphere.example - example-core - ${revision} - - example-spring-jpa - ${project.artifactId} - - - - org.apache.shardingsphere.example - config-utility - ${project.parent.version} - - - org.apache.shardingsphere.example - example-api - ${project.parent.version} - - - - org.hibernate.javax.persistence - hibernate-jpa-2.1-api - - - org.hibernate - hibernate-core - - - org.hibernate - hibernate-entitymanager - - - org.springframework - spring-orm - - - org.springframework - spring-context-support - - - diff --git a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/AccountEntity.java 
b/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/AccountEntity.java deleted file mode 100644 index 80d88f5becf7b..0000000000000 --- a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/AccountEntity.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.jpa.entity; - -import org.apache.shardingsphere.example.core.api.entity.Account; - -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Table; - -@Entity -@Table(name = "t_account") -public final class AccountEntity extends Account { - - private static final long serialVersionUID = -4004643361026020655L; - - @Id - @Column(name = "account_id") - @GeneratedValue(strategy = GenerationType.IDENTITY) - @Override - public long getAccountId() { - return super.getAccountId(); - } - - @Column(name = "user_id") - @Override - public int getUserId() { - return super.getUserId(); - } - - @Column(name = "status") - @Override - public String getStatus() { - return super.getStatus(); - } -} diff --git a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/OrderEntity.java b/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/OrderEntity.java deleted file mode 100644 index c0c050c6ac93e..0000000000000 --- a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/OrderEntity.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.jpa.entity; - -import org.apache.shardingsphere.example.core.api.entity.Order; - -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Table; - -@Entity -@Table(name = "t_order") -public final class OrderEntity extends Order { - - private static final long serialVersionUID = 4743102234543827854L; - - @Id - @Column(name = "order_id") - @GeneratedValue(strategy = GenerationType.IDENTITY) - @Override - public long getOrderId() { - return super.getOrderId(); - } - - @Column(name = "user_id") - @Override - public int getUserId() { - return super.getUserId(); - } - - @Column(name = "address_id") - @Override - public long getAddressId() { - return super.getAddressId(); - } - - @Column(name = "status") - @Override - public String getStatus() { - return super.getStatus(); - } -} diff --git a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/OrderItemEntity.java b/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/OrderItemEntity.java deleted file mode 100644 index a2125e2e381af..0000000000000 --- a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/OrderItemEntity.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license 
agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.jpa.entity; - -import org.apache.shardingsphere.example.core.api.entity.OrderItem; - -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.Table; - -@Entity -@Table(name = "t_order_item") -public final class OrderItemEntity extends OrderItem { - - private static final long serialVersionUID = 5685474394188443341L; - - @Id - @Column(name = "order_item_id") - @GeneratedValue(strategy = GenerationType.IDENTITY) - @Override - public long getOrderItemId() { - return super.getOrderItemId(); - } - - @Column(name = "order_id") - @Override - public long getOrderId() { - return super.getOrderId(); - } - - @Column(name = "user_id") - @Override - public int getUserId() { - return super.getUserId(); - } - - @Column(name = "status") - @Override - public String getStatus() { - return super.getStatus(); - } -} diff --git a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/ShadowUserEntity.java b/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/ShadowUserEntity.java deleted 
file mode 100644 index 412db3f82a663..0000000000000 --- a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/ShadowUserEntity.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.jpa.entity; - -import org.apache.shardingsphere.example.core.api.entity.ShadowUser; - -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.Id; -import javax.persistence.Table; - -@Entity -@Table(name = "t_user") -public final class ShadowUserEntity extends ShadowUser { - - private static final long serialVersionUID = -3708998745561667721L; - - @Id - @Column(name = "user_id") - @Override - public int getUserId() { - return super.getUserId(); - } - - @Column(name = "user_type") - @Override - public int getUserType() { - return super.getUserType(); - } - - @Column(name = "username") - @Override - public String getUsername() { - return super.getUsername(); - } - - @Column(name = "pwd") - @Override - public String getPwd() { - return super.getPwd(); - } -} diff --git a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/UserEntity.java b/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/UserEntity.java deleted file mode 100644 index 66cc70b2e20f6..0000000000000 --- a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/UserEntity.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.jpa.entity; - -import org.apache.shardingsphere.example.core.api.entity.User; - -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.Id; -import javax.persistence.Table; - -@Entity -@Table(name = "t_user") -public final class UserEntity extends User { - - private static final long serialVersionUID = -3708998745561667721L; - - @Id - @Column(name = "user_id") - @Override - public int getUserId() { - return super.getUserId(); - } - - @Column(name = "username") - @Override - public String getUsername() { - return super.getUsername(); - } - - @Column(name = "pwd") - @Override - public String getPwd() { - return super.getPwd(); - } - -} diff --git a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/AccountRepositoryImpl.java b/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/AccountRepositoryImpl.java deleted file mode 100644 index c5445f35e5bf2..0000000000000 --- a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/AccountRepositoryImpl.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.jpa.repository; - -import org.apache.shardingsphere.example.core.api.entity.Account; -import org.apache.shardingsphere.example.core.api.repository.AccountRepository; -import org.springframework.stereotype.Repository; - -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.transaction.Transactional; -import java.util.List; - -@Repository -@Transactional -public class AccountRepositoryImpl implements AccountRepository { - - @PersistenceContext - private EntityManager entityManager; - - @Override - public void createTableIfNotExists() { - throw new UnsupportedOperationException("createTableIfNotExists for JPA"); - } - - @Override - public void truncateTable() { - throw new UnsupportedOperationException("truncateTable for JPA"); - } - - @Override - public void dropTable() { - throw new UnsupportedOperationException("dropTable for JPA"); - } - - @Override - public Long insert(final Account account) { - entityManager.persist(account); - return account.getAccountId(); - } - - @Override - public void delete(final Long accountId) { - Query query = entityManager.createQuery("DELETE FROM AccountEntity o WHERE o.accountId = ?1"); - query.setParameter(1, accountId); - query.executeUpdate(); - } - - @SuppressWarnings("unchecked") - @Override - public List selectAll() { - 
return (List) entityManager.createQuery("SELECT o FROM AccountEntity o").getResultList(); - } -} diff --git a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/AddressRepositoryImpl.java b/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/AddressRepositoryImpl.java deleted file mode 100644 index dbaa7b7ef22b9..0000000000000 --- a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/AddressRepositoryImpl.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.jpa.repository; - -import org.apache.shardingsphere.example.core.api.entity.Address; -import org.apache.shardingsphere.example.core.api.repository.AddressRepository; -import org.springframework.stereotype.Repository; - -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.transaction.Transactional; -import java.util.List; - -@Repository -@Transactional -public class AddressRepositoryImpl implements AddressRepository { - - @PersistenceContext - private EntityManager entityManager; - - @Override - public void createTableIfNotExists() { - throw new UnsupportedOperationException("createTableIfNotExists for JPA"); - } - - @Override - public void dropTable() { - throw new UnsupportedOperationException("dropTable for JPA"); - } - - @Override - public void truncateTable() { - throw new UnsupportedOperationException("truncateTable for JPA"); - } - - @Override - public Long insert(final Address entity) { - entityManager.persist(entity); - return entity.getAddressId(); - } - - @Override - public void delete(final Long addressCode) { - Query query = entityManager.createQuery("DELETE FROM AddressEntity i WHERE i.addressId = ?1"); - query.setParameter(1, addressCode); - query.executeUpdate(); - } - - @Override - @SuppressWarnings("unchecked") - public List
selectAll() { - return (List
) entityManager.createQuery("SELECT i FROM AddressEntity i").getResultList(); - } -} diff --git a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/OrderItemRepositoryImpl.java b/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/OrderItemRepositoryImpl.java deleted file mode 100644 index e091d7e0fbea0..0000000000000 --- a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/OrderItemRepositoryImpl.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.jpa.repository; - -import org.apache.shardingsphere.example.core.api.repository.OrderItemRepository; -import org.apache.shardingsphere.example.core.api.entity.OrderItem; -import org.springframework.stereotype.Repository; - -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.transaction.Transactional; -import java.util.List; - -@Repository -@Transactional -public class OrderItemRepositoryImpl implements OrderItemRepository { - - @PersistenceContext - private EntityManager entityManager; - - @Override - public void createTableIfNotExists() { - throw new UnsupportedOperationException("createTableIfNotExists for JPA"); - } - - @Override - public void truncateTable() { - throw new UnsupportedOperationException("truncateTable for JPA"); - } - - @Override - public void dropTable() { - throw new UnsupportedOperationException("dropTable for JPA"); - } - - @Override - public Long insert(final OrderItem orderItem) { - entityManager.persist(orderItem); - return orderItem.getOrderItemId(); - } - - @Override - public void delete(final Long orderItemId) { - Query query = entityManager.createQuery("DELETE FROM OrderItemEntity i WHERE i.orderItemId = ?1 AND i.userId = 51"); - query.setParameter(1, orderItemId); - query.executeUpdate(); - } - - @Override - @SuppressWarnings("unchecked") - public List selectAll() { - return (List) entityManager.createQuery("SELECT o from OrderItemEntity o").getResultList(); - } -} diff --git a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/OrderRepositoryImpl.java b/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/OrderRepositoryImpl.java deleted file mode 100644 index 5621c3fe0fc70..0000000000000 --- 
a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/OrderRepositoryImpl.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.jpa.repository; - -import org.apache.shardingsphere.example.core.api.repository.OrderRepository; -import org.apache.shardingsphere.example.core.api.entity.Order; -import org.springframework.stereotype.Repository; - -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.transaction.Transactional; -import java.util.List; - -@Repository -@Transactional -public class OrderRepositoryImpl implements OrderRepository { - - @PersistenceContext - private EntityManager entityManager; - - @Override - public void createTableIfNotExists() { - throw new UnsupportedOperationException("createTableIfNotExists for JPA"); - } - - @Override - public void truncateTable() { - throw new UnsupportedOperationException("truncateTable for JPA"); - } - - @Override - public void dropTable() { - throw new UnsupportedOperationException("dropTable for JPA"); - } - - @Override - public Long insert(final Order 
order) { - entityManager.persist(order); - return order.getOrderId(); - } - - @Override - public void delete(final Long orderId) { - Query query = entityManager.createQuery("DELETE FROM OrderEntity o WHERE o.orderId = ?1"); - query.setParameter(1, orderId); - query.executeUpdate(); - } - - @SuppressWarnings("unchecked") - @Override - public List selectAll() { - return (List) entityManager.createQuery("SELECT o FROM OrderEntity o").getResultList(); - } -} diff --git a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/ShadowUserRepositoryImpl.java b/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/ShadowUserRepositoryImpl.java deleted file mode 100644 index 6af770bdea381..0000000000000 --- a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/ShadowUserRepositoryImpl.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.jpa.repository; - -import org.apache.shardingsphere.example.core.api.entity.ShadowUser; -import org.apache.shardingsphere.example.core.api.repository.ShadowUserRepository; -import org.springframework.stereotype.Repository; - -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.transaction.Transactional; -import java.util.ArrayList; -import java.util.List; - -@Repository -@Transactional -public class ShadowUserRepositoryImpl implements ShadowUserRepository { - - @PersistenceContext - private EntityManager entityManager; - - @Override - public void createTableIfNotExists() { - throw new UnsupportedOperationException("createTableIfNotExists for JPA"); - } - - @Override - public void dropTable() { - throw new UnsupportedOperationException("dropTable for JPA"); - } - - @Override - public void truncateTable() { - throw new UnsupportedOperationException("truncateTable for JPA"); - } - - @Override - public Long insert(final ShadowUser entity) { - entityManager.persist(entity); - return null; - } - - @Override - public void delete(final Long id) { - Query query = entityManager.createQuery("DELETE FROM ShadowUserEntity o WHERE o.userId = ?1 and o.userType = ?2"); - query.setParameter(1, id.intValue()); - query.setParameter(2, id.intValue() % 2); - query.executeUpdate(); - } - - @Override - @SuppressWarnings("unchecked") - public List selectAll() { - List users = new ArrayList<>(); - Query query = entityManager.createQuery("SELECT o FROM ShadowUserEntity o WHERE o.userType = ?1"); - query.setParameter(1, 0); - users.addAll(query.getResultList()); - query.setParameter(1, 1); - users.addAll(query.getResultList()); - return users; - } -} diff --git a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/UserRepositoryImpl.java 
b/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/UserRepositoryImpl.java deleted file mode 100644 index 6ae276f63e1b8..0000000000000 --- a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/repository/UserRepositoryImpl.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.jpa.repository; - -import org.apache.shardingsphere.example.core.api.repository.UserRepository; -import org.apache.shardingsphere.example.core.api.entity.User; -import org.springframework.stereotype.Repository; - -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; -import javax.persistence.Query; -import javax.transaction.Transactional; -import java.util.List; - -@Repository -@Transactional -public class UserRepositoryImpl implements UserRepository { - - @PersistenceContext - private EntityManager entityManager; - - @Override - public void createTableIfNotExists() { - throw new UnsupportedOperationException("createTableIfNotExists for JPA"); - } - - @Override - public void dropTable() { - throw new UnsupportedOperationException("dropTable for JPA"); - } - - @Override - public void truncateTable() { - throw new UnsupportedOperationException("truncateTable for JPA"); - } - - @Override - public Long insert(final User entity) { - entityManager.persist(entity); - return null; - } - - @Override - public void delete(final Long id) { - Query query = entityManager.createQuery("DELETE FROM UserEntity o WHERE o.userId = ?1"); - query.setParameter(1, id.intValue()); - query.executeUpdate(); - } - - @Override - @SuppressWarnings("unchecked") - public List selectAll() { - return (List) entityManager.createQuery("SELECT o FROM UserEntity o").getResultList(); - } -} diff --git a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/service/AccountServiceImpl.java b/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/service/AccountServiceImpl.java deleted file mode 100644 index 47871042172ef..0000000000000 --- a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/service/AccountServiceImpl.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.jpa.service; - -import org.apache.shardingsphere.example.core.api.repository.AccountRepository; -import org.apache.shardingsphere.example.core.api.service.ExampleService; -import org.apache.shardingsphere.example.core.jpa.entity.AccountEntity; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import javax.annotation.Resource; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; - -@Service -public class AccountServiceImpl implements ExampleService { - - @Resource - private AccountRepository accountRepository; - - @Override - public void initEnvironment() throws SQLException { - } - - @Override - public void cleanEnvironment() { - } - - @Override - @Transactional - public void processSuccess() throws SQLException { - System.out.println("-------------- Process Success Begin ---------------"); - List accountIds = insertData(); - printData(); - deleteData(accountIds); - printData(); - System.out.println("-------------- Process Success Finish --------------"); - } - - @Override - @Transactional - public void processFailure() throws SQLException { - 
System.out.println("-------------- Process Failure Begin ---------------"); - insertData(); - System.out.println("-------------- Process Failure Finish --------------"); - throw new RuntimeException("Exception occur for transaction test."); - } - - private List insertData() throws SQLException { - System.out.println("---------------------------- Insert Data ----------------------------"); - List result = new ArrayList<>(10); - for (int i = 1; i <= 10; i++) { - AccountEntity account = new AccountEntity(); - account.setUserId(i); - account.setStatus("INSERT_TEST_JPA"); - accountRepository.insert(account); - result.add(account.getAccountId()); - } - return result; - } - - private void deleteData(final List accountIds) throws SQLException { - System.out.println("---------------------------- Delete Data ----------------------------"); - for (Long each : accountIds) { - accountRepository.delete(each); - } - } - - @Override - public void printData() throws SQLException { - System.out.println("---------------------------- Print Account Data -----------------------"); - for (Object each : accountRepository.selectAll()) { - System.out.println(each); - } - } -} diff --git a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/service/OrderServiceImpl.java b/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/service/OrderServiceImpl.java deleted file mode 100644 index 246690b9bb5b3..0000000000000 --- a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/service/OrderServiceImpl.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.jpa.service; - -import org.apache.shardingsphere.example.core.api.repository.AddressRepository; -import org.apache.shardingsphere.example.core.api.repository.OrderItemRepository; -import org.apache.shardingsphere.example.core.api.repository.OrderRepository; -import org.apache.shardingsphere.example.core.api.service.ExampleService; -import org.apache.shardingsphere.example.core.jpa.entity.AddressEntity; -import org.apache.shardingsphere.example.core.jpa.entity.OrderEntity; -import org.apache.shardingsphere.example.core.jpa.entity.OrderItemEntity; -import org.springframework.context.annotation.Primary; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import javax.annotation.Resource; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; - -@Service -@Primary -public class OrderServiceImpl implements ExampleService { - - @Resource - private OrderRepository orderRepository; - - @Resource - private OrderItemRepository orderItemRepository; - - @Resource - private AddressRepository addressRepository; - - @Override - public void initEnvironment() throws SQLException { - for (int i = 1; i <= 10; i++) { - AddressEntity entity = new AddressEntity(); - entity.setAddressId((long) i); - entity.setAddressName("address_" + i); - 
addressRepository.insert(entity); - } - } - - @Override - public void cleanEnvironment() { - } - - @Override - @Transactional - public void processSuccess() throws SQLException { - System.out.println("-------------- Process Success Begin ---------------"); - List orderIds = insertData(); - printData(); - deleteData(orderIds); - printData(); - System.out.println("-------------- Process Success Finish --------------"); - } - - @Override - @Transactional - public void processFailure() throws SQLException { - System.out.println("-------------- Process Failure Begin ---------------"); - insertData(); - System.out.println("-------------- Process Failure Finish --------------"); - throw new RuntimeException("Exception occur for transaction test."); - } - - private List insertData() throws SQLException { - System.out.println("---------------------------- Insert Data ----------------------------"); - List result = new ArrayList<>(10); - for (int i = 1; i <= 10; i++) { - OrderEntity order = new OrderEntity(); - order.setUserId(i); - order.setAddressId(i); - order.setStatus("INSERT_TEST_JPA"); - orderRepository.insert(order); - OrderItemEntity item = new OrderItemEntity(); - item.setOrderId(order.getOrderId()); - item.setUserId(i); - item.setStatus("INSERT_TEST_JPA"); - orderItemRepository.insert(item); - result.add(order.getOrderId()); - } - return result; - } - - private void deleteData(final List orderIds) throws SQLException { - System.out.println("---------------------------- Delete Data ----------------------------"); - for (Long each : orderIds) { - orderRepository.delete(each); - orderItemRepository.delete(each); - } - } - - @Override - public void printData() throws SQLException { - System.out.println("---------------------------- Print Order Data -----------------------"); - for (Object each : orderRepository.selectAll()) { - System.out.println(each); - } - System.out.println("---------------------------- Print OrderItem Data -------------------"); - for (Object 
each : orderItemRepository.selectAll()) { - System.out.println(each); - } - } -} diff --git a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/service/ShadowUserServiceImpl.java b/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/service/ShadowUserServiceImpl.java deleted file mode 100644 index 28f87a418f0c9..0000000000000 --- a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/service/ShadowUserServiceImpl.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.jpa.service; - -import org.apache.shardingsphere.example.core.api.repository.ShadowUserRepository; -import org.apache.shardingsphere.example.core.api.service.ExampleService; -import org.apache.shardingsphere.example.core.jpa.entity.ShadowUserEntity; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import javax.annotation.Resource; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; - -@Service("shadowExample") -public class ShadowUserServiceImpl implements ExampleService { - - @Resource - private ShadowUserRepository userRepository; - - @Override - public void initEnvironment() { - } - - @Override - public void cleanEnvironment() { - } - - @Override - @Transactional - public void processSuccess() throws SQLException { - System.out.println("-------------- Process Success Begin ---------------"); - List userIds = insertData(); - printData(); - deleteData(userIds); - printData(); - System.out.println("-------------- Process Success Finish --------------"); - } - - @Override - @Transactional - public void processFailure() throws SQLException { - System.out.println("-------------- Process Failure Begin ---------------"); - insertData(); - System.out.println("-------------- Process Failure Finish --------------"); - throw new RuntimeException("Exception occur for transaction test."); - } - - private List insertData() throws SQLException { - System.out.println("---------------------------- Insert Data ----------------------------"); - List result = new ArrayList<>(10); - for (int i = 1; i <= 10; i++) { - ShadowUserEntity user = new ShadowUserEntity(); - user.setUserId(i); - user.setUserType(i % 2); - user.setUsername("test_jpa_" + i); - user.setPwd("pwd_jpa_" + i); - userRepository.insert(user); - result.add((long) user.getUserId()); - } - return result; - } - - private void deleteData(final List userIds) throws 
SQLException { - System.out.println("---------------------------- Delete Data ----------------------------"); - for (Long each : userIds) { - userRepository.delete(each); - } - } - - @Override - public void printData() throws SQLException { - System.out.println("---------------------------- Print User Data -----------------------"); - for (Object each : userRepository.selectAll()) { - System.out.println(each); - } - } -} diff --git a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/service/UserServiceImpl.java b/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/service/UserServiceImpl.java deleted file mode 100644 index 29e7c70b853b7..0000000000000 --- a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/service/UserServiceImpl.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.jpa.service; - -import org.apache.shardingsphere.example.core.api.repository.UserRepository; -import org.apache.shardingsphere.example.core.api.service.ExampleService; -import org.apache.shardingsphere.example.core.jpa.entity.UserEntity; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import javax.annotation.Resource; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; - -@Service("encryptExample") -public class UserServiceImpl implements ExampleService { - - @Resource - private UserRepository userRepository; - - @Override - public void initEnvironment() { - } - - @Override - public void cleanEnvironment() { - } - - @Override - @Transactional - public void processSuccess() throws SQLException { - System.out.println("-------------- Process Success Begin ---------------"); - List userIds = insertData(); - printData(); - deleteData(userIds); - printData(); - System.out.println("-------------- Process Success Finish --------------"); - } - - @Override - @Transactional - public void processFailure() throws SQLException { - System.out.println("-------------- Process Failure Begin ---------------"); - insertData(); - System.out.println("-------------- Process Failure Finish --------------"); - throw new RuntimeException("Exception occur for transaction test."); - } - - private List insertData() throws SQLException { - System.out.println("---------------------------- Insert Data ----------------------------"); - List result = new ArrayList<>(10); - for (int i = 1; i <= 10; i++) { - UserEntity user = new UserEntity(); - user.setUserId(i); - user.setUsername("test_jpa_" + i); - user.setPwd("pwd_jpa_" + i); - userRepository.insert(user); - result.add((long) user.getUserId()); - } - return result; - } - - private void deleteData(final List userIds) throws SQLException { - System.out.println("---------------------------- 
Delete Data ----------------------------"); - for (Long each : userIds) { - userRepository.delete(each); - } - } - - @Override - public void printData() throws SQLException { - System.out.println("---------------------------- Print User Data -----------------------"); - for (Object each : userRepository.selectAll()) { - System.out.println(each); - } - } -} diff --git a/examples/example-core/example-spring-mybatis/pom.xml b/examples/example-core/example-spring-mybatis/pom.xml deleted file mode 100644 index 97fa1e09bc58a..0000000000000 --- a/examples/example-core/example-spring-mybatis/pom.xml +++ /dev/null @@ -1,59 +0,0 @@ - - - - - 4.0.0 - - org.apache.shardingsphere.example - example-core - ${revision} - - example-spring-mybatis - common::example-spring-mybatis - - - - org.apache.shardingsphere.example - config-utility - ${project.parent.version} - - - org.apache.shardingsphere.example - example-api - ${project.parent.version} - - - org.mybatis - mybatis - - - org.mybatis - mybatis-spring - - - org.springframework - spring-orm - - - org.springframework - spring-context-support - - - diff --git a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/repository/MybatisOrderRepository.java b/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/repository/MybatisOrderRepository.java deleted file mode 100644 index 7ce783f73b6d3..0000000000000 --- a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/repository/MybatisOrderRepository.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.mybatis.repository; - -import org.apache.shardingsphere.example.core.api.repository.OrderRepository; -import org.apache.ibatis.annotations.Mapper; - -@Mapper -public interface MybatisOrderRepository extends OrderRepository { -} diff --git a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/repository/MybatisShadowUserRepository.java b/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/repository/MybatisShadowUserRepository.java deleted file mode 100644 index e231e4b0880eb..0000000000000 --- a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/repository/MybatisShadowUserRepository.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.core.mybatis.repository; - -import org.apache.ibatis.annotations.Mapper; -import org.apache.shardingsphere.example.core.api.entity.ShadowUser; -import org.apache.shardingsphere.example.core.api.repository.ShadowUserRepository; - -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -@Mapper -public interface MybatisShadowUserRepository extends ShadowUserRepository { - - @Override - default void createTableIfNotExists() throws SQLException { - createTableIfNotExistsNative(); - createTableIfNotExistsShadow(); - } - - void createTableIfNotExistsNative(); - - void createTableIfNotExistsShadow(); - - @Override - default void truncateTable() throws SQLException { - truncateTableShadow(); - truncateTableNative(); - } - - void truncateTableNative(); - - void truncateTableShadow(); - - @Override - default void dropTable() throws SQLException { - dropTableShadow(); - dropTableNative(); - } - - void dropTableNative(); - - void dropTableShadow(); - - @Override - default List selectAll() throws SQLException { - List result = new ArrayList<>(); - result.addAll(selectAllByShadow(0)); - result.addAll(selectAllByShadow(1)); - return result; - } - - List selectAllByShadow(int userType) throws SQLException; - - @Override - default void delete(Long primaryKey) throws SQLException { - Map idTypeMapping = new HashMap<>(2); - idTypeMapping.put("userId", primaryKey); - idTypeMapping.put("userType", primaryKey % 2); - deleteOne(idTypeMapping); - } 
- - void deleteOne(Map idTypeMapping); -} diff --git a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/repository/MybatisUserRepository.java b/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/repository/MybatisUserRepository.java deleted file mode 100644 index 5b3682bfae2fe..0000000000000 --- a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/repository/MybatisUserRepository.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.mybatis.repository; - -import org.apache.shardingsphere.example.core.api.repository.UserRepository; - -public interface MybatisUserRepository extends UserRepository { -} diff --git a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/service/ShadowUserServiceImpl.java b/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/service/ShadowUserServiceImpl.java deleted file mode 100644 index fe6833c02df48..0000000000000 --- a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/service/ShadowUserServiceImpl.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.mybatis.service; - -import org.apache.shardingsphere.example.core.api.entity.ShadowUser; -import org.apache.shardingsphere.example.core.api.repository.ShadowUserRepository; -import org.apache.shardingsphere.example.core.api.service.ExampleService; -import org.springframework.stereotype.Service; - -import javax.annotation.Resource; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; - -@Service("shadow") -public class ShadowUserServiceImpl implements ExampleService { - - @Resource - private ShadowUserRepository userRepository; - - @Override - public void initEnvironment() throws SQLException { - userRepository.createTableIfNotExists(); - userRepository.truncateTable(); - } - - @Override - public void cleanEnvironment() throws SQLException { - userRepository.dropTable(); - } - - @Override - public void processSuccess() throws SQLException { - System.out.println("-------------- Process Success Begin ---------------"); - List userIds = insertData(); - printData(); - deleteData(userIds); - printData(); - System.out.println("-------------- Process Success Finish --------------"); - } - - private List insertData() throws SQLException { - System.out.println("---------------------------- Insert Data ----------------------------"); - List result = new ArrayList<>(10); - for (int i = 1; i <= 10; i++) { - ShadowUser user = new ShadowUser(); - user.setUserId(i); - user.setUserType(i % 2); - user.setUsername("test_mybatis_" + i); - user.setPwd("pwd_mybatis_" + i); - userRepository.insert(user); - result.add((long) user.getUserId()); - } - return result; - } - - private void deleteData(final List userIds) throws SQLException { - System.out.println("---------------------------- Delete Data ----------------------------"); - for (Long each : userIds) { - userRepository.delete(each); - } - } - - @Override - public void processFailure() throws SQLException { - System.out.println("-------------- 
Process Failure Begin ---------------"); - insertData(); - System.out.println("-------------- Process Failure Finish --------------"); - throw new RuntimeException("Exception occur for transaction test."); - } - - @Override - public void printData() throws SQLException { - System.out.println("---------------------------- Print User Data -----------------------"); - for (Object each : userRepository.selectAll()) { - System.out.println(each); - } - } -} diff --git a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/service/UserServiceImpl.java b/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/service/UserServiceImpl.java deleted file mode 100644 index 2cf7ebda3963f..0000000000000 --- a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/service/UserServiceImpl.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.example.core.mybatis.service; - -import org.apache.shardingsphere.example.core.api.repository.UserRepository; -import org.apache.shardingsphere.example.core.api.entity.User; -import org.apache.shardingsphere.example.core.api.service.ExampleService; -import org.springframework.stereotype.Service; - -import javax.annotation.Resource; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.List; - -@Service("encrypt") -public class UserServiceImpl implements ExampleService { - - @Resource - private UserRepository userRepository; - - @Override - public void initEnvironment() throws SQLException { - userRepository.createTableIfNotExists(); - userRepository.truncateTable(); - } - - @Override - public void cleanEnvironment() throws SQLException { - userRepository.dropTable(); - } - - @Override - public void processSuccess() throws SQLException { - System.out.println("-------------- Process Success Begin ---------------"); - List userIds = insertData(); - printData(); - deleteData(userIds); - printData(); - System.out.println("-------------- Process Success Finish --------------"); - } - - private List insertData() throws SQLException { - System.out.println("---------------------------- Insert Data ----------------------------"); - List result = new ArrayList<>(10); - for (int i = 1; i <= 10; i++) { - User user = new User(); - user.setUserId(i); - user.setUsername("test_mybatis_" + i); - user.setPwd("pwd_mybatis_" + i); - userRepository.insert(user); - result.add((long) user.getUserId()); - } - return result; - } - - @Override - public void processFailure() throws SQLException { - System.out.println("-------------- Process Failure Begin ---------------"); - insertData(); - System.out.println("-------------- Process Failure Finish --------------"); - throw new RuntimeException("Exception occur for transaction test."); - } - - private void deleteData(final List userIds) throws SQLException { - 
System.out.println("---------------------------- Delete Data ----------------------------"); - for (Long each : userIds) { - userRepository.delete(each); - } - } - - @Override - public void printData() throws SQLException { - System.out.println("---------------------------- Print User Data -----------------------"); - for (Object each : userRepository.selectAll()) { - System.out.println(each); - } - } -} diff --git a/examples/example-core/example-spring-mybatis/src/main/resources/META-INF/mappers/ShadowUserMapper.xml b/examples/example-core/example-spring-mybatis/src/main/resources/META-INF/mappers/ShadowUserMapper.xml deleted file mode 100644 index 4f2624a5e6864..0000000000000 --- a/examples/example-core/example-spring-mybatis/src/main/resources/META-INF/mappers/ShadowUserMapper.xml +++ /dev/null @@ -1,68 +0,0 @@ - - - - - - - - - - - - - - - CREATE TABLE IF NOT EXISTS t_user (user_id INT NOT NULL AUTO_INCREMENT, user_type INT(11), username VARCHAR(200), pwd VARCHAR(200), PRIMARY KEY (user_id)); - - - - CREATE TABLE IF NOT EXISTS t_user (user_id INT NOT NULL AUTO_INCREMENT, user_type INT(11), username VARCHAR(200), pwd VARCHAR(200), PRIMARY KEY (user_id)) /*shadow:true,foo:bar*/; - - - - TRUNCATE TABLE t_user; - - - - TRUNCATE TABLE t_user /*shadow:true,foo:bar*/; - - - - DROP TABLE IF EXISTS t_user; - - - - DROP TABLE IF EXISTS t_user /*shadow:true,foo:bar*/; - - - - INSERT INTO t_user (user_id, user_type, username, pwd) VALUES (#{userId,jdbcType=INTEGER}, #{userType,jdbcType=INTEGER}, #{username,jdbcType=VARCHAR}, #{pwd,jdbcType=VARCHAR}); - - - - DELETE FROM t_user WHERE user_id = #{userId,jdbcType=INTEGER} AND user_type = #{userType,jdbcType=INTEGER}; - - - - - - diff --git a/examples/example-core/example-spring-mybatis/src/main/resources/META-INF/mappers/UserMapper.xml b/examples/example-core/example-spring-mybatis/src/main/resources/META-INF/mappers/UserMapper.xml deleted file mode 100644 index eff8a889439a0..0000000000000 --- 
a/examples/example-core/example-spring-mybatis/src/main/resources/META-INF/mappers/UserMapper.xml +++ /dev/null @@ -1,51 +0,0 @@ - - - - - - - - - - - - - - CREATE TABLE IF NOT EXISTS t_user (user_id INT NOT NULL AUTO_INCREMENT, username VARCHAR(200), pwd VARCHAR(200), PRIMARY KEY (user_id)); - - - - TRUNCATE TABLE t_user; - - - - DROP TABLE IF EXISTS t_user; - - - - INSERT INTO t_user (user_id, username, pwd) VALUES (#{userId,jdbcType=INTEGER}, #{username,jdbcType=VARCHAR}, #{pwd,jdbcType=VARCHAR}) - - - - DELETE FROM t_user WHERE user_id = #{userId,jdbcType=INTEGER}; - - - - diff --git a/examples/example-core/pom.xml b/examples/example-core/pom.xml deleted file mode 100644 index ee2ae414d1fdc..0000000000000 --- a/examples/example-core/pom.xml +++ /dev/null @@ -1,69 +0,0 @@ - - - - - 4.0.0 - - org.apache.shardingsphere.example - shardingsphere-examples - ${revision} - - example-core - pom - ${project.artifactId} - - - config-utility - example-api - example-raw-jdbc - example-spring-mybatis - example-spring-jpa - - - - - - - - org.eclipse.m2e - lifecycle-mapping - 1.0.0 - - - - - org.codehaus.mojo - flatten-maven-plugin - [1.1.0,) - - flatten - - - - - - - - - - - - - diff --git a/examples/pom.xml b/examples/pom.xml index e06f3cb1e08bd..0e7929d8cf509 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -27,10 +27,9 @@ ${project.artifactId} - example-core shardingsphere-proxy-example shardingsphere-parser-example - shardingsphere-example-generator + shardingsphere-jdbc-example-generator diff --git a/examples/shardingsphere-example-generator/README_ZH.md b/examples/shardingsphere-example-generator/README_ZH.md deleted file mode 100644 index 1921c0ed2b043..0000000000000 --- a/examples/shardingsphere-example-generator/README_ZH.md +++ /dev/null @@ -1,29 +0,0 @@ -# ShardingSphere 使用示例代码生成器 - -基于模板引擎生成 ShardingSphere 使用示例代码。 - -## 使用步骤 - -1. 配置参数 - -文件位置:`src/main/resources/config.yaml` - -2. 
生成代码 - -运行 `org.apache.shardingsphere.example.generator.ExampleGeneratorMain` 即可生成对应的示例代码。 - -生成后的代码位于:`target/generated-sources/shardingsphere-${product}-sample` - -## 配置项说明 - -| *属性名称* | *说明* | *可选项* | -| :---------- | ---------- |:----------------------------------------------------------------------------------------------------------------------------------------------| -| product | 产品 | jdbc、proxy | -| mode | 运行模式 | cluster-zookeeper、cluster-etcd、standalone | -| transaction | 事务类型 | local, xa-atomikos, xa-narayana | -| features | 功能 | sharding, readwrite-splitting, encrypt, shadow, mask | -| frameworks | 框架 | jdbc、spring-boot-starter-jdbc、spring-boot-starter-jpa、spring-boot-starter-mybatis、spring-namespace-jpa、spring-namespace-mybatis | -| host | 数据库主机名 | | -| port | 数据库端口 | | -| username | 数据库用户名 | | -| password | 数据库密码 | | diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/impl/ProxyExampleGenerator.java b/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/impl/ProxyExampleGenerator.java deleted file mode 100644 index 52f4973b882ae..0000000000000 --- a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/impl/ProxyExampleGenerator.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.example.generator.core.impl; - -import freemarker.template.Configuration; -import freemarker.template.TemplateException; -import org.apache.shardingsphere.example.generator.core.ExampleGenerator; -import org.apache.shardingsphere.example.generator.core.GenerateUtils; - -import java.io.IOException; -import java.util.Collections; -import java.util.Map; - -/** - * Proxy example generator. - */ -public final class ProxyExampleGenerator implements ExampleGenerator { - - @Override - public void generate(final Configuration templateConfig, final Map dataModel, final String relativePath) throws IOException, TemplateException { - GenerateUtils.generateDirs(templateConfig, dataModel, Collections.singleton("conf"), relativePath + RESOURCES_PATH); - String outputPath = GenerateUtils.generatePath(templateConfig, dataModel, relativePath); - processFile(templateConfig, dataModel, outputPath); - } - - private void processFile(final Configuration templateConfig, final Map dataModel, - final String baseOutputPath) throws TemplateException, IOException { - String outputPath = baseOutputPath + RESOURCES_PATH + "/conf/"; - GenerateUtils.processFile(templateConfig, dataModel, getType() + "/config-example_db.ftl", outputPath + "config-example_db.yaml"); - GenerateUtils.processFile(templateConfig, dataModel, getType() + "/server.ftl", outputPath + "server.yaml"); - GenerateUtils.processFile(templateConfig, dataModel, getType() + "/pom.ftl", baseOutputPath + "pom.xml"); - } - - public String getType() { - return "proxy"; - } -} diff 
--git a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/config-example_db.ftl b/examples/shardingsphere-example-generator/src/main/resources/template/proxy/config-example_db.ftl deleted file mode 100644 index 0bae2b16c4251..0000000000000 --- a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/config-example_db.ftl +++ /dev/null @@ -1,64 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -# You can get more configuration items from the following URL: -# https://shardingsphere.apache.org/document/current/en/user-manual/shardingsphere-jdbc/yaml-config/rules/ - -schemaName: example_db - -dataSources: - primary_ds_0: - dataSourceClassName: com.zaxxer.hikari.HikariDataSource - driverClassName: org.h2.Driver - jdbcUrl: jdbc:h2:mem:primary_ds;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=MYSQL - username: sa - password: - primary_ds_0_replica_0: - dataSourceClassName: com.zaxxer.hikari.HikariDataSource - driverClassName: org.h2.Driver - jdbcUrl: jdbc:h2:mem:replica_ds_0;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=MYSQL - username: sa - password: - primary_ds_0_replica_1: - dataSourceClassName: com.zaxxer.hikari.HikariDataSource - driverClassName: org.h2.Driver - jdbcUrl: jdbc:h2:mem:replica_ds_1;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=MYSQL - username: sa - password: - primary_ds_1: - dataSourceClassName: com.zaxxer.hikari.HikariDataSource - driverClassName: org.h2.Driver - jdbcUrl: jdbc:h2:mem:primary_ds;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=MYSQL - username: sa - password: - primary_ds_1_replica_0: - dataSourceClassName: com.zaxxer.hikari.HikariDataSource - driverClassName: org.h2.Driver - jdbcUrl: jdbc:h2:mem:replica_ds_0;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=MYSQL - username: sa - password: - primary_ds_1_replica_1: - dataSourceClassName: com.zaxxer.hikari.HikariDataSource - driverClassName: org.h2.Driver - jdbcUrl: jdbc:h2:mem:replica_ds_1;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=MYSQL - username: sa - password: - -rules: -<#list feature?split(",") as item> - <#include "feature/${item}.ftl"> - diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/feature/encrypt.ftl b/examples/shardingsphere-example-generator/src/main/resources/template/proxy/feature/encrypt.ftl deleted file mode 100644 index 48c96e09f15a8..0000000000000 --- 
a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/feature/encrypt.ftl +++ /dev/null @@ -1,29 +0,0 @@ -<#-- - ~ Licensed to the Apache Software Foundation (ASF) under one or more - ~ contributor license agreements. See the NOTICE file distributed with - ~ this work for additional information regarding copyright ownership. - ~ The ASF licenses this file to You under the Apache License, Version 2.0 - ~ (the "License"); you may not use this file except in compliance with - ~ the License. You may obtain a copy of the License at - ~ - ~ http://www.apache.org/licenses/LICENSE-2.0 - ~ - ~ Unless required by applicable law or agreed to in writing, software - ~ distributed under the License is distributed on an "AS IS" BASIS, - ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - ~ See the License for the specific language governing permissions and - ~ limitations under the License. - --> - - !ENCRYPT - tables: - t_order: - columns: - columnName: - cipher: - name: cipher - encryptorName: encryptor - encryptors: - encryptor: - type: AES - props: - aes-key-value: 123456abc diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/feature/readwrite-splitting.ftl b/examples/shardingsphere-example-generator/src/main/resources/template/proxy/feature/readwrite-splitting.ftl deleted file mode 100644 index caf067d1a5593..0000000000000 --- a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/feature/readwrite-splitting.ftl +++ /dev/null @@ -1,27 +0,0 @@ -<#-- - ~ Licensed to the Apache Software Foundation (ASF) under one or more - ~ contributor license agreements. See the NOTICE file distributed with - ~ this work for additional information regarding copyright ownership. - ~ The ASF licenses this file to You under the Apache License, Version 2.0 - ~ (the "License"); you may not use this file except in compliance with - ~ the License. 
You may obtain a copy of the License at - ~ - ~ http://www.apache.org/licenses/LICENSE-2.0 - ~ - ~ Unless required by applicable law or agreed to in writing, software - ~ distributed under the License is distributed on an "AS IS" BASIS, - ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - ~ See the License for the specific language governing permissions and - ~ limitations under the License. - --> - - !READWRITE_SPLITTING - dataSources: - readwrite_ds: - writeDataSourceName: write_ds - readDataSourceNames: - - read_ds_0 - - read_ds_1 - loadBalancerName: read_balance - loadBalancers: - read_balance: - type: ROUND_ROBIN diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/feature/shadow.ftl b/examples/shardingsphere-example-generator/src/main/resources/template/proxy/feature/shadow.ftl deleted file mode 100644 index 7348d479e5ee1..0000000000000 --- a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/feature/shadow.ftl +++ /dev/null @@ -1,46 +0,0 @@ -<#-- - ~ Licensed to the Apache Software Foundation (ASF) under one or more - ~ contributor license agreements. See the NOTICE file distributed with - ~ this work for additional information regarding copyright ownership. - ~ The ASF licenses this file to You under the Apache License, Version 2.0 - ~ (the "License"); you may not use this file except in compliance with - ~ the License. You may obtain a copy of the License at - ~ - ~ http://www.apache.org/licenses/LICENSE-2.0 - ~ - ~ Unless required by applicable law or agreed to in writing, software - ~ distributed under the License is distributed on an "AS IS" BASIS, - ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - ~ See the License for the specific language governing permissions and - ~ limitations under the License. 
- --> - - !SHADOW - dataSources: - shadowDataSource: - productionDataSourceName: ds - shadowDataSourceName: shadow_ds - tables: - t_order: - dataSourceNames: - - shadowDataSource - shadowAlgorithmNames: - - user-id-insert-match-algorithm - - user-id-select-match-algorithm - - sql-hint-algorithm - shadowAlgorithms: - user-id-insert-match-algorithm: - type: REGEX_MATCH - props: - operation: insert - column: user_id - regex: "[1]" - user-id-select-match-algorithm: - type: REGEX_MATCH - props: - operation: insert - column: user_id - regex: "[1]" - sql-hint-algorithm: - type: SQL_HINT - props: - foo: bar diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/feature/sharding.ftl b/examples/shardingsphere-example-generator/src/main/resources/template/proxy/feature/sharding.ftl deleted file mode 100644 index f9548d8e783b1..0000000000000 --- a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/feature/sharding.ftl +++ /dev/null @@ -1,79 +0,0 @@ -<#-- - ~ Licensed to the Apache Software Foundation (ASF) under one or more - ~ contributor license agreements. See the NOTICE file distributed with - ~ this work for additional information regarding copyright ownership. - ~ The ASF licenses this file to You under the Apache License, Version 2.0 - ~ (the "License"); you may not use this file except in compliance with - ~ the License. You may obtain a copy of the License at - ~ - ~ http://www.apache.org/licenses/LICENSE-2.0 - ~ - ~ Unless required by applicable law or agreed to in writing, software - ~ distributed under the License is distributed on an "AS IS" BASIS, - ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - ~ See the License for the specific language governing permissions and - ~ limitations under the License. 
- --> - - !SHARDING - tables: - t_order: - actualDataNodes: ${r'ds_${0..1}.t_order_${0..1}'} - tableStrategy: - standard: - shardingColumn: order_id - shardingAlgorithmName: t_order_inline - keyGenerateStrategy: - column: order_id - keyGeneratorName: snowflake - t_order_item: - actualDataNodes: ${r'ds_${0..1}.t_order_item_${0..1}'} - tableStrategy: - standard: - shardingColumn: order_id - shardingAlgorithmName: t_order_item_inline - keyGenerateStrategy: - column: order_item_id - keyGeneratorName: snowflake - auditStrategy: - auditorNames: - - sharding_key_required_auditor - allowHintDisable: true - autoTables: - t_order_auto: - actualDataSources: ds_0 - shardingStrategy: - standard: - shardingColumn: user_id - shardingAlgorithmName: t_order_inline - bindingTables: - - t_order,t_order_item - defaultDatabaseStrategy: - standard: - shardingColumn: user_id - shardingAlgorithmName: database_inline - defaultTableStrategy: - none: - defaultKeyGenerateStrategy: - none: - - shardingAlgorithms: - database_inline: - type: INLINE - props: - algorithm-expression: ${r'ds_${user_id % 2}'} - t_order_inline: - type: INLINE - props: - algorithm-expression: ${r't_order_${order_id % 2}'} - t_order_item_inline: - type: INLINE - props: - algorithm-expression: ${r't_order_item_${order_id % 2}'} - - keyGenerators: - snowflake: - type: SNOWFLAKE - - auditors: - sharding_key_required_auditor: - type: DML_SHARDING_CONDITIONS diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/mode/cluster-etcd.ftl b/examples/shardingsphere-example-generator/src/main/resources/template/proxy/mode/cluster-etcd.ftl deleted file mode 100644 index 43a4341b1fe16..0000000000000 --- a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/mode/cluster-etcd.ftl +++ /dev/null @@ -1,24 +0,0 @@ -<#-- - ~ Licensed to the Apache Software Foundation (ASF) under one or more - ~ contributor license agreements. 
See the NOTICE file distributed with - ~ this work for additional information regarding copyright ownership. - ~ The ASF licenses this file to You under the Apache License, Version 2.0 - ~ (the "License"); you may not use this file except in compliance with - ~ the License. You may obtain a copy of the License at - ~ - ~ http://www.apache.org/licenses/LICENSE-2.0 - ~ - ~ Unless required by applicable law or agreed to in writing, software - ~ distributed under the License is distributed on an "AS IS" BASIS, - ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - ~ See the License for the specific language governing permissions and - ~ limitations under the License. - --> - -mode: - type: Cluster - repository: - type: etcd - props: - namespace: demo_yaml - server-lists: localhost:2379 diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/mode/cluster-zookeeper.ftl b/examples/shardingsphere-example-generator/src/main/resources/template/proxy/mode/cluster-zookeeper.ftl deleted file mode 100644 index dc3b41f6e31f3..0000000000000 --- a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/mode/cluster-zookeeper.ftl +++ /dev/null @@ -1,24 +0,0 @@ -<#-- - ~ Licensed to the Apache Software Foundation (ASF) under one or more - ~ contributor license agreements. See the NOTICE file distributed with - ~ this work for additional information regarding copyright ownership. - ~ The ASF licenses this file to You under the Apache License, Version 2.0 - ~ (the "License"); you may not use this file except in compliance with - ~ the License. You may obtain a copy of the License at - ~ - ~ http://www.apache.org/licenses/LICENSE-2.0 - ~ - ~ Unless required by applicable law or agreed to in writing, software - ~ distributed under the License is distributed on an "AS IS" BASIS, - ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- ~ See the License for the specific language governing permissions and - ~ limitations under the License. - --> - -mode: - type: Cluster - repository: - type: ZooKeeper - props: - namespace: demo_yaml - server-lists: localhost:2181 diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/mode/standalone.ftl b/examples/shardingsphere-example-generator/src/main/resources/template/proxy/mode/standalone.ftl deleted file mode 100644 index 0f5d55003b46a..0000000000000 --- a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/mode/standalone.ftl +++ /dev/null @@ -1,21 +0,0 @@ -<#-- - ~ Licensed to the Apache Software Foundation (ASF) under one or more - ~ contributor license agreements. See the NOTICE file distributed with - ~ this work for additional information regarding copyright ownership. - ~ The ASF licenses this file to You under the Apache License, Version 2.0 - ~ (the "License"); you may not use this file except in compliance with - ~ the License. You may obtain a copy of the License at - ~ - ~ http://www.apache.org/licenses/LICENSE-2.0 - ~ - ~ Unless required by applicable law or agreed to in writing, software - ~ distributed under the License is distributed on an "AS IS" BASIS, - ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - ~ See the License for the specific language governing permissions and - ~ limitations under the License. 
- --> - -mode: - type: Standalone - repository: - type: JDBC diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/pom.ftl b/examples/shardingsphere-example-generator/src/main/resources/template/proxy/pom.ftl deleted file mode 100644 index dbcf50b3366a7..0000000000000 --- a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/pom.ftl +++ /dev/null @@ -1,31 +0,0 @@ - - - - - 4.0.0 - org.apache.shardingsphere.example - ${feature?replace(',', '-')}--${framework}--${mode}--${transaction} - ${shardingsphereVersion} - ${r'${project.artifactId}'} - - - - - diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/server.ftl b/examples/shardingsphere-example-generator/src/main/resources/template/proxy/server.ftl deleted file mode 100644 index 4f96b92717be7..0000000000000 --- a/examples/shardingsphere-example-generator/src/main/resources/template/proxy/server.ftl +++ /dev/null @@ -1,40 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -# You can get more configuration items about proxy conf from the following URL: -# https://shardingsphere.apache.org/document/current/en/user-manual/shardingsphere-proxy/configuration/ - -<#if mode?exists> - <#include "mode/${mode}.ftl" /> - - -authority: - users: - - user: root - password: root - - user: sharding - password: sharding - privilege: - type: ALL_PERMITTED - -props: - max-connections-size-per-query: 1 - executor-size: 16 # Infinite by default. - proxy-frontend-flush-threshold: 128 # The default value is 128. - sql-show: false - check-table-metadata-enabled: false - sql-simple: false diff --git a/examples/shardingsphere-example-generator/README.md b/examples/shardingsphere-jdbc-example-generator/README.md similarity index 67% rename from examples/shardingsphere-example-generator/README.md rename to examples/shardingsphere-jdbc-example-generator/README.md index 96e20cabdbc86..f9275dbce5d1e 100644 --- a/examples/shardingsphere-example-generator/README.md +++ b/examples/shardingsphere-jdbc-example-generator/README.md @@ -16,14 +16,13 @@ Generated codes: `target/generated-sources/shardingsphere-${product}-sample` ## Configuration Item Explanation -| *Name* | *Description* | *Options* | -| :---------- | ----------------- |:-------------------------------------------------------------------------------------------------------------------------------------------------| -| product | product | jdbc, proxy | -| mode | operating mode | cluster-zookeeper, cluster-etcd, standalone | -| transaction | transaction type | local, xa-atomikos, xa-narayana | -| features | feature set | sharding, readwrite-splitting, encrypt, shadow, mask | -| frameworks | framework set | jdbc, spring-boot-starter-jdbc, spring-boot-starter-jpa, spring-boot-starter-mybatis, spring-namespace-jpa, spring-namespace-mybatis | -| host | database host | | -| port | database port | | -| username | database username | | -| password | database password | | +| *Name* | *Description* | *Options* 
| +|:------------|-------------------|:-------------------------------------------------------------------------------------------------------------------------------------| +| mode | operating mode | cluster-zookeeper, cluster-etcd, standalone | +| transaction | transaction type | local, xa-atomikos, xa-narayana | +| features | feature set | sharding, readwrite-splitting, encrypt, shadow, mask | +| frameworks | framework set | jdbc, spring-boot-starter-jdbc, spring-boot-starter-jpa, spring-boot-starter-mybatis, spring-namespace-jpa, spring-namespace-mybatis | +| host | database host | | +| port | database port | | +| username | database username | | +| password | database password | | diff --git a/examples/shardingsphere-example-generator/pom.xml b/examples/shardingsphere-jdbc-example-generator/pom.xml similarity index 98% rename from examples/shardingsphere-example-generator/pom.xml rename to examples/shardingsphere-jdbc-example-generator/pom.xml index 935810e3a54b7..ac26e201d2dd7 100644 --- a/examples/shardingsphere-example-generator/pom.xml +++ b/examples/shardingsphere-jdbc-example-generator/pom.xml @@ -25,7 +25,7 @@ shardingsphere-examples ${revision} - shardingsphere-example-generator + shardingsphere-jdbc-example-generator diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/ExampleGeneratorMain.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/ExampleGeneratorMain.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/ExampleGeneratorMain.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/ExampleGeneratorMain.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/ExampleGenerator.java 
b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/ExampleGenerator.java similarity index 92% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/ExampleGenerator.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/ExampleGenerator.java index 89466849d1c09..249b266256338 100644 --- a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/ExampleGenerator.java +++ b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/ExampleGenerator.java @@ -25,9 +25,11 @@ import org.apache.shardingsphere.infra.spi.annotation.SingletonSPI; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPI; +import java.io.File; import java.io.IOException; import java.util.LinkedHashMap; import java.util.Map; +import java.util.Objects; import java.util.Properties; /** @@ -36,8 +38,6 @@ @SingletonSPI public interface ExampleGenerator extends TypedSPI { - String DEFAULT_OUTPUT = "./examples/shardingsphere-example-generator/target/generated-sources/"; - String PROJECT_PATH = "shardingsphere-${product}-sample/${feature?replace(',', '-')}--${framework}--${mode}--${transaction}/"; String RESOURCES_PATH = "src/main/resources"; @@ -50,7 +50,8 @@ public interface ExampleGenerator extends TypedSPI { */ default String buildOutputPath(YamlExampleConfiguration exampleConfig) { if (Strings.isNullOrEmpty(exampleConfig.getOutput())) { - return DEFAULT_OUTPUT + PROJECT_PATH; + File file = new File(Objects.requireNonNull(this.getClass().getClassLoader().getResource("")).getPath()); + return file.getParent() + "/generated-sources/" + PROJECT_PATH; } return exampleConfig.getOutput() + PROJECT_PATH; } @@ -64,11 +65,12 @@ default String buildOutputPath(YamlExampleConfiguration exampleConfig) { * @throws 
TemplateException template exception */ default void generate(final Configuration templateConfig, final YamlExampleConfiguration exampleConfig) throws IOException, TemplateException { + String outputPath = buildOutputPath(exampleConfig); for (String eachMode : exampleConfig.getModes()) { for (String eachTransaction : exampleConfig.getTransactions()) { for (String eachFramework : exampleConfig.getFrameworks()) { for (String eachFeature : GenerateUtils.generateCombination(exampleConfig.getFeatures())) { - generate(templateConfig, buildDataModel(exampleConfig.getProps(), eachMode, eachTransaction, eachFramework, eachFeature), buildOutputPath(exampleConfig)); + generate(templateConfig, buildDataModel(exampleConfig.getProps(), eachMode, eachTransaction, eachFramework, eachFeature), outputPath); } } } diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/ExampleGeneratorFactory.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/ExampleGeneratorFactory.java similarity index 97% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/ExampleGeneratorFactory.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/ExampleGeneratorFactory.java index dc4010dca6e6c..930422cc37a43 100644 --- a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/ExampleGeneratorFactory.java +++ b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/ExampleGeneratorFactory.java @@ -30,6 +30,7 @@ import java.io.IOException; import java.net.URISyntaxException; import java.net.URL; +import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.Properties; @@ -78,9 +79,6 @@ private 
YamlExampleConfiguration buildExampleConfiguration() { if (props.containsKey("output")) { result.setOutput(props.getProperty("output")); } - if (props.containsKey("products")) { - result.setProducts(getSysEnvByKey(props, "products")); - } if (props.containsKey("modes")) { result.setModes(getSysEnvByKey(props, "modes")); } @@ -96,6 +94,7 @@ private YamlExampleConfiguration buildExampleConfiguration() { result.setFrameworks(getSysEnvByKey(props, "frameworks")); } } + result.setProducts(Collections.singletonList("jdbc")); return result; } diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/GenerateUtils.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/GenerateUtils.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/GenerateUtils.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/GenerateUtils.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/impl/JDBCExampleGenerator.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/impl/JDBCExampleGenerator.java similarity index 85% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/impl/JDBCExampleGenerator.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/impl/JDBCExampleGenerator.java index bbdc166c5aa70..4464e46c94a61 100644 --- a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/impl/JDBCExampleGenerator.java +++ 
b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/impl/JDBCExampleGenerator.java @@ -40,10 +40,10 @@ public void generate(final Configuration templateConfig, final Map(Arrays.asList("jdbc", "proxy"))), - MODES("modes", new HashSet<>(Arrays.asList("memory", "proxy", "cluster-zookeeper", "cluster-etcd", "standalone"))), TRANSACTIONS("transactions", new HashSet<>(Arrays.asList("local", "xa-atomikos", "xa-narayana", "base-seata"))), diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/yaml/config/YamlExampleConfigurationValidator.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/yaml/config/YamlExampleConfigurationValidator.java similarity index 98% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/yaml/config/YamlExampleConfigurationValidator.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/yaml/config/YamlExampleConfigurationValidator.java index b42c74029a39a..edd5b4bbaf7fe 100644 --- a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/yaml/config/YamlExampleConfigurationValidator.java +++ b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/core/yaml/config/YamlExampleConfigurationValidator.java @@ -39,7 +39,6 @@ public final class YamlExampleConfigurationValidator { */ public static void validate(final YamlExampleConfiguration config) { Map> configMap = new HashMap<>(5, 1); - configMap.put("products", config.getProducts()); configMap.put("modes", config.getModes()); configMap.put("transactions", config.getTransactions()); configMap.put("features", config.getFeatures()); diff --git 
a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/ExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/ExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/ExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/ExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/ExampleScenarioFactory.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/ExampleScenarioFactory.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/ExampleScenarioFactory.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/ExampleScenarioFactory.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/FeatureExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/FeatureExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/FeatureExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/FeatureExampleScenario.java diff --git 
a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/EncryptExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/EncryptExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/EncryptExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/EncryptExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/MaskExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/MaskExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/MaskExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/MaskExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/ReadwriteSplittingExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/ReadwriteSplittingExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/ReadwriteSplittingExampleScenario.java rename to 
examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/ReadwriteSplittingExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/ShadowExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/ShadowExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/ShadowExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/ShadowExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/ShardingExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/ShardingExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/ShardingExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/feature/type/ShardingExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/FrameworkExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/FrameworkExampleScenario.java similarity index 100% rename from 
examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/FrameworkExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/FrameworkExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/JDBCExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/JDBCExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/JDBCExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/JDBCExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringBootStarterJdbcExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringBootStarterJdbcExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringBootStarterJdbcExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringBootStarterJdbcExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringBootStarterJpaExampleScenario.java 
b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringBootStarterJpaExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringBootStarterJpaExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringBootStarterJpaExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringBootStarterMyBatisExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringBootStarterMyBatisExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringBootStarterMyBatisExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringBootStarterMyBatisExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringNamespaceJdbcExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringNamespaceJdbcExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringNamespaceJdbcExampleScenario.java rename to 
examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringNamespaceJdbcExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringNamespaceJpaExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringNamespaceJpaExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringNamespaceJpaExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringNamespaceJpaExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringNamespaceMyBatisExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringNamespaceMyBatisExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringNamespaceMyBatisExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/framework/type/SpringNamespaceMyBatisExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/TransactionExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/TransactionExampleScenario.java similarity index 100% rename from 
examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/TransactionExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/TransactionExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/LocalExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/LocalExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/LocalExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/LocalExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/SeataExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/SeataExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/SeataExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/SeataExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/XAAtomikosExampleScenario.java 
b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/XAAtomikosExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/XAAtomikosExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/XAAtomikosExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/XANarayanaExampleScenario.java b/examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/XANarayanaExampleScenario.java similarity index 100% rename from examples/shardingsphere-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/XANarayanaExampleScenario.java rename to examples/shardingsphere-jdbc-example-generator/src/main/java/org/apache/shardingsphere/example/generator/scenario/transaction/type/XANarayanaExampleScenario.java diff --git a/examples/shardingsphere-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.core.ExampleGenerator b/examples/shardingsphere-jdbc-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.core.ExampleGenerator similarity index 91% rename from examples/shardingsphere-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.core.ExampleGenerator rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.core.ExampleGenerator index b606c021eb9f7..dc34689c5af63 100644 --- 
a/examples/shardingsphere-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.core.ExampleGenerator +++ b/examples/shardingsphere-jdbc-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.core.ExampleGenerator @@ -16,4 +16,3 @@ # org.apache.shardingsphere.example.generator.core.impl.JDBCExampleGenerator -org.apache.shardingsphere.example.generator.core.impl.ProxyExampleGenerator diff --git a/examples/shardingsphere-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.scenario.feature.FeatureExampleScenario b/examples/shardingsphere-jdbc-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.scenario.feature.FeatureExampleScenario similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.scenario.feature.FeatureExampleScenario rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.scenario.feature.FeatureExampleScenario diff --git a/examples/shardingsphere-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.scenario.framework.FrameworkExampleScenario b/examples/shardingsphere-jdbc-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.scenario.framework.FrameworkExampleScenario similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.scenario.framework.FrameworkExampleScenario rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.scenario.framework.FrameworkExampleScenario diff --git 
a/examples/shardingsphere-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.scenario.transaction.TransactionExampleScenario b/examples/shardingsphere-jdbc-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.scenario.transaction.TransactionExampleScenario similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.scenario.transaction.TransactionExampleScenario rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/META-INF/services/org.apache.shardingsphere.example.generator.scenario.transaction.TransactionExampleScenario diff --git a/examples/shardingsphere-example-generator/src/main/resources/config.yaml b/examples/shardingsphere-jdbc-example-generator/src/main/resources/config.yaml similarity index 92% rename from examples/shardingsphere-example-generator/src/main/resources/config.yaml rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/config.yaml index 1fbee7703a99d..789bc03c12598 100644 --- a/examples/shardingsphere-example-generator/src/main/resources/config.yaml +++ b/examples/shardingsphere-jdbc-example-generator/src/main/resources/config.yaml @@ -15,12 +15,8 @@ # limitations under the License. 
# -output: - - ./examples/shardingsphere-example-generator/target/generated-sources/ - -# supported: JDBC, PROXY -products: - - jdbc +#output: +# - ./examples/shardingsphere-jdbc-example-generator/target/generated-sources/ # supported: cluster-zookeeper,cluster-etcd,standalone modes: diff --git a/examples/shardingsphere-example-generator/src/main/resources/logback.xml b/examples/shardingsphere-jdbc-example-generator/src/main/resources/logback.xml similarity index 93% rename from examples/shardingsphere-example-generator/src/main/resources/logback.xml rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/logback.xml index 2c9638ba4764c..0dbb297e5f2d5 100644 --- a/examples/shardingsphere-example-generator/src/main/resources/logback.xml +++ b/examples/shardingsphere-jdbc-example-generator/src/main/resources/logback.xml @@ -17,7 +17,7 @@ --> - + ${log.context.name} diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/TestQueryAssistedShardingEncryptAlgorithm.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/TestQueryAssistedShardingEncryptAlgorithm.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/TestQueryAssistedShardingEncryptAlgorithm.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/TestQueryAssistedShardingEncryptAlgorithm.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/TransactionConfiguration.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/TransactionConfiguration.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/TransactionConfiguration.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/TransactionConfiguration.ftl diff --git 
a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/config/Configuration.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/config/Configuration.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/config/Configuration.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/config/Configuration.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/config/encrypt.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/config/encrypt.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/config/encrypt.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/config/encrypt.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/config/mask.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/config/mask.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/config/mask.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/config/mask.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/config/readwrite-splitting.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/config/readwrite-splitting.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/config/readwrite-splitting.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/config/readwrite-splitting.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/config/shadow.ftl 
b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/config/shadow.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/config/shadow.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/config/shadow.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/config/sharding.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/config/sharding.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/config/sharding.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/config/sharding.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/entity/Address.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/entity/Address.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/entity/Address.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/entity/Address.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/entity/Order.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/entity/Order.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/entity/Order.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/entity/Order.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/entity/OrderItem.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/entity/OrderItem.ftl similarity index 100% rename from 
examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/entity/OrderItem.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/entity/OrderItem.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/main/JDBCExampleMain.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/main/JDBCExampleMain.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/main/JDBCExampleMain.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/main/JDBCExampleMain.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/main/SpringBootStarJdbcExampleMain.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/main/SpringBootStarJdbcExampleMain.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/main/SpringBootStarJdbcExampleMain.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/main/SpringBootStarJdbcExampleMain.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/main/SpringBootStarJpaExampleMain.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/main/SpringBootStarJpaExampleMain.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/main/SpringBootStarJpaExampleMain.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/main/SpringBootStarJpaExampleMain.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/main/SpringBootStarMyBatisExampleMain.ftl 
b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/main/SpringBootStarMyBatisExampleMain.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/main/SpringBootStarMyBatisExampleMain.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/main/SpringBootStarMyBatisExampleMain.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/main/SpringNamespaceJdbcExampleMain.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/main/SpringNamespaceJdbcExampleMain.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/main/SpringNamespaceJdbcExampleMain.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/main/SpringNamespaceJdbcExampleMain.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/main/SpringNamespaceJpaExampleMain.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/main/SpringNamespaceJpaExampleMain.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/main/SpringNamespaceJpaExampleMain.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/main/SpringNamespaceJpaExampleMain.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/main/SpringNamespaceMyBatisExampleMain.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/main/SpringNamespaceMyBatisExampleMain.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/main/SpringNamespaceMyBatisExampleMain.ftl rename to 
examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/main/SpringNamespaceMyBatisExampleMain.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/jdbc/AddressRepository.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/jdbc/AddressRepository.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/jdbc/AddressRepository.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/jdbc/AddressRepository.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/jdbc/OrderItemRepository.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/jdbc/OrderItemRepository.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/jdbc/OrderItemRepository.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/jdbc/OrderItemRepository.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/jdbc/OrderRepository.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/jdbc/OrderRepository.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/jdbc/OrderRepository.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/jdbc/OrderRepository.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/jpa/AddressRepository.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/jpa/AddressRepository.ftl similarity index 100% rename from 
examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/jpa/AddressRepository.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/jpa/AddressRepository.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/jpa/OrderItemRepository.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/jpa/OrderItemRepository.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/jpa/OrderItemRepository.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/jpa/OrderItemRepository.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/jpa/OrderRepository.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/jpa/OrderRepository.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/jpa/OrderRepository.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/jpa/OrderRepository.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/mybatis/AddressRepository.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/mybatis/AddressRepository.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/mybatis/AddressRepository.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/mybatis/AddressRepository.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/mybatis/OrderItemRepository.ftl 
b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/mybatis/OrderItemRepository.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/mybatis/OrderItemRepository.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/mybatis/OrderItemRepository.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/mybatis/OrderRepository.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/mybatis/OrderRepository.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/repository/mybatis/OrderRepository.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/repository/mybatis/OrderRepository.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/service/ExampleService.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/service/ExampleService.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/java/service/ExampleService.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/java/service/ExampleService.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/pom.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/pom.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/pom.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/pom.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/file.ftl 
b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/file.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/file.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/file.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/jbossts-properties.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/jbossts-properties.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/jbossts-properties.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/jbossts-properties.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/logback.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/logback.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/logback.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/logback.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/mappers/AddressMapper.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/mappers/AddressMapper.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/mappers/AddressMapper.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/mappers/AddressMapper.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/mappers/OrderItemMapper.ftl 
b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/mappers/OrderItemMapper.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/mappers/OrderItemMapper.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/mappers/OrderItemMapper.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/mappers/OrderMapper.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/mappers/OrderMapper.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/mappers/OrderMapper.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/mappers/OrderMapper.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/properties/application.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/properties/application.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/properties/application.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/properties/application.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/registry.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/registry.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/registry.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/registry.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/seata.ftl 
b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/seata.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/seata.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/seata.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/spi/encryptAlgorithm.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/spi/encryptAlgorithm.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/spi/encryptAlgorithm.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/spi/encryptAlgorithm.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/xml/application.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/xml/application.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/xml/application.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/xml/application.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/config.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/config.ftl similarity index 98% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/config.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/config.ftl index 5de1f4edcd0ec..fc998745fe7cd 100644 --- a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/config.ftl +++ 
b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/config.ftl @@ -52,7 +52,7 @@ rules: <#if feature?contains("shadow")> - <#include "./sql-parse/sql-parse.ftl" /> + <#include "sql-parse/sql-parse.ftl" /> <#if transaction!="local" && transaction!="base-seata"> diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/feature/db-discovery.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/feature/db-discovery.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/feature/db-discovery.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/feature/db-discovery.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/feature/encrypt.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/feature/encrypt.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/feature/encrypt.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/feature/encrypt.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/feature/mask.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/feature/mask.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/feature/mask.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/feature/mask.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/feature/readwrite-splitting.ftl 
b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/feature/readwrite-splitting.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/feature/readwrite-splitting.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/feature/readwrite-splitting.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/feature/shadow.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/feature/shadow.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/feature/shadow.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/feature/shadow.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/feature/sharding.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/feature/sharding.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/feature/sharding.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/feature/sharding.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/mode/cluster-etcd.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/mode/cluster-etcd.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/mode/cluster-etcd.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/mode/cluster-etcd.ftl diff --git 
a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/mode/cluster-zookeeper.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/mode/cluster-zookeeper.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/mode/cluster-zookeeper.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/mode/cluster-zookeeper.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/mode/standalone.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/mode/standalone.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/mode/standalone.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/mode/standalone.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/sql-parse/sql-parse.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/sql-parse/sql-parse.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/sql-parse/sql-parse.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/sql-parse/sql-parse.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/transaction/xa-atomikos.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/transaction/xa-atomikos.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/transaction/xa-atomikos.ftl rename to 
examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/transaction/xa-atomikos.ftl diff --git a/examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/transaction/xa-narayana.ftl b/examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/transaction/xa-narayana.ftl similarity index 100% rename from examples/shardingsphere-example-generator/src/main/resources/template/jdbc/resources/yaml/transaction/xa-narayana.ftl rename to examples/shardingsphere-jdbc-example-generator/src/main/resources/template/resources/yaml/transaction/xa-narayana.ftl diff --git a/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/pom.xml b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/pom.xml index 46bc560198d55..03c1ebbca8de0 100644 --- a/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/pom.xml +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/pom.xml @@ -29,12 +29,6 @@ ${project.artifactId} - - org.apache.shardingsphere.example - example-spring-mybatis - ${project.parent.version} - - org.mybatis.spring.boot mybatis-spring-boot-starter diff --git a/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/ProxySpringBootStarterExample.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/ProxySpringBootStarterExample.java index 72bf8ae9280a1..cda9bb6728676 100644 --- a/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/ProxySpringBootStarterExample.java +++ 
b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/ProxySpringBootStarterExample.java @@ -17,8 +17,7 @@ package org.apache.shardingsphere.example.proxy.spring.boot.mybatis; -import org.apache.shardingsphere.example.core.api.service.ExampleService; -import org.mybatis.spring.annotation.MapperScan; +import org.apache.shardingsphere.example.proxy.spring.boot.mybatis.service.OrderService; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.context.ConfigurableApplicationContext; @@ -35,7 +34,6 @@ * 2. Please make sure ShardingSphere-Proxy is running before you run this example. */ @ComponentScan("org.apache.shardingsphere.example") -@MapperScan(basePackages = "org.apache.shardingsphere.example.core.mybatis.repository") @SpringBootApplication public class ProxySpringBootStarterExample { @@ -46,20 +44,16 @@ public static void main(final String[] args) throws SQLException { } private static void process(final ConfigurableApplicationContext applicationContext) throws SQLException { - ExampleService exampleService = getExampleService(applicationContext); - exampleService.initEnvironment(); - exampleService.processSuccess(); + OrderService orderService = applicationContext.getBean(OrderService.class); + orderService.initEnvironment(); + orderService.processSuccess(); try { - exampleService.processFailure(); + orderService.processFailure(); } catch (final Exception ex) { System.out.println(ex.getMessage()); - exampleService.printData(); + orderService.printData(); } finally { - exampleService.cleanEnvironment(); + orderService.cleanEnvironment(); } } - - private static ExampleService getExampleService(final ConfigurableApplicationContext applicationContext) { - return applicationContext.getBean(ExampleService.class); - } } diff --git 
a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/AddressEntity.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/entity/Address.java similarity index 60% rename from examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/AddressEntity.java rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/entity/Address.java index c84e06a5c4a5d..b6ef6253c55ad 100644 --- a/examples/example-core/example-spring-jpa/src/main/java/org/apache/shardingsphere/example/core/jpa/entity/AddressEntity.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/entity/Address.java @@ -15,31 +15,31 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.example.core.jpa.entity; +package org.apache.shardingsphere.example.proxy.spring.boot.mybatis.entity; -import org.apache.shardingsphere.example.core.api.entity.Address; +import java.io.Serializable; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.Id; -import javax.persistence.Table; - -@Entity -@Table(name = "t_address") -public final class AddressEntity extends Address { +public class Address implements Serializable { + + private static final long serialVersionUID = 661434701950670670L; - private static final long serialVersionUID = 4743102234543827855L; + private Long addressId; + + private String addressName; - @Id - @Column(name = "address_id") - @Override public Long getAddressId() { - return super.getAddressId(); + return addressId; + } + + public void setAddressId(final Long addressId) { + this.addressId = addressId; } - @Column(name = "address_name") - @Override public String getAddressName() { - return super.getAddressName(); + return addressName; + } + + public void setAddressName(final String addressName) { + this.addressName = addressName; } } diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/Account.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/entity/Order.java similarity index 65% rename from examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/Account.java rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/entity/Order.java index 70bc19af4c978..5b8175b56326d 100644 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/Account.java +++ 
b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/entity/Order.java @@ -15,26 +15,28 @@ * limitations under the License. */ -package org.apache.shardingsphere.example.core.api.entity; +package org.apache.shardingsphere.example.proxy.spring.boot.mybatis.entity; import java.io.Serializable; -public class Account implements Serializable { +public class Order implements Serializable { - private static final long serialVersionUID = -5889545274302226912L; + private static final long serialVersionUID = 661434701950670670L; - private long accountId; + private long orderId; private int userId; + private long addressId; + private String status; - public long getAccountId() { - return accountId; + public long getOrderId() { + return orderId; } - public void setAccountId(final long accountId) { - this.accountId = accountId; + public void setOrderId(final long orderId) { + this.orderId = orderId; } public int getUserId() { @@ -53,8 +55,16 @@ public void setStatus(final String status) { this.status = status; } + public long getAddressId() { + return addressId; + } + + public void setAddressId(final long addressId) { + this.addressId = addressId; + } + @Override public String toString() { - return String.format("account_id: %s, user_id: %s, status: %s", accountId, userId, status); + return String.format("order_id: %s, user_id: %s, address_id: %s, status: %s", orderId, userId, addressId, status); } } diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/User.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/entity/OrderItem.java similarity index 60% rename from examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/User.java rename to 
examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/entity/OrderItem.java index 9ea69bc95057e..fed87a6b63669 100644 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/User.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/entity/OrderItem.java @@ -15,46 +15,56 @@ * limitations under the License. */ -package org.apache.shardingsphere.example.core.api.entity; +package org.apache.shardingsphere.example.proxy.spring.boot.mybatis.entity; import java.io.Serializable; -public class User implements Serializable { +public class OrderItem implements Serializable { private static final long serialVersionUID = 263434701950670170L; + private long orderItemId; + + private long orderId; + private int userId; - private String username; + private String status; - private String pwd; + public long getOrderItemId() { + return orderItemId; + } - public int getUserId() { - return userId; + public void setOrderItemId(final long orderItemId) { + this.orderItemId = orderItemId; } - public void setUserId(final int userId) { - this.userId = userId; + public long getOrderId() { + return orderId; } - public String getUsername() { - return username; + public void setOrderId(final long orderId) { + this.orderId = orderId; } - public void setUsername(final String username) { - this.username = username; + public int getUserId() { + return userId; + } + + public void setUserId(final int userId) { + this.userId = userId; } - public String getPwd() { - return pwd; + public String getStatus() { + return status; } - public void setPwd(final String pwd) { - this.pwd = pwd; + public void setStatus(final String status) { + this.status = status; } @Override public String toString() { - return String.format("user_id: %d, username: %s, pwd: 
%s", userId, username, pwd); + return String.format("order_item_id:%s, order_id: %s, user_id: %s, status: %s", orderItemId, orderId, userId, status); } } diff --git a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/RangeOrderItemRepositoryImpl.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/repository/AddressRepository.java similarity index 59% rename from examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/RangeOrderItemRepositoryImpl.java rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/repository/AddressRepository.java index f5c69166a525f..fc15d659589e3 100644 --- a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/RangeOrderItemRepositoryImpl.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/repository/AddressRepository.java @@ -15,23 +15,26 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.example.core.jdbc.repository; +package org.apache.shardingsphere.example.proxy.spring.boot.mybatis.repository; -import org.apache.shardingsphere.example.core.api.entity.OrderItem; +import org.apache.ibatis.annotations.Mapper; +import org.apache.shardingsphere.example.proxy.spring.boot.mybatis.entity.Address; -import javax.sql.DataSource; import java.sql.SQLException; import java.util.List; -public final class RangeOrderItemRepositoryImpl extends OrderItemRepositoryImpl { +@Mapper +public interface AddressRepository { - public RangeOrderItemRepositoryImpl(final DataSource dataSource) { - super(dataSource); - } + void createTableIfNotExists() throws SQLException; - @Override - public List selectAll() throws SQLException { - String sql = "SELECT i.* FROM t_order o, t_order_item i WHERE o.order_id = i.order_id AND o.user_id BETWEEN 1 AND 5"; - return getOrderItems(sql); - } + void dropTable() throws SQLException; + + void truncateTable() throws SQLException; + + void insert(Address address) throws SQLException; + + void delete(Long primaryKey) throws SQLException; + + List
selectAll() throws SQLException; } diff --git a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/repository/MybatisAddressRepository.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/repository/OrderItemRepository.java similarity index 55% rename from examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/repository/MybatisAddressRepository.java rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/repository/OrderItemRepository.java index 749a33eea5e42..09709248a84c5 100644 --- a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/repository/MybatisAddressRepository.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/repository/OrderItemRepository.java @@ -15,11 +15,27 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.example.core.mybatis.repository; +package org.apache.shardingsphere.example.proxy.spring.boot.mybatis.repository; import org.apache.ibatis.annotations.Mapper; -import org.apache.shardingsphere.example.core.api.repository.AddressRepository; +import org.apache.shardingsphere.example.proxy.spring.boot.mybatis.entity.Address; +import org.apache.shardingsphere.example.proxy.spring.boot.mybatis.entity.OrderItem; + +import java.sql.SQLException; +import java.util.List; @Mapper -public interface MybatisAddressRepository extends AddressRepository { +public interface OrderItemRepository { + + void createTableIfNotExists() throws SQLException; + + void dropTable() throws SQLException; + + void truncateTable() throws SQLException; + + void insert(OrderItem orderItem) throws SQLException; + + void delete(Long primaryKey) throws SQLException; + + List
selectAll() throws SQLException; } diff --git a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/repository/MybatisOrderItemRepository.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/repository/OrderRepository.java similarity index 56% rename from examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/repository/MybatisOrderItemRepository.java rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/repository/OrderRepository.java index d070bd9cdd158..08528268ed2d4 100644 --- a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/repository/MybatisOrderItemRepository.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/repository/OrderRepository.java @@ -15,11 +15,27 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.example.core.mybatis.repository; +package org.apache.shardingsphere.example.proxy.spring.boot.mybatis.repository; -import org.apache.shardingsphere.example.core.api.repository.OrderItemRepository; import org.apache.ibatis.annotations.Mapper; +import org.apache.shardingsphere.example.proxy.spring.boot.mybatis.entity.Address; +import org.apache.shardingsphere.example.proxy.spring.boot.mybatis.entity.Order; + +import java.sql.SQLException; +import java.util.List; @Mapper -public interface MybatisOrderItemRepository extends OrderItemRepository { +public interface OrderRepository { + + void createTableIfNotExists() throws SQLException; + + void dropTable() throws SQLException; + + void truncateTable() throws SQLException; + + void insert(Order order) throws SQLException; + + void delete(Long primaryKey) throws SQLException; + + List
selectAll() throws SQLException; } diff --git a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/service/OrderServiceImpl.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/service/OrderService.java similarity index 69% rename from examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/service/OrderServiceImpl.java rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/service/OrderService.java index 6320eac6ef8bd..752270029a504 100644 --- a/examples/example-core/example-spring-mybatis/src/main/java/org/apache/shardingsphere/example/core/mybatis/service/OrderServiceImpl.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/java/org/apache/shardingsphere/example/proxy/spring/boot/mybatis/service/OrderService.java @@ -1,30 +1,11 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ +package org.apache.shardingsphere.example.proxy.spring.boot.mybatis.service; -package org.apache.shardingsphere.example.core.mybatis.service; - -import org.apache.shardingsphere.example.core.api.entity.Address; -import org.apache.shardingsphere.example.core.api.entity.Order; -import org.apache.shardingsphere.example.core.api.entity.OrderItem; -import org.apache.shardingsphere.example.core.api.repository.AddressRepository; -import org.apache.shardingsphere.example.core.api.repository.OrderItemRepository; -import org.apache.shardingsphere.example.core.api.repository.OrderRepository; -import org.apache.shardingsphere.example.core.api.service.ExampleService; -import org.springframework.context.annotation.Primary; +import org.apache.shardingsphere.example.proxy.spring.boot.mybatis.entity.Address; +import org.apache.shardingsphere.example.proxy.spring.boot.mybatis.entity.Order; +import org.apache.shardingsphere.example.proxy.spring.boot.mybatis.entity.OrderItem; +import org.apache.shardingsphere.example.proxy.spring.boot.mybatis.repository.AddressRepository; +import org.apache.shardingsphere.example.proxy.spring.boot.mybatis.repository.OrderItemRepository; +import org.apache.shardingsphere.example.proxy.spring.boot.mybatis.repository.OrderRepository; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -34,8 +15,7 @@ import java.util.List; @Service -@Primary -public class OrderServiceImpl implements ExampleService { +public class OrderService { @Resource private OrderRepository orderRepository; @@ -45,8 +25,7 @@ public class OrderServiceImpl implements ExampleService { @Resource private AddressRepository addressRepository; - - @Override + public void initEnvironment() throws SQLException { orderRepository.createTableIfNotExists(); orderItemRepository.createTableIfNotExists(); @@ -65,14 +44,13 @@ private void initAddressTable() throws SQLException { addressRepository.insert(entity); } } - - @Override + 
public void cleanEnvironment() throws SQLException { orderRepository.dropTable(); orderItemRepository.dropTable(); + addressRepository.dropTable(); } - @Override @Transactional public void processSuccess() throws SQLException { System.out.println("-------------- Process Success Begin ---------------"); @@ -83,7 +61,6 @@ public void processSuccess() throws SQLException { System.out.println("-------------- Process Success Finish --------------"); } - @Override @Transactional public void processFailure() throws SQLException { System.out.println("-------------- Process Failure Begin ---------------"); @@ -91,7 +68,7 @@ public void processFailure() throws SQLException { System.out.println("-------------- Process Failure Finish --------------"); throw new RuntimeException("Exception occur for transaction test."); } - + private List insertData() throws SQLException { System.out.println("---------------------------- Insert Data ----------------------------"); List result = new ArrayList<>(10); @@ -110,7 +87,7 @@ private List insertData() throws SQLException { } return result; } - + private void deleteData(final List orderIds) throws SQLException { System.out.println("---------------------------- Delete Data ----------------------------"); for (Long each : orderIds) { @@ -119,7 +96,6 @@ private void deleteData(final List orderIds) throws SQLException { } } - @Override public void printData() throws SQLException { System.out.println("---------------------------- Print Order Data -----------------------"); for (Object each : orderRepository.selectAll()) { @@ -130,4 +106,4 @@ public void printData() throws SQLException { System.out.println(each); } } -} +} \ No newline at end of file diff --git a/examples/example-core/example-spring-mybatis/src/main/resources/META-INF/mappers/AddressMapper.xml b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/resources/META-INF/mappers/AddressMapper.xml similarity index 92% rename from 
examples/example-core/example-spring-mybatis/src/main/resources/META-INF/mappers/AddressMapper.xml rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/resources/META-INF/mappers/AddressMapper.xml index dbb2dc1c662f3..e08a4c9f87023 100644 --- a/examples/example-core/example-spring-mybatis/src/main/resources/META-INF/mappers/AddressMapper.xml +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/resources/META-INF/mappers/AddressMapper.xml @@ -17,8 +17,8 @@ --> - - + + diff --git a/examples/example-core/example-spring-mybatis/src/main/resources/META-INF/mappers/OrderItemMapper.xml b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/resources/META-INF/mappers/OrderItemMapper.xml similarity index 93% rename from examples/example-core/example-spring-mybatis/src/main/resources/META-INF/mappers/OrderItemMapper.xml rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/resources/META-INF/mappers/OrderItemMapper.xml index c96a233b9ec84..23890722aca7e 100644 --- a/examples/example-core/example-spring-mybatis/src/main/resources/META-INF/mappers/OrderItemMapper.xml +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/resources/META-INF/mappers/OrderItemMapper.xml @@ -17,8 +17,8 @@ --> - - + + diff --git a/examples/example-core/example-spring-mybatis/src/main/resources/META-INF/mappers/OrderMapper.xml b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/resources/META-INF/mappers/OrderMapper.xml similarity index 93% rename from examples/example-core/example-spring-mybatis/src/main/resources/META-INF/mappers/OrderMapper.xml rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/resources/META-INF/mappers/OrderMapper.xml index 2f8aba8b2c339..df15e5c820f44 100644 --- 
a/examples/example-core/example-spring-mybatis/src/main/resources/META-INF/mappers/OrderMapper.xml +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/resources/META-INF/mappers/OrderMapper.xml @@ -17,8 +17,8 @@ --> - - + + diff --git a/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/resources/conf/server.yaml b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/resources/conf/server.yaml index f0c06271b7098..852d0d178d085 100644 --- a/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/resources/conf/server.yaml +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-boot-mybatis-example/src/main/resources/conf/server.yaml @@ -16,11 +16,10 @@ # ###################################################################################################### -# +# # If you want to configure governance, authorization and proxy properties, please refer to this file. 
-# +# ###################################################################################################### -# #mode: # type: Cluster diff --git a/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/pom.xml b/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/pom.xml index 748fab8b64e4f..f50b974abf9df 100644 --- a/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/pom.xml +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/pom.xml @@ -29,12 +29,6 @@ ${project.artifactId} - - org.apache.shardingsphere.example - example-raw-jdbc - ${project.parent.version} - - org.apache.shardingsphere shardingsphere-jdbc-core diff --git a/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/DistSQLFeatureExample.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/DistSQLFeatureExample.java index 55688a8b48ab6..ceaed8d86aecf 100644 --- a/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/DistSQLFeatureExample.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/DistSQLFeatureExample.java @@ -42,12 +42,14 @@ private static void execute(final DataSource dataSource) { DistSQLExecutor featureExecutor = selectFeature().getExecutor(); featureExecutor.init(statement); featureExecutor.execute(); - } catch (Exception e) { - log.error(e.getMessage()); + // CHECKSTYLE:OFF + } catch (final Exception ex) { + // CHECKSTYLE:ON + log.error(ex.getMessage()); } } - private static FeatureType selectFeature(){ + private static FeatureType selectFeature() { // return FeatureType.RESOURCE; // return FeatureType.SHADOW; // return FeatureType.ENCRYPT; diff 
--git a/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/encrypt/EncryptExecutor.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/encrypt/EncryptExecutor.java index 6bb35bc232d1c..0f663da4bc0a8 100644 --- a/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/encrypt/EncryptExecutor.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/encrypt/EncryptExecutor.java @@ -17,9 +17,9 @@ package org.apache.shardingsphere.example.proxy.distsql.feature.encrypt; -import com.google.gson.Gson; import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.example.proxy.distsql.feature.AbstractFeatureExecutor; +import org.apache.shardingsphere.infra.util.json.JsonUtils; import java.sql.ResultSet; import java.sql.SQLException; @@ -58,7 +58,7 @@ public void init(Statement statement) { } @Override - public void execute() throws SQLException, InterruptedException { + public void execute() throws SQLException { executeUseSchema(); executeShowRule(); executeAddRule(); @@ -72,7 +72,7 @@ public void execute() throws SQLException, InterruptedException { private void executeShowRule() throws SQLException { log.info("show rule..."); ResultSet resultSet = statement.executeQuery(SHOW_RULE); - log.info(new Gson().toJson(getResultData(resultSet))); + log.info(JsonUtils.toJsonString(getResultData(resultSet))); } private void executeAddRule() throws SQLException { diff --git a/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/readwritesplitting/ReadWriteSplittingExecutor.java 
b/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/readwritesplitting/ReadWriteSplittingExecutor.java index 65b65123a50b6..44f39b807731e 100644 --- a/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/readwritesplitting/ReadWriteSplittingExecutor.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/readwritesplitting/ReadWriteSplittingExecutor.java @@ -17,9 +17,9 @@ package org.apache.shardingsphere.example.proxy.distsql.feature.readwritesplitting; -import com.google.gson.Gson; import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.example.proxy.distsql.feature.AbstractFeatureExecutor; +import org.apache.shardingsphere.infra.util.json.JsonUtils; import java.sql.ResultSet; import java.sql.SQLException; @@ -53,7 +53,7 @@ public void init(Statement statement) { } @Override - public void execute() throws SQLException, InterruptedException { + public void execute() throws SQLException { executeUseSchema(); executeShowRule(); executeAddRule(); @@ -67,7 +67,7 @@ public void execute() throws SQLException, InterruptedException { private void executeShowRule() throws SQLException { log.info("show rule..."); ResultSet resultSet = statement.executeQuery(SHOW_RULE); - log.info(new Gson().toJson(getResultData(resultSet))); + log.info(JsonUtils.toJsonString(getResultData(resultSet))); } private void executeAddRule() throws SQLException { diff --git a/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/resource/ResourceExecutor.java 
b/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/resource/ResourceExecutor.java index 7cfa763c27b3a..38cac8f26356f 100644 --- a/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/resource/ResourceExecutor.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/resource/ResourceExecutor.java @@ -17,9 +17,9 @@ package org.apache.shardingsphere.example.proxy.distsql.feature.resource; -import com.google.gson.Gson; import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.example.proxy.distsql.feature.AbstractFeatureExecutor; +import org.apache.shardingsphere.infra.util.json.JsonUtils; import java.sql.ResultSet; import java.sql.SQLException; @@ -61,7 +61,7 @@ public void init(Statement statement) { } @Override - public void execute() throws SQLException, InterruptedException { + public void execute() throws SQLException { executeUseSchema(); executeShowResources(); executeAddResource(); @@ -75,7 +75,7 @@ public void execute() throws SQLException, InterruptedException { private void executeShowResources() throws SQLException { log.info("show schema resources..."); ResultSet resultSet = statement.executeQuery(SHOW_RESOURCE); - log.info(new Gson().toJson(getResultData(resultSet))); + log.info(JsonUtils.toJsonString(getResultData(resultSet))); } private void executeAddResource() throws SQLException { diff --git a/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/shadow/ShadowExecutor.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/shadow/ShadowExecutor.java index 
8b4d0fd67b97b..f501717f1ab19 100644 --- a/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/shadow/ShadowExecutor.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/shadow/ShadowExecutor.java @@ -17,9 +17,9 @@ package org.apache.shardingsphere.example.proxy.distsql.feature.shadow; -import com.google.gson.Gson; import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.example.proxy.distsql.feature.AbstractFeatureExecutor; +import org.apache.shardingsphere.infra.util.json.JsonUtils; import java.sql.ResultSet; import java.sql.SQLException; @@ -59,7 +59,7 @@ public void init(Statement statement) { } @Override - public void execute() throws SQLException, InterruptedException { + public void execute() throws SQLException { executeUseSchema(); executeShowRule(); executeAddRule(); @@ -80,19 +80,19 @@ public void execute() throws SQLException, InterruptedException { private void executeShowRule() throws SQLException { log.info("show rule..."); ResultSet resultSet = statement.executeQuery(SHOW_RULE); - log.info(new Gson().toJson(getResultData(resultSet))); + log.info(JsonUtils.toJsonString(getResultData(resultSet))); } private void executeShowTableRule() throws SQLException { log.info("show table rule..."); ResultSet resultSet = statement.executeQuery(SHOW_TABLE_RULE); - log.info(new Gson().toJson(getResultData(resultSet))); + log.info(JsonUtils.toJsonString(getResultData(resultSet))); } private void executeShowAlgorithm() throws SQLException { log.info("show algorithm..."); ResultSet resultSet = statement.executeQuery(SHOW_ALGORITHM); - log.info(new Gson().toJson(getResultData(resultSet))); + log.info(JsonUtils.toJsonString(getResultData(resultSet))); } private void executeAddRule() throws SQLException { diff --git 
a/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/sharding/ShardingExecutor.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/sharding/ShardingExecutor.java index ef09dec8cb8e0..0fa6284fd4d3a 100644 --- a/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/sharding/ShardingExecutor.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-distsql-example/src/main/java/org/apache/shardingsphere/example/proxy/distsql/feature/sharding/ShardingExecutor.java @@ -17,9 +17,9 @@ package org.apache.shardingsphere.example.proxy.distsql.feature.sharding; -import com.google.gson.Gson; import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.example.proxy.distsql.feature.AbstractFeatureExecutor; +import org.apache.shardingsphere.infra.util.json.JsonUtils; import java.sql.ResultSet; import java.sql.SQLException; @@ -63,7 +63,7 @@ public void init(Statement statement) { } @Override - public void execute() throws SQLException, InterruptedException { + public void execute() throws SQLException { executeUseSchema(); executeShowRule(); executeAddRule(); @@ -84,13 +84,13 @@ public void execute() throws SQLException, InterruptedException { private void executeShowRule() throws SQLException { log.info("show rule..."); ResultSet resultSet = statement.executeQuery(SHOW_RULE); - log.info(new Gson().toJson(getResultData(resultSet))); + log.info(JsonUtils.toJsonString(getResultData(resultSet))); } private void executeShowAlgorithm() throws SQLException { log.info("show algorithm..."); ResultSet resultSet = statement.executeQuery(SHOW_ALGORITHM); - log.info(new Gson().toJson(getResultData(resultSet))); + log.info(JsonUtils.toJsonString(getResultData(resultSet))); } private void 
executeAddRule() throws SQLException { @@ -116,7 +116,7 @@ private void executeDropAlgorithm() throws SQLException { private void executeShowKeyGenerators() throws SQLException { log.info("show sharding key generators..."); ResultSet resultSet = statement.executeQuery(SHOW_KEY_GENERATORS); - log.info(new Gson().toJson(getResultData(resultSet))); + log.info(JsonUtils.toJsonString(getResultData(resultSet))); } private void executeDropKeyGenerator() throws SQLException { diff --git a/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/pom.xml b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/pom.xml index df27c175a44fa..8a98097c709bf 100644 --- a/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/pom.xml +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/pom.xml @@ -29,12 +29,6 @@ ${project.artifactId} - - org.apache.shardingsphere.example - example-raw-jdbc - ${project.parent.version} - - org.apache.shardingsphere shardingsphere-jdbc-core diff --git a/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/ProxyHintExample.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/ProxyHintExample.java index 0e31206e75666..38b97ab8c8242 100644 --- a/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/ProxyHintExample.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/ProxyHintExample.java @@ -17,9 +17,8 @@ package org.apache.shardingsphere.example.proxy.hint; -import org.apache.shardingsphere.example.core.api.service.ExampleService; -import org.apache.shardingsphere.example.core.jdbc.service.OrderServiceImpl; import 
org.apache.shardingsphere.example.proxy.hint.factory.YamlDataSourceFactory; +import org.apache.shardingsphere.example.proxy.hint.service.OrderService; import javax.sql.DataSource; import java.io.File; @@ -33,10 +32,10 @@ public final class ProxyHintExample { public static void main(final String[] args) throws SQLException, IOException { DataSource dataSource = getDataSource(); - ExampleService exampleService = getExampleService(dataSource); - exampleService.initEnvironment(); + OrderService orderService = new OrderService(dataSource); + orderService.initEnvironment(); processWithHintValue(dataSource); - exampleService.cleanEnvironment(); + orderService.cleanEnvironment(); } private static DataSource getDataSource() throws IOException { @@ -47,10 +46,6 @@ private static File getFile(final String configFile) { return new File(ProxyHintExample.class.getResource(configFile).getFile()); } - private static ExampleService getExampleService(final DataSource dataSource) { - return new OrderServiceImpl(dataSource); - } - private static void processWithHintValue(final DataSource dataSource) throws SQLException { try (Connection connection = dataSource.getConnection(); Statement statement = connection.createStatement()) { diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/Address.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/entity/Address.java similarity index 95% rename from examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/Address.java rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/entity/Address.java index fca3e5fdc24e6..43c659e452a21 100644 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/Address.java +++ 
b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/entity/Address.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.shardingsphere.example.core.api.entity; +package org.apache.shardingsphere.example.proxy.hint.entity; import java.io.Serializable; diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/Order.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/entity/Order.java similarity index 96% rename from examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/Order.java rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/entity/Order.java index f171ca33a7ad3..f43f40375a8af 100644 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/Order.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/entity/Order.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.example.core.api.entity; +package org.apache.shardingsphere.example.proxy.hint.entity; import java.io.Serializable; diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/OrderItem.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/entity/OrderItem.java similarity index 96% rename from examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/OrderItem.java rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/entity/OrderItem.java index ea21cfc7cbf82..1703b867b7cf9 100644 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/OrderItem.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/entity/OrderItem.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.example.core.api.entity; +package org.apache.shardingsphere.example.proxy.hint.entity; import java.io.Serializable; diff --git a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/AddressRepositoryImpl.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/repository/AddressRepository.java similarity index 90% rename from examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/AddressRepositoryImpl.java rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/repository/AddressRepository.java index f35b7c384270a..f383dcf44f56f 100644 --- a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/AddressRepositoryImpl.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/repository/AddressRepository.java @@ -15,10 +15,9 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.example.core.jdbc.repository; +package org.apache.shardingsphere.example.proxy.hint.repository; -import org.apache.shardingsphere.example.core.api.entity.Address; -import org.apache.shardingsphere.example.core.api.repository.AddressRepository; +import org.apache.shardingsphere.example.proxy.hint.entity.Address; import javax.sql.DataSource; import java.sql.Connection; @@ -29,15 +28,14 @@ import java.util.LinkedList; import java.util.List; -public final class AddressRepositoryImpl implements AddressRepository { +public final class AddressRepository { private final DataSource dataSource; - public AddressRepositoryImpl(final DataSource dataSource) { + public AddressRepository(final DataSource dataSource) { this.dataSource = dataSource; } - @Override public void createTableIfNotExists() throws SQLException { String sql = "CREATE TABLE IF NOT EXISTS t_address " + "(address_id BIGINT NOT NULL, address_name VARCHAR(100) NOT NULL, PRIMARY KEY (address_id))"; @@ -47,7 +45,6 @@ public void createTableIfNotExists() throws SQLException { } } - @Override public void dropTable() throws SQLException { String sql = "DROP TABLE t_address"; try (Connection connection = dataSource.getConnection(); @@ -56,7 +53,6 @@ public void dropTable() throws SQLException { } } - @Override public void truncateTable() throws SQLException { String sql = "TRUNCATE TABLE t_address"; try (Connection connection = dataSource.getConnection(); @@ -65,7 +61,6 @@ public void truncateTable() throws SQLException { } } - @Override public Long insert(final Address entity) throws SQLException { String sql = "INSERT INTO t_address (address_id, address_name) VALUES (?, ?)"; try (Connection connection = dataSource.getConnection(); @@ -77,7 +72,6 @@ public Long insert(final Address entity) throws SQLException { return entity.getAddressId(); } - @Override public void delete(final Long primaryKey) throws SQLException { String sql = "DELETE FROM t_address WHERE address_id=?"; try 
(Connection connection = dataSource.getConnection(); @@ -87,7 +81,6 @@ public void delete(final Long primaryKey) throws SQLException { } } - @Override public List
selectAll() throws SQLException { String sql = "SELECT * FROM t_address"; return getAddress(sql); diff --git a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/OrderItemRepositoryImpl.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/repository/OrderItemRepository.java similarity index 91% rename from examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/OrderItemRepositoryImpl.java rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/repository/OrderItemRepository.java index a4c42713cba84..4df3f2d7698d7 100644 --- a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/OrderItemRepositoryImpl.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/repository/OrderItemRepository.java @@ -15,10 +15,10 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.example.core.jdbc.repository; +package org.apache.shardingsphere.example.proxy.hint.repository; -import org.apache.shardingsphere.example.core.api.repository.OrderItemRepository; -import org.apache.shardingsphere.example.core.api.entity.OrderItem; + +import org.apache.shardingsphere.example.proxy.hint.entity.OrderItem; import javax.sql.DataSource; import java.sql.Connection; @@ -29,15 +29,14 @@ import java.util.LinkedList; import java.util.List; -public class OrderItemRepositoryImpl implements OrderItemRepository { +public class OrderItemRepository { private final DataSource dataSource; - public OrderItemRepositoryImpl(final DataSource dataSource) { + public OrderItemRepository(final DataSource dataSource) { this.dataSource = dataSource; } - @Override public void createTableIfNotExists() throws SQLException { String sql = "CREATE TABLE IF NOT EXISTS t_order_item " + "(order_item_id BIGINT NOT NULL AUTO_INCREMENT, order_id BIGINT NOT NULL, user_id INT NOT NULL, status VARCHAR(50), PRIMARY KEY (order_item_id))"; @@ -47,7 +46,6 @@ public void createTableIfNotExists() throws SQLException { } } - @Override public void dropTable() throws SQLException { String sql = "DROP TABLE t_order_item"; try (Connection connection = dataSource.getConnection(); @@ -56,7 +54,6 @@ public void dropTable() throws SQLException { } } - @Override public void truncateTable() throws SQLException { String sql = "TRUNCATE TABLE t_order_item"; try (Connection connection = dataSource.getConnection(); @@ -65,7 +62,6 @@ public void truncateTable() throws SQLException { } } - @Override public Long insert(final OrderItem orderItem) throws SQLException { String sql = "INSERT INTO t_order_item (order_id, user_id, status) VALUES (?, ?, ?)"; try (Connection connection = dataSource.getConnection(); @@ -83,7 +79,6 @@ public Long insert(final OrderItem orderItem) throws SQLException { return orderItem.getOrderItemId(); } - @Override public void delete(final Long 
orderItemId) throws SQLException { String sql = "DELETE FROM t_order_item WHERE order_id=?"; try (Connection connection = dataSource.getConnection(); @@ -93,7 +88,6 @@ public void delete(final Long orderItemId) throws SQLException { } } - @Override public List selectAll() throws SQLException { // TODO Associated query with encrypt may query and decrypt failed. see https://github.com/apache/shardingsphere/issues/3352 // String sql = "SELECT i.* FROM t_order o, t_order_item i WHERE o.order_id = i.order_id"; diff --git a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/OrderRepositoryImpl.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/repository/OrderRepository.java similarity index 91% rename from examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/OrderRepositoryImpl.java rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/repository/OrderRepository.java index aa4ea5263aad8..96e2300b6c69b 100644 --- a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/OrderRepositoryImpl.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/repository/OrderRepository.java @@ -15,10 +15,10 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.example.core.jdbc.repository; +package org.apache.shardingsphere.example.proxy.hint.repository; -import org.apache.shardingsphere.example.core.api.entity.Order; -import org.apache.shardingsphere.example.core.api.repository.OrderRepository; + +import org.apache.shardingsphere.example.proxy.hint.entity.Order; import javax.sql.DataSource; import java.sql.Connection; @@ -29,15 +29,14 @@ import java.util.LinkedList; import java.util.List; -public class OrderRepositoryImpl implements OrderRepository { +public class OrderRepository { private final DataSource dataSource; - public OrderRepositoryImpl(final DataSource dataSource) { + public OrderRepository(final DataSource dataSource) { this.dataSource = dataSource; } - @Override public void createTableIfNotExists() throws SQLException { String sql = "CREATE TABLE IF NOT EXISTS t_order (order_id BIGINT NOT NULL AUTO_INCREMENT, user_id INT NOT NULL, address_id BIGINT NOT NULL, status VARCHAR(50), PRIMARY KEY (order_id))"; try (Connection connection = dataSource.getConnection(); @@ -46,7 +45,6 @@ public void createTableIfNotExists() throws SQLException { } } - @Override public void dropTable() throws SQLException { String sql = "DROP TABLE t_order"; try (Connection connection = dataSource.getConnection(); @@ -55,7 +53,6 @@ public void dropTable() throws SQLException { } } - @Override public void truncateTable() throws SQLException { String sql = "TRUNCATE TABLE t_order"; try (Connection connection = dataSource.getConnection(); @@ -64,7 +61,6 @@ public void truncateTable() throws SQLException { } } - @Override public Long insert(final Order order) throws SQLException { String sql = "INSERT INTO t_order (user_id, address_id, status) VALUES (?, ?, ?)"; try (Connection connection = dataSource.getConnection(); @@ -82,7 +78,6 @@ public Long insert(final Order order) throws SQLException { return order.getOrderId(); } - @Override public void delete(final Long orderId) throws SQLException { String 
sql = "DELETE FROM t_order WHERE order_id=?"; try (Connection connection = dataSource.getConnection(); @@ -92,7 +87,6 @@ public void delete(final Long orderId) throws SQLException { } } - @Override public List selectAll() throws SQLException { String sql = "SELECT * FROM t_order"; return getOrders(sql); diff --git a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/service/OrderServiceImpl.java b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/service/OrderService.java similarity index 77% rename from examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/service/OrderServiceImpl.java rename to examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/service/OrderService.java index 928b09d9197f8..c0743209a297f 100644 --- a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/service/OrderServiceImpl.java +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/java/org/apache/shardingsphere/example/proxy/hint/service/OrderService.java @@ -15,25 +15,22 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.example.core.jdbc.service; +package org.apache.shardingsphere.example.proxy.hint.service; -import org.apache.shardingsphere.example.core.api.entity.Address; -import org.apache.shardingsphere.example.core.api.entity.Order; -import org.apache.shardingsphere.example.core.api.entity.OrderItem; -import org.apache.shardingsphere.example.core.api.repository.AddressRepository; -import org.apache.shardingsphere.example.core.api.repository.OrderItemRepository; -import org.apache.shardingsphere.example.core.api.repository.OrderRepository; -import org.apache.shardingsphere.example.core.api.service.ExampleService; -import org.apache.shardingsphere.example.core.jdbc.repository.AddressRepositoryImpl; -import org.apache.shardingsphere.example.core.jdbc.repository.OrderItemRepositoryImpl; -import org.apache.shardingsphere.example.core.jdbc.repository.OrderRepositoryImpl; + +import org.apache.shardingsphere.example.proxy.hint.entity.Address; +import org.apache.shardingsphere.example.proxy.hint.entity.Order; +import org.apache.shardingsphere.example.proxy.hint.entity.OrderItem; +import org.apache.shardingsphere.example.proxy.hint.repository.AddressRepository; +import org.apache.shardingsphere.example.proxy.hint.repository.OrderItemRepository; +import org.apache.shardingsphere.example.proxy.hint.repository.OrderRepository; import javax.sql.DataSource; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; -public final class OrderServiceImpl implements ExampleService { +public final class OrderService { private final OrderRepository orderRepository; @@ -41,19 +38,18 @@ public final class OrderServiceImpl implements ExampleService { private final AddressRepository addressRepository; - public OrderServiceImpl(final DataSource dataSource) { - orderRepository = new OrderRepositoryImpl(dataSource); - orderItemRepository = new OrderItemRepositoryImpl(dataSource); - addressRepository = new AddressRepositoryImpl(dataSource); + 
public OrderService(final DataSource dataSource) { + orderRepository = new OrderRepository(dataSource); + orderItemRepository = new OrderItemRepository(dataSource); + addressRepository = new AddressRepository(dataSource); } - public OrderServiceImpl(final OrderRepository orderRepository, final OrderItemRepository orderItemRepository, final AddressRepository addressRepository) { + public OrderService(final OrderRepository orderRepository, final OrderItemRepository orderItemRepository, final AddressRepository addressRepository) { this.orderRepository = orderRepository; this.orderItemRepository = orderItemRepository; this.addressRepository = addressRepository; } - @Override public void initEnvironment() throws SQLException { orderRepository.createTableIfNotExists(); orderItemRepository.createTableIfNotExists(); @@ -81,14 +77,12 @@ private void insertAddress(final int i) throws SQLException { addressRepository.insert(address); } - @Override public void cleanEnvironment() throws SQLException { orderRepository.dropTable(); orderItemRepository.dropTable(); addressRepository.dropTable(); } - @Override public void processSuccess() throws SQLException { System.out.println("-------------- Process Success Begin ---------------"); List orderIds = insertData(); @@ -98,7 +92,6 @@ public void processSuccess() throws SQLException { System.out.println("-------------- Process Success Finish --------------"); } - @Override public void processFailure() throws SQLException { System.out.println("-------------- Process Failure Begin ---------------"); insertData(); @@ -142,7 +135,6 @@ private void deleteData(final List orderIds) throws SQLException { } } - @Override public void printData() throws SQLException { System.out.println("---------------------------- Print Order Data -----------------------"); for (Object each : orderRepository.selectAll()) { diff --git a/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/resources/conf/server.yaml 
b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/resources/conf/server.yaml index c11c26266ac34..c6645621f43b8 100644 --- a/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/resources/conf/server.yaml +++ b/examples/shardingsphere-proxy-example/shardingsphere-proxy-hint-example/src/main/resources/conf/server.yaml @@ -16,11 +16,10 @@ # ###################################################################################################### -# +# # If you want to configure governance, authorization and proxy properties, please refer to this file. -# +# ###################################################################################################### -# #mode: # type: Cluster diff --git a/features/broadcast/core/src/main/java/org/apache/shardingsphere/broadcast/route/BroadcastSQLRouter.java b/features/broadcast/core/src/main/java/org/apache/shardingsphere/broadcast/route/BroadcastSQLRouter.java index 236c90d1a4725..eb67e3b8aa529 100644 --- a/features/broadcast/core/src/main/java/org/apache/shardingsphere/broadcast/route/BroadcastSQLRouter.java +++ b/features/broadcast/core/src/main/java/org/apache/shardingsphere/broadcast/route/BroadcastSQLRouter.java @@ -23,6 +23,7 @@ import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.ddl.CloseStatementContext; import org.apache.shardingsphere.infra.binder.context.type.CursorAvailable; +import org.apache.shardingsphere.infra.binder.context.type.IndexAvailable; import org.apache.shardingsphere.infra.binder.context.type.TableAvailable; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; @@ -94,6 +95,9 @@ private void decorateRouteContextWhenDDLStatement(final RouteContext routeContex } return; } + if (sqlStatementContext instanceof IndexAvailable && 
!routeContext.getRouteUnits().isEmpty()) { + putAllBroadcastTables(routeContext, broadcastRule, sqlStatementContext); + } SQLStatement sqlStatement = sqlStatementContext.getSqlStatement(); boolean functionStatement = sqlStatement instanceof CreateFunctionStatement || sqlStatement instanceof AlterFunctionStatement || sqlStatement instanceof DropFunctionStatement; boolean procedureStatement = sqlStatement instanceof CreateProcedureStatement || sqlStatement instanceof AlterProcedureStatement || sqlStatement instanceof DropProcedureStatement; @@ -115,6 +119,15 @@ private void decorateRouteContextWhenDDLStatement(final RouteContext routeContex } } + private static void putAllBroadcastTables(final RouteContext routeContext, final BroadcastRule broadcastRule, final SQLStatementContext sqlStatementContext) { + Collection tableNames = sqlStatementContext.getTablesContext().getTableNames(); + for (String each : broadcastRule.getBroadcastRuleTableNames(tableNames)) { + for (RouteUnit routeUnit : routeContext.getRouteUnits()) { + routeUnit.getTableMappers().add(new RouteMapper(each, each)); + } + } + } + private static boolean isResourceGroupStatement(final SQLStatement sqlStatement) { // TODO add dropResourceGroupStatement, alterResourceGroupStatement return sqlStatement instanceof MySQLCreateResourceGroupStatement || sqlStatement instanceof MySQLSetResourceGroupStatement; diff --git a/features/broadcast/core/src/main/java/org/apache/shardingsphere/broadcast/rule/BroadcastRule.java b/features/broadcast/core/src/main/java/org/apache/shardingsphere/broadcast/rule/BroadcastRule.java index a6ae174d30008..58892c0ac2998 100644 --- a/features/broadcast/core/src/main/java/org/apache/shardingsphere/broadcast/rule/BroadcastRule.java +++ b/features/broadcast/core/src/main/java/org/apache/shardingsphere/broadcast/rule/BroadcastRule.java @@ -53,11 +53,11 @@ public final class BroadcastRule implements DatabaseRule, DataNodeContainedRule, private final TableNamesMapper logicalTableMapper; - 
public BroadcastRule(final BroadcastRuleConfiguration configuration, final String databaseName, final Map dataSources) { - this.configuration = configuration; + public BroadcastRule(final BroadcastRuleConfiguration config, final String databaseName, final Map dataSources) { + configuration = config; this.databaseName = databaseName; dataSourceNames = getDataSourceNames(dataSources); - tables = createBroadcastTables(configuration.getTables()); + tables = createBroadcastTables(config.getTables()); logicalTableMapper = createTableMapper(); tableDataNodes = createShardingTableDataNodes(dataSourceNames, tables); } @@ -94,11 +94,6 @@ private Collection generateDataNodes(final String logicTable, final Co return result; } - @Override - public String getType() { - return BroadcastRule.class.getSimpleName(); - } - @Override public Map> getAllDataNodes() { return tableDataNodes; diff --git a/features/broadcast/core/src/main/java/org/apache/shardingsphere/broadcast/yaml/swapper/NewYamlBroadcastRuleConfigurationSwapper.java b/features/broadcast/core/src/main/java/org/apache/shardingsphere/broadcast/yaml/swapper/NewYamlBroadcastRuleConfigurationSwapper.java index ca5a5586b577f..a16980009c2e0 100644 --- a/features/broadcast/core/src/main/java/org/apache/shardingsphere/broadcast/yaml/swapper/NewYamlBroadcastRuleConfigurationSwapper.java +++ b/features/broadcast/core/src/main/java/org/apache/shardingsphere/broadcast/yaml/swapper/NewYamlBroadcastRuleConfigurationSwapper.java @@ -45,9 +45,9 @@ public Collection swapToDataNodes(final BroadcastRuleConfiguration if (data.getTables().isEmpty()) { return Collections.emptyList(); } - YamlBroadcastRuleConfiguration yamlBroadcastRuleConfiguration = new YamlBroadcastRuleConfiguration(); - yamlBroadcastRuleConfiguration.getTables().addAll(data.getTables()); - return Collections.singleton(new YamlDataNode(BroadcastRuleNodePathProvider.TABLES, YamlEngine.marshal(yamlBroadcastRuleConfiguration))); + YamlBroadcastRuleConfiguration 
yamlBroadcastRuleConfig = new YamlBroadcastRuleConfiguration(); + yamlBroadcastRuleConfig.getTables().addAll(data.getTables()); + return Collections.singleton(new YamlDataNode(BroadcastRuleNodePathProvider.TABLES, YamlEngine.marshal(yamlBroadcastRuleConfig))); } @Override @@ -55,8 +55,8 @@ public Optional swapToObject(final Collection validDataNodes = dataNodes.stream().filter(each -> broadcastRuleNodePath.getRoot().isValidatedPath(each.getKey())).collect(Collectors.toList()); for (YamlDataNode each : validDataNodes) { if (broadcastRuleNodePath.getRoot().isValidatedPath(each.getKey())) { - YamlBroadcastRuleConfiguration yamlBroadcastRuleConfiguration = YamlEngine.unmarshal(each.getValue(), YamlBroadcastRuleConfiguration.class); - return Optional.of(new BroadcastRuleConfiguration(yamlBroadcastRuleConfiguration.getTables())); + YamlBroadcastRuleConfiguration yamlBroadcastRuleConfig = YamlEngine.unmarshal(each.getValue(), YamlBroadcastRuleConfiguration.class); + return Optional.of(new BroadcastRuleConfiguration(yamlBroadcastRuleConfig.getTables())); } } return Optional.empty(); diff --git a/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/query/CountBroadcastRuleExecutorTest.java b/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/query/CountBroadcastRuleExecutorTest.java index 089f24b740aa6..6bb51e9def4d3 100644 --- a/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/query/CountBroadcastRuleExecutorTest.java +++ b/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/query/CountBroadcastRuleExecutorTest.java @@ -68,9 +68,9 @@ private ShardingSphereDatabase mockDatabase() { private BroadcastRule mockBroadcastRule() { BroadcastRule result = mock(BroadcastRule.class); - BroadcastRuleConfiguration configuration = mock(BroadcastRuleConfiguration.class); - 
when(configuration.getTables()).thenReturn(Collections.singleton("t_address")); - when(result.getConfiguration()).thenReturn(configuration); + BroadcastRuleConfiguration config = mock(BroadcastRuleConfiguration.class); + when(config.getTables()).thenReturn(Collections.singleton("t_address")); + when(result.getConfiguration()).thenReturn(config); return result; } } diff --git a/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/query/ShowBroadcastTableRuleExecutorTest.java b/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/query/ShowBroadcastTableRuleExecutorTest.java index 4a53c0d2f06d0..139e9ff1ca194 100644 --- a/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/query/ShowBroadcastTableRuleExecutorTest.java +++ b/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/query/ShowBroadcastTableRuleExecutorTest.java @@ -75,9 +75,9 @@ private ShardingSphereDatabase mockDatabase() { private BroadcastRule mockBroadcastRule() { BroadcastRule result = mock(BroadcastRule.class); - BroadcastRuleConfiguration configuration = mock(BroadcastRuleConfiguration.class); - when(configuration.getTables()).thenReturn(Collections.singleton("t_address")); - when(result.getConfiguration()).thenReturn(configuration); + BroadcastRuleConfiguration config = mock(BroadcastRuleConfiguration.class); + when(config.getTables()).thenReturn(Collections.singleton("t_address")); + when(result.getConfiguration()).thenReturn(config); return result; } } diff --git a/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/update/CreateBroadcastTableRuleStatementUpdaterTest.java b/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/update/CreateBroadcastTableRuleStatementUpdaterTest.java index 
6bac1416f0aff..cf8b6b322d178 100644 --- a/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/update/CreateBroadcastTableRuleStatementUpdaterTest.java +++ b/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/update/CreateBroadcastTableRuleStatementUpdaterTest.java @@ -38,10 +38,10 @@ class CreateBroadcastTableRuleStatementUpdaterTest { @Test void assertCheckSQLStatementWithDuplicateBroadcastRule() { - BroadcastRuleConfiguration currentConfiguration = mock(BroadcastRuleConfiguration.class); - when(currentConfiguration.getTables()).thenReturn(Collections.singleton("t_address")); + BroadcastRuleConfiguration currentConfig = mock(BroadcastRuleConfiguration.class); + when(currentConfig.getTables()).thenReturn(Collections.singleton("t_address")); CreateBroadcastTableRuleStatement statement = new CreateBroadcastTableRuleStatement(false, Collections.singleton("t_address")); - assertThrows(DuplicateRuleException.class, () -> updater.checkSQLStatement(mock(ShardingSphereDatabase.class), statement, currentConfiguration)); + assertThrows(DuplicateRuleException.class, () -> updater.checkSQLStatement(mock(ShardingSphereDatabase.class), statement, currentConfig)); } @Test diff --git a/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/update/DropBroadcastTableRuleStatementUpdaterTest.java b/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/update/DropBroadcastTableRuleStatementUpdaterTest.java index fc228d289e11c..474ebc62a311d 100644 --- a/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/update/DropBroadcastTableRuleStatementUpdaterTest.java +++ b/features/broadcast/distsql/handler/src/test/java/org/apache/shardingsphere/broadcast/distsql/handler/update/DropBroadcastTableRuleStatementUpdaterTest.java @@ -59,10 +59,10 @@ void 
assertCheckSQLStatementWithoutToBeDroppedRule() { @Test void assertUpdateCurrentRuleConfiguration() { - BroadcastRuleConfiguration configuration = new BroadcastRuleConfiguration(new LinkedList<>()); - configuration.getTables().add("t_address"); + BroadcastRuleConfiguration config = new BroadcastRuleConfiguration(new LinkedList<>()); + config.getTables().add("t_address"); DropBroadcastTableRuleStatement statement = new DropBroadcastTableRuleStatement(false, Collections.singleton("t_address")); - assertTrue(updater.updateCurrentRuleConfiguration(statement, configuration)); - assertTrue(configuration.getTables().isEmpty()); + assertTrue(updater.updateCurrentRuleConfiguration(statement, config)); + assertTrue(config.getTables().isEmpty()); } } diff --git a/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/api/encrypt/assisted/AssistedEncryptAlgorithm.java b/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/api/encrypt/assisted/AssistedEncryptAlgorithm.java index 7fc39fa4c8512..17f57b1c40f03 100644 --- a/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/api/encrypt/assisted/AssistedEncryptAlgorithm.java +++ b/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/api/encrypt/assisted/AssistedEncryptAlgorithm.java @@ -21,9 +21,6 @@ /** * Assisted encrypt algorithm. 
- * - * @param type of plain value - * @param type of cipher value */ -public interface AssistedEncryptAlgorithm extends EncryptAlgorithm { +public interface AssistedEncryptAlgorithm extends EncryptAlgorithm { } diff --git a/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/api/encrypt/like/LikeEncryptAlgorithm.java b/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/api/encrypt/like/LikeEncryptAlgorithm.java index d79902da88409..a534a530ab7ae 100644 --- a/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/api/encrypt/like/LikeEncryptAlgorithm.java +++ b/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/api/encrypt/like/LikeEncryptAlgorithm.java @@ -21,9 +21,6 @@ /** * Like encrypt algorithm. - * - * @param type of plain value - * @param type of cipher value */ -public interface LikeEncryptAlgorithm extends EncryptAlgorithm { +public interface LikeEncryptAlgorithm extends EncryptAlgorithm { } diff --git a/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/api/encrypt/standard/StandardEncryptAlgorithm.java b/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/api/encrypt/standard/StandardEncryptAlgorithm.java index 7b2de2ed64e1c..69c5102592aad 100644 --- a/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/api/encrypt/standard/StandardEncryptAlgorithm.java +++ b/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/api/encrypt/standard/StandardEncryptAlgorithm.java @@ -22,11 +22,8 @@ /** * Standard encrypt algorithm. - * - * @param type of plain value - * @param type of cipher value */ -public interface StandardEncryptAlgorithm extends EncryptAlgorithm { +public interface StandardEncryptAlgorithm extends EncryptAlgorithm { /** * Decrypt. 
@@ -35,5 +32,5 @@ public interface StandardEncryptAlgorithm extends EncryptAlgorithm { * @param encryptContext encrypt context * @return plain value */ - I decrypt(O cipherValue, EncryptContext encryptContext); + Object decrypt(Object cipherValue, EncryptContext encryptContext); } diff --git a/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/spi/EncryptAlgorithm.java b/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/spi/EncryptAlgorithm.java index b7611f40fbf4d..27839f7e09c2e 100644 --- a/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/spi/EncryptAlgorithm.java +++ b/features/encrypt/api/src/main/java/org/apache/shardingsphere/encrypt/spi/EncryptAlgorithm.java @@ -22,11 +22,8 @@ /** * Encrypt algorithm. - * - * @param type of plain value - * @param type of cipher value */ -public interface EncryptAlgorithm extends ShardingSphereAlgorithm { +public interface EncryptAlgorithm extends ShardingSphereAlgorithm { /** * Encrypt. @@ -35,5 +32,5 @@ public interface EncryptAlgorithm extends ShardingSphereAlgorithm { * @param encryptContext encrypt context * @return cipher value */ - O encrypt(I plainValue, EncryptContext encryptContext); + Object encrypt(Object plainValue, EncryptContext encryptContext); } diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/assisted/MD5AssistedEncryptAlgorithm.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/assisted/MD5AssistedEncryptAlgorithm.java index 78dc9ff457a4c..11f43bb3c0e7d 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/assisted/MD5AssistedEncryptAlgorithm.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/assisted/MD5AssistedEncryptAlgorithm.java @@ -17,6 +17,7 @@ package org.apache.shardingsphere.encrypt.algorithm.assisted; +import lombok.EqualsAndHashCode; import 
org.apache.commons.codec.digest.DigestUtils; import org.apache.shardingsphere.encrypt.api.context.EncryptContext; import org.apache.shardingsphere.encrypt.api.encrypt.assisted.AssistedEncryptAlgorithm; @@ -26,7 +27,8 @@ /** * MD5 assisted encrypt algorithm. */ -public final class MD5AssistedEncryptAlgorithm implements AssistedEncryptAlgorithm { +@EqualsAndHashCode +public final class MD5AssistedEncryptAlgorithm implements AssistedEncryptAlgorithm { private static final String SALT_KEY = "salt"; diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/like/CharDigestLikeEncryptAlgorithm.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/like/CharDigestLikeEncryptAlgorithm.java index 8e0d45b62e42f..4b64d85569554 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/like/CharDigestLikeEncryptAlgorithm.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/like/CharDigestLikeEncryptAlgorithm.java @@ -18,10 +18,11 @@ package org.apache.shardingsphere.encrypt.algorithm.like; import com.google.common.base.Strings; +import lombok.EqualsAndHashCode; import lombok.SneakyThrows; +import org.apache.shardingsphere.encrypt.api.context.EncryptContext; import org.apache.shardingsphere.encrypt.api.encrypt.like.LikeEncryptAlgorithm; import org.apache.shardingsphere.encrypt.exception.algorithm.EncryptAlgorithmInitializationException; -import org.apache.shardingsphere.encrypt.api.context.EncryptContext; import java.io.IOException; import java.io.InputStream; @@ -35,7 +36,8 @@ /** * Char digest like encrypt algorithm. 
*/ -public final class CharDigestLikeEncryptAlgorithm implements LikeEncryptAlgorithm { +@EqualsAndHashCode +public final class CharDigestLikeEncryptAlgorithm implements LikeEncryptAlgorithm { private static final String DELTA_KEY = "delta"; @@ -104,7 +106,7 @@ private int createStart(final Properties props) { private Map createCharIndexes(final Properties props) { String dictContent = props.containsKey(DICT_KEY) && !Strings.isNullOrEmpty(props.getProperty(DICT_KEY)) ? props.getProperty(DICT_KEY) : initDefaultDict(); - return IntStream.range(0, dictContent.length()).boxed().collect(Collectors.toMap(dictContent::charAt, index -> index, (a, b) -> b)); + return IntStream.range(0, dictContent.length()).boxed().collect(Collectors.toMap(dictContent::charAt, index -> index, (oldValue, currentValue) -> oldValue)); } @SneakyThrows(IOException.class) diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/standard/AESEncryptAlgorithm.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/standard/AESEncryptAlgorithm.java index 952475f8e0847..6433d35559843 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/standard/AESEncryptAlgorithm.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/standard/AESEncryptAlgorithm.java @@ -18,12 +18,13 @@ package org.apache.shardingsphere.encrypt.algorithm.standard; import com.google.common.base.Strings; +import lombok.EqualsAndHashCode; import lombok.SneakyThrows; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.codec.digest.MessageDigestAlgorithms; +import org.apache.shardingsphere.encrypt.api.context.EncryptContext; import org.apache.shardingsphere.encrypt.api.encrypt.standard.StandardEncryptAlgorithm; import org.apache.shardingsphere.encrypt.exception.algorithm.EncryptAlgorithmInitializationException; -import 
org.apache.shardingsphere.encrypt.api.context.EncryptContext; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import javax.crypto.Cipher; @@ -40,7 +41,8 @@ /** * AES encrypt algorithm. */ -public final class AESEncryptAlgorithm implements StandardEncryptAlgorithm { +@EqualsAndHashCode +public final class AESEncryptAlgorithm implements StandardEncryptAlgorithm { private static final String AES_KEY = "aes-key-value"; @@ -73,11 +75,11 @@ public String encrypt(final Object plainValue, final EncryptContext encryptConte @SneakyThrows(GeneralSecurityException.class) @Override - public Object decrypt(final String cipherValue, final EncryptContext encryptContext) { + public Object decrypt(final Object cipherValue, final EncryptContext encryptContext) { if (null == cipherValue) { return null; } - byte[] result = getCipher(Cipher.DECRYPT_MODE).doFinal(Base64.getDecoder().decode(cipherValue.trim())); + byte[] result = getCipher(Cipher.DECRYPT_MODE).doFinal(Base64.getDecoder().decode(cipherValue.toString().trim())); return new String(result, StandardCharsets.UTF_8); } diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/standard/RC4EncryptAlgorithm.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/standard/RC4EncryptAlgorithm.java index e8ceb83e995e4..1bc3e2169e56f 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/standard/RC4EncryptAlgorithm.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/algorithm/standard/RC4EncryptAlgorithm.java @@ -17,6 +17,7 @@ package org.apache.shardingsphere.encrypt.algorithm.standard; +import lombok.EqualsAndHashCode; import org.apache.commons.codec.binary.Base64; import org.apache.shardingsphere.encrypt.api.context.EncryptContext; import org.apache.shardingsphere.encrypt.api.encrypt.standard.StandardEncryptAlgorithm; @@ -29,7 +30,8 @@ /** * RC4 encrypt algorithm. 
*/ -public final class RC4EncryptAlgorithm implements StandardEncryptAlgorithm { +@EqualsAndHashCode +public final class RC4EncryptAlgorithm implements StandardEncryptAlgorithm { private static final String RC4_KEY = "rc4-key-value"; @@ -57,8 +59,8 @@ public String encrypt(final Object plainValue, final EncryptContext encryptConte } @Override - public Object decrypt(final String cipherValue, final EncryptContext encryptContext) { - return null == cipherValue ? null : new String(crypt(Base64.decodeBase64(cipherValue)), StandardCharsets.UTF_8); + public Object decrypt(final Object cipherValue, final EncryptContext encryptContext) { + return null == cipherValue ? null : new String(crypt(Base64.decodeBase64(cipherValue.toString())), StandardCharsets.UTF_8); } /* diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/condition/EncryptConditionEngine.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/condition/EncryptConditionEngine.java index 664d36dd162a5..91c5210550a4f 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/condition/EncryptConditionEngine.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/condition/EncryptConditionEngine.java @@ -25,6 +25,7 @@ import org.apache.shardingsphere.encrypt.rule.EncryptTable; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; +import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BetweenExpression; @@ -158,13 +159,11 @@ private Optional createEncryptCondition(final ExpressionSegmen private 
Optional createBinaryEncryptCondition(final BinaryOperationExpression expression, final String tableName) { String operator = expression.getOperator(); - if (!LOGICAL_OPERATOR.contains(operator)) { - if (SUPPORTED_COMPARE_OPERATOR.contains(operator)) { - return createCompareEncryptCondition(tableName, expression, expression.getRight()); - } - throw new UnsupportedEncryptSQLException(operator); + if (LOGICAL_OPERATOR.contains(operator)) { + return Optional.empty(); } - return Optional.empty(); + ShardingSpherePreconditions.checkState(SUPPORTED_COMPARE_OPERATOR.contains(operator), () -> new UnsupportedEncryptSQLException(operator)); + return createCompareEncryptCondition(tableName, expression, expression.getRight()); } private Optional createCompareEncryptCondition(final String tableName, final BinaryOperationExpression expression, final ExpressionSegment compareRightValue) { diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/EncryptTokenGenerateBuilder.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/EncryptTokenGenerateBuilder.java index cb83d61571206..6612ac3db59af 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/EncryptTokenGenerateBuilder.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/EncryptTokenGenerateBuilder.java @@ -19,23 +19,25 @@ import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.encrypt.rewrite.aware.DatabaseNameAware; +import org.apache.shardingsphere.encrypt.rewrite.aware.DatabaseTypeAware; import org.apache.shardingsphere.encrypt.rewrite.aware.EncryptConditionsAware; +import org.apache.shardingsphere.encrypt.rewrite.aware.EncryptRuleAware; import org.apache.shardingsphere.encrypt.rewrite.condition.EncryptCondition; -import org.apache.shardingsphere.encrypt.rewrite.token.generator.insert.EncryptInsertDerivedColumnsTokenGenerator; import 
org.apache.shardingsphere.encrypt.rewrite.token.generator.EncryptAlterTableTokenGenerator; import org.apache.shardingsphere.encrypt.rewrite.token.generator.EncryptAssignmentTokenGenerator; -import org.apache.shardingsphere.encrypt.rewrite.token.generator.EncryptIndexColumnTokenGenerator; import org.apache.shardingsphere.encrypt.rewrite.token.generator.EncryptCreateTableTokenGenerator; -import org.apache.shardingsphere.encrypt.rewrite.token.generator.EncryptForUseDefaultInsertColumnsTokenGenerator; -import org.apache.shardingsphere.encrypt.rewrite.token.generator.insert.EncryptInsertOnUpdateTokenGenerator; -import org.apache.shardingsphere.encrypt.rewrite.token.generator.insert.EncryptInsertValuesTokenGenerator; +import org.apache.shardingsphere.encrypt.rewrite.token.generator.insert.EncryptInsertDefaultColumnsTokenGenerator; +import org.apache.shardingsphere.encrypt.rewrite.token.generator.EncryptGroupByItemTokenGenerator; +import org.apache.shardingsphere.encrypt.rewrite.token.generator.EncryptIndexColumnTokenGenerator; import org.apache.shardingsphere.encrypt.rewrite.token.generator.EncryptOrderByItemTokenGenerator; import org.apache.shardingsphere.encrypt.rewrite.token.generator.EncryptPredicateColumnTokenGenerator; import org.apache.shardingsphere.encrypt.rewrite.token.generator.EncryptPredicateRightValueTokenGenerator; import org.apache.shardingsphere.encrypt.rewrite.token.generator.EncryptProjectionTokenGenerator; import org.apache.shardingsphere.encrypt.rewrite.token.generator.insert.EncryptInsertCipherNameTokenGenerator; +import org.apache.shardingsphere.encrypt.rewrite.token.generator.insert.EncryptInsertDerivedColumnsTokenGenerator; +import org.apache.shardingsphere.encrypt.rewrite.token.generator.insert.EncryptInsertOnUpdateTokenGenerator; +import org.apache.shardingsphere.encrypt.rewrite.token.generator.insert.EncryptInsertValuesTokenGenerator; import org.apache.shardingsphere.encrypt.rule.EncryptRule; -import 
org.apache.shardingsphere.encrypt.rewrite.aware.EncryptRuleAware; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; import org.apache.shardingsphere.infra.rewrite.sql.token.generator.SQLTokenGenerator; import org.apache.shardingsphere.infra.rewrite.sql.token.generator.builder.SQLTokenGeneratorBuilder; @@ -65,13 +67,14 @@ public Collection getSQLTokenGenerators() { addSQLTokenGenerator(result, new EncryptPredicateColumnTokenGenerator()); addSQLTokenGenerator(result, new EncryptPredicateRightValueTokenGenerator()); addSQLTokenGenerator(result, new EncryptInsertValuesTokenGenerator()); - addSQLTokenGenerator(result, new EncryptForUseDefaultInsertColumnsTokenGenerator()); + addSQLTokenGenerator(result, new EncryptInsertDefaultColumnsTokenGenerator()); addSQLTokenGenerator(result, new EncryptInsertCipherNameTokenGenerator()); addSQLTokenGenerator(result, new EncryptInsertDerivedColumnsTokenGenerator()); addSQLTokenGenerator(result, new EncryptInsertOnUpdateTokenGenerator()); addSQLTokenGenerator(result, new EncryptCreateTableTokenGenerator()); addSQLTokenGenerator(result, new EncryptAlterTableTokenGenerator()); addSQLTokenGenerator(result, new EncryptOrderByItemTokenGenerator()); + addSQLTokenGenerator(result, new EncryptGroupByItemTokenGenerator()); addSQLTokenGenerator(result, new EncryptIndexColumnTokenGenerator()); return result; } @@ -93,5 +96,8 @@ private void setUpSQLTokenGenerator(final SQLTokenGenerator toBeAddedSQLTokenGen if (toBeAddedSQLTokenGenerator instanceof DatabaseNameAware) { ((DatabaseNameAware) toBeAddedSQLTokenGenerator).setDatabaseName(databaseName); } + if (toBeAddedSQLTokenGenerator instanceof DatabaseTypeAware) { + ((DatabaseTypeAware) toBeAddedSQLTokenGenerator).setDatabaseType(sqlStatementContext.getDatabaseType()); + } } } diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptAlterTableTokenGenerator.java 
b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptAlterTableTokenGenerator.java index 33e07a8cbca7d..6fa11e1e4ecc2 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptAlterTableTokenGenerator.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptAlterTableTokenGenerator.java @@ -31,11 +31,11 @@ import org.apache.shardingsphere.encrypt.rule.column.item.LikeQueryColumnItem; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.ddl.AlterTableStatementContext; +import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.rewrite.sql.token.generator.CollectionSQLTokenGenerator; import org.apache.shardingsphere.infra.rewrite.sql.token.pojo.SQLToken; import org.apache.shardingsphere.infra.rewrite.sql.token.pojo.Substitutable; import org.apache.shardingsphere.infra.rewrite.sql.token.pojo.generic.RemoveToken; -import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.ColumnDefinitionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.alter.AddColumnDefinitionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.alter.ChangeColumnDefinitionSegment; @@ -176,8 +176,8 @@ private Collection getChangeColumnTokens(final EncryptTable encryptTab } private void isSameEncryptColumn(final EncryptTable encryptTable, final String previousColumnName, final String columnName) { - Optional> previousEncryptor = encryptTable.findEncryptor(previousColumnName); - Optional> currentEncryptor = encryptTable.findEncryptor(columnName); + Optional previousEncryptor = encryptTable.findEncryptor(previousColumnName); + 
Optional currentEncryptor = encryptTable.findEncryptor(columnName); if (!previousEncryptor.isPresent() && !currentEncryptor.isPresent()) { return; } diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptGroupByItemTokenGenerator.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptGroupByItemTokenGenerator.java new file mode 100644 index 0000000000000..7a5b93253eba6 --- /dev/null +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptGroupByItemTokenGenerator.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.encrypt.rewrite.token.generator; + +import lombok.Setter; +import org.apache.shardingsphere.encrypt.rewrite.aware.DatabaseTypeAware; +import org.apache.shardingsphere.encrypt.rewrite.aware.EncryptRuleAware; +import org.apache.shardingsphere.encrypt.rule.EncryptRule; +import org.apache.shardingsphere.encrypt.rule.EncryptTable; +import org.apache.shardingsphere.encrypt.rule.column.EncryptColumn; +import org.apache.shardingsphere.infra.binder.context.segment.select.orderby.OrderByItem; +import org.apache.shardingsphere.infra.binder.context.segment.select.projection.Projection; +import org.apache.shardingsphere.infra.binder.context.segment.select.projection.impl.ColumnProjection; +import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; +import org.apache.shardingsphere.infra.binder.context.statement.dml.SelectStatementContext; +import org.apache.shardingsphere.infra.database.core.metadata.database.enums.QuoteCharacter; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; +import org.apache.shardingsphere.infra.rewrite.sql.token.generator.CollectionSQLTokenGenerator; +import org.apache.shardingsphere.infra.rewrite.sql.token.generator.aware.SchemaMetaDataAware; +import org.apache.shardingsphere.infra.rewrite.sql.token.pojo.SQLToken; +import org.apache.shardingsphere.infra.rewrite.sql.token.pojo.generic.SubstitutableColumnNameToken; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.order.item.ColumnOrderByItemSegment; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; + +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashSet; 
+import java.util.LinkedList; +import java.util.Map; +import java.util.Optional; + +/** + * Group by item token generator for encrypt. + */ +@Setter +public final class EncryptGroupByItemTokenGenerator implements CollectionSQLTokenGenerator, SchemaMetaDataAware, EncryptRuleAware, DatabaseTypeAware { + + private String databaseName; + + private Map schemas; + + private EncryptRule encryptRule; + + private DatabaseType databaseType; + + @Override + public boolean isGenerateSQLToken(final SQLStatementContext sqlStatementContext) { + return sqlStatementContext instanceof SelectStatementContext && containsGroupByItem(sqlStatementContext); + } + + @Override + public Collection generateSQLTokens(final SQLStatementContext sqlStatementContext) { + Collection result = new LinkedHashSet<>(); + String defaultSchema = new DatabaseTypeRegistry(sqlStatementContext.getDatabaseType()).getDefaultSchemaName(databaseName); + ShardingSphereSchema schema = sqlStatementContext.getTablesContext().getSchemaName().map(schemas::get).orElseGet(() -> schemas.get(defaultSchema)); + for (OrderByItem each : getGroupByItems(sqlStatementContext)) { + if (each.getSegment() instanceof ColumnOrderByItemSegment) { + ColumnSegment columnSegment = ((ColumnOrderByItemSegment) each.getSegment()).getColumn(); + Map columnTableNames = sqlStatementContext.getTablesContext().findTableNamesByColumnSegment(Collections.singleton(columnSegment), schema); + result.addAll(generateSQLTokensWithColumnSegments(Collections.singleton(columnSegment), columnTableNames)); + } + } + return result; + } + + private Collection generateSQLTokensWithColumnSegments(final Collection columnSegments, final Map columnTableNames) { + Collection result = new LinkedList<>(); + for (ColumnSegment each : columnSegments) { + String tableName = columnTableNames.getOrDefault(each.getExpression(), ""); + Optional encryptTable = encryptRule.findEncryptTable(tableName); + String columnName = each.getIdentifier().getValue(); + if 
(!encryptTable.isPresent() || !encryptTable.get().isEncryptColumn(columnName)) { + continue; + } + int startIndex = each.getOwner().isPresent() ? each.getOwner().get().getStopIndex() + 2 : each.getStartIndex(); + int stopIndex = each.getStopIndex(); + EncryptColumn encryptColumn = encryptTable.get().getEncryptColumn(columnName); + SubstitutableColumnNameToken encryptColumnNameToken = encryptColumn.getAssistedQuery() + .map(optional -> new SubstitutableColumnNameToken(startIndex, stopIndex, createColumnProjections(optional.getName(), each.getIdentifier().getQuoteCharacter()))) + .orElseGet(() -> new SubstitutableColumnNameToken(startIndex, stopIndex, createColumnProjections(encryptColumn.getCipher().getName(), each.getIdentifier().getQuoteCharacter()))); + result.add(encryptColumnNameToken); + } + return result; + } + + private Collection getGroupByItems(final SQLStatementContext sqlStatementContext) { + if (!(sqlStatementContext instanceof SelectStatementContext)) { + return Collections.emptyList(); + } + SelectStatementContext statementContext = (SelectStatementContext) sqlStatementContext; + Collection result = new LinkedList<>(statementContext.getGroupByContext().getItems()); + for (SelectStatementContext each : statementContext.getSubqueryContexts().values()) { + result.addAll(getGroupByItems(each)); + } + return result; + } + + private boolean containsGroupByItem(final SQLStatementContext sqlStatementContext) { + if (!(sqlStatementContext instanceof SelectStatementContext)) { + return false; + } + SelectStatementContext statementContext = (SelectStatementContext) sqlStatementContext; + if (!statementContext.getGroupByContext().getItems().isEmpty()) { + return true; + } + for (SelectStatementContext each : statementContext.getSubqueryContexts().values()) { + if (containsGroupByItem(each)) { + return true; + } + } + return false; + } + + private Collection createColumnProjections(final String columnName, final QuoteCharacter quoteCharacter) { + return 
Collections.singleton(new ColumnProjection(null, new IdentifierValue(columnName, quoteCharacter), null, databaseType)); + } +} diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptIndexColumnTokenGenerator.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptIndexColumnTokenGenerator.java index 8a3219cbfed5a..05c23e2b20b70 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptIndexColumnTokenGenerator.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptIndexColumnTokenGenerator.java @@ -89,6 +89,6 @@ private Optional getQueryColumnToken(final int startIndex, final int s } private Collection getColumnProjections(final String columnName, final QuoteCharacter quoteCharacter) { - return Collections.singletonList(new ColumnProjection(null, new IdentifierValue(columnName, quoteCharacter), null, databaseType)); + return Collections.singleton(new ColumnProjection(null, new IdentifierValue(columnName, quoteCharacter), null, databaseType)); } } diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptOrderByItemTokenGenerator.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptOrderByItemTokenGenerator.java index 3d72c17c319ab..4e1acb3a29f89 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptOrderByItemTokenGenerator.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptOrderByItemTokenGenerator.java @@ -18,19 +18,15 @@ package org.apache.shardingsphere.encrypt.rewrite.token.generator; import lombok.Setter; -import org.apache.shardingsphere.encrypt.rewrite.aware.DatabaseTypeAware; +import 
org.apache.shardingsphere.encrypt.exception.syntax.UnsupportedEncryptSQLException; import org.apache.shardingsphere.encrypt.rewrite.aware.EncryptRuleAware; import org.apache.shardingsphere.encrypt.rule.EncryptRule; import org.apache.shardingsphere.encrypt.rule.EncryptTable; -import org.apache.shardingsphere.encrypt.rule.column.EncryptColumn; import org.apache.shardingsphere.infra.binder.context.segment.select.orderby.OrderByItem; -import org.apache.shardingsphere.infra.binder.context.segment.select.projection.Projection; -import org.apache.shardingsphere.infra.binder.context.segment.select.projection.impl.ColumnProjection; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.dml.SelectStatementContext; -import org.apache.shardingsphere.infra.database.core.metadata.database.enums.QuoteCharacter; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; +import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.rewrite.sql.token.generator.CollectionSQLTokenGenerator; import org.apache.shardingsphere.infra.rewrite.sql.token.generator.aware.SchemaMetaDataAware; @@ -38,7 +34,6 @@ import org.apache.shardingsphere.infra.rewrite.sql.token.pojo.generic.SubstitutableColumnNameToken; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.order.item.ColumnOrderByItemSegment; -import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import java.util.Collection; import java.util.Collections; @@ -51,7 +46,7 @@ * Order by item token generator for encrypt. 
*/ @Setter -public final class EncryptOrderByItemTokenGenerator implements CollectionSQLTokenGenerator, SchemaMetaDataAware, EncryptRuleAware, DatabaseTypeAware { +public final class EncryptOrderByItemTokenGenerator implements CollectionSQLTokenGenerator, SchemaMetaDataAware, EncryptRuleAware { private String databaseName; @@ -59,8 +54,6 @@ public final class EncryptOrderByItemTokenGenerator implements CollectionSQLToke private EncryptRule encryptRule; - private DatabaseType databaseType; - @Override public boolean isGenerateSQLToken(final SQLStatementContext sqlStatementContext) { return sqlStatementContext instanceof SelectStatementContext && containsOrderByItem(sqlStatementContext); @@ -87,16 +80,7 @@ private Collection generateSQLTokensWithColumnSegm String tableName = columnTableNames.getOrDefault(each.getExpression(), ""); Optional encryptTable = encryptRule.findEncryptTable(tableName); String columnName = each.getIdentifier().getValue(); - if (!encryptTable.isPresent() || !encryptTable.get().isEncryptColumn(columnName)) { - continue; - } - int startIndex = each.getOwner().isPresent() ? 
each.getOwner().get().getStopIndex() + 2 : each.getStartIndex(); - int stopIndex = each.getStopIndex(); - EncryptColumn encryptColumn = encryptTable.get().getEncryptColumn(columnName); - SubstitutableColumnNameToken encryptColumnNameToken = encryptColumn.getAssistedQuery() - .map(optional -> new SubstitutableColumnNameToken(startIndex, stopIndex, createColumnProjections(optional.getName(), each.getIdentifier().getQuoteCharacter()))) - .orElseGet(() -> new SubstitutableColumnNameToken(startIndex, stopIndex, createColumnProjections(encryptColumn.getCipher().getName(), each.getIdentifier().getQuoteCharacter()))); - result.add(encryptColumnNameToken); + ShardingSpherePreconditions.checkState(!encryptTable.isPresent() || !encryptTable.get().isEncryptColumn(columnName), () -> new UnsupportedEncryptSQLException("ORDER BY")); } return result; } @@ -110,7 +94,6 @@ private Collection getOrderByItems(final SQLStatementContext sqlSta if (!statementContext.getOrderByContext().isGenerated()) { result.addAll(statementContext.getOrderByContext().getItems()); } - result.addAll(statementContext.getGroupByContext().getItems()); for (SelectStatementContext each : statementContext.getSubqueryContexts().values()) { result.addAll(getOrderByItems(each)); } @@ -122,7 +105,7 @@ private boolean containsOrderByItem(final SQLStatementContext sqlStatementContex return false; } SelectStatementContext statementContext = (SelectStatementContext) sqlStatementContext; - if (!statementContext.getOrderByContext().getItems().isEmpty() && !statementContext.getOrderByContext().isGenerated() || !statementContext.getGroupByContext().getItems().isEmpty()) { + if (!statementContext.getOrderByContext().getItems().isEmpty() && !statementContext.getOrderByContext().isGenerated()) { return true; } for (SelectStatementContext each : statementContext.getSubqueryContexts().values()) { @@ -132,8 +115,4 @@ private boolean containsOrderByItem(final SQLStatementContext sqlStatementContex } return false; } - - private 
Collection createColumnProjections(final String columnName, final QuoteCharacter quoteCharacter) { - return Collections.singleton(new ColumnProjection(null, new IdentifierValue(columnName, quoteCharacter), null, databaseType)); - } } diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptPredicateColumnTokenGenerator.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptPredicateColumnTokenGenerator.java index 5facd649cbaee..0d8f6e7653bff 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptPredicateColumnTokenGenerator.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptPredicateColumnTokenGenerator.java @@ -21,6 +21,7 @@ import org.apache.shardingsphere.encrypt.exception.syntax.UnsupportedEncryptSQLException; import org.apache.shardingsphere.encrypt.rewrite.aware.DatabaseTypeAware; import org.apache.shardingsphere.encrypt.rewrite.aware.EncryptRuleAware; +import org.apache.shardingsphere.encrypt.rewrite.token.util.EncryptTokenGeneratorUtils; import org.apache.shardingsphere.encrypt.rule.EncryptRule; import org.apache.shardingsphere.encrypt.rule.EncryptTable; import org.apache.shardingsphere.encrypt.rule.column.EncryptColumn; @@ -31,12 +32,13 @@ import org.apache.shardingsphere.infra.database.core.metadata.database.enums.QuoteCharacter; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; +import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; +import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnsupportedSQLOperationException; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import 
org.apache.shardingsphere.infra.rewrite.sql.token.generator.CollectionSQLTokenGenerator; import org.apache.shardingsphere.infra.rewrite.sql.token.generator.aware.SchemaMetaDataAware; import org.apache.shardingsphere.infra.rewrite.sql.token.pojo.SQLToken; import org.apache.shardingsphere.infra.rewrite.sql.token.pojo.generic.SubstitutableColumnNameToken; -import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; @@ -74,18 +76,21 @@ public boolean isGenerateSQLToken(final SQLStatementContext sqlStatementContext) public Collection generateSQLTokens(final SQLStatementContext sqlStatementContext) { Collection columnSegments = Collections.emptyList(); Collection whereSegments = Collections.emptyList(); + Collection joinConditions = Collections.emptyList(); if (sqlStatementContext instanceof WhereAvailable) { columnSegments = ((WhereAvailable) sqlStatementContext).getColumnSegments(); whereSegments = ((WhereAvailable) sqlStatementContext).getWhereSegments(); + joinConditions = ((WhereAvailable) sqlStatementContext).getJoinConditions(); } + ShardingSpherePreconditions.checkState(EncryptTokenGeneratorUtils.isAllJoinConditionsUseSameEncryptor(joinConditions, encryptRule), + () -> new UnsupportedSQLOperationException("Can not use different encryptor in join condition")); String defaultSchema = new DatabaseTypeRegistry(sqlStatementContext.getDatabaseType()).getDefaultSchemaName(databaseName); ShardingSphereSchema schema = sqlStatementContext.getTablesContext().getSchemaName().map(schemas::get).orElseGet(() -> schemas.get(defaultSchema)); Map columnExpressionTableNames = sqlStatementContext.getTablesContext().findTableNamesByColumnSegment(columnSegments, schema); return 
generateSQLTokens(columnSegments, columnExpressionTableNames, whereSegments); } - private Collection generateSQLTokens(final Collection columnSegments, - final Map columnExpressionTableNames, final Collection whereSegments) { + private Collection generateSQLTokens(final Collection columnSegments, final Map columnExpressionTableNames, final Collection whereSegments) { Collection result = new LinkedHashSet<>(); for (ColumnSegment each : columnSegments) { String tableName = Optional.ofNullable(columnExpressionTableNames.get(each.getExpression())).orElse(""); @@ -137,6 +142,6 @@ private boolean isSameColumnSegment(final ExpressionSegment columnSegment, final } private Collection createColumnProjections(final String columnName, final QuoteCharacter quoteCharacter) { - return Collections.singletonList(new ColumnProjection(null, new IdentifierValue(columnName, quoteCharacter), null, databaseType)); + return Collections.singleton(new ColumnProjection(null, new IdentifierValue(columnName, quoteCharacter), null, databaseType)); } } diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptProjectionTokenGenerator.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptProjectionTokenGenerator.java index 7ec6183d2e386..bd05308a6ac82 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptProjectionTokenGenerator.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptProjectionTokenGenerator.java @@ -17,22 +17,22 @@ package org.apache.shardingsphere.encrypt.rewrite.token.generator; -import com.google.common.base.Preconditions; import lombok.Setter; import org.apache.shardingsphere.encrypt.rewrite.aware.DatabaseTypeAware; import org.apache.shardingsphere.encrypt.rewrite.aware.EncryptRuleAware; import org.apache.shardingsphere.encrypt.rule.EncryptRule; import 
org.apache.shardingsphere.encrypt.rule.EncryptTable; import org.apache.shardingsphere.encrypt.rule.column.EncryptColumn; -import org.apache.shardingsphere.encrypt.rule.column.item.AssistedQueryColumnItem; import org.apache.shardingsphere.infra.binder.context.segment.select.projection.DerivedColumn; import org.apache.shardingsphere.infra.binder.context.segment.select.projection.Projection; import org.apache.shardingsphere.infra.binder.context.segment.select.projection.ProjectionsContext; import org.apache.shardingsphere.infra.binder.context.segment.select.projection.impl.ColumnProjection; import org.apache.shardingsphere.infra.binder.context.segment.select.projection.impl.ShorthandProjection; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; +import org.apache.shardingsphere.infra.binder.context.statement.dml.InsertStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.dml.SelectStatementContext; import org.apache.shardingsphere.infra.database.core.metadata.database.DialectDatabaseMetaData; +import org.apache.shardingsphere.infra.database.core.metadata.database.enums.QuoteCharacter; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; @@ -46,12 +46,10 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ShorthandProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerSegment; -import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.JoinTableSegment; -import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SubqueryTableSegment; -import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableSegment; import 
org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import java.util.Collection; +import java.util.Collections; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; @@ -71,21 +69,28 @@ public final class EncryptProjectionTokenGenerator implements CollectionSQLToken @Override public boolean isGenerateSQLToken(final SQLStatementContext sqlStatementContext) { - return sqlStatementContext instanceof SelectStatementContext && !((SelectStatementContext) sqlStatementContext).getAllTables().isEmpty(); + return sqlStatementContext instanceof SelectStatementContext && !((SelectStatementContext) sqlStatementContext).getAllTables().isEmpty() + || sqlStatementContext instanceof InsertStatementContext && null != ((InsertStatementContext) sqlStatementContext).getInsertSelectContext(); } @Override public Collection generateSQLTokens(final SQLStatementContext sqlStatementContext) { - Preconditions.checkState(sqlStatementContext instanceof SelectStatementContext); Collection result = new LinkedHashSet<>(); - SelectStatementContext selectStatementContext = (SelectStatementContext) sqlStatementContext; - addGenerateSQLTokens(result, selectStatementContext); - for (SelectStatementContext each : selectStatementContext.getSubqueryContexts().values()) { - addGenerateSQLTokens(result, each); + if (sqlStatementContext instanceof SelectStatementContext) { + generateSQLTokens((SelectStatementContext) sqlStatementContext, result); + } else if (sqlStatementContext instanceof InsertStatementContext && null != ((InsertStatementContext) sqlStatementContext).getInsertSelectContext()) { + generateSQLTokens(((InsertStatementContext) sqlStatementContext).getInsertSelectContext().getSelectStatementContext(), result); } return result; } + private void generateSQLTokens(final SelectStatementContext selectStatementContext, final Collection sqlTokens) { + addGenerateSQLTokens(sqlTokens, selectStatementContext); + for (SelectStatementContext each 
: selectStatementContext.getSubqueryContexts().values()) { + addGenerateSQLTokens(sqlTokens, each); + } + } + private void addGenerateSQLTokens(final Collection sqlTokens, final SelectStatementContext selectStatementContext) { for (ProjectionSegment each : selectStatementContext.getSqlStatement().getProjections().getProjections()) { SubqueryType subqueryType = selectStatementContext.getSubqueryType(); @@ -94,11 +99,11 @@ private void addGenerateSQLTokens(final Collection sqlTokens, final Se ColumnProjection columnProjection = buildColumnProjection(columnSegment); String originalColumnName = columnProjection.getOriginalColumn().getValue(); Optional encryptTable = encryptRule.findEncryptTable(columnProjection.getOriginalTable().getValue()); - if (encryptTable.isPresent() && encryptTable.get().isEncryptColumn(originalColumnName) && !containsTableSubquery(selectStatementContext.getSqlStatement().getFrom())) { + if (encryptTable.isPresent() && encryptTable.get().isEncryptColumn(originalColumnName) && !selectStatementContext.containsTableSubquery()) { sqlTokens.add(generateSQLToken(encryptTable.get().getEncryptColumn(originalColumnName), columnSegment, columnProjection, subqueryType)); } } - ShardingSpherePreconditions.checkState(!(each instanceof ShorthandProjectionSegment) || !containsTableSubquery(selectStatementContext.getSqlStatement().getFrom()), + ShardingSpherePreconditions.checkState(!(each instanceof ShorthandProjectionSegment) || !selectStatementContext.containsTableSubquery(), () -> new UnsupportedSQLOperationException("Can not support encrypt shorthand expand with subquery statement")); if (each instanceof ShorthandProjectionSegment) { ShorthandProjectionSegment shorthandSegment = (ShorthandProjectionSegment) each; @@ -110,22 +115,19 @@ private void addGenerateSQLTokens(final Collection sqlTokens, final Se } } - private boolean containsTableSubquery(final TableSegment tableSegment) { - if (tableSegment instanceof SubqueryTableSegment) { - return true; - } 
else if (tableSegment instanceof JoinTableSegment) { - JoinTableSegment joinTableSegment = (JoinTableSegment) tableSegment; - return containsTableSubquery(joinTableSegment.getLeft()) || containsTableSubquery(joinTableSegment.getRight()); - } - return false; + private ColumnProjection buildColumnProjection(final ColumnProjectionSegment segment) { + IdentifierValue owner = segment.getColumn().getOwner().map(OwnerSegment::getIdentifier).orElse(null); + ColumnProjection result = new ColumnProjection(owner, segment.getColumn().getIdentifier(), segment.getAliasName().isPresent() ? segment.getAlias().orElse(null) : null, databaseType); + result.setOriginalColumn(segment.getColumn().getColumnBoundedInfo().getOriginalColumn()); + result.setOriginalTable(segment.getColumn().getColumnBoundedInfo().getOriginalTable()); + return result; } private SubstitutableColumnNameToken generateSQLToken(final EncryptColumn encryptColumn, final ColumnProjectionSegment columnSegment, final ColumnProjection columnProjection, final SubqueryType subqueryType) { - Collection projections = generateProjections(encryptColumn, columnProjection, subqueryType, false, null); + Collection projections = generateProjections(encryptColumn, columnProjection, subqueryType, false); int startIndex = columnSegment.getColumn().getOwner().isPresent() ? 
columnSegment.getColumn().getOwner().get().getStopIndex() + 2 : columnSegment.getColumn().getStartIndex(); - int stopIndex = columnSegment.getStopIndex(); - return new SubstitutableColumnNameToken(startIndex, stopIndex, projections); + return new SubstitutableColumnNameToken(startIndex, columnSegment.getStopIndex(), projections); } private SubstitutableColumnNameToken generateSQLToken(final ShorthandProjectionSegment segment, final Collection actualColumns, @@ -133,11 +135,11 @@ private SubstitutableColumnNameToken generateSQLToken(final ShorthandProjectionS List projections = new LinkedList<>(); for (Projection each : actualColumns) { if (each instanceof ColumnProjection) { - Optional encryptTable = encryptRule.findEncryptTable(((ColumnProjection) each).getOriginalTable().getValue()); - if (encryptTable.isPresent() && encryptTable.get().isEncryptColumn(((ColumnProjection) each).getOriginalColumn().getValue()) - && !containsTableSubquery(selectStatementContext.getSqlStatement().getFrom())) { - EncryptColumn encryptColumn = encryptTable.get().getEncryptColumn(((ColumnProjection) each).getName().getValue()); - projections.addAll(generateProjections(encryptColumn, (ColumnProjection) each, subqueryType, true, segment)); + ColumnProjection columnProjection = (ColumnProjection) each; + Optional encryptTable = encryptRule.findEncryptTable(columnProjection.getOriginalTable().getValue()); + if (encryptTable.isPresent() && encryptTable.get().isEncryptColumn(columnProjection.getOriginalColumn().getValue()) && !selectStatementContext.containsTableSubquery()) { + EncryptColumn encryptColumn = encryptTable.get().getEncryptColumn(columnProjection.getOriginalColumn().getValue()); + projections.addAll(generateProjections(encryptColumn, columnProjection, subqueryType, true)); continue; } } @@ -150,67 +152,60 @@ private SubstitutableColumnNameToken generateSQLToken(final ShorthandProjectionS return new SubstitutableColumnNameToken(startIndex, segment.getStopIndex(), projections, 
dialectDatabaseMetaData.getQuoteCharacter()); } - private ColumnProjection buildColumnProjection(final ColumnProjectionSegment segment) { - IdentifierValue owner = segment.getColumn().getOwner().map(OwnerSegment::getIdentifier).orElse(null); - ColumnProjection result = new ColumnProjection(owner, segment.getColumn().getIdentifier(), segment.getAliasName().isPresent() ? segment.getAlias().orElse(null) : null, databaseType); - result.setOriginalColumn(segment.getColumn().getOriginalColumn()); - result.setOriginalTable(segment.getColumn().getOriginalTable()); - return result; - } - - private Collection generateProjections(final EncryptColumn encryptColumn, final ColumnProjection column, - final SubqueryType subqueryType, final boolean shorthand, final ShorthandProjectionSegment segment) { - Collection result = new LinkedList<>(); - if (SubqueryType.PREDICATE_SUBQUERY == subqueryType) { - result.add(distinctOwner(generatePredicateSubqueryProjection(encryptColumn, column), shorthand)); - } else if (SubqueryType.TABLE_SUBQUERY == subqueryType) { - result.addAll(generateTableSubqueryProjections(encryptColumn, column, shorthand)); - } else if (SubqueryType.EXISTS_SUBQUERY == subqueryType) { - result.addAll(generateExistsSubqueryProjections(encryptColumn, column, shorthand)); - } else { - result.add(distinctOwner(generateCommonProjection(encryptColumn, column, segment), shorthand)); + private Collection generateProjections(final EncryptColumn encryptColumn, final ColumnProjection columnProjection, + final SubqueryType subqueryType, final boolean shorthandProjection) { + if (null == subqueryType || SubqueryType.PROJECTION_SUBQUERY == subqueryType) { + return Collections.singleton(generateProjection(encryptColumn, columnProjection, shorthandProjection)); + } else if (SubqueryType.TABLE_SUBQUERY == subqueryType || SubqueryType.JOIN_SUBQUERY == subqueryType) { + return generateProjectionsInTableSegmentSubquery(encryptColumn, columnProjection, shorthandProjection, subqueryType); 
+ } else if (SubqueryType.PREDICATE_SUBQUERY == subqueryType) { + return Collections.singleton(generateProjectionInPredicateSubquery(encryptColumn, columnProjection, shorthandProjection)); + } else if (SubqueryType.INSERT_SELECT_SUBQUERY == subqueryType) { + return generateProjectionsInInsertSelectSubquery(encryptColumn, columnProjection, shorthandProjection); } - return result; + throw new UnsupportedSQLOperationException( + "Projections not in simple select, table subquery, join subquery, predicate subquery and insert select subquery are not supported in encrypt feature."); } - private ColumnProjection distinctOwner(final ColumnProjection column, final boolean shorthand) { - if (shorthand || !column.getOwner().isPresent()) { - return column; - } - return new ColumnProjection(null, column.getName(), column.getAlias().isPresent() ? column.getAlias().get() : null, databaseType); + private ColumnProjection generateProjection(final EncryptColumn encryptColumn, final ColumnProjection columnProjection, final boolean shorthandProjection) { + IdentifierValue encryptColumnOwner = shorthandProjection ? 
columnProjection.getOwner().orElse(null) : null; + String encryptColumnName = encryptColumn.getCipher().getName(); + return new ColumnProjection(encryptColumnOwner, new IdentifierValue(encryptColumnName, columnProjection.getName().getQuoteCharacter()), + columnProjection.getAlias().orElse(columnProjection.getName()), databaseType); } - private ColumnProjection generatePredicateSubqueryProjection(final EncryptColumn encryptColumn, final ColumnProjection column) { - Optional assistedQueryColumn = encryptColumn.getAssistedQuery(); - if (assistedQueryColumn.isPresent()) { - return new ColumnProjection(column.getOwner().orElse(null), new IdentifierValue(assistedQueryColumn.get().getName(), column.getName().getQuoteCharacter()), null, databaseType); - } - String cipherColumn = encryptColumn.getCipher().getName(); - return new ColumnProjection(column.getOwner().orElse(null), new IdentifierValue(cipherColumn, column.getName().getQuoteCharacter()), null, databaseType); - } - - private Collection generateTableSubqueryProjections(final EncryptColumn encryptColumn, final ColumnProjection column, final boolean shorthand) { - Collection result = new LinkedList<>(); - result.add(distinctOwner(new ColumnProjection(column.getOwner().orElse(null), new IdentifierValue(encryptColumn.getCipher().getName(), - column.getName().getQuoteCharacter()), column.getAlias().orElse(column.getName()), databaseType), shorthand)); - encryptColumn.getAssistedQuery().ifPresent(optional -> result.add( - new ColumnProjection(column.getOwner().orElse(null), new IdentifierValue(optional.getName(), column.getName().getQuoteCharacter()), null, databaseType))); + private Collection generateProjectionsInTableSegmentSubquery(final EncryptColumn encryptColumn, final ColumnProjection columnProjection, + final boolean shorthandProjection, final SubqueryType subqueryType) { + Collection result = new LinkedList<>(); + IdentifierValue encryptColumnOwner = shorthandProjection ? 
columnProjection.getOwner().orElse(null) : null; + QuoteCharacter quoteCharacter = columnProjection.getName().getQuoteCharacter(); + IdentifierValue columnName = new IdentifierValue(encryptColumn.getCipher().getName(), quoteCharacter); + IdentifierValue alias = SubqueryType.JOIN_SUBQUERY == subqueryType ? null : columnProjection.getAlias().orElse(columnProjection.getName()); + result.add(new ColumnProjection(encryptColumnOwner, columnName, alias, databaseType)); + IdentifierValue assistedColumOwner = columnProjection.getOwner().orElse(null); + encryptColumn.getAssistedQuery().ifPresent(optional -> result.add(new ColumnProjection(assistedColumOwner, new IdentifierValue(optional.getName(), quoteCharacter), null, databaseType))); + encryptColumn.getLikeQuery().ifPresent(optional -> result.add(new ColumnProjection(assistedColumOwner, new IdentifierValue(optional.getName(), quoteCharacter), null, databaseType))); return result; } - private Collection generateExistsSubqueryProjections(final EncryptColumn encryptColumn, final ColumnProjection column, final boolean shorthand) { - Collection result = new LinkedList<>(); - result.add(distinctOwner(new ColumnProjection(column.getOwner().orElse(null), new IdentifierValue(encryptColumn.getCipher().getName(), - column.getName().getQuoteCharacter()), null, databaseType), shorthand)); - return result; + private ColumnProjection generateProjectionInPredicateSubquery(final EncryptColumn encryptColumn, final ColumnProjection columnProjection, final boolean shorthandProjection) { + IdentifierValue owner = shorthandProjection ? 
columnProjection.getOwner().orElse(null) : null; + QuoteCharacter quoteCharacter = columnProjection.getName().getQuoteCharacter(); + return encryptColumn.getAssistedQuery().map(optional -> new ColumnProjection(owner, new IdentifierValue(optional.getName(), quoteCharacter), null, databaseType)) + .orElseGet(() -> new ColumnProjection(owner, new IdentifierValue(encryptColumn.getCipher().getName(), quoteCharacter), columnProjection.getAlias().orElse(columnProjection.getName()), + databaseType)); } - private ColumnProjection generateCommonProjection(final EncryptColumn encryptColumn, final ColumnProjection column, final ShorthandProjectionSegment segment) { - String queryColumnName = encryptColumn.getCipher().getName(); - IdentifierValue owner = (null == segment || !segment.getOwner().isPresent()) ? column.getOwner().orElse(null) : segment.getOwner().get().getIdentifier(); - return new ColumnProjection(owner, new IdentifierValue(queryColumnName, column.getName().getQuoteCharacter()), column.getAlias().isPresent() - ? column.getAlias().get() - : column.getName(), databaseType); + private Collection generateProjectionsInInsertSelectSubquery(final EncryptColumn encryptColumn, final ColumnProjection columnProjection, final boolean shorthandProjection) { + QuoteCharacter quoteCharacter = columnProjection.getName().getQuoteCharacter(); + IdentifierValue columnName = new IdentifierValue(encryptColumn.getCipher().getName(), quoteCharacter); + Collection result = new LinkedList<>(); + IdentifierValue encryptColumnOwner = shorthandProjection ? 
columnProjection.getOwner().orElse(null) : null; + result.add(new ColumnProjection(encryptColumnOwner, columnName, null, databaseType)); + IdentifierValue assistedColumOwner = columnProjection.getOwner().orElse(null); + encryptColumn.getAssistedQuery().ifPresent(optional -> result.add(new ColumnProjection(assistedColumOwner, new IdentifierValue(optional.getName(), quoteCharacter), null, databaseType))); + encryptColumn.getLikeQuery().ifPresent(optional -> result.add(new ColumnProjection(assistedColumOwner, new IdentifierValue(optional.getName(), quoteCharacter), null, databaseType))); + return result; } private ShorthandProjection getShorthandProjection(final ShorthandProjectionSegment segment, final ProjectionsContext projectionsContext) { diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/insert/EncryptInsertCipherNameTokenGenerator.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/insert/EncryptInsertCipherNameTokenGenerator.java index 0ead81a6c3c24..621d0b60b727d 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/insert/EncryptInsertCipherNameTokenGenerator.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/insert/EncryptInsertCipherNameTokenGenerator.java @@ -20,12 +20,15 @@ import com.google.common.base.Preconditions; import lombok.Setter; import org.apache.shardingsphere.encrypt.rewrite.aware.EncryptRuleAware; +import org.apache.shardingsphere.encrypt.rewrite.token.util.EncryptTokenGeneratorUtils; import org.apache.shardingsphere.encrypt.rule.EncryptRule; import org.apache.shardingsphere.encrypt.rule.EncryptTable; import org.apache.shardingsphere.infra.binder.context.segment.select.projection.Projection; import org.apache.shardingsphere.infra.binder.context.segment.select.projection.impl.ColumnProjection; import 
org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.dml.InsertStatementContext; +import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; +import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnsupportedSQLOperationException; import org.apache.shardingsphere.infra.rewrite.sql.token.generator.CollectionSQLTokenGenerator; import org.apache.shardingsphere.infra.rewrite.sql.token.pojo.SQLToken; import org.apache.shardingsphere.infra.rewrite.sql.token.pojo.generic.SubstitutableColumnNameToken; @@ -58,9 +61,16 @@ public boolean isGenerateSQLToken(final SQLStatementContext sqlStatementContext) public Collection generateSQLTokens(final InsertStatementContext insertStatementContext) { Optional insertColumnsSegment = insertStatementContext.getSqlStatement().getInsertColumns(); Preconditions.checkState(insertColumnsSegment.isPresent()); + Collection insertColumns = insertColumnsSegment.get().getColumns(); + if (null != insertStatementContext.getInsertSelectContext()) { + Collection projections = insertStatementContext.getInsertSelectContext().getSelectStatementContext().getProjectionsContext().getExpandProjections(); + ShardingSpherePreconditions.checkState(insertColumns.size() == projections.size(), () -> new UnsupportedSQLOperationException("Column count doesn't match value count.")); + ShardingSpherePreconditions.checkState(EncryptTokenGeneratorUtils.isAllInsertSelectColumnsUseSameEncryptor(insertColumns, projections, encryptRule), + () -> new UnsupportedSQLOperationException("Can not use different encryptor in insert select columns")); + } EncryptTable encryptTable = encryptRule.getEncryptTable(insertStatementContext.getSqlStatement().getTable().getTableName().getIdentifier().getValue()); Collection result = new LinkedList<>(); - for (ColumnSegment each : insertColumnsSegment.get().getColumns()) { + for (ColumnSegment each : 
insertColumns) { String columnName = each.getIdentifier().getValue(); if (encryptTable.isEncryptColumn(columnName)) { Collection projections = diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptForUseDefaultInsertColumnsTokenGenerator.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/insert/EncryptInsertDefaultColumnsTokenGenerator.java similarity index 77% rename from features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptForUseDefaultInsertColumnsTokenGenerator.java rename to features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/insert/EncryptInsertDefaultColumnsTokenGenerator.java index 2df782b4763e1..c8e8e8e4ea447 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptForUseDefaultInsertColumnsTokenGenerator.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/insert/EncryptInsertDefaultColumnsTokenGenerator.java @@ -15,32 +15,38 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.encrypt.rewrite.token.generator; +package org.apache.shardingsphere.encrypt.rewrite.token.generator.insert; import com.google.common.base.Preconditions; import lombok.Setter; import org.apache.shardingsphere.encrypt.rewrite.aware.EncryptRuleAware; +import org.apache.shardingsphere.encrypt.rewrite.token.util.EncryptTokenGeneratorUtils; import org.apache.shardingsphere.encrypt.rule.EncryptRule; import org.apache.shardingsphere.encrypt.rule.EncryptTable; import org.apache.shardingsphere.encrypt.rule.column.EncryptColumn; +import org.apache.shardingsphere.infra.binder.context.segment.select.projection.Projection; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.dml.InsertStatementContext; +import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; +import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnsupportedSQLOperationException; import org.apache.shardingsphere.infra.rewrite.sql.token.generator.OptionalSQLTokenGenerator; import org.apache.shardingsphere.infra.rewrite.sql.token.generator.aware.PreviousSQLTokensAware; import org.apache.shardingsphere.infra.rewrite.sql.token.pojo.SQLToken; import org.apache.shardingsphere.infra.rewrite.sql.token.pojo.generic.UseDefaultInsertColumnsToken; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.InsertColumnsSegment; +import java.util.Collection; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Optional; /** - * Use default insert columns token generator for encrypt. + * Insert default columns token generator for encrypt. 
*/ @Setter -public final class EncryptForUseDefaultInsertColumnsTokenGenerator implements OptionalSQLTokenGenerator, PreviousSQLTokensAware, EncryptRuleAware { +public final class EncryptInsertDefaultColumnsTokenGenerator implements OptionalSQLTokenGenerator, PreviousSQLTokensAware, EncryptRuleAware { private List previousSQLTokens; @@ -53,7 +59,7 @@ public boolean isGenerateSQLToken(final SQLStatementContext sqlStatementContext) @Override public UseDefaultInsertColumnsToken generateSQLToken(final InsertStatementContext insertStatementContext) { - String tableName = insertStatementContext.getSqlStatement().getTable().getTableName().getIdentifier().getValue(); + String tableName = Optional.ofNullable(insertStatementContext.getSqlStatement().getTable()).map(optional -> optional.getTableName().getIdentifier().getValue()).orElse(""); Optional previousSQLToken = findInsertColumnsToken(); if (previousSQLToken.isPresent()) { processPreviousSQLToken(previousSQLToken.get(), insertStatementContext, tableName); @@ -80,6 +86,13 @@ private void processPreviousSQLToken(final UseDefaultInsertColumnsToken previous private UseDefaultInsertColumnsToken generateNewSQLToken(final InsertStatementContext insertStatementContext, final String tableName) { Optional insertColumnsSegment = insertStatementContext.getSqlStatement().getInsertColumns(); Preconditions.checkState(insertColumnsSegment.isPresent()); + if (null != insertStatementContext.getInsertSelectContext()) { + Collection derivedInsertColumns = insertStatementContext.getSqlStatement().getDerivedInsertColumns(); + Collection projections = insertStatementContext.getInsertSelectContext().getSelectStatementContext().getProjectionsContext().getExpandProjections(); + ShardingSpherePreconditions.checkState(derivedInsertColumns.size() == projections.size(), () -> new UnsupportedSQLOperationException("Column count doesn't match value count.")); + 
ShardingSpherePreconditions.checkState(EncryptTokenGeneratorUtils.isAllInsertSelectColumnsUseSameEncryptor(derivedInsertColumns, projections, encryptRule), + () -> new UnsupportedSQLOperationException("Can not use different encryptor in insert select columns")); + } return new UseDefaultInsertColumnsToken( insertColumnsSegment.get().getStopIndex(), getColumnNames(insertStatementContext, encryptRule.getEncryptTable(tableName), insertStatementContext.getColumnNames())); } diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/util/EncryptTokenGeneratorUtils.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/util/EncryptTokenGeneratorUtils.java new file mode 100644 index 0000000000000..96f8b2ce5fb06 --- /dev/null +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rewrite/token/util/EncryptTokenGeneratorUtils.java @@ -0,0 +1,130 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.encrypt.rewrite.token.util; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.apache.shardingsphere.encrypt.rule.EncryptRule; +import org.apache.shardingsphere.encrypt.rule.EncryptTable; +import org.apache.shardingsphere.encrypt.rule.column.EncryptColumn; +import org.apache.shardingsphere.encrypt.spi.EncryptAlgorithm; +import org.apache.shardingsphere.infra.binder.context.segment.select.projection.Projection; +import org.apache.shardingsphere.infra.binder.context.segment.select.projection.impl.ColumnProjection; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.bounded.ColumnSegmentBoundedInfo; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; + +import java.util.Collection; +import java.util.Iterator; + +/** + * Encrypt token generator utils. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class EncryptTokenGeneratorUtils { + + /** + * Judge whether all join conditions use same encryptor or not. 
+ * + * @param joinConditions join conditions + * @param encryptRule encrypt rule + * @return whether all join conditions use same encryptor or not + */ + public static boolean isAllJoinConditionsUseSameEncryptor(final Collection joinConditions, final EncryptRule encryptRule) { + for (BinaryOperationExpression each : joinConditions) { + if (!(each.getLeft() instanceof ColumnSegment) || !(each.getRight() instanceof ColumnSegment)) { + continue; + } + EncryptAlgorithm leftColumnEncryptor = getColumnEncryptor(((ColumnSegment) each.getLeft()).getColumnBoundedInfo(), encryptRule); + EncryptAlgorithm rightColumnEncryptor = getColumnEncryptor(((ColumnSegment) each.getRight()).getColumnBoundedInfo(), encryptRule); + if (!isSameEncryptor(leftColumnEncryptor, rightColumnEncryptor)) { + return false; + } + } + return true; + } + + /** + * Judge whether all using columns use same encryptor or not. + * + * @param usingColumns using columns + * @param encryptRule encrypt rule + * @return whether all using columns use same encryptor or not + */ + public static boolean isAllUsingConditionsUseSameEncryptor(final Collection usingColumns, final EncryptRule encryptRule) { + for (ColumnSegment each : usingColumns) { + EncryptAlgorithm leftColumnEncryptor = getColumnEncryptor(each.getColumnBoundedInfo(), encryptRule); + EncryptAlgorithm rightColumnEncryptor = getColumnEncryptor(each.getOtherUsingColumnBoundedInfo(), encryptRule); + if (!isSameEncryptor(leftColumnEncryptor, rightColumnEncryptor)) { + return false; + } + } + return true; + } + + private static boolean isSameEncryptor(final EncryptAlgorithm leftColumnEncryptor, final EncryptAlgorithm rightColumnEncryptor) { + if (null != leftColumnEncryptor && null != rightColumnEncryptor) { + if (!leftColumnEncryptor.getType().equals(rightColumnEncryptor.getType())) { + return false; + } + return leftColumnEncryptor.equals(rightColumnEncryptor); + } + return null == leftColumnEncryptor && null == rightColumnEncryptor; + } + + private 
static EncryptAlgorithm getColumnEncryptor(final ColumnSegmentBoundedInfo columnBoundedInfo, final EncryptRule encryptRule) { + String tableName = columnBoundedInfo.getOriginalTable().getValue(); + String columnName = columnBoundedInfo.getOriginalColumn().getValue(); + if (!encryptRule.findEncryptTable(tableName).isPresent() || !encryptRule.getEncryptTable(tableName).isEncryptColumn(columnName)) { + return null; + } + EncryptTable encryptTable = encryptRule.getEncryptTable(tableName); + EncryptColumn encryptColumn = encryptTable.getEncryptColumn(columnName); + if (encryptColumn.getAssistedQuery().isPresent()) { + return encryptColumn.getAssistedQuery().get().getEncryptor(); + } + return encryptColumn.getCipher().getEncryptor(); + } + + /** + * Judge whether all insert select columns use same encryptor or not. + * + * @param insertColumns insert columns + * @param projections projections + * @param encryptRule encrypt rule + * @return whether all insert select columns use same encryptor or not + */ + public static boolean isAllInsertSelectColumnsUseSameEncryptor(final Collection insertColumns, final Collection projections, final EncryptRule encryptRule) { + Iterator insertColumnsIterator = insertColumns.iterator(); + Iterator projectionIterator = projections.iterator(); + while (insertColumnsIterator.hasNext()) { + ColumnSegment columnSegment = insertColumnsIterator.next(); + EncryptAlgorithm leftColumnEncryptor = getColumnEncryptor(columnSegment.getColumnBoundedInfo(), encryptRule); + Projection projection = projectionIterator.next(); + ColumnSegmentBoundedInfo columnBoundedInfo = projection instanceof ColumnProjection + ? 
new ColumnSegmentBoundedInfo(null, null, ((ColumnProjection) projection).getOriginalTable(), ((ColumnProjection) projection).getOriginalColumn()) + : new ColumnSegmentBoundedInfo(new IdentifierValue(projection.getColumnLabel())); + EncryptAlgorithm rightColumnEncryptor = getColumnEncryptor(columnBoundedInfo, encryptRule); + if (!isSameEncryptor(leftColumnEncryptor, rightColumnEncryptor)) { + return false; + } + } + return true; + } +} diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/EncryptRule.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/EncryptRule.java index d923ba65622b2..f4548d0f97074 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/EncryptRule.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/EncryptRule.java @@ -29,10 +29,10 @@ import org.apache.shardingsphere.encrypt.exception.metadata.EncryptTableNotFoundException; import org.apache.shardingsphere.encrypt.spi.EncryptAlgorithm; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; +import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.rule.identifier.scope.DatabaseRule; import org.apache.shardingsphere.infra.rule.identifier.type.TableContainedRule; import org.apache.shardingsphere.infra.rule.identifier.type.TableNamesMapper; -import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import java.util.LinkedHashMap; @@ -56,11 +56,8 @@ public final class EncryptRule implements DatabaseRule, TableContainedRule { public EncryptRule(final String databaseName, final EncryptRuleConfiguration ruleConfig) { this.databaseName = databaseName; configuration = ruleConfig; - @SuppressWarnings("rawtypes") Map standardEncryptors = new LinkedHashMap<>(); - @SuppressWarnings("rawtypes") Map 
assistedEncryptors = new LinkedHashMap<>(); - @SuppressWarnings("rawtypes") Map likeEncryptors = new LinkedHashMap<>(); ruleConfig.getEncryptors().forEach((key, value) -> putAllEncryptors( key, TypedSPILoader.getService(EncryptAlgorithm.class, value.getType(), value.getProps()), standardEncryptors, assistedEncryptors, likeEncryptors)); @@ -82,11 +79,8 @@ public EncryptRule(final String databaseName, final EncryptRuleConfiguration rul public EncryptRule(final String databaseName, final CompatibleEncryptRuleConfiguration ruleConfig) { this.databaseName = databaseName; configuration = ruleConfig; - @SuppressWarnings("rawtypes") Map standardEncryptors = new LinkedHashMap<>(); - @SuppressWarnings("rawtypes") Map assistedEncryptors = new LinkedHashMap<>(); - @SuppressWarnings("rawtypes") Map likeEncryptors = new LinkedHashMap<>(); ruleConfig.getEncryptors().forEach((key, value) -> putAllEncryptors( key, TypedSPILoader.getService(EncryptAlgorithm.class, value.getType(), value.getProps()), standardEncryptors, assistedEncryptors, likeEncryptors)); @@ -99,7 +93,6 @@ public EncryptRule(final String databaseName, final CompatibleEncryptRuleConfigu } } - @SuppressWarnings("rawtypes") private void putAllEncryptors(final String encryptorName, final EncryptAlgorithm algorithm, final Map standardEncryptors, final Map assistedEncryptors, final Map likeEncryptors) { if (algorithm instanceof StandardEncryptAlgorithm) { @@ -113,19 +106,16 @@ private void putAllEncryptors(final String encryptorName, final EncryptAlgorithm } } - @SuppressWarnings("rawtypes") private void checkStandardEncryptorType(final EncryptColumnRuleConfiguration columnRuleConfig, final Map standardEncryptors) { ShardingSpherePreconditions.checkState(standardEncryptors.containsKey(columnRuleConfig.getCipher().getEncryptorName()), () -> new MismatchedEncryptAlgorithmTypeException(databaseName, "Cipher", columnRuleConfig.getCipher().getEncryptorName(), StandardEncryptAlgorithm.class.getSimpleName())); } - 
@SuppressWarnings("rawtypes") private void checkAssistedQueryEncryptorType(final EncryptColumnRuleConfiguration columnRuleConfig, final Map assistedEncryptors) { columnRuleConfig.getAssistedQuery().ifPresent(optional -> ShardingSpherePreconditions.checkState(assistedEncryptors.containsKey(optional.getEncryptorName()), () -> new MismatchedEncryptAlgorithmTypeException(databaseName, "Assisted query", optional.getEncryptorName(), AssistedEncryptAlgorithm.class.getSimpleName()))); } - @SuppressWarnings("rawtypes") private void checkLikeQueryEncryptorType(final EncryptColumnRuleConfiguration columnRuleConfig, final Map likeEncryptors) { columnRuleConfig.getLikeQuery().ifPresent(optional -> ShardingSpherePreconditions.checkState(likeEncryptors.containsKey(optional.getEncryptorName()), () -> new MismatchedEncryptAlgorithmTypeException(databaseName, "Like query", optional.getEncryptorName(), LikeEncryptAlgorithm.class.getSimpleName()))); @@ -172,9 +162,4 @@ public TableNamesMapper getDistributedTableMapper() { public TableNamesMapper getEnhancedTableMapper() { return getLogicTableMapper(); } - - @Override - public String getType() { - return EncryptRule.class.getSimpleName(); - } } diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/EncryptTable.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/EncryptTable.java index 073718b5a74b9..e230de7450d1b 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/EncryptTable.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/EncryptTable.java @@ -64,7 +64,6 @@ private Map createEncryptColumns(final EncryptTableRuleCo return result; } - @SuppressWarnings("rawtypes") private EncryptColumn createEncryptColumn(final EncryptColumnRuleConfiguration config, final Map standardEncryptors, final Map assistedEncryptors, final Map likeEncryptors) { EncryptColumn result = new EncryptColumn(config.getName(), new 
CipherColumnItem(config.getCipher().getName(), standardEncryptors.get(config.getCipher().getEncryptorName()))); @@ -83,8 +82,8 @@ private EncryptColumn createEncryptColumn(final EncryptColumnRuleConfiguration c * @param logicColumnName logic column name * @return found encryptor */ - public Optional> findEncryptor(final String logicColumnName) { - return columns.containsKey(logicColumnName) ? Optional.of((StandardEncryptAlgorithm) columns.get(logicColumnName).getCipher().getEncryptor()) : Optional.empty(); + public Optional findEncryptor(final String logicColumnName) { + return columns.containsKey(logicColumnName) ? Optional.of(columns.get(logicColumnName).getCipher().getEncryptor()) : Optional.empty(); } /** diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/column/item/AssistedQueryColumnItem.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/column/item/AssistedQueryColumnItem.java index a11b2ddb5b41f..9c69ed86c5d11 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/column/item/AssistedQueryColumnItem.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/column/item/AssistedQueryColumnItem.java @@ -35,7 +35,7 @@ public final class AssistedQueryColumnItem { @Getter private final String name; - @SuppressWarnings("rawtypes") + @Getter private final AssistedEncryptAlgorithm encryptor; /** @@ -48,7 +48,6 @@ public final class AssistedQueryColumnItem { * @param originalValue original value * @return assisted query values */ - @SuppressWarnings("unchecked") public Object encrypt(final String databaseName, final String schemaName, final String tableName, final String logicColumnName, final Object originalValue) { if (null == originalValue) { return null; @@ -67,7 +66,6 @@ public Object encrypt(final String databaseName, final String schemaName, final * @param originalValues original values * @return assisted query values */ - 
@SuppressWarnings("unchecked") public List encrypt(final String databaseName, final String schemaName, final String tableName, final String logicColumnName, final List originalValues) { EncryptContext context = EncryptContextBuilder.build(databaseName, schemaName, tableName, logicColumnName); List result = new LinkedList<>(); diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/column/item/CipherColumnItem.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/column/item/CipherColumnItem.java index fd2bc7ba9a293..16a799e4062f8 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/column/item/CipherColumnItem.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/column/item/CipherColumnItem.java @@ -35,7 +35,6 @@ public final class CipherColumnItem { private final String name; - @SuppressWarnings("rawtypes") private final StandardEncryptAlgorithm encryptor; /** @@ -48,7 +47,6 @@ public final class CipherColumnItem { * @param originalValue original value * @return encrypted value */ - @SuppressWarnings("unchecked") public Object encrypt(final String databaseName, final String schemaName, final String tableName, final String logicColumnName, final Object originalValue) { if (null == originalValue) { return null; @@ -67,7 +65,6 @@ public Object encrypt(final String databaseName, final String schemaName, final * @param originalValues original values * @return encrypted values */ - @SuppressWarnings("unchecked") public List encrypt(final String databaseName, final String schemaName, final String tableName, final String logicColumnName, final List originalValues) { EncryptContext context = EncryptContextBuilder.build(databaseName, schemaName, tableName, logicColumnName); List result = new LinkedList<>(); @@ -87,7 +84,6 @@ public List encrypt(final String databaseName, final String schemaName, * @param cipherValue cipher value * @return decrypted 
value */ - @SuppressWarnings("unchecked") public Object decrypt(final String databaseName, final String schemaName, final String tableName, final String logicColumnName, final Object cipherValue) { if (null == cipherValue) { return null; diff --git a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/column/item/LikeQueryColumnItem.java b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/column/item/LikeQueryColumnItem.java index 5a0ebbf5a67b0..4003f6853cb41 100644 --- a/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/column/item/LikeQueryColumnItem.java +++ b/features/encrypt/core/src/main/java/org/apache/shardingsphere/encrypt/rule/column/item/LikeQueryColumnItem.java @@ -35,7 +35,6 @@ public final class LikeQueryColumnItem { @Getter private final String name; - @SuppressWarnings("rawtypes") private final LikeEncryptAlgorithm encryptor; /** @@ -48,7 +47,6 @@ public final class LikeQueryColumnItem { * @param originalValue original value * @return like query values */ - @SuppressWarnings("unchecked") public Object encrypt(final String databaseName, final String schemaName, final String tableName, final String logicColumnName, final Object originalValue) { if (null == originalValue) { return null; @@ -67,7 +65,6 @@ public Object encrypt(final String databaseName, final String schemaName, final * @param originalValues original values * @return like query values */ - @SuppressWarnings("unchecked") public List encrypt(final String databaseName, final String schemaName, final String tableName, final String logicColumnName, final List originalValues) { EncryptContext context = EncryptContextBuilder.build(databaseName, schemaName, tableName, logicColumnName); List result = new LinkedList<>(); diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/assisted/MD5AssistedEncryptAlgorithmTest.java 
b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/assisted/MD5AssistedEncryptAlgorithmTest.java index b50a481f6d924..f2c2207a74735 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/assisted/MD5AssistedEncryptAlgorithmTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/assisted/MD5AssistedEncryptAlgorithmTest.java @@ -33,12 +33,11 @@ class MD5AssistedEncryptAlgorithmTest { - private AssistedEncryptAlgorithm encryptAlgorithm; + private AssistedEncryptAlgorithm encryptAlgorithm; - @SuppressWarnings("unchecked") @BeforeEach void setUp() { - encryptAlgorithm = (AssistedEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "MD5"); + encryptAlgorithm = (AssistedEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "MD5"); } @Test diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/like/CharDigestLikeEncryptAlgorithmTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/like/CharDigestLikeEncryptAlgorithmTest.java index cc8bcc49b86a8..63e3b8f4b09a1 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/like/CharDigestLikeEncryptAlgorithmTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/like/CharDigestLikeEncryptAlgorithmTest.java @@ -17,9 +17,9 @@ package org.apache.shardingsphere.encrypt.algorithm.like; +import org.apache.shardingsphere.encrypt.api.context.EncryptContext; import org.apache.shardingsphere.encrypt.api.encrypt.like.LikeEncryptAlgorithm; import org.apache.shardingsphere.encrypt.spi.EncryptAlgorithm; -import org.apache.shardingsphere.encrypt.api.context.EncryptContext; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.util.PropertiesBuilder; import 
org.apache.shardingsphere.test.util.PropertiesBuilder.Property; @@ -33,18 +33,17 @@ class CharDigestLikeEncryptAlgorithmTest { - private LikeEncryptAlgorithm englishLikeEncryptAlgorithm; + private LikeEncryptAlgorithm englishLikeEncryptAlgorithm; - private LikeEncryptAlgorithm chineseLikeEncryptAlgorithm; + private LikeEncryptAlgorithm chineseLikeEncryptAlgorithm; - private LikeEncryptAlgorithm koreanLikeEncryptAlgorithm; + private LikeEncryptAlgorithm koreanLikeEncryptAlgorithm; - @SuppressWarnings("unchecked") @BeforeEach void setUp() { - englishLikeEncryptAlgorithm = (LikeEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "CHAR_DIGEST_LIKE"); - chineseLikeEncryptAlgorithm = (LikeEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "CHAR_DIGEST_LIKE"); - koreanLikeEncryptAlgorithm = (LikeEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, + englishLikeEncryptAlgorithm = (LikeEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "CHAR_DIGEST_LIKE"); + chineseLikeEncryptAlgorithm = (LikeEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "CHAR_DIGEST_LIKE"); + koreanLikeEncryptAlgorithm = (LikeEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "CHAR_DIGEST_LIKE", PropertiesBuilder.build(new Property("dict", "한국어시험"), new Property("start", "44032"))); } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/standard/AESEncryptAlgorithmTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/standard/AESEncryptAlgorithmTest.java index 39b63f86a1096..766a877538d7e 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/standard/AESEncryptAlgorithmTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/standard/AESEncryptAlgorithmTest.java @@ -18,10 +18,10 @@ package org.apache.shardingsphere.encrypt.algorithm.standard; import 
org.apache.commons.codec.digest.DigestUtils; +import org.apache.shardingsphere.encrypt.api.context.EncryptContext; import org.apache.shardingsphere.encrypt.api.encrypt.standard.StandardEncryptAlgorithm; import org.apache.shardingsphere.encrypt.exception.algorithm.EncryptAlgorithmInitializationException; import org.apache.shardingsphere.encrypt.spi.EncryptAlgorithm; -import org.apache.shardingsphere.encrypt.api.context.EncryptContext; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.util.PropertiesBuilder; import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; @@ -42,12 +42,11 @@ class AESEncryptAlgorithmTest { - private StandardEncryptAlgorithm encryptAlgorithm; + private StandardEncryptAlgorithm encryptAlgorithm; - @SuppressWarnings("unchecked") @BeforeEach void setUp() { - encryptAlgorithm = (StandardEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "AES", PropertiesBuilder.build(new Property("aes-key-value", "test"))); + encryptAlgorithm = (StandardEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "AES", PropertiesBuilder.build(new Property("aes-key-value", "test"))); } @Test diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/standard/RC4EncryptAlgorithmTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/standard/RC4EncryptAlgorithmTest.java index 09304e4cb6950..a87b3591f29a7 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/standard/RC4EncryptAlgorithmTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/algorithm/standard/RC4EncryptAlgorithmTest.java @@ -17,10 +17,10 @@ package org.apache.shardingsphere.encrypt.algorithm.standard; +import org.apache.shardingsphere.encrypt.api.context.EncryptContext; import org.apache.shardingsphere.encrypt.api.encrypt.standard.StandardEncryptAlgorithm; import 
org.apache.shardingsphere.encrypt.exception.algorithm.EncryptAlgorithmInitializationException; import org.apache.shardingsphere.encrypt.spi.EncryptAlgorithm; -import org.apache.shardingsphere.encrypt.api.context.EncryptContext; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.util.PropertiesBuilder; import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; @@ -38,12 +38,11 @@ class RC4EncryptAlgorithmTest { - private StandardEncryptAlgorithm encryptAlgorithm; + private StandardEncryptAlgorithm encryptAlgorithm; - @SuppressWarnings("unchecked") @BeforeEach void setUp() { - encryptAlgorithm = (StandardEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "RC4", PropertiesBuilder.build(new Property("rc4-key-value", "test-sharding"))); + encryptAlgorithm = (StandardEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "RC4", PropertiesBuilder.build(new Property("rc4-key-value", "test-sharding"))); } @Test diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/checker/EncryptRuleConfigurationCheckerTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/checker/EncryptRuleConfigurationCheckerTest.java index 8168f8848ceec..170d57dcfb535 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/checker/EncryptRuleConfigurationCheckerTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/checker/EncryptRuleConfigurationCheckerTest.java @@ -48,8 +48,8 @@ private EncryptRuleConfiguration createValidConfiguration() { EncryptRuleConfiguration result = mock(EncryptRuleConfiguration.class); when(result.getEncryptors()).thenReturn(Collections.singletonMap("aes_encryptor", mock(AlgorithmConfiguration.class))); EncryptColumnRuleConfiguration columnRuleConfig = new EncryptColumnRuleConfiguration("user_id", new EncryptColumnItemRuleConfiguration("user_cipher", 
"aes_encryptor")); - Collection columns = Collections.singletonList(columnRuleConfig); - when(result.getTables()).thenReturn(Collections.singletonList(new EncryptTableRuleConfiguration("t_encrypt", columns))); + Collection columns = Collections.singleton(columnRuleConfig); + when(result.getTables()).thenReturn(Collections.singleton(new EncryptTableRuleConfiguration("t_encrypt", columns))); return result; } @@ -65,8 +65,8 @@ private EncryptRuleConfiguration createInvalidCipherColumnConfig() { EncryptRuleConfiguration result = mock(EncryptRuleConfiguration.class); when(result.getEncryptors()).thenReturn(Collections.emptyMap()); EncryptColumnRuleConfiguration columnRuleConfig = new EncryptColumnRuleConfiguration("user_id", new EncryptColumnItemRuleConfiguration("user_cipher", "aes_encryptor")); - Collection columns = Collections.singletonList(columnRuleConfig); - when(result.getTables()).thenReturn(Collections.singletonList(new EncryptTableRuleConfiguration("t_encrypt", columns))); + Collection columns = Collections.singleton(columnRuleConfig); + when(result.getTables()).thenReturn(Collections.singleton(new EncryptTableRuleConfiguration("t_encrypt", columns))); return result; } @@ -82,9 +82,9 @@ private EncryptRuleConfiguration createInvalidAssistColumnConfig() { EncryptRuleConfiguration result = mock(EncryptRuleConfiguration.class); when(result.getEncryptors()).thenReturn(Collections.emptyMap()); EncryptColumnRuleConfiguration columnRuleConfig = new EncryptColumnRuleConfiguration("user_id", new EncryptColumnItemRuleConfiguration("user_cipher", "aes_encryptor")); - Collection columns = Collections.singletonList(columnRuleConfig); + Collection columns = Collections.singleton(columnRuleConfig); columnRuleConfig.setAssistedQuery(new EncryptColumnItemRuleConfiguration("user_assisted", "aes_assisted_encryptor")); - when(result.getTables()).thenReturn(Collections.singletonList(new EncryptTableRuleConfiguration("t_encrypt", columns))); + 
when(result.getTables()).thenReturn(Collections.singleton(new EncryptTableRuleConfiguration("t_encrypt", columns))); return result; } @@ -100,9 +100,9 @@ private EncryptRuleConfiguration createInvalidLikeColumnConfig() { EncryptRuleConfiguration result = mock(EncryptRuleConfiguration.class); when(result.getEncryptors()).thenReturn(Collections.emptyMap()); EncryptColumnRuleConfiguration columnRuleConfig = new EncryptColumnRuleConfiguration("user_id", new EncryptColumnItemRuleConfiguration("user_cipher", "aes_encryptor")); - Collection columns = Collections.singletonList(columnRuleConfig); + Collection columns = Collections.singleton(columnRuleConfig); columnRuleConfig.setLikeQuery(new EncryptColumnItemRuleConfiguration("user_like", "like_cn_encryptor")); - when(result.getTables()).thenReturn(Collections.singletonList(new EncryptTableRuleConfiguration("t_encrypt", columns))); + when(result.getTables()).thenReturn(Collections.singleton(new EncryptTableRuleConfiguration("t_encrypt", columns))); return result; } } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/fixture/CoreEncryptAlgorithmFixture.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/fixture/CoreEncryptAlgorithmFixture.java index dedd55e116485..3e332ac8e0c86 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/fixture/CoreEncryptAlgorithmFixture.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/fixture/CoreEncryptAlgorithmFixture.java @@ -17,10 +17,10 @@ package org.apache.shardingsphere.encrypt.fixture; -import org.apache.shardingsphere.encrypt.api.encrypt.standard.StandardEncryptAlgorithm; import org.apache.shardingsphere.encrypt.api.context.EncryptContext; +import org.apache.shardingsphere.encrypt.api.encrypt.standard.StandardEncryptAlgorithm; -public final class CoreEncryptAlgorithmFixture implements StandardEncryptAlgorithm { +public final class CoreEncryptAlgorithmFixture 
implements StandardEncryptAlgorithm { @Override public String encrypt(final Object plainValue, final EncryptContext encryptContext) { @@ -28,7 +28,7 @@ public String encrypt(final Object plainValue, final EncryptContext encryptConte } @Override - public Object decrypt(final String cipherValue, final EncryptContext encryptContext) { + public Object decrypt(final Object cipherValue, final EncryptContext encryptContext) { return "decryptValue"; } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/fixture/CoreQueryAssistedEncryptAlgorithmFixture.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/fixture/CoreQueryAssistedEncryptAlgorithmFixture.java index e1cb1138e4e52..38e2ab8b39a03 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/fixture/CoreQueryAssistedEncryptAlgorithmFixture.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/fixture/CoreQueryAssistedEncryptAlgorithmFixture.java @@ -20,7 +20,7 @@ import org.apache.shardingsphere.encrypt.api.context.EncryptContext; import org.apache.shardingsphere.encrypt.api.encrypt.assisted.AssistedEncryptAlgorithm; -public final class CoreQueryAssistedEncryptAlgorithmFixture implements AssistedEncryptAlgorithm { +public final class CoreQueryAssistedEncryptAlgorithmFixture implements AssistedEncryptAlgorithm { @Override public String encrypt(final Object plainValue, final EncryptContext encryptContext) { diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/fixture/CoreQueryLikeEncryptAlgorithmFixture.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/fixture/CoreQueryLikeEncryptAlgorithmFixture.java index f9bce86ec61cf..6fb9e9aa21118 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/fixture/CoreQueryLikeEncryptAlgorithmFixture.java +++ 
b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/fixture/CoreQueryLikeEncryptAlgorithmFixture.java @@ -17,10 +17,10 @@ package org.apache.shardingsphere.encrypt.fixture; -import org.apache.shardingsphere.encrypt.api.encrypt.like.LikeEncryptAlgorithm; import org.apache.shardingsphere.encrypt.api.context.EncryptContext; +import org.apache.shardingsphere.encrypt.api.encrypt.like.LikeEncryptAlgorithm; -public final class CoreQueryLikeEncryptAlgorithmFixture implements LikeEncryptAlgorithm { +public final class CoreQueryLikeEncryptAlgorithmFixture implements LikeEncryptAlgorithm { @Override public String encrypt(final Object plainValue, final EncryptContext encryptContext) { diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/DecoratedEncryptShowColumnsMergedResultTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/DecoratedEncryptShowColumnsMergedResultTest.java index 3bf8ebb14516a..03df83117bb0c 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/DecoratedEncryptShowColumnsMergedResultTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/DecoratedEncryptShowColumnsMergedResultTest.java @@ -135,7 +135,7 @@ void assertGetInputStream() { private DecoratedEncryptShowColumnsMergedResult createDecoratedEncryptShowColumnsMergedResult(final MergedResult mergedResult, final EncryptRule encryptRule) { ShowColumnsStatementContext showColumnsStatementContext = mock(ShowColumnsStatementContext.class); - when(showColumnsStatementContext.getAllTables()).thenReturn(Collections.singletonList(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_encrypt"))))); + when(showColumnsStatementContext.getAllTables()).thenReturn(Collections.singleton(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_encrypt"))))); return new 
DecoratedEncryptShowColumnsMergedResult(mergedResult, showColumnsStatementContext, encryptRule); } } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/DecoratedEncryptShowCreateTableMergedResultTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/DecoratedEncryptShowCreateTableMergedResultTest.java index 32a1849f9d47b..52b3075670a0e 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/DecoratedEncryptShowCreateTableMergedResultTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/DecoratedEncryptShowCreateTableMergedResultTest.java @@ -72,7 +72,7 @@ void assertGetValueWhenConfigAssistedQueryColumn() throws SQLException { + "`user_id_assisted` VARCHAR(100) NOT NULL, `order_id` VARCHAR(30) NOT NULL, PRIMARY KEY (`id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;"); EncryptColumnRuleConfiguration columnRuleConfig = new EncryptColumnRuleConfiguration("user_id", new EncryptColumnItemRuleConfiguration("user_id_cipher", "foo_encryptor")); columnRuleConfig.setAssistedQuery(new EncryptColumnItemRuleConfiguration("user_id_assisted", "foo_assist_query_encryptor")); - DecoratedEncryptShowCreateTableMergedResult actual = createDecoratedEncryptShowCreateTableMergedResult(mergedResult, mockEncryptRule(Collections.singletonList(columnRuleConfig))); + DecoratedEncryptShowCreateTableMergedResult actual = createDecoratedEncryptShowCreateTableMergedResult(mergedResult, mockEncryptRule(Collections.singleton(columnRuleConfig))); assertTrue(actual.next()); assertThat(actual.getValue(2, String.class), is("CREATE TABLE `t_encrypt` (`id` INT NOT NULL, `user_id` VARCHAR(100) NOT NULL, `order_id` VARCHAR(30) NOT NULL, PRIMARY KEY (`id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;")); @@ -86,7 +86,7 @@ void assertGetValueWhenConfigLikeQueryColumn() throws SQLException { + "`user_id_like` VARCHAR(100) NOT NULL, `order_id` 
VARCHAR(30) NOT NULL, PRIMARY KEY (`id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;"); EncryptColumnRuleConfiguration columnRuleConfig = new EncryptColumnRuleConfiguration("user_id", new EncryptColumnItemRuleConfiguration("user_id_cipher", "foo_encryptor")); columnRuleConfig.setLikeQuery(new EncryptColumnItemRuleConfiguration("user_id_like", "foo_like_encryptor")); - DecoratedEncryptShowCreateTableMergedResult actual = createDecoratedEncryptShowCreateTableMergedResult(mergedResult, mockEncryptRule(Collections.singletonList(columnRuleConfig))); + DecoratedEncryptShowCreateTableMergedResult actual = createDecoratedEncryptShowCreateTableMergedResult(mergedResult, mockEncryptRule(Collections.singleton(columnRuleConfig))); assertTrue(actual.next()); assertThat(actual.getValue(2, String.class), is("CREATE TABLE `t_encrypt` (`id` INT NOT NULL, `user_id` VARCHAR(100) NOT NULL, `order_id` VARCHAR(30) NOT NULL," @@ -129,7 +129,7 @@ private DecoratedEncryptShowCreateTableMergedResult createDecoratedEncryptShowCr IdentifierValue identifierValue = new IdentifierValue("t_encrypt"); TableNameSegment tableNameSegment = new TableNameSegment(1, 4, identifierValue); SimpleTableSegment simpleTableSegment = new SimpleTableSegment(tableNameSegment); - when(sqlStatementContext.getAllTables()).thenReturn(Collections.singletonList(simpleTableSegment)); + when(sqlStatementContext.getAllTables()).thenReturn(Collections.singleton(simpleTableSegment)); when(sqlStatementContext.getDatabaseType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "MySQL")); return new DecoratedEncryptShowCreateTableMergedResult(mergedResult, sqlStatementContext, encryptRule); } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/MergedEncryptShowColumnsMergedResultTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/MergedEncryptShowColumnsMergedResultTest.java index e1c746a853875..dd71bc219a991 100644 --- 
a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/MergedEncryptShowColumnsMergedResultTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/MergedEncryptShowColumnsMergedResultTest.java @@ -135,7 +135,7 @@ void assertGetInputStream() { private MergedEncryptShowColumnsMergedResult createMergedEncryptColumnsMergedResult(final QueryResult queryResult, final EncryptRule encryptRule) { SelectStatementContext sqlStatementContext = mock(SelectStatementContext.class); - when(sqlStatementContext.getAllTables()).thenReturn(Collections.singletonList(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_encrypt"))))); + when(sqlStatementContext.getAllTables()).thenReturn(Collections.singleton(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_encrypt"))))); return new MergedEncryptShowColumnsMergedResult(queryResult, sqlStatementContext, encryptRule); } } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/MergedEncryptShowCreateTableMergedResultTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/MergedEncryptShowCreateTableMergedResultTest.java index d4c7809b355f0..3d16057d715d3 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/MergedEncryptShowCreateTableMergedResultTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/merge/dal/show/MergedEncryptShowCreateTableMergedResultTest.java @@ -71,7 +71,7 @@ void assertGetValueWhenConfigAssistedQueryColumn() throws SQLException { + "`user_id_assisted` VARCHAR(100) NOT NULL, `order_id` VARCHAR(30) NOT NULL, PRIMARY KEY (`id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;"); EncryptColumnRuleConfiguration columnRuleConfig = new EncryptColumnRuleConfiguration("user_id", new EncryptColumnItemRuleConfiguration("user_id_cipher", "foo_encryptor")); 
columnRuleConfig.setAssistedQuery(new EncryptColumnItemRuleConfiguration("user_id_assisted", "foo_assist_query_encryptor")); - MergedEncryptShowCreateTableMergedResult actual = createMergedEncryptShowCreateTableMergedResult(queryResult, mockEncryptRule(Collections.singletonList(columnRuleConfig))); + MergedEncryptShowCreateTableMergedResult actual = createMergedEncryptShowCreateTableMergedResult(queryResult, mockEncryptRule(Collections.singleton(columnRuleConfig))); assertTrue(actual.next()); assertThat(actual.getValue(2, String.class), is("CREATE TABLE `t_encrypt` (`id` INT NOT NULL, `user_id` VARCHAR(100) NOT NULL, `order_id` VARCHAR(30) NOT NULL, PRIMARY KEY (`id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;")); @@ -85,7 +85,7 @@ void assertGetValueWhenConfigLikeQueryColumn() throws SQLException { + "`user_id_like` VARCHAR(100) NOT NULL, `order_id` VARCHAR(30) NOT NULL, PRIMARY KEY (`id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;"); EncryptColumnRuleConfiguration columnRuleConfig = new EncryptColumnRuleConfiguration("user_id", new EncryptColumnItemRuleConfiguration("user_id_cipher", "foo_encryptor")); columnRuleConfig.setLikeQuery(new EncryptColumnItemRuleConfiguration("user_id_like", "foo_like_encryptor")); - MergedEncryptShowCreateTableMergedResult actual = createMergedEncryptShowCreateTableMergedResult(queryResult, mockEncryptRule(Collections.singletonList(columnRuleConfig))); + MergedEncryptShowCreateTableMergedResult actual = createMergedEncryptShowCreateTableMergedResult(queryResult, mockEncryptRule(Collections.singleton(columnRuleConfig))); assertTrue(actual.next()); assertThat(actual.getValue(2, String.class), is("CREATE TABLE `t_encrypt` (`id` INT NOT NULL, `user_id` VARCHAR(100) NOT NULL, `order_id` VARCHAR(30) NOT NULL, PRIMARY KEY (`id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;")); @@ -108,7 +108,7 @@ private MergedEncryptShowCreateTableMergedResult createMergedEncryptShowCreateTa IdentifierValue identifierValue = new IdentifierValue("t_encrypt"); 
TableNameSegment tableNameSegment = new TableNameSegment(1, 4, identifierValue); SimpleTableSegment simpleTableSegment = new SimpleTableSegment(tableNameSegment); - when(sqlStatementContext.getAllTables()).thenReturn(Collections.singletonList(simpleTableSegment)); + when(sqlStatementContext.getAllTables()).thenReturn(Collections.singleton(simpleTableSegment)); when(sqlStatementContext.getDatabaseType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "MySQL")); return new MergedEncryptShowCreateTableMergedResult(queryResult, sqlStatementContext, encryptRule); } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/parameter/EncryptParameterRewriterBuilderTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/parameter/EncryptParameterRewriterBuilderTest.java index 69001363c4991..ead6110bab755 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/parameter/EncryptParameterRewriterBuilderTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/parameter/EncryptParameterRewriterBuilderTest.java @@ -44,7 +44,7 @@ void assertGetParameterRewritersWhenPredicateIsNeedRewrite() { EncryptRule encryptRule = mock(EncryptRule.class, RETURNS_DEEP_STUBS); when(encryptRule.findEncryptTable("t_order").isPresent()).thenReturn(true); SQLStatementContext sqlStatementContext = mock(SelectStatementContext.class, RETURNS_DEEP_STUBS); - when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singletonList("t_order")); + when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singleton("t_order")); Collection actual = new EncryptParameterRewriterBuilder( encryptRule, DefaultDatabase.LOGIC_NAME, Collections.singletonMap("test", mock(ShardingSphereSchema.class)), sqlStatementContext, Collections.emptyList()).getParameterRewriters(); assertThat(actual.size(), is(1)); @@ -55,7 +55,7 @@ 
void assertGetParameterRewritersWhenPredicateIsNeedRewrite() { void assertGetParameterRewritersWhenPredicateIsNotNeedRewrite() { EncryptRule encryptRule = mock(EncryptRule.class, RETURNS_DEEP_STUBS); SelectStatementContext sqlStatementContext = mock(SelectStatementContext.class, RETURNS_DEEP_STUBS); - when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singletonList("t_order")); + when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singleton("t_order")); when(sqlStatementContext.getWhereSegments()).thenReturn(Collections.emptyList()); assertTrue(new EncryptParameterRewriterBuilder(encryptRule, DefaultDatabase.LOGIC_NAME, Collections.singletonMap("test", mock(ShardingSphereSchema.class)), sqlStatementContext, Collections.emptyList()).getParameterRewriters().isEmpty()); diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/pojo/EncryptPredicateEqualRightValueTokenTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/pojo/EncryptPredicateEqualRightValueTokenTest.java index 9436de9412c3d..d51679777531a 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/pojo/EncryptPredicateEqualRightValueTokenTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/pojo/EncryptPredicateEqualRightValueTokenTest.java @@ -39,7 +39,7 @@ void assertToStringWithoutPlaceholderWithoutTableOwnerWithEqual() { @Test void assertToStringWithPlaceholderWithoutTableOwnerWithEqual() { - EncryptPredicateEqualRightValueToken actual = new EncryptPredicateEqualRightValueToken(0, 0, Collections.emptyMap(), Collections.singletonList(0)); + EncryptPredicateEqualRightValueToken actual = new EncryptPredicateEqualRightValueToken(0, 0, Collections.emptyMap(), Collections.singleton(0)); assertThat(actual.toString(), is("?")); } } diff --git 
a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/pojo/EncryptPredicateInRightValueTokenTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/pojo/EncryptPredicateInRightValueTokenTest.java index 3d69de125fd9e..77f6205d94f86 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/pojo/EncryptPredicateInRightValueTokenTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/pojo/EncryptPredicateInRightValueTokenTest.java @@ -40,7 +40,7 @@ void assertToStringWithoutPlaceholderWithoutTableOwnerWithIn() { @Test void assertToStringWithPlaceholderWithoutTableOwnerWithIn() { - EncryptPredicateInRightValueToken actual = new EncryptPredicateInRightValueToken(0, 0, Collections.emptyMap(), Collections.singletonList(0)); + EncryptPredicateInRightValueToken actual = new EncryptPredicateInRightValueToken(0, 0, Collections.emptyMap(), Collections.singleton(0)); assertThat(actual.toString(), is("(?)")); } } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/EncryptTokenGenerateBuilderTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/EncryptTokenGenerateBuilderTest.java index 87914d10412e3..6b7430269ae50 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/EncryptTokenGenerateBuilderTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/EncryptTokenGenerateBuilderTest.java @@ -55,8 +55,8 @@ class EncryptTokenGenerateBuilderTest { void assertGetSQLTokenGenerators() { SelectStatementContext selectStatementContext = mock(SelectStatementContext.class, RETURNS_DEEP_STUBS); when(selectStatementContext.getAllTables().isEmpty()).thenReturn(false); - when(selectStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singletonList("table")); - 
when(selectStatementContext.getOrderByContext().getItems()).thenReturn(Collections.singletonList(mock(OrderByItem.class))); + when(selectStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singleton("table")); + when(selectStatementContext.getOrderByContext().getItems()).thenReturn(Collections.singleton(mock(OrderByItem.class))); when(selectStatementContext.getGroupByContext().getItems()).thenReturn(Collections.emptyList()); when(selectStatementContext.getWhereSegments()).thenReturn(Collections.emptyList()); EncryptTokenGenerateBuilder encryptTokenGenerateBuilder = new EncryptTokenGenerateBuilder(encryptRule, selectStatementContext, Collections.emptyList(), DefaultDatabase.LOGIC_NAME); @@ -85,7 +85,7 @@ private void assertField(final SQLTokenGenerator sqlTokenGenerator, final Object private Field findField(final Class clazz, final String fieldName, final Class fieldType) { Class searchClass = clazz; while (null != searchClass && !Object.class.equals(searchClass)) { - for (final Field each : searchClass.getDeclaredFields()) { + for (Field each : searchClass.getDeclaredFields()) { if (fieldName.equals(each.getName()) && fieldType.equals(each.getType())) { return each; } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptCreateTableTokenGeneratorTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptCreateTableTokenGeneratorTest.java index 60b83bf41d9d0..c1110657335ab 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptCreateTableTokenGeneratorTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptCreateTableTokenGeneratorTest.java @@ -103,7 +103,7 @@ private CreateTableStatementContext mockCreateTableStatementContext() { CreateTableStatementContext result = mock(CreateTableStatementContext.class, 
RETURNS_DEEP_STUBS); when(result.getSqlStatement().getTable().getTableName().getIdentifier().getValue()).thenReturn("t_encrypt"); ColumnDefinitionSegment segment = new ColumnDefinitionSegment(25, 78, new ColumnSegment(25, 42, new IdentifierValue("certificate_number")), new DataTypeSegment(), false, false); - when(result.getSqlStatement().getColumnDefinitions()).thenReturn(Collections.singletonList(segment)); + when(result.getSqlStatement().getColumnDefinitions()).thenReturn(Collections.singleton(segment)); return result; } } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptGroupByItemTokenGeneratorTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptGroupByItemTokenGeneratorTest.java new file mode 100644 index 0000000000000..08ce51aa10039 --- /dev/null +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptGroupByItemTokenGeneratorTest.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.encrypt.rewrite.token.generator; + +import org.apache.shardingsphere.encrypt.rule.EncryptRule; +import org.apache.shardingsphere.encrypt.rule.EncryptTable; +import org.apache.shardingsphere.encrypt.rule.column.EncryptColumn; +import org.apache.shardingsphere.infra.binder.context.segment.select.orderby.OrderByItem; +import org.apache.shardingsphere.infra.binder.context.segment.table.TablesContext; +import org.apache.shardingsphere.infra.binder.context.statement.dml.SelectStatementContext; +import org.apache.shardingsphere.infra.database.core.metadata.database.enums.NullsOrderType; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.apache.shardingsphere.sql.parser.sql.common.enums.OrderDirection; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.order.item.ColumnOrderByItemSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.AliasSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableNameSegment; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.util.Collections; +import java.util.Optional; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +class 
EncryptGroupByItemTokenGeneratorTest { + + private final EncryptGroupByItemTokenGenerator generator = new EncryptGroupByItemTokenGenerator(); + + private final DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, "FIXTURE"); + + @BeforeEach + void setup() { + generator.setEncryptRule(mockEncryptRule()); + generator.setDatabaseName("db_schema"); + generator.setSchemas(Collections.singletonMap("test", mock(ShardingSphereSchema.class))); + } + + private EncryptRule mockEncryptRule() { + EncryptRule result = mock(EncryptRule.class); + EncryptTable encryptTable = mock(EncryptTable.class); + when(encryptTable.isEncryptColumn("certificate_number")).thenReturn(true); + EncryptColumn encryptColumn = mock(EncryptColumn.class, RETURNS_DEEP_STUBS); + when(encryptColumn.getAssistedQuery()).thenReturn(Optional.empty()); + when(encryptTable.getEncryptColumn("certificate_number")).thenReturn(encryptColumn); + when(result.findEncryptTable("t_encrypt")).thenReturn(Optional.of(encryptTable)); + return result; + } + + @Test + void assertGenerateSQLTokens() { + assertThat(generator.generateSQLTokens(buildSelectStatementContext()).size(), is(1)); + } + + private SelectStatementContext buildSelectStatementContext() { + SimpleTableSegment simpleTableSegment = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_encrypt"))); + simpleTableSegment.setAlias(new AliasSegment(0, 0, new IdentifierValue("a"))); + ColumnSegment columnSegment = new ColumnSegment(0, 0, new IdentifierValue("certificate_number")); + columnSegment.setOwner(new OwnerSegment(0, 0, new IdentifierValue("a"))); + SelectStatementContext result = mock(SelectStatementContext.class, RETURNS_DEEP_STUBS); + when(result.getDatabaseType()).thenReturn(databaseType); + ColumnOrderByItemSegment columnOrderByItemSegment = new ColumnOrderByItemSegment(columnSegment, OrderDirection.ASC, NullsOrderType.FIRST); + OrderByItem orderByItem = new OrderByItem(columnOrderByItemSegment); + 
when(result.getGroupByContext().getItems()).thenReturn(Collections.singleton(orderByItem)); + when(result.getSubqueryContexts().values()).thenReturn(Collections.emptyList()); + when(result.getTablesContext()).thenReturn(new TablesContext(Collections.singleton(simpleTableSegment), databaseType)); + return result; + } +} diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptOrderByItemTokenGeneratorTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptOrderByItemTokenGeneratorTest.java index 451fdea27d0a9..387b52f4dc30d 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptOrderByItemTokenGeneratorTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptOrderByItemTokenGeneratorTest.java @@ -17,6 +17,7 @@ package org.apache.shardingsphere.encrypt.rewrite.token.generator; +import org.apache.shardingsphere.encrypt.exception.syntax.UnsupportedEncryptSQLException; import org.apache.shardingsphere.encrypt.rule.EncryptRule; import org.apache.shardingsphere.encrypt.rule.EncryptTable; import org.apache.shardingsphere.encrypt.rule.column.EncryptColumn; @@ -41,8 +42,7 @@ import java.util.Collections; import java.util.Optional; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -73,7 +73,7 @@ private EncryptRule mockEncryptRule() { @Test void assertGenerateSQLTokens() { - assertThat(generator.generateSQLTokens(buildSelectStatementContext()).size(), is(1)); + assertThrows(UnsupportedEncryptSQLException.class, () -> generator.generateSQLTokens(buildSelectStatementContext())); } private 
SelectStatementContext buildSelectStatementContext() { diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptPredicateColumnTokenGeneratorTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptPredicateColumnTokenGeneratorTest.java index 164f92b370562..a0be83c9cd16b 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptPredicateColumnTokenGeneratorTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptPredicateColumnTokenGeneratorTest.java @@ -19,6 +19,7 @@ import org.apache.shardingsphere.encrypt.rewrite.token.generator.fixture.EncryptGeneratorFixtureBuilder; import org.apache.shardingsphere.infra.database.core.DefaultDatabase; +import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnsupportedSQLOperationException; import org.apache.shardingsphere.infra.rewrite.sql.token.pojo.SQLToken; import org.apache.shardingsphere.infra.rewrite.sql.token.pojo.generic.SubstitutableColumnNameToken; import org.junit.jupiter.api.BeforeEach; @@ -29,6 +30,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; class EncryptPredicateColumnTokenGeneratorTest { @@ -55,4 +57,11 @@ void assertGenerateSQLTokenFromGenerateNewSQLToken() { assertThat(substitutableColumnNameTokens.size(), is(1)); assertThat(((SubstitutableColumnNameToken) substitutableColumnNameTokens.iterator().next()).toString(null), is("pwd_assist")); } + + @Test + void assertGenerateSQLTokensWhenJoinConditionUseDifferentEncryptor() { + generator.setDatabaseName(DefaultDatabase.LOGIC_NAME); + generator.setSchemas(Collections.emptyMap()); + assertThrows(UnsupportedSQLOperationException.class, () -> 
generator.generateSQLTokens(EncryptGeneratorFixtureBuilder.createSelectStatementContext())); + } } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptProjectionTokenGeneratorTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptProjectionTokenGeneratorTest.java index d30545ceab74f..346af625002e0 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptProjectionTokenGeneratorTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptProjectionTokenGeneratorTest.java @@ -23,21 +23,20 @@ import org.apache.shardingsphere.infra.binder.context.segment.select.projection.impl.ColumnProjection; import org.apache.shardingsphere.infra.binder.context.segment.table.TablesContext; import org.apache.shardingsphere.infra.binder.context.statement.dml.SelectStatementContext; +import org.apache.shardingsphere.infra.database.core.DefaultDatabase; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnsupportedSQLOperationException; import org.apache.shardingsphere.infra.rewrite.sql.token.pojo.SQLToken; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; -import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ColumnProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionsSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ShorthandProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.AliasSegment; import 
org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.bounded.ColumnSegmentBoundedInfo; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; -import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SubqueryTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableNameSegment; -import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement; import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -85,18 +84,19 @@ void assertGenerateSQLTokensWhenOwnerMatchTableAlias() { SimpleTableSegment doctorTable = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("doctor"))); doctorTable.setAlias(new AliasSegment(0, 0, new IdentifierValue("a"))); ColumnSegment column = new ColumnSegment(0, 0, new IdentifierValue("mobile")); - column.setOriginalColumn(new IdentifierValue("mobile")); - column.setOriginalTable(new IdentifierValue("doctor")); + column.setColumnBoundedInfo(new ColumnSegmentBoundedInfo(new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue("doctor"), + new IdentifierValue("mobile"))); column.setOwner(new OwnerSegment(0, 0, new IdentifierValue("a"))); ProjectionsSegment projections = mock(ProjectionsSegment.class); - when(projections.getProjections()).thenReturn(Collections.singletonList(new ColumnProjectionSegment(column))); + when(projections.getProjections()).thenReturn(Collections.singleton(new ColumnProjectionSegment(column))); SelectStatementContext sqlStatementContext = mock(SelectStatementContext.class, RETURNS_DEEP_STUBS); + when(sqlStatementContext.getSubqueryType()).thenReturn(null); 
when(sqlStatementContext.getDatabaseType()).thenReturn(databaseType); when(sqlStatementContext.getSqlStatement().getProjections()).thenReturn(projections); when(sqlStatementContext.getSubqueryContexts().values()).thenReturn(Collections.emptyList()); SimpleTableSegment doctorOneTable = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("doctor1"))); when(sqlStatementContext.getTablesContext()).thenReturn(new TablesContext(Arrays.asList(doctorTable, doctorOneTable), databaseType)); - when(sqlStatementContext.getProjectionsContext().getProjections()).thenReturn(Collections.singletonList(new ColumnProjection("a", "mobile", null, databaseType))); + when(sqlStatementContext.getProjectionsContext().getProjections()).thenReturn(Collections.singleton(new ColumnProjection("a", "mobile", null, databaseType))); Collection actual = generator.generateSQLTokens(sqlStatementContext); assertThat(actual.size(), is(1)); } @@ -106,18 +106,19 @@ void assertGenerateSQLTokensWhenOwnerMatchTableAliasForSameTable() { SimpleTableSegment doctorTable = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("doctor"))); doctorTable.setAlias(new AliasSegment(0, 0, new IdentifierValue("a"))); ColumnSegment column = new ColumnSegment(0, 0, new IdentifierValue("mobile")); - column.setOriginalColumn(new IdentifierValue("mobile")); - column.setOriginalTable(new IdentifierValue("doctor")); + column.setColumnBoundedInfo(new ColumnSegmentBoundedInfo(new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue("doctor"), + new IdentifierValue("mobile"))); column.setOwner(new OwnerSegment(0, 0, new IdentifierValue("a"))); ProjectionsSegment projections = mock(ProjectionsSegment.class); - when(projections.getProjections()).thenReturn(Collections.singletonList(new ColumnProjectionSegment(column))); + when(projections.getProjections()).thenReturn(Collections.singleton(new ColumnProjectionSegment(column))); 
SelectStatementContext sqlStatementContext = mock(SelectStatementContext.class, RETURNS_DEEP_STUBS); + when(sqlStatementContext.getSubqueryType()).thenReturn(null); when(sqlStatementContext.getDatabaseType()).thenReturn(databaseType); when(sqlStatementContext.getSqlStatement().getProjections()).thenReturn(projections); when(sqlStatementContext.getSubqueryContexts().values()).thenReturn(Collections.emptyList()); SimpleTableSegment sameDoctorTable = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("doctor"))); when(sqlStatementContext.getTablesContext()).thenReturn(new TablesContext(Arrays.asList(doctorTable, sameDoctorTable), databaseType)); - when(sqlStatementContext.getProjectionsContext().getProjections()).thenReturn(Collections.singletonList(new ColumnProjection("a", "mobile", null, databaseType))); + when(sqlStatementContext.getProjectionsContext().getProjections()).thenReturn(Collections.singleton(new ColumnProjection("a", "mobile", null, databaseType))); Collection actual = generator.generateSQLTokens(sqlStatementContext); assertThat(actual.size(), is(1)); } @@ -127,15 +128,16 @@ void assertGenerateSQLTokensWhenOwnerMatchTableName() { ColumnSegment column = new ColumnSegment(0, 0, new IdentifierValue("mobile")); column.setOwner(new OwnerSegment(0, 0, new IdentifierValue("doctor"))); ProjectionsSegment projections = mock(ProjectionsSegment.class); - when(projections.getProjections()).thenReturn(Collections.singletonList(new ColumnProjectionSegment(column))); + when(projections.getProjections()).thenReturn(Collections.singleton(new ColumnProjectionSegment(column))); SelectStatementContext sqlStatementContext = mock(SelectStatementContext.class, RETURNS_DEEP_STUBS); + when(sqlStatementContext.getSubqueryType()).thenReturn(null); when(sqlStatementContext.getDatabaseType()).thenReturn(databaseType); when(sqlStatementContext.getSqlStatement().getProjections()).thenReturn(projections); 
when(sqlStatementContext.getSubqueryContexts().values()).thenReturn(Collections.emptyList()); SimpleTableSegment doctorTable = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("doctor"))); SimpleTableSegment doctorOneTable = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("doctor1"))); when(sqlStatementContext.getTablesContext()).thenReturn(new TablesContext(Arrays.asList(doctorTable, doctorOneTable), databaseType)); - when(sqlStatementContext.getProjectionsContext().getProjections()).thenReturn(Collections.singletonList(new ColumnProjection("doctor", "mobile", null, databaseType))); + when(sqlStatementContext.getProjectionsContext().getProjections()).thenReturn(Collections.singleton(new ColumnProjection("doctor", "mobile", null, databaseType))); Collection actual = generator.generateSQLTokens(sqlStatementContext); assertThat(actual.size(), is(1)); } @@ -143,7 +145,7 @@ void assertGenerateSQLTokensWhenOwnerMatchTableName() { @Test void assertGenerateSQLTokensWhenShorthandExpandContainsSubqueryTable() { SelectStatementContext sqlStatementContext = mock(SelectStatementContext.class, RETURNS_DEEP_STUBS); - when(sqlStatementContext.getSqlStatement().getFrom()).thenReturn(new SubqueryTableSegment(new SubquerySegment(0, 0, mock(SelectStatement.class)))); + when(sqlStatementContext.containsTableSubquery()).thenReturn(true); when(sqlStatementContext.getSqlStatement().getProjections().getProjections()).thenReturn(Collections.singleton(new ShorthandProjectionSegment(0, 0))); assertThrows(UnsupportedSQLOperationException.class, () -> generator.generateSQLTokens(sqlStatementContext)); } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/fixture/EncryptGeneratorFixtureBuilder.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/fixture/EncryptGeneratorFixtureBuilder.java index 6402f33f899be..d6053fac6d58e 100644 --- 
a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/fixture/EncryptGeneratorFixtureBuilder.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/fixture/EncryptGeneratorFixtureBuilder.java @@ -25,7 +25,9 @@ import org.apache.shardingsphere.encrypt.api.config.rule.EncryptTableRuleConfiguration; import org.apache.shardingsphere.encrypt.rewrite.token.pojo.EncryptInsertValuesToken; import org.apache.shardingsphere.encrypt.rule.EncryptRule; +import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.dml.InsertStatementContext; +import org.apache.shardingsphere.infra.binder.context.statement.dml.SelectStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.dml.UpdateStatementContext; import org.apache.shardingsphere.infra.config.algorithm.AlgorithmConfiguration; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; @@ -46,12 +48,17 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.ParameterMarkerExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ColumnProjectionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionsSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.bounded.ColumnSegmentBoundedInfo; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; import 
org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableNameSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLInsertStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLSelectStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLUpdateStatement; import java.util.ArrayList; @@ -86,8 +93,11 @@ public static EncryptRule createEncryptRule() { new EncryptColumnRuleConfiguration("pwd", new EncryptColumnItemRuleConfiguration("pwd_cipher", "standard_encryptor")); pwdColumnConfig.setAssistedQuery(new EncryptColumnItemRuleConfiguration("pwd_assist", "assisted_encryptor")); pwdColumnConfig.setLikeQuery(new EncryptColumnItemRuleConfiguration("pwd_like", "like_encryptor")); + EncryptColumnRuleConfiguration userNameColumnConfig = new EncryptColumnRuleConfiguration("user_name", new EncryptColumnItemRuleConfiguration("user_name_cipher", "standard_encryptor")); + EncryptColumnRuleConfiguration userIdColumnConfig = new EncryptColumnRuleConfiguration("user_id", new EncryptColumnItemRuleConfiguration("user_id_cipher", "standard_encryptor")); + userIdColumnConfig.setAssistedQuery(new EncryptColumnItemRuleConfiguration("user_id_assist", "assisted_encryptor")); return new EncryptRule("foo_db", - new EncryptRuleConfiguration(Collections.singleton(new EncryptTableRuleConfiguration("t_user", Collections.singletonList(pwdColumnConfig))), encryptors)); + new EncryptRuleConfiguration(Collections.singleton(new EncryptTableRuleConfiguration("t_user", Arrays.asList(pwdColumnConfig, userNameColumnConfig, userIdColumnConfig))), encryptors)); } /** @@ -119,6 +129,35 @@ private static InsertStatement createInsertStatement() { return result; } + private static InsertStatement 
createInsertSelectStatement(final boolean containsInsertColumns) { + InsertStatement result = new MySQLInsertStatement(); + result.setTable(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_user")))); + ColumnSegment userIdColumn = new ColumnSegment(0, 0, new IdentifierValue("user_id")); + userIdColumn.setColumnBoundedInfo(new ColumnSegmentBoundedInfo(new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue("t_user"), + new IdentifierValue("user_id"))); + ColumnSegment userNameColumn = new ColumnSegment(0, 0, new IdentifierValue("user_name")); + userNameColumn.setColumnBoundedInfo(new ColumnSegmentBoundedInfo(new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue(DefaultDatabase.LOGIC_NAME), + new IdentifierValue("t_user"), new IdentifierValue("user_name"))); + List insertColumns = Arrays.asList(userIdColumn, userNameColumn); + if (containsInsertColumns) { + result.setInsertColumns(new InsertColumnsSegment(0, 0, insertColumns)); + } else { + result.setInsertColumns(new InsertColumnsSegment(0, 0, Collections.emptyList())); + result.getDerivedInsertColumns().addAll(insertColumns); + } + MySQLSelectStatement selectStatement = new MySQLSelectStatement(); + selectStatement.setFrom(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_user")))); + ProjectionsSegment projections = new ProjectionsSegment(0, 0); + projections.getProjections().add(new ColumnProjectionSegment(userIdColumn)); + ColumnSegment statusColumn = new ColumnSegment(0, 0, new IdentifierValue("status")); + statusColumn.setColumnBoundedInfo(new ColumnSegmentBoundedInfo(new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue("t_user"), + new IdentifierValue("status"))); + projections.getProjections().add(new ColumnProjectionSegment(statusColumn)); + selectStatement.setProjections(projections); + result.setInsertSelect(new 
SubquerySegment(0, 0, selectStatement)); + return result; + } + /** * Create update statement context. * @@ -164,4 +203,40 @@ private static List createValueExpressions() { result.add(new ParameterMarkerExpressionSegment(0, 0, 4)); return result; } + + /** + * Create select statement context. + * + * @return select statement context + */ + public static SQLStatementContext createSelectStatementContext() { + ColumnSegment leftColumn = new ColumnSegment(0, 0, new IdentifierValue("user_name")); + leftColumn.setColumnBoundedInfo(new ColumnSegmentBoundedInfo(new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue("t_user"), + new IdentifierValue("user_name"))); + ColumnSegment rightColumn = new ColumnSegment(0, 0, new IdentifierValue("user_id")); + rightColumn.setColumnBoundedInfo(new ColumnSegmentBoundedInfo(new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue("t_user"), + new IdentifierValue("user_id"))); + SelectStatementContext result = mock(SelectStatementContext.class); + when(result.getJoinConditions()).thenReturn(Collections.singleton(new BinaryOperationExpression(0, 0, leftColumn, rightColumn, "=", ""))); + return result; + } + + /** + * Create insert select statement context. 
+ * + * @param params parameters + * @param containsInsertColumns contains insert columns + * @return created insert select statement context + */ + public static InsertStatementContext createInsertSelectStatementContext(final List params, final boolean containsInsertColumns) { + InsertStatement insertStatement = createInsertSelectStatement(containsInsertColumns); + ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); + ShardingSphereSchema schema = mock(ShardingSphereSchema.class); + when(database.getSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(schema); + when(schema.getAllColumnNames("t_user")).thenReturn(Arrays.asList("user_id", "user_name", "pwd")); + ShardingSphereMetaData metaData = new ShardingSphereMetaData( + Collections.singletonMap(DefaultDatabase.LOGIC_NAME, database), mock(ResourceMetaData.class), + mock(RuleMetaData.class), mock(ConfigurationProperties.class)); + return new InsertStatementContext(metaData, params, insertStatement, DefaultDatabase.LOGIC_NAME); + } } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/insert/EncryptInsertCipherNameTokenGeneratorTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/insert/EncryptInsertCipherNameTokenGeneratorTest.java index c1a18ef9a84e5..f3242b9b9ff5b 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/insert/EncryptInsertCipherNameTokenGeneratorTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/insert/EncryptInsertCipherNameTokenGeneratorTest.java @@ -19,6 +19,7 @@ import org.apache.shardingsphere.encrypt.rewrite.token.generator.fixture.EncryptGeneratorFixtureBuilder; import org.apache.shardingsphere.infra.binder.context.statement.dml.SelectStatementContext; +import 
org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnsupportedSQLOperationException; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -27,6 +28,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; @@ -53,4 +55,9 @@ void assertIsGenerateSQLTokenWithInsertStatementContext() { void assertGenerateSQLTokensWithInsertStatementContext() { assertThat(generator.generateSQLTokens(EncryptGeneratorFixtureBuilder.createInsertStatementContext(Collections.emptyList())).size(), is(1)); } + + @Test + void assertGenerateSQLTokensWhenInsertColumnsUseDifferentEncryptorWithSelectProjection() { + assertThrows(UnsupportedSQLOperationException.class, () -> generator.generateSQLTokens(EncryptGeneratorFixtureBuilder.createInsertSelectStatementContext(Collections.emptyList(), true))); + } } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptForUseDefaultInsertColumnsTokenGeneratorTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/insert/EncryptInsertDefaultColumnsTokenGeneratorTest.java similarity index 75% rename from features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptForUseDefaultInsertColumnsTokenGeneratorTest.java rename to features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/insert/EncryptInsertDefaultColumnsTokenGeneratorTest.java index accff00c5370f..320fdeaf8bc0f 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/EncryptForUseDefaultInsertColumnsTokenGeneratorTest.java +++ 
b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rewrite/token/generator/insert/EncryptInsertDefaultColumnsTokenGeneratorTest.java @@ -15,9 +15,10 @@ * limitations under the License. */ -package org.apache.shardingsphere.encrypt.rewrite.token.generator; +package org.apache.shardingsphere.encrypt.rewrite.token.generator.insert; import org.apache.shardingsphere.encrypt.rewrite.token.generator.fixture.EncryptGeneratorFixtureBuilder; +import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnsupportedSQLOperationException; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -26,10 +27,11 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; -class EncryptForUseDefaultInsertColumnsTokenGeneratorTest { +class EncryptInsertDefaultColumnsTokenGeneratorTest { - private final EncryptForUseDefaultInsertColumnsTokenGenerator generator = new EncryptForUseDefaultInsertColumnsTokenGenerator(); + private final EncryptInsertDefaultColumnsTokenGenerator generator = new EncryptInsertDefaultColumnsTokenGenerator(); @BeforeEach void setup() { @@ -54,4 +56,10 @@ void assertGenerateSQLTokenFromPreviousSQLTokens() { assertThat(generator.generateSQLToken(EncryptGeneratorFixtureBuilder.createInsertStatementContext(Collections.emptyList())).toString(), is("(id, name, status, pwd_cipher, pwd_assist, pwd_like)")); } + + @Test + void assertGenerateSQLTokensWhenInsertColumnsUseDifferentEncryptorWithSelectProjection() { + generator.setPreviousSQLTokens(EncryptGeneratorFixtureBuilder.getPreviousSQLTokens()); + assertThrows(UnsupportedSQLOperationException.class, () -> generator.generateSQLToken(EncryptGeneratorFixtureBuilder.createInsertSelectStatementContext(Collections.emptyList(), false))); + } } diff --git 
a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rule/EncryptTableTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rule/EncryptTableTest.java index c46285fd4a5b1..b12f434351b66 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rule/EncryptTableTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rule/EncryptTableTest.java @@ -73,11 +73,21 @@ void assertIsNotCipherColumn() { assertFalse(encryptTable.isCipherColumn("logicColumn")); } + @Test + void assertIsEncryptColumn() { + assertTrue(encryptTable.isEncryptColumn("logicColumn")); + } + @Test void assertGetLogicColumnByCipherColumn() { assertNotNull(encryptTable.getLogicColumnByCipherColumn("cipherColumn")); } + @Test + void assertGetEncryptColumn() { + assertNotNull(encryptTable.getEncryptColumn("logicColumn")); + } + @Test void assertGetLogicColumnByCipherColumnWhenNotFind() { assertThrows(EncryptLogicColumnNotFoundException.class, () -> encryptTable.getLogicColumnByCipherColumn("invalidColumn")); diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rule/builder/EncryptRuleBuilderTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rule/builder/EncryptRuleBuilderTest.java index 08b0e5680740f..e002840e9a402 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rule/builder/EncryptRuleBuilderTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/rule/builder/EncryptRuleBuilderTest.java @@ -36,7 +36,7 @@ class EncryptRuleBuilderTest { @Test void assertBuild() { EncryptRuleConfiguration ruleConfig = mock(EncryptRuleConfiguration.class); - DatabaseRuleBuilder builder = OrderedSPILoader.getServices(DatabaseRuleBuilder.class, Collections.singletonList(ruleConfig)).get(ruleConfig); + DatabaseRuleBuilder builder = OrderedSPILoader.getServices(DatabaseRuleBuilder.class, 
Collections.singleton(ruleConfig)).get(ruleConfig); assertThat(builder.build(ruleConfig, "", Collections.emptyMap(), Collections.emptyList(), mock(InstanceContext.class)), instanceOf(EncryptRule.class)); } } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/yaml/swapper/YamlCompatibleEncryptRuleConfigurationSwapperTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/yaml/swapper/YamlCompatibleEncryptRuleConfigurationSwapperTest.java index 2b2bada136e05..8043e93c93875 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/yaml/swapper/YamlCompatibleEncryptRuleConfigurationSwapperTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/yaml/swapper/YamlCompatibleEncryptRuleConfigurationSwapperTest.java @@ -47,7 +47,7 @@ void assertSwapToYamlConfiguration() { } private CompatibleEncryptRuleConfiguration createEncryptRuleConfiguration() { - Collection tables = Collections.singletonList(new EncryptTableRuleConfiguration("tbl", Collections.emptyList())); + Collection tables = Collections.singleton(new EncryptTableRuleConfiguration("tbl", Collections.emptyList())); Map encryptors = Collections.singletonMap("myEncryptor", new AlgorithmConfiguration("FIXTURE", new Properties())); return new CompatibleEncryptRuleConfiguration(tables, encryptors); } diff --git a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/yaml/swapper/YamlEncryptRuleConfigurationSwapperTest.java b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/yaml/swapper/YamlEncryptRuleConfigurationSwapperTest.java index f40bed1fb0cae..415877df02176 100644 --- a/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/yaml/swapper/YamlEncryptRuleConfigurationSwapperTest.java +++ b/features/encrypt/core/src/test/java/org/apache/shardingsphere/encrypt/yaml/swapper/YamlEncryptRuleConfigurationSwapperTest.java @@ -46,7 +46,7 @@ void 
assertSwapToYamlConfiguration() { } private EncryptRuleConfiguration createEncryptRuleConfiguration() { - Collection tables = Collections.singletonList(new EncryptTableRuleConfiguration("tbl", Collections.emptyList())); + Collection tables = Collections.singleton(new EncryptTableRuleConfiguration("tbl", Collections.emptyList())); Map encryptors = Collections.singletonMap("myEncryptor", new AlgorithmConfiguration("FIXTURE", new Properties())); return new EncryptRuleConfiguration(tables, encryptors); } diff --git a/features/encrypt/distsql/handler/pom.xml b/features/encrypt/distsql/handler/pom.xml index f17743d9417dc..235c722f2fa8e 100644 --- a/features/encrypt/distsql/handler/pom.xml +++ b/features/encrypt/distsql/handler/pom.xml @@ -37,11 +37,6 @@ shardingsphere-encrypt-core ${project.version} - - org.apache.shardingsphere - shardingsphere-encrypt-sm - ${project.version} - org.apache.shardingsphere shardingsphere-encrypt-distsql-statement diff --git a/features/encrypt/distsql/handler/src/main/java/org/apache/shardingsphere/encrypt/distsql/handler/update/CreateEncryptRuleStatementUpdater.java b/features/encrypt/distsql/handler/src/main/java/org/apache/shardingsphere/encrypt/distsql/handler/update/CreateEncryptRuleStatementUpdater.java index b76ce9a23281d..88d0fbb8d7041 100644 --- a/features/encrypt/distsql/handler/src/main/java/org/apache/shardingsphere/encrypt/distsql/handler/update/CreateEncryptRuleStatementUpdater.java +++ b/features/encrypt/distsql/handler/src/main/java/org/apache/shardingsphere/encrypt/distsql/handler/update/CreateEncryptRuleStatementUpdater.java @@ -17,6 +17,7 @@ package org.apache.shardingsphere.encrypt.distsql.handler.update; +import org.apache.shardingsphere.distsql.handler.exception.algorithm.InvalidAlgorithmConfigurationException; import org.apache.shardingsphere.distsql.handler.exception.rule.DuplicateRuleException; import org.apache.shardingsphere.distsql.handler.exception.rule.InvalidRuleConfigurationException; import 
org.apache.shardingsphere.distsql.handler.exception.storageunit.EmptyStorageUnitException; @@ -24,13 +25,17 @@ import org.apache.shardingsphere.distsql.parser.segment.AlgorithmSegment; import org.apache.shardingsphere.encrypt.api.config.EncryptRuleConfiguration; import org.apache.shardingsphere.encrypt.api.config.rule.EncryptTableRuleConfiguration; +import org.apache.shardingsphere.encrypt.api.encrypt.assisted.AssistedEncryptAlgorithm; +import org.apache.shardingsphere.encrypt.api.encrypt.like.LikeEncryptAlgorithm; +import org.apache.shardingsphere.encrypt.api.encrypt.standard.StandardEncryptAlgorithm; import org.apache.shardingsphere.encrypt.distsql.handler.converter.EncryptRuleStatementConverter; +import org.apache.shardingsphere.encrypt.distsql.parser.segment.EncryptColumnItemSegment; import org.apache.shardingsphere.encrypt.distsql.parser.segment.EncryptColumnSegment; import org.apache.shardingsphere.encrypt.distsql.parser.segment.EncryptRuleSegment; import org.apache.shardingsphere.encrypt.distsql.parser.statement.CreateEncryptRuleStatement; import org.apache.shardingsphere.encrypt.spi.EncryptAlgorithm; -import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; +import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import java.util.Collection; @@ -49,10 +54,27 @@ public void checkSQLStatement(final ShardingSphereDatabase database, final Creat checkDuplicateRuleNames(database.getName(), sqlStatement, currentRuleConfig); } checkColumnNames(sqlStatement); + checkAlgorithmTypes(sqlStatement); checkToBeCreatedEncryptors(sqlStatement); checkDataSources(database); } + private void checkAlgorithmTypes(final CreateEncryptRuleStatement sqlStatement) { + sqlStatement.getRules().stream().flatMap(each -> each.getColumns().stream()).forEach(each -> { + 
checkAlgorithmType(each.getCipher(), "standard encrypt", StandardEncryptAlgorithm.class); + checkAlgorithmType(each.getLikeQuery(), "like encrypt", LikeEncryptAlgorithm.class); + checkAlgorithmType(each.getAssistedQuery(), "assisted encrypt", AssistedEncryptAlgorithm.class); + }); + } + + private void checkAlgorithmType(final EncryptColumnItemSegment itemSegment, final String algorithmType, final Class encryptAlgorithmClass) { + if (null == itemSegment || null == itemSegment.getEncryptor()) { + return; + } + EncryptAlgorithm encryptAlgorithm = TypedSPILoader.getService(EncryptAlgorithm.class, itemSegment.getEncryptor().getName(), itemSegment.getEncryptor().getProps()); + ShardingSpherePreconditions.checkState(encryptAlgorithmClass.isInstance(encryptAlgorithm), () -> new InvalidAlgorithmConfigurationException(algorithmType, encryptAlgorithm.getType())); + } + private void checkDuplicateRuleNames(final String databaseName, final CreateEncryptRuleStatement sqlStatement, final EncryptRuleConfiguration currentRuleConfig) { Collection duplicatedRuleNames = getDuplicatedRuleNames(sqlStatement, currentRuleConfig); ShardingSpherePreconditions.checkState(duplicatedRuleNames.isEmpty(), () -> new DuplicateRuleException("encrypt", databaseName, duplicatedRuleNames)); @@ -100,7 +122,7 @@ private void checkToBeCreatedEncryptors(final CreateEncryptRuleStatement sqlStat } private void checkDataSources(final ShardingSphereDatabase database) { - ShardingSpherePreconditions.checkState(!database.getResourceMetaData().getDataSources().isEmpty(), () -> new EmptyStorageUnitException(database.getName())); + ShardingSpherePreconditions.checkState(!database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().isEmpty(), () -> new EmptyStorageUnitException(database.getName())); } @Override diff --git a/features/encrypt/distsql/handler/src/test/java/org/apache/shardingsphere/encrypt/distsql/handler/update/AlterEncryptRuleStatementUpdaterTest.java 
b/features/encrypt/distsql/handler/src/test/java/org/apache/shardingsphere/encrypt/distsql/handler/update/AlterEncryptRuleStatementUpdaterTest.java index e8db40f3cee2a..5ce67d58d59ed 100644 --- a/features/encrypt/distsql/handler/src/test/java/org/apache/shardingsphere/encrypt/distsql/handler/update/AlterEncryptRuleStatementUpdaterTest.java +++ b/features/encrypt/distsql/handler/src/test/java/org/apache/shardingsphere/encrypt/distsql/handler/update/AlterEncryptRuleStatementUpdaterTest.java @@ -78,9 +78,9 @@ void assertCheckSQLStatementWithConflictColumnNames() { @Test void assertUpdateCurrentRuleConfigurationWithInUsedEncryptor() { - EncryptRuleConfiguration currentRuleConfiguration = createCurrentRuleConfigurationWithMultipleTableRules(); - updater.updateCurrentRuleConfiguration(currentRuleConfiguration, createToBeAlteredRuleConfiguration()); - assertThat(currentRuleConfiguration.getEncryptors().size(), is(1)); + EncryptRuleConfiguration currentRuleConfig = createCurrentRuleConfigurationWithMultipleTableRules(); + updater.updateCurrentRuleConfiguration(currentRuleConfig, createToBeAlteredRuleConfiguration()); + assertThat(currentRuleConfig.getEncryptors().size(), is(1)); } private AlterEncryptRuleStatement createSQLStatement(final String encryptorName) { diff --git a/features/encrypt/plugin/pom.xml b/features/encrypt/plugin/pom.xml deleted file mode 100644 index 0baf3670a1b3b..0000000000000 --- a/features/encrypt/plugin/pom.xml +++ /dev/null @@ -1,33 +0,0 @@ - - - - - 4.0.0 - - org.apache.shardingsphere - shardingsphere-encrypt - 5.4.1-SNAPSHOT - - shardingsphere-encrypt-plugin - pom - ${project.artifactId} - - - sm - - diff --git a/features/encrypt/plugin/sm/pom.xml b/features/encrypt/plugin/sm/pom.xml deleted file mode 100644 index ae0ecfe9e2f57..0000000000000 --- a/features/encrypt/plugin/sm/pom.xml +++ /dev/null @@ -1,55 +0,0 @@ - - - - - 4.0.0 - - org.apache.shardingsphere - shardingsphere-encrypt-plugin - 5.4.1-SNAPSHOT - - shardingsphere-encrypt-sm - 
${project.artifactId} - - - - org.apache.shardingsphere - shardingsphere-encrypt-api - ${project.version} - - - - org.apache.shardingsphere - shardingsphere-encrypt-core - ${project.version} - - - - org.apache.shardingsphere - shardingsphere-test-util - ${project.version} - test - - - - org.bouncycastle - bcprov-jdk15on - ${bouncycastle.version} - - - diff --git a/features/encrypt/plugin/sm/src/main/java/org/apache/shardingsphere/encrypt/sm/algorithm/SM3EncryptAlgorithm.java b/features/encrypt/plugin/sm/src/main/java/org/apache/shardingsphere/encrypt/sm/algorithm/SM3EncryptAlgorithm.java deleted file mode 100644 index b770347664a7e..0000000000000 --- a/features/encrypt/plugin/sm/src/main/java/org/apache/shardingsphere/encrypt/sm/algorithm/SM3EncryptAlgorithm.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.encrypt.sm.algorithm; - -import org.apache.shardingsphere.encrypt.api.encrypt.standard.StandardEncryptAlgorithm; -import org.apache.shardingsphere.encrypt.exception.algorithm.EncryptAlgorithmInitializationException; -import org.apache.shardingsphere.encrypt.api.context.EncryptContext; -import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; -import org.bouncycastle.crypto.digests.SM3Digest; -import org.bouncycastle.jce.provider.BouncyCastleProvider; -import org.bouncycastle.pqc.math.linearalgebra.ByteUtils; - -import java.nio.charset.StandardCharsets; -import java.security.Security; -import java.util.Properties; - -/** - * SM3 encrypt algorithm. - */ -public final class SM3EncryptAlgorithm implements StandardEncryptAlgorithm { - - static { - Security.addProvider(new BouncyCastleProvider()); - } - - private static final String SM3_SALT = "sm3-salt"; - - private static final int SALT_LENGTH = 8; - - private byte[] sm3Salt; - - @Override - public void init(final Properties props) { - sm3Salt = createSm3Salt(props); - } - - private byte[] createSm3Salt(final Properties props) { - String salt = null == props.getProperty(SM3_SALT) ? "" : String.valueOf(props.getProperty(SM3_SALT)); - ShardingSpherePreconditions.checkState(salt.isEmpty() || SALT_LENGTH == salt.length(), - () -> new EncryptAlgorithmInitializationException("SM3", "Salt should be either blank or better " + SALT_LENGTH + " bytes long.")); - return salt.isEmpty() ? new byte[0] : salt.getBytes(StandardCharsets.UTF_8); - } - - @Override - public String encrypt(final Object plainValue, final EncryptContext encryptContext) { - return null == plainValue ? 
null : ByteUtils.toHexString(digest(String.valueOf(plainValue).getBytes(StandardCharsets.UTF_8), sm3Salt)); - } - - @Override - public Object decrypt(final String cipherValue, final EncryptContext encryptContext) { - return cipherValue; - } - - private byte[] digest(final byte[] input, final byte[] salt) { - SM3Digest sm3Digest = new SM3Digest(); - byte[] updateByte = concat(input, salt); - sm3Digest.update(updateByte, 0, updateByte.length); - byte[] result = new byte[sm3Digest.getDigestSize()]; - sm3Digest.doFinal(result, 0); - return result; - } - - private byte[] concat(final byte[] input, final byte[] salt) { - int inputLength = input.length; - int saltLength = salt.length; - byte[] result = new byte[inputLength + saltLength]; - System.arraycopy(input, 0, result, 0, inputLength); - System.arraycopy(salt, 0, result, inputLength, saltLength); - return result; - } - - @Override - public String getType() { - return "SM3"; - } -} diff --git a/features/encrypt/plugin/sm/src/main/java/org/apache/shardingsphere/encrypt/sm/algorithm/SM4EncryptAlgorithm.java b/features/encrypt/plugin/sm/src/main/java/org/apache/shardingsphere/encrypt/sm/algorithm/SM4EncryptAlgorithm.java deleted file mode 100644 index 2a4faf3510be3..0000000000000 --- a/features/encrypt/plugin/sm/src/main/java/org/apache/shardingsphere/encrypt/sm/algorithm/SM4EncryptAlgorithm.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.encrypt.sm.algorithm; - -import lombok.SneakyThrows; -import org.apache.shardingsphere.encrypt.api.encrypt.standard.StandardEncryptAlgorithm; -import org.apache.shardingsphere.encrypt.exception.algorithm.EncryptAlgorithmInitializationException; -import org.apache.shardingsphere.encrypt.api.context.EncryptContext; -import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; -import org.bouncycastle.jce.provider.BouncyCastleProvider; -import org.bouncycastle.pqc.math.linearalgebra.ByteUtils; - -import javax.crypto.Cipher; -import javax.crypto.spec.IvParameterSpec; -import javax.crypto.spec.SecretKeySpec; -import java.nio.charset.StandardCharsets; -import java.security.GeneralSecurityException; -import java.security.Security; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Properties; -import java.util.Set; - -/** - * SM4 encrypt algorithm. 
- */ -public final class SM4EncryptAlgorithm implements StandardEncryptAlgorithm { - - static { - Security.addProvider(new BouncyCastleProvider()); - } - - private static final String SM4_KEY = "sm4-key"; - - private static final String SM4_IV = "sm4-iv"; - - private static final String SM4_MODE = "sm4-mode"; - - private static final String SM4_PADDING = "sm4-padding"; - - private static final int KEY_LENGTH = 16; - - private static final int IV_LENGTH = 16; - - private static final Set MODES = new HashSet<>(Arrays.asList("ECB", "CBC")); - - private static final Set PADDINGS = new HashSet<>(Arrays.asList("PKCS5Padding", "PKCS7Padding")); - - private byte[] sm4Key; - - private byte[] sm4Iv; - - private String sm4ModePadding; - - @Override - public void init(final Properties props) { - String sm4Mode = createSm4Mode(props); - String sm4Padding = createSm4Padding(props); - sm4ModePadding = "SM4/" + sm4Mode + "/" + sm4Padding; - sm4Key = createSm4Key(props); - sm4Iv = createSm4Iv(props, sm4Mode); - } - - private String createSm4Mode(final Properties props) { - ShardingSpherePreconditions.checkState(props.containsKey(SM4_MODE), () -> new EncryptAlgorithmInitializationException("SM4", String.format("%s can not be null", SM4_MODE))); - String result = String.valueOf(props.getProperty(SM4_MODE)).toUpperCase(); - ShardingSpherePreconditions.checkState(MODES.contains(result), () -> new EncryptAlgorithmInitializationException("SM4", "Mode must be either CBC or ECB")); - return result; - } - - private byte[] createSm4Key(final Properties props) { - ShardingSpherePreconditions.checkState(props.containsKey(SM4_KEY), () -> new EncryptAlgorithmInitializationException("SM4", String.format("%s can not be null", SM4_KEY))); - byte[] result = ByteUtils.fromHexString(String.valueOf(props.getProperty(SM4_KEY))); - ShardingSpherePreconditions.checkState(KEY_LENGTH == result.length, - () -> new EncryptAlgorithmInitializationException("SM4", "Key length must be " + KEY_LENGTH + " bytes 
long")); - return result; - } - - private byte[] createSm4Iv(final Properties props, final String sm4Mode) { - if (!"CBC".equalsIgnoreCase(sm4Mode)) { - return new byte[0]; - } - ShardingSpherePreconditions.checkState(props.containsKey(SM4_IV), () -> new EncryptAlgorithmInitializationException("SM4", String.format("%s can not be null", SM4_IV))); - String sm4IvValue = String.valueOf(props.getProperty(SM4_IV)); - byte[] result = ByteUtils.fromHexString(sm4IvValue); - ShardingSpherePreconditions.checkState(IV_LENGTH == result.length, () -> new EncryptAlgorithmInitializationException("SM4", "Iv length must be " + IV_LENGTH + " bytes long")); - return result; - } - - private String createSm4Padding(final Properties props) { - ShardingSpherePreconditions.checkState(props.containsKey(SM4_PADDING), () -> new EncryptAlgorithmInitializationException("SM4", String.format("%s can not be null", SM4_PADDING))); - String result = String.valueOf(props.getProperty(SM4_PADDING)).toUpperCase().replace("PADDING", "Padding"); - ShardingSpherePreconditions.checkState(PADDINGS.contains(result), () -> new EncryptAlgorithmInitializationException("SM4", "Padding must be either PKCS5Padding or PKCS7Padding")); - return result; - } - - @Override - public String encrypt(final Object plainValue, final EncryptContext encryptContext) { - return null == plainValue ? null : ByteUtils.toHexString(encrypt(String.valueOf(plainValue).getBytes(StandardCharsets.UTF_8))); - } - - private byte[] encrypt(final byte[] plainValue) { - return handle(plainValue, Cipher.ENCRYPT_MODE); - } - - @Override - public Object decrypt(final String cipherValue, final EncryptContext encryptContext) { - return null == cipherValue ? 
null : new String(decrypt(ByteUtils.fromHexString(cipherValue)), StandardCharsets.UTF_8); - } - - private byte[] decrypt(final byte[] cipherValue) { - return handle(cipherValue, Cipher.DECRYPT_MODE); - } - - @SneakyThrows(GeneralSecurityException.class) - private byte[] handle(final byte[] input, final int mode) { - Cipher cipher = Cipher.getInstance(sm4ModePadding, BouncyCastleProvider.PROVIDER_NAME); - SecretKeySpec secretKeySpec = new SecretKeySpec(sm4Key, "SM4"); - if (0 == sm4Iv.length) { - cipher.init(mode, secretKeySpec); - } else { - cipher.init(mode, secretKeySpec, new IvParameterSpec(sm4Iv)); - } - return cipher.doFinal(input); - } - - @Override - public String getType() { - return "SM4"; - } -} diff --git a/features/encrypt/plugin/sm/src/main/resources/META-INF/services/org.apache.shardingsphere.encrypt.spi.EncryptAlgorithm b/features/encrypt/plugin/sm/src/main/resources/META-INF/services/org.apache.shardingsphere.encrypt.spi.EncryptAlgorithm deleted file mode 100644 index 630f75247d136..0000000000000 --- a/features/encrypt/plugin/sm/src/main/resources/META-INF/services/org.apache.shardingsphere.encrypt.spi.EncryptAlgorithm +++ /dev/null @@ -1,19 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -org.apache.shardingsphere.encrypt.sm.algorithm.SM3EncryptAlgorithm -org.apache.shardingsphere.encrypt.sm.algorithm.SM4EncryptAlgorithm diff --git a/features/encrypt/plugin/sm/src/test/java/org/apache/shardingsphere/encrypt/sm/algorithm/SM3EncryptAlgorithmTest.java b/features/encrypt/plugin/sm/src/test/java/org/apache/shardingsphere/encrypt/sm/algorithm/SM3EncryptAlgorithmTest.java deleted file mode 100644 index 56a38c6d00e7e..0000000000000 --- a/features/encrypt/plugin/sm/src/test/java/org/apache/shardingsphere/encrypt/sm/algorithm/SM3EncryptAlgorithmTest.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.encrypt.sm.algorithm; - -import org.apache.shardingsphere.encrypt.api.encrypt.standard.StandardEncryptAlgorithm; -import org.apache.shardingsphere.encrypt.spi.EncryptAlgorithm; -import org.apache.shardingsphere.encrypt.api.context.EncryptContext; -import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; -import org.apache.shardingsphere.test.util.PropertiesBuilder; -import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import java.util.Properties; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.mockito.Mockito.mock; - -class SM3EncryptAlgorithmTest { - - private StandardEncryptAlgorithm encryptAlgorithm; - - @SuppressWarnings("unchecked") - @BeforeEach - void setUp() { - encryptAlgorithm = (StandardEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "SM3", PropertiesBuilder.build(new Property("sm3-salt", "test1234"))); - } - - @Test - void assertEncrypt() { - Object actual = encryptAlgorithm.encrypt("test1234", mock(EncryptContext.class)); - assertThat(actual, is("9587fe084ee4b53fe629c6ae5519ee4d55def8ed4badc8588d3be9b99bd84aba")); - } - - @Test - void assertEncryptWithoutSalt() { - encryptAlgorithm.init(new Properties()); - assertThat(encryptAlgorithm.encrypt("test1234", mock(EncryptContext.class)), is("ab847c6f2f6a53be88808c5221bd6ee0762e1af1def82b21d2061599b6cf5c79")); - } - - @Test - void assertEncryptWithNullPlaintext() { - assertNull(encryptAlgorithm.encrypt(null, mock(EncryptContext.class))); - } - - @Test - void assertDecrypt() { - Object actual = encryptAlgorithm.decrypt("ab847c6f2f6a53be88808c5221bd6ee0762e1af1def82b21d2061599b6cf5c79", mock(EncryptContext.class)); - assertThat(actual.toString(), 
is("ab847c6f2f6a53be88808c5221bd6ee0762e1af1def82b21d2061599b6cf5c79")); - } - - @Test - void assertDecryptWithoutSalt() { - encryptAlgorithm.init(new Properties()); - Object actual = encryptAlgorithm.decrypt("ab847c6f2f6a53be88808c5221bd6ee0762e1af1def82b21d2061599b6cf5c79", mock(EncryptContext.class)); - assertThat(actual.toString(), is("ab847c6f2f6a53be88808c5221bd6ee0762e1af1def82b21d2061599b6cf5c79")); - } - - @Test - void assertDecryptWithNullCiphertext() { - assertNull(encryptAlgorithm.decrypt(null, mock(EncryptContext.class))); - } -} diff --git a/features/encrypt/plugin/sm/src/test/java/org/apache/shardingsphere/encrypt/sm/algorithm/SM4EncryptAlgorithmTest.java b/features/encrypt/plugin/sm/src/test/java/org/apache/shardingsphere/encrypt/sm/algorithm/SM4EncryptAlgorithmTest.java deleted file mode 100644 index a2de944268535..0000000000000 --- a/features/encrypt/plugin/sm/src/test/java/org/apache/shardingsphere/encrypt/sm/algorithm/SM4EncryptAlgorithmTest.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.encrypt.sm.algorithm; - -import org.apache.shardingsphere.encrypt.api.encrypt.standard.StandardEncryptAlgorithm; -import org.apache.shardingsphere.encrypt.exception.algorithm.EncryptAlgorithmInitializationException; -import org.apache.shardingsphere.encrypt.spi.EncryptAlgorithm; -import org.apache.shardingsphere.encrypt.api.context.EncryptContext; -import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; -import org.apache.shardingsphere.test.util.PropertiesBuilder; -import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; -import org.junit.jupiter.api.Test; - -import java.util.Properties; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.Mockito.mock; - -class SM4EncryptAlgorithmTest { - - @Test - void assertInitWithoutKey() { - assertThrows(EncryptAlgorithmInitializationException.class, - () -> TypedSPILoader.getService(EncryptAlgorithm.class, "SM4", PropertiesBuilder.build(new Property("sm4-mode", "ECB"), new Property("sm4-padding", "PKCS5Padding")))); - } - - @SuppressWarnings("unchecked") - @Test - void assertEncryptNullValue() { - StandardEncryptAlgorithm algorithm = (StandardEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "SM4", createECBProperties()); - assertNull(algorithm.encrypt(null, mock(EncryptContext.class))); - } - - @SuppressWarnings("unchecked") - @Test - void assertEncryptWithECBMode() { - StandardEncryptAlgorithm algorithm = (StandardEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "SM4", createECBProperties()); - assertThat(algorithm.encrypt("test", mock(EncryptContext.class)), is("028654f2ca4f575dee9e1faae85dadde")); - } - - @SuppressWarnings("unchecked") - @Test - void assertDecryptNullValue() { - StandardEncryptAlgorithm algorithm = 
(StandardEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "SM4", createECBProperties()); - assertNull(algorithm.decrypt(null, mock(EncryptContext.class))); - } - - @SuppressWarnings("unchecked") - @Test - void assertDecryptWithECBMode() { - StandardEncryptAlgorithm algorithm = (StandardEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "SM4", createECBProperties()); - assertThat(algorithm.decrypt("028654f2ca4f575dee9e1faae85dadde", mock(EncryptContext.class)).toString(), is("test")); - } - - private Properties createECBProperties() { - return PropertiesBuilder.build(new Property("sm4-key", "4D744E003D713D054E7E407C350E447E"), new Property("sm4-mode", "ECB"), new Property("sm4-padding", "PKCS5Padding")); - } - - @SuppressWarnings("unchecked") - @Test - void assertEncryptWithCBCMode() { - StandardEncryptAlgorithm algorithm = (StandardEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "SM4", createCBCProperties()); - assertThat(algorithm.encrypt("test", mock(EncryptContext.class)), is("dca2127b57ba8cac36a0914e0208dc11")); - } - - @SuppressWarnings("unchecked") - @Test - void assertDecrypt() { - StandardEncryptAlgorithm algorithm = (StandardEncryptAlgorithm) TypedSPILoader.getService(EncryptAlgorithm.class, "SM4", createCBCProperties()); - assertThat(algorithm.decrypt("dca2127b57ba8cac36a0914e0208dc11", mock(EncryptContext.class)).toString(), is("test")); - } - - private Properties createCBCProperties() { - return PropertiesBuilder.build( - new Property("sm4-key", "f201326119911788cFd30575b81059ac"), new Property("sm4-iv", "e166c3391294E69cc4c620f594fe00d7"), - new Property("sm4-mode", "CBC"), new Property("sm4-padding", "PKCS7Padding")); - } -} diff --git a/features/encrypt/pom.xml b/features/encrypt/pom.xml index 186cbc2b230ed..df7f2b0624458 100644 --- a/features/encrypt/pom.xml +++ b/features/encrypt/pom.xml @@ -31,6 +31,5 @@ api core distsql - plugin diff --git 
a/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/GenericTableRandomReplaceAlgorithm.java b/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/GenericTableRandomReplaceAlgorithm.java index 86dbfcca87f59..0567762377f4d 100644 --- a/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/GenericTableRandomReplaceAlgorithm.java +++ b/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/GenericTableRandomReplaceAlgorithm.java @@ -68,7 +68,7 @@ public void init(final Properties props) { ShardingSpherePreconditions.checkState(!digitalCodes.isEmpty(), () -> new MaskAlgorithmInitializationException(getType(), String.format("'%s' must be not empty", DIGITAL_CODES))); specialCodes = splitPropsToList(props.getProperty(SPECIAL_CODES, DEFAULT_SPECIAL_CODES)); - ShardingSpherePreconditions.checkState(!SPECIAL_CODES.isEmpty(), + ShardingSpherePreconditions.checkState(!specialCodes.isEmpty(), () -> new MaskAlgorithmInitializationException(getType(), String.format("'%s' must be not empty", SPECIAL_CODES))); } diff --git a/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/LandlineNumberRandomAlgorithm.java b/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/LandlineNumberRandomAlgorithm.java deleted file mode 100644 index ad32c3364eef4..0000000000000 --- a/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/LandlineNumberRandomAlgorithm.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.mask.algorithm.replace; - -import com.google.common.base.Splitter; -import com.google.common.base.Strings; -import org.apache.shardingsphere.mask.algorithm.MaskAlgorithmPropsChecker; -import org.apache.shardingsphere.mask.spi.MaskAlgorithm; - -import java.security.SecureRandom; -import java.util.List; -import java.util.Properties; -import java.util.Random; - -/** - * Landline number replace algorithm. - */ -public final class LandlineNumberRandomAlgorithm implements MaskAlgorithm { - - private static final String LANDLINE_NUMBERS = "landline-numbers"; - - private final Random random = new SecureRandom(); - - private List landLineNumbers; - - @Override - public void init(final Properties props) { - landLineNumbers = createLandLineNumbers(props); - } - - private List createLandLineNumbers(final Properties props) { - MaskAlgorithmPropsChecker.checkAtLeastOneCharConfig(props, LANDLINE_NUMBERS, getType()); - return Splitter.on(",").trimResults().splitToList(props.getProperty(LANDLINE_NUMBERS)); - } - - @Override - public String mask(final Object plainValue) { - String result = null == plainValue ? 
null : String.valueOf(plainValue); - if (Strings.isNullOrEmpty(result)) { - return result; - } - return landLineNumbers.stream().filter(result::startsWith).findFirst().map(each -> createRandValue(result, each)).orElse(result); - } - - private String createRandValue(final String plainValue, final String landLineNumber) { - StringBuilder result = new StringBuilder(); - result.append(landLineNumbers.get(random.nextInt(landLineNumbers.size()))); - for (int i = landLineNumber.length(); i < plainValue.length(); i++) { - result.append(Character.forDigit(random.nextInt(10), 10)); - } - return result.toString(); - } - - @Override - public String getType() { - return "LANDLINE_NUMBER_RANDOM_REPLACE"; - } -} diff --git a/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/MilitaryIdentityNumberRandomReplaceAlgorithm.java b/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/MilitaryIdentityNumberRandomReplaceAlgorithm.java deleted file mode 100644 index ecfc1ffbed220..0000000000000 --- a/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/MilitaryIdentityNumberRandomReplaceAlgorithm.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.mask.algorithm.replace; - -import com.google.common.base.Splitter; -import com.google.common.base.Strings; -import org.apache.shardingsphere.mask.algorithm.MaskAlgorithmPropsChecker; -import org.apache.shardingsphere.mask.spi.MaskAlgorithm; - -import java.security.SecureRandom; -import java.util.List; -import java.util.Properties; -import java.util.Random; -import java.util.stream.Collectors; - -/** - * Military identity number random replace algorithm. - */ -public final class MilitaryIdentityNumberRandomReplaceAlgorithm implements MaskAlgorithm { - - private static final String TYPE_CODE = "type-codes"; - - private final Random random = new SecureRandom(); - - private List typeCodes; - - @Override - public void init(final Properties props) { - typeCodes = createTypeCodes(props); - } - - private List createTypeCodes(final Properties props) { - MaskAlgorithmPropsChecker.checkAtLeastOneCharConfig(props, TYPE_CODE, getType()); - return Splitter.on(",").trimResults().splitToList(props.getProperty(TYPE_CODE)).stream().map(each -> each.charAt(0)).collect(Collectors.toList()); - } - - @Override - public String mask(final Object plainValue) { - String result = null == plainValue ? 
null : String.valueOf(plainValue); - if (Strings.isNullOrEmpty(result)) { - return result; - } - char[] chars = result.toCharArray(); - chars[0] = typeCodes.get(random.nextInt(typeCodes.size())); - for (int i = 1; i < chars.length; i++) { - if (Character.isDigit(chars[i])) { - chars[i] = Character.forDigit(random.nextInt(10), 10); - } - } - return new String(chars); - } - - @Override - public String getType() { - return "MILITARY_IDENTITY_NUMBER_RANDOM_REPLACE"; - } -} diff --git a/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/PersonalIdentityNumberRandomReplaceAlgorithm.java b/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/PersonalIdentityNumberRandomReplaceAlgorithm.java deleted file mode 100644 index 4dbd547b82cb9..0000000000000 --- a/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/PersonalIdentityNumberRandomReplaceAlgorithm.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.mask.algorithm.replace; - -import com.google.common.base.Strings; -import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; -import org.apache.shardingsphere.mask.exception.algorithm.MaskAlgorithmInitializationException; -import org.apache.shardingsphere.mask.spi.MaskAlgorithm; - -import java.security.SecureRandom; -import java.util.Properties; -import java.util.Random; - -/** - * Personal identity number random replace algorithm. - */ -public final class PersonalIdentityNumberRandomReplaceAlgorithm implements MaskAlgorithm { - - private static final String ALPHA_TWO_COUNTRY_AREA_CODE = "alpha-two-country-area-code"; - - private final Random random = new SecureRandom(); - - private String alphaTwoCountryAreaCode; - - @Override - public void init(final Properties props) { - alphaTwoCountryAreaCode = props.getProperty(ALPHA_TWO_COUNTRY_AREA_CODE, "CN"); - ShardingSpherePreconditions.checkState(!Strings.isNullOrEmpty(alphaTwoCountryAreaCode), - () -> new MaskAlgorithmInitializationException(getType(), String.format("%s can not be empty", ALPHA_TWO_COUNTRY_AREA_CODE))); - } - - @Override - public String mask(final Object plainValue) { - String result = null == plainValue ? 
null : String.valueOf(plainValue); - if (Strings.isNullOrEmpty(result)) { - return result; - } - if ("CN".equals(alphaTwoCountryAreaCode)) { - return randomReplaceForChinesePersonalIdentityNumber(result); - } - return result; - } - - private String randomReplaceForChinesePersonalIdentityNumber(final String result) { - switch (result.length()) { - case 15: - return randomReplaceNumber(result, 6, 12); - case 18: - return randomReplaceNumber(result, 6, 14); - default: - } - return result; - } - - private String randomReplaceNumber(final String result, final int from, final int to) { - char[] chars = result.toCharArray(); - for (int i = from; i < to; i++) { - chars[i] = Character.forDigit(random.nextInt(10), 10); - } - return new String(chars); - } - - @Override - public String getType() { - return "PERSONAL_IDENTITY_NUMBER_RANDOM_REPLACE"; - } -} diff --git a/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/TelephoneRandomReplaceAlgorithm.java b/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/TelephoneRandomReplaceAlgorithm.java deleted file mode 100644 index 55d9057198239..0000000000000 --- a/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/TelephoneRandomReplaceAlgorithm.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.mask.algorithm.replace; - -import com.google.common.base.Splitter; -import com.google.common.base.Strings; -import lombok.SneakyThrows; -import org.apache.shardingsphere.mask.exception.algorithm.MaskAlgorithmInitializationException; -import org.apache.shardingsphere.mask.spi.MaskAlgorithm; - -import java.io.IOException; -import java.io.InputStream; -import java.security.SecureRandom; -import java.util.List; -import java.util.Objects; -import java.util.Properties; -import java.util.Random; -import java.util.Scanner; -import java.util.stream.Collectors; - -/** - * Telephone random replace algorithm. - */ -public final class TelephoneRandomReplaceAlgorithm implements MaskAlgorithm { - - private static final String NETWORK_NUMBERS = "network-numbers"; - - private final Random random = new SecureRandom(); - - private List networkNumbers; - - @Override - public void init(final Properties props) { - networkNumbers = createNetworkNumbers(props); - } - - private List createNetworkNumbers(final Properties props) { - String networkNumbers = props.containsKey(NETWORK_NUMBERS) && !Strings.isNullOrEmpty(props.getProperty(NETWORK_NUMBERS)) ? 
props.getProperty(NETWORK_NUMBERS) : initDefaultNetworkNumbers(); - return Splitter.on(",").trimResults().splitToList(networkNumbers).stream().map(this::getNetworkNumber).distinct().collect(Collectors.toList()); - } - - @SneakyThrows(IOException.class) - private String initDefaultNetworkNumbers() { - StringBuilder result = new StringBuilder(); - try ( - InputStream inputStream = Objects.requireNonNull(Thread.currentThread().getContextClassLoader().getResourceAsStream("algorithm/replace/chinese_network_numbers.dict")); - Scanner scanner = new Scanner(inputStream)) { - while (scanner.hasNextLine()) { - String line = scanner.nextLine(); - if (!line.isEmpty() && !line.startsWith("#")) { - result.append(line); - } - } - } - return result.toString(); - } - - private String getNetworkNumber(final String networkNumber) { - try { - Integer.parseInt(networkNumber); - return networkNumber; - } catch (final NumberFormatException ignored) { - throw new MaskAlgorithmInitializationException(getType(), String.format("network-number %s can only be integer number", networkNumber)); - } - } - - @Override - public String mask(final Object plainValue) { - String result = null == plainValue ? 
null : String.valueOf(plainValue); - if (Strings.isNullOrEmpty(result)) { - return result; - } - return networkNumbers.stream().filter(result::startsWith).findFirst().map(each -> createRandValue(result, each)).orElse(result); - } - - private String createRandValue(final String plainValue, final String networkNumber) { - StringBuilder result = new StringBuilder(); - result.append(networkNumbers.get(random.nextInt(networkNumbers.size()))); - for (int i = networkNumber.length(); i < plainValue.length(); i++) { - result.append(Character.forDigit(random.nextInt(10), 10)); - } - return result.toString(); - } - - @Override - public String getType() { - return "TELEPHONE_RANDOM_REPLACE"; - } -} diff --git a/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/UnifiedCreditCodeRandomReplaceAlgorithm.java b/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/UnifiedCreditCodeRandomReplaceAlgorithm.java deleted file mode 100644 index 6a570f00920dd..0000000000000 --- a/features/mask/core/src/main/java/org/apache/shardingsphere/mask/algorithm/replace/UnifiedCreditCodeRandomReplaceAlgorithm.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.mask.algorithm.replace; - -import com.google.common.base.Splitter; -import com.google.common.base.Strings; -import lombok.Getter; -import org.apache.shardingsphere.mask.algorithm.MaskAlgorithmPropsChecker; -import org.apache.shardingsphere.mask.spi.MaskAlgorithm; - -import java.security.SecureRandom; -import java.util.List; -import java.util.Properties; -import java.util.Random; -import java.util.stream.Collectors; - -/** - * Unified credit code random replace algorithm. - */ -public final class UnifiedCreditCodeRandomReplaceAlgorithm implements MaskAlgorithm { - - private static final String REGISTRATION_DEPARTMENT_CODES = "registration-department-codes"; - - private static final String CATEGORY_CODES = "category-codes"; - - private static final String ADMINISTRATIVE_DIVISION_CODES = "administrative-division-codes"; - - private final Random random = new SecureRandom(); - - private List registrationDepartmentCodes; - - private List categoryCodes; - - private List administrativeDivisionCodes; - - @Getter - private Properties props; - - @Override - public void init(final Properties props) { - this.props = props; - this.registrationDepartmentCodes = createRegistrationDepartmentCodes(props); - this.categoryCodes = createCategoryCodes(props); - this.administrativeDivisionCodes = createAdministrativeDivisionCodes(props); - } - - private List createRegistrationDepartmentCodes(final Properties props) { - MaskAlgorithmPropsChecker.checkAtLeastOneCharConfig(props, REGISTRATION_DEPARTMENT_CODES, getType()); - return Splitter.on(",").trimResults().splitToList(props.getProperty(REGISTRATION_DEPARTMENT_CODES)).stream().map(each -> each.charAt(0)).collect(Collectors.toList()); - } - - private List createCategoryCodes(final Properties props) { - MaskAlgorithmPropsChecker.checkAtLeastOneCharConfig(props, CATEGORY_CODES, getType()); - return Splitter.on(",").trimResults().splitToList(props.getProperty(CATEGORY_CODES)).stream().map(each -> 
each.charAt(0)).collect(Collectors.toList()); - } - - private List createAdministrativeDivisionCodes(final Properties props) { - MaskAlgorithmPropsChecker.checkAtLeastOneCharConfig(props, ADMINISTRATIVE_DIVISION_CODES, getType()); - return Splitter.on(",").trimResults().splitToList(props.getProperty(ADMINISTRATIVE_DIVISION_CODES)); - } - - @Override - public String mask(final Object plainValue) { - String result = null == plainValue ? null : String.valueOf(plainValue); - if (Strings.isNullOrEmpty(result)) { - return result; - } - return randomReplace(); - } - - private String randomReplace() { - StringBuilder result = new StringBuilder(); - result.append(registrationDepartmentCodes.get(random.nextInt(registrationDepartmentCodes.size()))) - .append(categoryCodes.get(random.nextInt(categoryCodes.size()))) - .append(administrativeDivisionCodes.get(random.nextInt(administrativeDivisionCodes.size()))); - for (int i = 0; i < 10; i++) { - result.append(Character.forDigit(random.nextInt(10), 10)); - } - return result.toString(); - } - - @Override - public String getType() { - return "UNIFIED_CREDIT_CODE_RANDOM_REPLACE"; - } -} diff --git a/features/mask/core/src/main/java/org/apache/shardingsphere/mask/rule/MaskRule.java b/features/mask/core/src/main/java/org/apache/shardingsphere/mask/rule/MaskRule.java index eda9e780c7f10..3c41572e8ab94 100644 --- a/features/mask/core/src/main/java/org/apache/shardingsphere/mask/rule/MaskRule.java +++ b/features/mask/core/src/main/java/org/apache/shardingsphere/mask/rule/MaskRule.java @@ -83,9 +83,4 @@ public TableNamesMapper getDistributedTableMapper() { public TableNamesMapper getEnhancedTableMapper() { return new TableNamesMapper(); } - - @Override - public String getType() { - return MaskRule.class.getSimpleName(); - } } diff --git a/features/mask/core/src/main/resources/META-INF/services/org.apache.shardingsphere.mask.spi.MaskAlgorithm 
b/features/mask/core/src/main/resources/META-INF/services/org.apache.shardingsphere.mask.spi.MaskAlgorithm index 8653596044bc3..c7f54ad98c181 100644 --- a/features/mask/core/src/main/resources/META-INF/services/org.apache.shardingsphere.mask.spi.MaskAlgorithm +++ b/features/mask/core/src/main/resources/META-INF/services/org.apache.shardingsphere.mask.spi.MaskAlgorithm @@ -22,9 +22,4 @@ org.apache.shardingsphere.mask.algorithm.cover.MaskAfterSpecialCharsAlgorithm org.apache.shardingsphere.mask.algorithm.cover.MaskBeforeSpecialCharsAlgorithm org.apache.shardingsphere.mask.algorithm.cover.MaskFirstNLastMMaskAlgorithm org.apache.shardingsphere.mask.algorithm.cover.MaskFromXToYMaskAlgorithm -org.apache.shardingsphere.mask.algorithm.replace.TelephoneRandomReplaceAlgorithm -org.apache.shardingsphere.mask.algorithm.replace.PersonalIdentityNumberRandomReplaceAlgorithm -org.apache.shardingsphere.mask.algorithm.replace.MilitaryIdentityNumberRandomReplaceAlgorithm -org.apache.shardingsphere.mask.algorithm.replace.LandlineNumberRandomAlgorithm org.apache.shardingsphere.mask.algorithm.replace.GenericTableRandomReplaceAlgorithm -org.apache.shardingsphere.mask.algorithm.replace.UnifiedCreditCodeRandomReplaceAlgorithm diff --git a/features/mask/core/src/test/java/org/apache/shardingsphere/mask/algorithm/replace/LandlineNumberRandomAlgorithmTest.java b/features/mask/core/src/test/java/org/apache/shardingsphere/mask/algorithm/replace/LandlineNumberRandomAlgorithmTest.java deleted file mode 100644 index 4cbeee9757690..0000000000000 --- a/features/mask/core/src/test/java/org/apache/shardingsphere/mask/algorithm/replace/LandlineNumberRandomAlgorithmTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.mask.algorithm.replace; - -import org.apache.shardingsphere.mask.exception.algorithm.MaskAlgorithmInitializationException; -import org.apache.shardingsphere.test.util.PropertiesBuilder; -import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertThrows; - -class LandlineNumberRandomAlgorithmTest { - - private LandlineNumberRandomAlgorithm maskAlgorithm; - - @BeforeEach - void setUp() { - maskAlgorithm = new LandlineNumberRandomAlgorithm(); - maskAlgorithm.init(PropertiesBuilder.build(new Property("landline-numbers", "025, 027, 028, 029, 0310, 0311, 0313"))); - } - - @Test - void assertMask() { - assertThat(maskAlgorithm.mask(""), is("")); - assertThat(maskAlgorithm.mask("0251234567"), not("0251234567")); - assertThat(maskAlgorithm.mask("03101234567"), not("03101234567")); - } - - @Test - void assertInitWhenConfigIsEmpty() { - assertThrows(MaskAlgorithmInitializationException.class, () -> maskAlgorithm.init(PropertiesBuilder.build())); - } - - @Test - void assertMaskWithInvalidConfig() { - assertThrows(MaskAlgorithmInitializationException.class, - () -> 
maskAlgorithm.init(PropertiesBuilder.build(new Property("landline-numbers", "")))); - } -} diff --git a/features/mask/core/src/test/java/org/apache/shardingsphere/mask/algorithm/replace/MilitaryIdentityNumberRandomReplaceAlgorithmTest.java b/features/mask/core/src/test/java/org/apache/shardingsphere/mask/algorithm/replace/MilitaryIdentityNumberRandomReplaceAlgorithmTest.java deleted file mode 100644 index 0b42067a4ec80..0000000000000 --- a/features/mask/core/src/test/java/org/apache/shardingsphere/mask/algorithm/replace/MilitaryIdentityNumberRandomReplaceAlgorithmTest.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.mask.algorithm.replace; - -import org.apache.shardingsphere.mask.exception.algorithm.MaskAlgorithmInitializationException; -import org.apache.shardingsphere.test.util.PropertiesBuilder; -import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import java.util.Properties; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; - -class MilitaryIdentityNumberRandomReplaceAlgorithmTest { - - private MilitaryIdentityNumberRandomReplaceAlgorithm maskAlgorithm; - - @BeforeEach - void setUp() { - maskAlgorithm = new MilitaryIdentityNumberRandomReplaceAlgorithm(); - maskAlgorithm.init(PropertiesBuilder.build(new Property("type-codes", "军,人,士,文,职"))); - } - - @Test - void assertMask() { - assertThat(maskAlgorithm.mask("军字第1234567号"), not("军字第1234567号")); - assertThat(maskAlgorithm.mask(""), is("")); - assertNull(maskAlgorithm.mask(null)); - } - - @Test - void assertMaskWithInvalidProps() { - MilitaryIdentityNumberRandomReplaceAlgorithm algorithm = new MilitaryIdentityNumberRandomReplaceAlgorithm(); - assertThrows(MaskAlgorithmInitializationException.class, () -> algorithm.init(new Properties())); - } -} diff --git a/features/mask/core/src/test/java/org/apache/shardingsphere/mask/algorithm/replace/PersonalIdentityNumberRandomReplaceAlgorithmTest.java b/features/mask/core/src/test/java/org/apache/shardingsphere/mask/algorithm/replace/PersonalIdentityNumberRandomReplaceAlgorithmTest.java deleted file mode 100644 index 526253899daf1..0000000000000 --- a/features/mask/core/src/test/java/org/apache/shardingsphere/mask/algorithm/replace/PersonalIdentityNumberRandomReplaceAlgorithmTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed 
to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.mask.algorithm.replace; - -import org.apache.shardingsphere.test.util.PropertiesBuilder; -import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.hamcrest.MatcherAssert.assertThat; - -class PersonalIdentityNumberRandomReplaceAlgorithmTest { - - private PersonalIdentityNumberRandomReplaceAlgorithm maskAlgorithm; - - @BeforeEach - void setUp() { - maskAlgorithm = new PersonalIdentityNumberRandomReplaceAlgorithm(); - maskAlgorithm.init(PropertiesBuilder.build(new Property("alpha-two-country-area-code", "CN"))); - } - - @Test - void assertMask() { - assertThat(maskAlgorithm.mask("372928198312103215"), not("372928198312103215")); - assertThat(maskAlgorithm.mask("372928231210321"), not("372928231210321")); - assertThat(maskAlgorithm.mask("1234567891011121314"), is("1234567891011121314")); - assertThat(maskAlgorithm.mask("123456"), is("123456")); - assertThat(maskAlgorithm.mask(""), is("")); - 
assertThat(maskAlgorithm.mask(null), is(nullValue())); - } - - @Test - void assertMaskWithDifferentCountryCode() { - PersonalIdentityNumberRandomReplaceAlgorithm maskAlgorithmCN = new PersonalIdentityNumberRandomReplaceAlgorithm(); - maskAlgorithmCN.init(PropertiesBuilder.build(new Property("alpha-two-country-area-code", "CN"))); - PersonalIdentityNumberRandomReplaceAlgorithm maskAlgorithmJP = new PersonalIdentityNumberRandomReplaceAlgorithm(); - maskAlgorithmJP.init(PropertiesBuilder.build(new Property("alpha-two-country-area-code", "JP"))); - assertThat(maskAlgorithmCN.mask("372928198312103215"), not(maskAlgorithmJP.mask("372928198312103215"))); - } -} diff --git a/features/mask/core/src/test/java/org/apache/shardingsphere/mask/algorithm/replace/TelephoneRandomReplaceAlgorithmTest.java b/features/mask/core/src/test/java/org/apache/shardingsphere/mask/algorithm/replace/TelephoneRandomReplaceAlgorithmTest.java deleted file mode 100644 index 62be4eb06bc73..0000000000000 --- a/features/mask/core/src/test/java/org/apache/shardingsphere/mask/algorithm/replace/TelephoneRandomReplaceAlgorithmTest.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.mask.algorithm.replace; - -import org.apache.shardingsphere.infra.util.reflection.ReflectionUtils; -import org.apache.shardingsphere.mask.exception.algorithm.MaskAlgorithmInitializationException; -import org.apache.shardingsphere.test.util.PropertiesBuilder; -import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Optional; -import java.util.Properties; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; - -class TelephoneRandomReplaceAlgorithmTest { - - private static final Collection DEFAULT_NETWORK_NUMBERS = Arrays.asList("130", "131", "132", "133", "134", "135", "136", "137", "138", "139", "150", "151", "152", "153", "155", "156", - "157", "158", "159", "166", "170", "176", "177", "178", "180", "181", "182", "183", "184", "185", "186", "187", "188", "189", "191", "198", "199"); - - private TelephoneRandomReplaceAlgorithm maskAlgorithm; - - @BeforeEach - void setUp() { - maskAlgorithm = new TelephoneRandomReplaceAlgorithm(); - maskAlgorithm.init(PropertiesBuilder.build(new Property("network-numbers", "130, 130, 155,1702"))); - } - - @Test - void assertInitWithEmptyProps() { - maskAlgorithm.init(new Properties()); - Optional actual = ReflectionUtils.getFieldValue(maskAlgorithm, "networkNumbers"); - assertTrue(actual.isPresent()); - assertThat(actual.get(), is(DEFAULT_NETWORK_NUMBERS)); - } - - @Test - void assertMaskWithNullPlaintext() { - assertNull(maskAlgorithm.mask(null)); - } - - @Test - void assertMask() { - assertThat(maskAlgorithm.mask(""), is("")); - 
assertThat(maskAlgorithm.mask("13012345678"), not("13012345678")); - } - - @Test - void assertInitWhenConfigNotNumberProps() { - assertThrows(MaskAlgorithmInitializationException.class, () -> maskAlgorithm.init(PropertiesBuilder.build(new Property("network-numbers", "130, x130, 155,1702")))); - } -} diff --git a/features/mask/core/src/test/java/org/apache/shardingsphere/mask/algorithm/replace/UnifiedCreditCodeRandomReplaceAlgorithmTest.java b/features/mask/core/src/test/java/org/apache/shardingsphere/mask/algorithm/replace/UnifiedCreditCodeRandomReplaceAlgorithmTest.java deleted file mode 100644 index 27124e0deee23..0000000000000 --- a/features/mask/core/src/test/java/org/apache/shardingsphere/mask/algorithm/replace/UnifiedCreditCodeRandomReplaceAlgorithmTest.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.mask.algorithm.replace; - -import org.apache.shardingsphere.mask.exception.algorithm.MaskAlgorithmInitializationException; -import org.apache.shardingsphere.test.util.PropertiesBuilder; -import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertThrows; - -class UnifiedCreditCodeRandomReplaceAlgorithmTest { - - private UnifiedCreditCodeRandomReplaceAlgorithm maskAlgorithm; - - @BeforeEach - void setUp() { - maskAlgorithm = new UnifiedCreditCodeRandomReplaceAlgorithm(); - } - - @Test - void assertMask() { - maskAlgorithm.init(PropertiesBuilder.build(new Property("registration-department-codes", "1,2,3,4"), new Property("category-codes", "1,2,3,4"), - new Property("administrative-division-codes", "100000,200000,300000"))); - assertThat(maskAlgorithm.mask("123456781234567890"), not("123456781234567890")); - assertThat(maskAlgorithm.mask("123456781234567890").length(), is(18)); - } - - @Test - void assertInitWhenConfigIsNull() { - assertThrows(MaskAlgorithmInitializationException.class, () -> maskAlgorithm.init(PropertiesBuilder.build(new Property("registration-department-codes", "1,2,3,4")))); - } - - @Test - void assertInitWhenConfigIsEmpty() { - assertThrows(MaskAlgorithmInitializationException.class, () -> maskAlgorithm.init(PropertiesBuilder.build())); - } - - @Test - void assertInitWhenRegistrationDepartmentCodesIsEmpty() { - assertThrows(MaskAlgorithmInitializationException.class, () -> maskAlgorithm.init(PropertiesBuilder.build( - new Property("registration-department-codes", ""), - new Property("category-codes", "1,2,3,4"), - new Property("administrative-division-codes", "100000,200000,300000")))); - } - - @Test - void 
assertInitWhenCategoryCodesIsEmpty() { - assertThrows(MaskAlgorithmInitializationException.class, () -> maskAlgorithm.init(PropertiesBuilder.build( - new Property("registration-department-codes", "1,2,3,4"), - new Property("category-codes", ""), - new Property("administrative-division-codes", "100000,200000,300000")))); - } - - @Test - void assertInitWhenAdministrativeDivisionCodesIsEmpty() { - assertThrows(MaskAlgorithmInitializationException.class, () -> maskAlgorithm.init(PropertiesBuilder.build( - new Property("registration-department-codes", "1,2,3,4"), - new Property("category-codes", "1,2,3,4"), - new Property("administrative-division-codes", "")))); - } -} diff --git a/features/mask/core/src/test/java/org/apache/shardingsphere/mask/merge/dql/MaskAlgorithmMetaDataTest.java b/features/mask/core/src/test/java/org/apache/shardingsphere/mask/merge/dql/MaskAlgorithmMetaDataTest.java index eae75d2a29ccb..df4475a287dc4 100644 --- a/features/mask/core/src/test/java/org/apache/shardingsphere/mask/merge/dql/MaskAlgorithmMetaDataTest.java +++ b/features/mask/core/src/test/java/org/apache/shardingsphere/mask/merge/dql/MaskAlgorithmMetaDataTest.java @@ -60,7 +60,7 @@ void assertFindMaskAlgorithmByColumnIndex() { columnProjection.setOriginalColumn(new IdentifierValue("order_id")); columnProjection.setOriginalTable(new IdentifierValue("t_order")); when(selectStatementContext.getProjectionsContext().getExpandProjections()).thenReturn(Collections.singletonList(columnProjection)); - when(selectStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singletonList("t_order")); + when(selectStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singleton("t_order")); Optional actual = new MaskAlgorithmMetaData(database, maskRule, selectStatementContext).findMaskAlgorithmByColumnIndex(1); assertTrue(actual.isPresent()); assertThat(actual.get().getType(), is("MD5")); diff --git 
a/features/mask/core/src/test/java/org/apache/shardingsphere/mask/yaml/swapper/YamlMaskRuleConfigurationSwapperTest.java b/features/mask/core/src/test/java/org/apache/shardingsphere/mask/yaml/swapper/YamlMaskRuleConfigurationSwapperTest.java index 3aaa6bafaf6a4..fd6a697947c5b 100644 --- a/features/mask/core/src/test/java/org/apache/shardingsphere/mask/yaml/swapper/YamlMaskRuleConfigurationSwapperTest.java +++ b/features/mask/core/src/test/java/org/apache/shardingsphere/mask/yaml/swapper/YamlMaskRuleConfigurationSwapperTest.java @@ -46,7 +46,7 @@ void assertSwapToYamlConfiguration() { } private MaskRuleConfiguration createMaskRuleConfiguration() { - Collection tables = Collections.singletonList(new MaskTableRuleConfiguration("tbl", Collections.emptyList())); + Collection tables = Collections.singleton(new MaskTableRuleConfiguration("tbl", Collections.emptyList())); Map encryptors = Collections.singletonMap("myMaskAlgorithm", new AlgorithmConfiguration("MD5", new Properties())); return new MaskRuleConfiguration(tables, encryptors); } diff --git a/features/mask/distsql/parser/src/main/antlr4/imports/mask/BaseRule.g4 b/features/mask/distsql/parser/src/main/antlr4/imports/mask/BaseRule.g4 index 959519b325b35..0fa8839518185 100644 --- a/features/mask/distsql/parser/src/main/antlr4/imports/mask/BaseRule.g4 +++ b/features/mask/distsql/parser/src/main/antlr4/imports/mask/BaseRule.g4 @@ -39,11 +39,6 @@ buildInMaskAlgorithmType | MASK_FROM_X_TO_Y | MASK_BEFORE_SPECIAL_CHARS | MASK_AFTER_SPECIAL_CHARS - | PERSONAL_IDENTITY_NUMBER_RANDOM_REPLACE - | MILITARY_IDENTITY_NUMBER_RANDOM_REPLACE - | LANDLINE_NUMBER_RANDOM_REPLACE - | TELEPHONE_RANDOM_REPLACE - | UNIFIED_CREDIT_CODE_RANDOM_REPLACE | GENERIC_TABLE_RANDOM_REPLACE ; diff --git a/features/mask/distsql/parser/src/main/antlr4/imports/mask/Keyword.g4 b/features/mask/distsql/parser/src/main/antlr4/imports/mask/Keyword.g4 index 653700886f572..695bacacc22fc 100644 --- 
a/features/mask/distsql/parser/src/main/antlr4/imports/mask/Keyword.g4 +++ b/features/mask/distsql/parser/src/main/antlr4/imports/mask/Keyword.g4 @@ -131,26 +131,6 @@ MASK_AFTER_SPECIAL_CHARS : M A S K UL_ A F T E R UL_ S P E C I A L UL_ C H A R S ; -PERSONAL_IDENTITY_NUMBER_RANDOM_REPLACE - : P E R S O N A L UL_ I D E N T I T Y UL_ N U M B E R UL_ R A N D O M UL_ R E P L A C E - ; - -MILITARY_IDENTITY_NUMBER_RANDOM_REPLACE - : M I L I T A R Y UL_ I D E N T I T Y UL_ N U M B E R UL_ R A N D O M UL_ R E P L A C E - ; - -LANDLINE_NUMBER_RANDOM_REPLACE - : L A N D L I N E UL_ N U M B E R UL_ R A N D O M UL_ R E P L A C E - ; - -TELEPHONE_RANDOM_REPLACE - : T E L E P H O N E UL_ R A N D O M UL_ R E P L A C E - ; - -UNIFIED_CREDIT_CODE_RANDOM_REPLACE - : U N I F I E D UL_ C R E D I T UL_ C O D E UL_ R A N D O M UL_ R E P L A C E - ; - GENERIC_TABLE_RANDOM_REPLACE : G E N E R I C UL_ T A B L E UL_ R A N D O M UL_ R E P L A C E ; diff --git a/features/readwrite-splitting/core/src/main/java/org/apache/shardingsphere/readwritesplitting/rule/ReadwriteSplittingRule.java b/features/readwrite-splitting/core/src/main/java/org/apache/shardingsphere/readwritesplitting/rule/ReadwriteSplittingRule.java index 4bf5b1184295c..db9bad0d7374d 100644 --- a/features/readwrite-splitting/core/src/main/java/org/apache/shardingsphere/readwritesplitting/rule/ReadwriteSplittingRule.java +++ b/features/readwrite-splitting/core/src/main/java/org/apache/shardingsphere/readwritesplitting/rule/ReadwriteSplittingRule.java @@ -205,9 +205,4 @@ private Map> exportStaticDataSources() { } return result; } - - @Override - public String getType() { - return ReadwriteSplittingRule.class.getSimpleName(); - } } diff --git a/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/checker/ReadwriteSplittingRuleConfigurationCheckerTest.java 
b/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/checker/ReadwriteSplittingRuleConfigurationCheckerTest.java index 6ab88ca748c87..b7bf0b0fe1c72 100644 --- a/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/checker/ReadwriteSplittingRuleConfigurationCheckerTest.java +++ b/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/checker/ReadwriteSplittingRuleConfigurationCheckerTest.java @@ -33,6 +33,7 @@ import javax.sql.DataSource; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; @@ -65,9 +66,9 @@ private ReadwriteSplittingRuleConfiguration createInvalidConfiguration() { @Test void assertCheckWhenConfigInvalidWriteDataSource() { ReadwriteSplittingRuleConfiguration config = mock(ReadwriteSplittingRuleConfiguration.class); - List configurations = Arrays.asList(createDataSourceRuleConfig( + List configs = Arrays.asList(createDataSourceRuleConfig( "write_ds_0", Arrays.asList("read_ds_0", "read_ds_1")), createDataSourceRuleConfig("write_ds_2", Arrays.asList("read_ds_0", "read_ds_1"))); - when(config.getDataSources()).thenReturn(configurations); + when(config.getDataSources()).thenReturn(configs); RuleConfigurationChecker checker = OrderedSPILoader.getServicesByClass(RuleConfigurationChecker.class, Collections.singleton(config.getClass())).get(config.getClass()); assertThrows(DataSourceNameExistedException.class, () -> checker.check("test", config, mockDataSources(), Collections.emptyList())); } @@ -76,9 +77,9 @@ void assertCheckWhenConfigInvalidWriteDataSource() { @Test void assertCheckWhenConfigInvalidReadDataSource() { ReadwriteSplittingRuleConfiguration config = mock(ReadwriteSplittingRuleConfiguration.class); - List configurations = Arrays.asList(createDataSourceRuleConfig( + List configs = Arrays.asList(createDataSourceRuleConfig( "write_ds_0", 
Arrays.asList("read_ds_0", "read_ds_0")), createDataSourceRuleConfig("write_ds_1", Arrays.asList("read_ds_0", "read_ds_0"))); - when(config.getDataSources()).thenReturn(configurations); + when(config.getDataSources()).thenReturn(configs); RuleConfigurationChecker checker = OrderedSPILoader.getServicesByClass(RuleConfigurationChecker.class, Collections.singleton(config.getClass())).get(config.getClass()); assertThrows(DuplicateDataSourceException.class, () -> checker.check("test", config, mockDataSources(), Collections.emptyList())); } @@ -87,7 +88,7 @@ void assertCheckWhenConfigInvalidReadDataSource() { @Test void assertCheckWeightLoadBalanceInvalidDataSourceName() { ReadwriteSplittingRuleConfiguration config = mock(ReadwriteSplittingRuleConfiguration.class); - List configs = Collections.singletonList(createDataSourceRuleConfig("write_ds_0", Arrays.asList("read_ds_0", "read_ds_1"))); + Collection configs = Collections.singleton(createDataSourceRuleConfig("write_ds_0", Arrays.asList("read_ds_0", "read_ds_1"))); when(config.getDataSources()).thenReturn(configs); AlgorithmConfiguration algorithm = new AlgorithmConfiguration("WEIGHT", PropertiesBuilder.build(new Property("read_ds_2", "1"), new Property("read_ds_1", "2"))); when(config.getLoadBalancers()).thenReturn(Collections.singletonMap("weight_ds", algorithm)); @@ -111,7 +112,7 @@ private ReadwriteSplittingRuleConfiguration createContainsOtherRulesDatasourceCo when(dataSourceConfig.getName()).thenReturn("readwrite_ds"); when(dataSourceConfig.getWriteDataSourceName()).thenReturn("otherDatasourceName"); when(dataSourceConfig.getReadDataSourceNames()).thenReturn(Arrays.asList("read_ds_0", "read_ds_1")); - when(result.getDataSources()).thenReturn(Collections.singletonList(dataSourceConfig)); + when(result.getDataSources()).thenReturn(Collections.singleton(dataSourceConfig)); return result; } diff --git 
a/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/route/ReadwriteSplittingSQLRouterTest.java b/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/route/ReadwriteSplittingSQLRouterTest.java index 07386968eb9f1..d3bbe56e33c9f 100644 --- a/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/route/ReadwriteSplittingSQLRouterTest.java +++ b/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/route/ReadwriteSplittingSQLRouterTest.java @@ -235,7 +235,7 @@ void assertSqlHintRouteWriteOnly() { private RouteContext mockRouteContext() { RouteContext result = new RouteContext(); - RouteUnit routeUnit = new RouteUnit(new RouteMapper(DATASOURCE_NAME, DATASOURCE_NAME), Collections.singletonList(new RouteMapper("table", "table_0"))); + RouteUnit routeUnit = new RouteUnit(new RouteMapper(DATASOURCE_NAME, DATASOURCE_NAME), Collections.singleton(new RouteMapper("table", "table_0"))); result.getRouteUnits().add(routeUnit); result.getRouteUnits().add(new RouteUnit(new RouteMapper(NONE_READWRITE_SPLITTING_DATASOURCE_NAME, NONE_READWRITE_SPLITTING_DATASOURCE_NAME), Collections.emptyList())); return result; diff --git a/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/route/qualified/type/QualifiedReadwriteSplittingTransactionalDataSourceRouterTest.java b/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/route/qualified/type/QualifiedReadwriteSplittingTransactionalDataSourceRouterTest.java index 76af7184b184c..29d059ed68b9d 100644 --- a/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/route/qualified/type/QualifiedReadwriteSplittingTransactionalDataSourceRouterTest.java +++ 
b/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/route/qualified/type/QualifiedReadwriteSplittingTransactionalDataSourceRouterTest.java @@ -57,14 +57,14 @@ void assertWriteRouteTransaction() { @Test void assertRoute() { - ReadwriteSplittingDataSourceRuleConfiguration readwriteSplittingDataSourceRuleConfiguration = + ReadwriteSplittingDataSourceRuleConfiguration readwriteSplittingDataSourceRuleConfig = new ReadwriteSplittingDataSourceRuleConfiguration("test_config", "write_ds", Arrays.asList("read_ds_0", "read_ds_1"), null); ReadwriteSplittingDataSourceRule rule; - rule = new ReadwriteSplittingDataSourceRule(readwriteSplittingDataSourceRuleConfiguration, TransactionalReadQueryStrategy.PRIMARY, null); + rule = new ReadwriteSplittingDataSourceRule(readwriteSplittingDataSourceRuleConfig, TransactionalReadQueryStrategy.PRIMARY, null); assertThat(new QualifiedReadwriteSplittingTransactionalDataSourceRouter(new ConnectionContext()).route(rule), is("write_ds")); - rule = new ReadwriteSplittingDataSourceRule(readwriteSplittingDataSourceRuleConfiguration, TransactionalReadQueryStrategy.FIXED, new RoundRobinReadQueryLoadBalanceAlgorithm()); + rule = new ReadwriteSplittingDataSourceRule(readwriteSplittingDataSourceRuleConfig, TransactionalReadQueryStrategy.FIXED, new RoundRobinReadQueryLoadBalanceAlgorithm()); assertThat(new QualifiedReadwriteSplittingTransactionalDataSourceRouter(new ConnectionContext()).route(rule), is("read_ds_0")); - rule = new ReadwriteSplittingDataSourceRule(readwriteSplittingDataSourceRuleConfiguration, TransactionalReadQueryStrategy.DYNAMIC, new RoundRobinReadQueryLoadBalanceAlgorithm()); + rule = new ReadwriteSplittingDataSourceRule(readwriteSplittingDataSourceRuleConfig, TransactionalReadQueryStrategy.DYNAMIC, new RoundRobinReadQueryLoadBalanceAlgorithm()); assertThat(new QualifiedReadwriteSplittingTransactionalDataSourceRouter(new ConnectionContext()).route(rule), is("read_ds_0")); } } diff --git 
a/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/rule/builder/ReadwriteSplittingRuleBuilderTest.java b/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/rule/builder/ReadwriteSplittingRuleBuilderTest.java index 0b1ba3ac2dc47..8aa3af15ada46 100644 --- a/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/rule/builder/ReadwriteSplittingRuleBuilderTest.java +++ b/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/rule/builder/ReadwriteSplittingRuleBuilderTest.java @@ -36,10 +36,8 @@ class ReadwriteSplittingRuleBuilderTest { @SuppressWarnings({"rawtypes", "unchecked"}) @Test void assertBuild() { - ReadwriteSplittingRuleConfiguration ruleConfig = new ReadwriteSplittingRuleConfiguration( - Collections.singletonList(new ReadwriteSplittingDataSourceRuleConfiguration("name", "writeDataSourceName", - Collections.singletonList("readDataSourceName"), "loadBalancerName")), - Collections.emptyMap()); + ReadwriteSplittingRuleConfiguration ruleConfig = new ReadwriteSplittingRuleConfiguration(Collections.singleton( + new ReadwriteSplittingDataSourceRuleConfiguration("name", "writeDataSourceName", Collections.singletonList("readDataSourceName"), "loadBalancerName")), Collections.emptyMap()); DatabaseRuleBuilder builder = OrderedSPILoader.getServices(DatabaseRuleBuilder.class, Collections.singleton(ruleConfig)).get(ruleConfig); assertThat(builder.build(ruleConfig, "", Collections.emptyMap(), Collections.emptyList(), mock(InstanceContext.class)), instanceOf(ReadwriteSplittingRule.class)); } diff --git a/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/yaml/swapper/YamlReadwriteSplittingRuleConfigurationSwapperTest.java 
b/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/yaml/swapper/YamlReadwriteSplittingRuleConfigurationSwapperTest.java index 4f229d916d300..1ece80c0f5c68 100644 --- a/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/yaml/swapper/YamlReadwriteSplittingRuleConfigurationSwapperTest.java +++ b/features/readwrite-splitting/core/src/test/java/org/apache/shardingsphere/readwritesplitting/yaml/swapper/YamlReadwriteSplittingRuleConfigurationSwapperTest.java @@ -60,7 +60,7 @@ void assertReadwriteSplittingRule(final ReadwriteSplittingRuleConfiguration actu } private ReadwriteSplittingRuleConfiguration creatReadwriteSplittingRuleConfiguration() { - Collection dataSources = Collections.singletonList( + Collection dataSources = Collections.singleton( new ReadwriteSplittingDataSourceRuleConfiguration("readwrite", "write_ds", Arrays.asList("read_ds_0", "read_ds_1"), "random")); Map loadBalancers = Collections.singletonMap("myLoadBalancer", new AlgorithmConfiguration("RANDOM", new Properties())); return new ReadwriteSplittingRuleConfiguration(dataSources, loadBalancers); diff --git a/features/readwrite-splitting/distsql/handler/src/main/java/org/apache/shardingsphere/readwritesplitting/distsql/handler/checker/ReadwriteSplittingRuleStatementChecker.java b/features/readwrite-splitting/distsql/handler/src/main/java/org/apache/shardingsphere/readwritesplitting/distsql/handler/checker/ReadwriteSplittingRuleStatementChecker.java index 86998457671c6..2ade1dc59738e 100644 --- a/features/readwrite-splitting/distsql/handler/src/main/java/org/apache/shardingsphere/readwritesplitting/distsql/handler/checker/ReadwriteSplittingRuleStatementChecker.java +++ b/features/readwrite-splitting/distsql/handler/src/main/java/org/apache/shardingsphere/readwritesplitting/distsql/handler/checker/ReadwriteSplittingRuleStatementChecker.java @@ -126,8 +126,8 @@ private static Collection getDuplicated(final Collection 
require private static void checkDuplicateRuleNamesWithExistsDataSources(final ShardingSphereDatabase database, final Collection segments) { Collection currentRuleNames = new HashSet<>(); ResourceMetaData resourceMetaData = database.getResourceMetaData(); - if (null != resourceMetaData && null != resourceMetaData.getDataSources()) { - currentRuleNames.addAll(resourceMetaData.getDataSources().keySet()); + if (null != resourceMetaData && null != resourceMetaData.getStorageUnitMetaData().getStorageUnits()) { + currentRuleNames.addAll(resourceMetaData.getStorageUnitMetaData().getStorageUnits().keySet()); } currentRuleNames.addAll(getLogicDataSources(database)); Collection toBeCreatedRuleNames = segments.stream().map(ReadwriteSplittingRuleSegment::getName).filter(currentRuleNames::contains).collect(Collectors.toList()); diff --git a/features/readwrite-splitting/distsql/handler/src/test/java/org/apache/shardingsphere/readwritesplitting/distsql/handler/update/CreateReadwriteSplittingRuleStatementUpdaterTest.java b/features/readwrite-splitting/distsql/handler/src/test/java/org/apache/shardingsphere/readwritesplitting/distsql/handler/update/CreateReadwriteSplittingRuleStatementUpdaterTest.java index 3da19a8b5acd8..c2ccbdc0e7244 100644 --- a/features/readwrite-splitting/distsql/handler/src/test/java/org/apache/shardingsphere/readwritesplitting/distsql/handler/update/CreateReadwriteSplittingRuleStatementUpdaterTest.java +++ b/features/readwrite-splitting/distsql/handler/src/test/java/org/apache/shardingsphere/readwritesplitting/distsql/handler/update/CreateReadwriteSplittingRuleStatementUpdaterTest.java @@ -23,6 +23,7 @@ import org.apache.shardingsphere.distsql.parser.segment.AlgorithmSegment; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnitMetaData; import 
org.apache.shardingsphere.infra.rule.identifier.type.DataSourceContainedRule; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.readwritesplitting.api.ReadwriteSplittingRuleConfiguration; @@ -67,18 +68,19 @@ class CreateReadwriteSplittingRuleStatementUpdaterTest { @BeforeEach void before() { when(database.getResourceMetaData()).thenReturn(resourceMetaData); + when(resourceMetaData.getStorageUnitMetaData()).thenReturn(mock(StorageUnitMetaData.class)); when(database.getRuleMetaData().findRules(DataSourceContainedRule.class)).thenReturn(Collections.emptyList()); } @Test void assertCheckSQLStatementWithDuplicateRuleNames() { - when(resourceMetaData.getDataSources()).thenReturn(Collections.emptyMap()); + when(resourceMetaData.getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.emptyMap()); assertThrows(DuplicateRuleException.class, () -> updater.checkSQLStatement(database, createSQLStatement("TEST"), createCurrentRuleConfiguration())); } @Test void assertCheckSQLStatementWithDuplicateResource() { - when(resourceMetaData.getDataSources()).thenReturn(Collections.singletonMap("write_ds", null)); + when(resourceMetaData.getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("write_ds", null)); assertThrows(InvalidRuleConfigurationException.class, () -> updater.checkSQLStatement(database, createSQLStatement("write_ds", "TEST"), createCurrentRuleConfiguration())); } diff --git a/features/readwrite-splitting/distsql/parser/src/main/java/org/apache/shardingsphere/readwritesplitting/distsql/parser/core/ReadwriteSplittingDistSQLStatementVisitor.java b/features/readwrite-splitting/distsql/parser/src/main/java/org/apache/shardingsphere/readwritesplitting/distsql/parser/core/ReadwriteSplittingDistSQLStatementVisitor.java index 688364c2b7101..cc7d45cc3fc7b 100644 --- 
a/features/readwrite-splitting/distsql/parser/src/main/java/org/apache/shardingsphere/readwritesplitting/distsql/parser/core/ReadwriteSplittingDistSQLStatementVisitor.java +++ b/features/readwrite-splitting/distsql/parser/src/main/java/org/apache/shardingsphere/readwritesplitting/distsql/parser/core/ReadwriteSplittingDistSQLStatementVisitor.java @@ -66,7 +66,7 @@ public ASTNode visitAlterReadwriteSplittingRule(final AlterReadwriteSplittingRul @Override public ASTNode visitDropReadwriteSplittingRule(final DropReadwriteSplittingRuleContext ctx) { - return new DropReadwriteSplittingRuleStatement(ctx.ifExists() != null, ctx.ruleName().stream().map(this::getIdentifierValue).collect(Collectors.toList())); + return new DropReadwriteSplittingRuleStatement(null != ctx.ifExists(), ctx.ruleName().stream().map(this::getIdentifierValue).collect(Collectors.toList())); } @Override diff --git a/features/shadow/core/src/main/java/org/apache/shardingsphere/shadow/rule/ShadowRule.java b/features/shadow/core/src/main/java/org/apache/shardingsphere/shadow/rule/ShadowRule.java index 14c008df7d977..154fc28b50b05 100644 --- a/features/shadow/core/src/main/java/org/apache/shardingsphere/shadow/rule/ShadowRule.java +++ b/features/shadow/core/src/main/java/org/apache/shardingsphere/shadow/rule/ShadowRule.java @@ -247,9 +247,4 @@ private Collection createShadowDataSources(final ShadowDataSourceRule sh result.add(shadowDataSourceRule.getShadowDataSource()); return result; } - - @Override - public String getType() { - return ShadowRule.class.getSimpleName(); - } } diff --git a/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/route/engine/determiner/HintShadowAlgorithmDeterminerTest.java b/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/route/engine/determiner/HintShadowAlgorithmDeterminerTest.java index cfc97b652b7ce..8084c10b64cb7 100644 --- 
a/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/route/engine/determiner/HintShadowAlgorithmDeterminerTest.java +++ b/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/route/engine/determiner/HintShadowAlgorithmDeterminerTest.java @@ -48,7 +48,7 @@ void assertIsShadow() { private ShadowRuleConfiguration createShadowRuleConfiguration() { ShadowRuleConfiguration result = new ShadowRuleConfiguration(); result.setDataSources(createDataSources()); - result.setTables(Collections.singletonMap("t_order", new ShadowTableConfiguration(Collections.singletonList("shadow-data-source-0"), Collections.singleton("sql-hint-algorithm")))); + result.setTables(Collections.singletonMap("t_order", new ShadowTableConfiguration(Collections.singleton("shadow-data-source-0"), Collections.singleton("sql-hint-algorithm")))); result.setShadowAlgorithms(Collections.singletonMap("sql-hint-algorithm", new AlgorithmConfiguration("SQL_HINT", new Properties()))); return result; } diff --git a/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/route/engine/impl/ShadowNonDMLStatementRoutingEngineTest.java b/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/route/engine/impl/ShadowNonDMLStatementRoutingEngineTest.java index ca2385a7a33b2..dce38affae344 100644 --- a/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/route/engine/impl/ShadowNonDMLStatementRoutingEngineTest.java +++ b/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/route/engine/impl/ShadowNonDMLStatementRoutingEngineTest.java @@ -83,8 +83,8 @@ private RouteUnit createRouteUnit() { private ShadowRuleConfiguration createShadowRuleConfiguration() { ShadowRuleConfiguration result = new ShadowRuleConfiguration(); - result.setDataSources(Collections.singletonList(new ShadowDataSourceConfiguration("shadow-data-source", "ds", "ds_shadow"))); - result.setTables(Collections.singletonMap("t_order", new 
ShadowTableConfiguration(Collections.singletonList("shadow-data-source"), Collections.singleton("sql-hint-algorithm")))); + result.setDataSources(Collections.singleton(new ShadowDataSourceConfiguration("shadow-data-source", "ds", "ds_shadow"))); + result.setTables(Collections.singletonMap("t_order", new ShadowTableConfiguration(Collections.singleton("shadow-data-source"), Collections.singleton("sql-hint-algorithm")))); result.setShadowAlgorithms(createShadowAlgorithms()); return result; } diff --git a/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/rule/ShadowRuleTest.java b/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/rule/ShadowRuleTest.java index 7841be2b44945..274fd09233cde 100644 --- a/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/rule/ShadowRuleTest.java +++ b/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/rule/ShadowRuleTest.java @@ -68,8 +68,8 @@ private Map createShadowAlgorithms() { private Map createTables() { Map result = new LinkedHashMap<>(); - result.put("t_user", new ShadowTableConfiguration(Collections.singletonList("shadow-data-source-0"), createShadowAlgorithmNames("t_user"))); - result.put("t_order", new ShadowTableConfiguration(Collections.singletonList("shadow-data-source-1"), createShadowAlgorithmNames("t_order"))); + result.put("t_user", new ShadowTableConfiguration(Collections.singleton("shadow-data-source-0"), createShadowAlgorithmNames("t_user"))); + result.put("t_order", new ShadowTableConfiguration(Collections.singleton("shadow-data-source-1"), createShadowAlgorithmNames("t_order"))); return result; } diff --git a/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/yaml/ShadowRuleConfigurationYamlIT.java b/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/yaml/ShadowRuleConfigurationYamlIT.java index 60006b525720a..65f55a4591e85 100644 --- 
a/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/yaml/ShadowRuleConfigurationYamlIT.java +++ b/features/shadow/core/src/test/java/org/apache/shardingsphere/shadow/yaml/ShadowRuleConfigurationYamlIT.java @@ -38,18 +38,18 @@ class ShadowRuleConfigurationYamlIT extends YamlRuleConfigurationIT { @Override protected void assertYamlRootConfiguration(final YamlRootConfiguration actual) { assertDataSourceMap(actual); - Optional shadowRuleConfiguration = actual.getRules().stream() + Optional shadowRuleConfig = actual.getRules().stream() .filter(each -> each instanceof YamlShadowRuleConfiguration).findFirst().map(optional -> (YamlShadowRuleConfiguration) optional); - assertTrue(shadowRuleConfiguration.isPresent()); - assertThat(shadowRuleConfiguration.get().getTables().size(), is(3)); - assertTOrder(shadowRuleConfiguration.get()); - assertTOrderItem(shadowRuleConfiguration.get()); - assertTAddress(shadowRuleConfiguration.get()); - assertThat(shadowRuleConfiguration.get().getShadowAlgorithms().size(), is(4)); - assertUserIdInsertMatchAlgorithm(shadowRuleConfiguration.get()); - assertUserIdUpdateMatchAlgorithm(shadowRuleConfiguration.get()); - assertUserIdSelectMatchAlgorithm(shadowRuleConfiguration.get()); - assertSqlHintAlgorithm(shadowRuleConfiguration.get()); + assertTrue(shadowRuleConfig.isPresent()); + assertThat(shadowRuleConfig.get().getTables().size(), is(3)); + assertTOrder(shadowRuleConfig.get()); + assertTOrderItem(shadowRuleConfig.get()); + assertTAddress(shadowRuleConfig.get()); + assertThat(shadowRuleConfig.get().getShadowAlgorithms().size(), is(4)); + assertUserIdInsertMatchAlgorithm(shadowRuleConfig.get()); + assertUserIdUpdateMatchAlgorithm(shadowRuleConfig.get()); + assertUserIdSelectMatchAlgorithm(shadowRuleConfig.get()); + assertSqlHintAlgorithm(shadowRuleConfig.get()); } private void assertDataSourceMap(final YamlRootConfiguration actual) { diff --git 
a/features/shadow/distsql/handler/src/main/java/org/apache/shardingsphere/shadow/distsql/handler/query/ShowDefaultShadowAlgorithmExecutor.java b/features/shadow/distsql/handler/src/main/java/org/apache/shardingsphere/shadow/distsql/handler/query/ShowDefaultShadowAlgorithmExecutor.java index db80eef366cea..7201e5a48f288 100644 --- a/features/shadow/distsql/handler/src/main/java/org/apache/shardingsphere/shadow/distsql/handler/query/ShowDefaultShadowAlgorithmExecutor.java +++ b/features/shadow/distsql/handler/src/main/java/org/apache/shardingsphere/shadow/distsql/handler/query/ShowDefaultShadowAlgorithmExecutor.java @@ -50,7 +50,7 @@ public Collection getRows(final ShardingSphereDatabase ShadowRuleConfiguration config = (ShadowRuleConfiguration) rule.get().getConfiguration(); String defaultAlgorithm = config.getDefaultShadowAlgorithmName(); Iterator> data = config.getShadowAlgorithms().entrySet().stream().filter(each -> each.getKey().equals(defaultAlgorithm)) - .collect(Collectors.toMap(Entry::getKey, Entry::getValue, (oldValue, currentValue) -> currentValue)).entrySet().iterator(); + .collect(Collectors.toMap(Entry::getKey, Entry::getValue)).entrySet().iterator(); Collection result = new LinkedList<>(); while (data.hasNext()) { Entry entry = data.next(); diff --git a/features/shadow/distsql/handler/src/main/java/org/apache/shardingsphere/shadow/distsql/handler/query/ShowShadowAlgorithmsExecutor.java b/features/shadow/distsql/handler/src/main/java/org/apache/shardingsphere/shadow/distsql/handler/query/ShowShadowAlgorithmsExecutor.java index eafea8f7e6704..060c024551787 100644 --- a/features/shadow/distsql/handler/src/main/java/org/apache/shardingsphere/shadow/distsql/handler/query/ShowShadowAlgorithmsExecutor.java +++ b/features/shadow/distsql/handler/src/main/java/org/apache/shardingsphere/shadow/distsql/handler/query/ShowShadowAlgorithmsExecutor.java @@ -63,7 +63,7 @@ public Collection getColumnNames() { } private String convertToString(final Properties props) { - 
return null != props ? PropertiesConverter.convert(props) : ""; + return null == props ? "" : PropertiesConverter.convert(props); } @Override diff --git a/features/shadow/distsql/handler/src/main/java/org/apache/shardingsphere/shadow/distsql/handler/query/ShowShadowTableRulesExecutor.java b/features/shadow/distsql/handler/src/main/java/org/apache/shardingsphere/shadow/distsql/handler/query/ShowShadowTableRulesExecutor.java index 14981d56c2af3..0afc6c3336a85 100644 --- a/features/shadow/distsql/handler/src/main/java/org/apache/shardingsphere/shadow/distsql/handler/query/ShowShadowTableRulesExecutor.java +++ b/features/shadow/distsql/handler/src/main/java/org/apache/shardingsphere/shadow/distsql/handler/query/ShowShadowTableRulesExecutor.java @@ -59,9 +59,9 @@ public Collection getRows(final ShardingSphereDatabase return result; } - private List> buildData(final ShadowRuleConfiguration shadowRuleConfiguration) { + private List> buildData(final ShadowRuleConfiguration shadowRuleConfig) { List> result = new ArrayList<>(); - shadowRuleConfiguration.getTables().forEach((key, value) -> { + shadowRuleConfig.getTables().forEach((key, value) -> { Map map = new HashMap<>(); map.put(SHADOW_TABLE, key); map.put(SHADOW_ALGORITHM_NAME, convertToString(value.getShadowAlgorithmNames())); diff --git a/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/query/ShowShadowRuleExecutorTest.java b/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/query/ShowShadowRuleExecutorTest.java index dc768654bc51a..e69a58a74d92e 100644 --- a/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/query/ShowShadowRuleExecutorTest.java +++ b/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/query/ShowShadowRuleExecutorTest.java @@ -92,8 +92,8 @@ private RuleConfiguration createRuleConfiguration() { ShadowRuleConfiguration result = new ShadowRuleConfiguration(); 
result.getDataSources().add(new ShadowDataSourceConfiguration("shadow_rule", "source", "shadow")); result.getShadowAlgorithms().put("user_id_select_match_algorithm", new AlgorithmConfiguration("REGEX_MATCH", new Properties())); - result.getTables().put("t_order", new ShadowTableConfiguration(Collections.singletonList("shadow_rule"), Collections.singletonList("user_id_select_match_algorithm"))); - result.getTables().put("t_order_item", new ShadowTableConfiguration(Collections.singletonList("shadow_rule"), Collections.singletonList("user_id_select_match_algorithm"))); + result.getTables().put("t_order", new ShadowTableConfiguration(Collections.singleton("shadow_rule"), Collections.singleton("user_id_select_match_algorithm"))); + result.getTables().put("t_order_item", new ShadowTableConfiguration(Collections.singleton("shadow_rule"), Collections.singleton("user_id_select_match_algorithm"))); return result; } } diff --git a/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/update/AlterShadowRuleStatementUpdaterTest.java b/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/update/AlterShadowRuleStatementUpdaterTest.java index 925451e04d868..c241960af4748 100644 --- a/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/update/AlterShadowRuleStatementUpdaterTest.java +++ b/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/update/AlterShadowRuleStatementUpdaterTest.java @@ -107,7 +107,7 @@ void assertExecuteDuplicateAlgorithm() { ShadowAlgorithmSegment segment = new ShadowAlgorithmSegment("algorithmName", new AlgorithmSegment("name", PropertiesBuilder.build(new Property("type", "value")))); AlterShadowRuleStatement sqlStatement = new AlterShadowRuleStatement(Arrays.asList( new ShadowRuleSegment("initRuleName1", "ds", null, Collections.singletonMap("t_order", Collections.singleton(segment))), - new 
ShadowRuleSegment("initRuleName2", "ds1", null, Collections.singletonMap("t_order_1", Collections.singletonList(segment))))); + new ShadowRuleSegment("initRuleName2", "ds1", null, Collections.singletonMap("t_order_1", Collections.singleton(segment))))); assertThrows(AlgorithmInUsedException.class, () -> updater.checkSQLStatement(database, sqlStatement, currentConfig)); } @@ -116,7 +116,7 @@ void assertExecuteDuplicateAlgorithmWithoutConfiguration() { ShadowAlgorithmSegment segment = new ShadowAlgorithmSegment("algorithmName", new AlgorithmSegment("name", PropertiesBuilder.build(new Property("type", "value")))); AlterShadowRuleStatement sqlStatement = new AlterShadowRuleStatement(Arrays.asList( new ShadowRuleSegment("initRuleName1", "ds", null, Collections.singletonMap("t_order", Collections.singleton(segment))), - new ShadowRuleSegment("initRuleName2", "ds1", null, Collections.singletonMap("t_order_1", Collections.singletonList(segment))))); + new ShadowRuleSegment("initRuleName2", "ds1", null, Collections.singletonMap("t_order_1", Collections.singleton(segment))))); assertThrows(AlgorithmInUsedException.class, () -> updater.checkSQLStatement(database, sqlStatement, currentConfig)); } @@ -127,7 +127,7 @@ void assertExecuteSuccess() { ShadowAlgorithmSegment segment2 = new ShadowAlgorithmSegment("algorithmName2", new AlgorithmSegment("SQL_HINT", props)); AlterShadowRuleStatement sqlStatement = new AlterShadowRuleStatement(Arrays.asList( new ShadowRuleSegment("initRuleName1", "ds", null, Collections.singletonMap("t_order", Collections.singleton(segment1))), - new ShadowRuleSegment("initRuleName2", "ds1", null, Collections.singletonMap("t_order_1", Collections.singletonList(segment2))))); + new ShadowRuleSegment("initRuleName2", "ds1", null, Collections.singletonMap("t_order_1", Collections.singleton(segment2))))); updater.checkSQLStatement(database, sqlStatement, currentConfig); } @@ -137,7 +137,7 @@ void assertExecuteSuccessWithoutProps() { ShadowAlgorithmSegment 
segment2 = new ShadowAlgorithmSegment("algorithmName2", new AlgorithmSegment("SQL_HINT", null)); AlterShadowRuleStatement sqlStatement = new AlterShadowRuleStatement(Arrays.asList( new ShadowRuleSegment("initRuleName1", "ds", null, Collections.singletonMap("t_order", Collections.singleton(segment1))), - new ShadowRuleSegment("initRuleName2", "ds1", null, Collections.singletonMap("t_order_1", Collections.singletonList(segment2))))); + new ShadowRuleSegment("initRuleName2", "ds1", null, Collections.singletonMap("t_order_1", Collections.singleton(segment2))))); updater.checkSQLStatement(database, sqlStatement, currentConfig); } } diff --git a/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/update/CreateShadowRuleStatementUpdaterTest.java b/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/update/CreateShadowRuleStatementUpdaterTest.java index c601ab1c17bd2..1c68f0fd951cd 100644 --- a/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/update/CreateShadowRuleStatementUpdaterTest.java +++ b/features/shadow/distsql/handler/src/test/java/org/apache/shardingsphere/shadow/distsql/update/CreateShadowRuleStatementUpdaterTest.java @@ -81,7 +81,7 @@ void assertExecuteWithDuplicateRuleName() { @Test void assertExecuteWithDuplicateRuleNameInMetaData() { - when(currentConfig.getDataSources()).thenReturn(Collections.singletonList(new ShadowDataSourceConfiguration("ruleName", "ds", "ds_shadow"))); + when(currentConfig.getDataSources()).thenReturn(Collections.singleton(new ShadowDataSourceConfiguration("ruleName", "ds", "ds_shadow"))); ShadowRuleSegment ruleSegment = new ShadowRuleSegment("ruleName", null, null, null); assertThrows(DuplicateRuleException.class, () -> updater.checkSQLStatement(database, new CreateShadowRuleStatement(false, Collections.singleton(ruleSegment)), currentConfig)); } @@ -107,7 +107,7 @@ void assertExecuteDuplicateAlgorithm() { 
ShadowAlgorithmSegment segment = new ShadowAlgorithmSegment("algorithmName", new AlgorithmSegment("name", PropertiesBuilder.build(new Property("type", "value")))); CreateShadowRuleStatement sqlStatement = new CreateShadowRuleStatement(false, Arrays.asList( new ShadowRuleSegment("ruleName", "ds", null, Collections.singletonMap("t_order", Collections.singleton(segment))), - new ShadowRuleSegment("ruleName", "ds1", null, Collections.singletonMap("t_order_1", Collections.singletonList(segment))))); + new ShadowRuleSegment("ruleName", "ds1", null, Collections.singletonMap("t_order_1", Collections.singleton(segment))))); assertThrows(DuplicateRuleException.class, () -> updater.checkSQLStatement(database, sqlStatement, currentConfig)); } @@ -116,7 +116,7 @@ void assertExecuteDuplicateAlgorithmWithoutConfiguration() { ShadowAlgorithmSegment segment = new ShadowAlgorithmSegment("algorithmName", new AlgorithmSegment("name", PropertiesBuilder.build(new Property("type", "value")))); CreateShadowRuleStatement sqlStatement = new CreateShadowRuleStatement(false, Arrays.asList( new ShadowRuleSegment("ruleName", "ds", null, Collections.singletonMap("t_order", Collections.singleton(segment))), - new ShadowRuleSegment("ruleName1", "ds1", null, Collections.singletonMap("t_order_1", Collections.singletonList(segment))))); + new ShadowRuleSegment("ruleName1", "ds1", null, Collections.singletonMap("t_order_1", Collections.singleton(segment))))); assertThrows(DuplicateRuleException.class, () -> updater.checkSQLStatement(database, sqlStatement, null)); } diff --git a/features/sharding/core/pom.xml b/features/sharding/core/pom.xml index 04f293dbce551..826168dbb1061 100644 --- a/features/sharding/core/pom.xml +++ b/features/sharding/core/pom.xml @@ -78,16 +78,6 @@ shardingsphere-sql-federation-core ${project.version} - - org.apache.shardingsphere - shardingsphere-sharding-cosid - ${project.version} - - - org.apache.shardingsphere - shardingsphere-sharding-nanoid - ${project.version} - 
org.apache.shardingsphere shardingsphere-infra-expr-core diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/cache/ShardingCache.java b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/cache/ShardingCache.java index fed3039625bcc..030fbbd4eb0dd 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/cache/ShardingCache.java +++ b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/cache/ShardingCache.java @@ -41,11 +41,11 @@ public final class ShardingCache { private final ShardingRouteCache routeCache; - public ShardingCache(final ShardingCacheConfiguration configuration, final ShardingRule shardingRule) { - this.configuration = configuration; + public ShardingCache(final ShardingCacheConfiguration config, final ShardingRule shardingRule) { + configuration = config; this.shardingRule = shardingRule; timestampServiceRule = new TimestampServiceRule(new DefaultTimestampServiceConfigurationBuilder().build()); routeCacheableChecker = new ShardingRouteCacheableChecker(this); - routeCache = new ShardingRouteCache(configuration.getRouteCache()); + routeCache = new ShardingRouteCache(config.getRouteCache()); } } diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/metadata/data/ShardingStatisticsTableCollector.java b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/metadata/data/ShardingStatisticsTableCollector.java index 210c7b3b721a4..b3945e77131fa 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/metadata/data/ShardingStatisticsTableCollector.java +++ b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/metadata/data/ShardingStatisticsTableCollector.java @@ -23,6 +23,7 @@ import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; import org.apache.shardingsphere.infra.datanode.DataNode; import 
org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; import org.apache.shardingsphere.infra.metadata.statistics.ShardingSphereRowData; import org.apache.shardingsphere.infra.metadata.statistics.ShardingSphereTableData; @@ -31,7 +32,6 @@ import org.apache.shardingsphere.sharding.rule.ShardingRule; import org.apache.shardingsphere.sharding.rule.TableRule; -import javax.sql.DataSource; import java.math.BigDecimal; import java.sql.Connection; import java.sql.SQLException; @@ -63,37 +63,36 @@ public Optional collect(final String databaseName, fina return result.getRows().isEmpty() ? Optional.empty() : Optional.of(result); } - private void collectFromDatabase(final ShardingSphereDatabase shardingSphereDatabase, final ShardingSphereTableData tableData) throws SQLException { - Optional shardingRule = shardingSphereDatabase.getRuleMetaData().findSingleRule(ShardingRule.class); + private void collectFromDatabase(final ShardingSphereDatabase database, final ShardingSphereTableData tableData) throws SQLException { + Optional shardingRule = database.getRuleMetaData().findSingleRule(ShardingRule.class); if (!shardingRule.isPresent()) { return; } - collectForShardingStatisticTable(shardingSphereDatabase, shardingRule.get(), tableData); + collectForShardingStatisticTable(database, shardingRule.get(), tableData); } - private void collectForShardingStatisticTable(final ShardingSphereDatabase shardingSphereDatabase, final ShardingRule shardingRule, final ShardingSphereTableData tableData) throws SQLException { + private void collectForShardingStatisticTable(final ShardingSphereDatabase database, final ShardingRule shardingRule, final ShardingSphereTableData tableData) throws SQLException { int count = 1; for (TableRule each : shardingRule.getTableRules().values()) { for (DataNode 
dataNode : each.getActualDataNodes()) { List row = new LinkedList<>(); row.add(count++); - row.add(shardingSphereDatabase.getName()); + row.add(database.getName()); row.add(each.getLogicTable()); row.add(dataNode.getDataSourceName()); row.add(dataNode.getTableName()); - addTableRowsAndDataLength(shardingSphereDatabase.getResourceMetaData().getStorageTypes(), shardingSphereDatabase.getResourceMetaData().getDataSources(), dataNode, row); + addTableRowsAndDataLength(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits(), dataNode, row); tableData.getRows().add(new ShardingSphereRowData(row)); } } } - private void addTableRowsAndDataLength(final Map databaseTypes, final Map dataSources, - final DataNode dataNode, final List row) throws SQLException { - DatabaseType databaseType = databaseTypes.get(dataNode.getDataSourceName()); + private void addTableRowsAndDataLength(final Map storageUnits, final DataNode dataNode, final List row) throws SQLException { + DatabaseType databaseType = storageUnits.get(dataNode.getDataSourceName()).getStorageType(); Optional dialectCollector = DatabaseTypedSPILoader.findService(DialectShardingStatisticsTableCollector.class, databaseType); boolean isAppended = false; if (dialectCollector.isPresent()) { - try (Connection connection = dataSources.get(dataNode.getDataSourceName()).getConnection()) { + try (Connection connection = storageUnits.get(dataNode.getDataSourceName()).getDataSource().getConnection()) { isAppended = dialectCollector.get().appendRow(connection, dataNode, row); } } diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/metadata/reviser/index/ShardingIndexReviser.java b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/metadata/reviser/index/ShardingIndexReviser.java index 86e9c2941e782..00c6b9e86b317 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/metadata/reviser/index/ShardingIndexReviser.java +++ 
b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/metadata/reviser/index/ShardingIndexReviser.java @@ -37,19 +37,16 @@ public final class ShardingIndexReviser implements IndexReviser { @Override public Optional revise(final String tableName, final IndexMetaData originalMetaData, final ShardingRule rule) { for (DataNode each : tableRule.getActualDataNodes()) { - Optional logicIndexName = getLogicIndex(originalMetaData.getName(), each.getTableName()); - if (logicIndexName.isPresent()) { - IndexMetaData result = new IndexMetaData(logicIndexName.get()); - result.getColumns().addAll(originalMetaData.getColumns()); - result.setUnique(originalMetaData.isUnique()); - return Optional.of(result); - } + IndexMetaData result = new IndexMetaData(getLogicIndex(originalMetaData.getName(), each.getTableName())); + result.getColumns().addAll(originalMetaData.getColumns()); + result.setUnique(originalMetaData.isUnique()); + return Optional.of(result); } return Optional.empty(); } - private Optional getLogicIndex(final String actualIndexName, final String actualTableName) { + private String getLogicIndex(final String actualIndexName, final String actualTableName) { String indexNameSuffix = "_" + actualTableName; - return actualIndexName.endsWith(indexNameSuffix) ? Optional.of(actualIndexName.replace(indexNameSuffix, "")) : Optional.empty(); + return actualIndexName.endsWith(indexNameSuffix) ? 
actualIndexName.replace(indexNameSuffix, "") : actualIndexName; } } diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/ExpressionConditionUtils.java b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/ExpressionConditionUtils.java index 781cad0526b02..ce656794edb9a 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/ExpressionConditionUtils.java +++ b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/ExpressionConditionUtils.java @@ -34,6 +34,6 @@ public final class ExpressionConditionUtils { * @return true or false */ public static boolean isNowExpression(final ExpressionSegment segment) { - return segment instanceof ComplexExpressionSegment && "now()".equalsIgnoreCase(((ComplexExpressionSegment) segment).getText()); + return segment instanceof ComplexExpressionSegment && "now()".equalsIgnoreCase(segment.getText()); } } diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/engine/InsertClauseShardingConditionEngine.java b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/engine/InsertClauseShardingConditionEngine.java index c70613582abd8..c6b2b889b0bdf 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/engine/InsertClauseShardingConditionEngine.java +++ b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/engine/InsertClauseShardingConditionEngine.java @@ -100,7 +100,7 @@ private void appendMissingShardingConditions(final InsertStatementContext sqlSta return; } for (String each : allColumnNames) { - if (!columnNames.contains(each) && !shardingRule.isGenerateKeyColumn(each, tableName) && shardingRule.findShardingColumn(each, tableName).isPresent()) { + if 
(!columnNames.contains(each.toLowerCase()) && !shardingRule.isGenerateKeyColumn(each, tableName) && shardingRule.findShardingColumn(each, tableName).isPresent()) { appendMissingShardingConditions(shardingConditions, each, tableName); } } diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/ConditionValue.java b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/ConditionValue.java index bcea24aba2219..439c9caccd392 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/ConditionValue.java +++ b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/ConditionValue.java @@ -23,6 +23,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.ParameterMarkerExpressionSegment; +import lombok.Getter; import java.util.List; import java.util.Optional; @@ -35,6 +36,9 @@ public final class ConditionValue { private final int parameterMarkerIndex; + @Getter + private boolean isNull; + public ConditionValue(final ExpressionSegment expressionSegment, final List params) { value = getValue(expressionSegment, params); parameterMarkerIndex = expressionSegment instanceof ParameterMarkerExpressionSegment ? 
((ParameterMarkerExpressionSegment) expressionSegment).getParameterMarkerIndex() : -1; @@ -62,7 +66,8 @@ private Comparable getValue(final ParameterMarkerExpressionSegment expression private Comparable getValue(final LiteralExpressionSegment expressionSegment) { Object result = expressionSegment.getLiterals(); - ShardingSpherePreconditions.checkState(result instanceof Comparable, () -> new NotImplementComparableValueException("Sharding", result)); + isNull = null == result; + ShardingSpherePreconditions.checkState(null == result || result instanceof Comparable, () -> new NotImplementComparableValueException("Sharding", result)); return (Comparable) result; } diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/impl/ConditionValueInOperatorGenerator.java b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/impl/ConditionValueInOperatorGenerator.java index da95610608a4b..62dbc165aadb0 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/impl/ConditionValueInOperatorGenerator.java +++ b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/impl/ConditionValueInOperatorGenerator.java @@ -44,6 +44,11 @@ public Optional generate(final InExpression predicate, f for (ExpressionSegment each : predicate.getExpressionList()) { ConditionValue conditionValue = new ConditionValue(each, params); Optional> value = conditionValue.getValue(); + if (conditionValue.isNull()) { + shardingConditionValues.add(null); + conditionValue.getParameterMarkerIndex().ifPresent(parameterMarkerIndexes::add); + continue; + } if (value.isPresent()) { shardingConditionValues.add(value.get()); conditionValue.getParameterMarkerIndex().ifPresent(parameterMarkerIndexes::add); diff --git 
a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/value/ListShardingConditionValue.java b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/value/ListShardingConditionValue.java index 76bcbcc2942f7..a0cef659f1a38 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/value/ListShardingConditionValue.java +++ b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/condition/value/ListShardingConditionValue.java @@ -17,6 +17,7 @@ package org.apache.shardingsphere.sharding.route.engine.condition.value; +import com.google.common.base.Joiner; import lombok.Getter; import lombok.RequiredArgsConstructor; @@ -24,7 +25,6 @@ import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.stream.Collectors; /** * Sharding condition value for list values. @@ -49,7 +49,7 @@ public ListShardingConditionValue(final String columnName, final String tableNam @Override public String toString() { - String condition = 1 == values.size() ? " = " + new ArrayList<>(values).get(0) : " in (" + values.stream().map(Object::toString).collect(Collectors.joining(",")) + ")"; + String condition = 1 == values.size() ? " = " + new ArrayList<>(values).get(0) : " in (" + Joiner.on(",").useForNull("").join(values) + ")"; return tableName + "." 
+ columnName + condition; } } diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/type/complex/ShardingCartesianRoutingEngine.java b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/type/complex/ShardingCartesianRoutingEngine.java index f5bc83f2889e4..34876e677a0f5 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/type/complex/ShardingCartesianRoutingEngine.java +++ b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/route/engine/type/complex/ShardingCartesianRoutingEngine.java @@ -31,6 +31,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; +import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -112,7 +113,7 @@ private RouteMapper findRoutingTable(final String dataSource, final String actua private Collection getRouteUnits(final String dataSource, final Set> cartesianRoutingTableGroups) { Collection result = new LinkedHashSet<>(); for (List each : cartesianRoutingTableGroups) { - result.add(new RouteUnit(new RouteMapper(dataSource, dataSource), each)); + result.add(new RouteUnit(new RouteMapper(dataSource, dataSource), new LinkedList<>(each))); } return result; } diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java index 4dca4fe6b8185..17d9d126e79bb 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java +++ b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/ShardingRule.java @@ -143,7 +143,7 @@ public ShardingRule(final ShardingRuleConfiguration ruleConfig, final Collection if (defaultKeyGenerateAlgorithm instanceof InstanceContextAware && -1 == instanceContext.getWorkerId()) { 
((InstanceContextAware) defaultKeyGenerateAlgorithm).setInstanceContext(instanceContext); } - shardingCache = null != ruleConfig.getShardingCache() ? new ShardingCache(ruleConfig.getShardingCache(), this) : null; + shardingCache = null == ruleConfig.getShardingCache() ? null : new ShardingCache(ruleConfig.getShardingCache(), this); logicalTableMapper = createLogicalTableMapper(); actualTableMapper = createActualTableMapper(); } @@ -627,7 +627,7 @@ public Comparable generateKey(final String logicTableName) { private KeyGenerateAlgorithm getKeyGenerateAlgorithm(final String logicTableName) { Optional tableRule = findTableRule(logicTableName); ShardingSpherePreconditions.checkState(tableRule.isPresent(), () -> new GenerateKeyStrategyNotFoundException(logicTableName)); - return null != tableRule.get().getKeyGeneratorName() ? keyGenerators.get(tableRule.get().getKeyGeneratorName()) : defaultKeyGenerateAlgorithm; + return null == tableRule.get().getKeyGeneratorName() ? defaultKeyGenerateAlgorithm : keyGenerators.get(tableRule.get().getKeyGeneratorName()); } /** @@ -793,9 +793,4 @@ private boolean isJoinConditionExpression(final ExpressionSegment expression) { public boolean isShardingCacheEnabled() { return null != shardingCache; } - - @Override - public String getType() { - return ShardingRule.class.getSimpleName(); - } } diff --git a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/TableRule.java b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/TableRule.java index 0d2829f159e63..ea7c5af15c883 100644 --- a/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/TableRule.java +++ b/features/sharding/core/src/main/java/org/apache/shardingsphere/sharding/rule/TableRule.java @@ -114,7 +114,7 @@ public TableRule(final ShardingTableRuleConfiguration tableRuleConfig, final Col tableShardingStrategyConfig = tableRuleConfig.getTableShardingStrategy(); auditStrategyConfig = 
tableRuleConfig.getAuditStrategy(); KeyGenerateStrategyConfiguration keyGeneratorConfig = tableRuleConfig.getKeyGenerateStrategy(); - generateKeyColumn = null != keyGeneratorConfig && !Strings.isNullOrEmpty(keyGeneratorConfig.getColumn()) ? keyGeneratorConfig.getColumn() : defaultGenerateKeyColumn; + generateKeyColumn = null == keyGeneratorConfig || Strings.isNullOrEmpty(keyGeneratorConfig.getColumn()) ? defaultGenerateKeyColumn : keyGeneratorConfig.getColumn(); keyGeneratorName = null == keyGeneratorConfig ? null : keyGeneratorConfig.getKeyGeneratorName(); dataSourceDataNode = actualDataNodes.isEmpty() ? null : createDataSourceDataNode(actualDataNodes); tableDataNode = actualDataNodes.isEmpty() ? null : createTableDataNode(actualDataNodes); @@ -132,7 +132,7 @@ public TableRule(final ShardingAutoTableRuleConfiguration tableRuleConfig, final actualDataNodes = isEmptyDataNodes(dataNodes) ? generateDataNodes(tableRuleConfig.getLogicTable(), dataSourceNames) : generateDataNodes(dataNodes, dataSourceNames); actualTables = getActualTables(); KeyGenerateStrategyConfiguration keyGeneratorConfig = tableRuleConfig.getKeyGenerateStrategy(); - generateKeyColumn = null != keyGeneratorConfig && !Strings.isNullOrEmpty(keyGeneratorConfig.getColumn()) ? keyGeneratorConfig.getColumn() : defaultGenerateKeyColumn; + generateKeyColumn = null == keyGeneratorConfig || Strings.isNullOrEmpty(keyGeneratorConfig.getColumn()) ? defaultGenerateKeyColumn : keyGeneratorConfig.getColumn(); keyGeneratorName = null == keyGeneratorConfig ? null : keyGeneratorConfig.getKeyGeneratorName(); dataSourceDataNode = actualDataNodes.isEmpty() ? null : createDataSourceDataNode(actualDataNodes); tableDataNode = actualDataNodes.isEmpty() ? 
null : createTableDataNode(actualDataNodes); diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/cache/checker/ShardingRouteCacheableCheckerTest.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/cache/checker/ShardingRouteCacheableCheckerTest.java index 77e9b4d2047a4..e3a30f754cafe 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/cache/checker/ShardingRouteCacheableCheckerTest.java +++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/cache/checker/ShardingRouteCacheableCheckerTest.java @@ -122,6 +122,15 @@ private ShardingSphereDatabase createDatabase(final ShardingRule shardingRule, f new ShardingSphereColumn("warehouse_id", Types.INTEGER, false, false, false, true, false, false), new ShardingSphereColumn("order_broadcast_table_id", Types.INTEGER, true, false, false, true, false, false)), Collections.emptyList(), Collections.emptyList())); + schema.getTables().put("t_non_sharding_table", new ShardingSphereTable("t_non_sharding_table", Collections.singleton( + new ShardingSphereColumn("id", Types.INTEGER, false, false, false, true, false, false)), + Collections.emptyList(), Collections.emptyList())); + schema.getTables().put("t_non_cacheable_database_sharding", new ShardingSphereTable("t_non_cacheable_database_sharding", Collections.singleton( + new ShardingSphereColumn("id", Types.INTEGER, false, false, false, true, false, false)), + Collections.emptyList(), Collections.emptyList())); + schema.getTables().put("t_non_cacheable_table_sharding", new ShardingSphereTable("t_non_cacheable_table_sharding", Collections.singleton( + new ShardingSphereColumn("id", Types.INTEGER, false, false, false, true, false, false)), + Collections.emptyList(), Collections.emptyList())); return new ShardingSphereDatabase(DATABASE_NAME, TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"), new ResourceMetaData(DATABASE_NAME, Collections.emptyMap()), new 
RuleMetaData(Arrays.asList(shardingRule, timestampServiceRule)), Collections.singletonMap(SCHEMA_NAME, schema)); diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rewrite/token/OrderByTokenGeneratorTest.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rewrite/token/OrderByTokenGeneratorTest.java index f10883033a73d..3adc32877bfde 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rewrite/token/OrderByTokenGeneratorTest.java +++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rewrite/token/OrderByTokenGeneratorTest.java @@ -72,10 +72,10 @@ void assertIsGenerateSQLToken() { void assertGenerateSQLToken() { WindowSegment windowSegment = mock(WindowSegment.class); when(windowSegment.getStopIndex()).thenReturn(2); - MySQLSelectStatement mySQLSelectStatement = mock(MySQLSelectStatement.class); - when(mySQLSelectStatement.getWindow()).thenReturn(Optional.of(windowSegment)); + MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class); + when(selectStatement.getWindow()).thenReturn(Optional.of(windowSegment)); SelectStatementContext selectStatementContext = mock(SelectStatementContext.class, RETURNS_DEEP_STUBS); - when(selectStatementContext.getSqlStatement()).thenReturn(mySQLSelectStatement); + when(selectStatementContext.getSqlStatement()).thenReturn(selectStatement); Collection orderByItems = getOrderByItems(); when(selectStatementContext.getOrderByContext().getItems()).thenReturn(orderByItems); OrderByTokenGenerator generator = new OrderByTokenGenerator(); diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/condition/engine/InsertClauseShardingConditionEngineTest.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/condition/engine/InsertClauseShardingConditionEngineTest.java index 87be0cbb26ccc..58b5150a6a0a3 100644 --- 
a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/condition/engine/InsertClauseShardingConditionEngineTest.java +++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/condition/engine/InsertClauseShardingConditionEngineTest.java @@ -82,7 +82,7 @@ void setUp() { InsertStatement insertStatement = mockInsertStatement(); shardingConditionEngine = new InsertClauseShardingConditionEngine(database, shardingRule, new TimestampServiceRule(new TimestampServiceRuleConfiguration("System", new Properties()))); when(insertStatementContext.getSqlStatement()).thenReturn(insertStatement); - when(insertStatementContext.getColumnNames()).thenReturn(Collections.singletonList("foo_col_1")); + when(insertStatementContext.getColumnNames()).thenReturn(Arrays.asList("foo_col_1", "foo_col_3")); when(insertStatementContext.getInsertValueContexts()).thenReturn(Collections.singletonList(createInsertValueContext())); when(insertStatementContext.getInsertSelectContext()).thenReturn(null); when(insertStatementContext.getDatabaseType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); @@ -94,7 +94,7 @@ private static ShardingSphereDatabase mockDatabase() { when(result.getName()).thenReturn(DefaultDatabase.LOGIC_NAME); ShardingSphereSchema schema = mock(ShardingSphereSchema.class, RETURNS_DEEP_STUBS); when(schema.containsTable("foo_table")).thenReturn(true); - when(schema.getTable("foo_table").getColumnNames()).thenReturn(Arrays.asList("foo_col_1", "foo_col_2")); + when(schema.getTable("foo_table").getColumnNames()).thenReturn(Arrays.asList("foo_col_1", "foo_col_2", "foo_Col_3")); when(result.getSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(schema); return result; } @@ -209,4 +209,12 @@ void assertCreateShardingConditionsWithoutShardingColumn() { assertThat(actual.get(0).getValues().get(0).getColumnName(), is("foo_col_2")); assertThat(actual.get(0).getValues().get(0).getTableName(), is("foo_table")); } + 
+ @Test + void assertCreateShardingConditionsWithCaseSensitiveField() { + when(shardingRule.findShardingColumn("foo_Col_3", "foo_table")).thenReturn(Optional.of("foo_Col_3")); + List actual = shardingConditionEngine.createShardingConditions(insertStatementContext, Collections.emptyList()); + assertThat(actual.size(), is(1)); + + } } diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/impl/ConditionValueInOperatorGeneratorTest.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/impl/ConditionValueInOperatorGeneratorTest.java index 737a459626679..75d2e65ed4c3d 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/impl/ConditionValueInOperatorGeneratorTest.java +++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/condition/generator/impl/ConditionValueInOperatorGeneratorTest.java @@ -24,12 +24,14 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.InExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ListExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.complex.CommonExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.ParameterMarkerExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import org.apache.shardingsphere.timeservice.api.config.TimestampServiceRuleConfiguration; import org.apache.shardingsphere.timeservice.core.rule.TimestampServiceRule; import org.junit.jupiter.api.Test; +import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.LinkedList; @@ -61,6 +63,34 @@ void assertNowExpression() { 
assertTrue(shardingConditionValue.get().getParameterMarkerIndexes().isEmpty()); } + @Test + void assertNullExpression() { + ListExpression listExpression = new ListExpression(0, 0); + listExpression.getItems().add(new LiteralExpressionSegment(0, 0, null)); + listExpression.getItems().add(new LiteralExpressionSegment(0, 0, null)); + InExpression inExpression = new InExpression(0, 0, null, listExpression, false); + Optional shardingConditionValue = generator.generate(inExpression, column, new LinkedList<>(), timestampServiceRule); + assertTrue(shardingConditionValue.isPresent()); + assertThat(((ListShardingConditionValue) shardingConditionValue.get()).getValues(), is(Arrays.asList(null, null))); + assertTrue(shardingConditionValue.get().getParameterMarkerIndexes().isEmpty()); + assertThat(shardingConditionValue.get().toString(), is("tbl.id in (,)")); + } + + @Test + void assertNullAndCommonExpression() { + ListExpression listExpression = new ListExpression(0, 0); + listExpression.getItems().add(new LiteralExpressionSegment(0, 0, "test1")); + listExpression.getItems().add(new LiteralExpressionSegment(0, 0, null)); + listExpression.getItems().add(new LiteralExpressionSegment(0, 0, null)); + listExpression.getItems().add(new LiteralExpressionSegment(0, 0, "test2")); + InExpression inExpression = new InExpression(0, 0, null, listExpression, false); + Optional shardingConditionValue = generator.generate(inExpression, column, new LinkedList<>(), timestampServiceRule); + assertTrue(shardingConditionValue.isPresent()); + assertThat(((ListShardingConditionValue) shardingConditionValue.get()).getValues(), is(Arrays.asList("test1", null, null, "test2"))); + assertTrue(shardingConditionValue.get().getParameterMarkerIndexes().isEmpty()); + assertThat(shardingConditionValue.get().toString(), is("tbl.id in (test1,,,test2)")); + } + @SuppressWarnings("unchecked") @Test void assertGenerateConditionValueWithParameter() { diff --git 
a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/standard/SQLRouteTest.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/standard/SQLRouteTest.java index a03f96e283b09..cb014c81b5427 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/standard/SQLRouteTest.java +++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/standard/SQLRouteTest.java @@ -44,12 +44,13 @@ private static class TestCaseArgumentsProvider implements ArgumentsProvider { public Stream provideArguments(final ExtensionContext extensionContext) { return Stream.of( Arguments.of("noTableUnicastRandomDataSource", "SELECT 1, 1 + 2", Collections.singletonList(1)), - Arguments.of("withBroadcastTable", "SELECT user_id, status from t_order_item a join t_product b on a.product_id = b.product_id where user_id = ?", Collections.singletonList(1)), + Arguments.of("withBroadcastTable", "SELECT a.user_id, status from t_order_item a join t_product b on a.product_id = b.product_id where a.user_id = ?", + Collections.singletonList(1)), Arguments.of("allBindingWithBroadcastTable", "SELECT a.user_id, a.status from t_order a join t_order_item b on a.order_id = b.order_id join t_product c on b.product_id = c.product_id where a.user_id = ?", Collections.singletonList(1)), Arguments.of("complexTableWithBroadcastTable", - "SELECT user_id, status from t_order a join t_user b on a.user_id = b.user_id join t_product c on a.product_id = c.product_id where a.user_id = ? and b.user_id =?", + "SELECT a.user_id, status from t_order a join t_user b on a.user_id = b.user_id join t_product c on a.product_id = c.product_id where a.user_id = ? 
and b.user_id =?", Arrays.asList(1, 1)), Arguments.of("insertTable", "INSERT INTO t_order (order_id, user_id) VALUES (?, ?)", Arrays.asList(1, 1))); } diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/standard/ShardingStandardRoutingEngineTest.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/standard/ShardingStandardRoutingEngineTest.java index fb53ca620e8c6..ba64c8f7f4c42 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/standard/ShardingStandardRoutingEngineTest.java +++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/standard/ShardingStandardRoutingEngineTest.java @@ -97,7 +97,7 @@ void assertRouteByErrorShardingTableStrategy() { @Test void assertRouteByHint() { SQLStatementContext sqlStatementContext = mock(SQLStatementContext.class, RETURNS_DEEP_STUBS); - when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singletonList("t_hint_test")); + when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singleton("t_hint_test")); ShardingStandardRoutingEngine standardRoutingEngine = createShardingStandardRoutingEngine("t_hint_test", new ShardingConditions(Collections.emptyList(), sqlStatementContext, mock(ShardingRule.class)), sqlStatementContext, new HintValueContext()); HintManager hintManager = HintManager.getInstance(); @@ -115,7 +115,7 @@ void assertRouteByHint() { @Test void assertRouteByMixedWithHintDataSource() { SQLStatementContext sqlStatementContext = mock(SQLStatementContext.class, RETURNS_DEEP_STUBS); - when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singletonList("t_hint_ds_test")); + when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singleton("t_hint_ds_test")); ShardingStandardRoutingEngine standardRoutingEngine = 
createShardingStandardRoutingEngine("t_hint_ds_test", ShardingRoutingEngineFixtureBuilder.createShardingConditions("t_hint_ds_test"), sqlStatementContext, new HintValueContext()); HintManager hintManager = HintManager.getInstance(); @@ -132,7 +132,7 @@ void assertRouteByMixedWithHintDataSource() { @Test void assertRouteByMixedWithHintDataSourceOnly() { SQLStatementContext sqlStatementContext = mock(SQLStatementContext.class, RETURNS_DEEP_STUBS); - when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singletonList("t_hint_ds_test")); + when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singleton("t_hint_ds_test")); ShardingStandardRoutingEngine standardRoutingEngine = createShardingStandardRoutingEngine("t_hint_ds_test", new ShardingConditions(Collections.emptyList(), sqlStatementContext, mock(ShardingRule.class)), sqlStatementContext, new HintValueContext()); HintManager hintManager = HintManager.getInstance(); @@ -153,7 +153,7 @@ void assertRouteByMixedWithHintDataSourceOnly() { @Test void assertRouteByMixedWithHintTable() { SQLStatementContext sqlStatementContext = mock(SQLStatementContext.class, RETURNS_DEEP_STUBS); - when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singletonList("t_hint_table_test")); + when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singleton("t_hint_table_test")); ShardingStandardRoutingEngine standardRoutingEngine = createShardingStandardRoutingEngine("t_hint_table_test", ShardingRoutingEngineFixtureBuilder.createShardingConditions("t_hint_table_test"), sqlStatementContext, new HintValueContext()); HintManager hintManager = HintManager.getInstance(); @@ -170,7 +170,7 @@ void assertRouteByMixedWithHintTable() { @Test void assertRouteByMixedWithHintTableOnly() { SQLStatementContext sqlStatementContext = mock(SQLStatementContext.class, RETURNS_DEEP_STUBS); - 
when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singletonList("t_hint_table_test")); + when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singleton("t_hint_table_test")); ShardingStandardRoutingEngine standardRoutingEngine = createShardingStandardRoutingEngine("t_hint_table_test", new ShardingConditions(Collections.emptyList(), sqlStatementContext, mock(ShardingRule.class)), sqlStatementContext, new HintValueContext()); HintManager hintManager = HintManager.getInstance(); @@ -191,7 +191,7 @@ void assertRouteByMixedWithHintTableOnly() { @Test void assertRouteByIntervalTableShardingStrategyOnly() { SQLStatementContext sqlStatementContext = mock(SQLStatementContext.class, RETURNS_DEEP_STUBS); - when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singletonList("t_interval_test")); + when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.singleton("t_interval_test")); ShardingStandardRoutingEngine standardRoutingEngine = createShardingStandardRoutingEngine("t_interval_test", ShardingRoutingEngineFixtureBuilder.createIntervalShardingConditions("t_interval_test"), sqlStatementContext, new HintValueContext()); RouteContext routeContext = standardRoutingEngine.route(ShardingRoutingEngineFixtureBuilder.createIntervalTableShardingRule()); diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/standard/assertion/ShardingRouteAssert.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/standard/assertion/ShardingRouteAssert.java index b0ac91b31d727..1375141f36f04 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/standard/assertion/ShardingRouteAssert.java +++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/standard/assertion/ShardingRouteAssert.java @@ -53,7 +53,6 
@@ import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; /** * Sharding route assert. @@ -71,17 +70,14 @@ public final class ShardingRouteAssert { public static RouteContext assertRoute(final String sql, final List params) { DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, "MySQL"); ShardingRule shardingRule = ShardingRoutingEngineFixtureBuilder.createAllShardingRule(); - SingleRule singleRule = ShardingRoutingEngineFixtureBuilder.createSingleRule(Collections.singletonList(shardingRule)); + SingleRule singleRule = ShardingRoutingEngineFixtureBuilder.createSingleRule(Collections.singleton(shardingRule)); TimestampServiceRule timestampServiceRule = ShardingRoutingEngineFixtureBuilder.createTimeServiceRule(); Map schemas = buildSchemas(); ConfigurationProperties props = new ConfigurationProperties(new Properties()); SQLStatementParserEngine sqlStatementParserEngine = new SQLStatementParserEngine(databaseType, new CacheOption(2000, 65535L), new CacheOption(128, 1024L), false); RuleMetaData ruleMetaData = new RuleMetaData(Arrays.asList(shardingRule, singleRule, timestampServiceRule)); - ResourceMetaData resourceMetaData = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS); - when(resourceMetaData.getStorageTypes()).thenReturn(Collections.singletonMap("ds_0", databaseType)); - ShardingSphereDatabase database = new ShardingSphereDatabase( - DefaultDatabase.LOGIC_NAME, databaseType, resourceMetaData, ruleMetaData, schemas); + ShardingSphereDatabase database = new ShardingSphereDatabase(DefaultDatabase.LOGIC_NAME, databaseType, mock(ResourceMetaData.class, RETURNS_DEEP_STUBS), ruleMetaData, schemas); SQLStatementContext sqlStatementContext = new SQLBindEngine(createShardingSphereMetaData(database), DefaultDatabase.LOGIC_NAME).bind(sqlStatementParserEngine.parse(sql, false), params); QueryContext queryContext = new QueryContext(sqlStatementContext, sql, params); @@ -97,17 
+93,22 @@ private static Map buildSchemas() { Map tables = new HashMap<>(3, 1F); tables.put("t_order", new ShardingSphereTable("t_order", Arrays.asList(new ShardingSphereColumn("order_id", Types.INTEGER, true, false, false, true, false, false), new ShardingSphereColumn("user_id", Types.INTEGER, false, false, false, true, false, false), + new ShardingSphereColumn("product_id", Types.INTEGER, false, false, false, true, false, false), new ShardingSphereColumn("status", Types.INTEGER, false, false, false, true, false, false)), Collections.emptyList(), Collections.emptyList())); tables.put("t_order_item", new ShardingSphereTable("t_order_item", Arrays.asList(new ShardingSphereColumn("item_id", Types.INTEGER, true, false, false, true, false, false), new ShardingSphereColumn("order_id", Types.INTEGER, false, false, false, true, false, false), new ShardingSphereColumn("user_id", Types.INTEGER, false, false, false, true, false, false), + new ShardingSphereColumn("product_id", Types.INTEGER, false, false, false, true, false, false), new ShardingSphereColumn("status", Types.VARCHAR, false, false, false, true, false, false), new ShardingSphereColumn("c_date", Types.TIMESTAMP, false, false, false, true, false, false)), Collections.emptyList(), Collections.emptyList())); tables.put("t_other", new ShardingSphereTable("t_other", Collections.singletonList( new ShardingSphereColumn("order_id", Types.INTEGER, true, false, false, true, false, false)), Collections.emptyList(), Collections.emptyList())); - tables.put("t_category", new ShardingSphereTable("t_category", Collections.emptyList(), Collections.emptyList(), Collections.emptyList())); - tables.put("t_product", new ShardingSphereTable("t_product", Collections.emptyList(), Collections.emptyList(), Collections.emptyList())); - tables.put("t_user", new ShardingSphereTable("t_user", Collections.emptyList(), Collections.emptyList(), Collections.emptyList())); + tables.put("t_category", new ShardingSphereTable("t_category", 
Collections.singleton(new ShardingSphereColumn("id", Types.INTEGER, true, false, false, true, false, false)), + Collections.emptyList(), Collections.emptyList())); + tables.put("t_product", new ShardingSphereTable("t_product", Collections.singleton(new ShardingSphereColumn("product_id", Types.INTEGER, true, false, false, true, false, false)), + Collections.emptyList(), Collections.emptyList())); + tables.put("t_user", new ShardingSphereTable("t_user", Collections.singleton(new ShardingSphereColumn("user_id", Types.INTEGER, true, false, false, true, false, false)), + Collections.emptyList(), Collections.emptyList())); tables.put("t_hint_test", new ShardingSphereTable("t_hint_test", Collections.singleton(new ShardingSphereColumn("user_id", Types.INTEGER, true, false, false, true, false, false)), Collections.emptyList(), Collections.emptyList())); return Collections.singletonMap(DefaultDatabase.LOGIC_NAME, new ShardingSphereSchema(tables, Collections.emptyMap())); diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/unicast/ShardingUnicastRoutingEngineTest.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/unicast/ShardingUnicastRoutingEngineTest.java index 5f90242ea4ed7..64f67966fa1c1 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/unicast/ShardingUnicastRoutingEngineTest.java +++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/type/unicast/ShardingUnicastRoutingEngineTest.java @@ -90,7 +90,7 @@ void assertRoutingForBroadcastTableWithCursorStatement() { @Test void assertRoutingForBroadcastTableWithPreferredDataSource() { - ConnectionContext connectionContext = new ConnectionContext(() -> Collections.singletonList("ds_1")); + ConnectionContext connectionContext = new ConnectionContext(() -> Collections.singleton("ds_1")); RouteContext actual = new 
ShardingUnicastRoutingEngine(mock(SelectStatementContext.class), Collections.singleton("t_config"), connectionContext).route(shardingRule); assertThat(actual.getRouteUnits().size(), is(1)); assertThat(actual.getRouteUnits().iterator().next().getDataSourceMapper().getActualName(), is("ds_1")); diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/validator/ddl/ShardingDropTableStatementValidatorTest.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/validator/ddl/ShardingDropTableStatementValidatorTest.java index e8cfeef86e963..20e25dff97a6b 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/validator/ddl/ShardingDropTableStatementValidatorTest.java +++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/route/engine/validator/ddl/ShardingDropTableStatementValidatorTest.java @@ -127,8 +127,8 @@ void assertPostValidateDropTableWithSameRouteResultShardingTableForPostgreSQL() when(shardingRule.isShardingTable("t_order")).thenReturn(true); when(shardingRule.getTableRule("t_order")).thenReturn(new TableRule(Arrays.asList("ds_0", "ds_1"), "t_order")); Collection routeUnits = new LinkedList<>(); - routeUnits.add(new RouteUnit(new RouteMapper("ds_0", "ds_0"), Collections.singletonList(new RouteMapper("t_order", "t_order_0")))); - routeUnits.add(new RouteUnit(new RouteMapper("ds_1", "ds_1"), Collections.singletonList(new RouteMapper("t_order", "t_order_0")))); + routeUnits.add(new RouteUnit(new RouteMapper("ds_0", "ds_0"), Collections.singleton(new RouteMapper("t_order", "t_order_0")))); + routeUnits.add(new RouteUnit(new RouteMapper("ds_1", "ds_1"), Collections.singleton(new RouteMapper("t_order", "t_order_0")))); when(routeContext.getRouteUnits()).thenReturn(routeUnits); assertDoesNotThrow(() -> new ShardingDropTableStatementValidator().postValidate( shardingRule, new DropTableStatementContext(sqlStatement), new 
HintValueContext(), Collections.emptyList(), @@ -142,7 +142,7 @@ void assertPostValidateDropTableWithDifferentRouteResultShardingTableForPostgreS when(shardingRule.isShardingTable("t_order")).thenReturn(true); when(shardingRule.getTableRule("t_order")).thenReturn(new TableRule(Arrays.asList("ds_0", "ds_1"), "t_order")); Collection routeUnits = new LinkedList<>(); - routeUnits.add(new RouteUnit(new RouteMapper("ds_0", "ds_0"), Collections.singletonList(new RouteMapper("t_order", "t_order_0")))); + routeUnits.add(new RouteUnit(new RouteMapper("ds_0", "ds_0"), Collections.singleton(new RouteMapper("t_order", "t_order_0")))); when(routeContext.getRouteUnits()).thenReturn(routeUnits); assertThrows(ShardingDDLRouteException.class, () -> new ShardingDropTableStatementValidator().postValidate(shardingRule, new DropTableStatementContext(sqlStatement), new HintValueContext(), @@ -155,8 +155,8 @@ void assertPostValidateDropTableWithSameRouteResultBroadcastTableForPostgreSQL() sqlStatement.getTables().add(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_config")))); when(shardingRule.getTableRule("t_config")).thenReturn(new TableRule(Arrays.asList("ds_0", "ds_1"), "t_config")); Collection routeUnits = new LinkedList<>(); - routeUnits.add(new RouteUnit(new RouteMapper("ds_0", "ds_0"), Collections.singletonList(new RouteMapper("t_config", "t_config")))); - routeUnits.add(new RouteUnit(new RouteMapper("ds_1", "ds_1"), Collections.singletonList(new RouteMapper("t_config", "t_config")))); + routeUnits.add(new RouteUnit(new RouteMapper("ds_0", "ds_0"), Collections.singleton(new RouteMapper("t_config", "t_config")))); + routeUnits.add(new RouteUnit(new RouteMapper("ds_1", "ds_1"), Collections.singleton(new RouteMapper("t_config", "t_config")))); when(routeContext.getRouteUnits()).thenReturn(routeUnits); assertDoesNotThrow(() -> new ShardingDropTableStatementValidator().postValidate( shardingRule, new DropTableStatementContext(sqlStatement), new 
HintValueContext(), diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rule/ShardingRuleTest.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rule/ShardingRuleTest.java index a4889b2c5a2a2..2281e541c4029 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rule/ShardingRuleTest.java +++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/rule/ShardingRuleTest.java @@ -486,7 +486,7 @@ private ShardingRule createMaximumShardingRule() { shardingRuleConfig.setDefaultTableShardingStrategy(new StandardShardingStrategyConfiguration("table_id", "standard")); shardingRuleConfig.setDefaultShardingColumn("table_id"); shardingRuleConfig.setDefaultKeyGenerateStrategy(new KeyGenerateStrategyConfiguration("id", "default")); - shardingRuleConfig.setDefaultAuditStrategy(new ShardingAuditStrategyConfiguration(Collections.singletonList("audit_algorithm"), false)); + shardingRuleConfig.setDefaultAuditStrategy(new ShardingAuditStrategyConfiguration(Collections.singleton("audit_algorithm"), false)); shardingRuleConfig.getShardingAlgorithms().put("core_standard_fixture", new AlgorithmConfiguration("CORE.STANDARD.FIXTURE", new Properties())); shardingRuleConfig.getKeyGenerators().put("uuid", new AlgorithmConfiguration("UUID", new Properties())); shardingRuleConfig.getKeyGenerators().put("default", new AlgorithmConfiguration("UUID", new Properties())); @@ -635,7 +635,7 @@ void assertIsAllBindingTableWithJoinQueryWithDatabaseTableJoinCondition() { when(sqlStatementContext.isContainsJoinQuery()).thenReturn(true); when(sqlStatementContext.getDatabaseType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); when(sqlStatementContext.getTablesContext().getSchemaName()).thenReturn(Optional.empty()); - when(sqlStatementContext.getWhereSegments()).thenReturn(Collections.singletonList(new WhereSegment(0, 0, condition))); + 
when(sqlStatementContext.getWhereSegments()).thenReturn(Collections.singleton(new WhereSegment(0, 0, condition))); ShardingSphereSchema schema = mock(ShardingSphereSchema.class); when(sqlStatementContext.getTablesContext().findTableNamesByColumnSegment(Arrays.asList(leftDatabaseJoin, rightDatabaseJoin), schema)).thenReturn(createColumnTableNameMap()); when(sqlStatementContext.getTablesContext().findTableNamesByColumnSegment(Arrays.asList(leftTableJoin, rightTableJoin), schema)).thenReturn(createColumnTableNameMap()); @@ -759,7 +759,7 @@ void assertGetAllDataNodes() { private void assertGetDataNodes(final Collection dataNodes, final String tableNamePrefix) { int dataSourceNameSuffix = 0; int tableNameSuffix = 0; - for (final DataNode each : dataNodes) { + for (DataNode each : dataNodes) { assertThat(each.getDataSourceName(), is("ds_" + dataSourceNameSuffix)); assertThat(each.getTableName(), is(tableNamePrefix + tableNameSuffix)); if (++tableNameSuffix == (dataNodes.size() / 2)) { diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/yaml/ShardingRuleConfigurationYamlIT.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/yaml/ShardingRuleConfigurationYamlIT.java index a3c2bf8bfbbc7..d31fd2ab2eab5 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/yaml/ShardingRuleConfigurationYamlIT.java +++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/yaml/ShardingRuleConfigurationYamlIT.java @@ -97,10 +97,10 @@ private void assertBindingTable(final YamlShardingRuleConfiguration actual) { private void assertShardingCache(final YamlShardingRuleConfiguration actual) { YamlShardingCacheConfiguration actualShardingCache = actual.getShardingCache(); assertThat(actualShardingCache.getAllowedMaxSqlLength(), is(512)); - YamlShardingCacheOptionsConfiguration actualRouteCacheConfiguration = actualShardingCache.getRouteCache(); - 
assertThat(actualRouteCacheConfiguration.getInitialCapacity(), is(65536)); - assertThat(actualRouteCacheConfiguration.getMaximumSize(), is(262144)); - assertTrue(actualRouteCacheConfiguration.isSoftValues()); + YamlShardingCacheOptionsConfiguration actualRouteCacheConfig = actualShardingCache.getRouteCache(); + assertThat(actualRouteCacheConfig.getInitialCapacity(), is(65536)); + assertThat(actualRouteCacheConfig.getMaximumSize(), is(262144)); + assertTrue(actualRouteCacheConfig.isSoftValues()); } private void assertProps(final YamlRootConfiguration actual) { diff --git a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/yaml/swapper/NewYamlShardingRuleConfigurationSwapperTest.java b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/yaml/swapper/NewYamlShardingRuleConfigurationSwapperTest.java index 144b201f933d3..bf1444a98c52d 100644 --- a/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/yaml/swapper/NewYamlShardingRuleConfigurationSwapperTest.java +++ b/features/sharding/core/src/test/java/org/apache/shardingsphere/sharding/yaml/swapper/NewYamlShardingRuleConfigurationSwapperTest.java @@ -84,11 +84,11 @@ private ShardingRuleConfiguration createMaximumShardingRule() { ShardingTableRuleConfiguration subTableRuleConfig = createTableRuleConfiguration("SUB_LOGIC_TABLE", "ds_${0..1}.sub_table_${0..2}"); subTableRuleConfig.setKeyGenerateStrategy(new KeyGenerateStrategyConfiguration("id", "auto_increment")); result.getTables().add(subTableRuleConfig); - ShardingAutoTableRuleConfiguration autoTableRuleConfiguration = new ShardingAutoTableRuleConfiguration("auto_table", "ds_1,ds_2"); - autoTableRuleConfiguration.setShardingStrategy(new StandardShardingStrategyConfiguration("user_id", "hash_mod")); - autoTableRuleConfiguration.setKeyGenerateStrategy(new KeyGenerateStrategyConfiguration("id", "auto_increment")); - autoTableRuleConfiguration.setAuditStrategy(new 
ShardingAuditStrategyConfiguration(Collections.singleton("audit_algorithm"), true)); - result.getAutoTables().add(autoTableRuleConfiguration); + ShardingAutoTableRuleConfiguration autoTableRuleConfig = new ShardingAutoTableRuleConfiguration("auto_table", "ds_1,ds_2"); + autoTableRuleConfig.setShardingStrategy(new StandardShardingStrategyConfiguration("user_id", "hash_mod")); + autoTableRuleConfig.setKeyGenerateStrategy(new KeyGenerateStrategyConfiguration("id", "auto_increment")); + autoTableRuleConfig.setAuditStrategy(new ShardingAuditStrategyConfiguration(Collections.singleton("audit_algorithm"), true)); + result.getAutoTables().add(autoTableRuleConfig); result.getBindingTableGroups().add(new ShardingTableReferenceRuleConfiguration("foo", shardingTableRuleConfig.getLogicTable() + "," + subTableRuleConfig.getLogicTable())); result.setDefaultDatabaseShardingStrategy(new StandardShardingStrategyConfiguration("ds_id", "standard")); result.setDefaultTableShardingStrategy(new StandardShardingStrategyConfiguration("table_id", "standard")); diff --git a/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/sharding/distsql/handler/query/ShowShardingTableReferenceRuleExecutor.java b/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/sharding/distsql/handler/query/ShowShardingTableReferenceRuleExecutor.java index de239d3fae605..12e35a2a41349 100644 --- a/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/sharding/distsql/handler/query/ShowShardingTableReferenceRuleExecutor.java +++ b/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/sharding/distsql/handler/query/ShowShardingTableReferenceRuleExecutor.java @@ -43,7 +43,7 @@ public Collection getRows(final ShardingSphereDatabase return Collections.emptyList(); } Collection result = new LinkedList<>(); - for (final ShardingTableReferenceRuleConfiguration referenceRule : ((ShardingRuleConfiguration) 
rule.get().getConfiguration()).getBindingTableGroups()) { + for (ShardingTableReferenceRuleConfiguration referenceRule : ((ShardingRuleConfiguration) rule.get().getConfiguration()).getBindingTableGroups()) { if (null == sqlStatement.getRuleName() || referenceRule.getName().equalsIgnoreCase(sqlStatement.getRuleName())) { result.add(new LocalDataQueryResultRow(referenceRule.getName(), referenceRule.getReference())); } diff --git a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/checker/ShardingRuleStatementCheckerTest.java b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/checker/ShardingRuleStatementCheckerTest.java index a3db153fd65d8..6748fc914ea7d 100644 --- a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/checker/ShardingRuleStatementCheckerTest.java +++ b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/checker/ShardingRuleStatementCheckerTest.java @@ -127,19 +127,19 @@ void assertCheckCreationWithDuplicated() { @Test void assertCheckCreationWithIdentical() { - List rules = Collections.singletonList(new AutoTableRuleSegment("t_order", Arrays.asList("ds_0", "ds_1"))); + Collection rules = Collections.singleton(new AutoTableRuleSegment("t_order", Arrays.asList("ds_0", "ds_1"))); assertThrows(DuplicateRuleException.class, () -> ShardingTableRuleStatementChecker.checkCreation(database, rules, false, shardingRuleConfig)); } @Test void assertCheckAlterationWithRuleRequiredMissed() { - List rules = Collections.singletonList(new AutoTableRuleSegment("t_order_required_missed", Arrays.asList("ds_0", "ds_1"))); + Collection rules = Collections.singleton(new AutoTableRuleSegment("t_order_required_missed", Arrays.asList("ds_0", "ds_1"))); assertThrows(MissingRequiredRuleException.class, () -> ShardingTableRuleStatementChecker.checkAlteration(database, rules, shardingRuleConfig)); } @Test void 
assertCheckCreationWithResourceRequiredMissed() { - List rules = Collections.singletonList(new AutoTableRuleSegment("t_product", Arrays.asList("ds_required_missed", "ds_1"))); + Collection rules = Collections.singleton(new AutoTableRuleSegment("t_product", Arrays.asList("ds_required_missed", "ds_1"))); assertThrows(MissingRequiredStorageUnitsException.class, () -> ShardingTableRuleStatementChecker.checkCreation(database, rules, false, shardingRuleConfig)); } @@ -154,7 +154,7 @@ void assertCheckCreationWithInvalidKeyGenerateAlgorithm() { @Test void assertCheckCreationWithInvalidAuditAlgorithm() { AutoTableRuleSegment autoTableRuleSegment = new AutoTableRuleSegment("t_product", Arrays.asList("ds_0", "ds_1")); - autoTableRuleSegment.setAuditStrategySegment(new AuditStrategySegment(Collections.singletonList(new ShardingAuditorSegment("sharding_key_required_auditor", + autoTableRuleSegment.setAuditStrategySegment(new AuditStrategySegment(Collections.singleton(new ShardingAuditorSegment("sharding_key_required_auditor", new AlgorithmSegment("invalid", new Properties()))), true)); assertThrows(ServiceProviderNotFoundException.class, () -> ShardingTableRuleStatementChecker.checkCreation(database, Collections.singleton(autoTableRuleSegment), false, shardingRuleConfig)); @@ -165,7 +165,7 @@ void assertCheckAutoTableWithNotExistShardingAlgorithms() { AutoTableRuleSegment autoTableRuleSegment = new AutoTableRuleSegment("t_product", Arrays.asList("ds_0", "ds_1")); autoTableRuleSegment.setShardingColumn("product_id"); autoTableRuleSegment.setShardingAlgorithmSegment(new AlgorithmSegment("not_exist", PropertiesBuilder.build(new Property("", "")))); - List rules = Collections.singletonList(autoTableRuleSegment); + Collection rules = Collections.singleton(autoTableRuleSegment); assertThrows(InvalidAlgorithmConfigurationException.class, () -> ShardingTableRuleStatementChecker.checkCreation(database, rules, false, shardingRuleConfig)); } @@ -174,7 +174,7 @@ void 
assertCheckAutoTableWithComplexShardingAlgorithms() { AutoTableRuleSegment autoTableRuleSegment = new AutoTableRuleSegment("t_product", Arrays.asList("ds_0", "ds_1")); autoTableRuleSegment.setShardingColumn("product_id"); autoTableRuleSegment.setShardingAlgorithmSegment(new AlgorithmSegment("complex", PropertiesBuilder.build(new Property("", "")))); - List rules = Collections.singletonList(autoTableRuleSegment); + Collection rules = Collections.singleton(autoTableRuleSegment); assertThrows(InvalidAlgorithmConfigurationException.class, () -> ShardingTableRuleStatementChecker.checkCreation(database, rules, false, shardingRuleConfig)); } @@ -183,7 +183,7 @@ void assertCheckTableWithInvalidShardingStrategyType() { KeyGenerateStrategySegment keyGenerateStrategy = new KeyGenerateStrategySegment("product_id", new AlgorithmSegment("DISTSQL.FIXTURE", new Properties())); TableRuleSegment tableRuleSegment = new TableRuleSegment("t_product", Arrays.asList("ds_0", "ds_1"), keyGenerateStrategy, null); tableRuleSegment.setTableStrategySegment(new ShardingStrategySegment("invalid", "product_id", null)); - List rules = Collections.singletonList(tableRuleSegment); + Collection rules = Collections.singleton(tableRuleSegment); assertThrows(UnsupportedSQLOperationException.class, () -> ShardingTableRuleStatementChecker.checkCreation(database, rules, false, shardingRuleConfig)); } @@ -192,7 +192,7 @@ void assertCheckTableWithUnmatchedShardingStrategyType1() { KeyGenerateStrategySegment keyGenerateStrategy = new KeyGenerateStrategySegment("product_id", new AlgorithmSegment("DISTSQL.FIXTURE", new Properties())); TableRuleSegment tableRuleSegment = new TableRuleSegment("t_product", Arrays.asList("ds_0", "ds_1"), keyGenerateStrategy, null); tableRuleSegment.setTableStrategySegment(new ShardingStrategySegment("complex", "product_id", null)); - List rules = Collections.singletonList(tableRuleSegment); + Collection rules = Collections.singleton(tableRuleSegment); 
assertThrows(InvalidAlgorithmConfigurationException.class, () -> ShardingTableRuleStatementChecker.checkCreation(database, rules, false, shardingRuleConfig)); } @@ -201,7 +201,7 @@ void assertCheckTableWithUnmatchedShardingStrategyType2() { KeyGenerateStrategySegment keyGenerateStrategy = new KeyGenerateStrategySegment("product_id", new AlgorithmSegment("DISTSQL.FIXTURE", new Properties())); TableRuleSegment tableRuleSegment = new TableRuleSegment("t_product", Arrays.asList("ds_0", "ds_1"), keyGenerateStrategy, null); tableRuleSegment.setTableStrategySegment(new ShardingStrategySegment("standard", "product_id,user_id", null)); - List rules = Collections.singletonList(tableRuleSegment); + Collection rules = Collections.singleton(tableRuleSegment); assertThrows(InvalidAlgorithmConfigurationException.class, () -> ShardingTableRuleStatementChecker.checkCreation(database, rules, false, shardingRuleConfig)); } @@ -211,7 +211,7 @@ void assertCheckTableWithUnmatchedShardingStrategyType3() { KeyGenerateStrategySegment keyGenerateStrategy = new KeyGenerateStrategySegment("product_id", new AlgorithmSegment("DISTSQL.FIXTURE", new Properties())); TableRuleSegment tableRuleSegment = new TableRuleSegment("t_product", Arrays.asList("ds_0", "ds_1"), keyGenerateStrategy, null); tableRuleSegment.setTableStrategySegment(new ShardingStrategySegment("standard", "user_id", databaseAlgorithmSegment)); - List rules = Collections.singletonList(tableRuleSegment); + Collection rules = Collections.singleton(tableRuleSegment); assertThrows(InvalidAlgorithmConfigurationException.class, () -> ShardingTableRuleStatementChecker.checkCreation(database, rules, false, shardingRuleConfig)); } @@ -220,7 +220,7 @@ void assertCheckTableWithInvalidAlgorithmName() { KeyGenerateStrategySegment keyGenerateStrategy = new KeyGenerateStrategySegment("product_id", new AlgorithmSegment("DISTSQL.FIXTURE", new Properties())); TableRuleSegment tableRuleSegment = new TableRuleSegment("t_product", Arrays.asList("ds_0", 
"ds_1"), keyGenerateStrategy, null); tableRuleSegment.setTableStrategySegment(new ShardingStrategySegment("hint", "product_id", null)); - List rules = Collections.singletonList(tableRuleSegment); + Collection rules = Collections.singleton(tableRuleSegment); assertThrows(InvalidAlgorithmConfigurationException.class, () -> ShardingTableRuleStatementChecker.checkCreation(database, rules, false, shardingRuleConfig)); } @@ -229,7 +229,7 @@ void assertCheckTableWithInvalidAlgorithmNameWhenCurrentRuleConfigIsNull() { KeyGenerateStrategySegment keyGenerateStrategy = new KeyGenerateStrategySegment("product_id", new AlgorithmSegment("DISTSQL.FIXTURE", new Properties())); TableRuleSegment tableRuleSegment = new TableRuleSegment("t_product", Arrays.asList("ds_0", "ds_1"), keyGenerateStrategy, null); tableRuleSegment.setTableStrategySegment(new ShardingStrategySegment("hint", "product_id", null)); - List rules = Collections.singletonList(tableRuleSegment); + Collection rules = Collections.singleton(tableRuleSegment); assertThrows(InvalidAlgorithmConfigurationException.class, () -> ShardingTableRuleStatementChecker.checkCreation(database, rules, false, null)); } @@ -239,7 +239,7 @@ void assertCheckNullAlgorithmNameAndAlgorithmSegment() { KeyGenerateStrategySegment keyGenerateStrategy = new KeyGenerateStrategySegment("product_id", new AlgorithmSegment("DISTSQL.FIXTURE", new Properties())); TableRuleSegment tableRuleSegment = new TableRuleSegment("t_product", Arrays.asList("ds_0", "ds_1"), keyGenerateStrategy, null); tableRuleSegment.setTableStrategySegment(new ShardingStrategySegment("standard", "product_id", databaseAlgorithmSegment)); - List rules = Collections.singletonList(tableRuleSegment); + Collection rules = Collections.singleton(tableRuleSegment); ShardingTableRuleStatementChecker.checkCreation(database, rules, false, shardingRuleConfig); } @@ -248,7 +248,7 @@ void assertCheckNullAlgorithmNameAndNullAlgorithmSegment() { KeyGenerateStrategySegment keyGenerateStrategy = 
new KeyGenerateStrategySegment("product_id", new AlgorithmSegment("DISTSQL.FIXTURE", new Properties())); TableRuleSegment tableRuleSegment = new TableRuleSegment("t_product", Arrays.asList("ds_0", "ds_1"), keyGenerateStrategy, null); tableRuleSegment.setTableStrategySegment(new ShardingStrategySegment("standard", "product_id", null)); - List rules = Collections.singletonList(tableRuleSegment); + Collection rules = Collections.singleton(tableRuleSegment); assertThrows(InvalidAlgorithmConfigurationException.class, () -> ShardingTableRuleStatementChecker.checkCreation(database, rules, false, shardingRuleConfig)); } @@ -256,7 +256,7 @@ void assertCheckNullAlgorithmNameAndNullAlgorithmSegment() { void assertCheckAutoTableRuleWithStandardShardingAlgorithm() { AutoTableRuleSegment autoTableRuleSegment = new AutoTableRuleSegment("t_product", Arrays.asList("ds_0", "ds_1")); autoTableRuleSegment.setShardingAlgorithmSegment(new AlgorithmSegment("INLINE", PropertiesBuilder.build(new Property("algorithm-expression", "ds_${product_id % 2}")))); - List rules = Collections.singletonList(autoTableRuleSegment); + Collection rules = Collections.singleton(autoTableRuleSegment); assertThrows(InvalidAlgorithmConfigurationException.class, () -> ShardingTableRuleStatementChecker.checkCreation(database, rules, false, shardingRuleConfig)); } @@ -264,7 +264,7 @@ void assertCheckAutoTableRuleWithStandardShardingAlgorithm() { void assertCheckAutoTableRuleWithAutoShardingAlgorithm() { AutoTableRuleSegment autoTableRuleSegment = new AutoTableRuleSegment("t_product", Arrays.asList("ds_0", "ds_1")); autoTableRuleSegment.setShardingAlgorithmSegment(new AlgorithmSegment("CORE.AUTO.FIXTURE", PropertiesBuilder.build(new Property("sharding-count", "4")))); - List rules = Collections.singletonList(autoTableRuleSegment); + Collection rules = Collections.singleton(autoTableRuleSegment); ShardingTableRuleStatementChecker.checkCreation(database, rules, false, shardingRuleConfig); } @@ -323,7 +323,7 @@ 
private AutoTableRuleSegment createCompleteAutoTableRule() { private TableRuleSegment createCompleteTableRule() { Properties props = new Properties(); KeyGenerateStrategySegment keyGenerator = new KeyGenerateStrategySegment("product_id", new AlgorithmSegment("DISTSQL.FIXTURE", props)); - TableRuleSegment result = new TableRuleSegment("t_product_1", Collections.singletonList("ds_${0..1}.t_order${0..1}"), keyGenerator, null); + TableRuleSegment result = new TableRuleSegment("t_product_1", Collections.singleton("ds_${0..1}.t_order${0..1}"), keyGenerator, null); result.setTableStrategySegment(new ShardingStrategySegment("hint", null, new AlgorithmSegment("CORE.HINT.FIXTURE", props))); result.setDatabaseStrategySegment(new ShardingStrategySegment("hint", null, new AlgorithmSegment("CORE.HINT.FIXTURE", props))); return result; @@ -332,7 +332,7 @@ private TableRuleSegment createCompleteTableRule() { private TableRuleSegment createWrongTableRuleWithNoneTypeStrategy() { Properties props = new Properties(); KeyGenerateStrategySegment keyGenerator = new KeyGenerateStrategySegment("product_id", new AlgorithmSegment("DISTSQL.FIXTURE", props)); - TableRuleSegment result = new TableRuleSegment("t_product_1", Collections.singletonList("ds_${0..1}.t_order${0..1}"), keyGenerator, null); + TableRuleSegment result = new TableRuleSegment("t_product_1", Collections.singleton("ds_${0..1}.t_order${0..1}"), keyGenerator, null); result.setDatabaseStrategySegment(new ShardingStrategySegment("none", null, null)); result.setTableStrategySegment(new ShardingStrategySegment("none", null, null)); return result; @@ -341,7 +341,7 @@ private TableRuleSegment createWrongTableRuleWithNoneTypeStrategy() { private TableRuleSegment createCompleteTableRuleWithNoneTypeStrategy() { Properties props = PropertiesBuilder.build(new Property("algorithm-expression", "t_order_${order_id % 2}")); KeyGenerateStrategySegment keyGenerator = new KeyGenerateStrategySegment("product_id", new 
AlgorithmSegment("DISTSQL.FIXTURE", props)); - TableRuleSegment result = new TableRuleSegment("t_product_1", Collections.singletonList("ds_0.t_order${0..1}"), keyGenerator, null); + TableRuleSegment result = new TableRuleSegment("t_product_1", Collections.singleton("ds_0.t_order${0..1}"), keyGenerator, null); result.setDatabaseStrategySegment(new ShardingStrategySegment("none", null, null)); result.setTableStrategySegment(new ShardingStrategySegment("standard", "order_id", new AlgorithmSegment("inline", props))); return result; diff --git a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/CountShardingRuleExecutorTest.java b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/CountShardingRuleExecutorTest.java index a3f8648fcffb7..93510757b5d65 100644 --- a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/CountShardingRuleExecutorTest.java +++ b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/CountShardingRuleExecutorTest.java @@ -70,7 +70,7 @@ void assertGetColumns() { private ShardingSphereDatabase mockDatabase() { ShardingSphereDatabase result = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); when(result.getName()).thenReturn("db_1"); - RuleMetaData ruleMetaData = new RuleMetaData(Collections.singletonList(mockShardingRule())); + RuleMetaData ruleMetaData = new RuleMetaData(Collections.singleton(mockShardingRule())); when(result.getRuleMetaData()).thenReturn(ruleMetaData); return result; } @@ -79,12 +79,12 @@ private ShardingRule mockShardingRule() { Map tableRules = new LinkedHashMap<>(); tableRules.put("t_order_item", mock(TableRule.class)); tableRules.put("t_order", mock(TableRule.class)); - ShardingRuleConfiguration ruleConfiguration = new ShardingRuleConfiguration(); - ShardingTableReferenceRuleConfiguration shardingTableReferenceRuleConfiguration = new 
ShardingTableReferenceRuleConfiguration("refRule", "ref"); - ruleConfiguration.getBindingTableGroups().add(shardingTableReferenceRuleConfiguration); + ShardingRuleConfiguration ruleConfig = new ShardingRuleConfiguration(); + ShardingTableReferenceRuleConfiguration shardingTableReferenceRuleConfig = new ShardingTableReferenceRuleConfiguration("refRule", "ref"); + ruleConfig.getBindingTableGroups().add(shardingTableReferenceRuleConfig); ShardingRule result = mock(ShardingRule.class); when(result.getTableRules()).thenReturn(tableRules); - when(result.getConfiguration()).thenReturn(ruleConfiguration); + when(result.getConfiguration()).thenReturn(ruleConfig); return result; } } diff --git a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/AlterShardingTableReferenceRuleStatementUpdaterTest.java b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/AlterShardingTableReferenceRuleStatementUpdaterTest.java index b767914244643..da8e9c74203ae 100644 --- a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/AlterShardingTableReferenceRuleStatementUpdaterTest.java +++ b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/AlterShardingTableReferenceRuleStatementUpdaterTest.java @@ -71,7 +71,7 @@ void assertUpdate() { } private AlterShardingTableReferenceRuleStatement createSQLStatement(final String name, final String reference) { - return new AlterShardingTableReferenceRuleStatement(Collections.singletonList(new TableReferenceRuleSegment(name, reference))); + return new AlterShardingTableReferenceRuleStatement(Collections.singleton(new TableReferenceRuleSegment(name, reference))); } private ShardingRuleConfiguration createCurrentRuleConfiguration() { diff --git 
a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/AlterShardingTableRuleStatementUpdaterTest.java b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/AlterShardingTableRuleStatementUpdaterTest.java index 299c9e1a21973..421ea0ead08cc 100644 --- a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/AlterShardingTableRuleStatementUpdaterTest.java +++ b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/AlterShardingTableRuleStatementUpdaterTest.java @@ -159,7 +159,7 @@ private AutoTableRuleSegment createCompleteAutoTableRule(final String logicTable private TableRuleSegment createCompleteTableRule(final String logicTableName) { KeyGenerateStrategySegment keyGenerator = new KeyGenerateStrategySegment("product_id", new AlgorithmSegment("DISTSQL.FIXTURE", new Properties())); - TableRuleSegment result = new TableRuleSegment(logicTableName, Collections.singletonList("ds_${0..1}.t_order${0..1}"), keyGenerator, null); + TableRuleSegment result = new TableRuleSegment(logicTableName, Collections.singleton("ds_${0..1}.t_order${0..1}"), keyGenerator, null); result.setTableStrategySegment(new ShardingStrategySegment("standard", "product_id", new AlgorithmSegment("CORE.STANDARD.FIXTURE", new Properties()))); AlgorithmSegment databaseAlgorithmSegment = new AlgorithmSegment("inline", PropertiesBuilder.build(new Property("algorithm-expression", "ds_${user_id % 2}"))); result.setDatabaseStrategySegment(new ShardingStrategySegment("standard", "product_id", databaseAlgorithmSegment)); diff --git a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/CreateShardingTableReferenceRuleStatementUpdaterTest.java 
b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/CreateShardingTableReferenceRuleStatementUpdaterTest.java index 75fba5fbcc535..3d064c295af74 100644 --- a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/CreateShardingTableReferenceRuleStatementUpdaterTest.java +++ b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/CreateShardingTableReferenceRuleStatementUpdaterTest.java @@ -71,9 +71,9 @@ void assertUpdateWithIfNotExists() { updater.checkSQLStatement(database, sqlStatement, currentRuleConfig); ShardingRuleConfiguration toBeCreatedRuleConfig = updater.buildToBeCreatedRuleConfiguration(currentRuleConfig, sqlStatement); updater.updateCurrentRuleConfiguration(currentRuleConfig, toBeCreatedRuleConfig); - Collection referenceRuleConfigurations = currentRuleConfig.getBindingTableGroups(); - assertThat(referenceRuleConfigurations.size(), is(1)); - Iterator iterator = referenceRuleConfigurations.iterator(); + Collection referenceRuleConfigs = currentRuleConfig.getBindingTableGroups(); + assertThat(referenceRuleConfigs.size(), is(1)); + Iterator iterator = referenceRuleConfigs.iterator(); assertThat(iterator.next().getReference(), is("t_1,t_2")); } diff --git a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/CreateShardingTableRuleStatementUpdaterTest.java b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/CreateShardingTableRuleStatementUpdaterTest.java index 5b88271097fb0..1a88ef4819ac2 100644 --- a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/CreateShardingTableRuleStatementUpdaterTest.java +++ b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/CreateShardingTableRuleStatementUpdaterTest.java @@ -240,7 +240,7 @@ void 
assertUpdateWithIfNotExistsStatement() { } private AutoTableRuleSegment createCompleteAutoTableRule() { - AutoTableRuleSegment result = new AutoTableRuleSegment("t_order_item_input", Collections.singletonList("logic_ds")); + AutoTableRuleSegment result = new AutoTableRuleSegment("t_order_item_input", Collections.singleton("logic_ds")); result.setKeyGenerateStrategySegment(new KeyGenerateStrategySegment("product_id", new AlgorithmSegment("DISTSQL.FIXTURE", new Properties()))); result.setShardingColumn("order_id"); result.setShardingAlgorithmSegment(new AlgorithmSegment("FOO.DISTSQL.FIXTURE", PropertiesBuilder.build(new Property("", "")))); @@ -249,7 +249,7 @@ private AutoTableRuleSegment createCompleteAutoTableRule() { private TableRuleSegment createCompleteTableRule() { KeyGenerateStrategySegment keyGenerator = new KeyGenerateStrategySegment("product_id", new AlgorithmSegment("DISTSQL.FIXTURE", new Properties())); - TableRuleSegment result = new TableRuleSegment("t_order_input", Collections.singletonList("ds_${0..1}.t_order_${0..1}"), keyGenerator, null); + TableRuleSegment result = new TableRuleSegment("t_order_input", Collections.singleton("ds_${0..1}.t_order_${0..1}"), keyGenerator, null); result.setTableStrategySegment(new ShardingStrategySegment("standard", "product_id", new AlgorithmSegment("CORE.STANDARD.FIXTURE", new Properties()))); AlgorithmSegment databaseAlgorithmSegment = new AlgorithmSegment("inline", PropertiesBuilder.build(new Property("algorithm-expression", "ds_${user_id % 2}"))); result.setDatabaseStrategySegment(new ShardingStrategySegment("standard", "product_id", databaseAlgorithmSegment)); @@ -305,10 +305,5 @@ public RuleConfiguration getConfiguration() { public Map> getDataSourceMapper() { return Collections.singletonMap("logic_ds", null); } - - @Override - public String getType() { - return "mock"; - } } } diff --git 
a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/DropShardingAuditorStatementUpdaterTest.java b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/DropShardingAuditorStatementUpdaterTest.java index b496be82def50..d2e25849ab24f 100644 --- a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/DropShardingAuditorStatementUpdaterTest.java +++ b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/DropShardingAuditorStatementUpdaterTest.java @@ -64,7 +64,7 @@ void assertExecuteWithNotExist() { @Test void assertExecuteWithNotExistWithIfExists() { - DropShardingAuditorStatement sqlStatement = new DropShardingAuditorStatement(true, Collections.singletonList("sharding_key_required_auditor")); + DropShardingAuditorStatement sqlStatement = new DropShardingAuditorStatement(true, Collections.singleton("sharding_key_required_auditor")); updater.checkSQLStatement(database, sqlStatement, new ShardingRuleConfiguration()); } @@ -86,7 +86,7 @@ void assertExecuteWithUsed() { private ShardingAutoTableRuleConfiguration createShardingAutoTableRuleConfiguration() { ShardingAutoTableRuleConfiguration result = new ShardingAutoTableRuleConfiguration("auto_table", null); - result.setAuditStrategy(new ShardingAuditStrategyConfiguration(Collections.singletonList("sharding_key_required_auditor"), true)); + result.setAuditStrategy(new ShardingAuditStrategyConfiguration(Collections.singleton("sharding_key_required_auditor"), true)); return result; } diff --git a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/DropShardingKeyGeneratorStatementUpdaterTest.java b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/DropShardingKeyGeneratorStatementUpdaterTest.java index 3dc84acb8024a..947b675620580 100644 --- 
a/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/DropShardingKeyGeneratorStatementUpdaterTest.java +++ b/features/sharding/distsql/handler/src/test/java/org/apache/shardingsphere/sharding/distsql/update/DropShardingKeyGeneratorStatementUpdaterTest.java @@ -46,7 +46,7 @@ void assertExecuteWithNotExist() { @Test void assertExecuteWithNotExistWithIfExists() { - DropShardingKeyGeneratorStatement sqlStatement = new DropShardingKeyGeneratorStatement(true, Collections.singletonList("uuid_key_generator")); + DropShardingKeyGeneratorStatement sqlStatement = new DropShardingKeyGeneratorStatement(true, Collections.singleton("uuid_key_generator")); new DropShardingKeyGeneratorStatementUpdater().checkSQLStatement(mock(ShardingSphereDatabase.class), sqlStatement, new ShardingRuleConfiguration()); } diff --git a/features/sharding/distsql/parser/src/main/antlr4/imports/sharding/BaseRule.g4 b/features/sharding/distsql/parser/src/main/antlr4/imports/sharding/BaseRule.g4 index f1587ee566a7b..7d08bef733f8f 100644 --- a/features/sharding/distsql/parser/src/main/antlr4/imports/sharding/BaseRule.g4 +++ b/features/sharding/distsql/parser/src/main/antlr4/imports/sharding/BaseRule.g4 @@ -39,9 +39,6 @@ buildInShardingAlgorithmType | AUTO_INTERVAL | INLINE | INTERVAL - | COSID_MOD - | COSID_INTERVAL - | COSID_INTERVAL_SNOWFLAKE | COMPLEX_INLINE | HINT_INLINE | CLASS_BASED @@ -49,10 +46,7 @@ buildInShardingAlgorithmType buildInKeyGenerateAlgorithmType : SNOWFLAKE - | NANOID | UUID - | COSID - | COSID_SNOWFLAKE ; buildInShardingAuditAlgorithmType diff --git a/features/sharding/distsql/parser/src/main/antlr4/imports/sharding/Keyword.g4 b/features/sharding/distsql/parser/src/main/antlr4/imports/sharding/Keyword.g4 index cb6a0481498b5..1ede9e3a993ee 100644 --- a/features/sharding/distsql/parser/src/main/antlr4/imports/sharding/Keyword.g4 +++ b/features/sharding/distsql/parser/src/main/antlr4/imports/sharding/Keyword.g4 @@ -215,10 +215,6 @@ MOD : M 
O D ; -COSID_MOD - : C O S I D UL_ M O D - ; - HASH_MOD : H A S H UL_ M O D ; @@ -243,14 +239,6 @@ INTERVAL : I N T E R V A L ; -COSID_INTERVAL - : C O S I D UL_ I N T E R V A L - ; - -COSID_INTERVAL_SNOWFLAKE - : C O S I D UL_ I N T E R V A L UL_ S N O W F L A K E - ; - COMPLEX_INLINE : C O M P L E X UL_ I N L I N E ; @@ -267,22 +255,10 @@ SNOWFLAKE : S N O W F L A K E ; -NANOID - : N A N O I D - ; - UUID : U U I D ; -COSID - : C O S I D - ; - -COSID_SNOWFLAKE - : C O S I D UL_ S N O W F L A K E - ; - STANDARD : S T A N D A R D ; diff --git a/features/sharding/distsql/parser/src/main/java/org/apache/shardingsphere/sharding/distsql/parser/core/ShardingDistSQLStatementVisitor.java b/features/sharding/distsql/parser/src/main/java/org/apache/shardingsphere/sharding/distsql/parser/core/ShardingDistSQLStatementVisitor.java index 30f2cd6b0cb56..22afc469d1026 100644 --- a/features/sharding/distsql/parser/src/main/java/org/apache/shardingsphere/sharding/distsql/parser/core/ShardingDistSQLStatementVisitor.java +++ b/features/sharding/distsql/parser/src/main/java/org/apache/shardingsphere/sharding/distsql/parser/core/ShardingDistSQLStatementVisitor.java @@ -273,8 +273,8 @@ public ASTNode visitShardingStrategy(final ShardingStrategyContext ctx) { if ("none".equalsIgnoreCase(strategyType)) { return new ShardingStrategySegment(strategyType, null, null); } - AlgorithmSegment algorithmSegment = null != ctx.shardingAlgorithm().algorithmDefinition() ? (AlgorithmSegment) visitAlgorithmDefinition(ctx.shardingAlgorithm().algorithmDefinition()) : null; - String shardingColumn = null != ctx.shardingColumnDefinition() ? buildShardingColumn(ctx.shardingColumnDefinition()) : null; + AlgorithmSegment algorithmSegment = null == ctx.shardingAlgorithm().algorithmDefinition() ? null : (AlgorithmSegment) visitAlgorithmDefinition(ctx.shardingAlgorithm().algorithmDefinition()); + String shardingColumn = null == ctx.shardingColumnDefinition() ? 
null : buildShardingColumn(ctx.shardingColumnDefinition()); return new ShardingStrategySegment(strategyType, shardingColumn, algorithmSegment); } diff --git a/features/sharding/plugin/cosid/pom.xml b/features/sharding/plugin/cosid/pom.xml deleted file mode 100644 index 79ba4467783ac..0000000000000 --- a/features/sharding/plugin/cosid/pom.xml +++ /dev/null @@ -1,53 +0,0 @@ - - - - - 4.0.0 - - org.apache.shardingsphere - shardingsphere-sharding-plugin - 5.4.1-SNAPSHOT - - shardingsphere-sharding-cosid - ${project.artifactId} - - - 1.18.5 - - - - - org.apache.shardingsphere - shardingsphere-sharding-api - ${project.version} - - - - org.apache.shardingsphere - shardingsphere-test-util - ${project.version} - test - - - - me.ahoo.cosid - cosid-core - ${cosid.version} - - - diff --git a/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/CosIdAlgorithmConstants.java b/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/CosIdAlgorithmConstants.java deleted file mode 100644 index 4d8921f867479..0000000000000 --- a/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/CosIdAlgorithmConstants.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.sharding.cosid.algorithm; - -import lombok.AccessLevel; -import lombok.NoArgsConstructor; - -/** - * CosId algorithm constants. - */ -@NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class CosIdAlgorithmConstants { - - public static final String TYPE_PREFIX = "COSID_"; - - public static final String ID_NAME_KEY = "id-name"; - - public static final String LOGIC_NAME_PREFIX_KEY = "logic-name-prefix"; -} diff --git a/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/keygen/CosIdKeyGenerateAlgorithm.java b/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/keygen/CosIdKeyGenerateAlgorithm.java deleted file mode 100644 index 3c7b59af57be3..0000000000000 --- a/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/keygen/CosIdKeyGenerateAlgorithm.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.sharding.cosid.algorithm.keygen; - -import me.ahoo.cosid.CosId; -import me.ahoo.cosid.provider.IdGeneratorProvider; -import me.ahoo.cosid.provider.LazyIdGenerator; -import org.apache.shardingsphere.sharding.cosid.algorithm.CosIdAlgorithmConstants; -import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; - -import java.util.Properties; - -/** - * CosId key generate algorithm. - */ -public final class CosIdKeyGenerateAlgorithm implements KeyGenerateAlgorithm { - - private static final String AS_STRING_KEY = "as-string"; - - private LazyIdGenerator lazyIdGenerator; - - private boolean asString; - - @Override - public void init(final Properties props) { - lazyIdGenerator = new LazyIdGenerator(props.getProperty(CosIdAlgorithmConstants.ID_NAME_KEY, IdGeneratorProvider.SHARE)); - asString = Boolean.parseBoolean(props.getProperty(AS_STRING_KEY, Boolean.FALSE.toString())); - lazyIdGenerator.tryGet(false); - } - - @Override - public Comparable generateKey() { - if (asString) { - return lazyIdGenerator.generateAsString(); - } - return lazyIdGenerator.generate(); - } - - @Override - public String getType() { - return CosId.COSID.toUpperCase(); - } -} diff --git a/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/keygen/CosIdSnowflakeKeyGenerateAlgorithm.java b/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/keygen/CosIdSnowflakeKeyGenerateAlgorithm.java deleted file mode 100644 index fe437e8473041..0000000000000 --- a/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/keygen/CosIdSnowflakeKeyGenerateAlgorithm.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.sharding.cosid.algorithm.keygen; - -import me.ahoo.cosid.converter.Radix62IdConverter; -import me.ahoo.cosid.snowflake.ClockSyncSnowflakeId; -import me.ahoo.cosid.snowflake.MillisecondSnowflakeId; -import me.ahoo.cosid.snowflake.SnowflakeId; -import me.ahoo.cosid.snowflake.StringSnowflakeId; -import org.apache.shardingsphere.infra.instance.InstanceContext; -import org.apache.shardingsphere.infra.instance.InstanceContextAware; -import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; -import org.apache.shardingsphere.sharding.cosid.algorithm.CosIdAlgorithmConstants; -import org.apache.shardingsphere.sharding.exception.ShardingPluginException; -import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; - -import java.time.Instant; -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.util.Properties; - -/** - * CosId snowflake key generate algorithm. 
- */ -public final class CosIdSnowflakeKeyGenerateAlgorithm implements KeyGenerateAlgorithm, InstanceContextAware { - - public static final long DEFAULT_EPOCH; - - public static final String AS_STRING_KEY = "as-string"; - - public static final String EPOCH_KEY = "epoch"; - - private Properties props; - - private SnowflakeId snowflakeId; - - private boolean asString; - - private long epoch; - - static { - DEFAULT_EPOCH = LocalDateTime.of(2016, 11, 1, 0, 0, 0).toInstant(ZoneId.systemDefault().getRules().getOffset(Instant.now())).toEpochMilli(); - } - - @Override - public void init(final Properties props) { - this.props = props; - asString = getAsString(props); - epoch = getEpoch(props); - } - - private boolean getAsString(final Properties props) { - return Boolean.parseBoolean(props.getProperty(AS_STRING_KEY, Boolean.FALSE.toString())); - } - - private long getEpoch(final Properties props) { - long result = Long.parseLong(props.getProperty(EPOCH_KEY, String.valueOf(DEFAULT_EPOCH))); - ShardingSpherePreconditions.checkState(result > 0L, - () -> new ShardingPluginException("Key generate algorithm `%s` initialization failed, reason is: %s.", getType(), "Epoch must be positive.")); - return result; - } - - @Override - public void setInstanceContext(final InstanceContext instanceContext) { - int workerId = instanceContext.generateWorkerId(props); - MillisecondSnowflakeId millisecondSnowflakeId = - new MillisecondSnowflakeId(epoch, MillisecondSnowflakeId.DEFAULT_TIMESTAMP_BIT, MillisecondSnowflakeId.DEFAULT_MACHINE_BIT, MillisecondSnowflakeId.DEFAULT_SEQUENCE_BIT, workerId); - snowflakeId = new StringSnowflakeId(new ClockSyncSnowflakeId(millisecondSnowflakeId), Radix62IdConverter.PAD_START); - } - - @Override - public Comparable generateKey() { - if (asString) { - return getSnowflakeId().generateAsString(); - } - return getSnowflakeId().generate(); - } - - private SnowflakeId getSnowflakeId() { - ShardingSpherePreconditions.checkNotNull(snowflakeId, () -> new 
ShardingPluginException("Instance context not set yet.")); - return snowflakeId; - } - - @Override - public String getType() { - return CosIdAlgorithmConstants.TYPE_PREFIX + "SNOWFLAKE"; - } -} diff --git a/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/sharding/interval/AbstractCosIdIntervalShardingAlgorithm.java b/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/sharding/interval/AbstractCosIdIntervalShardingAlgorithm.java deleted file mode 100644 index 61f99ce9ca14a..0000000000000 --- a/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/sharding/interval/AbstractCosIdIntervalShardingAlgorithm.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.sharding.cosid.algorithm.sharding.interval; - -import com.google.common.collect.BoundType; -import com.google.common.collect.Range; -import me.ahoo.cosid.sharding.IntervalStep; -import me.ahoo.cosid.sharding.IntervalTimeline; -import me.ahoo.cosid.sharding.LocalDateTimeConvertor; -import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; -import org.apache.shardingsphere.sharding.api.sharding.standard.PreciseShardingValue; -import org.apache.shardingsphere.sharding.api.sharding.standard.RangeShardingValue; -import org.apache.shardingsphere.sharding.api.sharding.standard.StandardShardingAlgorithm; -import org.apache.shardingsphere.sharding.cosid.algorithm.CosIdAlgorithmConstants; -import org.apache.shardingsphere.sharding.exception.ShardingPluginException; - -import java.time.LocalDateTime; -import java.time.format.DateTimeFormatter; -import java.time.temporal.ChronoUnit; -import java.util.Collection; -import java.util.Properties; - -/** - * Abstract interval range sharding algorithm with CosId. 
- * - * @param type of sharding value - */ -public abstract class AbstractCosIdIntervalShardingAlgorithm> implements StandardShardingAlgorithm { - - public static final String DEFAULT_DATE_TIME_PATTERN = "yyyy-MM-dd HH:mm:ss"; - - public static final DateTimeFormatter DEFAULT_DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern(DEFAULT_DATE_TIME_PATTERN); - - public static final String DATE_TIME_LOWER_KEY = "datetime-lower"; - - public static final String DATE_TIME_UPPER_KEY = "datetime-upper"; - - public static final String SHARDING_SUFFIX_FORMAT_KEY = "sharding-suffix-pattern"; - - public static final String INTERVAL_UNIT_KEY = "datetime-interval-unit"; - - public static final String INTERVAL_AMOUNT_KEY = "datetime-interval-amount"; - - private IntervalTimeline intervalTimeline; - - private LocalDateTimeConvertor localDateTimeConvertor; - - @Override - public void init(final Properties props) { - intervalTimeline = createIntervalTimeline(props); - localDateTimeConvertor = createLocalDateTimeConvertor(props); - } - - private IntervalTimeline createIntervalTimeline(final Properties props) { - String logicNamePrefix = getRequiredValue(props, CosIdAlgorithmConstants.LOGIC_NAME_PREFIX_KEY); - LocalDateTime effectiveLower = LocalDateTime.parse(getRequiredValue(props, DATE_TIME_LOWER_KEY), DEFAULT_DATE_TIME_FORMATTER); - LocalDateTime effectiveUpper = LocalDateTime.parse(getRequiredValue(props, DATE_TIME_UPPER_KEY), DEFAULT_DATE_TIME_FORMATTER); - ChronoUnit stepUnit = ChronoUnit.valueOf(getRequiredValue(props, INTERVAL_UNIT_KEY)); - int stepAmount = Integer.parseInt(props.getOrDefault(INTERVAL_AMOUNT_KEY, 1).toString()); - DateTimeFormatter suffixFormatter = DateTimeFormatter.ofPattern(getRequiredValue(props, SHARDING_SUFFIX_FORMAT_KEY)); - return new IntervalTimeline(logicNamePrefix, Range.closed(effectiveLower, effectiveUpper), IntervalStep.of(stepUnit, stepAmount), suffixFormatter); - } - - private String getRequiredValue(final Properties props, final String key) { - 
ShardingSpherePreconditions.checkState(props.containsKey(key), () -> new ShardingPluginException("%s can not be null.", key)); - return props.getProperty(key); - } - - protected abstract LocalDateTimeConvertor createLocalDateTimeConvertor(Properties props); - - @Override - public String doSharding(final Collection availableTargetNames, final PreciseShardingValue shardingValue) { - return intervalTimeline.sharding(localDateTimeConvertor.toLocalDateTime(shardingValue.getValue())); - } - - @Override - public Collection doSharding(final Collection availableTargetNames, final RangeShardingValue shardingValue) { - return intervalTimeline.sharding(toLocalDateTimeRange(shardingValue.getValueRange())); - } - - @SuppressWarnings("unchecked") - private Range toLocalDateTimeRange(final Range shardingValue) { - if (Range.all().equals(shardingValue)) { - return Range.all(); - } - Object endpointValue = shardingValue.hasLowerBound() ? shardingValue.lowerEndpoint() : shardingValue.upperEndpoint(); - if (endpointValue instanceof LocalDateTime) { - return (Range) shardingValue; - } - if (shardingValue.hasLowerBound() && shardingValue.hasUpperBound()) { - LocalDateTime lower = localDateTimeConvertor.toLocalDateTime(shardingValue.lowerEndpoint()); - LocalDateTime upper = localDateTimeConvertor.toLocalDateTime(shardingValue.upperEndpoint()); - return Range.range(lower, shardingValue.lowerBoundType(), upper, shardingValue.upperBoundType()); - } - if (shardingValue.hasLowerBound()) { - LocalDateTime lower = localDateTimeConvertor.toLocalDateTime(shardingValue.lowerEndpoint()); - return BoundType.OPEN == shardingValue.lowerBoundType() ? Range.greaterThan(lower) : Range.atLeast(lower); - } - LocalDateTime upper = localDateTimeConvertor.toLocalDateTime(shardingValue.upperEndpoint()); - return BoundType.OPEN == shardingValue.upperBoundType() ? 
Range.lessThan(upper) : Range.atMost(upper); - } -} diff --git a/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/sharding/interval/CosIdIntervalShardingAlgorithm.java b/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/sharding/interval/CosIdIntervalShardingAlgorithm.java deleted file mode 100644 index c79b5a5524697..0000000000000 --- a/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/sharding/interval/CosIdIntervalShardingAlgorithm.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.sharding.cosid.algorithm.sharding.interval; - -import me.ahoo.cosid.sharding.LocalDateTimeConvertor; -import me.ahoo.cosid.sharding.StandardLocalDateTimeConvertor; -import org.apache.shardingsphere.sharding.cosid.algorithm.CosIdAlgorithmConstants; - -import java.time.ZoneId; -import java.time.format.DateTimeFormatter; -import java.util.Properties; - -/** - * Interval sharding algorithm with CosId. 
- */ -public final class CosIdIntervalShardingAlgorithm extends AbstractCosIdIntervalShardingAlgorithm> { - - private static final String ZONE_ID_KEY = "zone-id"; - - private static final String DATE_TIME_PATTERN_KEY = "datetime-pattern"; - - private static final String TIMESTAMP_SECOND_UNIT = "SECOND"; - - private static final String TIMESTAMP_UNIT_KEY = "ts-unit"; - - @Override - protected LocalDateTimeConvertor createLocalDateTimeConvertor(final Properties props) { - ZoneId zoneId = props.containsKey(ZONE_ID_KEY) ? ZoneId.of(props.getProperty(ZONE_ID_KEY)) : ZoneId.systemDefault(); - boolean isSecondTs = props.containsKey(TIMESTAMP_UNIT_KEY) && TIMESTAMP_SECOND_UNIT.equalsIgnoreCase(props.getProperty(TIMESTAMP_UNIT_KEY)); - DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern(props.getProperty(DATE_TIME_PATTERN_KEY, DEFAULT_DATE_TIME_PATTERN)); - return new StandardLocalDateTimeConvertor(zoneId, isSecondTs, dateTimeFormatter); - } - - @Override - public String getType() { - return CosIdAlgorithmConstants.TYPE_PREFIX + "INTERVAL"; - } -} diff --git a/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/sharding/interval/CosIdSnowflakeIntervalShardingAlgorithm.java b/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/sharding/interval/CosIdSnowflakeIntervalShardingAlgorithm.java deleted file mode 100644 index 966b610972f95..0000000000000 --- a/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/sharding/interval/CosIdSnowflakeIntervalShardingAlgorithm.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.sharding.cosid.algorithm.sharding.interval; - -import me.ahoo.cosid.sharding.LocalDateTimeConvertor; -import me.ahoo.cosid.sharding.SnowflakeLocalDateTimeConvertor; -import me.ahoo.cosid.snowflake.MillisecondSnowflakeId; -import me.ahoo.cosid.snowflake.MillisecondSnowflakeIdStateParser; -import me.ahoo.cosid.snowflake.SnowflakeIdStateParser; -import org.apache.shardingsphere.sharding.cosid.algorithm.CosIdAlgorithmConstants; -import org.apache.shardingsphere.sharding.cosid.algorithm.keygen.CosIdSnowflakeKeyGenerateAlgorithm; - -import java.time.ZoneId; -import java.util.Properties; - -/** - * Snowflake interval sharding algorithm with CosId. - */ -public final class CosIdSnowflakeIntervalShardingAlgorithm extends AbstractCosIdIntervalShardingAlgorithm> { - - private static final String EPOCH_KEY = "epoch"; - - private static final String ZONE_ID_KEY = "zone-id"; - - @Override - protected LocalDateTimeConvertor createLocalDateTimeConvertor(final Properties props) { - return new SnowflakeLocalDateTimeConvertor(createSnowflakeIdStateParser(props)); - } - - private SnowflakeIdStateParser createSnowflakeIdStateParser(final Properties props) { - long epoch = Long.parseLong(props.getProperty(EPOCH_KEY, String.valueOf(CosIdSnowflakeKeyGenerateAlgorithm.DEFAULT_EPOCH))); - ZoneId zoneId = props.containsKey(ZONE_ID_KEY) ? 
ZoneId.of(props.getProperty(ZONE_ID_KEY)) : ZoneId.systemDefault(); - return new MillisecondSnowflakeIdStateParser( - epoch, MillisecondSnowflakeId.DEFAULT_TIMESTAMP_BIT, MillisecondSnowflakeId.DEFAULT_MACHINE_BIT, MillisecondSnowflakeId.DEFAULT_SEQUENCE_BIT, zoneId); - } - - @Override - public String getType() { - return CosIdAlgorithmConstants.TYPE_PREFIX + "INTERVAL_SNOWFLAKE"; - } -} diff --git a/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/sharding/mod/CosIdModShardingAlgorithm.java b/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/sharding/mod/CosIdModShardingAlgorithm.java deleted file mode 100644 index 6167b0880fde6..0000000000000 --- a/features/sharding/plugin/cosid/src/main/java/org/apache/shardingsphere/sharding/cosid/algorithm/sharding/mod/CosIdModShardingAlgorithm.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.sharding.cosid.algorithm.sharding.mod; - -import me.ahoo.cosid.sharding.ModCycle; -import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; -import org.apache.shardingsphere.sharding.api.sharding.standard.PreciseShardingValue; -import org.apache.shardingsphere.sharding.api.sharding.standard.RangeShardingValue; -import org.apache.shardingsphere.sharding.api.sharding.standard.StandardShardingAlgorithm; -import org.apache.shardingsphere.sharding.cosid.algorithm.CosIdAlgorithmConstants; -import org.apache.shardingsphere.sharding.exception.ShardingPluginException; - -import java.util.Collection; -import java.util.Properties; - -/** - * Modular sharding algorithm with CosId. - * - * @param type of sharding value - */ -public final class CosIdModShardingAlgorithm> implements StandardShardingAlgorithm { - - private static final String MODULO_KEY = "mod"; - - private ModCycle modCycle; - - @Override - public void init(final Properties props) { - String divisorStr = getRequiredValue(props, MODULO_KEY); - int divisor = Integer.parseInt(divisorStr); - String logicNamePrefix = getRequiredValue(props, CosIdAlgorithmConstants.LOGIC_NAME_PREFIX_KEY); - modCycle = new ModCycle<>(divisor, logicNamePrefix); - } - - private String getRequiredValue(final Properties props, final String key) { - ShardingSpherePreconditions.checkState(props.containsKey(key), () -> new ShardingPluginException("%s can not be null.", key)); - return props.getProperty(key); - } - - @Override - public String doSharding(final Collection availableTargetNames, final PreciseShardingValue shardingValue) { - return modCycle.sharding(shardingValue.getValue()); - } - - @Override - public Collection doSharding(final Collection availableTargetNames, final RangeShardingValue shardingValue) { - return modCycle.sharding(shardingValue.getValueRange()); - } - - @Override - public String getType() { - return CosIdAlgorithmConstants.TYPE_PREFIX + "MOD"; - } -} 
diff --git a/features/sharding/plugin/cosid/src/main/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.ShardingAlgorithm b/features/sharding/plugin/cosid/src/main/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.ShardingAlgorithm deleted file mode 100644 index 2198fe21a3300..0000000000000 --- a/features/sharding/plugin/cosid/src/main/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.ShardingAlgorithm +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -org.apache.shardingsphere.sharding.cosid.algorithm.sharding.mod.CosIdModShardingAlgorithm -org.apache.shardingsphere.sharding.cosid.algorithm.sharding.interval.CosIdIntervalShardingAlgorithm -org.apache.shardingsphere.sharding.cosid.algorithm.sharding.interval.CosIdSnowflakeIntervalShardingAlgorithm diff --git a/features/sharding/plugin/cosid/src/test/java/org/apache/shardingsphere/sharding/cosid/algorithm/keygen/CosIdKeyGenerateAlgorithmTest.java b/features/sharding/plugin/cosid/src/test/java/org/apache/shardingsphere/sharding/cosid/algorithm/keygen/CosIdKeyGenerateAlgorithmTest.java deleted file mode 100644 index 13682616b3ea0..0000000000000 --- a/features/sharding/plugin/cosid/src/test/java/org/apache/shardingsphere/sharding/cosid/algorithm/keygen/CosIdKeyGenerateAlgorithmTest.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.sharding.cosid.algorithm.keygen; - -import me.ahoo.cosid.IdGenerator; -import me.ahoo.cosid.StringIdGeneratorDecorator; -import me.ahoo.cosid.converter.PrefixIdConverter; -import me.ahoo.cosid.converter.Radix62IdConverter; -import me.ahoo.cosid.provider.DefaultIdGeneratorProvider; -import me.ahoo.cosid.provider.NotFoundIdGeneratorException; -import me.ahoo.cosid.segment.DefaultSegmentId; -import me.ahoo.cosid.segment.IdSegmentDistributor; -import me.ahoo.cosid.snowflake.MillisecondSnowflakeId; -import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; -import org.apache.shardingsphere.sharding.cosid.algorithm.CosIdAlgorithmConstants; -import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; -import org.apache.shardingsphere.test.util.PropertiesBuilder; -import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; -import org.junit.jupiter.api.Test; - -import java.util.Properties; - -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.startsWith; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.junit.jupiter.api.Assertions.assertThrows; - -class CosIdKeyGenerateAlgorithmTest { - - @Test - void assertGenerateKey() { - String idName = "test-cosid"; - DefaultSegmentId defaultSegmentId = new DefaultSegmentId(new IdSegmentDistributor.Mock()); - DefaultIdGeneratorProvider.INSTANCE.set(idName, defaultSegmentId); - KeyGenerateAlgorithm algorithm = TypedSPILoader.getService(KeyGenerateAlgorithm.class, "COSID", PropertiesBuilder.build(new Property(CosIdAlgorithmConstants.ID_NAME_KEY, idName))); - assertThat(algorithm.generateKey(), is(1L)); - assertThat(algorithm.generateKey(), is(2L)); - } - - @Test - void assertGenerateKeyWhenNotSetIdName() { - DefaultSegmentId defaultSegmentId = new DefaultSegmentId(new IdSegmentDistributor.Mock()); - 
DefaultIdGeneratorProvider.INSTANCE.setShare(defaultSegmentId); - KeyGenerateAlgorithm algorithm = TypedSPILoader.getService(KeyGenerateAlgorithm.class, "COSID"); - assertThat(algorithm.generateKey(), is(1L)); - assertThat(algorithm.generateKey(), is(2L)); - } - - @Test - void assertGenerateKeyWhenIdProviderIsEmpty() { - DefaultIdGeneratorProvider.INSTANCE.clear(); - assertThrows(NotFoundIdGeneratorException.class, () -> TypedSPILoader.getService(KeyGenerateAlgorithm.class, "COSID").generateKey()); - } - - @Test - void assertGenerateKeyAsString() { - String idName = "test-cosid-as-string"; - String prefix = "test_"; - IdGenerator idGeneratorDecorator = new StringIdGeneratorDecorator(new MillisecondSnowflakeId(1, 0), new PrefixIdConverter(prefix, Radix62IdConverter.INSTANCE)); - DefaultIdGeneratorProvider.INSTANCE.set(idName, idGeneratorDecorator); - Properties props = PropertiesBuilder.build(new Property(CosIdAlgorithmConstants.ID_NAME_KEY, idName), new Property("as-string", Boolean.TRUE.toString())); - KeyGenerateAlgorithm algorithm = TypedSPILoader.getService(KeyGenerateAlgorithm.class, "COSID", props); - Comparable actual = algorithm.generateKey(); - assertThat(actual, instanceOf(String.class)); - assertThat(actual.toString(), startsWith(prefix)); - assertThat(actual.toString().length(), lessThanOrEqualTo(16)); - } -} diff --git a/features/sharding/plugin/cosid/src/test/java/org/apache/shardingsphere/sharding/cosid/algorithm/keygen/CosIdSnowflakeKeyGenerateAlgorithmTest.java b/features/sharding/plugin/cosid/src/test/java/org/apache/shardingsphere/sharding/cosid/algorithm/keygen/CosIdSnowflakeKeyGenerateAlgorithmTest.java deleted file mode 100644 index 4c1b9abace519..0000000000000 --- a/features/sharding/plugin/cosid/src/test/java/org/apache/shardingsphere/sharding/cosid/algorithm/keygen/CosIdSnowflakeKeyGenerateAlgorithmTest.java +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license 
agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.sharding.cosid.algorithm.keygen; - -import me.ahoo.cosid.converter.Radix62IdConverter; -import me.ahoo.cosid.snowflake.MillisecondSnowflakeId; -import me.ahoo.cosid.snowflake.MillisecondSnowflakeIdStateParser; -import me.ahoo.cosid.snowflake.SnowflakeIdState; -import me.ahoo.cosid.snowflake.SnowflakeIdStateParser; -import org.apache.shardingsphere.infra.config.mode.ModeConfiguration; -import org.apache.shardingsphere.infra.instance.ComputeNodeInstance; -import org.apache.shardingsphere.infra.instance.InstanceContext; -import org.apache.shardingsphere.infra.instance.metadata.InstanceMetaData; -import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; -import org.apache.shardingsphere.infra.lock.LockContext; -import org.apache.shardingsphere.infra.util.eventbus.EventBusContext; -import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; -import org.apache.shardingsphere.sharding.cosid.algorithm.keygen.fixture.WorkerIdGeneratorFixture; -import org.apache.shardingsphere.sharding.exception.ShardingPluginException; -import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; -import org.apache.shardingsphere.test.util.PropertiesBuilder; -import 
org.apache.shardingsphere.test.util.PropertiesBuilder.Property; -import org.junit.jupiter.api.Test; - -import java.util.Properties; -import java.util.concurrent.ThreadLocalRandom; -import java.util.concurrent.locks.LockSupport; - -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.closeTo; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.Mockito.mock; - -class CosIdSnowflakeKeyGenerateAlgorithmTest { - - private static final int FIXTURE_WORKER_ID = 0; - - private final SnowflakeIdStateParser snowflakeIdStateParser = new MillisecondSnowflakeIdStateParser( - CosIdSnowflakeKeyGenerateAlgorithm.DEFAULT_EPOCH, - MillisecondSnowflakeId.DEFAULT_TIMESTAMP_BIT, - MillisecondSnowflakeId.DEFAULT_MACHINE_BIT, - MillisecondSnowflakeId.DEFAULT_SEQUENCE_BIT); - - private final EventBusContext eventBusContext = new EventBusContext(); - - @Test - void assertGenerateKey() { - CosIdSnowflakeKeyGenerateAlgorithm algorithm = (CosIdSnowflakeKeyGenerateAlgorithm) TypedSPILoader.getService(KeyGenerateAlgorithm.class, "COSID_SNOWFLAKE"); - algorithm.setInstanceContext(new InstanceContext(new ComputeNodeInstance(mock(InstanceMetaData.class)), new WorkerIdGeneratorFixture(FIXTURE_WORKER_ID), - new ModeConfiguration("Standalone", null), mock(ModeContextManager.class), mock(LockContext.class), eventBusContext)); - long firstActualKey = (Long) algorithm.generateKey(); - long secondActualKey = (Long) algorithm.generateKey(); - SnowflakeIdState firstActualState = snowflakeIdStateParser.parse(firstActualKey); - SnowflakeIdState secondActualState = snowflakeIdStateParser.parse(secondActualKey); - assertThat(firstActualState.getMachineId(), is(FIXTURE_WORKER_ID)); - assertThat(firstActualState.getSequence(), is(1L)); - assertThat(secondActualState.getMachineId(), is(FIXTURE_WORKER_ID)); - long expectedSecondSequence = 2L; - 
assertThat(secondActualState.getSequence(), is(expectedSecondSequence)); - } - - @Test - void assertGenerateKeyModUniformity() { - CosIdSnowflakeKeyGenerateAlgorithm algorithm = (CosIdSnowflakeKeyGenerateAlgorithm) TypedSPILoader.getService(KeyGenerateAlgorithm.class, "COSID_SNOWFLAKE"); - algorithm.setInstanceContext(new InstanceContext(new ComputeNodeInstance(mock(InstanceMetaData.class)), new WorkerIdGeneratorFixture(FIXTURE_WORKER_ID), - new ModeConfiguration("Standalone", null), mock(ModeContextManager.class), mock(LockContext.class), eventBusContext)); - int divisor = 4; - int total = 99999; - int avg = total / divisor; - double tolerance = avg * .0015; - int mod0Counter = 0; - int mod1Counter = 0; - int mod2Counter = 0; - int mod3Counter = 0; - for (int i = 0; i < total; i++) { - long id = (Long) algorithm.generateKey(); - int mod = (int) (id % divisor); - switch (mod) { - case 0: - mod0Counter++; - break; - case 1: - mod1Counter++; - break; - case 2: - mod2Counter++; - break; - case 3: - mod3Counter++; - break; - default: - throw new IllegalStateException("Unexpected value: " + mod); - } - int wait = ThreadLocalRandom.current().nextInt(10, 1000); - LockSupport.parkNanos(wait); - } - assertThat((double) mod0Counter, closeTo(avg, tolerance)); - assertThat((double) mod1Counter, closeTo(avg, tolerance)); - assertThat((double) mod2Counter, closeTo(avg, tolerance)); - assertThat((double) mod3Counter, closeTo(avg, tolerance)); - } - - @Test - void assertGenerateKeyAsString() { - Properties props = PropertiesBuilder.build(new Property(CosIdSnowflakeKeyGenerateAlgorithm.AS_STRING_KEY, Boolean.TRUE.toString())); - CosIdSnowflakeKeyGenerateAlgorithm algorithm = (CosIdSnowflakeKeyGenerateAlgorithm) TypedSPILoader.getService(KeyGenerateAlgorithm.class, "COSID_SNOWFLAKE", props); - algorithm.setInstanceContext(new InstanceContext(new ComputeNodeInstance(mock(InstanceMetaData.class)), - new WorkerIdGeneratorFixture(FIXTURE_WORKER_ID), new ModeConfiguration("Standalone", 
null), - mock(ModeContextManager.class), mock(LockContext.class), eventBusContext)); - Comparable actualKey = algorithm.generateKey(); - assertThat(actualKey, instanceOf(String.class)); - String actualStringKey = (String) actualKey; - assertThat(actualStringKey.length(), is(Radix62IdConverter.MAX_CHAR_SIZE)); - long actualLongKey = Radix62IdConverter.PAD_START.asLong(actualStringKey); - SnowflakeIdState actualState = snowflakeIdStateParser.parse(actualLongKey); - assertThat(actualState.getMachineId(), is(FIXTURE_WORKER_ID)); - assertThat(actualState.getSequence(), is(1L)); - } - - @Test - void assertGenerateKeyWhenNoneInstanceContext() { - assertThrows(ShardingPluginException.class, () -> TypedSPILoader.getService(KeyGenerateAlgorithm.class, "COSID_SNOWFLAKE").generateKey()); - } - - @Test - void assertGenerateKeyWhenNegative() { - CosIdSnowflakeKeyGenerateAlgorithm algorithm = (CosIdSnowflakeKeyGenerateAlgorithm) TypedSPILoader.getService(KeyGenerateAlgorithm.class, "COSID_SNOWFLAKE"); - assertThrows(IllegalArgumentException.class, () -> algorithm.setInstanceContext(new InstanceContext(new ComputeNodeInstance(mock(InstanceMetaData.class)), new WorkerIdGeneratorFixture(-1), - new ModeConfiguration("Standalone", null), mock(ModeContextManager.class), mock(LockContext.class), eventBusContext))); - } - - @Test - void assertGenerateKeyWhenGreaterThen1023() { - CosIdSnowflakeKeyGenerateAlgorithm algorithm = (CosIdSnowflakeKeyGenerateAlgorithm) TypedSPILoader.getService(KeyGenerateAlgorithm.class, "COSID_SNOWFLAKE"); - assertThrows(IllegalArgumentException.class, () -> algorithm.setInstanceContext(new InstanceContext(new ComputeNodeInstance(mock(InstanceMetaData.class)), new WorkerIdGeneratorFixture(1024), - new ModeConfiguration("Standalone", null), mock(ModeContextManager.class), mock(LockContext.class), eventBusContext))); - } - - @Test - void assertEpochWhenOutOfRange() { - assertThrows(ShardingPluginException.class, () -> 
TypedSPILoader.getService(KeyGenerateAlgorithm.class, "COSID_SNOWFLAKE", PropertiesBuilder.build(new Property("epoch", "0"))).generateKey()); - } -} diff --git a/features/sharding/plugin/cosid/src/test/java/org/apache/shardingsphere/sharding/cosid/algorithm/keygen/fixture/WorkerIdGeneratorFixture.java b/features/sharding/plugin/cosid/src/test/java/org/apache/shardingsphere/sharding/cosid/algorithm/keygen/fixture/WorkerIdGeneratorFixture.java deleted file mode 100644 index 1587f46ef25cf..0000000000000 --- a/features/sharding/plugin/cosid/src/test/java/org/apache/shardingsphere/sharding/cosid/algorithm/keygen/fixture/WorkerIdGeneratorFixture.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.sharding.cosid.algorithm.keygen.fixture; - -import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.infra.instance.workerid.WorkerIdGenerator; - -import java.util.Properties; - -@RequiredArgsConstructor -public final class WorkerIdGeneratorFixture implements WorkerIdGenerator { - - private final int fixtureWorkerId; - - @Override - public int generate(final Properties props) { - return fixtureWorkerId; - } -} diff --git a/features/sharding/plugin/pom.xml b/features/sharding/plugin/pom.xml deleted file mode 100644 index 389fc2aff991f..0000000000000 --- a/features/sharding/plugin/pom.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - 4.0.0 - - org.apache.shardingsphere - shardingsphere-sharding - 5.4.1-SNAPSHOT - - shardingsphere-sharding-plugin - pom - ${project.artifactId} - - - cosid - nanoid - - diff --git a/features/sharding/pom.xml b/features/sharding/pom.xml index 49e7bacacbb76..4dad19fcd5f2c 100644 --- a/features/sharding/pom.xml +++ b/features/sharding/pom.xml @@ -31,6 +31,5 @@ api core distsql - plugin diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/insert/keygen/engine/GeneratedKeyContextEngine.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/insert/keygen/engine/GeneratedKeyContextEngine.java index edcb1764881cf..fa7921f4cdc29 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/insert/keygen/engine/GeneratedKeyContextEngine.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/insert/keygen/engine/GeneratedKeyContextEngine.java @@ -52,7 +52,7 @@ public final class GeneratedKeyContextEngine { * @return generate key context */ public Optional createGenerateKeyContext(final List insertColumnNames, final List> valueExpressions, final List params) { - String tableName = insertStatement.getTable().getTableName().getIdentifier().getValue(); + 
String tableName = Optional.ofNullable(insertStatement.getTable()).map(optional -> optional.getTableName().getIdentifier().getValue()).orElse(""); return findGenerateKeyColumn(tableName).map(optional -> containsGenerateKey(insertColumnNames, optional) ? findGeneratedKey(insertColumnNames, valueExpressions, params, optional) : new GeneratedKeyContext(optional, true)); diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/select/orderby/engine/OrderByContextEngine.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/select/orderby/engine/OrderByContextEngine.java index 5bfb1e9ba6517..2d64240ab4ee2 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/select/orderby/engine/OrderByContextEngine.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/select/orderby/engine/OrderByContextEngine.java @@ -48,7 +48,7 @@ public final class OrderByContextEngine { public OrderByContext createOrderBy(final SelectStatement selectStatement, final GroupByContext groupByContext) { if (!selectStatement.getOrderBy().isPresent() || selectStatement.getOrderBy().get().getOrderByItems().isEmpty()) { OrderByContext orderByItems = createOrderByContextForDistinctRowWithoutGroupBy(selectStatement, groupByContext); - return null != orderByItems ? orderByItems : new OrderByContext(groupByContext.getItems(), !groupByContext.getItems().isEmpty()); + return null == orderByItems ? 
new OrderByContext(groupByContext.getItems(), !groupByContext.getItems().isEmpty()) : orderByItems; } List orderByItems = new LinkedList<>(); for (OrderByItemSegment each : selectStatement.getOrderBy().get().getOrderByItems()) { diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/select/projection/engine/ProjectionEngine.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/select/projection/engine/ProjectionEngine.java index 472f7f60ba420..20460a0522935 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/select/projection/engine/ProjectionEngine.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/select/projection/engine/ProjectionEngine.java @@ -111,8 +111,8 @@ private ColumnProjection createProjection(final ColumnProjectionSegment projecti IdentifierValue owner = projectionSegment.getColumn().getOwner().isPresent() ? projectionSegment.getColumn().getOwner().get().getIdentifier() : null; IdentifierValue alias = projectionSegment.getAliasName().isPresent() ? 
projectionSegment.getAlias().orElse(null) : null; ColumnProjection result = new ColumnProjection(owner, projectionSegment.getColumn().getIdentifier(), alias, databaseType); - result.setOriginalColumn(projectionSegment.getColumn().getOriginalColumn()); - result.setOriginalTable(projectionSegment.getColumn().getOriginalTable()); + result.setOriginalColumn(projectionSegment.getColumn().getColumnBoundedInfo().getOriginalColumn()); + result.setOriginalTable(projectionSegment.getColumn().getColumnBoundedInfo().getOriginalTable()); return result; } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/select/projection/impl/ColumnProjection.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/select/projection/impl/ColumnProjection.java index e6118e17afa07..9d8c74ff031f4 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/select/projection/impl/ColumnProjection.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/select/projection/impl/ColumnProjection.java @@ -17,6 +17,7 @@ package org.apache.shardingsphere.infra.binder.context.segment.select.projection.impl; +import com.google.common.base.Strings; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.RequiredArgsConstructor; @@ -93,7 +94,7 @@ public Optional getOwner() { * @return original table */ public IdentifierValue getOriginalTable() { - if (null == originalTable) { + if (null == originalTable || Strings.isNullOrEmpty(originalTable.getValue())) { return null == owner ? new IdentifierValue("") : owner; } return originalTable; @@ -101,10 +102,10 @@ public IdentifierValue getOriginalTable() { /** * Get original column. - * + * * @return original column */ public IdentifierValue getOriginalColumn() { - return null == originalColumn ? name : originalColumn; + return null == originalColumn || Strings.isNullOrEmpty(originalColumn.getValue()) ? 
name : originalColumn; } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/table/TablesContext.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/table/TablesContext.java index d47750d7fdbca..09836ebed261d 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/table/TablesContext.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/segment/table/TablesContext.java @@ -32,6 +32,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SubqueryTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import java.util.Collection; import java.util.Collections; @@ -63,6 +64,8 @@ public final class TablesContext { private final Map> subqueryTables = new HashMap<>(); + private final Map tableNameAliasMap = new HashMap<>(); + public TablesContext(final SimpleTableSegment tableSegment, final DatabaseType databaseType) { this(Collections.singletonList(tableSegment), databaseType); } @@ -83,6 +86,7 @@ public TablesContext(final Collection tableSegments, fin tableNames.add(simpleTableSegment.getTableName().getIdentifier().getValue()); simpleTableSegment.getOwner().ifPresent(optional -> schemaNames.add(optional.getIdentifier().getValue())); findDatabaseName(simpleTableSegment, databaseType).ifPresent(databaseNames::add); + tableNameAliasMap.put(simpleTableSegment.getTableName().getIdentifier().getValue().toLowerCase(), each.getAlias().orElse(simpleTableSegment.getTableName().getIdentifier())); } if (each instanceof SubqueryTableSegment) { subqueryTables.putAll(createSubqueryTables(subqueryContexts, (SubqueryTableSegment) each)); diff --git 
a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/SQLStatementContextFactory.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/SQLStatementContextFactory.java index 95dde06e49fce..22bf72d0906ab 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/SQLStatementContextFactory.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/SQLStatementContextFactory.java @@ -59,6 +59,7 @@ import org.apache.shardingsphere.infra.binder.context.statement.dal.ShowIndexStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.dal.ShowTableStatusStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.dal.ShowTablesStatementContext; +import org.apache.shardingsphere.infra.binder.context.statement.dml.MergeStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.dcl.DenyUserStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.dml.SelectStatementContext; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; @@ -109,6 +110,7 @@ import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLLoadDataStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLLoadXMLStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.opengauss.ddl.OpenGaussCursorStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml.OracleMergeStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.sqlserver.dcl.SQLServerDenyUserStatement; import java.util.List; @@ -173,6 +175,9 @@ private static SQLStatementContext getDMLStatementContext(final ShardingSphereMe if (sqlStatement instanceof MySQLLoadXMLStatement) { return new LoadXMLStatementContext((MySQLLoadXMLStatement) sqlStatement); } + if (sqlStatement instanceof 
OracleMergeStatement) { + return new MergeStatementContext((OracleMergeStatement) sqlStatement); + } throw new UnsupportedSQLOperationException(String.format("Unsupported SQL statement `%s`", sqlStatement.getClass().getSimpleName())); } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/CloseStatementContext.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/CloseStatementContext.java index 56d8bf820bbce..03a6012dc0a4f 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/CloseStatementContext.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/CloseStatementContext.java @@ -26,6 +26,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.extractor.TableExtractor; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.cursor.CursorNameSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.CloseStatement; @@ -68,11 +69,16 @@ public void setUpCursorDefinition(final CursorStatementContext cursorStatementCo @Override public Collection getWhereSegments() { - return null != cursorStatementContext ? cursorStatementContext.getWhereSegments() : Collections.emptyList(); + return null == cursorStatementContext ? Collections.emptyList() : cursorStatementContext.getWhereSegments(); } @Override public Collection getColumnSegments() { - return null != cursorStatementContext ? cursorStatementContext.getColumnSegments() : Collections.emptyList(); + return null == cursorStatementContext ? 
Collections.emptyList() : cursorStatementContext.getColumnSegments(); + } + + @Override + public Collection getJoinConditions() { + return null == cursorStatementContext ? Collections.emptyList() : cursorStatementContext.getJoinConditions(); } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/CursorStatementContext.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/CursorStatementContext.java index eaeb280b87935..f27ddd2f6aad2 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/CursorStatementContext.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/CursorStatementContext.java @@ -29,11 +29,9 @@ import org.apache.shardingsphere.sql.parser.sql.common.extractor.TableExtractor; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.cursor.CursorNameSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; -import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement; -import org.apache.shardingsphere.sql.parser.sql.common.util.ColumnExtractor; -import org.apache.shardingsphere.sql.parser.sql.common.util.WhereExtractUtils; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.opengauss.ddl.OpenGaussCursorStatement; import java.util.Collection; @@ -51,6 +49,8 @@ public final class CursorStatementContext extends CommonSQLStatementContext impl private final Collection columnSegments = new LinkedList<>(); + private final Collection joinConditions = new LinkedList<>(); + private final TablesContext tablesContext; 
private final SelectStatementContext selectStatementContext; @@ -59,9 +59,10 @@ public CursorStatementContext(final ShardingSphereMetaData metaData, final List< final OpenGaussCursorStatement sqlStatement, final String defaultDatabaseName) { super(sqlStatement); tablesContext = new TablesContext(getSimpleTableSegments(), getDatabaseType()); - extractWhereSegments(whereSegments, sqlStatement.getSelect()); - ColumnExtractor.extractColumnSegments(columnSegments, whereSegments); selectStatementContext = new SelectStatementContext(metaData, params, sqlStatement.getSelect(), defaultDatabaseName); + whereSegments.addAll(selectStatementContext.getWhereSegments()); + columnSegments.addAll(selectStatementContext.getColumnSegments()); + joinConditions.addAll(selectStatementContext.getJoinConditions()); } private Collection getSimpleTableSegments() { @@ -70,12 +71,6 @@ private Collection getSimpleTableSegments() { return tableExtractor.getRewriteTables(); } - private void extractWhereSegments(final Collection whereSegments, final SelectStatement select) { - select.getWhere().ifPresent(whereSegments::add); - whereSegments.addAll(WhereExtractUtils.getSubqueryWhereSegments(select)); - whereSegments.addAll(WhereExtractUtils.getJoinWhereSegments(select)); - } - @Override public OpenGaussCursorStatement getSqlStatement() { return (OpenGaussCursorStatement) super.getSqlStatement(); @@ -100,4 +95,9 @@ public Collection getWhereSegments() { public Collection getColumnSegments() { return columnSegments; } + + @Override + public Collection getJoinConditions() { + return joinConditions; + } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/FetchStatementContext.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/FetchStatementContext.java index f548ed5a6715b..47925e7036b12 100644 --- 
a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/FetchStatementContext.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/FetchStatementContext.java @@ -26,6 +26,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.extractor.TableExtractor; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.cursor.CursorNameSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.FetchStatement; @@ -68,11 +69,16 @@ public void setUpCursorDefinition(final CursorStatementContext cursorStatementCo @Override public Collection getWhereSegments() { - return null != cursorStatementContext ? cursorStatementContext.getWhereSegments() : Collections.emptyList(); + return null == cursorStatementContext ? Collections.emptyList() : cursorStatementContext.getWhereSegments(); } @Override public Collection getColumnSegments() { - return null != cursorStatementContext ? cursorStatementContext.getColumnSegments() : Collections.emptyList(); + return null == cursorStatementContext ? Collections.emptyList() : cursorStatementContext.getColumnSegments(); + } + + @Override + public Collection getJoinConditions() { + return null == cursorStatementContext ? 
Collections.emptyList() : cursorStatementContext.getJoinConditions(); } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/MoveStatementContext.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/MoveStatementContext.java index 83fe0e53e67a5..be8c35b49c1be 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/MoveStatementContext.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/MoveStatementContext.java @@ -26,6 +26,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.extractor.TableExtractor; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.cursor.CursorNameSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.MoveStatement; @@ -68,11 +69,16 @@ public void setUpCursorDefinition(final CursorStatementContext cursorStatementCo @Override public Collection getWhereSegments() { - return null != cursorStatementContext ? cursorStatementContext.getWhereSegments() : Collections.emptyList(); + return null == cursorStatementContext ? Collections.emptyList() : cursorStatementContext.getWhereSegments(); } @Override public Collection getColumnSegments() { - return null != cursorStatementContext ? cursorStatementContext.getColumnSegments() : Collections.emptyList(); + return null == cursorStatementContext ? Collections.emptyList() : cursorStatementContext.getColumnSegments(); + } + + @Override + public Collection getJoinConditions() { + return null == cursorStatementContext ? 
Collections.emptyList() : cursorStatementContext.getJoinConditions(); } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/DeleteStatementContext.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/DeleteStatementContext.java index 561495e7e97a6..3fb5868ec3e0c 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/DeleteStatementContext.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/DeleteStatementContext.java @@ -24,10 +24,12 @@ import org.apache.shardingsphere.infra.binder.context.type.WhereAvailable; import org.apache.shardingsphere.sql.parser.sql.common.extractor.TableExtractor; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.DeleteStatement; import org.apache.shardingsphere.sql.parser.sql.common.util.ColumnExtractor; +import org.apache.shardingsphere.sql.parser.sql.common.util.ExpressionExtractUtils; import java.util.Collection; import java.util.HashMap; @@ -46,11 +48,14 @@ public final class DeleteStatementContext extends CommonSQLStatementContext impl private final Collection columnSegments = new LinkedList<>(); + private final Collection joinConditions = new LinkedList<>(); + public DeleteStatementContext(final DeleteStatement sqlStatement) { super(sqlStatement); tablesContext = new TablesContext(getAllSimpleTableSegments(), getDatabaseType()); getSqlStatement().getWhere().ifPresent(whereSegments::add); ColumnExtractor.extractColumnSegments(columnSegments, whereSegments); + 
ExpressionExtractUtils.extractJoinConditions(joinConditions, whereSegments); } private Collection getAllSimpleTableSegments() { @@ -98,4 +103,9 @@ public Collection getWhereSegments() { public Collection getColumnSegments() { return columnSegments; } + + @Override + public Collection getJoinConditions() { + return joinConditions; + } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/InsertStatementContext.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/InsertStatementContext.java index 56144c459bbae..2099a80a8f22d 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/InsertStatementContext.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/InsertStatementContext.java @@ -18,8 +18,6 @@ package org.apache.shardingsphere.infra.binder.context.statement.dml; import lombok.Getter; -import org.apache.shardingsphere.infra.exception.dialect.exception.syntax.database.NoDatabaseSelectedException; -import org.apache.shardingsphere.infra.exception.dialect.exception.syntax.database.UnknownDatabaseException; import org.apache.shardingsphere.infra.binder.context.aware.ParameterAware; import org.apache.shardingsphere.infra.binder.context.segment.insert.keygen.GeneratedKeyContext; import org.apache.shardingsphere.infra.binder.context.segment.insert.keygen.engine.GeneratedKeyContextEngine; @@ -30,10 +28,13 @@ import org.apache.shardingsphere.infra.binder.context.statement.CommonSQLStatementContext; import org.apache.shardingsphere.infra.binder.context.type.TableAvailable; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; +import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; +import org.apache.shardingsphere.infra.exception.dialect.exception.syntax.database.NoDatabaseSelectedException; +import 
org.apache.shardingsphere.infra.exception.dialect.exception.syntax.database.UnknownDatabaseException; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; -import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; +import org.apache.shardingsphere.sql.parser.sql.common.enums.SubqueryType; import org.apache.shardingsphere.sql.parser.sql.common.extractor.TableExtractor; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.AssignmentSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.InsertValuesSegment; @@ -93,7 +94,8 @@ public InsertStatementContext(final ShardingSphereMetaData metaData, final List< onDuplicateKeyUpdateValueContext = getOnDuplicateKeyUpdateValueContext(params, parametersOffset).orElse(null); tablesContext = new TablesContext(getAllSimpleTableSegments(), getDatabaseType()); ShardingSphereSchema schema = getSchema(metaData, defaultDatabaseName); - columnNames = containsInsertColumns() ? insertColumnNames : schema.getVisibleColumnNames(sqlStatement.getTable().getTableName().getIdentifier().getValue()); + columnNames = containsInsertColumns() ? 
insertColumnNames + : Optional.ofNullable(sqlStatement.getTable()).map(optional -> schema.getVisibleColumnNames(optional.getTableName().getIdentifier().getValue())).orElseGet(Collections::emptyList); generatedKeyContext = new GeneratedKeyContextEngine(sqlStatement, schema).createGenerateKeyContext(insertColumnNames, getAllValueExpressions(sqlStatement), params).orElse(null); } @@ -129,6 +131,7 @@ private Optional getInsertSelectContext(final ShardingSpher } SubquerySegment insertSelectSegment = getSqlStatement().getInsertSelect().get(); SelectStatementContext selectStatementContext = new SelectStatementContext(metaData, params, insertSelectSegment.getSelect(), defaultDatabaseName); + selectStatementContext.setSubqueryType(SubqueryType.INSERT_SELECT_SUBQUERY); InsertSelectContext insertSelectContext = new InsertSelectContext(selectStatementContext, params, paramsOffset.get()); paramsOffset.addAndGet(insertSelectContext.getParameterCount()); return Optional.of(insertSelectContext); @@ -164,7 +167,7 @@ public List> getGroupedParameters() { for (InsertValueContext each : insertValueContexts) { result.add(each.getParameters()); } - if (null != insertSelectContext) { + if (null != insertSelectContext && !insertSelectContext.getParameters().isEmpty()) { result.add(insertSelectContext.getParameters()); } return result; diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/MergeStatementContext.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/MergeStatementContext.java new file mode 100644 index 0000000000000..40ab28d71d29d --- /dev/null +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/MergeStatementContext.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.binder.context.statement.dml; + +import lombok.Getter; +import org.apache.shardingsphere.infra.binder.context.statement.CommonSQLStatementContext; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml.OracleMergeStatement; + +/** + * Merge statement context. 
+ */ +@Getter +public final class MergeStatementContext extends CommonSQLStatementContext { + + public MergeStatementContext(final OracleMergeStatement sqlStatement) { + super(sqlStatement); + } +} diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/SelectStatementContext.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/SelectStatementContext.java index f634f5e4c4717..b06a78ad1e1b7 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/SelectStatementContext.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/SelectStatementContext.java @@ -50,6 +50,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.enums.SubqueryType; import org.apache.shardingsphere.sql.parser.sql.common.extractor.TableExtractor; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.ParameterMarkerExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; @@ -101,6 +102,8 @@ public final class SelectStatementContext extends CommonSQLStatementContext impl private final Collection columnSegments = new LinkedList<>(); + private final Collection joinConditions = new LinkedList<>(); + private final boolean containsEnhancedTable; private SubqueryType subqueryType; @@ -113,6 +116,7 @@ public SelectStatementContext(final ShardingSphereMetaData metaData, final List< super(sqlStatement); extractWhereSegments(whereSegments, sqlStatement); ColumnExtractor.extractColumnSegments(columnSegments, whereSegments); + 
ExpressionExtractUtils.extractJoinConditions(joinConditions, whereSegments); subqueryContexts = createSubqueryContexts(metaData, params, defaultDatabaseName); tablesContext = new TablesContext(getAllTableSegments(), subqueryContexts, getDatabaseType()); groupByContext = new GroupByContextEngine().createGroupByContext(sqlStatement); @@ -356,6 +360,11 @@ public Collection getColumnSegments() { return columnSegments; } + @Override + public Collection getJoinConditions() { + return joinConditions; + } + private void extractWhereSegments(final Collection whereSegments, final SelectStatement selectStatement) { selectStatement.getWhere().ifPresent(whereSegments::add); whereSegments.addAll(WhereExtractUtils.getSubqueryWhereSegments(selectStatement)); @@ -374,6 +383,15 @@ private Collection getAllTableSegments() { return result; } + /** + * Judge whether sql statement contains table subquery segment or not. + * + * @return whether sql statement contains table subquery segment or not + */ + public boolean containsTableSubquery() { + return getSqlStatement().getFrom() instanceof SubqueryTableSegment; + } + @Override public void setUpParameters(final List params) { paginationContext = new PaginationContextEngine().createPaginationContext(getSqlStatement(), projectionsContext, params, whereSegments); diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/UpdateStatementContext.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/UpdateStatementContext.java index 32b22a9ec8e2b..da44a84703cc0 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/UpdateStatementContext.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/statement/dml/UpdateStatementContext.java @@ -24,10 +24,12 @@ import org.apache.shardingsphere.infra.binder.context.type.WhereAvailable; import 
org.apache.shardingsphere.sql.parser.sql.common.extractor.TableExtractor; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.UpdateStatement; import org.apache.shardingsphere.sql.parser.sql.common.util.ColumnExtractor; +import org.apache.shardingsphere.sql.parser.sql.common.util.ExpressionExtractUtils; import java.util.Collection; import java.util.LinkedList; @@ -44,11 +46,14 @@ public final class UpdateStatementContext extends CommonSQLStatementContext impl private final Collection columnSegments = new LinkedList<>(); + private final Collection joinConditions = new LinkedList<>(); + public UpdateStatementContext(final UpdateStatement sqlStatement) { super(sqlStatement); tablesContext = new TablesContext(getAllSimpleTableSegments(), getDatabaseType()); getSqlStatement().getWhere().ifPresent(whereSegments::add); ColumnExtractor.extractColumnSegments(columnSegments, whereSegments); + ExpressionExtractUtils.extractJoinConditions(joinConditions, whereSegments); } private Collection getAllSimpleTableSegments() { diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/type/WhereAvailable.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/type/WhereAvailable.java index 7a812feaebfe0..d90931c0fe193 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/type/WhereAvailable.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/context/type/WhereAvailable.java @@ -18,6 +18,7 @@ package org.apache.shardingsphere.infra.binder.context.type; import 
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; import java.util.Collection; @@ -40,4 +41,11 @@ public interface WhereAvailable { * @return column segments */ Collection getColumnSegments(); + + /** + * Get join condition segments. + * + * @return join condition segments + */ + Collection getJoinConditions(); } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/engine/SQLBindEngine.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/engine/SQLBindEngine.java index 5847c5800ba13..a42e3a4639baf 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/engine/SQLBindEngine.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/engine/SQLBindEngine.java @@ -17,11 +17,15 @@ package org.apache.shardingsphere.infra.binder.engine; +import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContextFactory; import org.apache.shardingsphere.infra.binder.statement.ddl.CursorStatementBinder; +import org.apache.shardingsphere.infra.binder.statement.dml.DeleteStatementBinder; import org.apache.shardingsphere.infra.binder.statement.dml.InsertStatementBinder; +import org.apache.shardingsphere.infra.binder.statement.dml.MergeStatementBinder; import org.apache.shardingsphere.infra.binder.statement.dml.SelectStatementBinder; +import org.apache.shardingsphere.infra.binder.statement.dml.UpdateStatementBinder; import org.apache.shardingsphere.infra.hint.HintValueContext; import org.apache.shardingsphere.infra.hint.SQLHintUtils; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; @@ -29,8 +33,11 @@ import 
org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.DDLStatement; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.DMLStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.DeleteStatement; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.MergeStatement; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.UpdateStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.opengauss.ddl.OpenGaussCursorStatement; import java.util.List; @@ -38,15 +45,19 @@ /** * SQL bind engine. */ +@RequiredArgsConstructor public final class SQLBindEngine { private final ShardingSphereMetaData metaData; private final String defaultDatabaseName; + private final HintValueContext hintValueContext; + public SQLBindEngine(final ShardingSphereMetaData metaData, final String defaultDatabaseName) { this.metaData = metaData; this.defaultDatabaseName = defaultDatabaseName; + this.hintValueContext = new HintValueContext(); } /** @@ -62,7 +73,7 @@ public SQLStatementContext bind(final SQLStatement sqlStatement, final List tableBinderContexts) { + Collection boundedColumns = new LinkedList<>(); + segment.getColumns().forEach(each -> boundedColumns.add(ColumnSegmentBinder.bind(each, SegmentType.INSERT_COLUMNS, statementBinderContext, tableBinderContexts, Collections.emptyMap()))); + return new InsertColumnsSegment(segment.getStartIndex(), segment.getStopIndex(), boundedColumns); + } +} diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/combine/CombineSegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/combine/CombineSegmentBinder.java index 
89eb00a14af93..b305daf2e3e18 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/combine/CombineSegmentBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/combine/CombineSegmentBinder.java @@ -19,6 +19,7 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.infra.binder.statement.dml.SelectStatementBinder; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.combine.CombineSegment; @@ -34,13 +35,16 @@ public final class CombineSegmentBinder { * Bind combine segment with metadata. * * @param segment table segment - * @param metaData meta data - * @param defaultDatabaseName default database name + * @param statementBinderContext statement binder context * @return bounded combine segment */ - public static CombineSegment bind(final CombineSegment segment, final ShardingSphereMetaData metaData, final String defaultDatabaseName) { - SelectStatement boundedLeftSelect = new SelectStatementBinder().bind(segment.getLeft(), metaData, defaultDatabaseName); - SelectStatement boundedRightSelect = new SelectStatementBinder().bind(segment.getRight(), metaData, defaultDatabaseName); + public static CombineSegment bind(final CombineSegment segment, final SQLStatementBinderContext statementBinderContext) { + ShardingSphereMetaData metaData = statementBinderContext.getMetaData(); + String defaultDatabaseName = statementBinderContext.getDefaultDatabaseName(); + SelectStatement boundedLeftSelect = new SelectStatementBinder().bindWithExternalTableContexts(segment.getLeft(), metaData, defaultDatabaseName, + statementBinderContext.getExternalTableBinderContexts()); + SelectStatement boundedRightSelect = new SelectStatementBinder().bindWithExternalTableContexts(segment.getRight(), metaData, 
defaultDatabaseName, + statementBinderContext.getExternalTableBinderContexts()); return new CombineSegment(segment.getStartIndex(), segment.getStopIndex(), boundedLeftSelect, segment.getCombineType(), boundedRightSelect); } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/ExpressionSegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/ExpressionSegmentBinder.java index b342ac81d2e34..06384e0a6d028 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/ExpressionSegmentBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/ExpressionSegmentBinder.java @@ -19,17 +19,28 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.binder.enums.SegmentType; import org.apache.shardingsphere.infra.binder.segment.expression.impl.BinaryOperationExpressionBinder; +import org.apache.shardingsphere.infra.binder.segment.expression.impl.ColumnSegmentBinder; import org.apache.shardingsphere.infra.binder.segment.expression.impl.ExistsSubqueryExpressionBinder; +import org.apache.shardingsphere.infra.binder.segment.expression.impl.FunctionExpressionSegmentBinder; import org.apache.shardingsphere.infra.binder.segment.expression.impl.InExpressionBinder; +import org.apache.shardingsphere.infra.binder.segment.expression.impl.NotExpressionBinder; import org.apache.shardingsphere.infra.binder.segment.expression.impl.SubqueryExpressionSegmentBinder; -import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import 
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExistsSubqueryExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.FunctionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.InExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.NotExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubqueryExpressionSegment; +import java.util.LinkedHashMap; +import java.util.Map; + /** * Expression segment binder. */ @@ -40,22 +51,37 @@ public final class ExpressionSegmentBinder { * Bind expression segment with metadata. * * @param segment expression segment - * @param metaData metaData - * @param defaultDatabaseName default database name + * @param parentSegmentType parent segment type + * @param statementBinderContext statement binder context + * @param tableBinderContexts table binder contexts + * @param outerTableBinderContexts outer table binder contexts * @return bounded expression segment */ - public static ExpressionSegment bind(final ExpressionSegment segment, final ShardingSphereMetaData metaData, final String defaultDatabaseName) { + public static ExpressionSegment bind(final ExpressionSegment segment, final SegmentType parentSegmentType, final SQLStatementBinderContext statementBinderContext, + final Map tableBinderContexts, final Map outerTableBinderContexts) { if (segment instanceof BinaryOperationExpression) { - return BinaryOperationExpressionBinder.bind((BinaryOperationExpression) segment, metaData, defaultDatabaseName); + return BinaryOperationExpressionBinder.bind((BinaryOperationExpression) segment, parentSegmentType, statementBinderContext, tableBinderContexts, outerTableBinderContexts); } if (segment instanceof 
ExistsSubqueryExpression) { - return ExistsSubqueryExpressionBinder.bind((ExistsSubqueryExpression) segment, metaData, defaultDatabaseName); + return ExistsSubqueryExpressionBinder.bind((ExistsSubqueryExpression) segment, statementBinderContext, tableBinderContexts); } if (segment instanceof SubqueryExpressionSegment) { - return SubqueryExpressionSegmentBinder.bind((SubqueryExpressionSegment) segment, metaData, defaultDatabaseName); + Map newOuterTableBinderContexts = new LinkedHashMap<>(); + newOuterTableBinderContexts.putAll(outerTableBinderContexts); + newOuterTableBinderContexts.putAll(tableBinderContexts); + return SubqueryExpressionSegmentBinder.bind((SubqueryExpressionSegment) segment, statementBinderContext, newOuterTableBinderContexts); } if (segment instanceof InExpression) { - return InExpressionBinder.bind((InExpression) segment, metaData, defaultDatabaseName); + return InExpressionBinder.bind((InExpression) segment, parentSegmentType, statementBinderContext, tableBinderContexts); + } + if (segment instanceof NotExpression) { + return NotExpressionBinder.bind((NotExpression) segment, parentSegmentType, statementBinderContext, tableBinderContexts); + } + if (segment instanceof ColumnSegment) { + return ColumnSegmentBinder.bind((ColumnSegment) segment, parentSegmentType, statementBinderContext, tableBinderContexts, outerTableBinderContexts); + } + if (segment instanceof FunctionSegment) { + return FunctionExpressionSegmentBinder.bind((FunctionSegment) segment, parentSegmentType, statementBinderContext, tableBinderContexts, outerTableBinderContexts); } // TODO support more ExpressionSegment bind return segment; diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/BinaryOperationExpressionBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/BinaryOperationExpressionBinder.java index 913aabbcafce9..2b1b60915fa37 100644 --- 
a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/BinaryOperationExpressionBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/BinaryOperationExpressionBinder.java @@ -19,11 +19,15 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.binder.enums.SegmentType; import org.apache.shardingsphere.infra.binder.segment.expression.ExpressionSegmentBinder; -import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; +import java.util.Map; + /** * Binary operation expression binder. */ @@ -34,13 +38,16 @@ public final class BinaryOperationExpressionBinder { * Bind binary operation expression with metadata. 
* * @param segment binary operation expression segment - * @param metaData metaData - * @param defaultDatabaseName default database name + * @param parentSegmentType parent segment type + * @param statementBinderContext statement binder context + * @param tableBinderContexts table binder contexts + * @param outerTableBinderContexts outer table binder contexts * @return bounded binary operation expression segment */ - public static BinaryOperationExpression bind(final BinaryOperationExpression segment, final ShardingSphereMetaData metaData, final String defaultDatabaseName) { - ExpressionSegment boundedLeft = ExpressionSegmentBinder.bind(segment.getLeft(), metaData, defaultDatabaseName); - ExpressionSegment boundedRight = ExpressionSegmentBinder.bind(segment.getRight(), metaData, defaultDatabaseName); + public static BinaryOperationExpression bind(final BinaryOperationExpression segment, final SegmentType parentSegmentType, final SQLStatementBinderContext statementBinderContext, + final Map tableBinderContexts, final Map outerTableBinderContexts) { + ExpressionSegment boundedLeft = ExpressionSegmentBinder.bind(segment.getLeft(), parentSegmentType, statementBinderContext, tableBinderContexts, outerTableBinderContexts); + ExpressionSegment boundedRight = ExpressionSegmentBinder.bind(segment.getRight(), parentSegmentType, statementBinderContext, tableBinderContexts, outerTableBinderContexts); return new BinaryOperationExpression(segment.getStartIndex(), segment.getStopIndex(), boundedLeft, boundedRight, segment.getOperator(), segment.getText()); } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/ColumnSegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/ColumnSegmentBinder.java index 5f75ec882c749..d59a8884b18ee 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/ColumnSegmentBinder.java +++ 
b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/ColumnSegmentBinder.java @@ -17,21 +17,34 @@ package org.apache.shardingsphere.infra.binder.segment.expression.impl; +import com.google.common.base.Strings; import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.groovy.util.Maps; +import org.apache.shardingsphere.infra.binder.enums.SegmentType; +import org.apache.shardingsphere.infra.binder.segment.from.FunctionTableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.segment.from.SimpleTableSegmentBinderContext; import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.infra.exception.AmbiguousColumnException; import org.apache.shardingsphere.infra.exception.UnknownColumnException; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ColumnProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.bounded.ColumnSegmentBoundedInfo; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.Iterator; import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.ListIterator; import java.util.Map; +import java.util.Optional; /** * Column segment binder. 
@@ -39,41 +52,216 @@ @NoArgsConstructor(access = AccessLevel.PRIVATE) public final class ColumnSegmentBinder { - private static final Collection EXCLUDE_BIND_COLUMNS = new LinkedHashSet<>(Arrays.asList("ROWNUM", "ROW_NUMBER")); + private static final Collection EXCLUDE_BIND_COLUMNS = new LinkedHashSet<>(Arrays.asList("ROWNUM", "ROW_NUMBER", "ROWNUM_", "SYSDATE", "SYSTIMESTAMP", "CURRENT_TIMESTAMP", + "LOCALTIMESTAMP", "UID", "USER", "NEXTVAL", "ROWID")); + + private static final Map SEGMENT_TYPE_MESSAGES = Maps.of(SegmentType.PROJECTION, "field list", SegmentType.JOIN_ON, "on clause", SegmentType.JOIN_USING, "from clause", + SegmentType.PREDICATE, "where clause", SegmentType.ORDER_BY, "order clause", SegmentType.GROUP_BY, "group statement", SegmentType.INSERT_COLUMNS, "field list"); + + private static final String UNKNOWN_SEGMENT_TYPE_MESSAGE = "unknown clause"; /** * Bind column segment with metadata. * * @param segment table segment + * @param parentSegmentType parent segment type + * @param statementBinderContext statement binder context * @param tableBinderContexts table binder contexts + * @param outerTableBinderContexts outer table binder contexts * @return bounded column segment */ - public static ColumnSegment bind(final ColumnSegment segment, final Map tableBinderContexts) { + public static ColumnSegment bind(final ColumnSegment segment, final SegmentType parentSegmentType, final SQLStatementBinderContext statementBinderContext, + final Map tableBinderContexts, final Map outerTableBinderContexts) { if (EXCLUDE_BIND_COLUMNS.contains(segment.getIdentifier().getValue().toUpperCase())) { return segment; } ColumnSegment result = new ColumnSegment(segment.getStartIndex(), segment.getStopIndex(), segment.getIdentifier()); segment.getOwner().ifPresent(result::setOwner); Collection tableBinderContextValues = - segment.getOwner().isPresent() ? 
Collections.singleton(tableBinderContexts.get(segment.getOwner().get().getIdentifier().getValue())) : tableBinderContexts.values(); - ColumnSegment inputColumnSegment = findInputColumnSegment(segment.getIdentifier().getValue(), tableBinderContextValues); - result.setOriginalDatabase(inputColumnSegment.getOriginalDatabase()); - result.setOriginalSchema(inputColumnSegment.getOriginalSchema()); - result.setOriginalTable(null == segment.getOriginalTable() ? inputColumnSegment.getOriginalTable() : segment.getOriginalTable()); - result.setOriginalColumn(null == segment.getOriginalColumn() ? segment.getIdentifier() : segment.getOriginalColumn()); + getTableSegmentBinderContexts(segment, parentSegmentType, statementBinderContext, tableBinderContexts, outerTableBinderContexts); + Optional inputColumnSegment = findInputColumnSegment(segment, parentSegmentType, tableBinderContextValues, outerTableBinderContexts, statementBinderContext); + inputColumnSegment.ifPresent(optional -> result.setVariable(optional.isVariable())); + result.setColumnBoundedInfo(createColumnSegmentBoundedInfo(segment, inputColumnSegment.orElse(null))); return result; } - private static ColumnSegment findInputColumnSegment(final String columnName, final Collection tableBinderContexts) { + private static Collection getTableSegmentBinderContexts(final ColumnSegment segment, final SegmentType parentSegmentType, + final SQLStatementBinderContext statementBinderContext, + final Map tableBinderContexts, + final Map outerTableBinderContexts) { + if (segment.getOwner().isPresent()) { + return getTableBinderContextByOwner(segment.getOwner().get().getIdentifier().getValue().toLowerCase(), tableBinderContexts, outerTableBinderContexts, + statementBinderContext.getExternalTableBinderContexts()); + } + if (!statementBinderContext.getJoinTableProjectionSegments().isEmpty() && isNeedUseJoinTableProjectionBind(segment, parentSegmentType, statementBinderContext)) { + return Collections.singleton(new 
SimpleTableSegmentBinderContext(statementBinderContext.getJoinTableProjectionSegments())); + } + return tableBinderContexts.values(); + } + + private static boolean isNeedUseJoinTableProjectionBind(final ColumnSegment segment, final SegmentType parentSegmentType, final SQLStatementBinderContext statementBinderContext) { + return SegmentType.PROJECTION == parentSegmentType + || SegmentType.PREDICATE == parentSegmentType && statementBinderContext.getUsingColumnNames().contains(segment.getIdentifier().getValue().toLowerCase()); + } + + private static Collection getTableBinderContextByOwner(final String owner, final Map tableBinderContexts, + final Map outerTableBinderContexts, + final Map externalTableBinderContexts) { + if (tableBinderContexts.containsKey(owner)) { + return Collections.singleton(tableBinderContexts.get(owner)); + } + if (outerTableBinderContexts.containsKey(owner)) { + return Collections.singleton(outerTableBinderContexts.get(owner)); + } + if (externalTableBinderContexts.containsKey(owner)) { + return Collections.singleton(externalTableBinderContexts.get(owner)); + } + return Collections.emptyList(); + } + + private static Optional findInputColumnSegment(final ColumnSegment segment, final SegmentType parentSegmentType, final Collection tableBinderContexts, + final Map outerTableBinderContexts, final SQLStatementBinderContext statementBinderContext) { ColumnSegment result = null; + boolean isFindInputColumn = false; + for (TableSegmentBinderContext each : tableBinderContexts) { + Optional projectionSegment = each.findProjectionSegmentByColumnLabel(segment.getIdentifier().getValue()); + if (projectionSegment.isPresent() && projectionSegment.get() instanceof ColumnProjectionSegment) { + ShardingSpherePreconditions.checkState(null == result, + () -> new AmbiguousColumnException(segment.getExpression(), SEGMENT_TYPE_MESSAGES.getOrDefault(parentSegmentType, UNKNOWN_SEGMENT_TYPE_MESSAGE))); + result = ((ColumnProjectionSegment) 
projectionSegment.get()).getColumn(); + } + if (!isFindInputColumn && projectionSegment.isPresent()) { + isFindInputColumn = true; + } + } + if (!isFindInputColumn) { + Optional projectionSegment = findInputColumnSegmentFromOuterTable(segment, outerTableBinderContexts); + isFindInputColumn = projectionSegment.isPresent(); + if (projectionSegment.isPresent() && projectionSegment.get() instanceof ColumnProjectionSegment) { + result = ((ColumnProjectionSegment) projectionSegment.get()).getColumn(); + } + } + if (!isFindInputColumn) { + Optional projectionSegment = findInputColumnSegmentFromExternalTables(segment, statementBinderContext.getExternalTableBinderContexts()); + isFindInputColumn = projectionSegment.isPresent(); + if (projectionSegment.isPresent() && projectionSegment.get() instanceof ColumnProjectionSegment) { + result = ((ColumnProjectionSegment) projectionSegment.get()).getColumn(); + } + } + if (!isFindInputColumn) { + result = findInputColumnSegmentByVariables(segment, statementBinderContext.getVariableNames()).orElse(null); + isFindInputColumn = result != null; + } + if (!isFindInputColumn) { + result = findInputColumnSegmentByPivotColumns(segment, statementBinderContext.getPivotColumnNames()).orElse(null); + isFindInputColumn = result != null; + } + ShardingSpherePreconditions.checkState(isFindInputColumn || containsFunctionTable(tableBinderContexts, outerTableBinderContexts.values()), + () -> new UnknownColumnException(segment.getExpression(), SEGMENT_TYPE_MESSAGES.getOrDefault(parentSegmentType, UNKNOWN_SEGMENT_TYPE_MESSAGE))); + return Optional.ofNullable(result); + } + + private static Optional findInputColumnSegmentByPivotColumns(final ColumnSegment segment, final Collection pivotColumnNames) { + if (pivotColumnNames.isEmpty()) { + return Optional.empty(); + } + if (pivotColumnNames.contains(segment.getIdentifier().getValue().toLowerCase())) { + return Optional.of(new ColumnSegment(0, 0, segment.getIdentifier())); + } + return Optional.empty(); + 
} + + private static Optional findInputColumnSegmentFromOuterTable(final ColumnSegment segment, final Map outerTableBinderContexts) { + ListIterator listIterator = new ArrayList<>(outerTableBinderContexts.values()).listIterator(outerTableBinderContexts.size()); + while (listIterator.hasPrevious()) { + TableSegmentBinderContext each = listIterator.previous(); + Optional result = each.findProjectionSegmentByColumnLabel(segment.getIdentifier().getValue()); + if (result.isPresent()) { + return result; + } + } + return Optional.empty(); + } + + private static Optional findInputColumnSegmentFromExternalTables(final ColumnSegment segment, final Map externalTableBinderContexts) { + for (TableSegmentBinderContext each : externalTableBinderContexts.values()) { + Optional result = each.findProjectionSegmentByColumnLabel(segment.getIdentifier().getValue()); + if (result.isPresent()) { + return result; + } + } + return Optional.empty(); + } + + private static Optional findInputColumnSegmentByVariables(final ColumnSegment segment, final Collection variableNames) { + if (variableNames.isEmpty()) { + return Optional.empty(); + } + if (variableNames.contains(segment.getIdentifier().getValue().toLowerCase())) { + ColumnSegment result = new ColumnSegment(0, 0, segment.getIdentifier()); + result.setVariable(true); + return Optional.of(result); + } + return Optional.empty(); + } + + private static boolean containsFunctionTable(final Collection tableBinderContexts, final Collection outerBinderContexts) { + for (TableSegmentBinderContext each : tableBinderContexts) { + if (each instanceof FunctionTableSegmentBinderContext) { + return true; + } + } + for (TableSegmentBinderContext each : outerBinderContexts) { + if (each instanceof FunctionTableSegmentBinderContext) { + return true; + } + } + return false; + } + + private static ColumnSegmentBoundedInfo createColumnSegmentBoundedInfo(final ColumnSegment segment, final ColumnSegment inputColumnSegment) { + IdentifierValue originalDatabase 
= null == inputColumnSegment ? null : inputColumnSegment.getColumnBoundedInfo().getOriginalDatabase(); + IdentifierValue originalSchema = null == inputColumnSegment ? null : inputColumnSegment.getColumnBoundedInfo().getOriginalSchema(); + IdentifierValue segmentOriginalTable = segment.getColumnBoundedInfo().getOriginalTable(); + IdentifierValue originalTable = Strings.isNullOrEmpty(segmentOriginalTable.getValue()) + ? Optional.ofNullable(inputColumnSegment).map(optional -> optional.getColumnBoundedInfo().getOriginalTable()).orElse(segmentOriginalTable) + : segmentOriginalTable; + IdentifierValue segmentOriginalColumn = segment.getColumnBoundedInfo().getOriginalColumn(); + IdentifierValue originalColumn = Strings.isNullOrEmpty(segmentOriginalColumn.getValue()) + ? Optional.ofNullable(inputColumnSegment).map(optional -> optional.getColumnBoundedInfo().getOriginalColumn()).orElse(segmentOriginalColumn) + : segmentOriginalColumn; + return new ColumnSegmentBoundedInfo(originalDatabase, originalSchema, originalTable, originalColumn); + } + + /** + * Bind using column segment with metadata. 
+ * + * @param segment using column segment + * @param parentSegmentType parent segment type + * @param tableBinderContexts table binder contexts + * @return bounded using column segment + */ + public static ColumnSegment bindUsingColumn(final ColumnSegment segment, final SegmentType parentSegmentType, final Map tableBinderContexts) { + ColumnSegment result = new ColumnSegment(segment.getStartIndex(), segment.getStopIndex(), segment.getIdentifier()); + segment.getOwner().ifPresent(result::setOwner); + Collection tableBinderContextValues = tableBinderContexts.values(); + Collection usingInputColumnSegments = findUsingInputColumnSegments(segment.getIdentifier().getValue(), tableBinderContextValues); + ShardingSpherePreconditions.checkState(usingInputColumnSegments.size() >= 2, + () -> new UnknownColumnException(segment.getExpression(), SEGMENT_TYPE_MESSAGES.getOrDefault(parentSegmentType, UNKNOWN_SEGMENT_TYPE_MESSAGE))); + Iterator iterator = usingInputColumnSegments.iterator(); + result.setColumnBoundedInfo(createColumnSegmentBoundedInfo(segment, iterator.next())); + result.setOtherUsingColumnBoundedInfo(createColumnSegmentBoundedInfo(segment, iterator.next())); + return result; + } + + private static Collection findUsingInputColumnSegments(final String columnName, final Collection tableBinderContexts) { + Collection result = new LinkedList<>(); for (TableSegmentBinderContext each : tableBinderContexts) { - ProjectionSegment projectionSegment = each.getProjectionSegmentByColumnLabel(columnName); - if (projectionSegment instanceof ColumnProjectionSegment) { - ShardingSpherePreconditions.checkState(null == result, () -> new AmbiguousColumnException(columnName)); - result = ((ColumnProjectionSegment) projectionSegment).getColumn(); + Optional projectionSegment = each.findProjectionSegmentByColumnLabel(columnName); + if (projectionSegment.isPresent() && projectionSegment.get() instanceof ColumnProjectionSegment) { + result.add(((ColumnProjectionSegment) 
projectionSegment.get()).getColumn()); } } - ShardingSpherePreconditions.checkNotNull(result, () -> new UnknownColumnException(columnName)); return result; } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/ExistsSubqueryExpressionBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/ExistsSubqueryExpressionBinder.java index dac518c89dd1e..7bec71aeab16a 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/ExistsSubqueryExpressionBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/ExistsSubqueryExpressionBinder.java @@ -19,10 +19,13 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; -import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExistsSubqueryExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; +import java.util.Map; + /** * Exists subquery expression binder. */ @@ -33,12 +36,13 @@ public final class ExistsSubqueryExpressionBinder { * Bind exists subquery expression with metadata. 
* * @param segment exists subquery expression segment - * @param metaData metaData - * @param defaultDatabaseName default database name + * @param statementBinderContext statement binder context + * @param tableBinderContexts table binder contexts * @return bounded exists subquery expression segment */ - public static ExistsSubqueryExpression bind(final ExistsSubqueryExpression segment, final ShardingSphereMetaData metaData, final String defaultDatabaseName) { - SubquerySegment boundedSubquery = SubquerySegmentBinder.bind(segment.getSubquery(), metaData, defaultDatabaseName); + public static ExistsSubqueryExpression bind(final ExistsSubqueryExpression segment, final SQLStatementBinderContext statementBinderContext, + final Map tableBinderContexts) { + SubquerySegment boundedSubquery = SubquerySegmentBinder.bind(segment.getSubquery(), statementBinderContext, tableBinderContexts); ExistsSubqueryExpression result = new ExistsSubqueryExpression(segment.getStartIndex(), segment.getStopIndex(), boundedSubquery); result.setNot(segment.isNot()); return result; diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/FunctionExpressionSegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/FunctionExpressionSegmentBinder.java new file mode 100644 index 0000000000000..4732ea9d195dd --- /dev/null +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/FunctionExpressionSegmentBinder.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.binder.segment.expression.impl; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.binder.enums.SegmentType; +import org.apache.shardingsphere.infra.binder.segment.expression.ExpressionSegmentBinder; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.FunctionSegment; + +import java.util.Map; + +/** + * Function expression binder. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class FunctionExpressionSegmentBinder { + + /** + * Bind function expression with metadata. 
+ * + * @param segment function expression segment + * @param parentSegmentType parent segment type + * @param statementBinderContext statement binder context + * @param tableBinderContexts table binder contexts + * @param outerTableBinderContexts outer table binder contexts + * @return function segment + */ + public static FunctionSegment bind(final FunctionSegment segment, final SegmentType parentSegmentType, final SQLStatementBinderContext statementBinderContext, + final Map tableBinderContexts, final Map outerTableBinderContexts) { + FunctionSegment result = new FunctionSegment(segment.getStartIndex(), segment.getStopIndex(), segment.getFunctionName(), segment.getText()); + result.setOwner(segment.getOwner()); + for (ExpressionSegment each : segment.getParameters()) { + result.getParameters().add(ExpressionSegmentBinder.bind(each, parentSegmentType, statementBinderContext, tableBinderContexts, outerTableBinderContexts)); + } + return result; + } +} diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/InExpressionBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/InExpressionBinder.java index 3f3f1ba13fe17..75ac6c5a3c000 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/InExpressionBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/InExpressionBinder.java @@ -19,11 +19,16 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.binder.enums.SegmentType; import org.apache.shardingsphere.infra.binder.segment.expression.ExpressionSegmentBinder; -import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import 
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.InExpression; +import java.util.Collections; +import java.util.Map; + /** * In expression binder. */ @@ -34,13 +39,15 @@ public final class InExpressionBinder { * Bind in expression segment with metadata. * * @param segment in expression - * @param metaData metaData - * @param defaultDatabaseName default database name + * @param parentSegmentType parent segment type + * @param statementBinderContext statement binder context + * @param tableBinderContexts table binder contexts * @return bounded in expression */ - public static InExpression bind(final InExpression segment, final ShardingSphereMetaData metaData, final String defaultDatabaseName) { - ExpressionSegment boundedLeft = ExpressionSegmentBinder.bind(segment.getLeft(), metaData, defaultDatabaseName); - ExpressionSegment boundedRight = ExpressionSegmentBinder.bind(segment.getRight(), metaData, defaultDatabaseName); + public static InExpression bind(final InExpression segment, final SegmentType parentSegmentType, final SQLStatementBinderContext statementBinderContext, + final Map tableBinderContexts) { + ExpressionSegment boundedLeft = ExpressionSegmentBinder.bind(segment.getLeft(), parentSegmentType, statementBinderContext, tableBinderContexts, Collections.emptyMap()); + ExpressionSegment boundedRight = ExpressionSegmentBinder.bind(segment.getRight(), parentSegmentType, statementBinderContext, tableBinderContexts, Collections.emptyMap()); return new InExpression(segment.getStartIndex(), segment.getStopIndex(), boundedLeft, boundedRight, segment.isNot()); } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/NotExpressionBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/NotExpressionBinder.java new file mode 100644 index 0000000000000..f1400e589b971 --- 
/dev/null +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/NotExpressionBinder.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.binder.segment.expression.impl; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.binder.enums.SegmentType; +import org.apache.shardingsphere.infra.binder.segment.expression.ExpressionSegmentBinder; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.NotExpression; + +import java.util.Collections; +import java.util.Map; + +/** + * Not expression binder. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class NotExpressionBinder { + + /** + * Bind not expression segment with metadata. 
+ * + * @param segment not expression + * @param parentSegmentType parent segment type + * @param statementBinderContext statement binder context + * @param tableBinderContexts table binder contexts + * @return bounded not expression + */ + public static NotExpression bind(final NotExpression segment, final SegmentType parentSegmentType, final SQLStatementBinderContext statementBinderContext, + final Map tableBinderContexts) { + ExpressionSegment boundedExpression = ExpressionSegmentBinder.bind(segment.getExpression(), parentSegmentType, statementBinderContext, tableBinderContexts, Collections.emptyMap()); + return new NotExpression(segment.getStartIndex(), segment.getStopIndex(), boundedExpression, segment.getNotSign()); + } +} diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/SubqueryExpressionSegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/SubqueryExpressionSegmentBinder.java index ac16a62dbaa03..96881fc6223dc 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/SubqueryExpressionSegmentBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/SubqueryExpressionSegmentBinder.java @@ -19,10 +19,13 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; -import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubqueryExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; +import java.util.Map; + /** * Subquery expression segment binder. 
*/ @@ -33,12 +36,13 @@ public final class SubqueryExpressionSegmentBinder { * Bind subquery expression segment with metadata. * * @param segment subquery expression segment - * @param metaData metaData - * @param defaultDatabaseName default database name + * @param statementBinderContext statement binder context + * @param tableBinderContexts table binder contexts * @return bounded subquery expression segment */ - public static SubqueryExpressionSegment bind(final SubqueryExpressionSegment segment, final ShardingSphereMetaData metaData, final String defaultDatabaseName) { - SubquerySegment boundedSubquery = SubquerySegmentBinder.bind(segment.getSubquery(), metaData, defaultDatabaseName); + public static SubqueryExpressionSegment bind(final SubqueryExpressionSegment segment, final SQLStatementBinderContext statementBinderContext, + final Map tableBinderContexts) { + SubquerySegment boundedSubquery = SubquerySegmentBinder.bind(segment.getSubquery(), statementBinderContext, tableBinderContexts); return new SubqueryExpressionSegment(boundedSubquery); } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/SubquerySegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/SubquerySegmentBinder.java index e3b8b42a7f417..7a7273dab298d 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/SubquerySegmentBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/SubquerySegmentBinder.java @@ -19,11 +19,14 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.infra.binder.statement.dml.SelectStatementBinder; -import 
org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement; +import java.util.Map; + /** * Subquery segment binder. */ @@ -34,12 +37,13 @@ public final class SubquerySegmentBinder { * Bind subquery segment with metadata. * * @param segment subquery segment - * @param metaData metaData - * @param defaultDatabaseName default database name + * @param statementBinderContext statement binder context + * @param outerTableBinderContexts outer table binder contexts * @return bounded subquery segment */ - public static SubquerySegment bind(final SubquerySegment segment, final ShardingSphereMetaData metaData, final String defaultDatabaseName) { - SelectStatement boundedSelectStatement = new SelectStatementBinder().bind(segment.getSelect(), metaData, defaultDatabaseName); + public static SubquerySegment bind(final SubquerySegment segment, final SQLStatementBinderContext statementBinderContext, final Map outerTableBinderContexts) { + SelectStatement boundedSelectStatement = new SelectStatementBinder().bindCorrelateSubquery(segment.getSelect(), statementBinderContext.getMetaData(), + statementBinderContext.getDefaultDatabaseName(), outerTableBinderContexts, statementBinderContext.getExternalTableBinderContexts()); SubquerySegment result = new SubquerySegment(segment.getStartIndex(), segment.getStopIndex(), boundedSelectStatement); result.setSubqueryType(segment.getSubqueryType()); return result; diff --git a/features/sharding/plugin/nanoid/src/main/java/org/apache/shardingsphere/sharding/nanoid/algorithm/keygen/NanoIdKeyGenerateAlgorithm.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/FunctionTableSegmentBinderContext.java similarity index 58% rename from 
features/sharding/plugin/nanoid/src/main/java/org/apache/shardingsphere/sharding/nanoid/algorithm/keygen/NanoIdKeyGenerateAlgorithm.java rename to infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/FunctionTableSegmentBinderContext.java index 4ff26b95d9e1a..9fc755f337469 100644 --- a/features/sharding/plugin/nanoid/src/main/java/org/apache/shardingsphere/sharding/nanoid/algorithm/keygen/NanoIdKeyGenerateAlgorithm.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/FunctionTableSegmentBinderContext.java @@ -15,25 +15,26 @@ * limitations under the License. */ -package org.apache.shardingsphere.sharding.nanoid.algorithm.keygen; +package org.apache.shardingsphere.infra.binder.segment.from; -import com.aventrix.jnanoid.jnanoid.NanoIdUtils; -import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment; -import java.util.concurrent.ThreadLocalRandom; +import java.util.Collection; +import java.util.Collections; +import java.util.Optional; /** - * NanoId key generate algorithm. + * Function table segment binder context. 
*/ -public final class NanoIdKeyGenerateAlgorithm implements KeyGenerateAlgorithm { +public final class FunctionTableSegmentBinderContext implements TableSegmentBinderContext { @Override - public String generateKey() { - return NanoIdUtils.randomNanoId(ThreadLocalRandom.current(), NanoIdUtils.DEFAULT_ALPHABET, NanoIdUtils.DEFAULT_SIZE); + public Optional findProjectionSegmentByColumnLabel(final String columnLabel) { + return Optional.empty(); } @Override - public String getType() { - return "NANOID"; + public Collection getProjectionSegments() { + return Collections.emptyList(); } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/SimpleTableSegmentBinderContext.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/SimpleTableSegmentBinderContext.java new file mode 100644 index 0000000000000..82852b7d83d2d --- /dev/null +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/SimpleTableSegmentBinderContext.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.infra.binder.segment.from; + +import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ShorthandProjectionSegment; + +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Optional; + +/** + * Simple table segment binder context. + */ +@RequiredArgsConstructor +public final class SimpleTableSegmentBinderContext implements TableSegmentBinderContext { + + private final Map columnLabelProjectionSegments; + + public SimpleTableSegmentBinderContext(final Collection projectionSegments) { + columnLabelProjectionSegments = new LinkedHashMap<>(projectionSegments.size(), 1F); + projectionSegments.forEach(each -> putColumnLabelProjectionSegments(each, columnLabelProjectionSegments)); + } + + private void putColumnLabelProjectionSegments(final ProjectionSegment projectionSegment, final Map columnLabelProjectionSegments) { + if (projectionSegment instanceof ShorthandProjectionSegment) { + ((ShorthandProjectionSegment) projectionSegment).getActualProjectionSegments().forEach(each -> columnLabelProjectionSegments.put(each.getColumnLabel().toLowerCase(), each)); + } else { + columnLabelProjectionSegments.put(projectionSegment.getColumnLabel().toLowerCase(), projectionSegment); + } + } + + @Override + public Optional findProjectionSegmentByColumnLabel(final String columnLabel) { + return Optional.ofNullable(columnLabelProjectionSegments.get(columnLabel.toLowerCase())); + } + + @Override + public Collection getProjectionSegments() { + return columnLabelProjectionSegments.values(); + } +} diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/TableSegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/TableSegmentBinder.java index d2f857506c466..37b778e3afa5f 100644 --- 
a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/TableSegmentBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/TableSegmentBinder.java @@ -19,11 +19,13 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.binder.segment.from.impl.DeleteMultiTableSegmentBinder; import org.apache.shardingsphere.infra.binder.segment.from.impl.JoinTableSegmentBinder; import org.apache.shardingsphere.infra.binder.segment.from.impl.SimpleTableSegmentBinder; import org.apache.shardingsphere.infra.binder.segment.from.impl.SubqueryTableSegmentBinder; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.DeleteMultiTableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.FunctionTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.JoinTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SubqueryTableSegment; @@ -41,22 +43,28 @@ public final class TableSegmentBinder { * Bind table segment with metadata. 
* * @param segment table segment - * @param metaData meta data - * @param defaultDatabaseName default database name - * @param databaseType database type + * @param statementBinderContext statement binder context * @param tableBinderContexts table binder contexts + * @param outerTableBinderContexts outer table binder contexts * @return bounded table segment */ - public static TableSegment bind(final TableSegment segment, final ShardingSphereMetaData metaData, final String defaultDatabaseName, - final DatabaseType databaseType, final Map tableBinderContexts) { + public static TableSegment bind(final TableSegment segment, final SQLStatementBinderContext statementBinderContext, final Map tableBinderContexts, + final Map outerTableBinderContexts) { if (segment instanceof SimpleTableSegment) { - return SimpleTableSegmentBinder.bind((SimpleTableSegment) segment, metaData, defaultDatabaseName, databaseType, tableBinderContexts); + return SimpleTableSegmentBinder.bind((SimpleTableSegment) segment, statementBinderContext, tableBinderContexts); } if (segment instanceof JoinTableSegment) { - return JoinTableSegmentBinder.bind((JoinTableSegment) segment, metaData, defaultDatabaseName, databaseType, tableBinderContexts); + return JoinTableSegmentBinder.bind((JoinTableSegment) segment, statementBinderContext, tableBinderContexts, outerTableBinderContexts); } if (segment instanceof SubqueryTableSegment) { - return SubqueryTableSegmentBinder.bind((SubqueryTableSegment) segment, metaData, defaultDatabaseName, tableBinderContexts); + return SubqueryTableSegmentBinder.bind((SubqueryTableSegment) segment, statementBinderContext, tableBinderContexts, outerTableBinderContexts); + } + if (segment instanceof DeleteMultiTableSegment) { + return DeleteMultiTableSegmentBinder.bind((DeleteMultiTableSegment) segment, statementBinderContext, tableBinderContexts); + } + if (segment instanceof FunctionTableSegment) { + tableBinderContexts.put(segment.getAliasName().orElseGet(() -> 
((FunctionTableSegment) segment).getTableFunction().getText()).toLowerCase(), new FunctionTableSegmentBinderContext()); + return segment; } return segment; } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/TableSegmentBinderContext.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/TableSegmentBinderContext.java index 7f826fd107b39..d70691525272b 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/TableSegmentBinderContext.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/TableSegmentBinderContext.java @@ -18,48 +18,27 @@ package org.apache.shardingsphere.infra.binder.segment.from; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment; -import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ShorthandProjectionSegment; import java.util.Collection; -import java.util.LinkedHashMap; -import java.util.Map; +import java.util.Optional; /** * Table segment binder context. 
*/ -public final class TableSegmentBinderContext { - - private final Map columnLabelProjectionSegments; - - public TableSegmentBinderContext(final Collection projectionSegments) { - columnLabelProjectionSegments = new LinkedHashMap<>(projectionSegments.size(), 1F); - projectionSegments.forEach(each -> putColumnLabelProjectionSegments(each, columnLabelProjectionSegments)); - } - - private void putColumnLabelProjectionSegments(final ProjectionSegment projectionSegment, final Map columnLabelProjectionSegments) { - if (projectionSegment instanceof ShorthandProjectionSegment) { - ((ShorthandProjectionSegment) projectionSegment).getActualProjectionSegments().forEach(each -> columnLabelProjectionSegments.put(each.getColumnLabel().toLowerCase(), each)); - } else { - columnLabelProjectionSegments.put(projectionSegment.getColumnLabel().toLowerCase(), projectionSegment); - } - } +public interface TableSegmentBinderContext { /** - * Get projection segment by column label. - * + * Find projection segment by column label. + * * @param columnLabel column label * @return projection segment */ - public ProjectionSegment getProjectionSegmentByColumnLabel(final String columnLabel) { - return columnLabelProjectionSegments.get(columnLabel.toLowerCase()); - } + Optional findProjectionSegmentByColumnLabel(String columnLabel); /** * Get projection segments. 
* * @return projection segments */ - public Collection getProjectionSegments() { - return columnLabelProjectionSegments.values(); - } + Collection getProjectionSegments(); } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/impl/DeleteMultiTableSegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/impl/DeleteMultiTableSegmentBinder.java new file mode 100644 index 0000000000000..86628e72da9a8 --- /dev/null +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/impl/DeleteMultiTableSegmentBinder.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.infra.binder.segment.from.impl; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinder; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.DeleteMultiTableSegment; + +import java.util.Collections; +import java.util.Map; + +/** + * Delete multi table segment binder. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class DeleteMultiTableSegmentBinder { + + /** + * Bind delete multi table segment with metadata. + * + * @param segment delete multi table segment + * @param statementBinderContext statement binder context + * @param tableBinderContexts table binder contexts + * @return bounded join table segment + */ + public static DeleteMultiTableSegment bind(final DeleteMultiTableSegment segment, final SQLStatementBinderContext statementBinderContext, + final Map tableBinderContexts) { + DeleteMultiTableSegment result = new DeleteMultiTableSegment(); + result.setStartIndex(segment.getStartIndex()); + result.setStopIndex(segment.getStopIndex()); + result.getActualDeleteTables().addAll(segment.getActualDeleteTables()); + result.setRelationTable(TableSegmentBinder.bind(segment.getRelationTable(), statementBinderContext, tableBinderContexts, Collections.emptyMap())); + return result; + } +} diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/impl/JoinTableSegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/impl/JoinTableSegmentBinder.java index 9f67fc6e776b3..d8fe4caedc0da 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/impl/JoinTableSegmentBinder.java +++ 
b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/impl/JoinTableSegmentBinder.java @@ -17,16 +17,22 @@ package org.apache.shardingsphere.infra.binder.segment.from.impl; +import com.google.common.collect.LinkedHashMultimap; +import com.google.common.collect.Multimap; import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.binder.enums.SegmentType; +import org.apache.shardingsphere.infra.binder.segment.expression.ExpressionSegmentBinder; +import org.apache.shardingsphere.infra.binder.segment.expression.impl.ColumnSegmentBinder; import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinder; import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.mysql.type.MySQLDatabaseType; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; -import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.sql.parser.sql.common.enums.JoinType; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ColumnProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.JoinTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; @@ -34,8 +40,10 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableSegment; import java.util.Collection; +import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedList; +import java.util.List; import java.util.Map; /** @@ -48,38 
+56,64 @@ public final class JoinTableSegmentBinder { * Bind join table segment with metadata. * * @param segment join table segment - * @param metaData meta data - * @param defaultDatabaseName default database name - * @param databaseType database type + * @param statementBinderContext statement binder context * @param tableBinderContexts table binder contexts + * @param outerTableBinderContexts outer table binder contexts * @return bounded join table segment */ - public static JoinTableSegment bind(final JoinTableSegment segment, final ShardingSphereMetaData metaData, final String defaultDatabaseName, - final DatabaseType databaseType, final Map tableBinderContexts) { + public static JoinTableSegment bind(final JoinTableSegment segment, final SQLStatementBinderContext statementBinderContext, final Map tableBinderContexts, + final Map outerTableBinderContexts) { JoinTableSegment result = new JoinTableSegment(); result.setStartIndex(segment.getStartIndex()); result.setStopIndex(segment.getStopIndex()); segment.getAliasSegment().ifPresent(result::setAlias); - result.setLeft(TableSegmentBinder.bind(segment.getLeft(), metaData, defaultDatabaseName, databaseType, tableBinderContexts)); + result.setLeft(TableSegmentBinder.bind(segment.getLeft(), statementBinderContext, tableBinderContexts, outerTableBinderContexts)); result.setNatural(segment.isNatural()); result.setJoinType(segment.getJoinType()); - result.setRight(TableSegmentBinder.bind(segment.getRight(), metaData, defaultDatabaseName, databaseType, tableBinderContexts)); - result.setCondition(segment.getCondition()); - // TODO bind condition and using column in join table segment - result.setUsing(segment.getUsing()); - result.getJoinTableProjectionSegments().addAll(getJoinTableProjectionSegments(result, databaseType, tableBinderContexts)); + result.setRight(TableSegmentBinder.bind(segment.getRight(), statementBinderContext, tableBinderContexts, outerTableBinderContexts)); + 
result.setCondition(ExpressionSegmentBinder.bind(segment.getCondition(), SegmentType.JOIN_ON, statementBinderContext, tableBinderContexts, Collections.emptyMap())); + result.setUsing(bindUsingColumns(segment.getUsing(), tableBinderContexts)); + result.getUsing().forEach(each -> statementBinderContext.getUsingColumnNames().add(each.getIdentifier().getValue().toLowerCase())); + Map usingColumnsByNaturalJoin = Collections.emptyMap(); + if (result.isNatural()) { + usingColumnsByNaturalJoin = getUsingColumnsByNaturalJoin(result, tableBinderContexts); + Collection derivedUsingColumns = getDerivedUsingColumns(usingColumnsByNaturalJoin); + result.setDerivedUsing(bindUsingColumns(derivedUsingColumns, tableBinderContexts)); + result.getDerivedUsing().forEach(each -> statementBinderContext.getUsingColumnNames().add(each.getIdentifier().getValue().toLowerCase())); + } + result.getDerivedJoinTableProjectionSegments().addAll(getDerivedJoinTableProjectionSegments(result, statementBinderContext.getDatabaseType(), usingColumnsByNaturalJoin, tableBinderContexts)); + statementBinderContext.getJoinTableProjectionSegments().addAll(result.getDerivedJoinTableProjectionSegments()); + return result; + } + + private static Collection getDerivedUsingColumns(final Map usingColumnsByNaturalJoin) { + Collection result = new LinkedList<>(); + for (ProjectionSegment each : usingColumnsByNaturalJoin.values()) { + if (each instanceof ColumnProjectionSegment) { + ColumnSegment column = ((ColumnProjectionSegment) each).getColumn(); + result.add(new ColumnSegment(column.getStartIndex(), column.getStopIndex(), column.getIdentifier())); + } + } + return result; + } + + private static List bindUsingColumns(final Collection usingColumns, final Map tableBinderContexts) { + List result = new LinkedList<>(); + for (ColumnSegment each : usingColumns) { + result.add(ColumnSegmentBinder.bindUsingColumn(each, SegmentType.JOIN_USING, tableBinderContexts)); + } return result; } - private static Collection 
getJoinTableProjectionSegments(final JoinTableSegment segment, final DatabaseType databaseType, - final Map tableBinderContexts) { + private static Collection getDerivedJoinTableProjectionSegments(final JoinTableSegment segment, final DatabaseType databaseType, + final Map usingColumnsByNaturalJoin, + final Map tableBinderContexts) { Collection projectionSegments = getProjectionSegments(segment, databaseType, tableBinderContexts); if (segment.getUsing().isEmpty() && !segment.isNatural()) { return projectionSegments; } Collection result = new LinkedList<>(); - Map originalUsingColumns = - segment.getUsing().isEmpty() ? getUsingColumnsByNaturalJoin(segment, tableBinderContexts) : getUsingColumns(projectionSegments, segment.getUsing()); + Map originalUsingColumns = segment.getUsing().isEmpty() ? usingColumnsByNaturalJoin : getUsingColumns(projectionSegments, segment.getUsing(), segment.getJoinType()); Collection orderedUsingColumns = databaseType instanceof MySQLDatabaseType ? getJoinUsingColumnsByProjectionOrder(projectionSegments, originalUsingColumns) : originalUsingColumns.values(); result.addAll(orderedUsingColumns); @@ -106,7 +140,7 @@ private static Collection getProjectionSegments(final TableSe String tableAliasOrName = tableSegment.getAliasName().orElseGet(() -> ((SimpleTableSegment) tableSegment).getTableName().getIdentifier().getValue()); result.addAll(getProjectionSegmentsByTableAliasOrName(tableBinderContexts, tableAliasOrName)); } else if (tableSegment instanceof JoinTableSegment) { - result.addAll(((JoinTableSegment) tableSegment).getJoinTableProjectionSegments()); + result.addAll(((JoinTableSegment) tableSegment).getDerivedJoinTableProjectionSegments()); } else if (tableSegment instanceof SubqueryTableSegment) { result.addAll(getProjectionSegmentsByTableAliasOrName(tableBinderContexts, tableSegment.getAliasName().orElse(""))); } @@ -114,9 +148,9 @@ private static Collection getProjectionSegments(final TableSe } private static Collection 
getProjectionSegmentsByTableAliasOrName(final Map tableBinderContexts, final String tableAliasOrName) { - ShardingSpherePreconditions.checkState(tableBinderContexts.containsKey(tableAliasOrName), + ShardingSpherePreconditions.checkState(tableBinderContexts.containsKey(tableAliasOrName.toLowerCase()), () -> new IllegalStateException(String.format("Can not find table binder context by table alias or name %s.", tableAliasOrName))); - return tableBinderContexts.get(tableAliasOrName).getProjectionSegments(); + return tableBinderContexts.get(tableAliasOrName.toLowerCase()).getProjectionSegments(); } private static Map getUsingColumnsByNaturalJoin(final JoinTableSegment segment, final Map tableBinderContexts) { @@ -133,14 +167,16 @@ private static Map getUsingColumnsByNaturalJoin(final return result; } - private static Map getUsingColumns(final Collection projectionSegments, final Collection usingColumns) { - Map columnLabelProjectionSegments = new LinkedHashMap<>(projectionSegments.size(), 1F); - projectionSegments.forEach(each -> columnLabelProjectionSegments.putIfAbsent(each.getColumnLabel().toLowerCase(), each)); + private static Map getUsingColumns(final Collection projectionSegments, final Collection usingColumns, final String joinType) { + Multimap columnLabelProjectionSegments = LinkedHashMultimap.create(); + projectionSegments.forEach(each -> columnLabelProjectionSegments.put(each.getColumnLabel().toLowerCase(), each)); Map result = new LinkedHashMap<>(); for (ColumnSegment each : usingColumns) { - ProjectionSegment projectionSegment = columnLabelProjectionSegments.get(each.getIdentifier().getValue().toLowerCase()); - if (null != projectionSegment) { - result.put(projectionSegment.getColumnLabel().toLowerCase(), projectionSegment); + LinkedList groupProjectionSegments = new LinkedList<>(columnLabelProjectionSegments.get(each.getIdentifier().getValue().toLowerCase())); + if (!groupProjectionSegments.isEmpty()) { + ProjectionSegment targetProjectionSegment = + 
JoinType.RIGHT.name().equalsIgnoreCase(joinType) ? groupProjectionSegments.descendingIterator().next() : groupProjectionSegments.iterator().next(); + result.put(targetProjectionSegment.getColumnLabel().toLowerCase(), targetProjectionSegment); } } return result; diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SimpleTableSegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SimpleTableSegmentBinder.java index 5f1188e1475b6..8e2df609fae88 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SimpleTableSegmentBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SimpleTableSegmentBinder.java @@ -19,13 +19,17 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.binder.segment.from.SimpleTableSegmentBinderContext; import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.infra.database.core.metadata.database.DialectDatabaseMetaData; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; import org.apache.shardingsphere.infra.database.opengauss.type.OpenGaussDatabaseType; import org.apache.shardingsphere.infra.database.postgresql.type.PostgreSQLDatabaseType; -import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.exception.TableNotExistsException; +import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; +import org.apache.shardingsphere.infra.metadata.database.schema.builder.SystemSchemaBuilderRule; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; import 
org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; @@ -33,6 +37,8 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ColumnProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.bounded.ColumnSegmentBoundedInfo; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.bounded.TableSegmentBoundedInfo; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableNameSegment; import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; @@ -65,64 +71,78 @@ public final class SimpleTableSegmentBinder { * Bind simple table segment with metadata. 
* * @param segment simple table segment - * @param metaData metaData - * @param defaultDatabaseName default database name - * @param databaseType database type + * @param statementBinderContext statement binder context * @param tableBinderContexts table binder contexts * @return bounded simple table segment */ - public static SimpleTableSegment bind(final SimpleTableSegment segment, final ShardingSphereMetaData metaData, final String defaultDatabaseName, - final DatabaseType databaseType, final Map tableBinderContexts) { - IdentifierValue originalDatabase = getDatabaseName(segment, defaultDatabaseName, databaseType); - IdentifierValue originalSchema = getSchemaName(segment, defaultDatabaseName, databaseType); - // TODO check database and schema - ShardingSphereSchema schema = metaData.getDatabase(originalDatabase.getValue()).getSchema(originalSchema.getValue()); - tableBinderContexts.put(segment.getAliasName().orElseGet(() -> segment.getTableName().getIdentifier().getValue()), - createSimpleTableBinderContext(segment, schema, originalDatabase, originalSchema, databaseType)); + public static SimpleTableSegment bind(final SimpleTableSegment segment, final SQLStatementBinderContext statementBinderContext, final Map tableBinderContexts) { + fillPivotColumnNamesInBinderContext(segment, statementBinderContext); + IdentifierValue originalDatabase = getDatabaseName(segment, statementBinderContext); + IdentifierValue originalSchema = getSchemaName(segment, statementBinderContext); + checkTableExists(segment.getTableName().getIdentifier().getValue(), statementBinderContext, originalDatabase.getValue(), originalSchema.getValue()); + ShardingSphereSchema schema = statementBinderContext.getMetaData().getDatabase(originalDatabase.getValue()).getSchema(originalSchema.getValue()); + tableBinderContexts.put((segment.getAliasName().orElseGet(() -> segment.getTableName().getIdentifier().getValue())).toLowerCase(), + createSimpleTableBinderContext(segment, schema, originalDatabase, 
originalSchema, statementBinderContext)); TableNameSegment tableNameSegment = new TableNameSegment(segment.getTableName().getStartIndex(), segment.getTableName().getStopIndex(), segment.getTableName().getIdentifier()); - tableNameSegment.setOriginalDatabase(originalDatabase); - tableNameSegment.setOriginalSchema(originalSchema); + tableNameSegment.setTableBoundedInfo(new TableSegmentBoundedInfo(originalDatabase, originalSchema)); SimpleTableSegment result = new SimpleTableSegment(tableNameSegment); segment.getOwner().ifPresent(result::setOwner); segment.getAliasSegment().ifPresent(result::setAlias); return result; } - private static IdentifierValue getDatabaseName(final SimpleTableSegment tableSegment, final String defaultDatabaseName, final DatabaseType databaseType) { - DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData(); + private static void fillPivotColumnNamesInBinderContext(final SimpleTableSegment segment, final SQLStatementBinderContext statementBinderContext) { + segment.getPivot().ifPresent(optional -> optional.getPivotColumns().forEach(each -> statementBinderContext.getPivotColumnNames().add(each.getIdentifier().getValue().toLowerCase()))); + } + + private static IdentifierValue getDatabaseName(final SimpleTableSegment tableSegment, final SQLStatementBinderContext statementBinderContext) { + DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(statementBinderContext.getDatabaseType()).getDialectDatabaseMetaData(); Optional owner = dialectDatabaseMetaData.getDefaultSchema().isPresent() ? 
tableSegment.getOwner().flatMap(OwnerSegment::getOwner) : tableSegment.getOwner(); - return new IdentifierValue(owner.map(optional -> optional.getIdentifier().getValue()).orElse(defaultDatabaseName)); + return new IdentifierValue(owner.map(optional -> optional.getIdentifier().getValue()).orElse(statementBinderContext.getDefaultDatabaseName())); } - private static IdentifierValue getSchemaName(final SimpleTableSegment segment, final String defaultDatabaseName, final DatabaseType databaseType) { + private static IdentifierValue getSchemaName(final SimpleTableSegment segment, final SQLStatementBinderContext statementBinderContext) { if (segment.getOwner().isPresent()) { return segment.getOwner().get().getIdentifier(); } // TODO getSchemaName according to search path + DatabaseType databaseType = statementBinderContext.getDatabaseType(); if ((databaseType instanceof PostgreSQLDatabaseType || databaseType instanceof OpenGaussDatabaseType) && SYSTEM_CATALOG_TABLES.contains(segment.getTableName().getIdentifier().getValue().toLowerCase())) { return new IdentifierValue(PG_CATALOG); } - return new IdentifierValue(new DatabaseTypeRegistry(databaseType).getDefaultSchemaName(defaultDatabaseName)); + return new IdentifierValue(new DatabaseTypeRegistry(databaseType).getDefaultSchemaName(statementBinderContext.getDefaultDatabaseName())); } - private static TableSegmentBinderContext createSimpleTableBinderContext(final SimpleTableSegment segment, final ShardingSphereSchema schema, - final IdentifierValue originalDatabase, final IdentifierValue originalSchema, final DatabaseType databaseType) { + private static SimpleTableSegmentBinderContext createSimpleTableBinderContext(final SimpleTableSegment segment, final ShardingSphereSchema schema, + final IdentifierValue originalDatabase, final IdentifierValue originalSchema, + final SQLStatementBinderContext statementBinderContext) { Collection columnNames = 
Optional.ofNullable(schema.getTable(segment.getTableName().getIdentifier().getValue())).map(ShardingSphereTable::getColumnValues).orElseGet(Collections::emptyList); Collection projectionSegments = new LinkedList<>(); - DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData(); + DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(statementBinderContext.getDatabaseType()).getDialectDatabaseMetaData(); for (ShardingSphereColumn each : columnNames) { ColumnSegment columnSegment = new ColumnSegment(0, 0, new IdentifierValue(each.getName(), dialectDatabaseMetaData.getQuoteCharacter())); columnSegment.setOwner(new OwnerSegment(0, 0, segment.getAlias().orElse(segment.getTableName().getIdentifier()))); - columnSegment.setOriginalDatabase(originalDatabase); - columnSegment.setOriginalSchema(originalSchema); - columnSegment.setOriginalTable(segment.getTableName().getIdentifier()); - columnSegment.setOriginalColumn(new IdentifierValue(each.getName(), dialectDatabaseMetaData.getQuoteCharacter())); + columnSegment.setColumnBoundedInfo(new ColumnSegmentBoundedInfo(originalDatabase, originalSchema, segment.getTableName().getIdentifier(), + new IdentifierValue(each.getName(), dialectDatabaseMetaData.getQuoteCharacter()))); ColumnProjectionSegment columnProjectionSegment = new ColumnProjectionSegment(columnSegment); columnProjectionSegment.setVisible(each.isVisible()); projectionSegments.add(columnProjectionSegment); } - return new TableSegmentBinderContext(projectionSegments); + return new SimpleTableSegmentBinderContext(projectionSegments); + } + + private static void checkTableExists(final String tableName, final SQLStatementBinderContext statementBinderContext, final String databaseName, final String schemaName) { + if ("dual".equalsIgnoreCase(tableName)) { + return; + } + if (SystemSchemaBuilderRule.isSystemTable(schemaName, tableName)) { + return; + } + 
ShardingSpherePreconditions.checkState(statementBinderContext.getMetaData().containsDatabase(databaseName) + && statementBinderContext.getMetaData().getDatabase(databaseName).containsSchema(schemaName) + && statementBinderContext.getMetaData().getDatabase(databaseName).getSchema(schemaName).containsTable(tableName), + () -> new TableNotExistsException(tableName)); } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SubqueryTableSegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SubqueryTableSegmentBinder.java index a5f706d60b61a..871a159d9b4af 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SubqueryTableSegmentBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SubqueryTableSegmentBinder.java @@ -20,9 +20,10 @@ import com.google.common.base.Strings; import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.binder.segment.from.SimpleTableSegmentBinderContext; import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.infra.binder.statement.dml.SelectStatementBinder; -import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ColumnProjectionSegment; @@ -30,6 +31,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ShorthandProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.AliasSegment; import 
org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.bounded.ColumnSegmentBoundedInfo; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SubqueryTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement; import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; @@ -48,23 +50,30 @@ public final class SubqueryTableSegmentBinder { * Bind subquery table segment with metadata. * * @param segment join table segment - * @param metaData meta data - * @param defaultDatabaseName default database name + * @param statementBinderContext statement binder context * @param tableBinderContexts table binder contexts + * @param outerTableBinderContexts outer table binder contexts * @return bounded subquery table segment */ - public static SubqueryTableSegment bind(final SubqueryTableSegment segment, final ShardingSphereMetaData metaData, final String defaultDatabaseName, - final Map tableBinderContexts) { - SelectStatement boundedSelect = new SelectStatementBinder().bind(segment.getSubquery().getSelect(), metaData, defaultDatabaseName); + public static SubqueryTableSegment bind(final SubqueryTableSegment segment, final SQLStatementBinderContext statementBinderContext, + final Map tableBinderContexts, final Map outerTableBinderContexts) { + fillPivotColumnNamesInBinderContext(segment, statementBinderContext); + SelectStatement boundedSelect = new SelectStatementBinder().bindCorrelateSubquery(segment.getSubquery().getSelect(), statementBinderContext.getMetaData(), + statementBinderContext.getDefaultDatabaseName(), outerTableBinderContexts, statementBinderContext.getExternalTableBinderContexts()); SubquerySegment boundedSubquerySegment = new SubquerySegment(segment.getSubquery().getStartIndex(), segment.getSubquery().getStopIndex(), boundedSelect); 
boundedSubquerySegment.setSubqueryType(segment.getSubquery().getSubqueryType()); SubqueryTableSegment result = new SubqueryTableSegment(boundedSubquerySegment); segment.getAliasSegment().ifPresent(result::setAlias); IdentifierValue subqueryTableName = segment.getAliasSegment().map(AliasSegment::getIdentifier).orElseGet(() -> new IdentifierValue("")); - tableBinderContexts.put(subqueryTableName.getValue(), new TableSegmentBinderContext(createSubqueryProjections(boundedSelect.getProjections().getProjections(), subqueryTableName))); + tableBinderContexts.put(subqueryTableName.getValue().toLowerCase(), + new SimpleTableSegmentBinderContext(createSubqueryProjections(boundedSelect.getProjections().getProjections(), subqueryTableName))); return result; } + private static void fillPivotColumnNamesInBinderContext(final SubqueryTableSegment segment, final SQLStatementBinderContext statementBinderContext) { + segment.getPivot().ifPresent(optional -> optional.getPivotColumns().forEach(each -> statementBinderContext.getPivotColumnNames().add(each.getIdentifier().getValue().toLowerCase()))); + } + private static Collection createSubqueryProjections(final Collection projections, final IdentifierValue subqueryTableName) { Collection result = new LinkedList<>(); for (ProjectionSegment each : projections) { @@ -84,10 +93,9 @@ private static ColumnProjectionSegment createColumnProjection(final ColumnProjec if (!Strings.isNullOrEmpty(subqueryTableName.getValue())) { newColumnSegment.setOwner(new OwnerSegment(0, 0, subqueryTableName)); } - newColumnSegment.setOriginalColumn(originalColumn.getColumn().getOriginalColumn()); - newColumnSegment.setOriginalTable(originalColumn.getColumn().getOriginalTable()); - newColumnSegment.setOriginalSchema(originalColumn.getColumn().getOriginalSchema()); - newColumnSegment.setOriginalDatabase(originalColumn.getColumn().getOriginalDatabase()); + newColumnSegment.setColumnBoundedInfo( + new 
ColumnSegmentBoundedInfo(originalColumn.getColumn().getColumnBoundedInfo().getOriginalDatabase(), originalColumn.getColumn().getColumnBoundedInfo().getOriginalSchema(), + originalColumn.getColumn().getColumnBoundedInfo().getOriginalTable(), originalColumn.getColumn().getColumnBoundedInfo().getOriginalColumn())); ColumnProjectionSegment result = new ColumnProjectionSegment(newColumnSegment); result.setVisible(originalColumn.isVisible()); return result; diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/lock/LockSegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/lock/LockSegmentBinder.java new file mode 100644 index 0000000000000..c58d2ca8616d9 --- /dev/null +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/lock/LockSegmentBinder.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.infra.binder.segment.lock; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.binder.enums.SegmentType; +import org.apache.shardingsphere.infra.binder.segment.expression.impl.ColumnSegmentBinder; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.LockSegment; + +import java.util.Collection; +import java.util.LinkedList; +import java.util.Map; + +/** + * Lock segment binder. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class LockSegmentBinder { + + /** + * Bind lock segment with metadata. + * + * @param segment lock segment + * @param statementBinderContext statement binder context + * @param tableBinderContexts table binder contexts + * @param outerTableBinderContexts outer table binder contexts + * @return bounded lock segment + */ + public static LockSegment bind(final LockSegment segment, final SQLStatementBinderContext statementBinderContext, + final Map tableBinderContexts, final Map outerTableBinderContexts) { + Collection boundedColumns = new LinkedList<>(); + segment.getColumns().forEach(each -> boundedColumns.add(ColumnSegmentBinder.bind(each, SegmentType.LOCK, statementBinderContext, tableBinderContexts, outerTableBinderContexts))); + LockSegment result = new LockSegment(segment.getStartIndex(), segment.getStopIndex()); + result.getTables().addAll(segment.getTables()); + result.getColumns().addAll(boundedColumns); + return result; + } +} diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/ProjectionsSegmentBinder.java 
b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/ProjectionsSegmentBinder.java index 1db1dcf11554a..62cd4b7e67ffc 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/ProjectionsSegmentBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/ProjectionsSegmentBinder.java @@ -23,7 +23,7 @@ import org.apache.shardingsphere.infra.binder.segment.projection.impl.ColumnProjectionSegmentBinder; import org.apache.shardingsphere.infra.binder.segment.projection.impl.ShorthandProjectionSegmentBinder; import org.apache.shardingsphere.infra.binder.segment.projection.impl.SubqueryProjectionSegmentBinder; -import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ColumnProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionsSegment; @@ -31,6 +31,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.SubqueryProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableSegment; +import java.util.LinkedHashMap; import java.util.Map; /** @@ -43,30 +44,34 @@ public final class ProjectionsSegmentBinder { * Bind projections segment with metadata. 
* * @param segment table segment - * @param metaData meta data - * @param defaultDatabaseName default database name + * @param statementBinderContext statement binder context * @param boundedTableSegment bounded table segment * @param tableBinderContexts table binder contexts + * @param outerTableBinderContexts outer table binder contexts * @return bounded projections segment */ - public static ProjectionsSegment bind(final ProjectionsSegment segment, final ShardingSphereMetaData metaData, final String defaultDatabaseName, final TableSegment boundedTableSegment, - final Map tableBinderContexts) { + public static ProjectionsSegment bind(final ProjectionsSegment segment, final SQLStatementBinderContext statementBinderContext, final TableSegment boundedTableSegment, + final Map tableBinderContexts, final Map outerTableBinderContexts) { ProjectionsSegment result = new ProjectionsSegment(segment.getStartIndex(), segment.getStopIndex()); result.setDistinctRow(segment.isDistinctRow()); - segment.getProjections().forEach(each -> result.getProjections().add(bind(each, metaData, defaultDatabaseName, boundedTableSegment, tableBinderContexts))); + segment.getProjections().forEach(each -> result.getProjections().add(bind(each, statementBinderContext, boundedTableSegment, tableBinderContexts, outerTableBinderContexts))); return result; } - private static ProjectionSegment bind(final ProjectionSegment projectionSegment, final ShardingSphereMetaData metaData, final String defaultDatabaseName, final TableSegment boundedTableSegment, - final Map tableBinderContexts) { + private static ProjectionSegment bind(final ProjectionSegment projectionSegment, final SQLStatementBinderContext statementBinderContext, + final TableSegment boundedTableSegment, final Map tableBinderContexts, + final Map outerTableBinderContexts) { if (projectionSegment instanceof ColumnProjectionSegment) { - return ColumnProjectionSegmentBinder.bind((ColumnProjectionSegment) projectionSegment, tableBinderContexts); 
+ return ColumnProjectionSegmentBinder.bind((ColumnProjectionSegment) projectionSegment, statementBinderContext, tableBinderContexts); } if (projectionSegment instanceof ShorthandProjectionSegment) { return ShorthandProjectionSegmentBinder.bind((ShorthandProjectionSegment) projectionSegment, boundedTableSegment, tableBinderContexts); } if (projectionSegment instanceof SubqueryProjectionSegment) { - return SubqueryProjectionSegmentBinder.bind((SubqueryProjectionSegment) projectionSegment, metaData, defaultDatabaseName); + Map newOuterTableBinderContexts = new LinkedHashMap<>(); + newOuterTableBinderContexts.putAll(outerTableBinderContexts); + newOuterTableBinderContexts.putAll(tableBinderContexts); + return SubqueryProjectionSegmentBinder.bind((SubqueryProjectionSegment) projectionSegment, statementBinderContext, newOuterTableBinderContexts); } // TODO support more ProjectionSegment bind return projectionSegment; diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/ColumnProjectionSegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/ColumnProjectionSegmentBinder.java index d131993c87b46..9b462e138155c 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/ColumnProjectionSegmentBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/ColumnProjectionSegmentBinder.java @@ -19,11 +19,14 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.binder.enums.SegmentType; import org.apache.shardingsphere.infra.binder.segment.expression.impl.ColumnSegmentBinder; import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import 
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ColumnProjectionSegment; +import java.util.Collections; import java.util.Map; /** @@ -36,11 +39,13 @@ public final class ColumnProjectionSegmentBinder { * Bind column projection segment with metadata. * * @param segment table segment + * @param statementBinderContext statement binder context * @param tableBinderContexts table binder contexts * @return bounded column projection segment */ - public static ColumnProjectionSegment bind(final ColumnProjectionSegment segment, final Map tableBinderContexts) { - ColumnSegment boundedColumn = ColumnSegmentBinder.bind(segment.getColumn(), tableBinderContexts); + public static ColumnProjectionSegment bind(final ColumnProjectionSegment segment, final SQLStatementBinderContext statementBinderContext, + final Map tableBinderContexts) { + ColumnSegment boundedColumn = ColumnSegmentBinder.bind(segment.getColumn(), SegmentType.PROJECTION, statementBinderContext, tableBinderContexts, Collections.emptyMap()); ColumnProjectionSegment result = new ColumnProjectionSegment(boundedColumn); segment.getAliasSegment().ifPresent(result::setAlias); result.setVisible(segment.isVisible()); diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/ShorthandProjectionSegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/ShorthandProjectionSegmentBinder.java index 0f96d49f80d4b..a6c56c0f47704 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/ShorthandProjectionSegmentBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/ShorthandProjectionSegmentBinder.java @@ -59,9 +59,9 @@ public static ShorthandProjectionSegment bind(final ShorthandProjectionSegment s } private static Collection getProjectionSegmentsByTableAliasOrName(final Map tableBinderContexts, final String 
tableAliasOrName) { - ShardingSpherePreconditions.checkState(tableBinderContexts.containsKey(tableAliasOrName), + ShardingSpherePreconditions.checkState(tableBinderContexts.containsKey(tableAliasOrName.toLowerCase()), () -> new IllegalStateException(String.format("Can not find table binder context by table alias or name %s.", tableAliasOrName))); - return tableBinderContexts.get(tableAliasOrName).getProjectionSegments(); + return tableBinderContexts.get(tableAliasOrName.toLowerCase()).getProjectionSegments(); } private static void expandVisibleColumn(final Collection projectionSegments, final ShorthandProjectionSegment segment) { @@ -78,7 +78,7 @@ private static void bindNoOwnerProjections(final TableSegment boundedTableSegmen String tableAliasOrName = boundedTableSegment.getAliasName().orElseGet(() -> ((SimpleTableSegment) boundedTableSegment).getTableName().getIdentifier().getValue()); expandVisibleColumn(getProjectionSegmentsByTableAliasOrName(tableBinderContexts, tableAliasOrName), segment); } else if (boundedTableSegment instanceof JoinTableSegment) { - expandVisibleColumn(((JoinTableSegment) boundedTableSegment).getJoinTableProjectionSegments(), segment); + expandVisibleColumn(((JoinTableSegment) boundedTableSegment).getDerivedJoinTableProjectionSegments(), segment); } else if (boundedTableSegment instanceof SubqueryTableSegment) { expandVisibleColumn(getProjectionSegmentsByTableAliasOrName(tableBinderContexts, boundedTableSegment.getAliasName().orElse("")), segment); } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/SubqueryProjectionSegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/SubqueryProjectionSegmentBinder.java index 47a9431a5745b..b9bce1b6a8e1d 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/SubqueryProjectionSegmentBinder.java +++ 
b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/SubqueryProjectionSegmentBinder.java @@ -20,10 +20,13 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; import org.apache.shardingsphere.infra.binder.segment.expression.impl.SubquerySegmentBinder; -import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.SubqueryProjectionSegment; +import java.util.Map; + /** * Subquery projection segment binder. */ @@ -34,12 +37,13 @@ public final class SubqueryProjectionSegmentBinder { * Bind subquery projection segment with metadata. * * @param segment subquery projection segment - * @param metaData meta data - * @param defaultDatabaseName default database name + * @param statementBinderContext statement binder context + * @param tableBinderContexts table binder contexts * @return bounded subquery projection segment */ - public static SubqueryProjectionSegment bind(final SubqueryProjectionSegment segment, final ShardingSphereMetaData metaData, final String defaultDatabaseName) { - SubquerySegment boundedSubquerySegment = SubquerySegmentBinder.bind(segment.getSubquery(), metaData, defaultDatabaseName); + public static SubqueryProjectionSegment bind(final SubqueryProjectionSegment segment, final SQLStatementBinderContext statementBinderContext, + final Map tableBinderContexts) { + SubquerySegment boundedSubquerySegment = SubquerySegmentBinder.bind(segment.getSubquery(), statementBinderContext, tableBinderContexts); SubqueryProjectionSegment result = new SubqueryProjectionSegment(boundedSubquerySegment, segment.getText()); segment.getAliasSegment().ifPresent(result::setAlias); 
return result; diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/where/WhereSegmentBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/where/WhereSegmentBinder.java index 1fd7f2fdb48d6..f1e91c24e3f80 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/where/WhereSegmentBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/segment/where/WhereSegmentBinder.java @@ -19,10 +19,14 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.binder.enums.SegmentType; import org.apache.shardingsphere.infra.binder.segment.expression.ExpressionSegmentBinder; -import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; +import java.util.Map; + /** * Where segment binder. */ @@ -33,11 +37,14 @@ public final class WhereSegmentBinder { * Bind where segment with metadata. 
* * @param segment where segment - * @param metaData meta data - * @param defaultDatabaseName default database name + * @param statementBinderContext statement binder context + * @param tableBinderContexts table binder contexts + * @param outerTableBinderContexts outer table binder contexts * @return bounded where segment */ - public static WhereSegment bind(final WhereSegment segment, final ShardingSphereMetaData metaData, final String defaultDatabaseName) { - return new WhereSegment(segment.getStartIndex(), segment.getStopIndex(), ExpressionSegmentBinder.bind(segment.getExpr(), metaData, defaultDatabaseName)); + public static WhereSegment bind(final WhereSegment segment, final SQLStatementBinderContext statementBinderContext, + final Map tableBinderContexts, final Map outerTableBinderContexts) { + return new WhereSegment(segment.getStartIndex(), segment.getStopIndex(), + ExpressionSegmentBinder.bind(segment.getExpr(), SegmentType.PREDICATE, statementBinderContext, tableBinderContexts, outerTableBinderContexts)); } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/SQLStatementBinderContext.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/SQLStatementBinderContext.java new file mode 100644 index 0000000000000..34dafe75eb595 --- /dev/null +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/SQLStatementBinderContext.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.binder.statement; + +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import org.apache.commons.collections4.map.CaseInsensitiveMap; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment; + +import java.util.Collection; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.Map; + +/** + * SQL statement binder context. 
+ */ +@RequiredArgsConstructor +@Getter +public final class SQLStatementBinderContext { + + private final ShardingSphereMetaData metaData; + + private final String defaultDatabaseName; + + private final DatabaseType databaseType; + + private final Collection variableNames; + + private final Collection usingColumnNames = new HashSet<>(); + + private final Collection joinTableProjectionSegments = new LinkedList<>(); + + private final Map externalTableBinderContexts = new CaseInsensitiveMap<>(); + + private final Collection pivotColumnNames = new HashSet<>(); +} diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/DeleteStatementBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/DeleteStatementBinder.java new file mode 100644 index 0000000000000..d77e0443f4a13 --- /dev/null +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/DeleteStatementBinder.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.infra.binder.statement.dml; + +import lombok.SneakyThrows; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinder; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.segment.where.WhereSegmentBinder; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinder; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; +import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.DeleteStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.handler.dml.DeleteStatementHandler; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; + +/** + * Update statement binder. + */ +public final class DeleteStatementBinder implements SQLStatementBinder { + + @Override + public DeleteStatement bind(final DeleteStatement sqlStatement, final ShardingSphereMetaData metaData, final String defaultDatabaseName) { + return bind(sqlStatement, metaData, defaultDatabaseName, Collections.emptyMap()); + } + + @SneakyThrows + private DeleteStatement bind(final DeleteStatement sqlStatement, final ShardingSphereMetaData metaData, final String defaultDatabaseName, + final Map externalTableBinderContexts) { + DeleteStatement result = sqlStatement.getClass().getDeclaredConstructor().newInstance(); + Map tableBinderContexts = new LinkedHashMap<>(); + SQLStatementBinderContext statementBinderContext = new SQLStatementBinderContext(metaData, defaultDatabaseName, sqlStatement.getDatabaseType(), sqlStatement.getVariableNames()); + statementBinderContext.getExternalTableBinderContexts().putAll(externalTableBinderContexts); + TableSegment boundedTableSegment = 
TableSegmentBinder.bind(sqlStatement.getTable(), statementBinderContext, tableBinderContexts, Collections.emptyMap()); + result.setTable(boundedTableSegment); + sqlStatement.getWhere().ifPresent(optional -> result.setWhere(WhereSegmentBinder.bind(optional, statementBinderContext, tableBinderContexts, Collections.emptyMap()))); + DeleteStatementHandler.getOrderBySegment(sqlStatement).ifPresent(optional -> DeleteStatementHandler.setOrderBySegment(result, optional)); + DeleteStatementHandler.getLimitSegment(sqlStatement).ifPresent(optional -> DeleteStatementHandler.setLimitSegment(result, optional)); + DeleteStatementHandler.getWithSegment(sqlStatement).ifPresent(optional -> DeleteStatementHandler.setWithSegment(result, optional)); + DeleteStatementHandler.getOutputSegment(sqlStatement).ifPresent(optional -> DeleteStatementHandler.setOutputSegment(result, optional)); + result.addParameterMarkerSegments(sqlStatement.getParameterMarkerSegments()); + result.getCommentSegments().addAll(sqlStatement.getCommentSegments()); + return result; + } +} diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/InsertStatementBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/InsertStatementBinder.java index 754ad56505282..efdce0773da4b 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/InsertStatementBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/InsertStatementBinder.java @@ -18,33 +18,72 @@ package org.apache.shardingsphere.infra.binder.statement.dml; import lombok.SneakyThrows; +import org.apache.shardingsphere.infra.binder.segment.column.InsertColumnsSegmentBinder; import org.apache.shardingsphere.infra.binder.segment.expression.impl.SubquerySegmentBinder; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import 
org.apache.shardingsphere.infra.binder.segment.from.impl.SimpleTableSegmentBinder; import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinder; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ColumnProjectionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.handler.dml.InsertStatementHandler; +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.Map; +import java.util.Optional; + /** * Select statement binder. */ public final class InsertStatementBinder implements SQLStatementBinder { - @SneakyThrows @Override public InsertStatement bind(final InsertStatement sqlStatement, final ShardingSphereMetaData metaData, final String defaultDatabaseName) { + return bind(sqlStatement, metaData, defaultDatabaseName, Collections.emptyMap()); + } + + @SneakyThrows + private InsertStatement bind(final InsertStatement sqlStatement, final ShardingSphereMetaData metaData, final String defaultDatabaseName, + final Map externalTableBinderContexts) { InsertStatement result = sqlStatement.getClass().getDeclaredConstructor().newInstance(); - result.setTable(sqlStatement.getTable()); - sqlStatement.getInsertColumns().ifPresent(result::setInsertColumns); - sqlStatement.getInsertSelect().ifPresent(optional -> result.setInsertSelect(SubquerySegmentBinder.bind(optional, metaData, defaultDatabaseName))); + SQLStatementBinderContext statementBinderContext = new SQLStatementBinderContext(metaData, defaultDatabaseName, 
sqlStatement.getDatabaseType(), sqlStatement.getVariableNames()); + statementBinderContext.getExternalTableBinderContexts().putAll(externalTableBinderContexts); + Map tableBinderContexts = new LinkedHashMap<>(); + Optional.ofNullable(sqlStatement.getTable()).ifPresent(optional -> result.setTable(SimpleTableSegmentBinder.bind(optional, statementBinderContext, tableBinderContexts))); + if (sqlStatement.getInsertColumns().isPresent() && !sqlStatement.getInsertColumns().get().getColumns().isEmpty()) { + result.setInsertColumns(InsertColumnsSegmentBinder.bind(sqlStatement.getInsertColumns().get(), statementBinderContext, tableBinderContexts)); + } else { + sqlStatement.getInsertColumns().ifPresent(result::setInsertColumns); + tableBinderContexts.values().forEach(each -> result.getDerivedInsertColumns().addAll(getVisibleColumns(each.getProjectionSegments()))); + } + sqlStatement.getInsertSelect().ifPresent(optional -> result.setInsertSelect(SubquerySegmentBinder.bind(optional, statementBinderContext, tableBinderContexts))); result.getValues().addAll(sqlStatement.getValues()); InsertStatementHandler.getOnDuplicateKeyColumnsSegment(sqlStatement).ifPresent(optional -> InsertStatementHandler.setOnDuplicateKeyColumnsSegment(result, optional)); InsertStatementHandler.getSetAssignmentSegment(sqlStatement).ifPresent(optional -> InsertStatementHandler.setSetAssignmentSegment(result, optional)); InsertStatementHandler.getWithSegment(sqlStatement).ifPresent(optional -> InsertStatementHandler.setWithSegment(result, optional)); InsertStatementHandler.getOutputSegment(sqlStatement).ifPresent(optional -> InsertStatementHandler.setOutputSegment(result, optional)); - InsertStatementHandler.getInsertMultiTableElementSegment(sqlStatement).ifPresent(optional -> InsertStatementHandler.setInsertMultiTableElementSegment(result, optional)); + InsertStatementHandler.getMultiTableInsertType(sqlStatement).ifPresent(optional -> InsertStatementHandler.setMultiTableInsertType(result, optional)); + 
InsertStatementHandler.getMultiTableInsertIntoSegment(sqlStatement).ifPresent(optional -> InsertStatementHandler.setMultiTableInsertIntoSegment(result, optional)); + InsertStatementHandler.getMultiTableConditionalIntoSegment(sqlStatement).ifPresent(optional -> InsertStatementHandler.setMultiTableConditionalIntoSegment(result, optional)); InsertStatementHandler.getReturningSegment(sqlStatement).ifPresent(optional -> InsertStatementHandler.setReturningSegment(result, optional)); result.addParameterMarkerSegments(sqlStatement.getParameterMarkerSegments()); result.getCommentSegments().addAll(sqlStatement.getCommentSegments()); return result; } + + private Collection getVisibleColumns(final Collection projectionSegments) { + Collection result = new LinkedList<>(); + for (ProjectionSegment each : projectionSegments) { + if (each instanceof ColumnProjectionSegment && each.isVisible()) { + result.add(((ColumnProjectionSegment) each).getColumn()); + } + } + return result; + } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/MergeStatementBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/MergeStatementBinder.java new file mode 100644 index 0000000000000..7a38ca7e139fc --- /dev/null +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/MergeStatementBinder.java @@ -0,0 +1,152 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.binder.statement.dml; + +import lombok.SneakyThrows; +import org.apache.commons.collections4.map.CaseInsensitiveMap; +import org.apache.shardingsphere.infra.binder.enums.SegmentType; +import org.apache.shardingsphere.infra.binder.segment.expression.ExpressionSegmentBinder; +import org.apache.shardingsphere.infra.binder.segment.expression.impl.ColumnSegmentBinder; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinder; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.segment.where.WhereSegmentBinder; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinder; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; +import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.AssignmentSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.ColumnAssignmentSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.InsertValuesSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.SetAssignmentSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; 
+import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.MergeStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.UpdateStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.handler.dml.InsertStatementHandler; +import org.apache.shardingsphere.sql.parser.sql.dialect.handler.dml.UpdateStatementHandler; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +/** + * Merge statement binder. + */ +public final class MergeStatementBinder implements SQLStatementBinder { + + @Override + public MergeStatement bind(final MergeStatement sqlStatement, final ShardingSphereMetaData metaData, final String defaultDatabaseName) { + return bind(sqlStatement, metaData, defaultDatabaseName, Collections.emptyMap()); + } + + @SneakyThrows + private MergeStatement bind(final MergeStatement sqlStatement, final ShardingSphereMetaData metaData, final String defaultDatabaseName, + final Map externalTableBinderContexts) { + MergeStatement result = sqlStatement.getClass().getDeclaredConstructor().newInstance(); + SQLStatementBinderContext statementBinderContext = new SQLStatementBinderContext(metaData, defaultDatabaseName, sqlStatement.getDatabaseType(), sqlStatement.getVariableNames()); + statementBinderContext.getExternalTableBinderContexts().putAll(externalTableBinderContexts); + Map targetTableBinderContexts = new CaseInsensitiveMap<>(); + TableSegment boundedTargetTableSegment = TableSegmentBinder.bind(sqlStatement.getTarget(), statementBinderContext, targetTableBinderContexts, Collections.emptyMap()); + Map sourceTableBinderContexts = new CaseInsensitiveMap<>(); + TableSegment 
boundedSourceTableSegment = TableSegmentBinder.bind(sqlStatement.getSource(), statementBinderContext, sourceTableBinderContexts, Collections.emptyMap()); + result.setTarget(boundedTargetTableSegment); + result.setSource(boundedSourceTableSegment); + Map tableBinderContexts = new LinkedHashMap<>(); + tableBinderContexts.putAll(sourceTableBinderContexts); + tableBinderContexts.putAll(targetTableBinderContexts); + result.setExpr(ExpressionSegmentBinder.bind(sqlStatement.getExpr(), SegmentType.JOIN_ON, statementBinderContext, tableBinderContexts, Collections.emptyMap())); + result.setInsert(Optional.ofNullable(sqlStatement.getInsert()).map(optional -> bindMergeInsert(optional, + (SimpleTableSegment) boundedTargetTableSegment, statementBinderContext, targetTableBinderContexts, sourceTableBinderContexts)).orElse(null)); + result.setUpdate(Optional.ofNullable(sqlStatement.getUpdate()).map(optional -> bindMergeUpdate(optional, + (SimpleTableSegment) boundedTargetTableSegment, statementBinderContext, targetTableBinderContexts, sourceTableBinderContexts)).orElse(null)); + result.addParameterMarkerSegments(sqlStatement.getParameterMarkerSegments()); + result.getCommentSegments().addAll(sqlStatement.getCommentSegments()); + return result; + } + + @SneakyThrows + private InsertStatement bindMergeInsert(final InsertStatement sqlStatement, final SimpleTableSegment tableSegment, final SQLStatementBinderContext statementBinderContext, + final Map targetTableBinderContexts, final Map sourceTableBinderContexts) { + InsertStatement result = sqlStatement.getClass().getDeclaredConstructor().newInstance(); + result.setTable(tableSegment); + sqlStatement.getInsertColumns().ifPresent(result::setInsertColumns); + sqlStatement.getInsertSelect().ifPresent(result::setInsertSelect); + SQLStatementBinderContext insertStatementBinderContext = new SQLStatementBinderContext(statementBinderContext.getMetaData(), statementBinderContext.getDefaultDatabaseName(), + 
statementBinderContext.getDatabaseType(), statementBinderContext.getVariableNames()); + insertStatementBinderContext.getExternalTableBinderContexts().putAll(statementBinderContext.getExternalTableBinderContexts()); + insertStatementBinderContext.getExternalTableBinderContexts().putAll(sourceTableBinderContexts); + Collection insertValues = new LinkedList<>(); + for (InsertValuesSegment each : sqlStatement.getValues()) { + List values = new LinkedList<>(); + for (ExpressionSegment value : each.getValues()) { + values.add(ExpressionSegmentBinder.bind(value, SegmentType.VALUES, insertStatementBinderContext, targetTableBinderContexts, sourceTableBinderContexts)); + } + insertValues.add(new InsertValuesSegment(each.getStartIndex(), each.getStopIndex(), values)); + } + result.getValues().addAll(insertValues); + InsertStatementHandler.getOnDuplicateKeyColumnsSegment(sqlStatement).ifPresent(optional -> InsertStatementHandler.setOnDuplicateKeyColumnsSegment(result, optional)); + InsertStatementHandler.getSetAssignmentSegment(sqlStatement).ifPresent(optional -> InsertStatementHandler.setSetAssignmentSegment(result, optional)); + InsertStatementHandler.getWithSegment(sqlStatement).ifPresent(optional -> InsertStatementHandler.setWithSegment(result, optional)); + InsertStatementHandler.getOutputSegment(sqlStatement).ifPresent(optional -> InsertStatementHandler.setOutputSegment(result, optional)); + InsertStatementHandler.getMultiTableInsertType(sqlStatement).ifPresent(optional -> InsertStatementHandler.setMultiTableInsertType(result, optional)); + InsertStatementHandler.getMultiTableInsertIntoSegment(sqlStatement).ifPresent(optional -> InsertStatementHandler.setMultiTableInsertIntoSegment(result, optional)); + InsertStatementHandler.getMultiTableConditionalIntoSegment(sqlStatement).ifPresent(optional -> InsertStatementHandler.setMultiTableConditionalIntoSegment(result, optional)); + InsertStatementHandler.getReturningSegment(sqlStatement).ifPresent(optional -> 
InsertStatementHandler.setReturningSegment(result, optional)); + InsertStatementHandler.getWhereSegment(sqlStatement).ifPresent(optional -> InsertStatementHandler.setWhereSegment(result, + WhereSegmentBinder.bind(optional, insertStatementBinderContext, targetTableBinderContexts, sourceTableBinderContexts))); + result.addParameterMarkerSegments(sqlStatement.getParameterMarkerSegments()); + result.getCommentSegments().addAll(sqlStatement.getCommentSegments()); + return result; + } + + @SneakyThrows + private UpdateStatement bindMergeUpdate(final UpdateStatement sqlStatement, final SimpleTableSegment tableSegment, final SQLStatementBinderContext statementBinderContext, + final Map targetTableBinderContexts, final Map sourceTableBinderContexts) { + UpdateStatement result = sqlStatement.getClass().getDeclaredConstructor().newInstance(); + result.setTable(tableSegment); + Collection assignments = new LinkedList<>(); + SQLStatementBinderContext updateStatementBinderContext = new SQLStatementBinderContext(statementBinderContext.getMetaData(), statementBinderContext.getDefaultDatabaseName(), + statementBinderContext.getDatabaseType(), statementBinderContext.getVariableNames()); + updateStatementBinderContext.getExternalTableBinderContexts().putAll(statementBinderContext.getExternalTableBinderContexts()); + updateStatementBinderContext.getExternalTableBinderContexts().putAll(sourceTableBinderContexts); + for (AssignmentSegment each : sqlStatement.getSetAssignment().getAssignments()) { + List columnSegments = new ArrayList<>(each.getColumns().size()); + each.getColumns().forEach(column -> columnSegments.add( + ColumnSegmentBinder.bind(column, SegmentType.SET_ASSIGNMENT, updateStatementBinderContext, targetTableBinderContexts, Collections.emptyMap()))); + ExpressionSegment value = ExpressionSegmentBinder.bind(each.getValue(), SegmentType.SET_ASSIGNMENT, updateStatementBinderContext, targetTableBinderContexts, Collections.emptyMap()); + ColumnAssignmentSegment 
columnAssignmentSegment = new ColumnAssignmentSegment(each.getStartIndex(), each.getStopIndex(), columnSegments, value); + assignments.add(columnAssignmentSegment); + } + SetAssignmentSegment setAssignmentSegment = new SetAssignmentSegment(sqlStatement.getSetAssignment().getStartIndex(), sqlStatement.getSetAssignment().getStopIndex(), assignments); + result.setSetAssignment(setAssignmentSegment); + sqlStatement.getWhere().ifPresent(optional -> result.setWhere(WhereSegmentBinder.bind(optional, updateStatementBinderContext, targetTableBinderContexts, Collections.emptyMap()))); + UpdateStatementHandler.getOrderBySegment(sqlStatement).ifPresent(optional -> UpdateStatementHandler.setOrderBySegment(result, optional)); + UpdateStatementHandler.getLimitSegment(sqlStatement).ifPresent(optional -> UpdateStatementHandler.setLimitSegment(result, optional)); + UpdateStatementHandler.getWithSegment(sqlStatement).ifPresent(optional -> UpdateStatementHandler.setWithSegment(result, optional)); + result.addParameterMarkerSegments(sqlStatement.getParameterMarkerSegments()); + result.getCommentSegments().addAll(sqlStatement.getCommentSegments()); + return result; + } +} diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/SelectStatementBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/SelectStatementBinder.java index 1b506c119481d..51d43f1829aa0 100644 --- a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/SelectStatementBinder.java +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/SelectStatementBinder.java @@ -18,18 +18,21 @@ package org.apache.shardingsphere.infra.binder.statement.dml; import lombok.SneakyThrows; -import org.apache.commons.collections4.map.CaseInsensitiveMap; import org.apache.shardingsphere.infra.binder.segment.combine.CombineSegmentBinder; import 
org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinder; import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.segment.lock.LockSegmentBinder; import org.apache.shardingsphere.infra.binder.segment.projection.ProjectionsSegmentBinder; import org.apache.shardingsphere.infra.binder.segment.where.WhereSegmentBinder; import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinder; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.handler.dml.SelectStatementHandler; +import java.util.Collections; +import java.util.LinkedHashMap; import java.util.Map; /** @@ -40,19 +43,28 @@ public final class SelectStatementBinder implements SQLStatementBinder outerTableBinderContexts, final Map externalTableBinderContexts) { SelectStatement result = sqlStatement.getClass().getDeclaredConstructor().newInstance(); - Map tableBinderContexts = new CaseInsensitiveMap<>(); - TableSegment boundedTableSegment = TableSegmentBinder.bind(sqlStatement.getFrom(), metaData, defaultDatabaseName, sqlStatement.getDatabaseType(), tableBinderContexts); + Map tableBinderContexts = new LinkedHashMap<>(); + SQLStatementBinderContext statementBinderContext = new SQLStatementBinderContext(metaData, defaultDatabaseName, sqlStatement.getDatabaseType(), sqlStatement.getVariableNames()); + statementBinderContext.getExternalTableBinderContexts().putAll(externalTableBinderContexts); + TableSegment boundedTableSegment = TableSegmentBinder.bind(sqlStatement.getFrom(), statementBinderContext, tableBinderContexts, outerTableBinderContexts); result.setFrom(boundedTableSegment); - 
result.setProjections(ProjectionsSegmentBinder.bind(sqlStatement.getProjections(), metaData, defaultDatabaseName, boundedTableSegment, tableBinderContexts)); + result.setProjections(ProjectionsSegmentBinder.bind(sqlStatement.getProjections(), statementBinderContext, boundedTableSegment, tableBinderContexts, outerTableBinderContexts)); + sqlStatement.getWhere().ifPresent(optional -> result.setWhere(WhereSegmentBinder.bind(optional, statementBinderContext, tableBinderContexts, outerTableBinderContexts))); // TODO support other segment bind in select statement - sqlStatement.getWhere().ifPresent(optional -> result.setWhere(WhereSegmentBinder.bind(optional, metaData, defaultDatabaseName))); sqlStatement.getGroupBy().ifPresent(result::setGroupBy); sqlStatement.getHaving().ifPresent(result::setHaving); sqlStatement.getOrderBy().ifPresent(result::setOrderBy); - sqlStatement.getCombine().ifPresent(optional -> result.setCombine(CombineSegmentBinder.bind(optional, metaData, defaultDatabaseName))); + sqlStatement.getCombine().ifPresent(optional -> result.setCombine(CombineSegmentBinder.bind(optional, statementBinderContext))); SelectStatementHandler.getLimitSegment(sqlStatement).ifPresent(optional -> SelectStatementHandler.setLimitSegment(result, optional)); - SelectStatementHandler.getLockSegment(sqlStatement).ifPresent(optional -> SelectStatementHandler.setLockSegment(result, optional)); + SelectStatementHandler.getLockSegment(sqlStatement) + .ifPresent(optional -> SelectStatementHandler.setLockSegment(result, LockSegmentBinder.bind(optional, statementBinderContext, tableBinderContexts, outerTableBinderContexts))); SelectStatementHandler.getWindowSegment(sqlStatement).ifPresent(optional -> SelectStatementHandler.setWindowSegment(result, optional)); SelectStatementHandler.getWithSegment(sqlStatement).ifPresent(optional -> SelectStatementHandler.setWithSegment(result, optional)); SelectStatementHandler.getModelSegment(sqlStatement).ifPresent(optional -> 
SelectStatementHandler.setModelSegment(result, optional)); @@ -60,4 +72,34 @@ public SelectStatement bind(final SelectStatement sqlStatement, final ShardingSp result.getCommentSegments().addAll(sqlStatement.getCommentSegments()); return result; } + + /** + * Bind correlate subquery select statement. + * + * @param sqlStatement subquery select statement + * @param metaData meta data + * @param defaultDatabaseName default database name + * @param outerTableBinderContexts outer select statement table binder contexts + * @param externalTableBinderContexts external table binder contexts + * @return bounded correlate subquery select statement + */ + @SneakyThrows + public SelectStatement bindCorrelateSubquery(final SelectStatement sqlStatement, final ShardingSphereMetaData metaData, final String defaultDatabaseName, + final Map outerTableBinderContexts, final Map externalTableBinderContexts) { + return bind(sqlStatement, metaData, defaultDatabaseName, outerTableBinderContexts, externalTableBinderContexts); + } + + /** + * Bind with external table contexts. 
+ * + * @param statement select statement + * @param metaData meta data + * @param defaultDatabaseName default database name + * @param externalTableContexts external table contexts + * @return select statement + */ + public SelectStatement bindWithExternalTableContexts(final SelectStatement statement, final ShardingSphereMetaData metaData, final String defaultDatabaseName, + final Map externalTableContexts) { + return bind(statement, metaData, defaultDatabaseName, Collections.emptyMap(), externalTableContexts); + } } diff --git a/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/UpdateStatementBinder.java b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/UpdateStatementBinder.java new file mode 100644 index 0000000000000..c2f6ad07a9d24 --- /dev/null +++ b/infra/binder/src/main/java/org/apache/shardingsphere/infra/binder/statement/dml/UpdateStatementBinder.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.infra.binder.statement.dml; + +import lombok.SneakyThrows; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinder; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.segment.where.WhereSegmentBinder; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinder; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; +import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.UpdateStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.handler.dml.UpdateStatementHandler; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; + +/** + * Update statement binder. + */ +public final class UpdateStatementBinder implements SQLStatementBinder { + + @Override + public UpdateStatement bind(final UpdateStatement sqlStatement, final ShardingSphereMetaData metaData, final String defaultDatabaseName) { + return bind(sqlStatement, metaData, defaultDatabaseName, Collections.emptyMap()); + } + + @SneakyThrows + private UpdateStatement bind(final UpdateStatement sqlStatement, final ShardingSphereMetaData metaData, final String defaultDatabaseName, + final Map externalTableBinderContexts) { + UpdateStatement result = sqlStatement.getClass().getDeclaredConstructor().newInstance(); + Map tableBinderContexts = new LinkedHashMap<>(); + SQLStatementBinderContext statementBinderContext = new SQLStatementBinderContext(metaData, defaultDatabaseName, sqlStatement.getDatabaseType(), sqlStatement.getVariableNames()); + statementBinderContext.getExternalTableBinderContexts().putAll(externalTableBinderContexts); + TableSegment boundedTableSegment = 
TableSegmentBinder.bind(sqlStatement.getTable(), statementBinderContext, tableBinderContexts, Collections.emptyMap()); + result.setTable(boundedTableSegment); + result.setSetAssignment(sqlStatement.getSetAssignment()); + sqlStatement.getWhere().ifPresent(optional -> result.setWhere(WhereSegmentBinder.bind(optional, statementBinderContext, tableBinderContexts, Collections.emptyMap()))); + UpdateStatementHandler.getOrderBySegment(sqlStatement).ifPresent(optional -> UpdateStatementHandler.setOrderBySegment(result, optional)); + UpdateStatementHandler.getLimitSegment(sqlStatement).ifPresent(optional -> UpdateStatementHandler.setLimitSegment(result, optional)); + UpdateStatementHandler.getWithSegment(sqlStatement).ifPresent(optional -> UpdateStatementHandler.setWithSegment(result, optional)); + result.addParameterMarkerSegments(sqlStatement.getParameterMarkerSegments()); + result.getCommentSegments().addAll(sqlStatement.getCommentSegments()); + return result; + } +} diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/SQLStatementContextFactoryTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/SQLStatementContextFactoryTest.java index 30b8a396cca13..2ff863375cc83 100644 --- a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/SQLStatementContextFactoryTest.java +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/SQLStatementContextFactoryTest.java @@ -150,7 +150,12 @@ void assertNewInstanceForFetchStatement() { } private ShardingSphereMetaData mockMetaData() { - Map databases = Collections.singletonMap(DefaultDatabase.LOGIC_NAME, mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS)); + ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); + when(database.containsSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(database.containsSchema("public")).thenReturn(true); + 
when(database.getSchema(DefaultDatabase.LOGIC_NAME).containsTable("tbl")).thenReturn(true); + when(database.getSchema("public").containsTable("tbl")).thenReturn(true); + Map databases = Collections.singletonMap(DefaultDatabase.LOGIC_NAME, database); return new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), mock(RuleMetaData.class), mock(ConfigurationProperties.class)); } } diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dal/ShowColumnsStatementContextTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dal/ShowColumnsStatementContextTest.java index e86c5a7546a74..78d60e511e23b 100644 --- a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dal/ShowColumnsStatementContextTest.java +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dal/ShowColumnsStatementContextTest.java @@ -40,16 +40,16 @@ class ShowColumnsStatementContextTest { @Test void assertNewInstance() { - MySQLShowColumnsStatement mySQLShowColumnsStatement = mock(MySQLShowColumnsStatement.class); + MySQLShowColumnsStatement showColumnsStatement = mock(MySQLShowColumnsStatement.class); String tableName = "tbl_1"; String databaseName = "sharding_db"; SimpleTableSegment table = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue(tableName))); FromSchemaSegment fromSchema = new FromSchemaSegment(0, 0, new DatabaseSegment(0, 0, new IdentifierValue(databaseName))); - when(mySQLShowColumnsStatement.getTable()).thenReturn(table); - when(mySQLShowColumnsStatement.getFromSchema()).thenReturn(Optional.of(fromSchema)); - ShowColumnsStatementContext actual = new ShowColumnsStatementContext(mySQLShowColumnsStatement); + when(showColumnsStatement.getTable()).thenReturn(table); + when(showColumnsStatement.getFromSchema()).thenReturn(Optional.of(fromSchema)); + ShowColumnsStatementContext actual = new 
ShowColumnsStatementContext(showColumnsStatement); assertThat(actual, instanceOf(CommonSQLStatementContext.class)); - assertThat(actual.getSqlStatement(), is(mySQLShowColumnsStatement)); + assertThat(actual.getSqlStatement(), is(showColumnsStatement)); assertThat(actual.getAllTables().stream().map(each -> each.getTableName().getIdentifier().getValue()).collect(Collectors.toList()), is(Collections.singletonList(tableName))); assertThat(actual.getRemoveSegments(), is(Collections.singletonList(fromSchema))); } diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dal/ShowCreateTableStatementContextTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dal/ShowCreateTableStatementContextTest.java index 1895764666b7b..29e57435f7ee3 100644 --- a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dal/ShowCreateTableStatementContextTest.java +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dal/ShowCreateTableStatementContextTest.java @@ -37,12 +37,12 @@ class ShowCreateTableStatementContextTest { @Test void assertNewInstance() { - MySQLShowCreateTableStatement mySQLShowCreateTableStatement = mock(MySQLShowCreateTableStatement.class); + MySQLShowCreateTableStatement showCreateTableStatement = mock(MySQLShowCreateTableStatement.class); SimpleTableSegment table = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("tbl_1"))); - when(mySQLShowCreateTableStatement.getTable()).thenReturn(table); - ShowCreateTableStatementContext actual = new ShowCreateTableStatementContext(mySQLShowCreateTableStatement); + when(showCreateTableStatement.getTable()).thenReturn(table); + ShowCreateTableStatementContext actual = new ShowCreateTableStatementContext(showCreateTableStatement); assertThat(actual, instanceOf(CommonSQLStatementContext.class)); - assertThat(actual.getSqlStatement(), 
is(mySQLShowCreateTableStatement)); + assertThat(actual.getSqlStatement(), is(showCreateTableStatement)); assertThat(actual.getAllTables().stream().map(each -> each.getTableName().getIdentifier().getValue()).collect(Collectors.toList()), is(Collections.singletonList("tbl_1"))); } } diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dal/ShowIndexStatementContextTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dal/ShowIndexStatementContextTest.java index 00a0062257b6f..c8a259ab79bee 100644 --- a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dal/ShowIndexStatementContextTest.java +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dal/ShowIndexStatementContextTest.java @@ -37,12 +37,12 @@ class ShowIndexStatementContextTest { @Test void assertNewInstance() { - MySQLShowIndexStatement mySQLShowIndexStatement = mock(MySQLShowIndexStatement.class); + MySQLShowIndexStatement showIndexStatement = mock(MySQLShowIndexStatement.class); SimpleTableSegment table = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("tbl_1"))); - when(mySQLShowIndexStatement.getTable()).thenReturn(table); - ShowIndexStatementContext actual = new ShowIndexStatementContext(mySQLShowIndexStatement); + when(showIndexStatement.getTable()).thenReturn(table); + ShowIndexStatementContext actual = new ShowIndexStatementContext(showIndexStatement); assertThat(actual, instanceOf(CommonSQLStatementContext.class)); - assertThat(actual.getSqlStatement(), is(mySQLShowIndexStatement)); + assertThat(actual.getSqlStatement(), is(showIndexStatement)); assertThat(actual.getAllTables().stream().map(each -> each.getTableName().getIdentifier().getValue()).collect(Collectors.toList()), is(Collections.singletonList("tbl_1"))); } } diff --git 
a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/AlterViewStatementContextTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/AlterViewStatementContextTest.java index 415c304fa7c75..64cd0c568e62d 100644 --- a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/AlterViewStatementContextTest.java +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/AlterViewStatementContextTest.java @@ -50,18 +50,18 @@ void setUp() { void assertMySQLNewInstance() { SelectStatement select = mock(MySQLSelectStatement.class); when(select.getFrom()).thenReturn(view); - MySQLAlterViewStatement mySQLAlterViewStatement = mock(MySQLAlterViewStatement.class); - when(mySQLAlterViewStatement.getView()).thenReturn(view); - when(mySQLAlterViewStatement.getSelect()).thenReturn(select); - assertNewInstance(mySQLAlterViewStatement); + MySQLAlterViewStatement alterViewStatement = mock(MySQLAlterViewStatement.class); + when(alterViewStatement.getView()).thenReturn(view); + when(alterViewStatement.getSelect()).thenReturn(select); + assertNewInstance(alterViewStatement); } @Test void assertPostgreSQLNewInstance() { - PostgreSQLAlterViewStatement postgreSQLAlterViewStatement = mock(PostgreSQLAlterViewStatement.class); - when(postgreSQLAlterViewStatement.getView()).thenReturn(view); - when(postgreSQLAlterViewStatement.getRenameView()).thenReturn(Optional.of(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("view"))))); - assertNewInstance(postgreSQLAlterViewStatement); + PostgreSQLAlterViewStatement alterViewStatement = mock(PostgreSQLAlterViewStatement.class); + when(alterViewStatement.getView()).thenReturn(view); + when(alterViewStatement.getRenameView()).thenReturn(Optional.of(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("view"))))); + assertNewInstance(alterViewStatement); } private void 
assertNewInstance(final AlterViewStatement alterViewStatement) { diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/PrepareStatementContextTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/PrepareStatementContextTest.java index 8285b17be3e2c..1ea14db3efc8c 100644 --- a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/PrepareStatementContextTest.java +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/ddl/PrepareStatementContextTest.java @@ -61,41 +61,41 @@ void setUp() { @Test void assertNewInstance() { - PostgreSQLPrepareStatement postgreSQLPrepare = mock(PostgreSQLPrepareStatement.class); - when(postgreSQLPrepare.getSelect()).thenReturn(Optional.of(getSelect())); - when(postgreSQLPrepare.getInsert()).thenReturn(Optional.of(getInsert())); - when(postgreSQLPrepare.getUpdate()).thenReturn(Optional.of(getUpdate())); - when(postgreSQLPrepare.getDelete()).thenReturn(Optional.of(getDelete())); - PrepareStatementContext actual = new PrepareStatementContext(postgreSQLPrepare); + PostgreSQLPrepareStatement prepareStatement = mock(PostgreSQLPrepareStatement.class); + when(prepareStatement.getSelect()).thenReturn(Optional.of(getSelect())); + when(prepareStatement.getInsert()).thenReturn(Optional.of(getInsert())); + when(prepareStatement.getUpdate()).thenReturn(Optional.of(getUpdate())); + when(prepareStatement.getDelete()).thenReturn(Optional.of(getDelete())); + PrepareStatementContext actual = new PrepareStatementContext(prepareStatement); assertThat(actual, instanceOf(CommonSQLStatementContext.class)); - assertThat(actual.getSqlStatement(), is(postgreSQLPrepare)); + assertThat(actual.getSqlStatement(), is(prepareStatement)); assertThat(actual.getAllTables().stream().map(each -> each.getTableName().getIdentifier().getValue()).collect(Collectors.toList()), is(Arrays.asList("tbl_1", "tbl_1", "tbl_1", 
"tbl_1"))); } private SelectStatement getSelect() { - SelectStatement select = new PostgreSQLSelectStatement(); - select.setFrom(table); - return select; + SelectStatement result = new PostgreSQLSelectStatement(); + result.setFrom(table); + return result; } private InsertStatement getInsert() { - InsertStatement insert = new PostgreSQLInsertStatement(); - insert.setTable(table); - return insert; + InsertStatement result = new PostgreSQLInsertStatement(); + result.setTable(table); + return result; } private UpdateStatement getUpdate() { - UpdateStatement update = new PostgreSQLUpdateStatement(); - update.setTable(table); + UpdateStatement result = new PostgreSQLUpdateStatement(); + result.setTable(table); SetAssignmentSegment setAssignmentSegment = new SetAssignmentSegment(0, 0, Collections.singletonList(new ColumnAssignmentSegment(0, 0, Collections.singletonList(column), column))); - update.setSetAssignment(setAssignmentSegment); - return update; + result.setSetAssignment(setAssignmentSegment); + return result; } private DeleteStatement getDelete() { - DeleteStatement delete = new PostgreSQLDeleteStatement(); - delete.setTable(table); - return delete; + DeleteStatement result = new PostgreSQLDeleteStatement(); + result.setTable(table); + return result; } } diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dml/InsertStatementContextTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dml/InsertStatementContextTest.java index 71787a68a5b63..0d34002d2ec96 100644 --- a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dml/InsertStatementContextTest.java +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dml/InsertStatementContextTest.java @@ -24,6 +24,7 @@ import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; import 
org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; +import org.apache.shardingsphere.sql.parser.sql.common.enums.ParameterMarkerType; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.AssignmentSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.ColumnAssignmentSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.InsertValuesSegment; @@ -161,15 +162,16 @@ void assertGetGroupedParametersWithOnDuplicateParameters() { void assertInsertSelect() { InsertStatement insertStatement = new MySQLInsertStatement(); SelectStatement selectStatement = new MySQLSelectStatement(); + selectStatement.addParameterMarkerSegments(Collections.singleton(new ParameterMarkerExpressionSegment(0, 0, 0, ParameterMarkerType.QUESTION))); selectStatement.setProjections(new ProjectionsSegment(0, 0)); SubquerySegment insertSelect = new SubquerySegment(0, 0, selectStatement); insertStatement.setInsertSelect(insertSelect); insertStatement.setTable(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("tbl")))); InsertStatementContext actual = createInsertStatementContext(Collections.singletonList("param"), insertStatement); actual.setUpParameters(Collections.singletonList("param")); - assertThat(actual.getInsertSelectContext().getParameterCount(), is(0)); + assertThat(actual.getInsertSelectContext().getParameterCount(), is(1)); assertThat(actual.getGroupedParameters().size(), is(1)); - assertThat(actual.getGroupedParameters().iterator().next(), is(Collections.emptyList())); + assertThat(actual.getGroupedParameters().iterator().next(), is(Collections.singletonList("param"))); } private void setUpInsertValues(final InsertStatement insertStatement) { diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dml/SelectStatementContextTest.java 
b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dml/SelectStatementContextTest.java index ae2e8cb3ff11e..051f9c819f218 100644 --- a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dml/SelectStatementContextTest.java +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/context/statement/dml/SelectStatementContextTest.java @@ -51,6 +51,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.JoinTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SubqueryTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableNameSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement; import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; @@ -626,4 +627,24 @@ void assertIsContainsEnhancedTable() { SelectStatementContext actual = new SelectStatementContext(metaData, Collections.emptyList(), selectStatement, DefaultDatabase.LOGIC_NAME); assertTrue(actual.isContainsEnhancedTable()); } + + @Test + void assertContainsEnhancedTable() { + SelectStatement selectStatement = new MySQLSelectStatement(); + selectStatement.setProjections(new ProjectionsSegment(0, 0)); + selectStatement.setFrom(new SubqueryTableSegment(new SubquerySegment(0, 0, createSubSelectStatement()))); + ShardingSphereMetaData metaData = new ShardingSphereMetaData(Collections.singletonMap(DefaultDatabase.LOGIC_NAME, mockDatabase()), mock(ResourceMetaData.class), + mock(RuleMetaData.class), mock(ConfigurationProperties.class)); + SelectStatementContext actual = new SelectStatementContext(metaData, Collections.emptyList(), selectStatement, DefaultDatabase.LOGIC_NAME); + 
assertTrue(actual.containsTableSubquery()); + } + + private SelectStatement createSubSelectStatement() { + ProjectionsSegment projectionsSegment = new ProjectionsSegment(0, 0); + projectionsSegment.getProjections().add(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("order_id")))); + SelectStatement result = new MySQLSelectStatement(); + result.setProjections(projectionsSegment); + result.setFrom(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_order")))); + return result; + } } diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/ColumnSegmentBinderTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/ColumnSegmentBinderTest.java new file mode 100644 index 0000000000000..66119adb2f182 --- /dev/null +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/expression/impl/ColumnSegmentBinderTest.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.infra.binder.segment.expression.impl; + +import org.apache.shardingsphere.infra.binder.enums.SegmentType; +import org.apache.shardingsphere.infra.binder.segment.from.SimpleTableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; +import org.apache.shardingsphere.infra.database.core.DefaultDatabase; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ColumnProjectionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.bounded.ColumnSegmentBoundedInfo; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; +import org.junit.jupiter.api.Test; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.mockito.Mockito.mock; + +class ColumnSegmentBinderTest { + + @Test + void assertBindWithMultiTablesJoinAndNoOwner() { + Map tableBinderContexts = new LinkedHashMap<>(); + ColumnSegment boundedOrderIdColumn = new ColumnSegment(0, 0, new IdentifierValue("order_id")); + boundedOrderIdColumn.setColumnBoundedInfo(new ColumnSegmentBoundedInfo(new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue(DefaultDatabase.LOGIC_NAME), + new IdentifierValue("t_order"), new IdentifierValue("order_id"))); + tableBinderContexts.put("t_order", new 
SimpleTableSegmentBinderContext(Collections.singleton(new ColumnProjectionSegment(boundedOrderIdColumn)))); + ColumnSegment boundedItemIdColumn = new ColumnSegment(0, 0, new IdentifierValue("item_id")); + boundedItemIdColumn.setColumnBoundedInfo(new ColumnSegmentBoundedInfo(new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue(DefaultDatabase.LOGIC_NAME), + new IdentifierValue("t_order_item"), new IdentifierValue("item_id"))); + tableBinderContexts.put("t_order_item", new SimpleTableSegmentBinderContext(Collections.singleton(new ColumnProjectionSegment(boundedItemIdColumn)))); + ColumnSegment columnSegment = new ColumnSegment(0, 0, new IdentifierValue("order_id")); + SQLStatementBinderContext statementBinderContext = + new SQLStatementBinderContext(mock(ShardingSphereMetaData.class), DefaultDatabase.LOGIC_NAME, TypedSPILoader.getService(DatabaseType.class, "FIXTURE"), Collections.emptySet()); + ColumnSegment actual = ColumnSegmentBinder.bind(columnSegment, SegmentType.JOIN_ON, statementBinderContext, tableBinderContexts, Collections.emptyMap()); + assertNotNull(actual.getColumnBoundedInfo()); + assertNull(actual.getOtherUsingColumnBoundedInfo()); + assertThat(actual.getColumnBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(actual.getColumnBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(actual.getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + assertThat(actual.getColumnBoundedInfo().getOriginalColumn().getValue(), is("order_id")); + } + + @Test + void assertBindFromOuterTable() { + Map outerTableBinderContexts = new LinkedHashMap<>(); + ColumnSegment boundedOrderStatusColumn = new ColumnSegment(0, 0, new IdentifierValue("status")); + boundedOrderStatusColumn.setColumnBoundedInfo(new ColumnSegmentBoundedInfo(new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue(DefaultDatabase.LOGIC_NAME), + new IdentifierValue("t_order"), new 
IdentifierValue("status"))); + outerTableBinderContexts.put("t_order", new SimpleTableSegmentBinderContext(Collections.singleton(new ColumnProjectionSegment(boundedOrderStatusColumn)))); + ColumnSegment boundedOrderItemStatusColumn = new ColumnSegment(0, 0, new IdentifierValue("status")); + boundedOrderItemStatusColumn.setColumnBoundedInfo(new ColumnSegmentBoundedInfo(new IdentifierValue(DefaultDatabase.LOGIC_NAME), new IdentifierValue(DefaultDatabase.LOGIC_NAME), + new IdentifierValue("t_order_item"), new IdentifierValue("status"))); + outerTableBinderContexts.put("t_order_item", new SimpleTableSegmentBinderContext(Collections.singleton(new ColumnProjectionSegment(boundedOrderItemStatusColumn)))); + SQLStatementBinderContext statementBinderContext = + new SQLStatementBinderContext(mock(ShardingSphereMetaData.class), DefaultDatabase.LOGIC_NAME, TypedSPILoader.getService(DatabaseType.class, "FIXTURE"), Collections.emptySet()); + ColumnSegment columnSegment = new ColumnSegment(0, 0, new IdentifierValue("status")); + ColumnSegment actual = ColumnSegmentBinder.bind(columnSegment, SegmentType.PROJECTION, statementBinderContext, Collections.emptyMap(), outerTableBinderContexts); + assertNotNull(actual.getColumnBoundedInfo()); + assertNull(actual.getOtherUsingColumnBoundedInfo()); + assertThat(actual.getColumnBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(actual.getColumnBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(actual.getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order_item")); + assertThat(actual.getColumnBoundedInfo().getOriginalColumn().getValue(), is("status")); + } +} diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/from/impl/JoinTableSegmentBinderTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/from/impl/JoinTableSegmentBinderTest.java index cfb5a60131aa3..e84da4dd94bea 100644 --- 
a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/from/impl/JoinTableSegmentBinderTest.java +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/from/impl/JoinTableSegmentBinderTest.java @@ -17,8 +17,8 @@ package org.apache.shardingsphere.infra.binder.segment.from.impl; -import org.apache.commons.collections4.map.CaseInsensitiveMap; import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.infra.database.core.DefaultDatabase; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; @@ -40,6 +40,8 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -64,15 +66,16 @@ void assertBindWithAlias() { when(joinTableSegment.getLeft()).thenReturn(leftTable); when(joinTableSegment.getRight()).thenReturn(rightTable); ShardingSphereMetaData metaData = createMetaData(); - Map tableBinderContexts = new CaseInsensitiveMap<>(); - JoinTableSegment actual = JoinTableSegmentBinder.bind(joinTableSegment, metaData, DefaultDatabase.LOGIC_NAME, databaseType, tableBinderContexts); + Map tableBinderContexts = new LinkedHashMap<>(); + JoinTableSegment actual = JoinTableSegmentBinder.bind(joinTableSegment, new SQLStatementBinderContext(metaData, DefaultDatabase.LOGIC_NAME, databaseType, Collections.emptySet()), + tableBinderContexts, Collections.emptyMap()); assertTrue(actual.getLeft() instanceof SimpleTableSegment); - assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getOriginalSchema().getValue(), 
is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getTableBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getTableBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); assertTrue(actual.getRight() instanceof SimpleTableSegment); - assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertJoinTableProjectionSegments(actual.getJoinTableProjectionSegments()); + assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getTableBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getTableBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertJoinTableProjectionSegments(actual.getDerivedJoinTableProjectionSegments()); assertTrue(tableBinderContexts.containsKey("o")); assertTrue(tableBinderContexts.containsKey("i")); } @@ -81,20 +84,20 @@ private void assertJoinTableProjectionSegments(final Collection actual = new ArrayList<>(joinTableProjectionSegments); assertThat(actual.size(), is(7)); assertTrue(actual.get(0) instanceof ColumnProjectionSegment); - assertThat(((ColumnProjectionSegment) actual.get(0)).getColumn().getOriginalColumn().getValue(), is("order_id")); - assertThat(((ColumnProjectionSegment) actual.get(0)).getColumn().getOriginalTable().getValue(), is("t_order")); - assertThat(((ColumnProjectionSegment) actual.get(1)).getColumn().getOriginalColumn().getValue(), is("user_id")); - assertThat(((ColumnProjectionSegment) actual.get(1)).getColumn().getOriginalTable().getValue(), is("t_order")); - assertThat(((ColumnProjectionSegment) 
actual.get(2)).getColumn().getOriginalColumn().getValue(), is("status")); - assertThat(((ColumnProjectionSegment) actual.get(2)).getColumn().getOriginalTable().getValue(), is("t_order")); - assertThat(((ColumnProjectionSegment) actual.get(3)).getColumn().getOriginalColumn().getValue(), is("item_id")); - assertThat(((ColumnProjectionSegment) actual.get(3)).getColumn().getOriginalTable().getValue(), is("t_order_item")); - assertThat(((ColumnProjectionSegment) actual.get(4)).getColumn().getOriginalColumn().getValue(), is("order_id")); - assertThat(((ColumnProjectionSegment) actual.get(4)).getColumn().getOriginalTable().getValue(), is("t_order_item")); - assertThat(((ColumnProjectionSegment) actual.get(5)).getColumn().getOriginalColumn().getValue(), is("user_id")); - assertThat(((ColumnProjectionSegment) actual.get(5)).getColumn().getOriginalTable().getValue(), is("t_order_item")); - assertThat(((ColumnProjectionSegment) actual.get(6)).getColumn().getOriginalColumn().getValue(), is("status")); - assertThat(((ColumnProjectionSegment) actual.get(6)).getColumn().getOriginalTable().getValue(), is("t_order_item")); + assertThat(((ColumnProjectionSegment) actual.get(0)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("order_id")); + assertThat(((ColumnProjectionSegment) actual.get(0)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + assertThat(((ColumnProjectionSegment) actual.get(1)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("user_id")); + assertThat(((ColumnProjectionSegment) actual.get(1)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + assertThat(((ColumnProjectionSegment) actual.get(2)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("status")); + assertThat(((ColumnProjectionSegment) actual.get(2)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + assertThat(((ColumnProjectionSegment) 
actual.get(3)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("item_id")); + assertThat(((ColumnProjectionSegment) actual.get(3)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order_item")); + assertThat(((ColumnProjectionSegment) actual.get(4)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("order_id")); + assertThat(((ColumnProjectionSegment) actual.get(4)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order_item")); + assertThat(((ColumnProjectionSegment) actual.get(5)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("user_id")); + assertThat(((ColumnProjectionSegment) actual.get(5)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order_item")); + assertThat(((ColumnProjectionSegment) actual.get(6)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("status")); + assertThat(((ColumnProjectionSegment) actual.get(6)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order_item")); } @Test @@ -105,15 +108,16 @@ void assertBindWithoutAlias() { when(joinTableSegment.getLeft()).thenReturn(leftTable); when(joinTableSegment.getRight()).thenReturn(rightTable); ShardingSphereMetaData metaData = createMetaData(); - Map tableBinderContexts = new CaseInsensitiveMap<>(); - JoinTableSegment actual = JoinTableSegmentBinder.bind(joinTableSegment, metaData, DefaultDatabase.LOGIC_NAME, databaseType, tableBinderContexts); + Map tableBinderContexts = new LinkedHashMap<>(); + JoinTableSegment actual = JoinTableSegmentBinder.bind(joinTableSegment, new SQLStatementBinderContext(metaData, DefaultDatabase.LOGIC_NAME, databaseType, Collections.emptySet()), + tableBinderContexts, Collections.emptyMap()); assertTrue(actual.getLeft() instanceof SimpleTableSegment); - assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); - 
assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getTableBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getTableBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); assertTrue(actual.getRight() instanceof SimpleTableSegment); - assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertJoinTableProjectionSegments(actual.getJoinTableProjectionSegments()); + assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getTableBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getTableBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertJoinTableProjectionSegments(actual.getDerivedJoinTableProjectionSegments()); assertTrue(tableBinderContexts.containsKey("t_order")); assertTrue(tableBinderContexts.containsKey("t_order_item")); } @@ -130,15 +134,16 @@ void assertBindWithNaturalJoin() { when(joinTableSegment.isNatural()).thenReturn(true); when(joinTableSegment.getJoinType()).thenReturn(JoinType.RIGHT.name()); ShardingSphereMetaData metaData = createMetaData(); - Map tableBinderContexts = new CaseInsensitiveMap<>(); - JoinTableSegment actual = JoinTableSegmentBinder.bind(joinTableSegment, metaData, DefaultDatabase.LOGIC_NAME, databaseType, tableBinderContexts); + Map tableBinderContexts = new LinkedHashMap<>(); + JoinTableSegment actual = JoinTableSegmentBinder.bind(joinTableSegment, new SQLStatementBinderContext(metaData, 
DefaultDatabase.LOGIC_NAME, databaseType, Collections.emptySet()), + tableBinderContexts, Collections.emptyMap()); assertTrue(actual.getLeft() instanceof SimpleTableSegment); - assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getTableBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getTableBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); assertTrue(actual.getRight() instanceof SimpleTableSegment); - assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertJoinTableProjectionSegmentsWithNaturalJoin(actual.getJoinTableProjectionSegments()); + assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getTableBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getTableBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertJoinTableProjectionSegmentsWithNaturalJoin(actual.getDerivedJoinTableProjectionSegments()); assertTrue(tableBinderContexts.containsKey("o")); assertTrue(tableBinderContexts.containsKey("i")); } @@ -147,14 +152,14 @@ private void assertJoinTableProjectionSegmentsWithNaturalJoin(final Collection

actual = new ArrayList<>(joinTableProjectionSegments); assertThat(actual.size(), is(4)); assertTrue(actual.get(0) instanceof ColumnProjectionSegment); - assertThat(((ColumnProjectionSegment) actual.get(0)).getColumn().getOriginalColumn().getValue(), is("order_id")); - assertThat(((ColumnProjectionSegment) actual.get(0)).getColumn().getOriginalTable().getValue(), is("t_order")); - assertThat(((ColumnProjectionSegment) actual.get(1)).getColumn().getOriginalColumn().getValue(), is("user_id")); - assertThat(((ColumnProjectionSegment) actual.get(1)).getColumn().getOriginalTable().getValue(), is("t_order")); - assertThat(((ColumnProjectionSegment) actual.get(2)).getColumn().getOriginalColumn().getValue(), is("status")); - assertThat(((ColumnProjectionSegment) actual.get(2)).getColumn().getOriginalTable().getValue(), is("t_order")); - assertThat(((ColumnProjectionSegment) actual.get(3)).getColumn().getOriginalColumn().getValue(), is("item_id")); - assertThat(((ColumnProjectionSegment) actual.get(3)).getColumn().getOriginalTable().getValue(), is("t_order_item")); + assertThat(((ColumnProjectionSegment) actual.get(0)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("order_id")); + assertThat(((ColumnProjectionSegment) actual.get(0)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + assertThat(((ColumnProjectionSegment) actual.get(1)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("user_id")); + assertThat(((ColumnProjectionSegment) actual.get(1)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + assertThat(((ColumnProjectionSegment) actual.get(2)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("status")); + assertThat(((ColumnProjectionSegment) actual.get(2)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + assertThat(((ColumnProjectionSegment) 
actual.get(3)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("item_id")); + assertThat(((ColumnProjectionSegment) actual.get(3)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order_item")); } @Test @@ -169,15 +174,16 @@ void assertBindWithJoinUsing() { when(joinTableSegment.getJoinType()).thenReturn(JoinType.RIGHT.name()); when(joinTableSegment.getUsing()).thenReturn(Arrays.asList(new ColumnSegment(0, 0, new IdentifierValue("status")), new ColumnSegment(0, 0, new IdentifierValue("order_id")))); ShardingSphereMetaData metaData = createMetaData(); - Map tableBinderContexts = new CaseInsensitiveMap<>(); - JoinTableSegment actual = JoinTableSegmentBinder.bind(joinTableSegment, metaData, DefaultDatabase.LOGIC_NAME, databaseType, tableBinderContexts); + Map tableBinderContexts = new LinkedHashMap<>(); + JoinTableSegment actual = JoinTableSegmentBinder.bind(joinTableSegment, new SQLStatementBinderContext(metaData, DefaultDatabase.LOGIC_NAME, databaseType, Collections.emptySet()), + tableBinderContexts, Collections.emptyMap()); assertTrue(actual.getLeft() instanceof SimpleTableSegment); - assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getTableBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) actual.getLeft()).getTableName().getTableBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); assertTrue(actual.getRight() instanceof SimpleTableSegment); - assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertThat(((SimpleTableSegment) 
actual.getRight()).getTableName().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertJoinTableProjectionSegmentsWithUsing(actual.getJoinTableProjectionSegments()); + assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getTableBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getTableBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertJoinTableProjectionSegmentsWithUsing(actual.getDerivedJoinTableProjectionSegments()); assertTrue(tableBinderContexts.containsKey("o")); assertTrue(tableBinderContexts.containsKey("i")); } @@ -186,16 +192,16 @@ private void assertJoinTableProjectionSegmentsWithUsing(final Collection actual = new ArrayList<>(joinTableProjectionSegments); assertThat(actual.size(), is(5)); assertTrue(actual.get(0) instanceof ColumnProjectionSegment); - assertThat(((ColumnProjectionSegment) actual.get(0)).getColumn().getOriginalColumn().getValue(), is("status")); - assertThat(((ColumnProjectionSegment) actual.get(0)).getColumn().getOriginalTable().getValue(), is("t_order")); - assertThat(((ColumnProjectionSegment) actual.get(1)).getColumn().getOriginalColumn().getValue(), is("order_id")); - assertThat(((ColumnProjectionSegment) actual.get(1)).getColumn().getOriginalTable().getValue(), is("t_order")); - assertThat(((ColumnProjectionSegment) actual.get(2)).getColumn().getOriginalColumn().getValue(), is("user_id")); - assertThat(((ColumnProjectionSegment) actual.get(2)).getColumn().getOriginalTable().getValue(), is("t_order")); - assertThat(((ColumnProjectionSegment) actual.get(3)).getColumn().getOriginalColumn().getValue(), is("item_id")); - assertThat(((ColumnProjectionSegment) actual.get(3)).getColumn().getOriginalTable().getValue(), is("t_order_item")); - assertThat(((ColumnProjectionSegment) actual.get(4)).getColumn().getOriginalColumn().getValue(), is("user_id")); - 
assertThat(((ColumnProjectionSegment) actual.get(4)).getColumn().getOriginalTable().getValue(), is("t_order_item")); + assertThat(((ColumnProjectionSegment) actual.get(0)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("status")); + assertThat(((ColumnProjectionSegment) actual.get(0)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order_item")); + assertThat(((ColumnProjectionSegment) actual.get(1)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("order_id")); + assertThat(((ColumnProjectionSegment) actual.get(1)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order_item")); + assertThat(((ColumnProjectionSegment) actual.get(2)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("user_id")); + assertThat(((ColumnProjectionSegment) actual.get(2)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + assertThat(((ColumnProjectionSegment) actual.get(3)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("item_id")); + assertThat(((ColumnProjectionSegment) actual.get(3)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order_item")); + assertThat(((ColumnProjectionSegment) actual.get(4)).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("user_id")); + assertThat(((ColumnProjectionSegment) actual.get(4)).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order_item")); } @Test @@ -207,19 +213,20 @@ void assertBindWithMultiTableJoin() { when(joinTableSegment.getLeft()).thenReturn(leftTable); when(joinTableSegment.getRight()).thenReturn(rightTable); ShardingSphereMetaData metaData = createMetaData(); - Map tableBinderContexts = new CaseInsensitiveMap<>(); - JoinTableSegment actual = JoinTableSegmentBinder.bind(joinTableSegment, metaData, DefaultDatabase.LOGIC_NAME, databaseType, tableBinderContexts); + Map tableBinderContexts = new 
LinkedHashMap<>(); + JoinTableSegment actual = JoinTableSegmentBinder.bind(joinTableSegment, new SQLStatementBinderContext(metaData, DefaultDatabase.LOGIC_NAME, databaseType, Collections.emptySet()), + tableBinderContexts, Collections.emptyMap()); assertTrue(actual.getLeft() instanceof JoinTableSegment); assertTrue(((JoinTableSegment) actual.getLeft()).getLeft() instanceof SimpleTableSegment); - assertThat(((SimpleTableSegment) ((JoinTableSegment) actual.getLeft()).getLeft()).getTableName().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertThat(((SimpleTableSegment) ((JoinTableSegment) actual.getLeft()).getLeft()).getTableName().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) ((JoinTableSegment) actual.getLeft()).getLeft()).getTableName().getTableBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) ((JoinTableSegment) actual.getLeft()).getLeft()).getTableName().getTableBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); assertTrue(((JoinTableSegment) actual.getLeft()).getRight() instanceof SimpleTableSegment); - assertThat(((SimpleTableSegment) ((JoinTableSegment) actual.getLeft()).getRight()).getTableName().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertThat(((SimpleTableSegment) ((JoinTableSegment) actual.getLeft()).getRight()).getTableName().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) ((JoinTableSegment) actual.getLeft()).getRight()).getTableName().getTableBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) ((JoinTableSegment) actual.getLeft()).getRight()).getTableName().getTableBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); assertTrue(actual.getRight() instanceof SimpleTableSegment); - assertThat(((SimpleTableSegment) 
actual.getRight()).getTableName().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertThat(actual.getJoinTableProjectionSegments().size(), is(10)); + assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getTableBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((SimpleTableSegment) actual.getRight()).getTableName().getTableBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(actual.getDerivedJoinTableProjectionSegments().size(), is(10)); assertTrue(tableBinderContexts.containsKey("o")); assertTrue(tableBinderContexts.containsKey("o2")); assertTrue(tableBinderContexts.containsKey("i")); @@ -249,6 +256,10 @@ private ShardingSphereMetaData createMetaData() { new ShardingSphereColumn("status", Types.INTEGER, false, false, false, true, false, false))); ShardingSphereMetaData result = mock(ShardingSphereMetaData.class, RETURNS_DEEP_STUBS); when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(schema); + when(result.containsDatabase(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).containsSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME).containsTable("t_order")).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME).containsTable("t_order_item")).thenReturn(true); return result; } } diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SimpleTableSegmentBinderTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SimpleTableSegmentBinderTest.java index 38b3304c5c930..25f6a270ff991 100644 --- 
a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SimpleTableSegmentBinderTest.java +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SimpleTableSegmentBinderTest.java @@ -17,10 +17,11 @@ package org.apache.shardingsphere.infra.binder.segment.from.impl; -import org.apache.commons.collections4.map.CaseInsensitiveMap; import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.infra.database.core.DefaultDatabase; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.exception.TableNotExistsException; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; @@ -34,10 +35,13 @@ import java.sql.Types; import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; import java.util.Map; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; @@ -51,33 +55,43 @@ class SimpleTableSegmentBinderTest { void assertBind() { SimpleTableSegment simpleTableSegment = new SimpleTableSegment(new TableNameSegment(0, 10, new IdentifierValue("t_order"))); ShardingSphereMetaData metaData = createMetaData(); - Map tableBinderContexts = new CaseInsensitiveMap<>(); - SimpleTableSegment actual = SimpleTableSegmentBinder.bind(simpleTableSegment, metaData, DefaultDatabase.LOGIC_NAME, databaseType, tableBinderContexts); - 
assertThat(actual.getTableName().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertThat(actual.getTableName().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + Map tableBinderContexts = new LinkedHashMap<>(); + SimpleTableSegment actual = + SimpleTableSegmentBinder.bind(simpleTableSegment, new SQLStatementBinderContext(metaData, DefaultDatabase.LOGIC_NAME, databaseType, Collections.emptySet()), tableBinderContexts); + assertThat(actual.getTableName().getTableBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(actual.getTableName().getTableBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); assertTrue(tableBinderContexts.containsKey("t_order")); assertThat(tableBinderContexts.get("t_order").getProjectionSegments().size(), is(3)); - assertTrue(tableBinderContexts.get("t_order").getProjectionSegmentByColumnLabel("user_id") instanceof ColumnProjectionSegment); - assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").getProjectionSegmentByColumnLabel("user_id")).getColumn().getOriginalDatabase().getValue(), + assertTrue(tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("user_id").isPresent()); + assertTrue(tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("user_id").get() instanceof ColumnProjectionSegment); + assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("user_id").get()).getColumn().getColumnBoundedInfo().getOriginalDatabase() + .getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("user_id").get()).getColumn().getColumnBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertThat(((ColumnProjectionSegment) 
tableBinderContexts.get("t_order").getProjectionSegmentByColumnLabel("user_id")).getColumn().getOriginalSchema().getValue(), + assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("user_id").get()).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), + is("t_order")); + assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("user_id").get()).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), + is("user_id")); + assertTrue(tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("order_id").isPresent()); + assertTrue(tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("order_id").get() instanceof ColumnProjectionSegment); + assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("order_id").get()).getColumn().getColumnBoundedInfo().getOriginalDatabase() + .getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("order_id").get()).getColumn().getColumnBoundedInfo().getOriginalSchema() + .getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("order_id").get()).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), + is("t_order")); + assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("order_id").get()).getColumn().getColumnBoundedInfo().getOriginalColumn() + .getValue(), is("order_id")); + assertTrue(tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("status").isPresent()); + assertTrue(tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("status").get() instanceof ColumnProjectionSegment); + assertThat(((ColumnProjectionSegment) 
tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("status").get()).getColumn().getColumnBoundedInfo().getOriginalDatabase() + .getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("status").get()).getColumn().getColumnBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").getProjectionSegmentByColumnLabel("user_id")).getColumn().getOriginalTable().getValue(), is("t_order")); - assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").getProjectionSegmentByColumnLabel("user_id")).getColumn().getOriginalColumn().getValue(), is("user_id")); - assertTrue(tableBinderContexts.get("t_order").getProjectionSegmentByColumnLabel("order_id") instanceof ColumnProjectionSegment); - assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").getProjectionSegmentByColumnLabel("order_id")).getColumn().getOriginalDatabase().getValue(), - is(DefaultDatabase.LOGIC_NAME)); - assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").getProjectionSegmentByColumnLabel("order_id")).getColumn().getOriginalSchema().getValue(), - is(DefaultDatabase.LOGIC_NAME)); - assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").getProjectionSegmentByColumnLabel("order_id")).getColumn().getOriginalTable().getValue(), is("t_order")); - assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").getProjectionSegmentByColumnLabel("order_id")).getColumn().getOriginalColumn().getValue(), is("order_id")); - assertTrue(tableBinderContexts.get("t_order").getProjectionSegmentByColumnLabel("status") instanceof ColumnProjectionSegment); - assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").getProjectionSegmentByColumnLabel("status")).getColumn().getOriginalDatabase().getValue(), - 
is(DefaultDatabase.LOGIC_NAME)); - assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").getProjectionSegmentByColumnLabel("status")).getColumn().getOriginalSchema().getValue(), - is(DefaultDatabase.LOGIC_NAME)); - assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").getProjectionSegmentByColumnLabel("status")).getColumn().getOriginalTable().getValue(), is("t_order")); - assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").getProjectionSegmentByColumnLabel("status")).getColumn().getOriginalColumn().getValue(), is("status")); + assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("status").get()).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), + is("t_order")); + assertThat(((ColumnProjectionSegment) tableBinderContexts.get("t_order").findProjectionSegmentByColumnLabel("status").get()).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), + is("status")); } @Test @@ -85,20 +99,22 @@ void assertBindWithSchemaForMySQL() { SimpleTableSegment simpleTableSegment = new SimpleTableSegment(new TableNameSegment(0, 10, new IdentifierValue("t_order"))); simpleTableSegment.setOwner(new OwnerSegment(0, 0, new IdentifierValue("sharding_db"))); ShardingSphereMetaData metaData = createMetaData(); - Map tableBinderContexts = new CaseInsensitiveMap<>(); - SimpleTableSegment actual = SimpleTableSegmentBinder.bind(simpleTableSegment, metaData, DefaultDatabase.LOGIC_NAME, databaseType, tableBinderContexts); - assertThat(actual.getTableName().getOriginalDatabase().getValue(), is("sharding_db")); - assertThat(actual.getTableName().getOriginalSchema().getValue(), is("sharding_db")); + Map tableBinderContexts = new LinkedHashMap<>(); + SimpleTableSegment actual = + SimpleTableSegmentBinder.bind(simpleTableSegment, new SQLStatementBinderContext(metaData, DefaultDatabase.LOGIC_NAME, databaseType, Collections.emptySet()), tableBinderContexts); + 
assertThat(actual.getTableName().getTableBoundedInfo().getOriginalDatabase().getValue(), is("sharding_db")); + assertThat(actual.getTableName().getTableBoundedInfo().getOriginalSchema().getValue(), is("sharding_db")); } @Test void assertBindWithoutSchemaForMySQL() { SimpleTableSegment simpleTableSegment = new SimpleTableSegment(new TableNameSegment(0, 10, new IdentifierValue("t_order"))); ShardingSphereMetaData metaData = createMetaData(); - Map tableBinderContexts = new CaseInsensitiveMap<>(); - SimpleTableSegment actual = SimpleTableSegmentBinder.bind(simpleTableSegment, metaData, DefaultDatabase.LOGIC_NAME, databaseType, tableBinderContexts); - assertThat(actual.getTableName().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); - assertThat(actual.getTableName().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + Map tableBinderContexts = new LinkedHashMap<>(); + SimpleTableSegment actual = + SimpleTableSegmentBinder.bind(simpleTableSegment, new SQLStatementBinderContext(metaData, DefaultDatabase.LOGIC_NAME, databaseType, Collections.emptySet()), tableBinderContexts); + assertThat(actual.getTableName().getTableBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(actual.getTableName().getTableBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); } private ShardingSphereMetaData createMetaData() { @@ -115,6 +131,22 @@ private ShardingSphereMetaData createMetaData() { when(result.getDatabase("sharding_db").getSchema("sharding_db")).thenReturn(schema); when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema("public")).thenReturn(schema); when(result.getDatabase("sharding_db").getSchema("test")).thenReturn(schema); + when(result.containsDatabase(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).containsSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + 
when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME).containsTable("t_order")).thenReturn(true); + when(result.containsDatabase("sharding_db")).thenReturn(true); + when(result.getDatabase("sharding_db").containsSchema("sharding_db")).thenReturn(true); + when(result.getDatabase("sharding_db").getSchema("sharding_db").containsTable("t_order")).thenReturn(true); return result; } + + @Test + void assertBindTableNotExists() { + SimpleTableSegment simpleTableSegment = new SimpleTableSegment(new TableNameSegment(0, 10, new IdentifierValue("t_not_exists"))); + ShardingSphereMetaData metaData = createMetaData(); + Map tableBinderContexts = new LinkedHashMap<>(); + assertThrows(TableNotExistsException.class, + () -> SimpleTableSegmentBinder.bind(simpleTableSegment, new SQLStatementBinderContext(metaData, DefaultDatabase.LOGIC_NAME, databaseType, Collections.emptySet()), + tableBinderContexts)); + } } diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SubqueryTableSegmentBinderTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SubqueryTableSegmentBinderTest.java index cb40d44ee8ca0..96620bca69312 100644 --- a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SubqueryTableSegmentBinderTest.java +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/from/impl/SubqueryTableSegmentBinderTest.java @@ -17,8 +17,8 @@ package org.apache.shardingsphere.infra.binder.segment.from.impl; -import org.apache.commons.collections4.map.CaseInsensitiveMap; import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; +import org.apache.shardingsphere.infra.binder.statement.SQLStatementBinderContext; import org.apache.shardingsphere.infra.database.core.DefaultDatabase; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import 
org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; @@ -42,6 +42,8 @@ import java.sql.Types; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -68,8 +70,9 @@ void assertBindWithSubqueryTableAlias() { SubqueryTableSegment subqueryTableSegment = new SubqueryTableSegment(new SubquerySegment(0, 0, selectStatement)); subqueryTableSegment.setAlias(new AliasSegment(0, 0, new IdentifierValue("temp"))); ShardingSphereMetaData metaData = createMetaData(); - Map tableBinderContexts = new CaseInsensitiveMap<>(); - SubqueryTableSegment actual = SubqueryTableSegmentBinder.bind(subqueryTableSegment, metaData, DefaultDatabase.LOGIC_NAME, tableBinderContexts); + Map tableBinderContexts = new LinkedHashMap<>(); + SubqueryTableSegment actual = SubqueryTableSegmentBinder.bind(subqueryTableSegment, new SQLStatementBinderContext(metaData, DefaultDatabase.LOGIC_NAME, databaseType, Collections.emptySet()), + tableBinderContexts, Collections.emptyMap()); assertTrue(actual.getAlias().isPresent()); assertTrue(tableBinderContexts.containsKey("temp")); List projectionSegments = new ArrayList<>(tableBinderContexts.get("temp").getProjectionSegments()); @@ -101,8 +104,9 @@ void assertBindWithSubqueryProjectionAlias() { SubqueryTableSegment subqueryTableSegment = new SubqueryTableSegment(new SubquerySegment(0, 0, selectStatement)); subqueryTableSegment.setAlias(new AliasSegment(0, 0, new IdentifierValue("temp"))); ShardingSphereMetaData metaData = createMetaData(); - Map tableBinderContexts = new CaseInsensitiveMap<>(); - SubqueryTableSegment actual = SubqueryTableSegmentBinder.bind(subqueryTableSegment, metaData, DefaultDatabase.LOGIC_NAME, tableBinderContexts); + Map tableBinderContexts = new LinkedHashMap<>(); + SubqueryTableSegment actual = SubqueryTableSegmentBinder.bind(subqueryTableSegment, new SQLStatementBinderContext(metaData, DefaultDatabase.LOGIC_NAME, 
databaseType, Collections.emptySet()), + tableBinderContexts, Collections.emptyMap()); assertTrue(actual.getAlias().isPresent()); assertTrue(tableBinderContexts.containsKey("temp")); List projectionSegments = new ArrayList<>(tableBinderContexts.get("temp").getProjectionSegments()); @@ -123,8 +127,9 @@ void assertBindWithoutSubqueryTableAlias() { when(selectStatement.getProjections()).thenReturn(projectionsSegment); SubqueryTableSegment subqueryTableSegment = new SubqueryTableSegment(new SubquerySegment(0, 0, selectStatement)); ShardingSphereMetaData metaData = createMetaData(); - Map tableBinderContexts = new CaseInsensitiveMap<>(); - SubqueryTableSegment actual = SubqueryTableSegmentBinder.bind(subqueryTableSegment, metaData, DefaultDatabase.LOGIC_NAME, tableBinderContexts); + Map tableBinderContexts = new LinkedHashMap<>(); + SubqueryTableSegment actual = SubqueryTableSegmentBinder.bind(subqueryTableSegment, new SQLStatementBinderContext(metaData, DefaultDatabase.LOGIC_NAME, databaseType, Collections.emptySet()), + tableBinderContexts, Collections.emptyMap()); assertFalse(actual.getAlias().isPresent()); assertTrue(tableBinderContexts.containsKey("")); } @@ -137,6 +142,9 @@ private ShardingSphereMetaData createMetaData() { new ShardingSphereColumn("status", Types.INTEGER, false, false, false, true, false, false))); ShardingSphereMetaData result = mock(ShardingSphereMetaData.class, RETURNS_DEEP_STUBS); when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(schema); + when(result.containsDatabase(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).containsSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME).containsTable("t_order")).thenReturn(true); return result; } } diff --git 
a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/ShorthandProjectionSegmentBinderTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/ShorthandProjectionSegmentBinderTest.java index ece3353aab642..5a383048215e2 100644 --- a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/ShorthandProjectionSegmentBinderTest.java +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/projection/impl/ShorthandProjectionSegmentBinderTest.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.infra.binder.segment.projection.impl; -import org.apache.commons.collections4.map.CaseInsensitiveMap; +import org.apache.shardingsphere.infra.binder.segment.from.SimpleTableSegmentBinderContext; import org.apache.shardingsphere.infra.binder.segment.from.TableSegmentBinderContext; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; @@ -37,6 +37,7 @@ import java.util.Arrays; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.Map; import static org.hamcrest.CoreMatchers.is; @@ -50,10 +51,10 @@ class ShorthandProjectionSegmentBinderTest { void assertBindWithOwner() { ShorthandProjectionSegment shorthandProjectionSegment = new ShorthandProjectionSegment(0, 0); shorthandProjectionSegment.setOwner(new OwnerSegment(0, 0, new IdentifierValue("o"))); - Map tableBinderContexts = new CaseInsensitiveMap<>(); + Map tableBinderContexts = new LinkedHashMap<>(); ColumnProjectionSegment invisibleColumn = new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("status"))); invisibleColumn.setVisible(false); - tableBinderContexts.put("o", new TableSegmentBinderContext(Arrays.asList(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("order_id"))), 
invisibleColumn))); + tableBinderContexts.put("o", new SimpleTableSegmentBinderContext(Arrays.asList(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("order_id"))), invisibleColumn))); ShorthandProjectionSegment actual = ShorthandProjectionSegmentBinder.bind(shorthandProjectionSegment, mock(TableSegment.class), tableBinderContexts); assertThat(actual.getActualProjectionSegments().size(), is(1)); ProjectionSegment visibleColumn = actual.getActualProjectionSegments().iterator().next(); @@ -63,10 +64,10 @@ void assertBindWithOwner() { @Test void assertBindWithoutOwnerForSimpleTableSegment() { - Map tableBinderContexts = new CaseInsensitiveMap<>(); + Map tableBinderContexts = new LinkedHashMap<>(); ColumnProjectionSegment invisibleColumn = new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("status"))); invisibleColumn.setVisible(false); - tableBinderContexts.put("o", new TableSegmentBinderContext(Arrays.asList(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("order_id"))), invisibleColumn))); + tableBinderContexts.put("o", new SimpleTableSegmentBinderContext(Arrays.asList(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("order_id"))), invisibleColumn))); SimpleTableSegment boundedTableSegment = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_order"))); boundedTableSegment.setAlias(new AliasSegment(0, 0, new IdentifierValue("o"))); ShorthandProjectionSegment actual = ShorthandProjectionSegmentBinder.bind(new ShorthandProjectionSegment(0, 0), boundedTableSegment, tableBinderContexts); @@ -78,10 +79,10 @@ void assertBindWithoutOwnerForSimpleTableSegment() { @Test void assertBindWithoutOwnerForSubqueryTableSegment() { - Map tableBinderContexts = new CaseInsensitiveMap<>(); + Map tableBinderContexts = new LinkedHashMap<>(); ColumnProjectionSegment invisibleColumn = new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("status"))); 
invisibleColumn.setVisible(false); - tableBinderContexts.put("o", new TableSegmentBinderContext(Arrays.asList(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("order_id"))), invisibleColumn))); + tableBinderContexts.put("o", new SimpleTableSegmentBinderContext(Arrays.asList(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("order_id"))), invisibleColumn))); SubqueryTableSegment boundedTableSegment = new SubqueryTableSegment(new SubquerySegment(0, 0, mock(MySQLSelectStatement.class))); boundedTableSegment.setAlias(new AliasSegment(0, 0, new IdentifierValue("o"))); ShorthandProjectionSegment actual = ShorthandProjectionSegmentBinder.bind(new ShorthandProjectionSegment(0, 0), boundedTableSegment, tableBinderContexts); @@ -95,7 +96,7 @@ void assertBindWithoutOwnerForSubqueryTableSegment() { void assertBindWithoutOwnerForJoinTableSegment() { ShorthandProjectionSegment shorthandProjectionSegment = new ShorthandProjectionSegment(0, 0); JoinTableSegment boundedTableSegment = new JoinTableSegment(); - boundedTableSegment.getJoinTableProjectionSegments().add(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("order_id")))); + boundedTableSegment.getDerivedJoinTableProjectionSegments().add(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("order_id")))); ShorthandProjectionSegment actual = ShorthandProjectionSegmentBinder.bind(shorthandProjectionSegment, boundedTableSegment, Collections.emptyMap()); assertThat(actual.getActualProjectionSegments().size(), is(1)); ProjectionSegment visibleColumn = actual.getActualProjectionSegments().iterator().next(); diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/select/projection/util/ProjectionUtilsTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/select/projection/util/ProjectionUtilsTest.java new file mode 100644 index 0000000000000..09f23b6432848 --- /dev/null +++ 
b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/segment/select/projection/util/ProjectionUtilsTest.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.binder.segment.select.projection.util; + +import org.apache.shardingsphere.infra.binder.context.segment.select.projection.util.ProjectionUtils; +import org.apache.shardingsphere.infra.database.core.metadata.database.enums.QuoteCharacter; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.database.postgresql.type.PostgreSQLDatabaseType; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; +import org.junit.jupiter.api.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +class ProjectionUtilsTest { + + private final IdentifierValue alias = new IdentifierValue("Data", QuoteCharacter.NONE); + + @Test + void assertGetColumnLabelFromAlias() { + assertThat(ProjectionUtils.getColumnLabelFromAlias(new IdentifierValue("Data", QuoteCharacter.QUOTE), new PostgreSQLDatabaseType()), 
is("Data")); + assertThat(ProjectionUtils.getColumnLabelFromAlias(alias, TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")), is("data")); + assertThat(ProjectionUtils.getColumnLabelFromAlias(alias, TypedSPILoader.getService(DatabaseType.class, "openGauss")), is("data")); + assertThat(ProjectionUtils.getColumnLabelFromAlias(alias, TypedSPILoader.getService(DatabaseType.class, "Oracle")), is("DATA")); + assertThat(ProjectionUtils.getColumnLabelFromAlias(alias, TypedSPILoader.getService(DatabaseType.class, "MySQL")), is("Data")); + } + + @Test + void assertGetColumnNameFromFunction() { + String functionName = "Function"; + String functionExpression = "FunctionExpression"; + assertThat(ProjectionUtils.getColumnNameFromFunction(functionName, functionExpression, TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")), is("function")); + assertThat(ProjectionUtils.getColumnNameFromFunction(functionName, functionExpression, TypedSPILoader.getService(DatabaseType.class, "openGauss")), is("function")); + assertThat(ProjectionUtils.getColumnNameFromFunction(functionName, functionExpression, TypedSPILoader.getService(DatabaseType.class, "Oracle")), is("FUNCTIONEXPRESSION")); + assertThat(ProjectionUtils.getColumnNameFromFunction(functionName, functionExpression, TypedSPILoader.getService(DatabaseType.class, "MySQL")), is("FunctionExpression")); + } +} diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/DeleteStatementBinderTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/DeleteStatementBinderTest.java new file mode 100644 index 0000000000000..986ba26090c42 --- /dev/null +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/DeleteStatementBinderTest.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.binder.statement; + +import org.apache.shardingsphere.infra.binder.statement.dml.DeleteStatementBinder; +import org.apache.shardingsphere.infra.database.core.DefaultDatabase; +import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableNameSegment; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.DeleteStatement; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; +import 
org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLDeleteStatement; +import org.junit.jupiter.api.Test; + +import java.sql.Types; +import java.util.Arrays; + +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +class DeleteStatementBinderTest { + + @Test + void assertBind() { + DeleteStatement deleteStatement = new MySQLDeleteStatement(); + SimpleTableSegment simpleTableSegment = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_order"))); + deleteStatement.setTable(simpleTableSegment); + deleteStatement.setWhere(new WhereSegment(0, 0, new BinaryOperationExpression(0, 0, new ColumnSegment(0, 0, new IdentifierValue("status")), + new LiteralExpressionSegment(0, 0, 0), "=", "status = 1"))); + DeleteStatement actual = new DeleteStatementBinder().bind(deleteStatement, createMetaData(), DefaultDatabase.LOGIC_NAME); + assertThat(actual, not(deleteStatement)); + assertThat(actual.getTable(), not(deleteStatement.getTable())); + assertThat(actual.getTable(), instanceOf(SimpleTableSegment.class)); + assertTrue(actual.getWhere().isPresent()); + assertThat(actual.getWhere().get(), not(deleteStatement.getWhere())); + assertThat(actual.getWhere().get(), instanceOf(WhereSegment.class)); + assertTrue(deleteStatement.getWhere().isPresent()); + assertThat(actual.getWhere().get().getExpr(), not(deleteStatement.getWhere().get().getExpr())); + assertThat(actual.getWhere().get().getExpr(), instanceOf(BinaryOperationExpression.class)); + assertThat(((BinaryOperationExpression) actual.getWhere().get().getExpr()).getLeft(), instanceOf(ColumnSegment.class)); + assertThat(((ColumnSegment) 
((BinaryOperationExpression) actual.getWhere().get().getExpr()).getLeft()).getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + } + + private ShardingSphereMetaData createMetaData() { + ShardingSphereSchema schema = mock(ShardingSphereSchema.class, RETURNS_DEEP_STUBS); + when(schema.getTable("t_order").getColumnValues()).thenReturn(Arrays.asList( + new ShardingSphereColumn("order_id", Types.INTEGER, true, false, false, true, false, false), + new ShardingSphereColumn("user_id", Types.INTEGER, false, false, false, true, false, false), + new ShardingSphereColumn("status", Types.INTEGER, false, false, false, true, false, false))); + ShardingSphereMetaData result = mock(ShardingSphereMetaData.class, RETURNS_DEEP_STUBS); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(schema); + when(result.containsDatabase(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).containsSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME).containsTable("t_order")).thenReturn(true); + return result; + } +} diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/InsertStatementBinderTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/InsertStatementBinderTest.java new file mode 100644 index 0000000000000..035cb63b2b3d2 --- /dev/null +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/InsertStatementBinderTest.java @@ -0,0 +1,175 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.binder.statement; + +import org.apache.shardingsphere.infra.binder.statement.dml.InsertStatementBinder; +import org.apache.shardingsphere.infra.database.core.DefaultDatabase; +import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.InsertValuesSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.InsertColumnsSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ColumnProjectionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionsSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; +import 
org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableNameSegment; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLInsertStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLSelectStatement; +import org.junit.jupiter.api.Test; + +import java.sql.Types; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; + +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +class InsertStatementBinderTest { + + @Test + void assertBindInsertValues() { + InsertStatement insertStatement = new MySQLInsertStatement(); + insertStatement.setTable(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_order")))); + insertStatement.setInsertColumns(new InsertColumnsSegment(0, 0, Arrays.asList(new ColumnSegment(0, 0, new IdentifierValue("order_id")), + new ColumnSegment(0, 0, new IdentifierValue("user_id")), new ColumnSegment(0, 0, new IdentifierValue("status"))))); + insertStatement.getValues().add(new InsertValuesSegment(0, 0, Arrays.asList(new LiteralExpressionSegment(0, 0, 1), + new LiteralExpressionSegment(0, 0, 1), new LiteralExpressionSegment(0, 0, "OK")))); + InsertStatement actual = new InsertStatementBinder().bind(insertStatement, createMetaData(), DefaultDatabase.LOGIC_NAME); + assertThat(actual, not(insertStatement)); + assertThat(actual.getTable().getTableName(), not(insertStatement.getTable().getTableName())); + 
assertTrue(actual.getInsertColumns().isPresent()); + assertInsertColumns(actual.getInsertColumns().get().getColumns()); + } + + private static void assertInsertColumns(final Collection insertColumns) { + assertThat(insertColumns.size(), is(3)); + Iterator iterator = insertColumns.iterator(); + ColumnSegment orderIdColumnSegment = iterator.next(); + assertThat(orderIdColumnSegment.getColumnBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(orderIdColumnSegment.getColumnBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(orderIdColumnSegment.getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + assertThat(orderIdColumnSegment.getColumnBoundedInfo().getOriginalColumn().getValue(), is("order_id")); + ColumnSegment userIdColumnSegment = iterator.next(); + assertThat(userIdColumnSegment.getColumnBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(userIdColumnSegment.getColumnBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(userIdColumnSegment.getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + assertThat(userIdColumnSegment.getColumnBoundedInfo().getOriginalColumn().getValue(), is("user_id")); + ColumnSegment statusColumnSegment = iterator.next(); + assertThat(statusColumnSegment.getColumnBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(statusColumnSegment.getColumnBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(statusColumnSegment.getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + assertThat(statusColumnSegment.getColumnBoundedInfo().getOriginalColumn().getValue(), is("status")); + } + + @Test + void assertBindInsertSelectWithColumns() { + InsertStatement insertStatement = new MySQLInsertStatement(); + insertStatement.setTable(new SimpleTableSegment(new 
TableNameSegment(0, 0, new IdentifierValue("t_order")))); + insertStatement.setInsertColumns(new InsertColumnsSegment(0, 0, Arrays.asList(new ColumnSegment(0, 0, new IdentifierValue("order_id")), + new ColumnSegment(0, 0, new IdentifierValue("user_id")), new ColumnSegment(0, 0, new IdentifierValue("status"))))); + MySQLSelectStatement subSelectStatement = new MySQLSelectStatement(); + subSelectStatement.setFrom(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_order")))); + ProjectionsSegment projections = new ProjectionsSegment(0, 0); + projections.getProjections().add(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("order_id")))); + projections.getProjections().add(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("user_id")))); + projections.getProjections().add(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("status")))); + subSelectStatement.setProjections(projections); + insertStatement.setInsertSelect(new SubquerySegment(0, 0, subSelectStatement)); + insertStatement.getValues().add(new InsertValuesSegment(0, 0, Arrays.asList(new LiteralExpressionSegment(0, 0, 1), + new LiteralExpressionSegment(0, 0, 1), new LiteralExpressionSegment(0, 0, "OK")))); + InsertStatement actual = new InsertStatementBinder().bind(insertStatement, createMetaData(), DefaultDatabase.LOGIC_NAME); + assertThat(actual, not(insertStatement)); + assertThat(actual.getTable().getTableName(), not(insertStatement.getTable().getTableName())); + assertTrue(actual.getInsertColumns().isPresent()); + assertInsertColumns(actual.getInsertColumns().get().getColumns()); + assertInsertSelect(actual); + } + + @Test + void assertBindInsertSelectWithoutColumns() { + InsertStatement insertStatement = new MySQLInsertStatement(); + insertStatement.setTable(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_order")))); + MySQLSelectStatement subSelectStatement = new MySQLSelectStatement(); + 
subSelectStatement.setFrom(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_order")))); + ProjectionsSegment projections = new ProjectionsSegment(0, 0); + projections.getProjections().add(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("order_id")))); + projections.getProjections().add(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("user_id")))); + projections.getProjections().add(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("status")))); + subSelectStatement.setProjections(projections); + insertStatement.setInsertSelect(new SubquerySegment(0, 0, subSelectStatement)); + insertStatement.getValues().add(new InsertValuesSegment(0, 0, Arrays.asList(new LiteralExpressionSegment(0, 0, 1), + new LiteralExpressionSegment(0, 0, 1), new LiteralExpressionSegment(0, 0, "OK")))); + InsertStatement actual = new InsertStatementBinder().bind(insertStatement, createMetaData(), DefaultDatabase.LOGIC_NAME); + assertThat(actual, not(insertStatement)); + assertThat(actual.getTable().getTableName(), not(insertStatement.getTable().getTableName())); + assertInsertColumns(actual.getDerivedInsertColumns()); + assertInsertSelect(actual); + } + + private static void assertInsertSelect(final InsertStatement actual) { + assertTrue(actual.getInsertSelect().isPresent()); + Collection actualProjections = actual.getInsertSelect().get().getSelect().getProjections().getProjections(); + assertThat(actualProjections.size(), is(3)); + Iterator projectionIterator = actualProjections.iterator(); + ProjectionSegment orderIdProjectionSegment = projectionIterator.next(); + assertThat(orderIdProjectionSegment, instanceOf(ColumnProjectionSegment.class)); + assertThat(((ColumnProjectionSegment) orderIdProjectionSegment).getColumn().getColumnBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((ColumnProjectionSegment) 
orderIdProjectionSegment).getColumn().getColumnBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((ColumnProjectionSegment) orderIdProjectionSegment).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + assertThat(((ColumnProjectionSegment) orderIdProjectionSegment).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("order_id")); + ProjectionSegment userIdProjectionSegment = projectionIterator.next(); + assertThat(userIdProjectionSegment, instanceOf(ColumnProjectionSegment.class)); + assertThat(((ColumnProjectionSegment) userIdProjectionSegment).getColumn().getColumnBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((ColumnProjectionSegment) userIdProjectionSegment).getColumn().getColumnBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((ColumnProjectionSegment) userIdProjectionSegment).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + assertThat(((ColumnProjectionSegment) userIdProjectionSegment).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("user_id")); + ProjectionSegment statusProjectionSegment = projectionIterator.next(); + assertThat(statusProjectionSegment, instanceOf(ColumnProjectionSegment.class)); + assertThat(((ColumnProjectionSegment) statusProjectionSegment).getColumn().getColumnBoundedInfo().getOriginalDatabase().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((ColumnProjectionSegment) statusProjectionSegment).getColumn().getColumnBoundedInfo().getOriginalSchema().getValue(), is(DefaultDatabase.LOGIC_NAME)); + assertThat(((ColumnProjectionSegment) statusProjectionSegment).getColumn().getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + assertThat(((ColumnProjectionSegment) statusProjectionSegment).getColumn().getColumnBoundedInfo().getOriginalColumn().getValue(), is("status")); + } + + 
private ShardingSphereMetaData createMetaData() { + ShardingSphereSchema schema = mock(ShardingSphereSchema.class, RETURNS_DEEP_STUBS); + when(schema.getTable("t_order").getColumnValues()).thenReturn(Arrays.asList( + new ShardingSphereColumn("order_id", Types.INTEGER, true, false, false, true, false, false), + new ShardingSphereColumn("user_id", Types.INTEGER, false, false, false, true, false, false), + new ShardingSphereColumn("status", Types.INTEGER, false, false, false, true, false, false))); + ShardingSphereMetaData result = mock(ShardingSphereMetaData.class, RETURNS_DEEP_STUBS); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(schema); + when(result.containsDatabase(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).containsSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME).containsTable("t_order")).thenReturn(true); + return result; + } +} diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/MergeStatementBinderTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/MergeStatementBinderTest.java new file mode 100644 index 0000000000000..e5671c97ad2d2 --- /dev/null +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/MergeStatementBinderTest.java @@ -0,0 +1,142 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.binder.statement; + +import org.apache.shardingsphere.infra.binder.statement.dml.MergeStatementBinder; +import org.apache.shardingsphere.infra.database.core.DefaultDatabase; +import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.ColumnAssignmentSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.SetAssignmentSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ExpressionProjectionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionsSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.AliasSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerSegment; +import 
org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SubqueryTableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableNameSegment; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.MergeStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.UpdateStatement; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml.OracleMergeStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml.OracleSelectStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml.OracleUpdateStatement; +import org.junit.jupiter.api.Test; + +import java.sql.Types; +import java.util.Arrays; +import java.util.Collections; + +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +class MergeStatementBinderTest { + + @Test + void assertBind() { + MergeStatement mergeStatement = new OracleMergeStatement(); + SimpleTableSegment targetTable = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_order"))); + targetTable.setAlias(new AliasSegment(0, 0, new IdentifierValue("a"))); + mergeStatement.setTarget(targetTable); + SimpleTableSegment sourceTable = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_order_item"))); + sourceTable.setAlias(new AliasSegment(0, 0, new IdentifierValue("b"))); + mergeStatement.setSource(sourceTable); + mergeStatement.setExpr(new BinaryOperationExpression(0, 0, new ColumnSegment(0, 0, new 
IdentifierValue("id")), + new ColumnSegment(0, 0, new IdentifierValue("order_id")), "=", "id = order_id")); + UpdateStatement updateStatement = new OracleUpdateStatement(); + updateStatement.setTable(targetTable); + ColumnSegment targetTableColumn = new ColumnSegment(0, 0, new IdentifierValue("status")); + targetTableColumn.setOwner(new OwnerSegment(0, 0, new IdentifierValue("a"))); + ColumnSegment sourceTableColumn = new ColumnSegment(0, 0, new IdentifierValue("status")); + sourceTableColumn.setOwner(new OwnerSegment(0, 0, new IdentifierValue("b"))); + SetAssignmentSegment setAssignmentSegment = new SetAssignmentSegment(0, 0, + Collections.singletonList(new ColumnAssignmentSegment(0, 0, Collections.singletonList(targetTableColumn), sourceTableColumn))); + updateStatement.setSetAssignment(setAssignmentSegment); + updateStatement.setWhere(new WhereSegment(0, 0, new BinaryOperationExpression(0, 0, new ColumnSegment(0, 0, new IdentifierValue("item_id")), + new LiteralExpressionSegment(0, 0, 1), "=", "item_id = 1"))); + mergeStatement.setUpdate(updateStatement); + MergeStatement actual = new MergeStatementBinder().bind(mergeStatement, createMetaData(), DefaultDatabase.LOGIC_NAME); + assertThat(actual, not(mergeStatement)); + assertThat(actual.getSource(), not(mergeStatement.getSource())); + assertThat(actual.getSource(), instanceOf(SimpleTableSegment.class)); + assertThat(actual.getTarget(), not(mergeStatement.getTarget())); + assertThat(actual.getTarget(), instanceOf(SimpleTableSegment.class)); + assertThat(actual.getUpdate(), not(mergeStatement.getUpdate())); + assertThat(actual.getUpdate().getSetAssignment().getAssignments().iterator().next().getValue(), instanceOf(ColumnSegment.class)); + assertThat(((ColumnSegment) actual.getUpdate().getSetAssignment().getAssignments().iterator().next().getValue()).getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order_item")); + } + + private ShardingSphereMetaData createMetaData() { + ShardingSphereSchema schema = 
mock(ShardingSphereSchema.class, RETURNS_DEEP_STUBS); + when(schema.getTable("t_order").getColumnValues()).thenReturn(Arrays.asList( + new ShardingSphereColumn("id", Types.INTEGER, true, false, false, true, false, false), + new ShardingSphereColumn("user_id", Types.INTEGER, false, false, false, true, false, false), + new ShardingSphereColumn("status", Types.INTEGER, false, false, false, true, false, false))); + when(schema.getTable("t_order_item").getColumnValues()).thenReturn(Arrays.asList( + new ShardingSphereColumn("item_id", Types.INTEGER, true, false, false, true, false, false), + new ShardingSphereColumn("order_id", Types.INTEGER, false, false, false, true, false, false), + new ShardingSphereColumn("status", Types.INTEGER, false, false, false, true, false, false))); + ShardingSphereMetaData result = mock(ShardingSphereMetaData.class, RETURNS_DEEP_STUBS); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(schema); + when(result.containsDatabase(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).containsSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME).containsTable("t_order")).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME).containsTable("t_order_item")).thenReturn(true); + return result; + } + + @Test + void assertBindWithSubQuery() { + MergeStatement mergeStatement = new OracleMergeStatement(); + SimpleTableSegment targetTable = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_order"))); + targetTable.setAlias(new AliasSegment(0, 0, new IdentifierValue("a"))); + mergeStatement.setTarget(targetTable); + ProjectionsSegment projectionsSegment = new ProjectionsSegment(0, 0); + ExpressionProjectionSegment expressionProjectionSegment = new ExpressionProjectionSegment(0, 0, "status + 
1", new BinaryOperationExpression(0, 0, + new ColumnSegment(0, 0, new IdentifierValue("status")), new LiteralExpressionSegment(0, 0, 1), "+", "status + 1")); + expressionProjectionSegment.setAlias(new AliasSegment(0, 0, new IdentifierValue("new_status"))); + projectionsSegment.getProjections().add(expressionProjectionSegment); + OracleSelectStatement oracleSelectStatement = new OracleSelectStatement(); + oracleSelectStatement.setProjections(projectionsSegment); + oracleSelectStatement.setFrom(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_order_item")))); + SubqueryTableSegment subqueryTableSegment = new SubqueryTableSegment(new SubquerySegment(0, 0, oracleSelectStatement)); + subqueryTableSegment.setAlias(new AliasSegment(0, 0, new IdentifierValue("b"))); + mergeStatement.setSource(subqueryTableSegment); + UpdateStatement updateStatement = new OracleUpdateStatement(); + ColumnSegment targetTableColumn = new ColumnSegment(0, 0, new IdentifierValue("status")); + targetTableColumn.setOwner(new OwnerSegment(0, 0, new IdentifierValue("a"))); + ColumnSegment sourceTableColumn = new ColumnSegment(0, 0, new IdentifierValue("new_status")); + SetAssignmentSegment setAssignmentSegment = new SetAssignmentSegment(0, 0, + Collections.singletonList(new ColumnAssignmentSegment(0, 0, Collections.singletonList(targetTableColumn), sourceTableColumn))); + updateStatement.setSetAssignment(setAssignmentSegment); + mergeStatement.setUpdate(updateStatement); + MergeStatement actual = new MergeStatementBinder().bind(mergeStatement, createMetaData(), DefaultDatabase.LOGIC_NAME); + assertThat(actual, not(mergeStatement)); + } +} diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/SelectStatementBinderTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/SelectStatementBinderTest.java index 3318f3af2f237..4e2cc0242aae3 100644 --- 
a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/SelectStatementBinderTest.java +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/SelectStatementBinderTest.java @@ -23,9 +23,13 @@ import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.FunctionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ColumnProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionsSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableNameSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement; @@ -39,8 +43,10 @@ import java.util.List; import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -60,6 +66,7 @@ void assertBind() { projections.getProjections().add(statusProjection); SimpleTableSegment 
simpleTableSegment = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_order"))); selectStatement.setFrom(simpleTableSegment); + selectStatement.setWhere(mockWhereSegment()); SelectStatement actual = new SelectStatementBinder().bind(selectStatement, createMetaData(), DefaultDatabase.LOGIC_NAME); assertThat(actual, not(selectStatement)); assertThat(actual.getFrom(), not(selectStatement.getFrom())); @@ -77,6 +84,23 @@ void assertBind() { assertThat(actualProjections.get(2), not(statusProjection)); assertThat(actualProjections.get(2), instanceOf(ColumnProjectionSegment.class)); assertThat(((ColumnProjectionSegment) actualProjections.get(2)).getColumn(), not(statusProjection.getColumn())); + assertTrue(actual.getWhere().isPresent()); + assertThat(actual.getWhere().get(), not(selectStatement.getWhere())); + assertThat(actual.getWhere().get(), instanceOf(WhereSegment.class)); + assertTrue(selectStatement.getWhere().isPresent()); + assertThat(actual.getWhere().get().getExpr(), not(selectStatement.getWhere().get().getExpr())); + assertThat(actual.getWhere().get().getExpr(), instanceOf(BinaryOperationExpression.class)); + assertThat(((BinaryOperationExpression) actual.getWhere().get().getExpr()).getLeft(), instanceOf(FunctionSegment.class)); + assertThat(((FunctionSegment) ((BinaryOperationExpression) actual.getWhere().get().getExpr()).getLeft()).getParameters().iterator().next(), instanceOf(ColumnSegment.class)); + assertThat(((ColumnSegment) ((FunctionSegment) ((BinaryOperationExpression) actual.getWhere().get().getExpr()).getLeft()).getParameters().iterator().next()) + .getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + } + + private static WhereSegment mockWhereSegment() { + FunctionSegment functionSegment = new FunctionSegment(0, 0, "nvl", "nvl(status, 0)"); + functionSegment.getParameters().add(new ColumnSegment(0, 0, new IdentifierValue("status"))); + functionSegment.getParameters().add(new LiteralExpressionSegment(0, 0, 
0)); + return new WhereSegment(0, 0, new BinaryOperationExpression(0, 0, functionSegment, new LiteralExpressionSegment(0, 0, 0), "=", "nvl(status, 0) = 0")); } private ShardingSphereMetaData createMetaData() { @@ -87,6 +111,9 @@ private ShardingSphereMetaData createMetaData() { new ShardingSphereColumn("status", Types.INTEGER, false, false, false, true, false, false))); ShardingSphereMetaData result = mock(ShardingSphereMetaData.class, RETURNS_DEEP_STUBS); when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(schema); + when(result.containsDatabase(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).containsSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME).containsTable("t_order")).thenReturn(true); return result; } } diff --git a/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/UpdateStatementBinderTest.java b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/UpdateStatementBinderTest.java new file mode 100644 index 0000000000000..234057d8b2db6 --- /dev/null +++ b/infra/binder/src/test/java/org/apache/shardingsphere/infra/binder/statement/UpdateStatementBinderTest.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.binder.statement; + +import org.apache.shardingsphere.infra.binder.statement.dml.UpdateStatementBinder; +import org.apache.shardingsphere.infra.database.core.DefaultDatabase; +import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableNameSegment; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.UpdateStatement; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLUpdateStatement; +import org.junit.jupiter.api.Test; + +import java.sql.Types; +import java.util.Arrays; + +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.not; 
+import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +class UpdateStatementBinderTest { + + @Test + void assertBind() { + UpdateStatement updateStatement = new MySQLUpdateStatement(); + SimpleTableSegment simpleTableSegment = new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_order"))); + updateStatement.setTable(simpleTableSegment); + updateStatement.setWhere(new WhereSegment(0, 0, new BinaryOperationExpression(0, 0, new ColumnSegment(0, 0, new IdentifierValue("status")), + new LiteralExpressionSegment(0, 0, 0), "=", "status = 1"))); + UpdateStatement actual = new UpdateStatementBinder().bind(updateStatement, createMetaData(), DefaultDatabase.LOGIC_NAME); + assertThat(actual, not(updateStatement)); + assertThat(actual.getTable(), not(updateStatement.getTable())); + assertThat(actual.getTable(), instanceOf(SimpleTableSegment.class)); + assertTrue(actual.getWhere().isPresent()); + assertThat(actual.getWhere().get(), not(updateStatement.getWhere())); + assertThat(actual.getWhere().get(), instanceOf(WhereSegment.class)); + assertTrue(updateStatement.getWhere().isPresent()); + assertThat(actual.getWhere().get().getExpr(), not(updateStatement.getWhere().get().getExpr())); + assertThat(actual.getWhere().get().getExpr(), instanceOf(BinaryOperationExpression.class)); + assertThat(((BinaryOperationExpression) actual.getWhere().get().getExpr()).getLeft(), instanceOf(ColumnSegment.class)); + assertThat(((ColumnSegment) ((BinaryOperationExpression) actual.getWhere().get().getExpr()).getLeft()).getColumnBoundedInfo().getOriginalTable().getValue(), is("t_order")); + } + + private ShardingSphereMetaData createMetaData() { + ShardingSphereSchema schema = mock(ShardingSphereSchema.class, RETURNS_DEEP_STUBS); + 
when(schema.getTable("t_order").getColumnValues()).thenReturn(Arrays.asList( + new ShardingSphereColumn("order_id", Types.INTEGER, true, false, false, true, false, false), + new ShardingSphereColumn("user_id", Types.INTEGER, false, false, false, true, false, false), + new ShardingSphereColumn("status", Types.INTEGER, false, false, false, true, false, false))); + ShardingSphereMetaData result = mock(ShardingSphereMetaData.class, RETURNS_DEEP_STUBS); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(schema); + when(result.containsDatabase(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).containsSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(result.getDatabase(DefaultDatabase.LOGIC_NAME).getSchema(DefaultDatabase.LOGIC_NAME).containsTable("t_order")).thenReturn(true); + return result; + } +} diff --git a/infra/common/pom.xml b/infra/common/pom.xml index 1d13c2b57027f..95e8d9bc3811c 100644 --- a/infra/common/pom.xml +++ b/infra/common/pom.xml @@ -80,25 +80,19 @@ org.apache.shardingsphere - shardingsphere-infra-datasource-c3p0 + shardingsphere-infra-data-source-pool-hikari ${project.version} org.apache.shardingsphere - shardingsphere-infra-datasource-dbcp + shardingsphere-infra-data-source-pool-dbcp ${project.version} org.apache.shardingsphere - shardingsphere-infra-datasource-druid + shardingsphere-infra-data-source-pool-c3p0 ${project.version} - - org.apache.shardingsphere - shardingsphere-infra-datasource-hikari - ${project.version} - - org.apache.shardingsphere shardingsphere-parser-sql-engine diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/DatabaseConfiguration.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/DatabaseConfiguration.java index 769029efe1a6c..2bdb5fbd8124b 100644 --- 
a/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/DatabaseConfiguration.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/DatabaseConfiguration.java @@ -18,8 +18,8 @@ package org.apache.shardingsphere.infra.config.database; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.storage.StorageResource; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResource; import javax.sql.DataSource; import java.util.Collection; @@ -52,9 +52,9 @@ public interface DatabaseConfiguration { StorageResource getStorageResource(); /** - * Get data source props map. + * Get data source pool properties map. * - * @return data source props map + * @return data source pool properties map */ - Map getDataSourcePropsMap(); + Map getDataSourcePoolPropertiesMap(); } diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfiguration.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfiguration.java index 1273aafadc1c7..903f30e5cdd7a 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfiguration.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfiguration.java @@ -20,15 +20,18 @@ import lombok.Getter; import org.apache.shardingsphere.infra.config.database.DatabaseConfiguration; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.config.DataSourceConfiguration; -import 
org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; -import org.apache.shardingsphere.infra.datasource.storage.StorageResource; +import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResource; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResourceCreator; import javax.sql.DataSource; import java.util.Collection; +import java.util.LinkedHashMap; import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Collectors; /** * Data source generated database configuration. 
@@ -40,12 +43,13 @@ public final class DataSourceGeneratedDatabaseConfiguration implements DatabaseC private final Collection ruleConfigurations; - private final Map dataSourcePropsMap; + private final Map dataSourcePoolPropertiesMap; public DataSourceGeneratedDatabaseConfiguration(final Map dataSourceConfigs, final Collection ruleConfigs) { ruleConfigurations = ruleConfigs; - dataSourcePropsMap = DataSourcePropertiesCreator.createFromConfiguration(dataSourceConfigs); - this.storageResource = DataSourcePoolCreator.createStorageResource(dataSourcePropsMap); + dataSourcePoolPropertiesMap = dataSourceConfigs.entrySet().stream() + .collect(Collectors.toMap(Entry::getKey, entry -> DataSourcePoolPropertiesCreator.create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); + storageResource = StorageResourceCreator.createStorageResource(dataSourcePoolPropertiesMap); } @Override diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfiguration.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfiguration.java index 63a44b1552ed9..e16db003755b6 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfiguration.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfiguration.java @@ -18,12 +18,13 @@ package org.apache.shardingsphere.infra.config.database.impl; import lombok.Getter; +import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.infra.config.database.DatabaseConfiguration; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; -import 
org.apache.shardingsphere.infra.datasource.storage.StorageResource; -import org.apache.shardingsphere.infra.datasource.storage.StorageUtils; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResource; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResourceUtils; import javax.sql.DataSource; import java.util.Collection; @@ -35,6 +36,7 @@ /** * Data source provided database configuration. */ +@RequiredArgsConstructor @Getter public final class DataSourceProvidedDatabaseConfiguration implements DatabaseConfiguration { @@ -42,24 +44,17 @@ public final class DataSourceProvidedDatabaseConfiguration implements DatabaseCo private final Collection ruleConfigurations; - private final Map dataSourcePropsMap; + private final Map dataSourcePoolPropertiesMap; - public DataSourceProvidedDatabaseConfiguration(final Map dataSources, final Collection ruleConfigurations) { - this.ruleConfigurations = ruleConfigurations; - this.storageResource = new StorageResource(dataSources, StorageUtils.getStorageUnits(dataSources)); - dataSourcePropsMap = createDataSourcePropertiesMap(dataSources); + public DataSourceProvidedDatabaseConfiguration(final Map dataSources, final Collection ruleConfigs) { + this.ruleConfigurations = ruleConfigs; + this.storageResource = new StorageResource(StorageResourceUtils.getStorageNodeDataSources(dataSources), StorageResourceUtils.getStorageUnitNodeMappers(dataSources)); + dataSourcePoolPropertiesMap = createDataSourcePoolPropertiesMap(dataSources); } - public DataSourceProvidedDatabaseConfiguration(final StorageResource storageResource, final Collection ruleConfigurations, - final Map dataSourcePropsMap) { - this.ruleConfigurations = ruleConfigurations; - this.storageResource = storageResource; - 
this.dataSourcePropsMap = dataSourcePropsMap; - } - - private Map createDataSourcePropertiesMap(final Map dataSources) { + private Map createDataSourcePoolPropertiesMap(final Map dataSources) { return dataSources.entrySet().stream().collect(Collectors - .toMap(Entry::getKey, entry -> DataSourcePropertiesCreator.create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); + .toMap(Entry::getKey, entry -> DataSourcePoolPropertiesCreator.create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); } @Override diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/AmbiguousColumnException.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/AmbiguousColumnException.java index fa874525cb94e..a54ce623447eb 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/AmbiguousColumnException.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/AmbiguousColumnException.java @@ -27,7 +27,7 @@ public final class AmbiguousColumnException extends MetaDataSQLException { private static final long serialVersionUID = -9002743483594729164L; - public AmbiguousColumnException(final String columnName) { - super(XOpenSQLState.GENERAL_ERROR, 5, "Column '%s' in field list is ambiguous.", columnName); + public AmbiguousColumnException(final String columnExpression, final String segmentTypeMessage) { + super(XOpenSQLState.GENERAL_ERROR, 5, "Column '%s' in %s is ambiguous.", columnExpression, segmentTypeMessage); } } diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/exception/transaction/JDBCTransactionAcrossDatabasesException.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/TableNotExistsException.java similarity index 64% rename from jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/exception/transaction/JDBCTransactionAcrossDatabasesException.java rename to 
infra/common/src/main/java/org/apache/shardingsphere/infra/exception/TableNotExistsException.java index 28138ac20a1f8..28141de15fb5e 100644 --- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/exception/transaction/JDBCTransactionAcrossDatabasesException.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/TableNotExistsException.java @@ -15,19 +15,19 @@ * limitations under the License. */ -package org.apache.shardingsphere.driver.jdbc.exception.transaction; +package org.apache.shardingsphere.infra.exception; import org.apache.shardingsphere.infra.exception.core.external.sql.sqlstate.XOpenSQLState; -import org.apache.shardingsphere.infra.exception.core.external.sql.type.kernel.category.TransactionSQLException; +import org.apache.shardingsphere.infra.exception.core.external.sql.type.kernel.category.MetaDataSQLException; /** - * JDBC transaction across databases exception. + * Table not exists exception. */ -public final class JDBCTransactionAcrossDatabasesException extends TransactionSQLException { +public final class TableNotExistsException extends MetaDataSQLException { - private static final long serialVersionUID = 3294968323117604702L; + private static final long serialVersionUID = -2507596759730534895L; - public JDBCTransactionAcrossDatabasesException() { - super(XOpenSQLState.INVALID_TRANSACTION_STATE, 100, "JDBC does not support operations across multiple logical databases in transaction."); + public TableNotExistsException(final String tableName) { + super(XOpenSQLState.NOT_FOUND, 7, "Table or view `%s` does not exist.", tableName); } } diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/UnknownColumnException.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/UnknownColumnException.java index 262a21067fe51..6df57bfa84acf 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/UnknownColumnException.java +++ 
b/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/UnknownColumnException.java @@ -27,7 +27,7 @@ public final class UnknownColumnException extends MetaDataSQLException { private static final long serialVersionUID = -1305402273592303335L; - public UnknownColumnException(final String columnName) { - super(XOpenSQLState.NOT_FOUND, 6, "Unknown column '%s' in 'field list'.", columnName); + public UnknownColumnException(final String columnExpression, final String segmentTypeMessage) { + super(XOpenSQLState.NOT_FOUND, 6, "Unknown column '%s' in '%s'.", columnExpression, segmentTypeMessage); } } diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/hint/SQLHintUtils.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/hint/SQLHintUtils.java index c241b47f9a0ff..452e967ef9963 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/hint/SQLHintUtils.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/hint/SQLHintUtils.java @@ -104,26 +104,26 @@ public static Optional extractHint(final String sql) { } HintValueContext result = new HintValueContext(); String hintText = sql.substring(0, sql.indexOf(SQL_COMMENT_SUFFIX) + 2); - Properties hintProperties = SQLHintUtils.getSQLHintProps(hintText); - if (containsPropertyKey(hintProperties, SQLHintPropertiesKey.DATASOURCE_NAME_KEY)) { - result.setDataSourceName(getProperty(hintProperties, SQLHintPropertiesKey.DATASOURCE_NAME_KEY)); + Properties hintProps = SQLHintUtils.getSQLHintProps(hintText); + if (containsPropertyKey(hintProps, SQLHintPropertiesKey.DATASOURCE_NAME_KEY)) { + result.setDataSourceName(getProperty(hintProps, SQLHintPropertiesKey.DATASOURCE_NAME_KEY)); } - if (containsPropertyKey(hintProperties, SQLHintPropertiesKey.WRITE_ROUTE_ONLY_KEY)) { - result.setWriteRouteOnly(Boolean.parseBoolean(getProperty(hintProperties, SQLHintPropertiesKey.WRITE_ROUTE_ONLY_KEY))); + if (containsPropertyKey(hintProps, 
SQLHintPropertiesKey.WRITE_ROUTE_ONLY_KEY)) { + result.setWriteRouteOnly(Boolean.parseBoolean(getProperty(hintProps, SQLHintPropertiesKey.WRITE_ROUTE_ONLY_KEY))); } - if (containsPropertyKey(hintProperties, SQLHintPropertiesKey.USE_TRAFFIC_KEY)) { - result.setUseTraffic(Boolean.parseBoolean(getProperty(hintProperties, SQLHintPropertiesKey.USE_TRAFFIC_KEY))); + if (containsPropertyKey(hintProps, SQLHintPropertiesKey.USE_TRAFFIC_KEY)) { + result.setUseTraffic(Boolean.parseBoolean(getProperty(hintProps, SQLHintPropertiesKey.USE_TRAFFIC_KEY))); } - if (containsPropertyKey(hintProperties, SQLHintPropertiesKey.SKIP_SQL_REWRITE_KEY)) { - result.setSkipSQLRewrite(Boolean.parseBoolean(getProperty(hintProperties, SQLHintPropertiesKey.SKIP_SQL_REWRITE_KEY))); + if (containsPropertyKey(hintProps, SQLHintPropertiesKey.SKIP_SQL_REWRITE_KEY)) { + result.setSkipSQLRewrite(Boolean.parseBoolean(getProperty(hintProps, SQLHintPropertiesKey.SKIP_SQL_REWRITE_KEY))); } - if (containsPropertyKey(hintProperties, SQLHintPropertiesKey.DISABLE_AUDIT_NAMES_KEY)) { - result.setDisableAuditNames(getProperty(hintProperties, SQLHintPropertiesKey.DISABLE_AUDIT_NAMES_KEY)); + if (containsPropertyKey(hintProps, SQLHintPropertiesKey.DISABLE_AUDIT_NAMES_KEY)) { + result.setDisableAuditNames(getProperty(hintProps, SQLHintPropertiesKey.DISABLE_AUDIT_NAMES_KEY)); } - if (containsPropertyKey(hintProperties, SQLHintPropertiesKey.SHADOW_KEY)) { - result.setShadow(Boolean.parseBoolean(getProperty(hintProperties, SQLHintPropertiesKey.SHADOW_KEY))); + if (containsPropertyKey(hintProps, SQLHintPropertiesKey.SHADOW_KEY)) { + result.setShadow(Boolean.parseBoolean(getProperty(hintProps, SQLHintPropertiesKey.SHADOW_KEY))); } - for (Entry entry : hintProperties.entrySet()) { + for (Entry entry : hintProps.entrySet()) { Comparable value = entry.getValue() instanceof Comparable ? 
(Comparable) entry.getValue() : Objects.toString(entry.getValue()); if (containsPropertyKey(Objects.toString(entry.getKey()), SQLHintPropertiesKey.SHARDING_DATABASE_VALUE_KEY)) { result.getShardingDatabaseValues().put(Objects.toString(entry.getKey()).toUpperCase(), value); @@ -139,17 +139,17 @@ private static boolean startWithHint(final String sql) { return null != sql && (sql.startsWith(SQLHintTokenEnum.SQL_START_HINT_TOKEN.getKey()) || sql.startsWith(SQLHintTokenEnum.SQL_START_HINT_TOKEN.getAlias())); } - private static boolean containsPropertyKey(final Properties hintProperties, final SQLHintPropertiesKey sqlHintPropertiesKey) { - return hintProperties.containsKey(sqlHintPropertiesKey.getKey()) || hintProperties.containsKey(sqlHintPropertiesKey.getAlias()); + private static boolean containsPropertyKey(final Properties hintProps, final SQLHintPropertiesKey sqlHintPropsKey) { + return hintProps.containsKey(sqlHintPropsKey.getKey()) || hintProps.containsKey(sqlHintPropsKey.getAlias()); } - private static boolean containsPropertyKey(final String hintPropertyKey, final SQLHintPropertiesKey sqlHintPropertiesKey) { - return hintPropertyKey.contains(sqlHintPropertiesKey.getKey()) || hintPropertyKey.contains(sqlHintPropertiesKey.getAlias()); + private static boolean containsPropertyKey(final String hintPropKey, final SQLHintPropertiesKey sqlHintPropsKey) { + return hintPropKey.contains(sqlHintPropsKey.getKey()) || hintPropKey.contains(sqlHintPropsKey.getAlias()); } - private static String getProperty(final Properties hintProperties, final SQLHintPropertiesKey sqlHintPropertiesKey) { - String result = hintProperties.getProperty(sqlHintPropertiesKey.getKey()); - return null == result ? hintProperties.getProperty(sqlHintPropertiesKey.getAlias()) : result; + private static String getProperty(final Properties hintProps, final SQLHintPropertiesKey sqlHintPropsKey) { + String result = hintProps.getProperty(sqlHintPropsKey.getKey()); + return null == result ? 
hintProps.getProperty(sqlHintPropsKey.getAlias()) : result; } /** @@ -159,9 +159,6 @@ private static String getProperty(final Properties hintProperties, final SQLHint * @return SQL after remove hint */ public static String removeHint(final String sql) { - if (startWithHint(sql)) { - return sql.substring(sql.indexOf(SQL_COMMENT_SUFFIX) + 2); - } - return sql; + return startWithHint(sql) ? sql.substring(sql.indexOf(SQL_COMMENT_SUFFIX) + 2) : sql; } } diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/metadata/jdbc/JDBCInstanceMetaDataBuilder.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/metadata/jdbc/JDBCInstanceMetaDataBuilder.java index 210bbec6a0be2..b055be7991d1d 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/metadata/jdbc/JDBCInstanceMetaDataBuilder.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/metadata/jdbc/JDBCInstanceMetaDataBuilder.java @@ -23,7 +23,7 @@ import java.util.UUID; /** - * JDBC instance definition builder. + * JDBC instance meta data builder. */ public final class JDBCInstanceMetaDataBuilder implements InstanceMetaDataBuilder { diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/metadata/proxy/ProxyInstanceMetaDataBuilder.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/metadata/proxy/ProxyInstanceMetaDataBuilder.java index 32b9e21f35da0..b01721fa66778 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/metadata/proxy/ProxyInstanceMetaDataBuilder.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/metadata/proxy/ProxyInstanceMetaDataBuilder.java @@ -23,7 +23,7 @@ import java.util.UUID; /** - * Proxy instance definition builder. + * Proxy instance meta data builder. 
*/ public final class ProxyInstanceMetaDataBuilder implements InstanceMetaDataBuilder { diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/mode/ModeContextManager.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/mode/ModeContextManager.java index 691cc43b54fdf..c1ed3fc7ac6e7 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/mode/ModeContextManager.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/instance/mode/ModeContextManager.java @@ -18,7 +18,7 @@ package org.apache.shardingsphere.infra.instance.mode; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.database.schema.pojo.AlterSchemaMetaDataPOJO; import org.apache.shardingsphere.infra.metadata.database.schema.pojo.AlterSchemaPOJO; import org.apache.shardingsphere.infra.metadata.version.MetaDataVersion; @@ -82,19 +82,19 @@ public interface ModeContextManager { * Register storage units. * * @param databaseName database name - * @param toBeRegisterStorageUnitProps to be register storage unit props + * @param toBeRegisteredProps to be registered storage unit properties * @throws SQLException SQL exception */ - void registerStorageUnits(String databaseName, Map toBeRegisterStorageUnitProps) throws SQLException; + void registerStorageUnits(String databaseName, Map toBeRegisteredProps) throws SQLException; /** * Alter storage units. 
* * @param databaseName database name - * @param toBeUpdatedStorageUnitProps to be updated storage unit props + * @param toBeUpdatedProps to be updated storage unit properties * @throws SQLException SQL exception */ - void alterStorageUnits(String databaseName, Map toBeUpdatedStorageUnitProps) throws SQLException; + void alterStorageUnits(String databaseName, Map toBeUpdatedProps) throws SQLException; /** * Unregister storage units. diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/ShardingSphereMetaData.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/ShardingSphereMetaData.java index 7b9f611341863..6b01992d6ee24 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/ShardingSphereMetaData.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/ShardingSphereMetaData.java @@ -122,6 +122,7 @@ private void closeResources(final ShardingSphereDatabase database) { globalRuleMetaData.findRules(ResourceHeldRule.class).forEach(each -> each.closeStaleResource(databaseName)); database.getRuleMetaData().findRules(ResourceHeldRule.class).forEach(each -> each.closeStaleResource(databaseName)); database.getRuleMetaData().findSingleRule(StaticDataSourceContainedRule.class).ifPresent(StaticDataSourceContainedRule::cleanStorageNodeDataSources); - Optional.ofNullable(database.getResourceMetaData()).ifPresent(optional -> optional.getDataSources().values().forEach(each -> database.getResourceMetaData().close(each))); + Optional.ofNullable(database.getResourceMetaData()) + .ifPresent(optional -> optional.getStorageUnitMetaData().getStorageUnits().values().forEach(each -> database.getResourceMetaData().close(each.getDataSource()))); } } diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/ShardingSphereDatabase.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/ShardingSphereDatabase.java index 
7eac15403baa9..7c1a3f8546100 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/ShardingSphereDatabase.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/ShardingSphereDatabase.java @@ -24,8 +24,8 @@ import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.storage.StorageResource; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResource; import org.apache.shardingsphere.infra.instance.InstanceContext; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; @@ -118,14 +118,13 @@ public static ShardingSphereDatabase create(final String name, final DatabaseTyp */ public static ShardingSphereDatabase create(final String name, final DatabaseType protocolType, final DatabaseConfiguration databaseConfig, final Collection rules, final Map schemas) { - ResourceMetaData resourceMetaData = createResourceMetaData(name, databaseConfig.getStorageResource(), databaseConfig.getDataSourcePropsMap()); + ResourceMetaData resourceMetaData = createResourceMetaData(name, databaseConfig.getStorageResource(), databaseConfig.getDataSourcePoolPropertiesMap()); RuleMetaData ruleMetaData = new RuleMetaData(rules); return new ShardingSphereDatabase(name, protocolType, resourceMetaData, ruleMetaData, schemas); } - private static ResourceMetaData createResourceMetaData(final String databaseName, final StorageResource storageResource, - final Map dataSourcePropsMap) { - return new 
ResourceMetaData(databaseName, storageResource, dataSourcePropsMap); + private static ResourceMetaData createResourceMetaData(final String databaseName, final StorageResource storageResource, final Map propsMap) { + return new ResourceMetaData(databaseName, storageResource, propsMap); } /** @@ -173,7 +172,7 @@ public boolean containsSchema(final String schemaName) { * @return is completed or not */ public boolean isComplete() { - return !ruleMetaData.getRules().isEmpty() && !resourceMetaData.getDataSources().isEmpty(); + return !ruleMetaData.getRules().isEmpty() && !resourceMetaData.getStorageUnitMetaData().getStorageUnits().isEmpty(); } /** @@ -182,7 +181,7 @@ public boolean isComplete() { * @return contains data source or not */ public boolean containsDataSource() { - return !resourceMetaData.getDataSources().isEmpty(); + return !resourceMetaData.getStorageUnitMetaData().getStorageUnits().isEmpty(); } /** @@ -196,7 +195,7 @@ public synchronized void reloadRules(final Class r Collection databaseRules = new LinkedList<>(ruleMetaData.getRules()); toBeReloadedRules.stream().findFirst().ifPresent(optional -> { databaseRules.removeAll(toBeReloadedRules); - databaseRules.add(((MutableDataNodeRule) optional).reloadRule(ruleConfig, name, resourceMetaData.getDataSources(), databaseRules)); + databaseRules.add(((MutableDataNodeRule) optional).reloadRule(ruleConfig, name, resourceMetaData.getStorageUnitMetaData().getDataSources(), databaseRules)); }); ruleMetaData.getRules().clear(); ruleMetaData.getRules().addAll(databaseRules); diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java index 42b762eb67a4b..f0ba8a800e5d6 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java +++ 
b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java @@ -20,17 +20,17 @@ import lombok.Getter; import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.database.DatabaseTypeEngine; import org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolDestroyer; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; -import org.apache.shardingsphere.infra.state.datasource.DataSourceStateManager; -import org.apache.shardingsphere.infra.datasource.storage.StorageResource; -import org.apache.shardingsphere.infra.datasource.storage.StorageUtils; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResource; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResourceUtils; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnitMetaData; import javax.sql.DataSource; import java.util.Collection; -import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.Map; @@ -43,57 +43,25 @@ @Getter public final class ResourceMetaData { - private final StorageNodeMetaData storageNodeMetaData; + private final Map storageNodeDataSources; private final StorageUnitMetaData storageUnitMetaData; - private final Map dataSourcePropsMap; - public ResourceMetaData(final Map 
dataSources) { this(null, dataSources); } public ResourceMetaData(final String databaseName, final Map dataSources) { - Map enabledDataSources = DataSourceStateManager.getInstance().getEnabledDataSources(databaseName, dataSources); - Map storageTypes = createStorageTypes(dataSources, enabledDataSources); - dataSourcePropsMap = DataSourcePropertiesCreator.create(dataSources); - storageNodeMetaData = new StorageNodeMetaData(dataSources); - storageUnitMetaData = new StorageUnitMetaData(dataSources, storageTypes, StorageUtils.getStorageUnits(dataSources), enabledDataSources); - - } - - public ResourceMetaData(final String databaseName, final StorageResource storageResource, final Map dataSourcePropsMap) { - Map enabledDataSources = DataSourceStateManager.getInstance().getEnabledDataSources(databaseName, storageResource.getStorageNodes()); - Map storageTypes = createStorageTypes(storageResource.getStorageNodes(), enabledDataSources); - storageNodeMetaData = new StorageNodeMetaData(storageResource.getStorageNodes()); - storageUnitMetaData = new StorageUnitMetaData(storageResource.getStorageNodes(), storageTypes, storageResource.getStorageUnits(), enabledDataSources); - this.dataSourcePropsMap = dataSourcePropsMap; - } - - private Map createStorageTypes(final Map dataSources, final Map enabledDataSources) { - Map result = new LinkedHashMap<>(dataSources.size(), 1F); - for (Entry entry : dataSources.entrySet()) { - result.put(entry.getKey(), DatabaseTypeEngine.getStorageType(enabledDataSources.containsKey(entry.getKey()) ? 
Collections.singleton(entry.getValue()) : Collections.emptyList())); - } - return result; + storageNodeDataSources = StorageResourceUtils.getStorageNodeDataSources(dataSources); + storageUnitMetaData = new StorageUnitMetaData(databaseName, storageNodeDataSources, + dataSources.entrySet().stream() + .collect(Collectors.toMap(Entry::getKey, entry -> DataSourcePoolPropertiesCreator.create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)), + StorageResourceUtils.getStorageUnitNodeMappers(dataSources)); } - /** - * Get data sources. - * - * @return data sources - */ - public Map getDataSources() { - return storageUnitMetaData.getDataSources(); - } - - /** - * Get storage types. - * - * @return storage types - */ - public Map getStorageTypes() { - return storageUnitMetaData.getStorageTypes(); + public ResourceMetaData(final String databaseName, final StorageResource storageResource, final Map propsMap) { + storageNodeDataSources = storageResource.getStorageNodeDataSources(); + storageUnitMetaData = new StorageUnitMetaData(databaseName, storageNodeDataSources, propsMap, storageResource.getStorageUnitNodeMappers()); } /** @@ -103,7 +71,7 @@ public Map getStorageTypes() { */ public Collection getAllInstanceDataSourceNames() { Collection result = new LinkedList<>(); - for (Entry entry : storageUnitMetaData.getConnectionPropsMap().entrySet()) { + for (Entry entry : storageUnitMetaData.getStorageUnits().entrySet()) { if (!isExisted(entry.getKey(), result)) { result.add(entry.getKey()); } @@ -112,8 +80,8 @@ public Collection getAllInstanceDataSourceNames() { } private boolean isExisted(final String dataSourceName, final Collection existedDataSourceNames) { - return existedDataSourceNames.stream().anyMatch(each -> storageUnitMetaData.getConnectionPropsMap().get(dataSourceName) - .isInSameDatabaseInstance(storageUnitMetaData.getConnectionPropsMap().get(each))); + return existedDataSourceNames.stream().anyMatch(each -> 
storageUnitMetaData.getStorageUnits().get(dataSourceName).getConnectionProperties() + .isInSameDatabaseInstance(storageUnitMetaData.getStorageUnits().get(each).getConnectionProperties())); } /** @@ -123,7 +91,7 @@ private boolean isExisted(final String dataSourceName, final Collection * @return connection properties */ public ConnectionProperties getConnectionProperties(final String dataSourceName) { - return storageUnitMetaData.getConnectionPropsMap().get(dataSourceName); + return storageUnitMetaData.getStorageUnits().get(dataSourceName).getConnectionProperties(); } /** @@ -133,7 +101,7 @@ public ConnectionProperties getConnectionProperties(final String dataSourceName) * @return storage type */ public DatabaseType getStorageType(final String dataSourceName) { - return storageUnitMetaData.getStorageTypes().get(dataSourceName); + return storageUnitMetaData.getStorageUnits().get(dataSourceName).getStorageType(); } /** @@ -143,7 +111,7 @@ public DatabaseType getStorageType(final String dataSourceName) { * @return not existed resource names */ public Collection getNotExistedDataSources(final Collection resourceNames) { - return resourceNames.stream().filter(each -> !storageUnitMetaData.getDataSources().containsKey(each)).collect(Collectors.toSet()); + return resourceNames.stream().filter(each -> !storageUnitMetaData.getStorageUnits().containsKey(each)).collect(Collectors.toSet()); } /** diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/StorageUnitMetaData.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/StorageUnitMetaData.java deleted file mode 100644 index 05357738ffced..0000000000000 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/StorageUnitMetaData.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.infra.metadata.database.resource; - -import lombok.Getter; -import org.apache.shardingsphere.infra.database.DatabaseTypeEngine; -import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties; -import org.apache.shardingsphere.infra.database.core.connector.ConnectionPropertiesParser; -import org.apache.shardingsphere.infra.datasource.ShardingSphereStorageDataSourceWrapper; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; -import org.apache.shardingsphere.infra.datasource.storage.StorageUnit; - -import javax.sql.DataSource; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Map.Entry; - -/** - * Storage unit meta data. 
- */ -@Getter -public final class StorageUnitMetaData { - - private final Map dataSources; - - private final Map storageTypes; - - private final Map storageUnits; - - private final Map connectionPropsMap; - - public StorageUnitMetaData(final Map dataSources, final Map storageTypes, final Map storageUnits, - final Map enabledDataSources) { - this.storageUnits = storageUnits; - this.dataSources = getStorageUnitDataSources(dataSources, storageUnits); - this.storageTypes = getStorageUnitTypes(storageTypes); - this.connectionPropsMap = createConnectionPropertiesMap(enabledDataSources, storageTypes, storageUnits); - } - - private Map getStorageUnitDataSources(final Map storageNodes, final Map storageUnits) { - Map result = new LinkedHashMap<>(storageUnits.size(), 1F); - for (Entry entry : storageUnits.entrySet()) { - DataSource dataSource = storageNodes.get(entry.getValue().getNodeName()); - result.put(entry.getKey(), new ShardingSphereStorageDataSourceWrapper(dataSource, entry.getValue().getCatalog(), entry.getValue().getUrl())); - } - return result; - } - - private Map getStorageUnitTypes(final Map storageTypes) { - Map result = new LinkedHashMap<>(storageUnits.size(), 1F); - for (Entry entry : storageUnits.entrySet()) { - DatabaseType storageType = storageTypes.containsKey(entry.getValue().getNodeName()) - ? 
storageTypes.get(entry.getValue().getNodeName()) - : DatabaseTypeEngine.getStorageType(Collections.emptyList()); - result.put(entry.getKey(), storageType); - } - return result; - } - - private Map createConnectionPropertiesMap(final Map enabledDataSources, - final Map storageTypes, final Map storageUnits) { - Map result = new LinkedHashMap<>(storageUnits.size(), 1F); - for (Entry entry : storageUnits.entrySet()) { - String nodeName = entry.getValue().getNodeName(); - if (enabledDataSources.containsKey(nodeName)) { - Map standardProps = DataSourcePropertiesCreator.create(enabledDataSources.get(nodeName)).getConnectionPropertySynonyms().getStandardProperties(); - DatabaseType storageType = storageTypes.get(nodeName); - ConnectionPropertiesParser parser = DatabaseTypedSPILoader.getService(ConnectionPropertiesParser.class, storageType); - result.put(entry.getKey(), parser.parse(standardProps.get("url").toString(), standardProps.get("username").toString(), entry.getValue().getCatalog())); - } - } - return result; - } -} diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageNode.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageNode.java new file mode 100644 index 0000000000000..a080d47f32a8b --- /dev/null +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageNode.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.metadata.database.resource.storage; + +import com.google.common.base.Objects; +import lombok.Getter; +import lombok.RequiredArgsConstructor; + +/** + * Storage node. + */ +@RequiredArgsConstructor +@Getter +public final class StorageNode { + + private final String name; + + @Override + public boolean equals(final Object obj) { + return obj instanceof StorageNode && ((StorageNode) obj).name.equalsIgnoreCase(name); + } + + @Override + public int hashCode() { + return Objects.hashCode(name.toUpperCase()); + } + + @Override + public String toString() { + return name; + } +} diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageNodeProperties.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageNodeProperties.java similarity index 78% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageNodeProperties.java rename to infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageNodeProperties.java index 040825543d243..91e13d2091409 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageNodeProperties.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageNodeProperties.java @@ -15,13 +15,12 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.storage; +package org.apache.shardingsphere.infra.metadata.database.resource.storage; import com.google.common.base.Objects; import lombok.Getter; import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; /** * Storage node properties. @@ -34,15 +33,13 @@ public final class StorageNodeProperties { private final DatabaseType databaseType; - private final DataSourceProperties dataSourceProperties; - - private final String database; + private final String catalog; @Override public boolean equals(final Object obj) { if (obj instanceof StorageNodeProperties) { - StorageNodeProperties storageNodeProperties = (StorageNodeProperties) obj; - return storageNodeProperties.name.equals(name); + StorageNodeProperties storageNodeProps = (StorageNodeProperties) obj; + return storageNodeProps.name.equals(name); } return false; } diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageResource.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResource.java similarity index 53% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageResource.java rename to infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResource.java index ad8ca882becb4..818712af07564 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageResource.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResource.java @@ -15,10 +15,10 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.storage; +package org.apache.shardingsphere.infra.metadata.database.resource.storage; import lombok.Getter; -import org.apache.shardingsphere.infra.datasource.ShardingSphereStorageDataSourceWrapper; +import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource; import javax.sql.DataSource; import java.util.LinkedHashMap; @@ -31,24 +31,24 @@ @Getter public class StorageResource { - private final Map storageNodes; + private final Map storageNodeDataSources; - private final Map storageUnits; + private final Map storageUnitNodeMappers; private final Map wrappedDataSources; - public StorageResource(final Map storageNodes, final Map storageUnits) { - this.storageNodes = storageNodes; - this.storageUnits = storageUnits; - wrappedDataSources = getWrappedDataSources(storageUnits); + public StorageResource(final Map storageNodeDataSources, final Map storageUnitNodeMappers) { + this.storageNodeDataSources = storageNodeDataSources; + this.storageUnitNodeMappers = storageUnitNodeMappers; + wrappedDataSources = createWrappedDataSources(); } - private Map getWrappedDataSources(final Map storageUnits) { - Map result = new LinkedHashMap<>(storageUnits.size(), 1F); - for (Entry entry : storageUnits.entrySet()) { - DataSource dataSource = storageNodes.get(entry.getValue().getNodeName()); + private Map createWrappedDataSources() { + Map result = new LinkedHashMap<>(storageUnitNodeMappers.size(), 1F); + for (Entry entry : storageUnitNodeMappers.entrySet()) { + DataSource dataSource = storageNodeDataSources.get(entry.getValue().getStorageNode()); if (null != dataSource) { - result.put(entry.getKey(), new ShardingSphereStorageDataSourceWrapper(dataSource, entry.getValue().getCatalog(), entry.getValue().getUrl())); + result.put(entry.getKey(), new CatalogSwitchableDataSource(dataSource, entry.getValue().getCatalog(), entry.getValue().getUrl())); } } return result; diff --git 
a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResourceCreator.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResourceCreator.java new file mode 100644 index 0000000000000..bf408ade7e8be --- /dev/null +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResourceCreator.java @@ -0,0 +1,122 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.infra.metadata.database.resource.storage; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.database.core.connector.url.JdbcUrl; +import org.apache.shardingsphere.infra.database.core.connector.url.StandardJdbcUrlParser; +import org.apache.shardingsphere.infra.database.core.connector.url.UnrecognizedDatabaseURLException; +import org.apache.shardingsphere.infra.database.core.metadata.database.DialectDatabaseMetaData; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeFactory; +import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; +import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; + +import javax.sql.DataSource; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; + +/** + * Storage resource creator. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class StorageResourceCreator { + + /** + * Create storage resource. 
+ * + * @param propsMap data source pool properties map + * @return created storage resource + */ + public static StorageResource createStorageResource(final Map propsMap) { + Map storageNodes = new LinkedHashMap<>(); + Map storageUnitNodeMappers = new LinkedHashMap<>(); + for (Entry entry : propsMap.entrySet()) { + StorageNodeProperties storageNodeProps = getStorageNodeProperties(entry.getKey(), entry.getValue()); + StorageNode storageNode = new StorageNode(storageNodeProps.getName()); + if (!storageNodes.containsKey(storageNode)) { + storageNodes.put(storageNode, DataSourcePoolCreator.create(entry.getKey(), entry.getValue(), true, storageNodes.values())); + } + appendStorageUnitNodeMapper(storageUnitNodeMappers, storageNodeProps, entry.getKey(), entry.getValue()); + } + return new StorageResource(storageNodes, storageUnitNodeMappers); + } + + /** + * Create storage resource without data source. + * + * @param propsMap data source pool properties map + * @return created storage resource + */ + public static StorageResourceWithProperties createStorageResourceWithoutDataSource(final Map propsMap) { + Map storageNodes = new LinkedHashMap<>(); + Map storageUnitNodeMappers = new LinkedHashMap<>(); + Map newPropsMap = new LinkedHashMap<>(); + for (Entry entry : propsMap.entrySet()) { + StorageNodeProperties storageNodeProps = getStorageNodeProperties(entry.getKey(), entry.getValue()); + StorageNode storageNode = new StorageNode(storageNodeProps.getName()); + if (storageNodes.containsKey(storageNode)) { + appendStorageUnitNodeMapper(storageUnitNodeMappers, storageNodeProps, entry.getKey(), entry.getValue()); + continue; + } + storageNodes.put(storageNode, null); + appendStorageUnitNodeMapper(storageUnitNodeMappers, storageNodeProps, entry.getKey(), entry.getValue()); + newPropsMap.put(storageNodeProps.getName(), entry.getValue()); + } + return new StorageResourceWithProperties(storageNodes, storageUnitNodeMappers, newPropsMap); + } + + private static void 
appendStorageUnitNodeMapper(final Map storageUnitNodeMappers, final StorageNodeProperties storageNodeProps, + final String unitName, final DataSourcePoolProperties props) { + String url = props.getConnectionPropertySynonyms().getStandardProperties().get("url").toString(); + storageUnitNodeMappers.put(unitName, getStorageUnitNodeMapper(storageNodeProps, unitName, url)); + } + + private static StorageUnitNodeMapper getStorageUnitNodeMapper(final StorageNodeProperties storageNodeProps, final String unitName, final String url) { + DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(storageNodeProps.getDatabaseType()).getDialectDatabaseMetaData(); + return dialectDatabaseMetaData.isInstanceConnectionAvailable() + ? new StorageUnitNodeMapper(unitName, new StorageNode(storageNodeProps.getName()), storageNodeProps.getCatalog(), url) + : new StorageUnitNodeMapper(unitName, new StorageNode(storageNodeProps.getName()), url); + } + + private static StorageNodeProperties getStorageNodeProperties(final String dataSourceName, final DataSourcePoolProperties storageNodeProps) { + Map standardProps = storageNodeProps.getConnectionPropertySynonyms().getStandardProperties(); + String url = standardProps.get("url").toString(); + String username = standardProps.get("username").toString(); + DatabaseType databaseType = DatabaseTypeFactory.get(url); + return getStorageNodeProperties(dataSourceName, url, username, databaseType); + } + + private static StorageNodeProperties getStorageNodeProperties(final String dataSourceName, final String url, final String username, final DatabaseType databaseType) { + try { + JdbcUrl jdbcUrl = new StandardJdbcUrlParser().parse(url); + DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData(); + String nodeName = dialectDatabaseMetaData.isInstanceConnectionAvailable() ? 
generateStorageNodeName(jdbcUrl.getHostname(), jdbcUrl.getPort(), username) : dataSourceName; + return new StorageNodeProperties(nodeName, databaseType, jdbcUrl.getDatabase()); + } catch (final UnrecognizedDatabaseURLException ex) { + return new StorageNodeProperties(dataSourceName, databaseType, null); + } + } + + private static String generateStorageNodeName(final String hostname, final int port, final String username) { + return String.format("%s_%s_%s", hostname, port, username); + } +} diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResourceUtils.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResourceUtils.java new file mode 100644 index 0000000000000..5b5a7ea9fa0f0 --- /dev/null +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResourceUtils.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.infra.metadata.database.resource.storage; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; + +import javax.sql.DataSource; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Collectors; + +/** + * Storage utility class. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class StorageResourceUtils { + + /** + * Get storage node data sources. + * + * @param dataSources data sources + * @return storage node data sources + */ + public static Map getStorageNodeDataSources(final Map dataSources) { + Map result = new LinkedHashMap<>(dataSources.size(), 1F); + for (Entry entry : dataSources.entrySet()) { + result.put(new StorageNode(entry.getKey()), entry.getValue()); + } + return result; + } + + /** + * Get storage unit node mappers from provided data sources. + * + * @param dataSources data sources + * @return storage unit node mappers + */ + public static Map getStorageUnitNodeMappers(final Map dataSources) { + return dataSources.entrySet().stream() + .collect(Collectors.toMap(Entry::getKey, entry -> getStorageUnitNodeMapper(entry.getKey(), entry.getValue()), (oldValue, currentValue) -> currentValue, LinkedHashMap::new)); + } + + /** + * Get storage unit node mapper from provided data source. 
+ * + * @param dataSourceName data source name + * @param dataSource data source + * @return storage unit node mapper + */ + public static StorageUnitNodeMapper getStorageUnitNodeMapper(final String dataSourceName, final DataSource dataSource) { + DataSourcePoolProperties props = DataSourcePoolPropertiesCreator.create(dataSource); + String url = props.getConnectionPropertySynonyms().getStandardProperties().get("url").toString(); + return new StorageUnitNodeMapper(dataSourceName, new StorageNode(dataSourceName), url); + } +} diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageResourceWithProperties.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResourceWithProperties.java similarity index 61% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageResourceWithProperties.java rename to infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResourceWithProperties.java index ee64e98b0e9a9..bc9b610a8db70 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageResourceWithProperties.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResourceWithProperties.java @@ -15,10 +15,10 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.storage; +package org.apache.shardingsphere.infra.metadata.database.resource.storage; import lombok.Getter; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import javax.sql.DataSource; import java.util.Map; @@ -29,10 +29,11 @@ @Getter public final class StorageResourceWithProperties extends StorageResource { - private final Map dataSourcePropertiesMap; + private final Map dataSourcePoolPropertiesMap; - public StorageResourceWithProperties(final Map storageNodes, final Map storageUnits, final Map dataSourcePropertiesMap) { - super(storageNodes, storageUnits); - this.dataSourcePropertiesMap = dataSourcePropertiesMap; + public StorageResourceWithProperties(final Map storageNodes, + final Map storageUnitNodeMappers, final Map dataSourcePoolPropertiesMap) { + super(storageNodes, storageUnitNodeMappers); + this.dataSourcePoolPropertiesMap = dataSourcePoolPropertiesMap; } } diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageUnit.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageUnit.java new file mode 100644 index 0000000000000..a46fbf1957894 --- /dev/null +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageUnit.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.metadata.database.resource.storage; + +import lombok.Getter; +import org.apache.shardingsphere.infra.database.DatabaseTypeEngine; +import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties; +import org.apache.shardingsphere.infra.database.core.connector.ConnectionPropertiesParser; +import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.state.datasource.DataSourceStateManager; + +import javax.sql.DataSource; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.stream.Collectors; + +/** + * Storage unit. 
+ */ +@Getter +public final class StorageUnit { + + private final DataSourcePoolProperties dataSourcePoolProperties; + + private final StorageUnitNodeMapper unitNodeMapper; + + private final DataSource dataSource; + + private final DatabaseType storageType; + + private final ConnectionProperties connectionProperties; + + public StorageUnit(final String databaseName, final Map storageNodeDataSources, + final DataSourcePoolProperties props, final StorageUnitNodeMapper unitNodeMapper) { + this.dataSourcePoolProperties = props; + this.unitNodeMapper = unitNodeMapper; + dataSource = getStorageUnitDataSource(storageNodeDataSources, unitNodeMapper); + Map enabledStorageNodeDataSources = getEnabledStorageNodeDataSources(databaseName, storageNodeDataSources); + storageType = createStorageType(enabledStorageNodeDataSources, unitNodeMapper); + connectionProperties = createConnectionProperties(enabledStorageNodeDataSources, unitNodeMapper, storageType).orElse(null); + } + + private DataSource getStorageUnitDataSource(final Map storageNodeDataSources, final StorageUnitNodeMapper unitNodeMapper) { + DataSource dataSource = storageNodeDataSources.get(unitNodeMapper.getStorageNode()); + return new CatalogSwitchableDataSource(dataSource, unitNodeMapper.getCatalog(), unitNodeMapper.getUrl()); + } + + private Map getEnabledStorageNodeDataSources(final String databaseName, final Map storageNodeDataSources) { + Map toBeCheckedDataSources = new LinkedHashMap<>(storageNodeDataSources.size(), 1F); + for (Entry entry : storageNodeDataSources.entrySet()) { + toBeCheckedDataSources.put(entry.getKey().getName(), entry.getValue()); + } + Map enabledDataSources = DataSourceStateManager.getInstance().getEnabledDataSources(databaseName, toBeCheckedDataSources); + return storageNodeDataSources.entrySet().stream() + .filter(entry -> enabledDataSources.containsKey(entry.getKey().getName())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); + } + + private DatabaseType 
createStorageType(final Map enabledStorageNodeDataSources, final StorageUnitNodeMapper unitNodeMapper) { + return DatabaseTypeEngine.getStorageType(enabledStorageNodeDataSources.containsKey(unitNodeMapper.getStorageNode()) + ? Collections.singleton(enabledStorageNodeDataSources.get(unitNodeMapper.getStorageNode())) + : Collections.emptyList()); + } + + private Optional createConnectionProperties(final Map enabledStorageNodeDataSources, + final StorageUnitNodeMapper unitNodeMapper, final DatabaseType storageType) { + if (!enabledStorageNodeDataSources.containsKey(unitNodeMapper.getStorageNode())) { + return Optional.empty(); + } + Map standardProps = DataSourcePoolPropertiesCreator.create( + enabledStorageNodeDataSources.get(unitNodeMapper.getStorageNode())).getConnectionPropertySynonyms().getStandardProperties(); + ConnectionPropertiesParser parser = DatabaseTypedSPILoader.getService(ConnectionPropertiesParser.class, storageType); + return Optional.of(parser.parse(standardProps.get("url").toString(), standardProps.get("username").toString(), unitNodeMapper.getCatalog())); + } +} diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageUnitMetaData.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageUnitMetaData.java new file mode 100644 index 0000000000000..854ea7edb3324 --- /dev/null +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageUnitMetaData.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.metadata.database.resource.storage; + +import lombok.Getter; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; + +import javax.sql.DataSource; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Collectors; + +/** + * Storage unit meta data. + */ +@Getter +public final class StorageUnitMetaData { + + // TODO zhangliang: should refactor + private final Map unitNodeMappers; + + private final Map storageUnits; + + // TODO zhangliang: should refactor + private final Map dataSources; + + public StorageUnitMetaData(final String databaseName, final Map storageNodeDataSources, + final Map dataSourcePoolPropertiesMap, final Map unitNodeMappers) { + this.unitNodeMappers = unitNodeMappers; + storageUnits = new LinkedHashMap<>(unitNodeMappers.size(), 1F); + for (Entry entry : unitNodeMappers.entrySet()) { + storageUnits.put(entry.getKey(), new StorageUnit(databaseName, storageNodeDataSources, dataSourcePoolPropertiesMap.get(entry.getKey()), entry.getValue())); + } + dataSources = createDataSources(); + } + + /** + * Get data source pool properties map. 
+ * + * @return data source pool properties map + */ + public Map getDataSourcePoolPropertiesMap() { + return storageUnits.entrySet().stream() + .collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getDataSourcePoolProperties(), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); + } + + private Map createDataSources() { + Map result = new LinkedHashMap<>(storageUnits.size(), 1F); + for (Entry entry : storageUnits.entrySet()) { + result.put(entry.getKey(), entry.getValue().getDataSource()); + } + return result; + } +} diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageUnit.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageUnitNodeMapper.java similarity index 55% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageUnit.java rename to infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageUnitNodeMapper.java index f0d9aca9148eb..c609b3e63ebf2 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageUnit.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageUnitNodeMapper.java @@ -15,46 +15,46 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource.storage; +package org.apache.shardingsphere.infra.metadata.database.resource.storage; import com.google.common.base.Objects; import lombok.Getter; import lombok.RequiredArgsConstructor; /** - * Storage unit. + * Storage unit and node mapper. 
*/ @RequiredArgsConstructor @Getter -public final class StorageUnit { +public final class StorageUnitNodeMapper { private final String name; - private final String nodeName; + private final StorageNode storageNode; private final String catalog; private final String url; - public StorageUnit(final String name, final String nodeName, final String url) { - this(name, nodeName, null, url); - } - - private boolean isSameCatalog(final StorageUnit storageUnit) { - return null == catalog ? null == storageUnit : catalog.equalsIgnoreCase(storageUnit.getCatalog()); + public StorageUnitNodeMapper(final String name, final StorageNode storageNode, final String url) { + this(name, storageNode, null, url); } @Override public boolean equals(final Object obj) { - if (obj instanceof StorageUnit) { - StorageUnit storageUnit = (StorageUnit) obj; - return storageUnit.name.equalsIgnoreCase(name) && storageUnit.nodeName.equalsIgnoreCase(nodeName) && isSameCatalog(storageUnit); + if (obj instanceof StorageUnitNodeMapper) { + StorageUnitNodeMapper storageUnitNodeMapper = (StorageUnitNodeMapper) obj; + return storageUnitNodeMapper.name.equalsIgnoreCase(name) && storageUnitNodeMapper.storageNode.equals(storageNode) && isSameCatalog(storageUnitNodeMapper); } return false; } + private boolean isSameCatalog(final StorageUnitNodeMapper storageUnitNodeMapper) { + return null == catalog ? null == storageUnitNodeMapper : catalog.equalsIgnoreCase(storageUnitNodeMapper.getCatalog()); + } + @Override public int hashCode() { - return Objects.hashCode(name.toUpperCase(), nodeName.toUpperCase(), null == catalog ? null : catalog.toUpperCase()); + return Objects.hashCode(name.toUpperCase(), storageNode.getName().toUpperCase(), null == catalog ? 
null : catalog.toUpperCase()); } } diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/schema/builder/GenericSchemaBuilder.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/schema/builder/GenericSchemaBuilder.java index 50c513a2e6f60..856e1c6977341 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/schema/builder/GenericSchemaBuilder.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/schema/builder/GenericSchemaBuilder.java @@ -77,21 +77,12 @@ public static Map build(final GenericSchemaBuilder */ public static Map build(final Collection tableNames, final GenericSchemaBuilderMaterial material) throws SQLException { Map result = loadSchemas(tableNames, material); - if (!isProtocolTypeSameWithStorageType(material)) { + if (!material.isSameProtocolAndStorageTypes()) { result = translate(result, material); } return revise(result, material); } - private static boolean isProtocolTypeSameWithStorageType(final GenericSchemaBuilderMaterial material) { - for (DatabaseType each : material.getStorageTypes().values()) { - if (!material.getProtocolType().equals(each)) { - return false; - } - } - return true; - } - private static Collection getAllTableNames(final Collection rules) { return rules.stream().filter(TableContainedRule.class::isInstance).flatMap(each -> ((TableContainedRule) each).getLogicTableMapper().getTableNames().stream()).collect(Collectors.toSet()); } diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/schema/builder/GenericSchemaBuilderMaterial.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/schema/builder/GenericSchemaBuilderMaterial.java index 824aceda2bd8f..50e880ad8190a 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/schema/builder/GenericSchemaBuilderMaterial.java +++ 
b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/schema/builder/GenericSchemaBuilderMaterial.java @@ -21,11 +21,15 @@ import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnitMetaData; import org.apache.shardingsphere.infra.rule.ShardingSphereRule; import javax.sql.DataSource; import java.util.Collection; +import java.util.LinkedHashMap; import java.util.Map; +import java.util.Map.Entry; +import java.util.stream.Collectors; /** * ShardingSphere schema builder material. @@ -45,4 +49,20 @@ public final class GenericSchemaBuilderMaterial { private final ConfigurationProperties props; private final String defaultSchemaName; + + public GenericSchemaBuilderMaterial(final DatabaseType protocolType, final StorageUnitMetaData storageUnitMetaData, + final Collection rules, final ConfigurationProperties props, final String defaultSchemaName) { + this(protocolType, storageUnitMetaData.getStorageUnits().entrySet().stream() + .collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getStorageType(), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)), + storageUnitMetaData.getDataSources(), rules, props, defaultSchemaName); + } + + /** + * Judge whether same protocol and storage database types. 
+ * + * @return is same or not + */ + public boolean isSameProtocolAndStorageTypes() { + return storageTypes.values().stream().allMatch(protocolType::equals); + } } diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/schema/util/SchemaMetaDataUtils.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/schema/util/SchemaMetaDataUtils.java index 9d2239ec2f852..6469ba02e8869 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/schema/util/SchemaMetaDataUtils.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/schema/util/SchemaMetaDataUtils.java @@ -19,22 +19,23 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.database.core.GlobalDataSourceRegistry; +import org.apache.shardingsphere.infra.database.core.metadata.data.loader.MetaDataLoaderMaterial; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; import org.apache.shardingsphere.infra.datanode.DataNode; import org.apache.shardingsphere.infra.datanode.DataNodes; -import org.apache.shardingsphere.infra.database.core.GlobalDataSourceRegistry; +import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.metadata.database.schema.builder.GenericSchemaBuilderMaterial; import org.apache.shardingsphere.infra.metadata.database.schema.exception.UnsupportedActualDataNodeStructureException; -import org.apache.shardingsphere.infra.database.core.metadata.data.loader.MetaDataLoaderMaterial; -import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import javax.sql.DataSource; import java.util.Collection; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.Map; +import java.util.Map.Entry; import java.util.Optional; -import 
java.util.stream.Collectors; /** * Schema meta data utility class. @@ -53,18 +54,27 @@ public final class SchemaMetaDataUtils { public static Collection getMetaDataLoaderMaterials(final Collection tableNames, final GenericSchemaBuilderMaterial material, final boolean checkMetaDataEnable) { Map> dataSourceTableGroups = new LinkedHashMap<>(); - Collection notSupportThreeTierStructureStorageTypes = getNotSupportThreeTierStructureStorageTypes(material.getStorageTypes().values()); + Collection unsupportedThreeTierStorageStructureDatabaseTypes = getUnsupportedThreeTierStorageStructureDatabaseTypes(material.getStorageTypes().values()); DataNodes dataNodes = new DataNodes(material.getRules()); for (String each : tableNames) { - checkDataSourceTypeIncludeInstanceAndSetDatabaseTableMap(notSupportThreeTierStructureStorageTypes, dataNodes, each); + checkDataSourceTypeIncludeInstanceAndSetDatabaseTableMap(unsupportedThreeTierStorageStructureDatabaseTypes, dataNodes, each); if (checkMetaDataEnable) { addAllActualTableDataNode(material, dataSourceTableGroups, dataNodes, each); } else { addOneActualTableDataNode(material, dataSourceTableGroups, dataNodes, each); } } - return dataSourceTableGroups.entrySet().stream().map(entry -> new MetaDataLoaderMaterial(entry.getValue(), - getDataSource(material, entry.getKey()), material.getStorageTypes().get(entry.getKey()), material.getDefaultSchemaName())).collect(Collectors.toList()); + Collection result = new LinkedList<>(); + for (Entry> entry : dataSourceTableGroups.entrySet()) { + DatabaseType storageType = material.getStorageTypes().get(entry.getKey()); + String defaultSchemaName = getDefaultSchemaNameByStorageType(storageType, material.getDefaultSchemaName()); + result.add(new MetaDataLoaderMaterial(entry.getValue(), getDataSource(material, entry.getKey()), storageType, defaultSchemaName)); + } + return result; + } + + private static String getDefaultSchemaNameByStorageType(final DatabaseType storageType, final String databaseName) { 
+ return new DatabaseTypeRegistry(storageType).getDefaultSchemaName(databaseName); } private static DataSource getDataSource(final GenericSchemaBuilderMaterial material, final String dataSourceName) { @@ -83,10 +93,10 @@ private static void checkDataSourceTypeIncludeInstanceAndSetDatabaseTableMap(fin } } - private static Collection getNotSupportThreeTierStructureStorageTypes(final Collection storageTypes) { + private static Collection getUnsupportedThreeTierStorageStructureDatabaseTypes(final Collection storageTypes) { Collection result = new LinkedList<>(); for (DatabaseType each : storageTypes) { - if (!"MySQL".equals(each.getType())) { + if (!new DatabaseTypeRegistry(each).getDialectDatabaseMetaData().isSupportThreeTierStorageStructure()) { result.add(each); } } diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/builder/dialect/PostgreSQLShardingSphereStatisticsBuilder.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/builder/dialect/PostgreSQLShardingSphereStatisticsBuilder.java index cf223217d6606..a4a38d3c0fd4a 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/builder/dialect/PostgreSQLShardingSphereStatisticsBuilder.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/builder/dialect/PostgreSQLShardingSphereStatisticsBuilder.java @@ -71,8 +71,8 @@ public ShardingSphereStatistics build(final ShardingSphereMetaData metaData) { return result; } - private void appendSchemaData(final ShardingSphereDatabase shardingSphereDatabase, final ShardingSphereDatabaseData databaseData) { - for (Entry entry : shardingSphereDatabase.getSchemas().entrySet()) { + private void appendSchemaData(final ShardingSphereDatabase database, final ShardingSphereDatabaseData databaseData) { + for (Entry entry : database.getSchemas().entrySet()) { if (COLLECTED_SCHEMA_TABLES.containsKey(entry.getKey()) || 
INIT_DATA_SCHEMA_TABLES.containsKey(entry.getKey())) { ShardingSphereSchemaData schemaData = new ShardingSphereSchemaData(); appendTableData(entry, schemaData); diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/collector/ShardingSphereTableDataCollectorUtils.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/collector/ShardingSphereTableDataCollectorUtils.java index 1ef6f48fc6771..05b1ead4dbd26 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/collector/ShardingSphereTableDataCollectorUtils.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/collector/ShardingSphereTableDataCollectorUtils.java @@ -19,12 +19,12 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; -import org.apache.shardingsphere.infra.metadata.statistics.ShardingSphereRowData; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; +import org.apache.shardingsphere.infra.metadata.statistics.ShardingSphereRowData; -import javax.sql.DataSource; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; @@ -44,54 +44,64 @@ public final class ShardingSphereTableDataCollectorUtils { /** * Collect row data. 
* - * @param shardingSphereDatabase ShardingSphere database - * @param sql sql + * @param database ShardingSphere database * @param table table * @param selectedColumnNames selected column names + * @param sql SQL * @return ShardingSphere row data * @throws SQLException sql exception */ - public static Collection collectRowData(final ShardingSphereDatabase shardingSphereDatabase, final String sql, final ShardingSphereTable table, - final Collection selectedColumnNames) throws SQLException { - if (isProtocolTypeAndStorageTypeDifferent(shardingSphereDatabase)) { + public static Collection collectRowData(final ShardingSphereDatabase database, final ShardingSphereTable table, + final Collection selectedColumnNames, final String sql) throws SQLException { + if (isDifferentProtocolAndStorageType(database)) { return Collections.emptyList(); } Collection result = new LinkedList<>(); - for (DataSource each : shardingSphereDatabase.getResourceMetaData().getDataSources().values()) { + for (StorageUnit each : database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().values()) { try ( - Connection connection = each.getConnection(); + Connection connection = each.getDataSource().getConnection(); Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery(sql)) { - result.addAll(getRows(resultSet, table, selectedColumnNames)); + result.addAll(getRows(table, selectedColumnNames, resultSet)); } } return result; } - private static boolean isProtocolTypeAndStorageTypeDifferent(final ShardingSphereDatabase shardingSphereDatabase) { - return !shardingSphereDatabase.getResourceMetaData().getStorageTypes().values().stream().allMatch(each -> each.getType().equals(shardingSphereDatabase.getProtocolType().getType())); + private static boolean isDifferentProtocolAndStorageType(final ShardingSphereDatabase database) { + return !database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().values().stream().allMatch(each -> 
each.getStorageType().equals(database.getProtocolType())); } - private static Collection getRows(final ResultSet resultSet, final ShardingSphereTable table, final Collection selectedColumnNames) throws SQLException { + private static Collection getRows(final ShardingSphereTable table, final Collection selectedColumnNames, final ResultSet resultSet) throws SQLException { Collection result = new LinkedList<>(); while (resultSet.next()) { - result.add(new ShardingSphereRowData(getRow(table, resultSet, selectedColumnNames))); + result.add(new ShardingSphereRowData(getRow(table, selectedColumnNames, resultSet))); } return result; } - private static List getRow(final ShardingSphereTable table, final ResultSet resultSet, final Collection selectedColumnNames) throws SQLException { + private static List getRow(final ShardingSphereTable table, final Collection selectedColumnNames, final ResultSet resultSet) throws SQLException { List result = new LinkedList<>(); for (ShardingSphereColumn each : table.getColumnValues()) { - if (selectedColumnNames.contains(each.getName())) { - result.add(convertIfNecessary(resultSet.getObject(each.getName()), each.getDataType())); - } else { - result.add(mockValue(each.getDataType())); - } + result.add(selectedColumnNames.contains(each.getName()) ? 
convertValueIfNecessary(resultSet.getObject(each.getName()), each.getDataType()) : mockValue(each.getDataType())); } return result; } + private static Object convertValueIfNecessary(final Object data, final int dataType) { + if (null == data) { + return null; + } + switch (dataType) { + case Types.ARRAY: + return data.toString(); + case Types.BIGINT: + return Long.valueOf(data.toString()); + default: + return data; + } + } + private static Object mockValue(final int dataType) { switch (dataType) { case Types.BIGINT: @@ -112,14 +122,4 @@ private static Object mockValue(final int dataType) { return null; } } - - private static Object convertIfNecessary(final Object data, final int dataType) { - if (Types.ARRAY == dataType) { - return null == data ? null : data.toString(); - } - if (Types.BIGINT == dataType) { - return null == data ? null : Long.valueOf(data.toString()); - } - return data; - } } diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/collector/tables/PgClassTableCollector.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/collector/tables/PgClassTableCollector.java index 22f7b2fc4162a..b72ff4290b5bb 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/collector/tables/PgClassTableCollector.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/collector/tables/PgClassTableCollector.java @@ -52,7 +52,7 @@ public final class PgClassTableCollector implements ShardingSphereStatisticsColl public Optional collect(final String databaseName, final ShardingSphereTable table, final Map shardingSphereDatabases) throws SQLException { Collection rows = ShardingSphereTableDataCollectorUtils.collectRowData(shardingSphereDatabases.get(databaseName), - SELECT_SQL, table, Arrays.stream(COLUMN_NAMES.split(",")).map(String::trim).collect(Collectors.toList())); + table, 
Arrays.stream(COLUMN_NAMES.split(",")).map(String::trim).collect(Collectors.toList()), SELECT_SQL); Collection rowData = decorateTableName(rows, table, shardingSphereDatabases.get(databaseName).getRuleMetaData().getRules()); ShardingSphereTableData result = new ShardingSphereTableData(PG_CLASS); result.getRows().addAll(rowData); diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/collector/tables/PgNamespaceTableCollector.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/collector/tables/PgNamespaceTableCollector.java index b6d16c2803c04..bdea1de566bf7 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/collector/tables/PgNamespaceTableCollector.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/collector/tables/PgNamespaceTableCollector.java @@ -46,7 +46,7 @@ public final class PgNamespaceTableCollector implements ShardingSphereStatistics public Optional collect(final String databaseName, final ShardingSphereTable table, final Map shardingSphereDatabases) throws SQLException { Collection rows = ShardingSphereTableDataCollectorUtils.collectRowData(shardingSphereDatabases.get(databaseName), - SELECT_SQL, table, Arrays.stream(COLUMN_NAMES.split(",")).map(String::trim).collect(Collectors.toList())); + table, Arrays.stream(COLUMN_NAMES.split(",")).map(String::trim).collect(Collectors.toList()), SELECT_SQL); ShardingSphereTableData result = new ShardingSphereTableData(PG_NAMESPACE); result.getRows().addAll(rows); return Optional.of(result); diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/rule/ShardingSphereRule.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/rule/ShardingSphereRule.java index ac887ac5491e7..4102ec08de978 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/rule/ShardingSphereRule.java +++ 
b/infra/common/src/main/java/org/apache/shardingsphere/infra/rule/ShardingSphereRule.java @@ -30,11 +30,4 @@ public interface ShardingSphereRule { * @return rule configuration */ RuleConfiguration getConfiguration(); - - /** - * Get type. - * - * @return rule type - */ - String getType(); } diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/rule/identifier/type/DataSourceContainedRule.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/rule/identifier/type/DataSourceContainedRule.java index ca5302c233d10..fb49a376a528c 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/rule/identifier/type/DataSourceContainedRule.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/rule/identifier/type/DataSourceContainedRule.java @@ -29,7 +29,7 @@ public interface DataSourceContainedRule extends ShardingSphereRule { /** * Get data source mapper. - * + * * @return data source mapper */ Map> getDataSourceMapper(); diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourceConfigurationSwapper.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourceConfigurationSwapper.java index 15c110d2b262a..d1f9f089a41bb 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourceConfigurationSwapper.java +++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourceConfigurationSwapper.java @@ -19,7 +19,7 @@ import com.google.common.base.Preconditions; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.yaml.config.pojo.YamlRootConfiguration; import javax.sql.DataSource; @@ 
-56,31 +56,31 @@ public Map swapToDataSources(final Map swapToDataSources(final Map> yamlDataSources, final boolean cacheEnabled) { - return DataSourcePoolCreator.create(yamlDataSources.entrySet().stream().collect(Collectors.toMap(Entry::getKey, entry -> swapToDataSourceProperties(entry.getValue()))), cacheEnabled); + return DataSourcePoolCreator.create(yamlDataSources.entrySet().stream().collect(Collectors.toMap(Entry::getKey, entry -> swapToDataSourcePoolProperties(entry.getValue()))), cacheEnabled); } /** - * Get data source properties. + * Get data source pool properties. * - * @param yamlRootConfig yaml root configuration - * @return data source name to data source properties map + * @param yamlRootConfig YAML root configuration + * @return data source name to data source pool properties map */ - public Map getDataSourcePropertiesMap(final YamlRootConfiguration yamlRootConfig) { + public Map getDataSourcePoolPropertiesMap(final YamlRootConfiguration yamlRootConfig) { Map> yamlDataSourceConfigs = yamlRootConfig.getDataSources(); - Map result = new LinkedHashMap<>(yamlDataSourceConfigs.size(), 1F); - yamlDataSourceConfigs.forEach((key, value) -> result.put(key, swapToDataSourceProperties(value))); + Map result = new LinkedHashMap<>(yamlDataSourceConfigs.size(), 1F); + yamlDataSourceConfigs.forEach((key, value) -> result.put(key, swapToDataSourcePoolProperties(value))); return result; } /** - * Swap to data source properties. + * Swap to data source pool properties. 
* * @param yamlConfig YAML configurations - * @return data source properties + * @return data source pool properties */ - public DataSourceProperties swapToDataSourceProperties(final Map yamlConfig) { + public DataSourcePoolProperties swapToDataSourcePoolProperties(final Map yamlConfig) { Preconditions.checkState(yamlConfig.containsKey(DATA_SOURCE_CLASS_NAME_KEY), "%s can not be null.", DATA_SOURCE_CLASS_NAME_KEY); - return new DataSourceProperties(yamlConfig.get(DATA_SOURCE_CLASS_NAME_KEY).toString(), getProperties(yamlConfig)); + return new DataSourcePoolProperties(yamlConfig.get(DATA_SOURCE_CLASS_NAME_KEY).toString(), getProperties(yamlConfig)); } @SuppressWarnings({"rawtypes", "unchecked"}) @@ -99,12 +99,12 @@ private Map getProperties(final Map yamlConfig) /** * Swap to map from data source properties. * - * @param dataSourceProps data source properties + * @param props data source pool properties * @return data source map */ - public Map swapToMap(final DataSourceProperties dataSourceProps) { - Map result = new HashMap<>(dataSourceProps.getAllStandardProperties()); - result.put(DATA_SOURCE_CLASS_NAME_KEY, dataSourceProps.getDataSourceClassName()); + public Map swapToMap(final DataSourcePoolProperties props) { + Map result = new HashMap<>(props.getAllStandardProperties()); + result.put(DATA_SOURCE_CLASS_NAME_KEY, props.getPoolClassName()); return result; } } diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfigurationTest.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfigurationTest.java index d7f01b83e6705..a5aa710f10b88 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfigurationTest.java +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceGeneratedDatabaseConfigurationTest.java @@ -18,11 +18,12 @@ 
package org.apache.shardingsphere.infra.config.database.impl; import com.zaxxer.hikari.HikariDataSource; -import org.apache.shardingsphere.infra.datasource.ShardingSphereStorageDataSourceWrapper; -import org.apache.shardingsphere.infra.datasource.config.ConnectionConfiguration; -import org.apache.shardingsphere.infra.datasource.config.DataSourceConfiguration; -import org.apache.shardingsphere.infra.datasource.config.PoolConfiguration; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource; +import org.apache.shardingsphere.infra.datasource.pool.config.ConnectionConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.PoolConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode; import org.apache.shardingsphere.infra.fixture.FixtureRuleConfiguration; import org.junit.jupiter.api.Test; @@ -34,7 +35,6 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; class DataSourceGeneratedDatabaseConfigurationTest { @@ -43,16 +43,14 @@ class DataSourceGeneratedDatabaseConfigurationTest { void assertGetDataSources() { DataSourceGeneratedDatabaseConfiguration databaseConfig = createDataSourceGeneratedDatabaseConfiguration(); DataSource dataSource = databaseConfig.getDataSources().get("normal_db"); - assertTrue(dataSource instanceof ShardingSphereStorageDataSourceWrapper); - ShardingSphereStorageDataSourceWrapper wrapper = (ShardingSphereStorageDataSourceWrapper) dataSource; - 
assertTrue(wrapper.getDataSource() instanceof HikariDataSource); - assertNull(wrapper.getCatalog()); + assertTrue(dataSource instanceof CatalogSwitchableDataSource); + assertTrue(((CatalogSwitchableDataSource) dataSource).getDataSource() instanceof HikariDataSource); } @Test void assertGetStorageNodes() { DataSourceGeneratedDatabaseConfiguration databaseConfig = createDataSourceGeneratedDatabaseConfiguration(); - HikariDataSource hikariDataSource = (HikariDataSource) databaseConfig.getStorageResource().getStorageNodes().get("normal_db"); + HikariDataSource hikariDataSource = (HikariDataSource) databaseConfig.getStorageResource().getStorageNodeDataSources().get(new StorageNode("normal_db")); assertThat(hikariDataSource.getJdbcUrl(), is("jdbc:mock://127.0.0.1/normal_db")); assertThat(hikariDataSource.getUsername(), is("root")); assertThat(hikariDataSource.getPassword(), is("")); @@ -62,10 +60,8 @@ void assertGetStorageNodes() { void assertGetStorageUnits() { DataSourceGeneratedDatabaseConfiguration databaseConfig = createDataSourceGeneratedDatabaseConfiguration(); DataSource dataSource = databaseConfig.getDataSources().get("normal_db"); - assertTrue(dataSource instanceof ShardingSphereStorageDataSourceWrapper); - ShardingSphereStorageDataSourceWrapper wrapper = (ShardingSphereStorageDataSourceWrapper) dataSource; - assertTrue(wrapper.getDataSource() instanceof HikariDataSource); - assertNull(wrapper.getCatalog()); + assertTrue(dataSource instanceof CatalogSwitchableDataSource); + assertTrue(((CatalogSwitchableDataSource) dataSource).getDataSource() instanceof HikariDataSource); } @Test @@ -76,9 +72,9 @@ void assertGetRuleConfigurations() { } @Test - void assertGetDataSourceProperties() { + void assertGetDataSourcePoolProperties() { DataSourceGeneratedDatabaseConfiguration databaseConfig = createDataSourceGeneratedDatabaseConfiguration(); - DataSourceProperties props = databaseConfig.getDataSourcePropsMap().get("normal_db"); + DataSourcePoolProperties props = 
databaseConfig.getDataSourcePoolPropertiesMap().get("normal_db"); Map poolStandardProps = props.getPoolPropertySynonyms().getStandardProperties(); assertThat(poolStandardProps.size(), is(6)); assertThat(poolStandardProps.get("connectionTimeoutMilliseconds"), is(2000L)); diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfigurationTest.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfigurationTest.java index 55f80a2a7db9f..16476889fd17d 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfigurationTest.java +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfigurationTest.java @@ -17,8 +17,9 @@ package org.apache.shardingsphere.infra.config.database.impl; -import org.apache.shardingsphere.infra.datasource.ShardingSphereStorageDataSourceWrapper; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode; import org.apache.shardingsphere.infra.fixture.FixtureRuleConfiguration; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.junit.jupiter.api.Test; @@ -29,7 +30,6 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; class DataSourceProvidedDatabaseConfigurationTest { @@ -38,16 +38,14 @@ class DataSourceProvidedDatabaseConfigurationTest { void assertGetDataSources() { DataSourceProvidedDatabaseConfiguration databaseConfig = 
createDataSourceProvidedDatabaseConfiguration(); DataSource dataSource = databaseConfig.getDataSources().get("foo_ds"); - assertTrue(dataSource instanceof ShardingSphereStorageDataSourceWrapper); - ShardingSphereStorageDataSourceWrapper wrapper = (ShardingSphereStorageDataSourceWrapper) dataSource; - assertTrue(wrapper.getDataSource() instanceof MockedDataSource); - assertNull(wrapper.getCatalog()); + assertTrue(dataSource instanceof CatalogSwitchableDataSource); + assertTrue(((CatalogSwitchableDataSource) dataSource).getDataSource() instanceof MockedDataSource); } @Test void assertGetStorageNodes() { DataSourceProvidedDatabaseConfiguration databaseConfig = createDataSourceProvidedDatabaseConfiguration(); - MockedDataSource dataSource = (MockedDataSource) databaseConfig.getStorageResource().getStorageNodes().get("foo_ds"); + MockedDataSource dataSource = (MockedDataSource) databaseConfig.getStorageResource().getStorageNodeDataSources().get(new StorageNode("foo_ds")); assertThat(dataSource.getUrl(), is("jdbc:mock://127.0.0.1/foo_ds")); assertThat(dataSource.getUsername(), is("root")); assertThat(dataSource.getPassword(), is("root")); @@ -57,10 +55,8 @@ void assertGetStorageNodes() { void assertGetStorageUnits() { DataSourceProvidedDatabaseConfiguration databaseConfig = createDataSourceProvidedDatabaseConfiguration(); DataSource dataSource = databaseConfig.getDataSources().get("foo_ds"); - assertTrue(dataSource instanceof ShardingSphereStorageDataSourceWrapper); - ShardingSphereStorageDataSourceWrapper wrapper = (ShardingSphereStorageDataSourceWrapper) dataSource; - assertTrue(wrapper.getDataSource() instanceof MockedDataSource); - assertNull(wrapper.getCatalog()); + assertTrue(dataSource instanceof CatalogSwitchableDataSource); + assertTrue(((CatalogSwitchableDataSource) dataSource).getDataSource() instanceof MockedDataSource); } @Test @@ -71,9 +67,9 @@ void assertGetRuleConfigurations() { } @Test - void assertGetDataSourceProperties() { + void 
assertGetDataSourcePoolProperties() { DataSourceProvidedDatabaseConfiguration databaseConfig = createDataSourceProvidedDatabaseConfiguration(); - DataSourceProperties props = databaseConfig.getDataSourcePropsMap().get("foo_ds"); + DataSourcePoolProperties props = databaseConfig.getDataSourcePoolPropertiesMap().get("foo_ds"); Map poolStandardProps = props.getPoolPropertySynonyms().getStandardProperties(); assertThat(poolStandardProps.size(), is(0)); Map connStandardProps = props.getConnectionPropertySynonyms().getStandardProperties(); diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/fixture/FixtureRule.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/fixture/FixtureRule.java index e50265e928725..35a9971bd98d8 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/fixture/FixtureRule.java +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/fixture/FixtureRule.java @@ -38,9 +38,4 @@ public RuleConfiguration getConfiguration() { public Map> getDataSourceMapper() { return Collections.emptyMap(); } - - @Override - public String getType() { - return FixtureRule.class.getSimpleName(); - } } diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/ShardingSphereMetaDataTest.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/ShardingSphereMetaDataTest.java index bee8ad614deba..da1846beea8d8 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/ShardingSphereMetaDataTest.java +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/ShardingSphereMetaDataTest.java @@ -23,6 +23,7 @@ import org.apache.shardingsphere.infra.instance.InstanceContext; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import 
org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.rule.identifier.type.ResourceHeldRule; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; @@ -45,6 +46,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -57,7 +59,7 @@ class ShardingSphereMetaDataTest { @Test void assertAddDatabase() { ResourceHeldRule globalResourceHeldRule = mock(ResourceHeldRule.class); - ShardingSphereDatabase database = mockDatabase(mock(ResourceMetaData.class), new MockedDataSource(), mock(ResourceHeldRule.class)); + ShardingSphereDatabase database = mockDatabase(mock(ResourceMetaData.class, RETURNS_DEEP_STUBS), new MockedDataSource(), mock(ResourceHeldRule.class)); DatabaseType databaseType = mock(DatabaseType.class); ConfigurationProperties configProps = new ConfigurationProperties(new Properties()); when(ShardingSphereDatabase.create("foo_db", databaseType, configProps)).thenReturn(database); @@ -71,7 +73,7 @@ void assertAddDatabase() { @Test void assertDropDatabase() { - ResourceMetaData resourceMetaData = mock(ResourceMetaData.class); + ResourceMetaData resourceMetaData = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS); DataSource dataSource = new MockedDataSource(); ResourceHeldRule databaseResourceHeldRule = mock(ResourceHeldRule.class); ResourceHeldRule globalResourceHeldRule = mock(ResourceHeldRule.class); @@ -88,7 +90,9 @@ private ShardingSphereDatabase mockDatabase(final ResourceMetaData resourceMetaD ShardingSphereDatabase result = mock(ShardingSphereDatabase.class); when(result.getName()).thenReturn("foo_db"); when(result.getResourceMetaData()).thenReturn(resourceMetaData); - 
when(result.getResourceMetaData().getDataSources()).thenReturn(Collections.singletonMap("foo_db", dataSource)); + StorageUnit storageUnit = mock(StorageUnit.class); + when(storageUnit.getDataSource()).thenReturn(dataSource); + when(result.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("foo_db", storageUnit)); when(result.getRuleMetaData()).thenReturn(new RuleMetaData(Collections.singleton(databaseResourceHeldRule))); return result; } diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/rule/ShardingSphereRuleFixture.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/rule/ShardingSphereRuleFixture.java index 48a3211a0f408..67f6ca44944fa 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/rule/ShardingSphereRuleFixture.java +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/rule/ShardingSphereRuleFixture.java @@ -28,9 +28,4 @@ public class ShardingSphereRuleFixture implements ShardingSphereRule { public RuleConfiguration getConfiguration() { return mock(RuleConfiguration.class); } - - @Override - public String getType() { - return ShardingSphereRuleFixture.class.getSimpleName(); - } } diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/schema/builder/GenericSchemaBuilderMaterialTest.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/schema/builder/GenericSchemaBuilderMaterialTest.java new file mode 100644 index 0000000000000..3b3237496fa3d --- /dev/null +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/schema/builder/GenericSchemaBuilderMaterialTest.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.metadata.database.schema.builder; + +import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.junit.jupiter.api.Test; + +import java.util.Collections; +import java.util.Properties; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class GenericSchemaBuilderMaterialTest { + + @Test + void assertIsSameProtocolAndStorageTypes() { + GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(TypedSPILoader.getService(DatabaseType.class, "FIXTURE"), + Collections.singletonMap("foo", TypedSPILoader.getService(DatabaseType.class, "FIXTURE")), + Collections.emptyMap(), Collections.emptyList(), new ConfigurationProperties(new Properties()), ""); + assertTrue(material.isSameProtocolAndStorageTypes()); + } + + @Test + void assertIsDifferentProtocolAndStorageTypes() { + GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(TypedSPILoader.getService(DatabaseType.class, "FIXTURE"), + Collections.singletonMap("foo", null), + Collections.emptyMap(), 
Collections.emptyList(), new ConfigurationProperties(new Properties()), ""); + assertFalse(material.isSameProtocolAndStorageTypes()); + } +} diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/schema/builder/GenericSchemaBuilderTest.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/schema/builder/GenericSchemaBuilderTest.java index d4ebb9eb951bf..a2ba70da0bfd6 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/schema/builder/GenericSchemaBuilderTest.java +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/schema/builder/GenericSchemaBuilderTest.java @@ -19,13 +19,14 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.database.core.DefaultDatabase; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.metadata.database.schema.fixture.rule.TableContainedFixtureRule; import org.apache.shardingsphere.infra.database.core.metadata.data.loader.MetaDataLoader; import org.apache.shardingsphere.infra.database.core.metadata.data.model.SchemaMetaData; import org.apache.shardingsphere.infra.database.core.metadata.data.model.TableMetaData; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.metadata.database.schema.fixture.rule.TableContainedFixtureRule; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.mock.AutoMockExtension; import org.apache.shardingsphere.test.mock.StaticMockSettings; import org.junit.jupiter.api.BeforeEach; @@ -57,8 +58,9 @@ class GenericSchemaBuilderTest { @BeforeEach void setUp() { - 
DatabaseType databaseType = mock(DatabaseType.class); - material = new GenericSchemaBuilderMaterial(databaseType, Collections.emptyMap(), Collections.singletonMap(DefaultDatabase.LOGIC_NAME, mock(DataSource.class)), + DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, "FIXTURE"); + material = new GenericSchemaBuilderMaterial(databaseType, Collections.singletonMap(DefaultDatabase.LOGIC_NAME, databaseType), + Collections.singletonMap(DefaultDatabase.LOGIC_NAME, mock(DataSource.class)), Collections.singleton(new TableContainedFixtureRule()), new ConfigurationProperties(new Properties()), DefaultDatabase.LOGIC_NAME); } diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/schema/fixture/rule/TableContainedFixtureRule.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/schema/fixture/rule/TableContainedFixtureRule.java index dac17fbc6be00..91251a632eda1 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/schema/fixture/rule/TableContainedFixtureRule.java +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/schema/fixture/rule/TableContainedFixtureRule.java @@ -49,9 +49,4 @@ public TableNamesMapper getEnhancedTableMapper() { public RuleConfiguration getConfiguration() { return mock(RuleConfiguration.class); } - - @Override - public String getType() { - return TableContainedFixtureRule.class.getSimpleName(); - } } diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/schema/util/SchemaMetaDataUtilsTest.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/schema/util/SchemaMetaDataUtilsTest.java index 94347e34f5c87..f08a63688bace 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/schema/util/SchemaMetaDataUtilsTest.java +++ 
b/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/database/schema/util/SchemaMetaDataUtilsTest.java @@ -18,12 +18,13 @@ package org.apache.shardingsphere.infra.metadata.database.schema.util; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; +import org.apache.shardingsphere.infra.database.core.metadata.data.loader.MetaDataLoaderMaterial; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.datanode.DataNode; import org.apache.shardingsphere.infra.metadata.database.schema.builder.GenericSchemaBuilderMaterial; -import org.apache.shardingsphere.infra.database.core.metadata.data.loader.MetaDataLoaderMaterial; import org.apache.shardingsphere.infra.rule.identifier.type.DataNodeContainedRule; import org.apache.shardingsphere.infra.rule.identifier.type.DataSourceContainedRule; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.junit.jupiter.api.Test; @@ -47,7 +48,7 @@ class SchemaMetaDataUtilsTest { void assertGetSchemaMetaDataLoaderMaterialsWhenConfigCheckMetaDataEnable() { DataNodeContainedRule dataNodeContainedRule = mock(DataNodeContainedRule.class); when(dataNodeContainedRule.getDataNodesByTableName("t_order")).thenReturn(mockShardingDataNodes()); - GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(mock(DatabaseType.class), Collections.emptyMap(), mockDataSourceMap(), + GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(mock(DatabaseType.class), mockStorageTypes(), mockDataSourceMap(), Arrays.asList(dataNodeContainedRule, mock(DataSourceContainedRule.class)), mock(ConfigurationProperties.class), "sharding_db"); Collection actual = SchemaMetaDataUtils.getMetaDataLoaderMaterials(Collections.singleton("t_order"), material, true); assertThat(actual.size(), is(2)); @@ -64,7 +65,7 @@ void 
assertGetSchemaMetaDataLoaderMaterialsWhenConfigCheckMetaDataEnable() { void assertGetSchemaMetaDataLoaderMaterialsWhenNotConfigCheckMetaDataEnable() { DataNodeContainedRule dataNodeContainedRule = mock(DataNodeContainedRule.class); when(dataNodeContainedRule.getDataNodesByTableName("t_order")).thenReturn(mockShardingDataNodes()); - GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(mock(DatabaseType.class), Collections.emptyMap(), mockDataSourceMap(), + GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(mock(DatabaseType.class), mockStorageTypes(), mockDataSourceMap(), Arrays.asList(dataNodeContainedRule, mock(DataSourceContainedRule.class)), mock(ConfigurationProperties.class), "sharding_db"); Collection actual = SchemaMetaDataUtils.getMetaDataLoaderMaterials(Collections.singleton("t_order"), material, false); assertThat(actual.size(), is(1)); @@ -78,7 +79,7 @@ void assertGetSchemaMetaDataLoaderMaterialsWhenNotConfigCheckMetaDataEnable() { void assertGetSchemaMetaDataLoaderMaterialsWhenNotConfigCheckMetaDataEnableForSingleTableDataNode() { DataNodeContainedRule dataNodeContainedRule = mock(DataNodeContainedRule.class); when(dataNodeContainedRule.getDataNodesByTableName("t_single")).thenReturn(mockSingleTableDataNodes()); - GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(mock(DatabaseType.class), Collections.emptyMap(), mockDataSourceMap(), + GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(mock(DatabaseType.class), mockStorageTypes(), mockDataSourceMap(), Arrays.asList(dataNodeContainedRule, mock(DataSourceContainedRule.class)), mock(ConfigurationProperties.class), "public"); Collection actual = SchemaMetaDataUtils.getMetaDataLoaderMaterials(Collections.singleton("t_single"), material, false); assertThat(actual.size(), is(1)); @@ -106,4 +107,12 @@ private Map mockDataSourceMap() { result.put("ds_1", new MockedDataSource()); return result; } + + private Map 
mockStorageTypes() { + Map result = new HashMap<>(2, 1F); + DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, "FIXTURE"); + result.put("ds_0", databaseType); + result.put("ds_1", databaseType); + return result; + } } diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/statistics/builder/PostgreSQLShardingSphereStatisticsBuilderTest.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/statistics/builder/PostgreSQLShardingSphereStatisticsBuilderTest.java index 67f655011c606..e8376ecfba3a0 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/statistics/builder/PostgreSQLShardingSphereStatisticsBuilderTest.java +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/metadata/statistics/builder/PostgreSQLShardingSphereStatisticsBuilderTest.java @@ -29,6 +29,7 @@ import java.util.Map; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -51,7 +52,7 @@ private ShardingSphereMetaData mockMetaData() { } private Map mockDatabaseMap() { - ShardingSphereDatabase database = mock(ShardingSphereDatabase.class); + ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); Map schemaMap = mockSchemaMap(); when(database.getSchemas()).thenReturn(schemaMap); return Collections.singletonMap("logic_db", database); diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/rule/builder/fixture/FixtureDatabaseRule.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/rule/builder/fixture/FixtureDatabaseRule.java index ecda50857b3cd..86775ce9c2acf 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/rule/builder/fixture/FixtureDatabaseRule.java +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/rule/builder/fixture/FixtureDatabaseRule.java @@ -28,9 
+28,4 @@ public final class FixtureDatabaseRule implements DatabaseRule { public RuleConfiguration getConfiguration() { return mock(RuleConfiguration.class); } - - @Override - public String getType() { - return FixtureDatabaseRule.class.getSimpleName(); - } } diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/rule/builder/fixture/FixtureGlobalRule.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/rule/builder/fixture/FixtureGlobalRule.java index 669c36071c594..c365e98557832 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/rule/builder/fixture/FixtureGlobalRule.java +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/rule/builder/fixture/FixtureGlobalRule.java @@ -28,9 +28,4 @@ public final class FixtureGlobalRule implements GlobalRule { public RuleConfiguration getConfiguration() { return mock(RuleConfiguration.class); } - - @Override - public String getType() { - return FixtureGlobalRule.class.getSimpleName(); - } } diff --git a/infra/common/src/test/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourcePropertiesSwapperTest.java b/infra/common/src/test/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourcePoolPropertiesSwapperTest.java similarity index 91% rename from infra/common/src/test/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourcePropertiesSwapperTest.java rename to infra/common/src/test/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourcePoolPropertiesSwapperTest.java index 81a118f131478..7e0e9877fec72 100644 --- a/infra/common/src/test/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourcePropertiesSwapperTest.java +++ b/infra/common/src/test/java/org/apache/shardingsphere/infra/yaml/config/swapper/resource/YamlDataSourcePoolPropertiesSwapperTest.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.infra.yaml.config.swapper.resource; -import 
org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.junit.jupiter.api.Test; @@ -29,7 +29,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -class YamlDataSourcePropertiesSwapperTest { +class YamlDataSourcePoolPropertiesSwapperTest { private final YamlDataSourceConfigurationSwapper swapper = new YamlDataSourceConfigurationSwapper(); @@ -48,12 +48,12 @@ void assertSwapToDataSources() { } @Test - void assertSwapToDataSourceProperties() { + void assertSwapToDataSourcePoolProperties() { Map yamlConfig = new HashMap<>(3, 1F); yamlConfig.put("dataSourceClassName", MockedDataSource.class.getName()); yamlConfig.put("url", "xx:xxx"); yamlConfig.put("username", "root"); - DataSourceProperties actual = swapper.swapToDataSourceProperties(yamlConfig); + DataSourcePoolProperties actual = swapper.swapToDataSourcePoolProperties(yamlConfig); assertThat(actual.getAllLocalProperties().size(), is(3)); assertThat(actual.getAllLocalProperties().get("dataSourceClassName").toString(), is("org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource")); assertThat(actual.getAllLocalProperties().get("url").toString(), is("xx:xxx")); @@ -62,7 +62,7 @@ void assertSwapToDataSourceProperties() { @Test void assertSwapToMap() { - Map actual = swapper.swapToMap(new DataSourceProperties(MockedDataSource.class.getName(), createProperties())); + Map actual = swapper.swapToMap(new DataSourcePoolProperties(MockedDataSource.class.getName(), createProperties())); assertThat(actual.get("dataSourceClassName"), is(MockedDataSource.class.getName())); assertThat(actual.get("url").toString(), is("xx:xxx")); assertThat(actual.get("username").toString(), is("root")); diff --git 
a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/MetaDataRefreshEngine.java b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/MetaDataRefreshEngine.java index 9be0d70146b14..ce7d1b53a58ad 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/MetaDataRefreshEngine.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/MetaDataRefreshEngine.java @@ -66,7 +66,7 @@ public void refresh(final SQLStatementContext sqlStatementContext, final Collect String schemaName = sqlStatementContext.getTablesContext().getSchemaName() .orElseGet(() -> new DatabaseTypeRegistry(sqlStatementContext.getDatabaseType()).getDefaultSchemaName(database.getName())).toLowerCase(); Collection logicDataSourceNames = routeUnits.stream().map(each -> each.getDataSourceMapper().getLogicName()).collect(Collectors.toList()); - schemaRefresher.get().refresh(modeContextManager, database, logicDataSourceNames, schemaName, sqlStatementContext.getSqlStatement(), props); + schemaRefresher.get().refresh(modeContextManager, database, logicDataSourceNames, schemaName, sqlStatementContext.getDatabaseType(), sqlStatementContext.getSqlStatement(), props); return; } IGNORED_SQL_STATEMENT_CLASSES.add(sqlStatementClass); diff --git a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/MetaDataRefresher.java b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/MetaDataRefresher.java index a2b83e3f62fdd..4af21f6b2eb57 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/MetaDataRefresher.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/MetaDataRefresher.java @@ -18,6 +18,7 @@ package org.apache.shardingsphere.infra.connection.refresher; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; +import 
org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.spi.annotation.SingletonSPI; @@ -42,12 +43,13 @@ public interface MetaDataRefresher extends TypedSPI { * @param database database * @param logicDataSourceNames route data source names * @param schemaName schema name + * @param databaseType database type * @param sqlStatement SQL statement * @param props configuration properties * @throws SQLException SQL exception */ void refresh(ModeContextManager modeContextManager, ShardingSphereDatabase database, Collection logicDataSourceNames, String schemaName, - T sqlStatement, ConfigurationProperties props) throws SQLException; + DatabaseType databaseType, T sqlStatement, ConfigurationProperties props) throws SQLException; @Override Class getType(); diff --git a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/index/AlterIndexStatementSchemaRefresher.java b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/index/AlterIndexStatementSchemaRefresher.java index 974c7addcfa5c..61a005dd2021c 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/index/AlterIndexStatementSchemaRefresher.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/index/AlterIndexStatementSchemaRefresher.java @@ -20,6 +20,7 @@ import com.google.common.base.Preconditions; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.connection.refresher.MetaDataRefresher; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import 
org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereIndex; @@ -40,7 +41,7 @@ public final class AlterIndexStatementSchemaRefresher implements MetaDataRefresh @Override public void refresh(final ModeContextManager modeContextManager, final ShardingSphereDatabase database, final Collection logicDataSourceNames, - final String schemaName, final AlterIndexStatement sqlStatement, final ConfigurationProperties props) { + final String schemaName, final DatabaseType databaseType, final AlterIndexStatement sqlStatement, final ConfigurationProperties props) { Optional renameIndex = AlterIndexStatementHandler.getRenameIndexSegment(sqlStatement); if (!sqlStatement.getIndex().isPresent() || !renameIndex.isPresent()) { return; diff --git a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/index/CreateIndexStatementSchemaRefresher.java b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/index/CreateIndexStatementSchemaRefresher.java index c6892d282f914..6f9d013af3b6e 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/index/CreateIndexStatementSchemaRefresher.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/index/CreateIndexStatementSchemaRefresher.java @@ -20,6 +20,7 @@ import com.google.common.base.Strings; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.connection.refresher.MetaDataRefresher; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereIndex; @@ -37,9 +38,10 @@ public final class 
CreateIndexStatementSchemaRefresher implements MetaDataRefres @Override public void refresh(final ModeContextManager modeContextManager, final ShardingSphereDatabase database, final Collection logicDataSourceNames, - final String schemaName, final CreateIndexStatement sqlStatement, final ConfigurationProperties props) { - String indexName = null != sqlStatement.getIndex() ? sqlStatement.getIndex().getIndexName().getIdentifier().getValue() - : IndexMetaDataUtils.getGeneratedLogicIndexName(sqlStatement.getColumns()); + final String schemaName, final DatabaseType databaseType, final CreateIndexStatement sqlStatement, final ConfigurationProperties props) { + String indexName = null == sqlStatement.getIndex() + ? IndexMetaDataUtils.getGeneratedLogicIndexName(sqlStatement.getColumns()) + : sqlStatement.getIndex().getIndexName().getIdentifier().getValue(); if (Strings.isNullOrEmpty(indexName)) { return; } diff --git a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/index/DropIndexStatementSchemaRefresher.java b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/index/DropIndexStatementSchemaRefresher.java index 51a15caa1e806..0068d5fd0bad0 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/index/DropIndexStatementSchemaRefresher.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/index/DropIndexStatementSchemaRefresher.java @@ -19,6 +19,7 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.connection.refresher.MetaDataRefresher; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.schema.QualifiedTable; @@ -41,7 
+42,7 @@ public final class DropIndexStatementSchemaRefresher implements MetaDataRefreshe @Override public void refresh(final ModeContextManager modeContextManager, final ShardingSphereDatabase database, final Collection logicDataSourceNames, - final String schemaName, final DropIndexStatement sqlStatement, final ConfigurationProperties props) { + final String schemaName, final DatabaseType databaseType, final DropIndexStatement sqlStatement, final ConfigurationProperties props) { for (IndexSegment each : sqlStatement.getIndexes()) { String actualSchemaName = each.getOwner().map(optional -> optional.getIdentifier().getValue().toLowerCase()).orElse(schemaName); Optional logicTableName = findLogicTableName(database, sqlStatement, Collections.singletonList(each)); diff --git a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/schema/AlterSchemaStatementSchemaRefresher.java b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/schema/AlterSchemaStatementSchemaRefresher.java index 0a5f7e02e3ad9..6c95281bae9d1 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/schema/AlterSchemaStatementSchemaRefresher.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/schema/AlterSchemaStatementSchemaRefresher.java @@ -19,6 +19,7 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.connection.refresher.MetaDataRefresher; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.schema.pojo.AlterSchemaPOJO; @@ -35,7 +36,7 @@ public final class AlterSchemaStatementSchemaRefresher implements MetaDataRefres @Override public void refresh(final 
ModeContextManager modeContextManager, final ShardingSphereDatabase database, final Collection logicDataSourceNames, - final String schemaName, final AlterSchemaStatement sqlStatement, final ConfigurationProperties props) { + final String schemaName, final DatabaseType databaseType, final AlterSchemaStatement sqlStatement, final ConfigurationProperties props) { Optional renameSchemaName = AlterSchemaStatementHandler.getRenameSchema(sqlStatement).map(optional -> optional.getValue().toLowerCase()); if (!renameSchemaName.isPresent()) { return; diff --git a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/schema/CreateSchemaStatementSchemaRefresher.java b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/schema/CreateSchemaStatementSchemaRefresher.java index b87c5e8abe162..5a6cd8437775c 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/schema/CreateSchemaStatementSchemaRefresher.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/schema/CreateSchemaStatementSchemaRefresher.java @@ -19,6 +19,7 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.connection.refresher.MetaDataRefresher; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.CreateSchemaStatement; @@ -33,7 +34,7 @@ public final class CreateSchemaStatementSchemaRefresher implements MetaDataRefre @Override public void refresh(final ModeContextManager modeContextManager, final ShardingSphereDatabase database, final Collection logicDataSourceNames, - final String schemaName, final CreateSchemaStatement sqlStatement, final ConfigurationProperties 
props) { + final String schemaName, final DatabaseType databaseType, final CreateSchemaStatement sqlStatement, final ConfigurationProperties props) { (sqlStatement.getSchemaName().isPresent() ? sqlStatement.getSchemaName() : CreateSchemaStatementHandler.getUsername(sqlStatement)) .ifPresent(optional -> modeContextManager.createSchema(database.getName(), optional.getValue().toLowerCase())); } diff --git a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/schema/DropSchemaStatementSchemaRefresher.java b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/schema/DropSchemaStatementSchemaRefresher.java index 6642e92f5b29b..2c42d3672c60a 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/schema/DropSchemaStatementSchemaRefresher.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/schema/DropSchemaStatementSchemaRefresher.java @@ -19,6 +19,7 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.connection.refresher.MetaDataRefresher; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.DropSchemaStatement; @@ -34,7 +35,7 @@ public final class DropSchemaStatementSchemaRefresher implements MetaDataRefresh @Override public void refresh(final ModeContextManager modeContextManager, final ShardingSphereDatabase database, final Collection logicDataSourceNames, - final String schemaName, final DropSchemaStatement sqlStatement, final ConfigurationProperties props) { + final String schemaName, final DatabaseType databaseType, final DropSchemaStatement sqlStatement, final ConfigurationProperties props) { 
modeContextManager.dropSchema(database.getName(), getSchemaNames(sqlStatement)); } diff --git a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/AlterTableStatementSchemaRefresher.java b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/AlterTableStatementSchemaRefresher.java index 9ee2f0a85f6d1..97bdfe8fc5930 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/AlterTableStatementSchemaRefresher.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/AlterTableStatementSchemaRefresher.java @@ -19,6 +19,8 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.connection.refresher.MetaDataRefresher; +import org.apache.shardingsphere.infra.connection.refresher.util.TableRefreshUtils; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; @@ -28,7 +30,6 @@ import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; import org.apache.shardingsphere.infra.metadata.database.schema.pojo.AlterSchemaMetaDataPOJO; import org.apache.shardingsphere.infra.rule.identifier.type.MutableDataNodeRule; -import org.apache.shardingsphere.infra.rule.identifier.type.TableContainedRule; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.AlterTableStatement; import java.sql.SQLException; @@ -45,8 +46,8 @@ public final class AlterTableStatementSchemaRefresher implements MetaDataRefresh @Override public void refresh(final ModeContextManager modeContextManager, final ShardingSphereDatabase database, final Collection logicDataSourceNames, - final String 
schemaName, final AlterTableStatement sqlStatement, final ConfigurationProperties props) throws SQLException { - String tableName = sqlStatement.getTable().getTableName().getIdentifier().getValue(); + final String schemaName, final DatabaseType databaseType, final AlterTableStatement sqlStatement, final ConfigurationProperties props) throws SQLException { + String tableName = TableRefreshUtils.getTableName(databaseType, sqlStatement.getTable().getTableName().getIdentifier()); AlterSchemaMetaDataPOJO alterSchemaMetaDataPOJO = new AlterSchemaMetaDataPOJO(database.getName(), schemaName, logicDataSourceNames); if (sqlStatement.getRenameTable().isPresent()) { String renameTable = sqlStatement.getRenameTable().get().getTableName().getIdentifier().getValue(); @@ -61,20 +62,16 @@ public void refresh(final ModeContextManager modeContextManager, final ShardingS private ShardingSphereTable getTable(final ShardingSphereDatabase database, final Collection logicDataSourceNames, final String schemaName, final String tableName, final ConfigurationProperties props) throws SQLException { RuleMetaData ruleMetaData = new RuleMetaData(new LinkedList<>(database.getRuleMetaData().getRules())); - if (isSingleTable(tableName, database)) { + if (TableRefreshUtils.isSingleTable(tableName, database)) { ruleMetaData.findRules(MutableDataNodeRule.class).forEach(each -> each.put(logicDataSourceNames.iterator().next(), schemaName, tableName)); } - GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(database.getProtocolType(), - database.getResourceMetaData().getStorageTypes(), database.getResourceMetaData().getDataSources(), ruleMetaData.getRules(), props, schemaName); + GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial( + database.getProtocolType(), database.getResourceMetaData().getStorageUnitMetaData(), ruleMetaData.getRules(), props, schemaName); Map schemaMap = GenericSchemaBuilder.build(Collections.singletonList(tableName), material); return 
Optional.ofNullable(schemaMap.get(schemaName)).map(optional -> optional.getTable(tableName)) .orElseGet(() -> new ShardingSphereTable(tableName, Collections.emptyList(), Collections.emptyList(), Collections.emptyList())); } - private boolean isSingleTable(final String tableName, final ShardingSphereDatabase database) { - return database.getRuleMetaData().findRules(TableContainedRule.class).stream().noneMatch(each -> each.getDistributedTableMapper().contains(tableName)); - } - @Override public Class getType() { return AlterTableStatement.class; diff --git a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/CreateTableStatementSchemaRefresher.java b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/CreateTableStatementSchemaRefresher.java index 9c73fb130b5be..0b49b2c319fd5 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/CreateTableStatementSchemaRefresher.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/CreateTableStatementSchemaRefresher.java @@ -20,6 +20,7 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.connection.refresher.MetaDataRefresher; import org.apache.shardingsphere.infra.connection.refresher.util.TableRefreshUtils; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; @@ -29,7 +30,6 @@ import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; import org.apache.shardingsphere.infra.metadata.database.schema.pojo.AlterSchemaMetaDataPOJO; import org.apache.shardingsphere.infra.rule.identifier.type.MutableDataNodeRule; 
-import org.apache.shardingsphere.infra.rule.identifier.type.TableContainedRule; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.CreateTableStatement; import java.sql.SQLException; @@ -46,15 +46,15 @@ public final class CreateTableStatementSchemaRefresher implements MetaDataRefres @Override public void refresh(final ModeContextManager modeContextManager, final ShardingSphereDatabase database, final Collection logicDataSourceNames, - final String schemaName, final CreateTableStatement sqlStatement, final ConfigurationProperties props) throws SQLException { - String tableName = sqlStatement.getTable().getTableName().getIdentifier().getValue(); + final String schemaName, final DatabaseType databaseType, final CreateTableStatement sqlStatement, final ConfigurationProperties props) throws SQLException { + String tableName = TableRefreshUtils.getTableName(databaseType, sqlStatement.getTable().getTableName().getIdentifier()); RuleMetaData ruleMetaData = new RuleMetaData(new LinkedList<>(database.getRuleMetaData().getRules())); - boolean isSingleTable = isSingleTable(tableName, database); + boolean isSingleTable = TableRefreshUtils.isSingleTable(tableName, database); if (isSingleTable) { ruleMetaData.findRules(MutableDataNodeRule.class).forEach(each -> each.put(logicDataSourceNames.iterator().next(), schemaName, tableName)); } - GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(database.getProtocolType(), - database.getResourceMetaData().getStorageTypes(), database.getResourceMetaData().getDataSources(), ruleMetaData.getRules(), props, schemaName); + GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial( + database.getProtocolType(), database.getResourceMetaData().getStorageUnitMetaData(), ruleMetaData.getRules(), props, schemaName); Map schemaMap = GenericSchemaBuilder.build(Collections.singletonList(tableName), material); Optional actualTableMetaData = 
Optional.ofNullable(schemaMap.get(schemaName)).map(optional -> optional.getTable(tableName)); if (actualTableMetaData.isPresent()) { @@ -67,10 +67,6 @@ public void refresh(final ModeContextManager modeContextManager, final ShardingS } } - private boolean isSingleTable(final String tableName, final ShardingSphereDatabase database) { - return database.getRuleMetaData().findRules(TableContainedRule.class).stream().noneMatch(each -> each.getDistributedTableMapper().contains(tableName)); - } - @Override public Class getType() { return CreateTableStatement.class; diff --git a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/DropTableStatementSchemaRefresher.java b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/DropTableStatementSchemaRefresher.java index de3e58fb40bc3..a3d23536c94d9 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/DropTableStatementSchemaRefresher.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/DropTableStatementSchemaRefresher.java @@ -20,6 +20,7 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.connection.refresher.MetaDataRefresher; import org.apache.shardingsphere.infra.connection.refresher.util.TableRefreshUtils; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; @@ -37,7 +38,7 @@ public final class DropTableStatementSchemaRefresher implements MetaDataRefreshe @Override public void refresh(final ModeContextManager modeContextManager, final ShardingSphereDatabase database, final Collection logicDataSourceNames, - final String schemaName, 
final DropTableStatement sqlStatement, final ConfigurationProperties props) { + final String schemaName, final DatabaseType databaseType, final DropTableStatement sqlStatement, final ConfigurationProperties props) { AlterSchemaMetaDataPOJO alterSchemaMetaDataPOJO = new AlterSchemaMetaDataPOJO(database.getName(), schemaName); sqlStatement.getTables().forEach(each -> alterSchemaMetaDataPOJO.getDroppedTables().add(each.getTableName().getIdentifier().getValue())); RuleMetaData ruleMetaData = database.getRuleMetaData(); diff --git a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/RenameTableStatementSchemaRefresher.java b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/RenameTableStatementSchemaRefresher.java index b937fc4852f00..3122b4ed5c2b7 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/RenameTableStatementSchemaRefresher.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/table/RenameTableStatementSchemaRefresher.java @@ -19,6 +19,8 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.connection.refresher.MetaDataRefresher; +import org.apache.shardingsphere.infra.connection.refresher.util.TableRefreshUtils; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; @@ -28,7 +30,6 @@ import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; import org.apache.shardingsphere.infra.metadata.database.schema.pojo.AlterSchemaMetaDataPOJO; import org.apache.shardingsphere.infra.rule.identifier.type.MutableDataNodeRule; -import 
org.apache.shardingsphere.infra.rule.identifier.type.TableContainedRule; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.table.RenameTableDefinitionSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.RenameTableStatement; @@ -46,10 +47,11 @@ public final class RenameTableStatementSchemaRefresher implements MetaDataRefres @Override public void refresh(final ModeContextManager modeContextManager, final ShardingSphereDatabase database, final Collection logicDataSourceNames, - final String schemaName, final RenameTableStatement sqlStatement, final ConfigurationProperties props) throws SQLException { + final String schemaName, final DatabaseType databaseType, final RenameTableStatement sqlStatement, final ConfigurationProperties props) throws SQLException { for (RenameTableDefinitionSegment each : sqlStatement.getRenameTables()) { AlterSchemaMetaDataPOJO alterSchemaMetaDataPOJO = new AlterSchemaMetaDataPOJO(database.getName(), schemaName, logicDataSourceNames); - alterSchemaMetaDataPOJO.getAlteredTables().add(getTable(database, logicDataSourceNames, schemaName, each.getRenameTable().getTableName().getIdentifier().getValue(), props)); + alterSchemaMetaDataPOJO.getAlteredTables().add(getTable(database, logicDataSourceNames, schemaName, + TableRefreshUtils.getTableName(databaseType, each.getRenameTable().getTableName().getIdentifier()), props)); alterSchemaMetaDataPOJO.getDroppedTables().add(each.getTable().getTableName().getIdentifier().getValue()); modeContextManager.alterSchemaMetaData(alterSchemaMetaDataPOJO); } @@ -58,20 +60,16 @@ public void refresh(final ModeContextManager modeContextManager, final ShardingS private ShardingSphereTable getTable(final ShardingSphereDatabase database, final Collection logicDataSourceNames, final String schemaName, final String tableName, final ConfigurationProperties props) throws SQLException { RuleMetaData ruleMetaData = new RuleMetaData(new 
LinkedList<>(database.getRuleMetaData().getRules())); - if (isSingleTable(tableName, database)) { + if (TableRefreshUtils.isSingleTable(tableName, database)) { ruleMetaData.findRules(MutableDataNodeRule.class).forEach(each -> each.put(logicDataSourceNames.iterator().next(), schemaName, tableName)); } - GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(database.getProtocolType(), - database.getResourceMetaData().getStorageTypes(), database.getResourceMetaData().getDataSources(), ruleMetaData.getRules(), props, schemaName); + GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial( + database.getProtocolType(), database.getResourceMetaData().getStorageUnitMetaData(), ruleMetaData.getRules(), props, schemaName); Map schemaMap = GenericSchemaBuilder.build(Collections.singletonList(tableName), material); return Optional.ofNullable(schemaMap.get(schemaName)).map(optional -> optional.getTable(tableName)) .orElseGet(() -> new ShardingSphereTable(tableName, Collections.emptyList(), Collections.emptyList(), Collections.emptyList())); } - private boolean isSingleTable(final String tableName, final ShardingSphereDatabase database) { - return database.getRuleMetaData().findRules(TableContainedRule.class).stream().noneMatch(each -> each.getDistributedTableMapper().contains(tableName)); - } - @Override public Class getType() { return RenameTableStatement.class; diff --git a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/view/AlterViewStatementSchemaRefresher.java b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/view/AlterViewStatementSchemaRefresher.java index 83085a1115f65..e8d242841069d 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/view/AlterViewStatementSchemaRefresher.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/view/AlterViewStatementSchemaRefresher.java 
@@ -19,6 +19,8 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.connection.refresher.MetaDataRefresher; +import org.apache.shardingsphere.infra.connection.refresher.util.TableRefreshUtils; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; @@ -29,7 +31,6 @@ import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereView; import org.apache.shardingsphere.infra.metadata.database.schema.pojo.AlterSchemaMetaDataPOJO; import org.apache.shardingsphere.infra.rule.identifier.type.MutableDataNodeRule; -import org.apache.shardingsphere.infra.rule.identifier.type.TableContainedRule; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.AlterViewStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.handler.ddl.AlterViewStatementHandler; @@ -48,8 +49,8 @@ public final class AlterViewStatementSchemaRefresher implements MetaDataRefreshe @Override public void refresh(final ModeContextManager modeContextManager, final ShardingSphereDatabase database, final Collection logicDataSourceNames, - final String schemaName, final AlterViewStatement sqlStatement, final ConfigurationProperties props) throws SQLException { - String viewName = sqlStatement.getView().getTableName().getIdentifier().getValue(); + final String schemaName, final DatabaseType databaseType, final AlterViewStatement sqlStatement, final ConfigurationProperties props) throws SQLException { + String viewName = TableRefreshUtils.getTableName(databaseType, sqlStatement.getView().getTableName().getIdentifier()); AlterSchemaMetaDataPOJO alterSchemaMetaDataPOJO = 
new AlterSchemaMetaDataPOJO(database.getName(), schemaName, logicDataSourceNames); Optional renameView = AlterViewStatementHandler.getRenameView(sqlStatement); if (renameView.isPresent()) { @@ -73,11 +74,11 @@ public void refresh(final ModeContextManager modeContextManager, final ShardingS private ShardingSphereSchema getSchema(final ShardingSphereDatabase database, final Collection logicDataSourceNames, final String schemaName, final String viewName, final String viewDefinition, final ConfigurationProperties props) throws SQLException { RuleMetaData ruleMetaData = new RuleMetaData(new LinkedList<>(database.getRuleMetaData().getRules())); - if (isSingleTable(viewName, database)) { + if (TableRefreshUtils.isSingleTable(viewName, database)) { ruleMetaData.findRules(MutableDataNodeRule.class).forEach(each -> each.put(logicDataSourceNames.iterator().next(), schemaName, viewName)); } - GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(database.getProtocolType(), - database.getResourceMetaData().getStorageTypes(), database.getResourceMetaData().getDataSources(), ruleMetaData.getRules(), props, schemaName); + GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial( + database.getProtocolType(), database.getResourceMetaData().getStorageUnitMetaData(), ruleMetaData.getRules(), props, schemaName); Map schemaMap = GenericSchemaBuilder.build(Collections.singletonList(viewName), material); Optional actualViewMetaData = Optional.ofNullable(schemaMap.get(schemaName)).map(optional -> optional.getTable(viewName)); ShardingSphereSchema result = new ShardingSphereSchema(); @@ -86,10 +87,6 @@ private ShardingSphereSchema getSchema(final ShardingSphereDatabase database, fi return result; } - private boolean isSingleTable(final String tableName, final ShardingSphereDatabase database) { - return database.getRuleMetaData().findRules(TableContainedRule.class).stream().noneMatch(each -> each.getDistributedTableMapper().contains(tableName)); - } - 
@Override public Class getType() { return AlterViewStatement.class; diff --git a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/view/CreateViewStatementSchemaRefresher.java b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/view/CreateViewStatementSchemaRefresher.java index f63329c072fc5..aa1cb7dbab30c 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/view/CreateViewStatementSchemaRefresher.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/view/CreateViewStatementSchemaRefresher.java @@ -19,6 +19,8 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.connection.refresher.MetaDataRefresher; +import org.apache.shardingsphere.infra.connection.refresher.util.TableRefreshUtils; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; @@ -29,7 +31,6 @@ import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereView; import org.apache.shardingsphere.infra.metadata.database.schema.pojo.AlterSchemaMetaDataPOJO; import org.apache.shardingsphere.infra.rule.identifier.type.MutableDataNodeRule; -import org.apache.shardingsphere.infra.rule.identifier.type.TableContainedRule; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.CreateViewStatement; import java.sql.SQLException; @@ -46,14 +47,14 @@ public final class CreateViewStatementSchemaRefresher implements MetaDataRefresh @Override public void refresh(final ModeContextManager modeContextManager, final ShardingSphereDatabase database, final Collection logicDataSourceNames, - final String schemaName, final 
CreateViewStatement sqlStatement, final ConfigurationProperties props) throws SQLException { - String viewName = sqlStatement.getView().getTableName().getIdentifier().getValue(); + final String schemaName, final DatabaseType databaseType, final CreateViewStatement sqlStatement, final ConfigurationProperties props) throws SQLException { + String viewName = TableRefreshUtils.getTableName(databaseType, sqlStatement.getView().getTableName().getIdentifier()); RuleMetaData ruleMetaData = new RuleMetaData(new LinkedList<>(database.getRuleMetaData().getRules())); - if (isSingleTable(viewName, database)) { + if (TableRefreshUtils.isSingleTable(viewName, database)) { ruleMetaData.findRules(MutableDataNodeRule.class).forEach(each -> each.put(logicDataSourceNames.iterator().next(), schemaName, viewName)); } - GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(database.getProtocolType(), - database.getResourceMetaData().getStorageTypes(), database.getResourceMetaData().getDataSources(), ruleMetaData.getRules(), props, schemaName); + GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial( + database.getProtocolType(), database.getResourceMetaData().getStorageUnitMetaData(), ruleMetaData.getRules(), props, schemaName); Map schemaMap = GenericSchemaBuilder.build(Collections.singletonList(viewName), material); Optional actualTableMetaData = Optional.ofNullable(schemaMap.get(schemaName)).map(optional -> optional.getTable(viewName)); if (actualTableMetaData.isPresent()) { @@ -64,10 +65,6 @@ public void refresh(final ModeContextManager modeContextManager, final ShardingS } } - private boolean isSingleTable(final String tableName, final ShardingSphereDatabase database) { - return database.getRuleMetaData().findRules(TableContainedRule.class).stream().noneMatch(each -> each.getDistributedTableMapper().contains(tableName)); - } - @Override public Class getType() { return CreateViewStatement.class; diff --git 
a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/view/DropViewStatementSchemaRefresher.java b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/view/DropViewStatementSchemaRefresher.java index d31566f09f2fc..b5230b194e042 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/view/DropViewStatementSchemaRefresher.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/type/view/DropViewStatementSchemaRefresher.java @@ -19,6 +19,7 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.connection.refresher.MetaDataRefresher; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.schema.pojo.AlterSchemaMetaDataPOJO; @@ -33,7 +34,7 @@ public final class DropViewStatementSchemaRefresher implements MetaDataRefresher @Override public void refresh(final ModeContextManager modeContextManager, final ShardingSphereDatabase database, final Collection logicDataSourceNames, - final String schemaName, final DropViewStatement sqlStatement, final ConfigurationProperties props) { + final String schemaName, final DatabaseType databaseType, final DropViewStatement sqlStatement, final ConfigurationProperties props) { AlterSchemaMetaDataPOJO alterSchemaMetaDataPOJO = new AlterSchemaMetaDataPOJO(database.getName(), schemaName); sqlStatement.getViews().forEach(each -> { String viewName = each.getTableName().getIdentifier().getValue(); diff --git a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/util/TableRefreshUtils.java 
b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/util/TableRefreshUtils.java index a3976d700e46b..71e3170acd36b 100644 --- a/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/util/TableRefreshUtils.java +++ b/infra/context/src/main/java/org/apache/shardingsphere/infra/connection/refresher/util/TableRefreshUtils.java @@ -21,12 +21,18 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; +import org.apache.shardingsphere.infra.database.core.metadata.database.enums.QuoteCharacter; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.database.oracle.type.OracleDatabaseType; import org.apache.shardingsphere.infra.datanode.DataNode; +import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.rule.identifier.type.MutableDataNodeRule; +import org.apache.shardingsphere.infra.rule.identifier.type.TableContainedRule; import org.apache.shardingsphere.single.api.config.SingleRuleConfiguration; import org.apache.shardingsphere.single.api.constant.SingleTableConstants; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import java.util.Collection; import java.util.Optional; @@ -37,6 +43,28 @@ @NoArgsConstructor(access = AccessLevel.PRIVATE) public final class TableRefreshUtils { + /** + * Get table name. 
+ * + * @param databaseType database type + * @param identifierValue identifier value + * @return table name + */ + public static String getTableName(final DatabaseType databaseType, final IdentifierValue identifierValue) { + return databaseType instanceof OracleDatabaseType && QuoteCharacter.NONE == identifierValue.getQuoteCharacter() ? identifierValue.getValue().toUpperCase() : identifierValue.getValue(); + } + + /** + * Judge whether single table. + * + * @param tableName table name + * @param database database + * @return whether single table + */ + public static boolean isSingleTable(final String tableName, final ShardingSphereDatabase database) { + return database.getRuleMetaData().findRules(TableContainedRule.class).stream().noneMatch(each -> each.getDistributedTableMapper().contains(tableName)); + } + /** * Judge whether the rule need to be refreshed. * @@ -67,11 +95,11 @@ public static boolean isRuleRefreshRequired(final RuleMetaData ruleMetaData, fin if (!singleRule.isPresent()) { return false; } - RuleConfiguration ruleConfiguration = singleRule.get().getConfiguration(); - if (!(ruleConfiguration instanceof SingleRuleConfiguration)) { + RuleConfiguration ruleConfig = singleRule.get().getConfiguration(); + if (!(ruleConfig instanceof SingleRuleConfiguration)) { return false; } - Collection tablesConfig = ((SingleRuleConfiguration) ruleConfiguration).getTables(); + Collection tablesConfig = ((SingleRuleConfiguration) ruleConfig).getTables(); if (tablesConfig.contains(SingleTableConstants.ALL_TABLES) || tablesConfig.contains(SingleTableConstants.ALL_SCHEMA_TABLES)) { return false; } diff --git a/infra/datasource/core/pom.xml b/infra/data-source-pool/core/pom.xml similarity index 94% rename from infra/datasource/core/pom.xml rename to infra/data-source-pool/core/pom.xml index cf5f31e5bb894..b3207f45e53f6 100644 --- a/infra/datasource/core/pom.xml +++ b/infra/data-source-pool/core/pom.xml @@ -20,10 +20,10 @@ 4.0.0 org.apache.shardingsphere - 
shardingsphere-infra-datasource + shardingsphere-infra-data-source-pool 5.4.1-SNAPSHOT - shardingsphere-infra-datasource-core + shardingsphere-infra-data-source-pool-core ${project.artifactId} diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/ShardingSphereStorageDataSourceWrapper.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/CatalogSwitchableDataSource.java similarity index 86% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/ShardingSphereStorageDataSourceWrapper.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/CatalogSwitchableDataSource.java index 6afe40b89fad9..c9ea4ed8590da 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/ShardingSphereStorageDataSourceWrapper.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/CatalogSwitchableDataSource.java @@ -15,11 +15,10 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource; +package org.apache.shardingsphere.infra.datasource.pool; import lombok.Getter; import lombok.RequiredArgsConstructor; -import lombok.SneakyThrows; import javax.sql.DataSource; import java.io.PrintWriter; @@ -29,16 +28,17 @@ import java.util.logging.Logger; /** - * ShardingSphere storage data source wrapper. + * Catalog switchable data source. 
*/ @RequiredArgsConstructor -@Getter -public final class ShardingSphereStorageDataSourceWrapper implements DataSource, AutoCloseable { +public final class CatalogSwitchableDataSource implements DataSource, AutoCloseable { + @Getter private final DataSource dataSource; private final String catalog; + @Getter private final String url; @Override @@ -90,12 +90,10 @@ public boolean isWrapperFor(final Class iface) throws SQLException { return dataSource.isWrapperFor(iface); } - @SneakyThrows @Override - public void close() { - if (!(dataSource instanceof AutoCloseable)) { - return; + public void close() throws Exception { + if (dataSource instanceof AutoCloseable) { + ((AutoCloseable) dataSource).close(); } - ((AutoCloseable) dataSource).close(); } } diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/config/ConnectionConfiguration.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/config/ConnectionConfiguration.java similarity index 94% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/config/ConnectionConfiguration.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/config/ConnectionConfiguration.java index 889f0ee8b8c74..d6a07da9fb13a 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/config/ConnectionConfiguration.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/config/ConnectionConfiguration.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.config; +package org.apache.shardingsphere.infra.datasource.pool.config; import lombok.Getter; import lombok.RequiredArgsConstructor; diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/config/DataSourceConfiguration.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/config/DataSourceConfiguration.java similarity index 94% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/config/DataSourceConfiguration.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/config/DataSourceConfiguration.java index dc1774e87e601..5b38b3dde7bc9 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/config/DataSourceConfiguration.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/config/DataSourceConfiguration.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.config; +package org.apache.shardingsphere.infra.datasource.pool.config; import lombok.Getter; import lombok.RequiredArgsConstructor; diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/config/PoolConfiguration.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/config/PoolConfiguration.java similarity index 95% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/config/PoolConfiguration.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/config/PoolConfiguration.java index 10164335e19e3..2258892e7fc1b 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/config/PoolConfiguration.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/config/PoolConfiguration.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource.config; +package org.apache.shardingsphere.infra.datasource.pool.config; import lombok.Getter; import lombok.RequiredArgsConstructor; diff --git a/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreator.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreator.java new file mode 100644 index 0000000000000..07e9476650e52 --- /dev/null +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreator.java @@ -0,0 +1,168 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.datasource.pool.creator; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import lombok.SneakyThrows; +import org.apache.shardingsphere.infra.database.core.GlobalDataSourceRegistry; +import org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolDestroyer; +import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; +import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaDataReflection; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.custom.CustomDataSourcePoolProperties; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; + +import javax.sql.DataSource; +import java.util.Collection; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Properties; + +/** + * Data source pool creator. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class DataSourcePoolCreator { + + /** + * Create data sources. 
+ * + * @param propsMap data source pool properties map + * @param cacheEnabled cache enabled + * @return created data sources + */ + public static Map create(final Map propsMap, final boolean cacheEnabled) { + Map result = new LinkedHashMap<>(); + for (Entry entry : propsMap.entrySet()) { + result.put(entry.getKey(), create(entry.getKey(), entry.getValue(), cacheEnabled, result.values())); + } + return result; + } + + /** + * Create data source. + * + * @param props data source pool properties + * @return created data source + */ + public static DataSource create(final DataSourcePoolProperties props) { + DataSource result = create(props.getPoolClassName()); + Optional poolMetaData = TypedSPILoader.findService(DataSourcePoolMetaData.class, props.getPoolClassName()); + DataSourcePoolReflection dataSourcePoolReflection = new DataSourcePoolReflection(result); + if (poolMetaData.isPresent()) { + setDefaultFields(dataSourcePoolReflection, poolMetaData.get()); + setConfiguredFields(props, dataSourcePoolReflection, poolMetaData.get()); + appendJdbcUrlProperties(props.getCustomProperties(), result, poolMetaData.get(), dataSourcePoolReflection); + dataSourcePoolReflection.addDefaultDataSourcePoolProperties(poolMetaData.get()); + } else { + setConfiguredFields(props, dataSourcePoolReflection); + } + return result; + } + + /** + * Create data source. + * + * @param dataSourceName data source name + * @param props data source pool properties + * @param cacheEnabled cache enabled + * @return created data source + */ + public static DataSource create(final String dataSourceName, final DataSourcePoolProperties props, final boolean cacheEnabled) { + DataSource result = create(props); + if (cacheEnabled && !GlobalDataSourceRegistry.getInstance().getCachedDataSources().containsKey(dataSourceName)) { + GlobalDataSourceRegistry.getInstance().getCachedDataSources().put(dataSourceName, result); + } + return result; + } + + /** + * Create data source. 
+ * + * @param dataSourceName data source name + * @param props data source pool properties + * @param cacheEnabled cache enabled + * @param storageNodes storage nodes + * @return created data source + */ + public static DataSource create(final String dataSourceName, final DataSourcePoolProperties props, final boolean cacheEnabled, final Collection storageNodes) { + try { + return create(dataSourceName, props, cacheEnabled); + // CHECKSTYLE:OFF + } catch (final RuntimeException ex) { + // CHECKSTYLE:ON + if (!cacheEnabled) { + storageNodes.stream().map(DataSourcePoolDestroyer::new).forEach(DataSourcePoolDestroyer::asyncDestroy); + } + throw ex; + } + } + + @SneakyThrows(ReflectiveOperationException.class) + private static DataSource create(final String dataSourceClassName) { + return (DataSource) Class.forName(dataSourceClassName).getConstructor().newInstance(); + } + + private static void setDefaultFields(final DataSourcePoolReflection dataSourcePoolReflection, final DataSourcePoolMetaData poolMetaData) { + for (Entry entry : poolMetaData.getDefaultProperties().entrySet()) { + dataSourcePoolReflection.setField(entry.getKey(), entry.getValue()); + } + } + + private static void setConfiguredFields(final DataSourcePoolProperties props, final DataSourcePoolReflection dataSourcePoolReflection) { + for (Entry entry : props.getAllLocalProperties().entrySet()) { + dataSourcePoolReflection.setField(entry.getKey(), entry.getValue()); + } + } + + private static void setConfiguredFields(final DataSourcePoolProperties props, final DataSourcePoolReflection dataSourcePoolReflection, final DataSourcePoolMetaData poolMetaData) { + for (Entry entry : props.getAllLocalProperties().entrySet()) { + String fieldName = entry.getKey(); + Object fieldValue = entry.getValue(); + if (isValidProperty(fieldName, fieldValue, poolMetaData) && !fieldName.equals(poolMetaData.getFieldMetaData().getJdbcUrlPropertiesFieldName())) { + dataSourcePoolReflection.setField(fieldName, fieldValue); + } + } 
+ } + + private static boolean isValidProperty(final String key, final Object value, final DataSourcePoolMetaData poolMetaData) { + return !poolMetaData.getSkippedProperties().containsKey(key) || null == value || !value.equals(poolMetaData.getSkippedProperties().get(key)); + } + + @SuppressWarnings("unchecked") + private static void appendJdbcUrlProperties(final CustomDataSourcePoolProperties customPoolProps, final DataSource targetDataSource, final DataSourcePoolMetaData poolMetaData, + final DataSourcePoolReflection dataSourcePoolReflection) { + String jdbcUrlPropertiesFieldName = poolMetaData.getFieldMetaData().getJdbcUrlPropertiesFieldName(); + if (null != jdbcUrlPropertiesFieldName && customPoolProps.getProperties().containsKey(jdbcUrlPropertiesFieldName)) { + Map jdbcUrlProps = (Map) customPoolProps.getProperties().get(jdbcUrlPropertiesFieldName); + DataSourcePoolMetaDataReflection dataSourcePoolMetaDataReflection = new DataSourcePoolMetaDataReflection(targetDataSource, poolMetaData.getFieldMetaData()); + dataSourcePoolMetaDataReflection.getJdbcConnectionProperties().ifPresent(optional -> setJdbcUrlProperties(dataSourcePoolReflection, optional, jdbcUrlProps, jdbcUrlPropertiesFieldName)); + } + } + + private static void setJdbcUrlProperties(final DataSourcePoolReflection dataSourcePoolReflection, final Properties jdbcConnectionProps, final Map customProps, + final String jdbcUrlPropertiesFieldName) { + for (Entry entry : customProps.entrySet()) { + jdbcConnectionProps.setProperty(entry.getKey(), entry.getValue().toString()); + } + dataSourcePoolReflection.setField(jdbcUrlPropertiesFieldName, jdbcConnectionProps); + } +} diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourceReflection.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolReflection.java similarity index 89% rename from 
infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourceReflection.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolReflection.java index 6e7a15fa4e365..acdab74d8b838 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourceReflection.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolReflection.java @@ -25,7 +25,7 @@ import org.apache.shardingsphere.infra.database.core.connector.ConnectionPropertiesParser; import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaDataReflection; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DefaultDataSourcePoolFieldMetaData; +import org.apache.shardingsphere.infra.datasource.pool.metadata.impl.DefaultDataSourcePoolFieldMetaData; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import javax.sql.DataSource; @@ -41,9 +41,9 @@ import java.util.Properties; /** - * Data source reflection. + * Data source pool reflection. 
*/ -public final class DataSourceReflection { +public final class DataSourcePoolReflection { private static final Collection> GENERAL_CLASS_TYPES; @@ -65,7 +65,7 @@ public final class DataSourceReflection { SKIPPED_PROPERTY_KEYS = new HashSet<>(Arrays.asList("loginTimeout", "driverClassName")); } - public DataSourceReflection(final DataSource dataSource) { + public DataSourcePoolReflection(final DataSource dataSource) { this.dataSource = dataSource; dataSourceMethods = dataSource.getClass().getMethods(); } @@ -130,6 +130,9 @@ public void setField(final String fieldName, final Object fieldValue) { @SneakyThrows(ReflectiveOperationException.class) private void setField(final Method method, final Object fieldValue) { Class paramType = method.getParameterTypes()[0]; + if (String.class == paramType && Properties.class.isAssignableFrom(fieldValue.getClass())) { + return; + } if (int.class == paramType || Integer.class == paramType) { method.invoke(dataSource, Integer.parseInt(fieldValue.toString())); } else if (long.class == paramType || Long.class == paramType) { @@ -157,9 +160,11 @@ private Optional findSetterMethod(final String fieldName) { } /** - * Add default data source properties. + * Add default data source pool properties. 
+ * + * @param metaData data source pool meta data */ - public void addDefaultDataSourceProperties() { + public void addDefaultDataSourcePoolProperties(final DataSourcePoolMetaData metaData) { DataSourcePoolMetaDataReflection dataSourcePoolMetaDataReflection = new DataSourcePoolMetaDataReflection(dataSource, TypedSPILoader.findService(DataSourcePoolMetaData.class, dataSource.getClass().getName()) .map(DataSourcePoolMetaData::getFieldMetaData).orElseGet(DefaultDataSourcePoolFieldMetaData::new)); @@ -170,16 +175,18 @@ public void addDefaultDataSourceProperties() { } ConnectionProperties connectionProps = DatabaseTypedSPILoader.getService(ConnectionPropertiesParser.class, DatabaseTypeFactory.get(jdbcUrl.get())).parse(jdbcUrl.get(), null, null); Properties queryProps = connectionProps.getQueryProperties(); + Properties jdbcProps = jdbcConnectionProps.get(); for (Entry entry : connectionProps.getDefaultQueryProperties().entrySet()) { String defaultPropertyKey = entry.getKey().toString(); String defaultPropertyValue = entry.getValue().toString(); - if (!containsDefaultProperty(defaultPropertyKey, jdbcConnectionProps.get(), queryProps)) { - jdbcConnectionProps.get().setProperty(defaultPropertyKey, defaultPropertyValue); + if (!containsDefaultProperty(defaultPropertyKey, jdbcProps, queryProps)) { + jdbcProps.setProperty(defaultPropertyKey, defaultPropertyValue); } } + setField(metaData.getFieldMetaData().getJdbcUrlPropertiesFieldName(), jdbcProps); } - private boolean containsDefaultProperty(final String defaultPropertyKey, final Properties targetDataSourceProps, final Properties queryProps) { - return targetDataSourceProps.containsKey(defaultPropertyKey) || queryProps.containsKey(defaultPropertyKey); + private boolean containsDefaultProperty(final String defaultPropKey, final Properties targetProps, final Properties queryProps) { + return targetProps.containsKey(defaultPropKey) || queryProps.containsKey(defaultPropKey); } } diff --git 
a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/detector/DataSourcePoolActiveDetector.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/DataSourcePoolActiveDetector.java similarity index 89% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/detector/DataSourcePoolActiveDetector.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/DataSourcePoolActiveDetector.java index 3734e79216e9c..5a48326362926 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/detector/DataSourcePoolActiveDetector.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/DataSourcePoolActiveDetector.java @@ -15,12 +15,13 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource.pool.destroyer.detector; +package org.apache.shardingsphere.infra.datasource.pool.destroyer; import org.apache.shardingsphere.infra.spi.annotation.SingletonSPI; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPI; import javax.sql.DataSource; +import java.sql.SQLException; /** * Data source pool active detector. 
@@ -33,6 +34,7 @@ public interface DataSourcePoolActiveDetector extends TypedSPI { * * @param dataSource data source pool to be detected * @return contains active connection or not + * @throws SQLException SQL exception */ - boolean containsActiveConnection(DataSource dataSource); + boolean containsActiveConnection(DataSource dataSource) throws SQLException; } diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/DataSourcePoolDestroyer.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/DataSourcePoolDestroyer.java similarity index 93% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/DataSourcePoolDestroyer.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/DataSourcePoolDestroyer.java index d8a95b8058a7d..49dc73995b109 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/DataSourcePoolDestroyer.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/DataSourcePoolDestroyer.java @@ -19,10 +19,10 @@ import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; -import org.apache.shardingsphere.infra.datasource.pool.destroyer.detector.DataSourcePoolActiveDetector; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import javax.sql.DataSource; +import java.sql.SQLException; import java.util.Optional; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @@ -53,7 +53,7 @@ private void graceDestroy() { ((AutoCloseable) dataSource).close(); } - private void waitUntilActiveConnectionComplete() { + private void waitUntilActiveConnectionComplete() throws SQLException { Optional activeDetector = TypedSPILoader.findService(DataSourcePoolActiveDetector.class, 
dataSource.getClass().getName()); while (activeDetector.isPresent() && activeDetector.get().containsActiveConnection(dataSource)) { try { diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolFieldMetaData.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolFieldMetaData.java similarity index 82% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolFieldMetaData.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolFieldMetaData.java index 8877d130e94ba..dce6efa55c098 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolFieldMetaData.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolFieldMetaData.java @@ -22,20 +22,6 @@ */ public interface DataSourcePoolFieldMetaData { - /** - * Get username field name. - * - * @return username field name - */ - String getUsernameFieldName(); - - /** - * Get password field name. - * - * @return password field name - */ - String getPasswordFieldName(); - /** * Get JDBC URL field name. 
* diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolMetaData.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolMetaData.java similarity index 84% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolMetaData.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolMetaData.java index 25ebbec04f6d8..2f14412217da8 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolMetaData.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolMetaData.java @@ -37,11 +37,11 @@ public interface DataSourcePoolMetaData extends TypedSPI { Map getDefaultProperties(); /** - * Get invalid properties. + * Get skipped properties. * - * @return invalid properties + * @return skipped properties */ - Map getInvalidProperties(); + Map getSkippedProperties(); /** * Get property synonyms. @@ -63,12 +63,4 @@ public interface DataSourcePoolMetaData extends TypedSPI { * @return data source pool field meta data */ DataSourcePoolFieldMetaData getFieldMetaData(); - - /** - * Get data source pool properties validator. 
- * - * @return data source pool properties validator - */ - DataSourcePoolPropertiesValidator getDataSourcePoolPropertiesValidator(); - } diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolMetaDataReflection.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolMetaDataReflection.java similarity index 68% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolMetaDataReflection.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolMetaDataReflection.java index 007864fd6966c..7a4f6bec616ff 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolMetaDataReflection.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolMetaDataReflection.java @@ -40,15 +40,20 @@ public final class DataSourcePoolMetaDataReflection { * @return JDBC URL */ public Optional getJdbcUrl() { - return ReflectionUtils.getFieldValue(targetDataSource, dataSourcePoolFieldMetaData.getJdbcUrlFieldName()); + Optional jdbcUrl = ReflectionUtils.getFieldValue(targetDataSource, dataSourcePoolFieldMetaData.getJdbcUrlFieldName()); + return jdbcUrl.isPresent() ? jdbcUrl : ReflectionUtils.getFieldValueByGetMethod(targetDataSource, dataSourcePoolFieldMetaData.getJdbcUrlFieldName()); } /** * Get JDBC connection properties. 
- * + * * @return JDBC connection properties */ public Optional getJdbcConnectionProperties() { - return ReflectionUtils.getFieldValue(targetDataSource, dataSourcePoolFieldMetaData.getJdbcUrlPropertiesFieldName()); + if (null == dataSourcePoolFieldMetaData.getJdbcUrlPropertiesFieldName()) { + return Optional.empty(); + } + Optional props = ReflectionUtils.getFieldValue(targetDataSource, dataSourcePoolFieldMetaData.getJdbcUrlPropertiesFieldName()); + return props.isPresent() ? props : ReflectionUtils.getFieldValueByGetMethod(targetDataSource, dataSourcePoolFieldMetaData.getJdbcUrlPropertiesFieldName()); } } diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DefaultDataSourcePoolFieldMetaData.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/impl/DefaultDataSourcePoolFieldMetaData.java similarity index 86% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DefaultDataSourcePoolFieldMetaData.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/impl/DefaultDataSourcePoolFieldMetaData.java index 08a4fe39ce62b..d2e37ce7ff26a 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DefaultDataSourcePoolFieldMetaData.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/impl/DefaultDataSourcePoolFieldMetaData.java @@ -15,23 +15,15 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource.pool.metadata; +package org.apache.shardingsphere.infra.datasource.pool.metadata.impl; + +import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolFieldMetaData; /** * Default data source pool field meta data. 
*/ public final class DefaultDataSourcePoolFieldMetaData implements DataSourcePoolFieldMetaData { - @Override - public String getUsernameFieldName() { - return "username"; - } - - @Override - public String getPasswordFieldName() { - return "password"; - } - @Override public String getJdbcUrlFieldName() { return "url"; diff --git a/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/creator/DataSourcePoolPropertiesCreator.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/creator/DataSourcePoolPropertiesCreator.java new file mode 100644 index 0000000000000..536ca0cae169e --- /dev/null +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/creator/DataSourcePoolPropertiesCreator.java @@ -0,0 +1,159 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.infra.datasource.pool.props.creator; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource; +import org.apache.shardingsphere.infra.datasource.pool.config.ConnectionConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.PoolConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolReflection; +import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.custom.CustomDataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.synonym.ConnectionPropertySynonyms; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.synonym.PoolPropertySynonyms; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; + +import javax.sql.DataSource; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Properties; + +/** + * Data source pool properties creator. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class DataSourcePoolPropertiesCreator { + + /** + * Create data source properties. + * + * @param config data source configuration + * @return created data source properties + */ + public static DataSourcePoolProperties create(final DataSourceConfiguration config) { + return new DataSourcePoolProperties(config.getConnection().getDataSourceClassName(), createProperties(config)); + } + + /** + * Create data source properties. 
+ * + * @param dataSource data source + * @return created data source properties + */ + public static DataSourcePoolProperties create(final DataSource dataSource) { + DataSource realDataSource = dataSource instanceof CatalogSwitchableDataSource ? ((CatalogSwitchableDataSource) dataSource).getDataSource() : dataSource; + return new DataSourcePoolProperties(realDataSource.getClass().getName(), createProperties(realDataSource)); + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + private static Map createProperties(final DataSourceConfiguration config) { + Map result = new LinkedHashMap<>(); + result.put("dataSourceClassName", config.getConnection().getDataSourceClassName()); + result.put("url", config.getConnection().getUrl()); + result.put("username", config.getConnection().getUsername()); + result.put("password", config.getConnection().getPassword()); + result.put("connectionTimeoutMilliseconds", config.getPool().getConnectionTimeoutMilliseconds()); + result.put("idleTimeoutMilliseconds", config.getPool().getIdleTimeoutMilliseconds()); + result.put("maxLifetimeMilliseconds", config.getPool().getMaxLifetimeMilliseconds()); + result.put("maxPoolSize", config.getPool().getMaxPoolSize()); + result.put("minPoolSize", config.getPool().getMinPoolSize()); + result.put("readOnly", config.getPool().getReadOnly()); + if (null != config.getPool().getCustomProperties()) { + result.putAll((Map) config.getPool().getCustomProperties()); + } + return result; + } + + private static Map createProperties(final DataSource dataSource) { + Map result = new LinkedHashMap<>(); + Optional metaData = TypedSPILoader.findService(DataSourcePoolMetaData.class, dataSource.getClass().getName()); + for (Entry entry : new DataSourcePoolReflection(dataSource).convertToProperties().entrySet()) { + String propertyName = entry.getKey(); + Object propertyValue = entry.getValue(); + if (!metaData.isPresent() || isValidProperty(propertyName, propertyValue, metaData.get()) && 
!metaData.get().getTransientFieldNames().contains(propertyName)) { + result.put(propertyName, propertyValue); + } + } + return result; + } + + private static boolean isValidProperty(final String key, final Object value, final DataSourcePoolMetaData metaData) { + return null == value || !metaData.getSkippedProperties().containsKey(key) || !value.equals(metaData.getSkippedProperties().get(key)); + } + + /** + * Create data source configuration. + * + * @param props data source pool properties + * @return created data source configuration + */ + public static DataSourceConfiguration createConfiguration(final DataSourcePoolProperties props) { + return new DataSourceConfiguration(getConnectionConfiguration(props.getConnectionPropertySynonyms()), getPoolConfiguration(props.getPoolPropertySynonyms(), props.getCustomProperties())); + } + + private static ConnectionConfiguration getConnectionConfiguration(final ConnectionPropertySynonyms connectionPropSynonyms) { + Map standardProps = connectionPropSynonyms.getStandardProperties(); + return new ConnectionConfiguration( + (String) standardProps.get("dataSourceClassName"), (String) standardProps.get("url"), (String) standardProps.get("username"), (String) standardProps.get("password")); + } + + private static PoolConfiguration getPoolConfiguration(final PoolPropertySynonyms poolPropSynonyms, final CustomDataSourcePoolProperties customProps) { + Map standardProps = poolPropSynonyms.getStandardProperties(); + Long connectionTimeoutMilliseconds = toLong(standardProps, "connectionTimeoutMilliseconds"); + Long idleTimeoutMilliseconds = toLong(standardProps, "idleTimeoutMilliseconds"); + Long maxLifetimeMilliseconds = toLong(standardProps, "maxLifetimeMilliseconds"); + Integer maxPoolSize = toInt(standardProps, "maxPoolSize"); + Integer minPoolSize = toInt(standardProps, "minPoolSize"); + Boolean readOnly = toBoolean(standardProps, "readOnly"); + Properties newCustomProps = new Properties(); + 
newCustomProps.putAll(customProps.getProperties()); + return new PoolConfiguration(connectionTimeoutMilliseconds, idleTimeoutMilliseconds, maxLifetimeMilliseconds, maxPoolSize, minPoolSize, readOnly, newCustomProps); + } + + private static Long toLong(final Map props, final String name) { + if (!props.containsKey(name)) { + return null; + } + try { + return Long.parseLong(String.valueOf(props.get(name))); + } catch (final NumberFormatException ex) { + return null; + } + } + + private static Integer toInt(final Map props, final String name) { + if (!props.containsKey(name)) { + return null; + } + try { + return Integer.parseInt(String.valueOf(props.get(name))); + } catch (final NumberFormatException ex) { + return null; + } + } + + @SuppressWarnings("SameParameterValue") + private static Boolean toBoolean(final Map props, final String name) { + return props.containsKey(name) ? Boolean.parseBoolean(String.valueOf(props.get(name))) : null; + } +} diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/DataSourceProperties.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/DataSourcePoolProperties.java similarity index 60% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/DataSourceProperties.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/DataSourcePoolProperties.java index 42319245216f9..cf0af8d51e29e 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/DataSourceProperties.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/DataSourcePoolProperties.java @@ -15,14 +15,14 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.props; +package org.apache.shardingsphere.infra.datasource.pool.props.domain; import com.google.common.base.Objects; import lombok.Getter; import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; -import org.apache.shardingsphere.infra.datasource.props.custom.CustomDataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.synonym.ConnectionPropertySynonyms; -import org.apache.shardingsphere.infra.datasource.props.synonym.PoolPropertySynonyms; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.custom.CustomDataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.synonym.ConnectionPropertySynonyms; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.synonym.PoolPropertySynonyms; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import java.util.Collection; @@ -34,29 +34,27 @@ import java.util.Optional; /** - * Data source properties. + * Data source pool properties. */ @Getter -public final class DataSourceProperties { +public final class DataSourcePoolProperties { - private static final String DEFAULT_DATA_SOURCE_CLASS = "com.zaxxer.hikari.HikariDataSource"; - - private final String dataSourceClassName; + private final String poolClassName; private final ConnectionPropertySynonyms connectionPropertySynonyms; private final PoolPropertySynonyms poolPropertySynonyms; - private final CustomDataSourceProperties customDataSourceProperties; + private final CustomDataSourcePoolProperties customProperties; - public DataSourceProperties(final String dataSourceClassName, final Map props) { - this.dataSourceClassName = dataSourceClassName; - Optional poolMetaData = TypedSPILoader.findService(DataSourcePoolMetaData.class, dataSourceClassName); - Map propertySynonyms = poolMetaData.isPresent() ? 
poolMetaData.get().getPropertySynonyms() : Collections.emptyMap(); + public DataSourcePoolProperties(final String poolClassName, final Map props) { + Optional metaData = TypedSPILoader.findService(DataSourcePoolMetaData.class, poolClassName); + this.poolClassName = metaData.map(optional -> optional.getType().toString()).orElse(poolClassName); + Map propertySynonyms = metaData.map(DataSourcePoolMetaData::getPropertySynonyms).orElse(Collections.emptyMap()); connectionPropertySynonyms = new ConnectionPropertySynonyms(props, propertySynonyms); poolPropertySynonyms = new PoolPropertySynonyms(props, propertySynonyms); - customDataSourceProperties = new CustomDataSourceProperties( - props, getStandardPropertyKeys(), poolMetaData.isPresent() ? poolMetaData.get().getTransientFieldNames() : Collections.emptyList(), propertySynonyms); + Collection transientFieldNames = metaData.map(DataSourcePoolMetaData::getTransientFieldNames).orElse(Collections.emptyList()); + customProperties = new CustomDataSourcePoolProperties(props, getStandardPropertyKeys(), transientFieldNames, propertySynonyms); } private Collection getStandardPropertyKeys() { @@ -65,15 +63,6 @@ private Collection getStandardPropertyKeys() { return result; } - /** - * Get data source class name. - * - * @return data source class name - */ - public String getDataSourceClassName() { - return null == dataSourceClassName ? DEFAULT_DATA_SOURCE_CLASS : dataSourceClassName; - } - /** * Get all standard properties. 
* @@ -81,10 +70,10 @@ public String getDataSourceClassName() { */ public Map getAllStandardProperties() { Map result = new LinkedHashMap<>( - connectionPropertySynonyms.getStandardProperties().size() + poolPropertySynonyms.getStandardProperties().size() + customDataSourceProperties.getProperties().size(), 1F); + connectionPropertySynonyms.getStandardProperties().size() + poolPropertySynonyms.getStandardProperties().size() + customProperties.getProperties().size(), 1F); result.putAll(connectionPropertySynonyms.getStandardProperties()); result.putAll(poolPropertySynonyms.getStandardProperties()); - result.putAll(customDataSourceProperties.getProperties()); + result.putAll(customProperties.getProperties()); return result; } @@ -95,30 +84,31 @@ public Map getAllStandardProperties() { */ public Map getAllLocalProperties() { Map result = new LinkedHashMap<>( - connectionPropertySynonyms.getLocalProperties().size() + poolPropertySynonyms.getLocalProperties().size() + customDataSourceProperties.getProperties().size(), 1F); + connectionPropertySynonyms.getLocalProperties().size() + poolPropertySynonyms.getLocalProperties().size() + customProperties.getProperties().size(), 1F); result.putAll(connectionPropertySynonyms.getLocalProperties()); result.putAll(poolPropertySynonyms.getLocalProperties()); - result.putAll(customDataSourceProperties.getProperties()); + result.putAll(customProperties.getProperties()); return result; } @Override public boolean equals(final Object obj) { - return this == obj || null != obj && getClass() == obj.getClass() && equalsByProperties((DataSourceProperties) obj); + return this == obj || null != obj && getClass() == obj.getClass() && equalsByProperties((DataSourcePoolProperties) obj); } - private boolean equalsByProperties(final DataSourceProperties dataSourceProps) { - if (!dataSourceClassName.equals(dataSourceProps.dataSourceClassName)) { - return false; - } + private boolean equalsByProperties(final DataSourcePoolProperties props) { + return 
poolClassName.equals(props.poolClassName) && equalsByLocalProperties(props.getAllLocalProperties()); + } + + private boolean equalsByLocalProperties(final Map localProps) { for (Entry entry : getAllLocalProperties().entrySet()) { - if (!dataSourceProps.getAllLocalProperties().containsKey(entry.getKey())) { + if (!localProps.containsKey(entry.getKey())) { continue; } if (entry.getValue() instanceof Map) { - return entry.getValue().equals(dataSourceProps.getAllLocalProperties().get(entry.getKey())); + return entry.getValue().equals(localProps.get(entry.getKey())); } - if (!String.valueOf(entry.getValue()).equals(String.valueOf(dataSourceProps.getAllLocalProperties().get(entry.getKey())))) { + if (!String.valueOf(entry.getValue()).equals(String.valueOf(localProps.get(entry.getKey())))) { return false; } } @@ -131,6 +121,6 @@ public int hashCode() { for (Entry entry : getAllLocalProperties().entrySet()) { stringBuilder.append(entry.getKey()).append(entry.getValue()); } - return Objects.hashCode(dataSourceClassName, stringBuilder.toString()); + return Objects.hashCode(poolClassName, stringBuilder.toString()); } } diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/custom/CustomDataSourceProperties.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/custom/CustomDataSourcePoolProperties.java similarity index 83% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/custom/CustomDataSourceProperties.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/custom/CustomDataSourcePoolProperties.java index 2462790915df1..4a2c4d3c4f6c1 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/custom/CustomDataSourceProperties.java +++ 
b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/custom/CustomDataSourcePoolProperties.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource.props.custom; +package org.apache.shardingsphere.infra.datasource.pool.props.domain.custom; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -27,16 +27,16 @@ import java.util.Properties; /** - * Custom data source properties. + * Custom data source pool properties. */ @Getter @EqualsAndHashCode -public final class CustomDataSourceProperties { +public final class CustomDataSourcePoolProperties { private final Map properties; - public CustomDataSourceProperties(final Map props, - final Collection standardPropertyKeys, final Collection transientFieldNames, final Map propertySynonyms) { + public CustomDataSourcePoolProperties(final Map props, + final Collection standardPropertyKeys, final Collection transientFieldNames, final Map propertySynonyms) { properties = getProperties(props); standardPropertyKeys.forEach(properties::remove); transientFieldNames.forEach(properties::remove); diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/synonym/ConnectionPropertySynonyms.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/synonym/ConnectionPropertySynonyms.java similarity index 95% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/synonym/ConnectionPropertySynonyms.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/synonym/ConnectionPropertySynonyms.java index 30943e6b1687b..b4349194303a8 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/synonym/ConnectionPropertySynonyms.java +++ 
b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/synonym/ConnectionPropertySynonyms.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource.props.synonym; +package org.apache.shardingsphere.infra.datasource.pool.props.domain.synonym; import lombok.EqualsAndHashCode; diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/synonym/PoolPropertySynonyms.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/synonym/PoolPropertySynonyms.java similarity index 95% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/synonym/PoolPropertySynonyms.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/synonym/PoolPropertySynonyms.java index 511bce56e1ecc..aad6c5a9b898e 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/synonym/PoolPropertySynonyms.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/synonym/PoolPropertySynonyms.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.props.synonym; +package org.apache.shardingsphere.infra.datasource.pool.props.domain.synonym; import lombok.EqualsAndHashCode; diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/synonym/PropertySynonyms.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/synonym/PropertySynonyms.java similarity index 97% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/synonym/PropertySynonyms.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/synonym/PropertySynonyms.java index 2a23234c7a082..8626727de9e1c 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/synonym/PropertySynonyms.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/synonym/PropertySynonyms.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource.props.synonym; +package org.apache.shardingsphere.infra.datasource.pool.props.domain.synonym; import lombok.EqualsAndHashCode; import lombok.Getter; diff --git a/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/validator/DataSourcePoolPropertiesContentValidator.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/validator/DataSourcePoolPropertiesContentValidator.java new file mode 100644 index 0000000000000..3f8b4205adb14 --- /dev/null +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/validator/DataSourcePoolPropertiesContentValidator.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.datasource.pool.props.validator; + +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.spi.annotation.SingletonSPI; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPI; + +/** + * Data source pool properties content validator. + */ +@SingletonSPI +public interface DataSourcePoolPropertiesContentValidator extends TypedSPI { + + /** + * Validate data source properties. 
+ * + * @param props data source pool properties + */ + void validate(DataSourcePoolProperties props); +} diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/DataSourcePropertiesValidator.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/validator/DataSourcePoolPropertiesValidator.java similarity index 55% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/DataSourcePropertiesValidator.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/validator/DataSourcePoolPropertiesValidator.java index 733dac56911ed..90a69a39c4379 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/DataSourcePropertiesValidator.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/validator/DataSourcePoolPropertiesValidator.java @@ -15,12 +15,13 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource.props; +package org.apache.shardingsphere.infra.datasource.pool.props.validator; +import lombok.AccessLevel; +import lombok.NoArgsConstructor; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; import org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolDestroyer; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolPropertiesValidator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import javax.sql.DataSource; @@ -30,54 +31,49 @@ import java.util.LinkedList; import java.util.Map; import java.util.Map.Entry; -import java.util.Optional; /** - * Data source properties validator. 
+ * Data source pool properties validator. */ -public final class DataSourcePropertiesValidator { +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class DataSourcePoolPropertiesValidator { /** - * Validate data source properties map. + * Validate data source pool properties map. * - * @param dataSourcePropertiesMap data source properties map + * @param propsMap data source pool properties map * @return error messages */ - public Collection validate(final Map dataSourcePropertiesMap) { + public static Collection validate(final Map propsMap) { Collection result = new LinkedList<>(); - for (Entry entry : dataSourcePropertiesMap.entrySet()) { + for (Entry entry : propsMap.entrySet()) { try { validateProperties(entry.getKey(), entry.getValue()); validateConnection(entry.getKey(), entry.getValue()); - } catch (final InvalidDataSourcePropertiesException ex) { + } catch (final InvalidDataSourcePoolPropertiesException ex) { result.add(ex.getMessage()); } } return result; } - private void validateProperties(final String dataSourceName, final DataSourceProperties dataSourceProps) throws InvalidDataSourcePropertiesException { - Optional poolMetaData = TypedSPILoader.findService(DataSourcePoolMetaData.class, dataSourceProps.getDataSourceClassName()); - if (!poolMetaData.isPresent()) { - return; - } + private static void validateProperties(final String dataSourceName, final DataSourcePoolProperties props) throws InvalidDataSourcePoolPropertiesException { try { - DataSourcePoolPropertiesValidator propertiesValidator = poolMetaData.get().getDataSourcePoolPropertiesValidator(); - propertiesValidator.validateProperties(dataSourceProps); + TypedSPILoader.findService(DataSourcePoolPropertiesContentValidator.class, props.getPoolClassName()).ifPresent(optional -> optional.validate(props)); } catch (final IllegalArgumentException ex) { - throw new InvalidDataSourcePropertiesException(dataSourceName, ex.getMessage()); + throw new 
InvalidDataSourcePoolPropertiesException(dataSourceName, ex.getMessage()); } } - private void validateConnection(final String dataSourceName, final DataSourceProperties dataSourceProps) throws InvalidDataSourcePropertiesException { + private static void validateConnection(final String dataSourceName, final DataSourcePoolProperties props) throws InvalidDataSourcePoolPropertiesException { DataSource dataSource = null; try { - dataSource = DataSourcePoolCreator.create(dataSourceProps); + dataSource = DataSourcePoolCreator.create(props); checkFailFast(dataSource); // CHECKSTYLE:OFF } catch (final SQLException | RuntimeException ex) { // CHECKSTYLE:ON - throw new InvalidDataSourcePropertiesException(dataSourceName, ex.getMessage()); + throw new InvalidDataSourcePoolPropertiesException(dataSourceName, ex.getMessage()); } finally { if (null != dataSource) { new DataSourcePoolDestroyer(dataSource).asyncDestroy(); @@ -85,7 +81,8 @@ private void validateConnection(final String dataSourceName, final DataSourcePro } } - private void checkFailFast(final DataSource dataSource) throws SQLException { + @SuppressWarnings("EmptyTryBlock") + private static void checkFailFast(final DataSource dataSource) throws SQLException { // CHECKSTYLE:OFF try (Connection ignored = dataSource.getConnection()) { // CHECKSTYLE:ON diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DefaultDataSourcePoolPropertiesValidator.java b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/validator/InvalidDataSourcePoolPropertiesException.java similarity index 57% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DefaultDataSourcePoolPropertiesValidator.java rename to infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/validator/InvalidDataSourcePoolPropertiesException.java index 2c97893ddc11f..63c9c4ff05f72 100644 
--- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DefaultDataSourcePoolPropertiesValidator.java +++ b/infra/data-source-pool/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/props/validator/InvalidDataSourcePoolPropertiesException.java @@ -15,16 +15,18 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource.pool.metadata; +package org.apache.shardingsphere.infra.datasource.pool.props.validator; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.exception.core.internal.ShardingSphereInternalException; /** - * Default data source pool properties validator. + * Invalid data source pool properties exception. */ -public final class DefaultDataSourcePoolPropertiesValidator implements DataSourcePoolPropertiesValidator { +public final class InvalidDataSourcePoolPropertiesException extends ShardingSphereInternalException { - @Override - public void validateProperties(final DataSourceProperties dataSourceProps) { + private static final long serialVersionUID = -7221138369057943935L; + + public InvalidDataSourcePoolPropertiesException(final String dataSourceName, final String errorMessage) { + super("Invalid data source `%s`, error message is: %s", dataSourceName, errorMessage); } } diff --git a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreatorTest.java b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreatorTest.java similarity index 85% rename from infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreatorTest.java rename to infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreatorTest.java index 1ccbd4b4d0af6..5c3ea17f30f24 100644 --- 
a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreatorTest.java +++ b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreatorTest.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.infra.datasource.pool.creator; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.junit.jupiter.api.Test; @@ -34,18 +34,18 @@ class DataSourcePoolCreatorTest { @Test void assertCreateMap() { - Map actual = DataSourcePoolCreator.create(Collections.singletonMap("foo_ds", new DataSourceProperties(MockedDataSource.class.getName(), createProperties()))); + Map actual = DataSourcePoolCreator.create(Collections.singletonMap("foo_ds", new DataSourcePoolProperties(MockedDataSource.class.getName(), createProperties())), true); assertThat(actual.size(), is(1)); assertDataSource((MockedDataSource) actual.get("foo_ds")); } @Test void assertCreate() { - assertDataSource((MockedDataSource) DataSourcePoolCreator.create(new DataSourceProperties(MockedDataSource.class.getName(), createProperties()))); + assertDataSource((MockedDataSource) DataSourcePoolCreator.create(new DataSourcePoolProperties(MockedDataSource.class.getName(), createProperties()))); } private Map createProperties() { - Map result = new LinkedHashMap<>(); + Map result = new LinkedHashMap<>(3, 1F); result.put("url", "jdbc:mock://127.0.0.1/foo_ds"); result.put("username", "root"); result.put("password", "root"); diff --git a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/DataSourcePoolDestroyerTest.java b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/DataSourcePoolDestroyerTest.java similarity index 100% rename 
from infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/DataSourcePoolDestroyerTest.java rename to infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/DataSourcePoolDestroyerTest.java diff --git a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/fixture/MockedDataSourcePoolActiveDetector.java b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/fixture/DataSourcePoolActiveDetectorFixture.java similarity index 79% rename from infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/fixture/MockedDataSourcePoolActiveDetector.java rename to infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/fixture/DataSourcePoolActiveDetectorFixture.java index 3f3bd387ff97f..9cab103dd0b47 100644 --- a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/fixture/MockedDataSourcePoolActiveDetector.java +++ b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/destroyer/fixture/DataSourcePoolActiveDetectorFixture.java @@ -17,19 +17,19 @@ package org.apache.shardingsphere.infra.datasource.pool.destroyer.fixture; -import lombok.SneakyThrows; -import org.apache.shardingsphere.infra.datasource.pool.destroyer.detector.DataSourcePoolActiveDetector; +import org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolActiveDetector; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import javax.sql.DataSource; import java.sql.SQLException; -public final class MockedDataSourcePoolActiveDetector implements DataSourcePoolActiveDetector { +public final class DataSourcePoolActiveDetectorFixture implements DataSourcePoolActiveDetector { - @SneakyThrows(SQLException.class) @Override - public boolean 
containsActiveConnection(final DataSource dataSource) { - return !dataSource.unwrap(MockedDataSource.class).getOpenedConnections().isEmpty(); + public boolean containsActiveConnection(final DataSource dataSource) throws SQLException { + try (MockedDataSource wrappedDataSource = dataSource.unwrap(MockedDataSource.class)) { + return !wrappedDataSource.getOpenedConnections().isEmpty(); + } } @Override diff --git a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/metadata/fixture/MockedDataSourcePoolFieldMetaData.java b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/metadata/fixture/DataSourcePoolFieldMetaDataFixture.java similarity index 80% rename from infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/metadata/fixture/MockedDataSourcePoolFieldMetaData.java rename to infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/metadata/fixture/DataSourcePoolFieldMetaDataFixture.java index a88e5666b93bb..9fdffd7562f6d 100644 --- a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/metadata/fixture/MockedDataSourcePoolFieldMetaData.java +++ b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/metadata/fixture/DataSourcePoolFieldMetaDataFixture.java @@ -19,17 +19,7 @@ import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolFieldMetaData; -public final class MockedDataSourcePoolFieldMetaData implements DataSourcePoolFieldMetaData { - - @Override - public String getUsernameFieldName() { - return "username"; - } - - @Override - public String getPasswordFieldName() { - return "password"; - } +public final class DataSourcePoolFieldMetaDataFixture implements DataSourcePoolFieldMetaData { @Override public String getJdbcUrlFieldName() { diff --git 
a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/metadata/fixture/MockedDataSourcePoolMetaData.java b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/metadata/fixture/DataSourcePoolMetaDataFixture.java similarity index 72% rename from infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/metadata/fixture/MockedDataSourcePoolMetaData.java rename to infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/metadata/fixture/DataSourcePoolMetaDataFixture.java index 459ada86677d4..ac03f15943b56 100644 --- a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/metadata/fixture/MockedDataSourcePoolMetaData.java +++ b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/metadata/fixture/DataSourcePoolMetaDataFixture.java @@ -18,15 +18,14 @@ package org.apache.shardingsphere.infra.datasource.pool.metadata.fixture; import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolPropertiesValidator; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DefaultDataSourcePoolPropertiesValidator; +import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; -public final class MockedDataSourcePoolMetaData implements DataSourcePoolMetaData { +public final class DataSourcePoolMetaDataFixture implements DataSourcePoolMetaData { @Override public Map getDefaultProperties() { @@ -34,7 +33,7 @@ public Map getDefaultProperties() { } @Override - public Map getInvalidProperties() { + public Map getSkippedProperties() { Map result = new HashMap<>(2, 1F); result.put("maxPoolSize", -1); result.put("minPoolSize", -1); @@ -55,17 +54,12 @@ public Collection 
getTransientFieldNames() { } @Override - public MockedDataSourcePoolFieldMetaData getFieldMetaData() { - return new MockedDataSourcePoolFieldMetaData(); - } - - @Override - public DataSourcePoolPropertiesValidator getDataSourcePoolPropertiesValidator() { - return new DefaultDataSourcePoolPropertiesValidator(); + public DataSourcePoolFieldMetaDataFixture getFieldMetaData() { + return new DataSourcePoolFieldMetaDataFixture(); } @Override public String getType() { - return "org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource"; + return MockedDataSource.class.getName(); } } diff --git a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/DataSourcePropertiesCreatorTest.java b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/creator/DataSourcePoolPropertiesCreatorTest.java similarity index 58% rename from infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/DataSourcePropertiesCreatorTest.java rename to infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/creator/DataSourcePoolPropertiesCreatorTest.java index 26c9a433271bf..7b14cc3df52f9 100644 --- a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/DataSourcePropertiesCreatorTest.java +++ b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/creator/DataSourcePoolPropertiesCreatorTest.java @@ -15,14 +15,15 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.props; +package org.apache.shardingsphere.infra.datasource.pool.props.creator; -import org.apache.shardingsphere.infra.datasource.config.ConnectionConfiguration; -import org.apache.shardingsphere.infra.datasource.config.DataSourceConfiguration; -import org.apache.shardingsphere.infra.datasource.config.PoolConfiguration; -import org.apache.shardingsphere.infra.datasource.props.custom.CustomDataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.synonym.ConnectionPropertySynonyms; -import org.apache.shardingsphere.infra.datasource.props.synonym.PoolPropertySynonyms; +import org.apache.shardingsphere.infra.datasource.pool.config.ConnectionConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.PoolConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.custom.CustomDataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.synonym.ConnectionPropertySynonyms; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.synonym.PoolPropertySynonyms; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.junit.jupiter.api.Test; @@ -39,24 +40,24 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -class DataSourcePropertiesCreatorTest { +class DataSourcePoolPropertiesCreatorTest { @Test void assertCreateWithDataSourceConfiguration() { - assertParameter(DataSourcePropertiesCreator.create(createResourceConfiguration())); + assertParameter(DataSourcePoolPropertiesCreator.create(createDataSourceConfiguration())); } - private DataSourceConfiguration createResourceConfiguration() { - ConnectionConfiguration connectionConfig = new 
ConnectionConfiguration("com.zaxxer.hikari.HikariDataSource", "jdbc:mysql://localhost:3306/demo_ds", "root", "root"); + private DataSourceConfiguration createDataSourceConfiguration() { + ConnectionConfiguration connectionConfig = new ConnectionConfiguration(MockedDataSource.class.getName(), "jdbc:mock://127.0.0.1/foo_ds", "root", "root"); PoolConfiguration poolConfig = new PoolConfiguration(null, null, null, null, null, null, null); return new DataSourceConfiguration(connectionConfig, poolConfig); } - private void assertParameter(final DataSourceProperties actual) { + private void assertParameter(final DataSourcePoolProperties actual) { Map props = actual.getAllLocalProperties(); assertThat(props.size(), is(10)); - assertThat(props.get("dataSourceClassName"), is("com.zaxxer.hikari.HikariDataSource")); - assertThat(props.get("url"), is("jdbc:mysql://localhost:3306/demo_ds")); + assertThat(props.get("dataSourceClassName"), is(MockedDataSource.class.getName())); + assertThat(props.get("url"), is("jdbc:mock://127.0.0.1/foo_ds")); assertThat(props.get("username"), is("root")); assertThat(props.get("password"), is("root")); assertNull(props.get("maximumPoolSize")); @@ -68,20 +69,7 @@ private void assertParameter(final DataSourceProperties actual) { @Test void assertCreateWithDataSource() { - assertThat(DataSourcePropertiesCreator.create(createDataSource()), is(new DataSourceProperties(MockedDataSource.class.getName(), createProperties()))); - } - - @Test - void assertCreateConfiguration() { - DataSourceProperties dataSourceProperties = mock(DataSourceProperties.class); - ConnectionPropertySynonyms connectionPropertySynonyms = new ConnectionPropertySynonyms(createStandardProperties(), createPropertySynonyms()); - PoolPropertySynonyms poolPropertySynonyms = new PoolPropertySynonyms(createStandardProperties(), createPropertySynonyms()); - CustomDataSourceProperties customDataSourceProperties = new CustomDataSourceProperties(createProperties(), - Arrays.asList("username", 
"password", "closed"), Collections.singletonList("closed"), Collections.singletonMap("username", "user")); - when(dataSourceProperties.getConnectionPropertySynonyms()).thenReturn(connectionPropertySynonyms); - when(dataSourceProperties.getPoolPropertySynonyms()).thenReturn(poolPropertySynonyms); - when(dataSourceProperties.getCustomDataSourceProperties()).thenReturn(customDataSourceProperties); - DataSourcePropertiesCreator.createConfiguration(dataSourceProperties); + assertThat(DataSourcePoolPropertiesCreator.create(createDataSource()), is(new DataSourcePoolProperties(MockedDataSource.class.getName(), createProperties()))); } private DataSource createDataSource() { @@ -93,14 +81,17 @@ private DataSource createDataSource() { return result; } - private Map createProperties() { - Map result = new HashMap<>(); - result.put("driverClassName", MockedDataSource.class.getName()); - result.put("url", "jdbc:mock://127.0.0.1/foo_ds"); - result.put("username", "root"); - result.put("password", "root"); - result.put("maximumPoolSize", "-1"); - return result; + @Test + void assertCreateConfiguration() { + DataSourcePoolProperties props = mock(DataSourcePoolProperties.class); + ConnectionPropertySynonyms connectionPropSynonyms = new ConnectionPropertySynonyms(createStandardProperties(), createPropertySynonyms()); + PoolPropertySynonyms poolPropSynonyms = new PoolPropertySynonyms(createStandardProperties(), createPropertySynonyms()); + CustomDataSourcePoolProperties customProps = new CustomDataSourcePoolProperties(createProperties(), + Arrays.asList("username", "password", "closed"), Collections.singletonList("closed"), Collections.singletonMap("username", "user")); + when(props.getConnectionPropertySynonyms()).thenReturn(connectionPropSynonyms); + when(props.getPoolPropertySynonyms()).thenReturn(poolPropSynonyms); + when(props.getCustomProperties()).thenReturn(customProps); + assertPoolConfiguration(DataSourcePoolPropertiesCreator.createConfiguration(props).getPool()); } private 
Map createStandardProperties() { @@ -123,4 +114,24 @@ private Map createPropertySynonyms() { result.put("minPoolSize", "minimumIdle"); return result; } + + private Map createProperties() { + Map result = new HashMap<>(); + result.put("driverClassName", MockedDataSource.class.getName()); + result.put("url", "jdbc:mock://127.0.0.1/foo_ds"); + result.put("username", "root"); + result.put("password", "root"); + result.put("maximumPoolSize", "-1"); + return result; + } + + private static void assertPoolConfiguration(final PoolConfiguration actual) { + assertThat(actual.getIdleTimeoutMilliseconds(), is(180000L)); + assertThat(actual.getMaxLifetimeMilliseconds(), is(180000L)); + assertThat(actual.getMaxPoolSize(), is(30)); + assertThat(actual.getMinPoolSize(), is(10)); + assertThat(actual.getCustomProperties().get("driverClassName"), is(MockedDataSource.class.getName())); + assertThat(actual.getCustomProperties().get("url"), is("jdbc:mock://127.0.0.1/foo_ds")); + assertThat(actual.getCustomProperties().get("maximumPoolSize"), is("-1")); + } } diff --git a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/DataSourcePropertiesTest.java b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/DataSourcePoolPropertiesTest.java similarity index 67% rename from infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/DataSourcePropertiesTest.java rename to infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/DataSourcePoolPropertiesTest.java index 9509e47c6d019..a61b94d56ab0a 100644 --- a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/DataSourcePropertiesTest.java +++ b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/DataSourcePoolPropertiesTest.java @@ -15,8 +15,9 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.props; +package org.apache.shardingsphere.infra.datasource.pool.props.domain; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.junit.jupiter.api.Test; @@ -35,7 +36,7 @@ import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; -class DataSourcePropertiesTest { +class DataSourcePoolPropertiesTest { @SuppressWarnings("unchecked") @Test @@ -46,8 +47,8 @@ void assertGetDataSourceConfigurationWithConnectionInitSqls() { actualDataSource.setUsername("root"); actualDataSource.setPassword("root"); actualDataSource.setConnectionInitSqls(Arrays.asList("set names utf8mb4;", "set names utf8;")); - DataSourceProperties actual = DataSourcePropertiesCreator.create(actualDataSource); - assertThat(actual.getDataSourceClassName(), is(MockedDataSource.class.getName())); + DataSourcePoolProperties actual = DataSourcePoolPropertiesCreator.create(actualDataSource); + assertThat(actual.getPoolClassName(), is(MockedDataSource.class.getName())); assertThat(actual.getAllLocalProperties().get("url").toString(), is("jdbc:mock://127.0.0.1/foo_ds")); assertThat(actual.getAllLocalProperties().get("username").toString(), is("root")); assertThat(actual.getAllLocalProperties().get("password").toString(), is("root")); @@ -60,8 +61,8 @@ void assertGetDataSourceConfigurationWithConnectionInitSqls() { @Test void assertGetAllLocalProperties() { - DataSourceProperties originalDataSourceProps = new DataSourceProperties(MockedDataSource.class.getName(), getProperties()); - Map actualAllProps = originalDataSourceProps.getAllLocalProperties(); + DataSourcePoolProperties originalProps = new DataSourcePoolProperties(MockedDataSource.class.getName(), getProperties()); + Map actualAllProps = originalProps.getAllLocalProperties(); assertThat(actualAllProps.size(), 
is(7)); assertTrue(actualAllProps.containsKey("driverClassName")); assertTrue(actualAllProps.containsValue(MockedDataSource.class.getName())); @@ -93,57 +94,57 @@ private Map getProperties() { @Test void assertEquals() { - assertThat(new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("root")), - is(new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("root")))); + assertThat(new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("root")), + is(new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("root")))); } @Test void assertNotEqualsWithNullValue() { - assertNotEquals(null, new DataSourceProperties(MockedDataSource.class.getName(), new HashMap<>())); + assertNotEquals(null, new DataSourcePoolProperties(MockedDataSource.class.getName(), new HashMap<>())); } @Test void assertNotEqualsWithDifferentDataSourceClassName() { - assertThat(new DataSourceProperties("FooDataSourceClass", new HashMap<>()), not(new DataSourceProperties("BarDataSourceClass", new HashMap<>()))); + assertThat(new DataSourcePoolProperties("FooDataSourceClass", new HashMap<>()), not(new DataSourcePoolProperties("BarDataSourceClass", new HashMap<>()))); } @Test void assertNotEqualsWithDifferentProperties() { - DataSourceProperties actual = new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("foo")); - DataSourceProperties expected = new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("bar")); + DataSourcePoolProperties actual = new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("foo")); + DataSourcePoolProperties expected = new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("bar")); assertThat(actual, not(expected)); } @Test void assertSameHashCode() { - assertThat(new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("root")).hashCode(), 
- is(new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("root")).hashCode())); + assertThat(new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("root")).hashCode(), + is(new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("root")).hashCode())); } @Test void assertDifferentHashCodeWithDifferentDataSourceClassName() { - assertThat(new DataSourceProperties("FooDataSourceClass", createUserProperties("foo")).hashCode(), - not(new DataSourceProperties("BarDataSourceClass", createUserProperties("foo")).hashCode())); + assertThat(new DataSourcePoolProperties("FooDataSourceClass", createUserProperties("foo")).hashCode(), + not(new DataSourcePoolProperties("BarDataSourceClass", createUserProperties("foo")).hashCode())); } @Test void assertDifferentHashCodeWithDifferentProperties() { - assertThat(new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("foo")).hashCode(), - not(new DataSourceProperties(MockedDataSource.class.getName(), createUserProperties("bar")).hashCode())); + assertThat(new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("foo")).hashCode(), + not(new DataSourcePoolProperties(MockedDataSource.class.getName(), createUserProperties("bar")).hashCode())); } private Map createUserProperties(final String username) { Map result = new LinkedHashMap<>(2, 1F); result.put("username", username); - result.put("dataSourceProperties", getDataSourceProperties()); + result.put("dataSourceProperties", createDataSourcePoolProperties()); return result; } - private Map getDataSourceProperties() { + private Map createDataSourcePoolProperties() { Map result = new LinkedHashMap<>(3, 1F); - result.put("maintainTimeStats", "false"); - result.put("rewriteBatchedStatements", "true"); - result.put("useLocalSessionState", "true"); + result.put("maintainTimeStats", Boolean.FALSE.toString()); + result.put("rewriteBatchedStatements", 
Boolean.TRUE.toString()); + result.put("useLocalSessionState", Boolean.TRUE.toString()); return result; } } diff --git a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/custom/CustomDataSourcePropertiesTest.java b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/custom/CustomDataSourcePoolPropertiesTest.java similarity index 92% rename from infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/custom/CustomDataSourcePropertiesTest.java rename to infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/custom/CustomDataSourcePoolPropertiesTest.java index ddf8a6c033d5b..0eafba91d5d65 100644 --- a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/custom/CustomDataSourcePropertiesTest.java +++ b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/custom/CustomDataSourcePoolPropertiesTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.props.custom; +package org.apache.shardingsphere.infra.datasource.pool.props.domain.custom; import org.junit.jupiter.api.Test; @@ -28,11 +28,11 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -class CustomDataSourcePropertiesTest { +class CustomDataSourcePoolPropertiesTest { @Test void assertGetProperties() { - Map actual = new CustomDataSourceProperties( + Map actual = new CustomDataSourcePoolProperties( createProperties(), Arrays.asList("username", "password", "closed"), Collections.singletonList("closed"), Collections.singletonMap("username", "user")).getProperties(); assertThat(actual.size(), is(3)); assertThat(actual.get("foo"), is("bar")); diff --git a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/synonym/ConnectionPropertySynonymsTest.java b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/synonym/ConnectionPropertySynonymsTest.java similarity index 98% rename from infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/synonym/ConnectionPropertySynonymsTest.java rename to infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/synonym/ConnectionPropertySynonymsTest.java index d1aa4b5400585..111a9cd443af9 100644 --- a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/synonym/ConnectionPropertySynonymsTest.java +++ b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/synonym/ConnectionPropertySynonymsTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.props.synonym; +package org.apache.shardingsphere.infra.datasource.pool.props.domain.synonym; import org.junit.jupiter.api.Test; diff --git a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/synonym/PoolPropertySynonymsTest.java b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/synonym/PoolPropertySynonymsTest.java similarity index 98% rename from infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/synonym/PoolPropertySynonymsTest.java rename to infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/synonym/PoolPropertySynonymsTest.java index d4183cbbc2d12..6ab274fc58913 100644 --- a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/synonym/PoolPropertySynonymsTest.java +++ b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/domain/synonym/PoolPropertySynonymsTest.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.props.synonym; +package org.apache.shardingsphere.infra.datasource.pool.props.domain.synonym; import org.junit.jupiter.api.Test; @@ -23,8 +23,8 @@ import java.util.Map; import static org.hamcrest.CoreMatchers.is; -import static org.junit.jupiter.api.Assertions.assertFalse; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertFalse; class PoolPropertySynonymsTest { diff --git a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/DataSourcePropertiesValidatorTest.java b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/validator/DataSourcePoolPropertiesValidatorTest.java similarity index 75% rename from infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/DataSourcePropertiesValidatorTest.java rename to infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/validator/DataSourcePoolPropertiesValidatorTest.java index f4d1d080c1b94..548c50901f895 100644 --- a/infra/datasource/core/src/test/java/org/apache/shardingsphere/infra/datasource/props/DataSourcePropertiesValidatorTest.java +++ b/infra/data-source-pool/core/src/test/java/org/apache/shardingsphere/infra/datasource/pool/props/validator/DataSourcePoolPropertiesValidatorTest.java @@ -15,9 +15,10 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.props; +package org.apache.shardingsphere.infra.datasource.pool.props.validator; import com.zaxxer.hikari.HikariDataSource; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.test.fixture.jdbc.MockedDriver; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -26,7 +27,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; -class DataSourcePropertiesValidatorTest { +class DataSourcePoolPropertiesValidatorTest { @BeforeAll static void setUp() throws ClassNotFoundException { @@ -34,8 +35,8 @@ static void setUp() throws ClassNotFoundException { } @Test - void assertValidateSuccess() { - assertTrue(new DataSourcePropertiesValidator().validate( - Collections.singletonMap("name", new DataSourceProperties(HikariDataSource.class.getName(), Collections.singletonMap("jdbcUrl", "jdbc:mock")))).isEmpty()); + void assertValidate() { + assertTrue(DataSourcePoolPropertiesValidator.validate( + Collections.singletonMap("name", new DataSourcePoolProperties(HikariDataSource.class.getName(), Collections.singletonMap("jdbcUrl", "jdbc:mock")))).isEmpty()); } } diff --git a/infra/datasource/core/src/test/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.destroyer.detector.DataSourcePoolActiveDetector b/infra/data-source-pool/core/src/test/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolActiveDetector similarity index 95% rename from infra/datasource/core/src/test/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.destroyer.detector.DataSourcePoolActiveDetector rename to infra/data-source-pool/core/src/test/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolActiveDetector index 260927a6ce767..7c4633c0f0a0f 100644 --- 
a/infra/datasource/core/src/test/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.destroyer.detector.DataSourcePoolActiveDetector +++ b/infra/data-source-pool/core/src/test/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolActiveDetector @@ -15,4 +15,4 @@ # limitations under the License. # -org.apache.shardingsphere.infra.datasource.pool.destroyer.fixture.MockedDataSourcePoolActiveDetector +org.apache.shardingsphere.infra.datasource.pool.destroyer.fixture.DataSourcePoolActiveDetectorFixture diff --git a/infra/datasource/core/src/test/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData b/infra/data-source-pool/core/src/test/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData similarity index 96% rename from infra/datasource/core/src/test/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData rename to infra/data-source-pool/core/src/test/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData index 0b4c9b8495433..047818918ecdb 100644 --- a/infra/datasource/core/src/test/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData +++ b/infra/data-source-pool/core/src/test/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData @@ -15,4 +15,4 @@ # limitations under the License. 
# -org.apache.shardingsphere.infra.datasource.pool.metadata.fixture.MockedDataSourcePoolMetaData +org.apache.shardingsphere.infra.datasource.pool.metadata.fixture.DataSourcePoolMetaDataFixture diff --git a/infra/common/src/test/resources/logback-test.xml b/infra/data-source-pool/core/src/test/resources/logback-test.xml similarity index 100% rename from infra/common/src/test/resources/logback-test.xml rename to infra/data-source-pool/core/src/test/resources/logback-test.xml diff --git a/infra/datasource/pom.xml b/infra/data-source-pool/pom.xml similarity index 95% rename from infra/datasource/pom.xml rename to infra/data-source-pool/pom.xml index 54e09b093d272..24b2230969769 100644 --- a/infra/datasource/pom.xml +++ b/infra/data-source-pool/pom.xml @@ -23,7 +23,7 @@ shardingsphere-infra 5.4.1-SNAPSHOT - shardingsphere-infra-datasource + shardingsphere-infra-data-source-pool pom ${project.artifactId} diff --git a/infra/datasource/type/c3p0/pom.xml b/infra/data-source-pool/type/c3p0/pom.xml similarity index 90% rename from infra/datasource/type/c3p0/pom.xml rename to infra/data-source-pool/type/c3p0/pom.xml index a067d1af42693..5b074c97578df 100644 --- a/infra/datasource/type/c3p0/pom.xml +++ b/infra/data-source-pool/type/c3p0/pom.xml @@ -20,16 +20,16 @@ 4.0.0 org.apache.shardingsphere - shardingsphere-infra-datasource-type + shardingsphere-infra-data-source-pool-type 5.4.1-SNAPSHOT - shardingsphere-infra-datasource-c3p0 + shardingsphere-infra-data-source-pool-c3p0 ${project.artifactId} org.apache.shardingsphere - shardingsphere-infra-datasource-core + shardingsphere-infra-data-source-pool-core ${project.version} diff --git a/infra/datasource/type/c3p0/src/main/java/org/apache/shardingsphere/infra/datasource/c3p0/metadata/C3P0DataSourcePoolFieldMetaData.java b/infra/data-source-pool/type/c3p0/src/main/java/org/apache/shardingsphere/infra/datasource/pool/c3p0/metadata/C3P0DataSourcePoolFieldMetaData.java similarity index 83% rename from 
infra/datasource/type/c3p0/src/main/java/org/apache/shardingsphere/infra/datasource/c3p0/metadata/C3P0DataSourcePoolFieldMetaData.java rename to infra/data-source-pool/type/c3p0/src/main/java/org/apache/shardingsphere/infra/datasource/pool/c3p0/metadata/C3P0DataSourcePoolFieldMetaData.java index e75df600e647f..959e61c488a8d 100644 --- a/infra/datasource/type/c3p0/src/main/java/org/apache/shardingsphere/infra/datasource/c3p0/metadata/C3P0DataSourcePoolFieldMetaData.java +++ b/infra/data-source-pool/type/c3p0/src/main/java/org/apache/shardingsphere/infra/datasource/pool/c3p0/metadata/C3P0DataSourcePoolFieldMetaData.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource.c3p0.metadata; +package org.apache.shardingsphere.infra.datasource.pool.c3p0.metadata; import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolFieldMetaData; @@ -24,16 +24,6 @@ */ public final class C3P0DataSourcePoolFieldMetaData implements DataSourcePoolFieldMetaData { - @Override - public String getUsernameFieldName() { - return "user"; - } - - @Override - public String getPasswordFieldName() { - return "password"; - } - @Override public String getJdbcUrlFieldName() { return "jdbcUrl"; diff --git a/infra/datasource/type/c3p0/src/main/java/org/apache/shardingsphere/infra/datasource/c3p0/metadata/C3P0DataSourcePoolMetaData.java b/infra/data-source-pool/type/c3p0/src/main/java/org/apache/shardingsphere/infra/datasource/pool/c3p0/metadata/C3P0DataSourcePoolMetaData.java similarity index 82% rename from infra/datasource/type/c3p0/src/main/java/org/apache/shardingsphere/infra/datasource/c3p0/metadata/C3P0DataSourcePoolMetaData.java rename to infra/data-source-pool/type/c3p0/src/main/java/org/apache/shardingsphere/infra/datasource/pool/c3p0/metadata/C3P0DataSourcePoolMetaData.java index b379acfcc5772..66027a8688ae5 100644 --- 
a/infra/datasource/type/c3p0/src/main/java/org/apache/shardingsphere/infra/datasource/c3p0/metadata/C3P0DataSourcePoolMetaData.java +++ b/infra/data-source-pool/type/c3p0/src/main/java/org/apache/shardingsphere/infra/datasource/pool/c3p0/metadata/C3P0DataSourcePoolMetaData.java @@ -15,11 +15,9 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource.c3p0.metadata; +package org.apache.shardingsphere.infra.datasource.pool.c3p0.metadata; import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolPropertiesValidator; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DefaultDataSourcePoolPropertiesValidator; import java.util.Collection; import java.util.HashMap; @@ -33,7 +31,7 @@ public final class C3P0DataSourcePoolMetaData implements DataSourcePoolMetaData private static final Map DEFAULT_PROPS = new HashMap<>(6, 1F); - private static final Map INVALID_PROPS = new HashMap<>(2, 1F); + private static final Map SKIPPED_PROPS = new HashMap<>(2, 1F); private static final Map PROP_SYNONYMS = new HashMap<>(5, 1F); @@ -56,8 +54,8 @@ private static void buildDefaultProperties() { } private static void buildInvalidProperties() { - INVALID_PROPS.put("minPoolSize", -1); - INVALID_PROPS.put("maxPoolSize", -1); + SKIPPED_PROPS.put("minPoolSize", -1); + SKIPPED_PROPS.put("maxPoolSize", -1); } private static void buildPropertySynonyms() { @@ -79,8 +77,8 @@ public Map getDefaultProperties() { } @Override - public Map getInvalidProperties() { - return INVALID_PROPS; + public Map getSkippedProperties() { + return SKIPPED_PROPS; } @Override @@ -102,9 +100,4 @@ public C3P0DataSourcePoolFieldMetaData getFieldMetaData() { public String getType() { return "com.mchange.v2.c3p0.ComboPooledDataSource"; } - - @Override - public DataSourcePoolPropertiesValidator getDataSourcePoolPropertiesValidator() { - return new 
DefaultDataSourcePoolPropertiesValidator(); - } } diff --git a/infra/datasource/type/dbcp/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData b/infra/data-source-pool/type/c3p0/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData similarity index 89% rename from infra/datasource/type/dbcp/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData rename to infra/data-source-pool/type/c3p0/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData index 0a581c959c66b..7aaac6f4b147e 100644 --- a/infra/datasource/type/dbcp/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData +++ b/infra/data-source-pool/type/c3p0/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData @@ -15,4 +15,4 @@ # limitations under the License. # -org.apache.shardingsphere.infra.datasource.dbcp.metadata.DBCPDataSourcePoolMetaData +org.apache.shardingsphere.infra.datasource.pool.c3p0.metadata.C3P0DataSourcePoolMetaData diff --git a/infra/data-source-pool/type/c3p0/src/test/java/org/apache/shardingsphere/infra/datasource/pool/c3p0/creator/C3P0DataSourcePoolCreatorTest.java b/infra/data-source-pool/type/c3p0/src/test/java/org/apache/shardingsphere/infra/datasource/pool/c3p0/creator/C3P0DataSourcePoolCreatorTest.java new file mode 100644 index 0000000000000..1fb156bd1e973 --- /dev/null +++ b/infra/data-source-pool/type/c3p0/src/test/java/org/apache/shardingsphere/infra/datasource/pool/c3p0/creator/C3P0DataSourcePoolCreatorTest.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.datasource.pool.c3p0.creator; + +import com.mchange.v2.c3p0.ComboPooledDataSource; +import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; +import org.apache.shardingsphere.test.util.PropertiesBuilder; +import org.junit.jupiter.api.Test; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; + +class C3P0DataSourcePoolCreatorTest { + + @Test + void assertCreateDataSource() { + ComboPooledDataSource actual = (ComboPooledDataSource) DataSourcePoolCreator.create(new DataSourcePoolProperties(ComboPooledDataSource.class.getName(), createDataSourcePoolProperties())); + assertThat(actual.getJdbcUrl(), is("jdbc:mock://127.0.0.1/foo_ds")); + assertThat(actual.getUser(), is("root")); + assertThat(actual.getPassword(), is("root")); + assertThat(actual.getProperties(), is(PropertiesBuilder.build(new PropertiesBuilder.Property("foo", "foo_value"), new PropertiesBuilder.Property("bar", "bar_value"), + new PropertiesBuilder.Property("password", "root"), new 
PropertiesBuilder.Property("user", "root")))); + } + + private Map createDataSourcePoolProperties() { + Map result = new HashMap<>(); + result.put("url", "jdbc:mock://127.0.0.1/foo_ds"); + result.put("driverClassName", MockedDataSource.class.getName()); + result.put("username", "root"); + result.put("password", "root"); + result.put("properties", PropertiesBuilder.build(new PropertiesBuilder.Property("foo", "foo_value"), new PropertiesBuilder.Property("bar", "bar_value"))); + return result; + } +} diff --git a/infra/datasource/type/dbcp/pom.xml b/infra/data-source-pool/type/dbcp/pom.xml similarity index 90% rename from infra/datasource/type/dbcp/pom.xml rename to infra/data-source-pool/type/dbcp/pom.xml index 77c9ebf14c6a8..b3e5a8f86d872 100644 --- a/infra/datasource/type/dbcp/pom.xml +++ b/infra/data-source-pool/type/dbcp/pom.xml @@ -20,16 +20,16 @@ 4.0.0 org.apache.shardingsphere - shardingsphere-infra-datasource-type + shardingsphere-infra-data-source-pool-type 5.4.1-SNAPSHOT - shardingsphere-infra-datasource-dbcp + shardingsphere-infra-data-source-pool-dbcp ${project.artifactId} org.apache.shardingsphere - shardingsphere-infra-datasource-core + shardingsphere-infra-data-source-pool-core ${project.version} diff --git a/infra/datasource/type/dbcp/src/main/java/org/apache/shardingsphere/infra/datasource/dbcp/metadata/DBCPDataSourcePoolFieldMetaData.java b/infra/data-source-pool/type/dbcp/src/main/java/org/apache/shardingsphere/infra/datasource/pool/dbcp/metadata/DBCPDataSourcePoolFieldMetaData.java similarity index 82% rename from infra/datasource/type/dbcp/src/main/java/org/apache/shardingsphere/infra/datasource/dbcp/metadata/DBCPDataSourcePoolFieldMetaData.java rename to infra/data-source-pool/type/dbcp/src/main/java/org/apache/shardingsphere/infra/datasource/pool/dbcp/metadata/DBCPDataSourcePoolFieldMetaData.java index 53f5004d8ec69..cec9416df9f1e 100644 --- 
a/infra/datasource/type/dbcp/src/main/java/org/apache/shardingsphere/infra/datasource/dbcp/metadata/DBCPDataSourcePoolFieldMetaData.java +++ b/infra/data-source-pool/type/dbcp/src/main/java/org/apache/shardingsphere/infra/datasource/pool/dbcp/metadata/DBCPDataSourcePoolFieldMetaData.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource.dbcp.metadata; +package org.apache.shardingsphere.infra.datasource.pool.dbcp.metadata; import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolFieldMetaData; @@ -24,16 +24,6 @@ */ public final class DBCPDataSourcePoolFieldMetaData implements DataSourcePoolFieldMetaData { - @Override - public String getUsernameFieldName() { - return "username"; - } - - @Override - public String getPasswordFieldName() { - return "password"; - } - @Override public String getJdbcUrlFieldName() { return "url"; diff --git a/infra/datasource/type/dbcp/src/main/java/org/apache/shardingsphere/infra/datasource/dbcp/metadata/DBCPDataSourcePoolMetaData.java b/infra/data-source-pool/type/dbcp/src/main/java/org/apache/shardingsphere/infra/datasource/pool/dbcp/metadata/DBCPDataSourcePoolMetaData.java similarity index 82% rename from infra/datasource/type/dbcp/src/main/java/org/apache/shardingsphere/infra/datasource/dbcp/metadata/DBCPDataSourcePoolMetaData.java rename to infra/data-source-pool/type/dbcp/src/main/java/org/apache/shardingsphere/infra/datasource/pool/dbcp/metadata/DBCPDataSourcePoolMetaData.java index ecf0987d293e4..d0dd5581ce3df 100644 --- a/infra/datasource/type/dbcp/src/main/java/org/apache/shardingsphere/infra/datasource/dbcp/metadata/DBCPDataSourcePoolMetaData.java +++ b/infra/data-source-pool/type/dbcp/src/main/java/org/apache/shardingsphere/infra/datasource/pool/dbcp/metadata/DBCPDataSourcePoolMetaData.java @@ -15,11 +15,9 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.dbcp.metadata; +package org.apache.shardingsphere.infra.datasource.pool.dbcp.metadata; import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolPropertiesValidator; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DefaultDataSourcePoolPropertiesValidator; import java.util.Arrays; import java.util.Collection; @@ -48,7 +46,7 @@ public Map getDefaultProperties() { } @Override - public Map getInvalidProperties() { + public Map getSkippedProperties() { return Collections.emptyMap(); } @@ -76,9 +74,4 @@ public String getType() { public Collection getTypeAliases() { return Arrays.asList("org.apache.commons.dbcp.BasicDataSource", "org.apache.tomcat.dbcp.dbcp2.BasicDataSource"); } - - @Override - public DataSourcePoolPropertiesValidator getDataSourcePoolPropertiesValidator() { - return new DefaultDataSourcePoolPropertiesValidator(); - } } diff --git a/infra/datasource/type/druid/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData b/infra/data-source-pool/type/dbcp/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData similarity index 89% rename from infra/datasource/type/druid/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData rename to infra/data-source-pool/type/dbcp/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData index bda50be822fed..289a77e8f95fd 100644 --- a/infra/datasource/type/druid/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData +++ 
b/infra/data-source-pool/type/dbcp/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData @@ -15,4 +15,4 @@ # limitations under the License. # -org.apache.shardingsphere.infra.datasource.druid.metadata.DruidDataSourcePoolMetaData +org.apache.shardingsphere.infra.datasource.pool.dbcp.metadata.DBCPDataSourcePoolMetaData diff --git a/infra/datasource/type/dbcp/src/test/java/org/apache/shardingsphere/infra/datasource/dbcp/creator/DBCPDataSourcePoolCreatorTest.java b/infra/data-source-pool/type/dbcp/src/test/java/org/apache/shardingsphere/infra/datasource/pool/dbcp/creator/DBCPDataSourcePoolCreatorTest.java similarity index 88% rename from infra/datasource/type/dbcp/src/test/java/org/apache/shardingsphere/infra/datasource/dbcp/creator/DBCPDataSourcePoolCreatorTest.java rename to infra/data-source-pool/type/dbcp/src/test/java/org/apache/shardingsphere/infra/datasource/pool/dbcp/creator/DBCPDataSourcePoolCreatorTest.java index c5c34e954558c..c6afb6fece67c 100644 --- a/infra/datasource/type/dbcp/src/test/java/org/apache/shardingsphere/infra/datasource/dbcp/creator/DBCPDataSourcePoolCreatorTest.java +++ b/infra/data-source-pool/type/dbcp/src/test/java/org/apache/shardingsphere/infra/datasource/pool/dbcp/creator/DBCPDataSourcePoolCreatorTest.java @@ -15,12 +15,12 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.dbcp.creator; +package org.apache.shardingsphere.infra.datasource.pool.dbcp.creator; import lombok.SneakyThrows; import org.apache.commons.dbcp2.BasicDataSource; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.apache.shardingsphere.test.util.PropertiesBuilder; import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; @@ -38,14 +38,14 @@ class DBCPDataSourcePoolCreatorTest { @Test void assertCreateDataSource() { - BasicDataSource actual = (BasicDataSource) DataSourcePoolCreator.create(new DataSourceProperties(BasicDataSource.class.getName(), createDataSourceProperties())); + BasicDataSource actual = (BasicDataSource) DataSourcePoolCreator.create(new DataSourcePoolProperties(BasicDataSource.class.getName(), createDataSourcePoolProperties())); assertThat(actual.getUrl(), is("jdbc:mock://127.0.0.1/foo_ds")); assertThat(actual.getUsername(), is("root")); assertThat(actual.getPassword(), is("root")); assertThat(getConnectionProperties(actual), is(PropertiesBuilder.build(new Property("foo", "foo_value"), new Property("bar", "bar_value")))); } - private Map createDataSourceProperties() { + private Map createDataSourcePoolProperties() { Map result = new HashMap<>(); result.put("url", "jdbc:mock://127.0.0.1/foo_ds"); result.put("driverClassName", MockedDataSource.class.getName()); diff --git a/infra/datasource/type/hikari/pom.xml b/infra/data-source-pool/type/hikari/pom.xml similarity index 90% rename from infra/datasource/type/hikari/pom.xml rename to infra/data-source-pool/type/hikari/pom.xml index 6a157f59b6dec..8e302131daf96 100644 --- a/infra/datasource/type/hikari/pom.xml +++ 
b/infra/data-source-pool/type/hikari/pom.xml @@ -20,16 +20,16 @@ 4.0.0 org.apache.shardingsphere - shardingsphere-infra-datasource-type + shardingsphere-infra-data-source-pool-type 5.4.1-SNAPSHOT - shardingsphere-infra-datasource-hikari + shardingsphere-infra-data-source-pool-hikari ${project.artifactId} org.apache.shardingsphere - shardingsphere-infra-datasource-core + shardingsphere-infra-data-source-pool-core ${project.version} diff --git a/infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/detector/HikariDataSourcePoolActiveDetector.java b/infra/data-source-pool/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/pool/hikari/detector/HikariDataSourcePoolActiveDetector.java similarity index 94% rename from infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/detector/HikariDataSourcePoolActiveDetector.java rename to infra/data-source-pool/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/pool/hikari/detector/HikariDataSourcePoolActiveDetector.java index 59834d61271e8..65c664324ac58 100644 --- a/infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/detector/HikariDataSourcePoolActiveDetector.java +++ b/infra/data-source-pool/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/pool/hikari/detector/HikariDataSourcePoolActiveDetector.java @@ -15,10 +15,10 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.hikari.detector; +package org.apache.shardingsphere.infra.datasource.pool.hikari.detector; import lombok.SneakyThrows; -import org.apache.shardingsphere.infra.datasource.pool.destroyer.detector.DataSourcePoolActiveDetector; +import org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolActiveDetector; import javax.sql.DataSource; diff --git a/infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/metadata/HikariDataSourcePoolFieldMetaData.java b/infra/data-source-pool/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/pool/hikari/metadata/HikariDataSourcePoolFieldMetaData.java similarity index 82% rename from infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/metadata/HikariDataSourcePoolFieldMetaData.java rename to infra/data-source-pool/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/pool/hikari/metadata/HikariDataSourcePoolFieldMetaData.java index e53373dd308c6..30d8630740159 100644 --- a/infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/metadata/HikariDataSourcePoolFieldMetaData.java +++ b/infra/data-source-pool/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/pool/hikari/metadata/HikariDataSourcePoolFieldMetaData.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.hikari.metadata; +package org.apache.shardingsphere.infra.datasource.pool.hikari.metadata; import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolFieldMetaData; @@ -24,16 +24,6 @@ */ public final class HikariDataSourcePoolFieldMetaData implements DataSourcePoolFieldMetaData { - @Override - public String getUsernameFieldName() { - return "username"; - } - - @Override - public String getPasswordFieldName() { - return "password"; - } - @Override public String getJdbcUrlFieldName() { return "jdbcUrl"; diff --git a/infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/metadata/HikariDataSourcePoolMetaData.java b/infra/data-source-pool/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/pool/hikari/metadata/HikariDataSourcePoolMetaData.java similarity index 85% rename from infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/metadata/HikariDataSourcePoolMetaData.java rename to infra/data-source-pool/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/pool/hikari/metadata/HikariDataSourcePoolMetaData.java index 9afe20a85fbf0..2e7bfdff782d7 100644 --- a/infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/metadata/HikariDataSourcePoolMetaData.java +++ b/infra/data-source-pool/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/pool/hikari/metadata/HikariDataSourcePoolMetaData.java @@ -15,10 +15,9 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.hikari.metadata; +package org.apache.shardingsphere.infra.datasource.pool.hikari.metadata; import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolPropertiesValidator; import java.util.Collection; import java.util.HashMap; @@ -32,7 +31,7 @@ public final class HikariDataSourcePoolMetaData implements DataSourcePoolMetaDat private static final Map DEFAULT_PROPS = new HashMap<>(6, 1F); - private static final Map INVALID_PROPS = new HashMap<>(2, 1F); + private static final Map SKIPPED_PROPS = new HashMap<>(2, 1F); private static final Map PROP_SYNONYMS = new HashMap<>(6, 1F); @@ -56,8 +55,8 @@ private static void buildDefaultProperties() { } private static void buildInvalidProperties() { - INVALID_PROPS.put("minimumIdle", -1); - INVALID_PROPS.put("maximumPoolSize", -1); + SKIPPED_PROPS.put("minimumIdle", -1); + SKIPPED_PROPS.put("maximumPoolSize", -1); } private static void buildPropertySynonyms() { @@ -81,8 +80,8 @@ public Map getDefaultProperties() { } @Override - public Map getInvalidProperties() { - return INVALID_PROPS; + public Map getSkippedProperties() { + return SKIPPED_PROPS; } @Override @@ -100,11 +99,6 @@ public HikariDataSourcePoolFieldMetaData getFieldMetaData() { return new HikariDataSourcePoolFieldMetaData(); } - @Override - public DataSourcePoolPropertiesValidator getDataSourcePoolPropertiesValidator() { - return new HikariDataSourcePoolPropertiesValidator(); - } - @Override public String getType() { return "com.zaxxer.hikari.HikariDataSource"; diff --git a/infra/data-source-pool/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/pool/hikari/metadata/HikariDataSourcePoolPropertiesContentValidator.java b/infra/data-source-pool/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/pool/hikari/metadata/HikariDataSourcePoolPropertiesContentValidator.java new 
file mode 100644 index 0000000000000..a588e758c39c5 --- /dev/null +++ b/infra/data-source-pool/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/pool/hikari/metadata/HikariDataSourcePoolPropertiesContentValidator.java @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.datasource.pool.hikari.metadata; + +import com.google.common.base.Preconditions; +import org.apache.shardingsphere.infra.datasource.pool.props.validator.DataSourcePoolPropertiesContentValidator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; + +import java.util.Map; +import java.util.concurrent.TimeUnit; + +/** + * Data source pool properties content validator of HikariCP. 
+ */ +public final class HikariDataSourcePoolPropertiesContentValidator implements DataSourcePoolPropertiesContentValidator { + + private static final long MIN_CONNECTION_TIMEOUT_MILLISECONDS = 250L; + + private static final long MIN_LIFETIME_MILLISECONDS = TimeUnit.SECONDS.toMillis(30L); + + private static final long MIN_KEEP_ALIVE_TIME_MILLISECONDS = TimeUnit.SECONDS.toMillis(30L); + + @Override + public void validate(final DataSourcePoolProperties props) { + Map allLocalProps = props.getAllLocalProperties(); + validateConnectionTimeout(allLocalProps); + validateIdleTimeout(allLocalProps); + validateMaxLifetime(allLocalProps); + validateMaximumPoolSize(allLocalProps); + validateMinimumIdle(allLocalProps); + validateKeepAliveTime(allLocalProps); + } + + private void validateConnectionTimeout(final Map allLocalProps) { + if (isExisted(allLocalProps, "connectionTimeout")) { + long connectionTimeout = Long.parseLong(allLocalProps.get("connectionTimeout").toString()); + Preconditions.checkState(connectionTimeout >= MIN_CONNECTION_TIMEOUT_MILLISECONDS, "connectionTimeout can not less than %s ms.", MIN_CONNECTION_TIMEOUT_MILLISECONDS); + } + } + + private void validateIdleTimeout(final Map allLocalProps) { + if (isExisted(allLocalProps, "idleTimeout")) { + long idleTimeout = Long.parseLong(allLocalProps.get("idleTimeout").toString()); + Preconditions.checkState(idleTimeout >= 0, "idleTimeout can not be negative."); + } + } + + private void validateMaxLifetime(final Map allLocalProps) { + if (isExisted(allLocalProps, "maxLifetime")) { + long maxLifetime = Long.parseLong(allLocalProps.get("maxLifetime").toString()); + Preconditions.checkState(maxLifetime >= MIN_LIFETIME_MILLISECONDS, "maxLifetime can not less than %s ms.", MIN_LIFETIME_MILLISECONDS); + } + } + + private void validateMaximumPoolSize(final Map allLocalProps) { + if (isExisted(allLocalProps, "maximumPoolSize")) { + int maximumPoolSize = Integer.parseInt(allLocalProps.get("maximumPoolSize").toString()); + 
Preconditions.checkState(maximumPoolSize >= 1, "maxPoolSize can not less than 1."); + } + } + + private void validateMinimumIdle(final Map allLocalProps) { + if (isExisted(allLocalProps, "minimumIdle")) { + int minimumIdle = Integer.parseInt(allLocalProps.get("minimumIdle").toString()); + Preconditions.checkState(minimumIdle >= 0, "minimumIdle can not be negative."); + } + } + + private void validateKeepAliveTime(final Map allLocalProps) { + if (!isExisted(allLocalProps, "keepaliveTime")) { + return; + } + int keepAliveTime = Integer.parseInt(allLocalProps.get("keepaliveTime").toString()); + if (0 == keepAliveTime) { + return; + } + Preconditions.checkState(keepAliveTime >= MIN_KEEP_ALIVE_TIME_MILLISECONDS, "keepaliveTime can not be less than %s ms.", MIN_KEEP_ALIVE_TIME_MILLISECONDS); + } + + private boolean isExisted(final Map allLocalProps, final String key) { + return allLocalProps.containsKey(key) && null != allLocalProps.get(key); + } + + @Override + public Object getType() { + return "com.zaxxer.hikari.HikariDataSource"; + } +} diff --git a/infra/data-source-pool/type/hikari/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolActiveDetector b/infra/data-source-pool/type/hikari/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolActiveDetector new file mode 100644 index 0000000000000..2721717072fbf --- /dev/null +++ b/infra/data-source-pool/type/hikari/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolActiveDetector @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +org.apache.shardingsphere.infra.datasource.pool.hikari.detector.HikariDataSourcePoolActiveDetector diff --git a/infra/datasource/type/hikari/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData b/infra/data-source-pool/type/hikari/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData similarity index 89% rename from infra/datasource/type/hikari/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData rename to infra/data-source-pool/type/hikari/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData index 6b4afcf4e3af1..6593f0b54ed5a 100644 --- a/infra/datasource/type/hikari/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData +++ b/infra/data-source-pool/type/hikari/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData @@ -15,4 +15,4 @@ # limitations under the License. 
# -org.apache.shardingsphere.infra.datasource.hikari.metadata.HikariDataSourcePoolMetaData +org.apache.shardingsphere.infra.datasource.pool.hikari.metadata.HikariDataSourcePoolMetaData diff --git a/infra/data-source-pool/type/hikari/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.props.validator.DataSourcePoolPropertiesContentValidator b/infra/data-source-pool/type/hikari/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.props.validator.DataSourcePoolPropertiesContentValidator new file mode 100644 index 0000000000000..01b12a14717ce --- /dev/null +++ b/infra/data-source-pool/type/hikari/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.props.validator.DataSourcePoolPropertiesContentValidator @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +org.apache.shardingsphere.infra.datasource.pool.hikari.metadata.HikariDataSourcePoolPropertiesContentValidator diff --git a/infra/datasource/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/hikari/creator/HikariDataSourcePoolCreatorTest.java b/infra/data-source-pool/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/pool/hikari/creator/HikariDataSourcePoolCreatorTest.java similarity index 86% rename from infra/datasource/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/hikari/creator/HikariDataSourcePoolCreatorTest.java rename to infra/data-source-pool/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/pool/hikari/creator/HikariDataSourcePoolCreatorTest.java index 807396d4697a7..9a77791c499c7 100644 --- a/infra/datasource/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/hikari/creator/HikariDataSourcePoolCreatorTest.java +++ b/infra/data-source-pool/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/pool/hikari/creator/HikariDataSourcePoolCreatorTest.java @@ -15,11 +15,11 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.hikari.creator; +package org.apache.shardingsphere.infra.datasource.pool.hikari.creator; import com.zaxxer.hikari.HikariDataSource; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.apache.shardingsphere.test.util.PropertiesBuilder; import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; @@ -35,14 +35,14 @@ class HikariDataSourcePoolCreatorTest { @Test void assertCreateDataSource() { - HikariDataSource actual = (HikariDataSource) DataSourcePoolCreator.create(new DataSourceProperties(HikariDataSource.class.getName(), createDataSourceProperties())); + HikariDataSource actual = (HikariDataSource) DataSourcePoolCreator.create(new DataSourcePoolProperties(HikariDataSource.class.getName(), createDataSourcePoolProperties())); assertThat(actual.getJdbcUrl(), is("jdbc:mock://127.0.0.1/foo_ds")); assertThat(actual.getUsername(), is("root")); assertThat(actual.getPassword(), is("root")); assertThat(actual.getDataSourceProperties(), is(PropertiesBuilder.build(new Property("foo", "foo_value"), new Property("bar", "bar_value")))); } - private Map createDataSourceProperties() { + private Map createDataSourcePoolProperties() { Map result = new HashMap<>(); result.put("jdbcUrl", "jdbc:mock://127.0.0.1/foo_ds"); result.put("driverClassName", MockedDataSource.class.getName()); diff --git a/infra/datasource/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/hikari/detector/HikariDataSourcePoolActiveDetectorTest.java b/infra/data-source-pool/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/pool/hikari/detector/HikariDataSourcePoolActiveDetectorTest.java similarity index 96% rename from 
infra/datasource/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/hikari/detector/HikariDataSourcePoolActiveDetectorTest.java rename to infra/data-source-pool/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/pool/hikari/detector/HikariDataSourcePoolActiveDetectorTest.java index e46812cf6ada2..cb6c354354fdf 100644 --- a/infra/datasource/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/hikari/detector/HikariDataSourcePoolActiveDetectorTest.java +++ b/infra/data-source-pool/type/hikari/src/test/java/org/apache/shardingsphere/infra/datasource/pool/hikari/detector/HikariDataSourcePoolActiveDetectorTest.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource.hikari.detector; +package org.apache.shardingsphere.infra.datasource.pool.hikari.detector; import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; diff --git a/infra/datasource/type/pom.xml b/infra/data-source-pool/type/pom.xml similarity index 89% rename from infra/datasource/type/pom.xml rename to infra/data-source-pool/type/pom.xml index 7872e3eb04e67..6f5aaac7e94d7 100644 --- a/infra/datasource/type/pom.xml +++ b/infra/data-source-pool/type/pom.xml @@ -20,17 +20,16 @@ 4.0.0 org.apache.shardingsphere - shardingsphere-infra-datasource + shardingsphere-infra-data-source-pool 5.4.1-SNAPSHOT - shardingsphere-infra-datasource-type + shardingsphere-infra-data-source-pool-type pom ${project.artifactId} c3p0 dbcp - druid hikari diff --git a/infra/database/core/src/main/java/org/apache/shardingsphere/infra/database/core/metadata/database/DialectDatabaseMetaData.java b/infra/database/core/src/main/java/org/apache/shardingsphere/infra/database/core/metadata/database/DialectDatabaseMetaData.java index 6827475c1beda..f9a7457c93c56 100644 --- a/infra/database/core/src/main/java/org/apache/shardingsphere/infra/database/core/metadata/database/DialectDatabaseMetaData.java +++ 
b/infra/database/core/src/main/java/org/apache/shardingsphere/infra/database/core/metadata/database/DialectDatabaseMetaData.java @@ -115,9 +115,18 @@ default String formatTableNamePattern(final String tableNamePattern) { /** * Is instance connection available. * - * @return true or false + * @return available or not */ default boolean isInstanceConnectionAvailable() { return false; } + + /** + * Is support three tier storage structure. + * + * @return support or not + */ + default boolean isSupportThreeTierStorageStructure() { + return false; + } } diff --git a/infra/database/type/h2/src/main/java/org/apache/shardingsphere/infra/database/h2/metadata/data/loader/H2MetaDataLoader.java b/infra/database/type/h2/src/main/java/org/apache/shardingsphere/infra/database/h2/metadata/data/loader/H2MetaDataLoader.java index fac7aa8f9e0c9..50b3251d42bc4 100644 --- a/infra/database/type/h2/src/main/java/org/apache/shardingsphere/infra/database/h2/metadata/data/loader/H2MetaDataLoader.java +++ b/infra/database/type/h2/src/main/java/org/apache/shardingsphere/infra/database/h2/metadata/data/loader/H2MetaDataLoader.java @@ -51,7 +51,7 @@ public final class H2MetaDataLoader implements DialectMetaDataLoader { private static final String TABLE_META_DATA_SQL_IN_TABLES = TABLE_META_DATA_NO_ORDER + " AND UPPER(TABLE_NAME) IN (%s)" + ORDER_BY_ORDINAL_POSITION; - private static final String INDEX_META_DATA_SQL = "SELECT TABLE_CATALOG, TABLE_NAME, INDEX_NAME FROM INFORMATION_SCHEMA.INDEXES" + private static final String INDEX_META_DATA_SQL = "SELECT TABLE_CATALOG, TABLE_NAME, INDEX_NAME, INDEX_TYPE_NAME FROM INFORMATION_SCHEMA.INDEXES" + " WHERE TABLE_CATALOG=? AND TABLE_SCHEMA=? AND UPPER(TABLE_NAME) IN (%s)"; private static final String PRIMARY_KEY_META_DATA_SQL = "SELECT TABLE_NAME, INDEX_NAME FROM INFORMATION_SCHEMA.INDEXES WHERE TABLE_CATALOG=? AND TABLE_SCHEMA=?" 
@@ -126,10 +126,14 @@ private Map> loadIndexMetaData(final Connectio while (resultSet.next()) { String indexName = resultSet.getString("INDEX_NAME"); String tableName = resultSet.getString("TABLE_NAME"); + boolean uniqueIndex = "UNIQUE INDEX".equals(resultSet.getString("INDEX_TYPE_NAME")); if (!result.containsKey(tableName)) { result.put(tableName, new LinkedList<>()); } - result.get(tableName).add(new IndexMetaData(indexName)); + IndexMetaData indexMetaData = new IndexMetaData(indexName); + indexMetaData.setUnique(uniqueIndex); + result.get(tableName).add(indexMetaData); + } } } diff --git a/infra/database/type/h2/src/test/java/org/apache/shardingsphere/infra/database/h2/metadata/data/loader/H2MetaDataLoaderTest.java b/infra/database/type/h2/src/test/java/org/apache/shardingsphere/infra/database/h2/metadata/data/loader/H2MetaDataLoaderTest.java index 30ad5418a051b..38e548485aed2 100644 --- a/infra/database/type/h2/src/test/java/org/apache/shardingsphere/infra/database/h2/metadata/data/loader/H2MetaDataLoaderTest.java +++ b/infra/database/type/h2/src/test/java/org/apache/shardingsphere/infra/database/h2/metadata/data/loader/H2MetaDataLoaderTest.java @@ -55,7 +55,7 @@ void assertLoadWithoutTables() throws SQLException { .executeQuery()).thenReturn(resultSet); ResultSet indexResultSet = mockIndexMetaDataResultSet(); when(dataSource.getConnection().prepareStatement( - "SELECT TABLE_CATALOG, TABLE_NAME, INDEX_NAME FROM INFORMATION_SCHEMA.INDEXES WHERE TABLE_CATALOG=? AND TABLE_SCHEMA=? AND UPPER(TABLE_NAME) IN ('TBL')") + "SELECT TABLE_CATALOG, TABLE_NAME, INDEX_NAME, INDEX_TYPE_NAME FROM INFORMATION_SCHEMA.INDEXES WHERE TABLE_CATALOG=? AND TABLE_SCHEMA=? 
AND UPPER(TABLE_NAME) IN ('TBL')") .executeQuery()).thenReturn(indexResultSet); ResultSet primaryKeys = mockPrimaryKeysMetaDataResultSet(); when(dataSource.getConnection().prepareStatement( @@ -78,7 +78,7 @@ void assertLoadWithTables() throws SQLException { .executeQuery()).thenReturn(resultSet); ResultSet indexResultSet = mockIndexMetaDataResultSet(); when(dataSource.getConnection().prepareStatement( - "SELECT TABLE_CATALOG, TABLE_NAME, INDEX_NAME FROM INFORMATION_SCHEMA.INDEXES WHERE TABLE_CATALOG=? AND TABLE_SCHEMA=? AND UPPER(TABLE_NAME) IN ('TBL')") + "SELECT TABLE_CATALOG, TABLE_NAME, INDEX_NAME, INDEX_TYPE_NAME FROM INFORMATION_SCHEMA.INDEXES WHERE TABLE_CATALOG=? AND TABLE_SCHEMA=? AND UPPER(TABLE_NAME) IN ('TBL')") .executeQuery()).thenReturn(indexResultSet); ResultSet primaryKeys = mockPrimaryKeysMetaDataResultSet(); when(dataSource.getConnection().prepareStatement( @@ -142,6 +142,7 @@ private ResultSet mockIndexMetaDataResultSet() throws SQLException { when(result.next()).thenReturn(true, false); when(result.getString("INDEX_NAME")).thenReturn("id"); when(result.getString("TABLE_NAME")).thenReturn("tbl"); + when(result.getString("INDEX_TYPE_NAME")).thenReturn("UNIQUE INDEX"); return result; } @@ -160,6 +161,8 @@ private void assertTableMetaDataMap(final Collection schemaMetaD assertThat(columnsIterator.next(), is(new ColumnMetaData("name", Types.VARCHAR, false, false, false, false, false, true))); assertThat(actualTableMetaData.getIndexes().size(), is(1)); Iterator indexesIterator = actualTableMetaData.getIndexes().iterator(); - assertThat(indexesIterator.next(), is(new IndexMetaData("id"))); + IndexMetaData indexMetaData = new IndexMetaData("id"); + indexMetaData.setUnique(true); + assertThat(indexesIterator.next(), is(indexMetaData)); } } diff --git a/infra/database/type/mysql/src/main/java/org/apache/shardingsphere/infra/database/mysql/metadata/database/MySQLDatabaseMetaData.java 
b/infra/database/type/mysql/src/main/java/org/apache/shardingsphere/infra/database/mysql/metadata/database/MySQLDatabaseMetaData.java index d6ccb6ab02e32..078436c0287c4 100644 --- a/infra/database/type/mysql/src/main/java/org/apache/shardingsphere/infra/database/mysql/metadata/database/MySQLDatabaseMetaData.java +++ b/infra/database/type/mysql/src/main/java/org/apache/shardingsphere/infra/database/mysql/metadata/database/MySQLDatabaseMetaData.java @@ -87,6 +87,11 @@ public boolean isInstanceConnectionAvailable() { return true; } + @Override + public boolean isSupportThreeTierStorageStructure() { + return true; + } + @Override public String getDatabaseType() { return "MySQL"; diff --git a/infra/database/type/mysql/src/main/java/org/apache/shardingsphere/infra/database/mysql/resultset/MySQLResultSetMapper.java b/infra/database/type/mysql/src/main/java/org/apache/shardingsphere/infra/database/mysql/resultset/MySQLResultSetMapper.java index eaa790308cc0b..51dccc7cce8fe 100644 --- a/infra/database/type/mysql/src/main/java/org/apache/shardingsphere/infra/database/mysql/resultset/MySQLResultSetMapper.java +++ b/infra/database/type/mysql/src/main/java/org/apache/shardingsphere/infra/database/mysql/resultset/MySQLResultSetMapper.java @@ -37,8 +37,7 @@ public Object getSmallintValue(final ResultSet resultSet, final int columnIndex) @Override public Object getDateValue(final ResultSet resultSet, final int columnIndex) throws SQLException { if (isYearDataType(resultSet.getMetaData().getColumnTypeName(columnIndex))) { - Object result = resultSet.getObject(columnIndex); - return resultSet.wasNull() ? null : result; + return resultSet.wasNull() ? 
null : resultSet.getObject(columnIndex); } return resultSet.getDate(columnIndex); } diff --git a/infra/database/type/mysql/src/test/java/org/apache/shardingsphere/infra/database/mysql/resultset/MySQLResultSetMapperTest.java b/infra/database/type/mysql/src/test/java/org/apache/shardingsphere/infra/database/mysql/resultset/MySQLResultSetMapperTest.java new file mode 100644 index 0000000000000..b90e45cb7d9c8 --- /dev/null +++ b/infra/database/type/mysql/src/test/java/org/apache/shardingsphere/infra/database/mysql/resultset/MySQLResultSetMapperTest.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.infra.database.mysql.resultset; + +import org.apache.shardingsphere.infra.database.core.resultset.DialectResultSetMapper; +import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Answers; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; + +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) +class MySQLResultSetMapperTest { + + private final DialectResultSetMapper dialectResultSetMapper = DatabaseTypedSPILoader.getService(DialectResultSetMapper.class, TypedSPILoader.getService(DatabaseType.class, "MySQL")); + + @Mock(answer = Answers.RETURNS_DEEP_STUBS) + private ResultSet resultSet; + + @Test + void assertGetSmallintValue() throws SQLException { + when(resultSet.getInt(1)).thenReturn(0); + assertThat(dialectResultSetMapper.getSmallintValue(resultSet, 1), is(0)); + } + + @Test + void assertGetDateValueWithYearDataTypeAndNotNullValue() throws SQLException { + when(resultSet.getMetaData().getColumnTypeName(1)).thenReturn("YEAR"); + Object expectedObject = new Object(); + when(resultSet.getObject(1)).thenReturn(expectedObject); + assertThat(dialectResultSetMapper.getDateValue(resultSet, 1), is(expectedObject)); + } + + @Test + void assertGetDateValueWithYearDataTypeAndNullValue() throws SQLException { + 
when(resultSet.getMetaData().getColumnTypeName(1)).thenReturn("YEAR"); + when(resultSet.wasNull()).thenReturn(true); + assertNull(dialectResultSetMapper.getDateValue(resultSet, 1)); + } + + @Test + void assertGetDateValueWithNotYearDataType() throws SQLException { + when(resultSet.getMetaData().getColumnTypeName(1)).thenReturn("DATE"); + when(resultSet.getDate(1)).thenReturn(new Date(0L)); + assertThat(dialectResultSetMapper.getDateValue(resultSet, 1), is(new Date(0L))); + } +} diff --git a/infra/database/type/oracle/src/main/java/org/apache/shardingsphere/infra/database/oracle/metadata/data/loader/OracleMetaDataLoader.java b/infra/database/type/oracle/src/main/java/org/apache/shardingsphere/infra/database/oracle/metadata/data/loader/OracleMetaDataLoader.java index 982065ca52fd7..7ff5e560c4b8d 100644 --- a/infra/database/type/oracle/src/main/java/org/apache/shardingsphere/infra/database/oracle/metadata/data/loader/OracleMetaDataLoader.java +++ b/infra/database/type/oracle/src/main/java/org/apache/shardingsphere/infra/database/oracle/metadata/data/loader/OracleMetaDataLoader.java @@ -58,13 +58,15 @@ public final class OracleMetaDataLoader implements DialectMetaDataLoader { private static final String TABLE_META_DATA_SQL_IN_TABLES = TABLE_META_DATA_SQL_NO_ORDER + " AND TABLE_NAME IN (%s)" + ORDER_BY_COLUMN_ID; - private static final String INDEX_META_DATA_SQL = "SELECT OWNER AS TABLE_SCHEMA, TABLE_NAME, INDEX_NAME FROM ALL_INDEXES WHERE OWNER = ? AND TABLE_NAME IN (%s)"; + private static final String INDEX_META_DATA_SQL = "SELECT OWNER AS TABLE_SCHEMA, TABLE_NAME, INDEX_NAME, UNIQUENESS FROM ALL_INDEXES WHERE OWNER = ? 
AND TABLE_NAME IN (%s)"; private static final String PRIMARY_KEY_META_DATA_SQL = "SELECT A.OWNER AS TABLE_SCHEMA, A.TABLE_NAME AS TABLE_NAME, B.COLUMN_NAME AS COLUMN_NAME FROM ALL_CONSTRAINTS A INNER JOIN" + " ALL_CONS_COLUMNS B ON A.CONSTRAINT_NAME = B.CONSTRAINT_NAME WHERE CONSTRAINT_TYPE = 'P' AND A.OWNER = '%s'"; private static final String PRIMARY_KEY_META_DATA_SQL_IN_TABLES = PRIMARY_KEY_META_DATA_SQL + " AND A.TABLE_NAME IN (%s)"; + private static final String INDEX_COLUMN_META_DATA_SQL = "SELECT COLUMN_NAME FROM ALL_IND_COLUMNS WHERE INDEX_OWNER = ? AND TABLE_NAME = ? AND INDEX_NAME = ?"; + private static final int COLLATION_START_MAJOR_VERSION = 12; private static final int COLLATION_START_MINOR_VERSION = 2; @@ -118,8 +120,8 @@ private ColumnMetaData loadColumnMetaData(final Map dataTypeMap boolean primaryKey = primaryKeys.contains(columnName); boolean generated = versionContainsIdentityColumn(databaseMetaData) && "YES".equals(resultSet.getString("IDENTITY_COLUMN")); // TODO need to support caseSensitive when version < 12.2. - String collation = resultSet.getString("COLLATION"); - boolean caseSensitive = versionContainsCollation(databaseMetaData) && null != collation && collation.endsWith("_CS"); + String collation = versionContainsCollation(databaseMetaData) ? 
resultSet.getString("COLLATION") : null; + boolean caseSensitive = null != collation && collation.endsWith("_CS"); boolean isVisible = "NO".equals(resultSet.getString("HIDDEN_COLUMN")); boolean nullable = "Y".equals(resultSet.getString("NULLABLE")); return new ColumnMetaData(columnName, dataTypeMap.get(dataType), primaryKey, generated, caseSensitive, isVisible, false, nullable); @@ -162,17 +164,36 @@ private Map> loadIndexMetaData(final Connectio while (resultSet.next()) { String indexName = resultSet.getString("INDEX_NAME"); String tableName = resultSet.getString("TABLE_NAME"); + boolean isUnique = "UNIQUE".equals(resultSet.getString("UNIQUENESS")); if (!result.containsKey(tableName)) { result.put(tableName, new LinkedList<>()); } - result.get(tableName).add(new IndexMetaData(indexName)); + IndexMetaData indexMetaData = new IndexMetaData(indexName); + indexMetaData.setUnique(isUnique); + indexMetaData.getColumns().addAll(loadIndexColumnNames(connection, tableName, indexName)); + result.get(tableName).add(indexMetaData); } } } return result; } + private List loadIndexColumnNames(final Connection connection, final String tableName, final String indexName) throws SQLException { + try (PreparedStatement preparedStatement = connection.prepareStatement(INDEX_COLUMN_META_DATA_SQL)) { + preparedStatement.setString(1, connection.getSchema()); + preparedStatement.setString(2, tableName); + preparedStatement.setString(3, indexName); + List result = new LinkedList<>(); + ResultSet resultSet = preparedStatement.executeQuery(); + while (resultSet.next()) { + result.add(resultSet.getString("COLUMN_NAME")); + } + return result; + } + } + private String getIndexMetaDataSQL(final Collection tableNames) { + // TODO The table name needs to be in uppercase, otherwise the index cannot be found. 
return String.format(INDEX_META_DATA_SQL, tableNames.stream().map(each -> String.format("'%s'", each)).collect(Collectors.joining(","))); } diff --git a/infra/database/type/oracle/src/test/java/org/apache/shardingsphere/infra/database/oracle/metadata/data/loader/OracleMetaDataLoaderTest.java b/infra/database/type/oracle/src/test/java/org/apache/shardingsphere/infra/database/oracle/metadata/data/loader/OracleMetaDataLoaderTest.java index 05409c711edd9..0b56585ef1257 100644 --- a/infra/database/type/oracle/src/test/java/org/apache/shardingsphere/infra/database/oracle/metadata/data/loader/OracleMetaDataLoaderTest.java +++ b/infra/database/type/oracle/src/test/java/org/apache/shardingsphere/infra/database/oracle/metadata/data/loader/OracleMetaDataLoaderTest.java @@ -51,7 +51,7 @@ class OracleMetaDataLoaderTest { private static final String ALL_CONSTRAINTS_SQL_WITH_TABLES = "SELECT A.OWNER AS TABLE_SCHEMA, A.TABLE_NAME AS TABLE_NAME, B.COLUMN_NAME AS COLUMN_NAME FROM ALL_CONSTRAINTS A" + " INNER JOIN ALL_CONS_COLUMNS B ON A.CONSTRAINT_NAME = B.CONSTRAINT_NAME WHERE CONSTRAINT_TYPE = 'P' AND A.OWNER = 'TEST' AND A.TABLE_NAME IN ('tbl')"; - private static final String ALL_INDEXES_SQL = "SELECT OWNER AS TABLE_SCHEMA, TABLE_NAME, INDEX_NAME FROM ALL_INDEXES WHERE OWNER = ? AND TABLE_NAME IN ('tbl')"; + private static final String ALL_INDEXES_SQL = "SELECT OWNER AS TABLE_SCHEMA, TABLE_NAME, INDEX_NAME, UNIQUENESS FROM ALL_INDEXES WHERE OWNER = ? AND TABLE_NAME IN ('tbl')"; private static final String ALL_TAB_COLUMNS_SQL_CONDITION1 = "SELECT OWNER AS TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, NULLABLE, DATA_TYPE, COLUMN_ID, HIDDEN_COLUMN , IDENTITY_COLUMN, COLLATION" + " FROM ALL_TAB_COLS WHERE OWNER = ? 
AND TABLE_NAME IN ('tbl') ORDER BY COLUMN_ID"; @@ -259,6 +259,7 @@ private ResultSet mockIndexMetaDataResultSet() throws SQLException { when(result.next()).thenReturn(true, false); when(result.getString("INDEX_NAME")).thenReturn("id"); when(result.getString("TABLE_NAME")).thenReturn("tbl"); + when(result.getString("UNIQUENESS")).thenReturn("UNIQUE"); return result; } @@ -282,6 +283,8 @@ private void assertTableMetaDataMap(final Collection schemaMetaD assertThat(actualTableMetaData.getColumns().size(), is(3)); assertThat(actualTableMetaData.getIndexes().size(), is(1)); Iterator indexesIterator = actualTableMetaData.getIndexes().iterator(); - assertThat(indexesIterator.next(), is(new IndexMetaData("id"))); + IndexMetaData indexMetaData = new IndexMetaData("id"); + indexMetaData.setUnique(true); + assertThat(indexesIterator.next(), is(indexMetaData)); } } diff --git a/infra/database/type/postgresql/src/main/java/org/apache/shardingsphere/infra/database/postgresql/metadata/data/loader/PostgreSQLMetaDataLoader.java b/infra/database/type/postgresql/src/main/java/org/apache/shardingsphere/infra/database/postgresql/metadata/data/loader/PostgreSQLMetaDataLoader.java index 8dc69b40f1254..6c987b3cf08f5 100644 --- a/infra/database/type/postgresql/src/main/java/org/apache/shardingsphere/infra/database/postgresql/metadata/data/loader/PostgreSQLMetaDataLoader.java +++ b/infra/database/type/postgresql/src/main/java/org/apache/shardingsphere/infra/database/postgresql/metadata/data/loader/PostgreSQLMetaDataLoader.java @@ -40,6 +40,7 @@ import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; @@ -66,6 +67,11 @@ public final class PostgreSQLMetaDataLoader implements DialectMetaDataLoader { private static final String BASIC_INDEX_META_DATA_SQL = "SELECT tablename, indexname, schemaname FROM pg_indexes WHERE schemaname IN (%s)"; + private static final String 
ADVANCE_INDEX_META_DATA_SQL = + "SELECT idx.relname as index_name, insp.nspname as index_schema, tbl.relname as table_name, att.attname AS column_name, pgi.indisunique as is_unique" + + " FROM pg_index pgi JOIN pg_class idx ON idx.oid = pgi.indexrelid JOIN pg_namespace insp ON insp.oid = idx.relnamespace JOIN pg_class tbl ON tbl.oid = pgi.indrelid" + + " JOIN pg_namespace tnsp ON tnsp.oid = tbl.relnamespace JOIN pg_attribute att ON att.attrelid = tbl.oid AND att.attnum = ANY(pgi.indkey) WHERE tnsp.nspname IN (%s)"; + private static final String LOAD_ALL_ROLE_TABLE_GRANTS_SQL = "SELECT table_name FROM information_schema.role_table_grants"; private static final String LOAD_FILTERED_ROLE_TABLE_GRANTS_SQL = LOAD_ALL_ROLE_TABLE_GRANTS_SQL + " WHERE table_name IN (%s)"; @@ -99,6 +105,24 @@ private Map> loadIndexMetaDataMap(final indexMetaDataMap.put(tableName, new IndexMetaData(indexName)); } } + try (PreparedStatement preparedStatement = connection.prepareStatement(getAdvanceIndexMetaDataSQL(schemaNames)); ResultSet resultSet = preparedStatement.executeQuery()) { + while (resultSet.next()) { + String schemaName = resultSet.getString("index_schema"); + String tableName = resultSet.getString("table_name"); + String columnName = resultSet.getString("column_name"); + String indexName = resultSet.getString("index_name"); + boolean isUnique = resultSet.getBoolean("is_unique"); + Collection indexMetaDatas = result.getOrDefault(schemaName, LinkedHashMultimap.create()).get(tableName); + if (null == indexMetaDatas || indexMetaDatas.isEmpty()) { + continue; + } + Optional indexMetaData = indexMetaDatas.stream().filter(each -> each.getName().equals(indexName)).findFirst(); + if (indexMetaData.isPresent()) { + indexMetaData.get().setUnique(isUnique); + indexMetaData.get().getColumns().add(columnName); + } + } + } return result; } @@ -106,6 +130,10 @@ private String getIndexMetaDataSQL(final Collection schemaNames) { return String.format(BASIC_INDEX_META_DATA_SQL, 
schemaNames.stream().map(each -> String.format("'%s'", each)).collect(Collectors.joining(","))); } + private String getAdvanceIndexMetaDataSQL(final Collection schemaNames) { + return String.format(ADVANCE_INDEX_META_DATA_SQL, schemaNames.stream().map(each -> String.format("'%s'", each)).collect(Collectors.joining(","))); + } + private Map> loadColumnMetaDataMap(final Connection connection, final Collection tables, final Collection schemaNames) throws SQLException { Map> result = new LinkedHashMap<>(); diff --git a/infra/database/type/postgresql/src/test/java/org/apache/shardingsphere/infra/database/postgresql/metadata/data/loader/PostgreSQLMetaDataLoaderTest.java b/infra/database/type/postgresql/src/test/java/org/apache/shardingsphere/infra/database/postgresql/metadata/data/loader/PostgreSQLMetaDataLoaderTest.java index 68c9e125da780..3b8d7960f8490 100644 --- a/infra/database/type/postgresql/src/test/java/org/apache/shardingsphere/infra/database/postgresql/metadata/data/loader/PostgreSQLMetaDataLoaderTest.java +++ b/infra/database/type/postgresql/src/test/java/org/apache/shardingsphere/infra/database/postgresql/metadata/data/loader/PostgreSQLMetaDataLoaderTest.java @@ -60,6 +60,11 @@ class PostgreSQLMetaDataLoaderTest { private static final String BASIC_INDEX_META_DATA_SQL = "SELECT tablename, indexname, schemaname FROM pg_indexes WHERE schemaname IN ('public')"; + private static final String ADVANCE_INDEX_META_DATA_SQL = + "SELECT idx.relname as index_name, insp.nspname as index_schema, tbl.relname as table_name, att.attname AS column_name, pgi.indisunique as is_unique" + + " FROM pg_index pgi JOIN pg_class idx ON idx.oid = pgi.indexrelid JOIN pg_namespace insp ON insp.oid = idx.relnamespace JOIN pg_class tbl ON tbl.oid = pgi.indrelid" + + " JOIN pg_namespace tnsp ON tnsp.oid = tbl.relnamespace JOIN pg_attribute att ON att.attrelid = tbl.oid AND att.attnum = ANY(pgi.indkey) WHERE tnsp.nspname IN ('public')"; + private static final String 
BASIC_CONSTRAINT_META_DATA_SQL = "SELECT tc.table_schema,tc.table_name,tc.constraint_name,pgo.relname refer_table_name FROM information_schema.table_constraints tc " + "JOIN pg_constraint pgc ON tc.constraint_name = pgc.conname AND contype='f' " + "JOIN pg_class pgo ON pgc.confrelid = pgo.oid " @@ -78,6 +83,8 @@ void assertLoadWithoutTables() throws SQLException { when(dataSource.getConnection().prepareStatement(PRIMARY_KEY_META_DATA_SQL).executeQuery()).thenReturn(primaryKeyResultSet); ResultSet indexResultSet = mockIndexMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(BASIC_INDEX_META_DATA_SQL).executeQuery()).thenReturn(indexResultSet); + ResultSet advanceIndexResultSet = mockAdvanceIndexMetaDataResultSet(); + when(dataSource.getConnection().prepareStatement(ADVANCE_INDEX_META_DATA_SQL).executeQuery()).thenReturn(advanceIndexResultSet); ResultSet constraintResultSet = mockConstraintMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(BASIC_CONSTRAINT_META_DATA_SQL).executeQuery()).thenReturn(constraintResultSet); ResultSet roleTableGrantsResultSet = mockRoleTableGrantsResultSet(); @@ -103,6 +110,8 @@ void assertLoadWithTables() throws SQLException { when(dataSource.getConnection().prepareStatement(PRIMARY_KEY_META_DATA_SQL).executeQuery()).thenReturn(primaryKeyResultSet); ResultSet indexResultSet = mockIndexMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(BASIC_INDEX_META_DATA_SQL).executeQuery()).thenReturn(indexResultSet); + ResultSet advanceIndexResultSet = mockAdvanceIndexMetaDataResultSet(); + when(dataSource.getConnection().prepareStatement(ADVANCE_INDEX_META_DATA_SQL).executeQuery()).thenReturn(advanceIndexResultSet); ResultSet constraintResultSet = mockConstraintMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(BASIC_CONSTRAINT_META_DATA_SQL).executeQuery()).thenReturn(constraintResultSet); ResultSet roleTableGrantsResultSet = mockRoleTableGrantsResultSet(); @@ -164,6 +173,17 
@@ private ResultSet mockIndexMetaDataResultSet() throws SQLException { return result; } + private ResultSet mockAdvanceIndexMetaDataResultSet() throws SQLException { + ResultSet result = mock(ResultSet.class); + when(result.next()).thenReturn(true, false); + when(result.getString("table_name")).thenReturn("tbl"); + when(result.getString("column_name")).thenReturn("id"); + when(result.getString("index_name")).thenReturn("id"); + when(result.getString("index_schema")).thenReturn("public"); + when(result.getBoolean("is_unique")).thenReturn(true); + return result; + } + private ResultSet mockConstraintMetaDataResultSet() throws SQLException { ResultSet result = mock(ResultSet.class); when(result.next()).thenReturn(true, false); @@ -189,7 +209,10 @@ private void assertTableMetaDataMap(final Collection schemaMetaD assertThat(columnsIterator.next(), is(new ColumnMetaData("name", Types.VARCHAR, false, false, true, true, false, true))); assertThat(actualTableMetaData.getIndexes().size(), is(1)); Iterator indexesIterator = actualTableMetaData.getIndexes().iterator(); - assertThat(indexesIterator.next(), is(new IndexMetaData("id"))); + IndexMetaData indexMetaData = new IndexMetaData("id"); + indexMetaData.setUnique(true); + indexMetaData.getColumns().add("id"); + assertThat(indexesIterator.next(), is(indexMetaData)); assertThat(actualTableMetaData.getConstraints().size(), is(1)); Iterator constrainsIterator = actualTableMetaData.getConstraints().iterator(); assertThat(constrainsIterator.next(), is(new ConstraintMetaData("tbl_con", "refer_tbl"))); diff --git a/infra/database/type/sqlserver/src/main/java/org/apache/shardingsphere/infra/database/sqlserver/metadata/data/loader/SQLServerMetaDataLoader.java b/infra/database/type/sqlserver/src/main/java/org/apache/shardingsphere/infra/database/sqlserver/metadata/data/loader/SQLServerMetaDataLoader.java index aa4fbdf581132..46df79e10aef4 100644 --- 
a/infra/database/type/sqlserver/src/main/java/org/apache/shardingsphere/infra/database/sqlserver/metadata/data/loader/SQLServerMetaDataLoader.java +++ b/infra/database/type/sqlserver/src/main/java/org/apache/shardingsphere/infra/database/sqlserver/metadata/data/loader/SQLServerMetaDataLoader.java @@ -55,8 +55,11 @@ public final class SQLServerMetaDataLoader implements DialectMetaDataLoader { private static final String TABLE_META_DATA_SQL_IN_TABLES = TABLE_META_DATA_SQL_NO_ORDER + " WHERE obj.name IN (%s)" + ORDER_BY_COLUMN_ID; - private static final String INDEX_META_DATA_SQL = "SELECT a.name AS INDEX_NAME, c.name AS TABLE_NAME FROM sys.indexes a" - + " JOIN sys.objects c ON a.object_id = c.object_id WHERE a.index_id NOT IN (0, 255) AND c.name IN (%s)"; + private static final String INDEX_META_DATA_SQL = "SELECT idx.name AS INDEX_NAME, obj.name AS TABLE_NAME, col.name AS COLUMN_NAME," + + " idx.is_unique AS IS_UNIQUE FROM sys.indexes idx" + + " LEFT JOIN sys.objects obj ON idx.object_id = obj.object_id" + + " LEFT JOIN sys.columns col ON obj.object_id = col.object_id" + + " WHERE idx.index_id NOT IN (0, 255) AND obj.name IN (%s) ORDER BY idx.index_id"; private static final int HIDDEN_COLUMN_START_MAJOR_VERSION = 15; @@ -121,7 +124,7 @@ private boolean versionContainsHiddenColumn(final DatabaseMetaData databaseMetaD } private Map> loadIndexMetaData(final DataSource dataSource, final Collection tableNames) throws SQLException { - Map> result = new HashMap<>(); + Map> tableToIndex = new HashMap<>(); try ( Connection connection = dataSource.getConnection(); PreparedStatement preparedStatement = connection.prepareStatement(getIndexMetaDataSQL(tableNames))) { @@ -129,13 +132,25 @@ private Map> loadIndexMetaData(final DataSourc while (resultSet.next()) { String indexName = resultSet.getString("INDEX_NAME"); String tableName = resultSet.getString("TABLE_NAME"); - if (!result.containsKey(tableName)) { - result.put(tableName, new LinkedList<>()); + if 
(!tableToIndex.containsKey(tableName)) { + tableToIndex.put(tableName, new HashMap<>()); + } + Map indexMap = tableToIndex.get(tableName); + if (indexMap.containsKey(indexName)) { + indexMap.get(indexName).getColumns().add(resultSet.getString("COLUMN_NAME")); + } else { + IndexMetaData indexMetaData = new IndexMetaData(indexName); + indexMetaData.getColumns().add(resultSet.getString("COLUMN_NAME")); + indexMetaData.setUnique("1".equals(resultSet.getString("IS_UNIQUE"))); + indexMap.put(indexName, indexMetaData); } - result.get(tableName).add(new IndexMetaData(indexName)); } } } + Map> result = new HashMap<>(tableToIndex.size(), 1); + for (Entry> each : tableToIndex.entrySet()) { + result.put(each.getKey(), each.getValue().values()); + } return result; } diff --git a/infra/database/type/sqlserver/src/test/java/org/apache/shardingsphere/infra/database/sqlserver/metadata/data/loader/SQLServerMetaDataLoaderTest.java b/infra/database/type/sqlserver/src/test/java/org/apache/shardingsphere/infra/database/sqlserver/metadata/data/loader/SQLServerMetaDataLoaderTest.java index ec7587baa5886..752d9eb05bd10 100644 --- a/infra/database/type/sqlserver/src/test/java/org/apache/shardingsphere/infra/database/sqlserver/metadata/data/loader/SQLServerMetaDataLoaderTest.java +++ b/infra/database/type/sqlserver/src/test/java/org/apache/shardingsphere/infra/database/sqlserver/metadata/data/loader/SQLServerMetaDataLoaderTest.java @@ -71,8 +71,11 @@ class SQLServerMetaDataLoaderTest { + " FROM sys.objects obj INNER JOIN sys.columns col ON obj.object_id = col.object_id LEFT JOIN sys.types t ON t.user_type_id = col.user_type_id" + " WHERE obj.name IN ('tbl') ORDER BY col.column_id"; - private static final String LOAD_INDEX_META_DATA = "SELECT a.name AS INDEX_NAME, c.name AS TABLE_NAME FROM sys.indexes a" - + " JOIN sys.objects c ON a.object_id = c.object_id WHERE a.index_id NOT IN (0, 255) AND c.name IN ('tbl')"; + private static final String LOAD_INDEX_META_DATA = "SELECT idx.name AS 
INDEX_NAME, obj.name AS TABLE_NAME, col.name AS COLUMN_NAME," + + " idx.is_unique AS IS_UNIQUE FROM sys.indexes idx" + + " LEFT JOIN sys.objects obj ON idx.object_id = obj.object_id" + + " LEFT JOIN sys.columns col ON obj.object_id = col.object_id" + + " WHERE idx.index_id NOT IN (0, 255) AND obj.name IN ('tbl') ORDER BY idx.index_id"; @Test void assertLoadWithoutTablesWithHighVersion() throws SQLException { @@ -178,6 +181,8 @@ private ResultSet mockIndexMetaDataResultSet() throws SQLException { when(result.next()).thenReturn(true, false); when(result.getString("INDEX_NAME")).thenReturn("id"); when(result.getString("TABLE_NAME")).thenReturn("tbl"); + when(result.getString("COLUMN_NAME")).thenReturn("id"); + when(result.getString("IS_UNIQUE")).thenReturn("1"); return result; } @@ -193,6 +198,9 @@ private void assertTableMetaDataMap(final Collection schemaMetaD assertThat(actualTableMetaData.getColumns().size(), is(2)); assertThat(actualTableMetaData.getIndexes().size(), is(1)); Iterator indexesIterator = actualTableMetaData.getIndexes().iterator(); - assertThat(indexesIterator.next(), is(new IndexMetaData("id"))); + IndexMetaData expected = new IndexMetaData("id"); + expected.getColumns().add("id"); + expected.setUnique(true); + assertThat(indexesIterator.next(), is(expected)); } } diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreator.java b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreator.java deleted file mode 100644 index e1c864eb8cbff..0000000000000 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/creator/DataSourcePoolCreator.java +++ /dev/null @@ -1,272 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.infra.datasource.pool.creator; - -import lombok.AccessLevel; -import lombok.NoArgsConstructor; -import lombok.SneakyThrows; -import org.apache.shardingsphere.infra.database.core.GlobalDataSourceRegistry; -import org.apache.shardingsphere.infra.database.core.connector.url.JdbcUrl; -import org.apache.shardingsphere.infra.database.core.connector.url.StandardJdbcUrlParser; -import org.apache.shardingsphere.infra.database.core.connector.url.UnrecognizedDatabaseURLException; -import org.apache.shardingsphere.infra.database.core.metadata.database.DialectDatabaseMetaData; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeFactory; -import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; -import org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolDestroyer; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaDataReflection; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.custom.CustomDataSourceProperties; -import org.apache.shardingsphere.infra.datasource.storage.StorageNodeProperties; -import 
org.apache.shardingsphere.infra.datasource.storage.StorageResource; -import org.apache.shardingsphere.infra.datasource.storage.StorageResourceWithProperties; -import org.apache.shardingsphere.infra.datasource.storage.StorageUnit; -import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; - -import javax.sql.DataSource; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; - -/** - * Data source pool creator. - */ -@NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class DataSourcePoolCreator { - - /** - * Create storage resource. - * - * @param dataSourcePropsMap data source properties map - * @return created storage resource - */ - public static StorageResource createStorageResource(final Map dataSourcePropsMap) { - return createStorageResource(dataSourcePropsMap, true); - } - - /** - * Create storage resource. - * - * @param dataSourcePropsMap data source properties map - * @param cacheEnabled cache enabled - * @return created storage resource - */ - public static StorageResource createStorageResource(final Map dataSourcePropsMap, final boolean cacheEnabled) { - Map storageNodes = new LinkedHashMap<>(); - Map storageUnits = new LinkedHashMap<>(); - for (Entry entry : dataSourcePropsMap.entrySet()) { - StorageNodeProperties storageNodeProperties = getStorageNodeProperties(entry.getKey(), entry.getValue()); - if (storageNodes.containsKey(storageNodeProperties.getName())) { - appendStorageUnit(storageUnits, storageNodeProperties, entry.getKey(), entry.getValue()); - continue; - } - DataSource dataSource; - try { - dataSource = create(entry.getKey(), entry.getValue(), cacheEnabled); - // CHECKSTYLE:OFF - } catch (final RuntimeException ex) { - // CHECKSTYLE:ON - if (!cacheEnabled) { - storageNodes.values().stream().map(DataSourcePoolDestroyer::new).forEach(DataSourcePoolDestroyer::asyncDestroy); - } - throw ex; - } - storageNodes.put(storageNodeProperties.getName(), dataSource); - 
appendStorageUnit(storageUnits, storageNodeProperties, entry.getKey(), entry.getValue()); - } - return new StorageResource(storageNodes, storageUnits); - } - - /** - * Create storage resource without data source. - * - * @param dataSourcePropsMap data source properties map - * @return created storage resource - */ - public static StorageResourceWithProperties createStorageResourceWithoutDataSource(final Map dataSourcePropsMap) { - Map storageNodes = new LinkedHashMap<>(); - Map storageUnits = new LinkedHashMap<>(); - Map dataSourcePropertiesMap = new LinkedHashMap<>(); - for (Entry entry : dataSourcePropsMap.entrySet()) { - StorageNodeProperties storageNodeProperties = getStorageNodeProperties(entry.getKey(), entry.getValue()); - if (storageNodes.containsKey(storageNodeProperties.getName())) { - appendStorageUnit(storageUnits, storageNodeProperties, entry.getKey(), entry.getValue()); - continue; - } - storageNodes.put(storageNodeProperties.getName(), null); - appendStorageUnit(storageUnits, storageNodeProperties, entry.getKey(), entry.getValue()); - dataSourcePropertiesMap.put(storageNodeProperties.getName(), entry.getValue()); - } - return new StorageResourceWithProperties(storageNodes, storageUnits, dataSourcePropertiesMap); - } - - private static void appendStorageUnit(final Map storageUnits, final StorageNodeProperties storageNodeProperties, - final String unitName, final DataSourceProperties dataSourceProps) { - String url = dataSourceProps.getConnectionPropertySynonyms().getStandardProperties().get("url").toString(); - storageUnits.put(unitName, getStorageUnit(storageNodeProperties, unitName, url)); - } - - private static StorageUnit getStorageUnit(final StorageNodeProperties storageNodeProperties, final String unitName, final String url) { - DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(storageNodeProperties.getDatabaseType()).getDialectDatabaseMetaData(); - return dialectDatabaseMetaData.isInstanceConnectionAvailable() - ? 
new StorageUnit(unitName, storageNodeProperties.getName(), storageNodeProperties.getDatabase(), url) - : new StorageUnit(unitName, storageNodeProperties.getName(), url); - } - - private static StorageNodeProperties getStorageNodeProperties(final String dataSourceName, final DataSourceProperties dataSourceProperties) { - Map standardProperties = dataSourceProperties.getConnectionPropertySynonyms().getStandardProperties(); - String url = standardProperties.get("url").toString(); - String username = standardProperties.get("username").toString(); - DatabaseType databaseType = DatabaseTypeFactory.get(url); - return getStorageNodeProperties(dataSourceName, dataSourceProperties, url, username, databaseType); - } - - private static StorageNodeProperties getStorageNodeProperties(final String dataSourceName, final DataSourceProperties dataSourceProperties, - final String url, final String username, final DatabaseType databaseType) { - try { - JdbcUrl jdbcUrl = new StandardJdbcUrlParser().parse(url); - DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData(); - String nodeName = dialectDatabaseMetaData.isInstanceConnectionAvailable() ? generateStorageNodeName(jdbcUrl.getHostname(), jdbcUrl.getPort(), username) : dataSourceName; - return new StorageNodeProperties(nodeName, databaseType, dataSourceProperties, jdbcUrl.getDatabase()); - } catch (final UnrecognizedDatabaseURLException ex) { - return new StorageNodeProperties(dataSourceName, databaseType, dataSourceProperties, null); - } - } - - private static String generateStorageNodeName(final String hostname, final int port, final String username) { - return String.format("%s_%s_%s", hostname, port, username); - } - - /** - * Create data sources. 
- * - * @param dataSourcePropsMap data source properties map - * @return created data sources - */ - public static Map create(final Map dataSourcePropsMap) { - return create(dataSourcePropsMap, true); - } - - /** - * Create data sources. - * - * @param dataSourcePropsMap data source properties map - * @param cacheEnabled cache enabled - * @return created data sources - */ - public static Map create(final Map dataSourcePropsMap, final boolean cacheEnabled) { - Map result = new LinkedHashMap<>(); - for (Entry entry : dataSourcePropsMap.entrySet()) { - DataSource dataSource; - try { - dataSource = create(entry.getKey(), entry.getValue(), cacheEnabled); - // CHECKSTYLE:OFF - } catch (final RuntimeException ex) { - // CHECKSTYLE:ON - if (!cacheEnabled) { - result.values().stream().map(DataSourcePoolDestroyer::new).forEach(DataSourcePoolDestroyer::asyncDestroy); - } - throw ex; - } - result.put(entry.getKey(), dataSource); - } - return result; - } - - /** - * Create data source. - * - * @param dataSourceProps data source properties - * @return created data source - */ - public static DataSource create(final DataSourceProperties dataSourceProps) { - DataSource result = createDataSource(dataSourceProps.getDataSourceClassName()); - Optional poolMetaData = TypedSPILoader.findService(DataSourcePoolMetaData.class, dataSourceProps.getDataSourceClassName()); - DataSourceReflection dataSourceReflection = new DataSourceReflection(result); - if (poolMetaData.isPresent()) { - setDefaultFields(dataSourceReflection, poolMetaData.get()); - setConfiguredFields(dataSourceProps, dataSourceReflection, poolMetaData.get()); - appendJdbcUrlProperties(dataSourceProps.getCustomDataSourceProperties(), result, poolMetaData.get()); - dataSourceReflection.addDefaultDataSourceProperties(); - } else { - setConfiguredFields(dataSourceProps, dataSourceReflection); - } - return result; - } - - /** - * Create data source. 
- * - * @param dataSourceName data source name - * @param dataSourceProps data source properties - * @param cacheEnabled cache enabled - * @return created data source - */ - public static DataSource create(final String dataSourceName, final DataSourceProperties dataSourceProps, final boolean cacheEnabled) { - DataSource result = create(dataSourceProps); - if (cacheEnabled && !GlobalDataSourceRegistry.getInstance().getCachedDataSources().containsKey(dataSourceName)) { - GlobalDataSourceRegistry.getInstance().getCachedDataSources().put(dataSourceName, result); - } - return result; - } - - @SneakyThrows(ReflectiveOperationException.class) - private static DataSource createDataSource(final String dataSourceClassName) { - return (DataSource) Class.forName(dataSourceClassName).getConstructor().newInstance(); - } - - private static void setDefaultFields(final DataSourceReflection dataSourceReflection, final DataSourcePoolMetaData poolMetaData) { - for (Entry entry : poolMetaData.getDefaultProperties().entrySet()) { - dataSourceReflection.setField(entry.getKey(), entry.getValue()); - } - } - - private static void setConfiguredFields(final DataSourceProperties dataSourceProps, final DataSourceReflection dataSourceReflection) { - for (Entry entry : dataSourceProps.getAllLocalProperties().entrySet()) { - dataSourceReflection.setField(entry.getKey(), entry.getValue()); - } - } - - private static void setConfiguredFields(final DataSourceProperties dataSourceProps, final DataSourceReflection dataSourceReflection, final DataSourcePoolMetaData poolMetaData) { - for (Entry entry : dataSourceProps.getAllLocalProperties().entrySet()) { - String fieldName = entry.getKey(); - Object fieldValue = entry.getValue(); - if (isValidProperty(fieldName, fieldValue, poolMetaData) && !fieldName.equals(poolMetaData.getFieldMetaData().getJdbcUrlPropertiesFieldName())) { - dataSourceReflection.setField(fieldName, fieldValue); - } - } - } - - private static boolean isValidProperty(final String key, 
final Object value, final DataSourcePoolMetaData poolMetaData) { - return !poolMetaData.getInvalidProperties().containsKey(key) || null == value || !value.equals(poolMetaData.getInvalidProperties().get(key)); - } - - @SuppressWarnings("unchecked") - private static void appendJdbcUrlProperties(final CustomDataSourceProperties customDataSourceProps, final DataSource targetDataSource, final DataSourcePoolMetaData poolMetaData) { - String jdbcUrlPropertiesFieldName = poolMetaData.getFieldMetaData().getJdbcUrlPropertiesFieldName(); - if (null != jdbcUrlPropertiesFieldName && customDataSourceProps.getProperties().containsKey(jdbcUrlPropertiesFieldName)) { - Map jdbcUrlProps = (Map) customDataSourceProps.getProperties().get(jdbcUrlPropertiesFieldName); - DataSourcePoolMetaDataReflection dataSourcePoolMetaDataReflection = new DataSourcePoolMetaDataReflection(targetDataSource, poolMetaData.getFieldMetaData()); - for (Entry entry : jdbcUrlProps.entrySet()) { - dataSourcePoolMetaDataReflection.getJdbcConnectionProperties().ifPresent(optional -> optional.setProperty(entry.getKey(), entry.getValue().toString())); - } - } - } -} diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/DataSourcePropertiesCreator.java b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/DataSourcePropertiesCreator.java deleted file mode 100644 index 9016ac7199c40..0000000000000 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/DataSourcePropertiesCreator.java +++ /dev/null @@ -1,189 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.infra.datasource.props; - -import lombok.AccessLevel; -import lombok.NoArgsConstructor; -import org.apache.shardingsphere.infra.datasource.config.ConnectionConfiguration; -import org.apache.shardingsphere.infra.datasource.config.DataSourceConfiguration; -import org.apache.shardingsphere.infra.datasource.config.PoolConfiguration; -import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourceReflection; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; -import org.apache.shardingsphere.infra.datasource.props.custom.CustomDataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.synonym.ConnectionPropertySynonyms; -import org.apache.shardingsphere.infra.datasource.props.synonym.PoolPropertySynonyms; -import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; - -import javax.sql.DataSource; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Optional; -import java.util.Properties; -import java.util.stream.Collectors; - -/** - * Data source properties creator. - */ -@NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class DataSourcePropertiesCreator { - - /** - * Create data source properties. 
- * - * @param dataSourceConfigs data source configurations - * @return created data source properties - */ - public static Map createFromConfiguration(final Map dataSourceConfigs) { - return dataSourceConfigs.entrySet().stream().collect(Collectors - .toMap(Entry::getKey, entry -> create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); - } - - /** - * Create data source properties. - * - * @param dataSourceConfig data source configuration - * @return created data source properties - */ - public static DataSourceProperties create(final DataSourceConfiguration dataSourceConfig) { - return new DataSourceProperties(dataSourceConfig.getConnection().getDataSourceClassName(), createProperties(dataSourceConfig)); - } - - /** - * Create data source properties. - * - * @param dataSources data sources - * @return created data source properties - */ - public static Map create(final Map dataSources) { - Map result = new LinkedHashMap<>(); - for (Entry entry : dataSources.entrySet()) { - result.put(entry.getKey(), create(entry.getValue())); - } - return result; - } - - /** - * Create data source properties. 
- * - * @param dataSource data source - * @return created data source properties - */ - public static DataSourceProperties create(final DataSource dataSource) { - return new DataSourceProperties(dataSource.getClass().getName(), createProperties(dataSource)); - } - - @SuppressWarnings({"unchecked", "rawtypes"}) - private static Map createProperties(final DataSourceConfiguration dataSourceConfig) { - Map result = new LinkedHashMap<>(); - result.put("dataSourceClassName", dataSourceConfig.getConnection().getDataSourceClassName()); - result.put("url", dataSourceConfig.getConnection().getUrl()); - result.put("username", dataSourceConfig.getConnection().getUsername()); - result.put("password", dataSourceConfig.getConnection().getPassword()); - result.put("connectionTimeoutMilliseconds", dataSourceConfig.getPool().getConnectionTimeoutMilliseconds()); - result.put("idleTimeoutMilliseconds", dataSourceConfig.getPool().getIdleTimeoutMilliseconds()); - result.put("maxLifetimeMilliseconds", dataSourceConfig.getPool().getMaxLifetimeMilliseconds()); - result.put("maxPoolSize", dataSourceConfig.getPool().getMaxPoolSize()); - result.put("minPoolSize", dataSourceConfig.getPool().getMinPoolSize()); - result.put("readOnly", dataSourceConfig.getPool().getReadOnly()); - if (null != dataSourceConfig.getPool().getCustomProperties()) { - result.putAll((Map) dataSourceConfig.getPool().getCustomProperties()); - } - return result; - } - - private static Map createProperties(final DataSource dataSource) { - Map result = new LinkedHashMap<>(); - Optional poolMetaData = TypedSPILoader.findService(DataSourcePoolMetaData.class, dataSource.getClass().getName()); - for (Entry entry : new DataSourceReflection(dataSource).convertToProperties().entrySet()) { - String propertyName = entry.getKey(); - Object propertyValue = entry.getValue(); - if (!poolMetaData.isPresent() || isValidProperty(propertyName, propertyValue, poolMetaData.get()) && 
!poolMetaData.get().getTransientFieldNames().contains(propertyName)) { - result.put(propertyName, propertyValue); - } - } - return result; - } - - private static boolean isValidProperty(final String key, final Object value, final DataSourcePoolMetaData poolMetaData) { - return !poolMetaData.getInvalidProperties().containsKey(key) || null == value || !value.equals(poolMetaData.getInvalidProperties().get(key)); - } - - /** - * Create data source configuration. - * - * @param dataSourceProps data source properties - * @return created data source configuration - */ - public static DataSourceConfiguration createConfiguration(final DataSourceProperties dataSourceProps) { - return new DataSourceConfiguration(getConnectionConfiguration(dataSourceProps.getConnectionPropertySynonyms()), - getPoolConfiguration(dataSourceProps.getPoolPropertySynonyms(), dataSourceProps.getCustomDataSourceProperties())); - } - - private static ConnectionConfiguration getConnectionConfiguration(final ConnectionPropertySynonyms connectionPropertySynonyms) { - Map standardProperties = connectionPropertySynonyms.getStandardProperties(); - return new ConnectionConfiguration((String) standardProperties.get("dataSourceClassName"), (String) standardProperties.get("url"), - (String) standardProperties.get("username"), (String) standardProperties.get("password")); - } - - private static PoolConfiguration getPoolConfiguration(final PoolPropertySynonyms poolPropertySynonyms, final CustomDataSourceProperties customDataSourceProperties) { - Map standardProperties = poolPropertySynonyms.getStandardProperties(); - Long connectionTimeoutMilliseconds = toLong(standardProperties, "connectionTimeoutMilliseconds", null); - Long idleTimeoutMilliseconds = toLong(standardProperties, "idleTimeoutMilliseconds", null); - Long maxLifetimeMilliseconds = toLong(standardProperties, "maxLifetimeMilliseconds", null); - Integer maxPoolSize = toInt(standardProperties, "maxPoolSize", null); - Integer minPoolSize = 
toInt(standardProperties, "minPoolSize", null); - Boolean readOnly = toBoolean(standardProperties, "readOnly", null); - Properties customProperties = new Properties(); - customProperties.putAll(customDataSourceProperties.getProperties()); - return new PoolConfiguration(connectionTimeoutMilliseconds, idleTimeoutMilliseconds, maxLifetimeMilliseconds, maxPoolSize, minPoolSize, readOnly, customProperties); - } - - private static Long toLong(final Map properties, final String name, final Long defaultValue) { - if (!properties.containsKey(name)) { - return defaultValue; - } - try { - return Long.parseLong(String.valueOf(properties.get(name))); - } catch (final NumberFormatException ex) { - return defaultValue; - } - } - - private static Integer toInt(final Map properties, final String name, final Integer defaultValue) { - if (!properties.containsKey(name)) { - return defaultValue; - } - try { - return Integer.parseInt(String.valueOf(properties.get(name))); - } catch (final NumberFormatException ex) { - return defaultValue; - } - } - - private static Boolean toBoolean(final Map properties, final String name, final Boolean defaultValue) { - if (!properties.containsKey(name)) { - return defaultValue; - } - try { - return Boolean.parseBoolean(String.valueOf(properties.get(name))); - } catch (final NumberFormatException ex) { - return defaultValue; - } - } -} diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageUtils.java b/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageUtils.java deleted file mode 100644 index bef99c65b3935..0000000000000 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/storage/StorageUtils.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.infra.datasource.storage; - -import lombok.AccessLevel; -import lombok.NoArgsConstructor; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; - -import javax.sql.DataSource; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Map.Entry; - -/** - * Storage utility class. - */ -@NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class StorageUtils { - - /** - * Get storage units from provided data sources. 
- * - * @param dataSources data sources - * @return storage units - */ - public static Map getStorageUnits(final Map dataSources) { - Map result = new LinkedHashMap<>(dataSources.size(), 1F); - for (Entry entry : dataSources.entrySet()) { - DataSourceProperties dataSourceProperties = DataSourcePropertiesCreator.create(entry.getValue()); - String url = dataSourceProperties.getConnectionPropertySynonyms().getStandardProperties().get("url").toString(); - result.put(entry.getKey(), new StorageUnit(entry.getKey(), entry.getKey(), url)); - } - return result; - } -} diff --git a/infra/datasource/type/c3p0/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData b/infra/datasource/type/c3p0/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData deleted file mode 100644 index aa473c20f9cc9..0000000000000 --- a/infra/datasource/type/c3p0/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData +++ /dev/null @@ -1,18 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -org.apache.shardingsphere.infra.datasource.c3p0.metadata.C3P0DataSourcePoolMetaData diff --git a/infra/datasource/type/druid/pom.xml b/infra/datasource/type/druid/pom.xml deleted file mode 100644 index 479e40f11088d..0000000000000 --- a/infra/datasource/type/druid/pom.xml +++ /dev/null @@ -1,54 +0,0 @@ - - - - - 4.0.0 - - org.apache.shardingsphere - shardingsphere-infra-datasource-type - 5.4.1-SNAPSHOT - - shardingsphere-infra-datasource-druid - ${project.artifactId} - - - - org.apache.shardingsphere - shardingsphere-infra-datasource-core - ${project.version} - - - - com.alibaba - druid - - - - org.apache.shardingsphere - shardingsphere-test-fixture-database - ${project.version} - test - - - org.apache.shardingsphere - shardingsphere-test-util - ${project.version} - test - - - diff --git a/infra/datasource/type/druid/src/main/java/org/apache/shardingsphere/infra/datasource/druid/metadata/DruidDataSourcePoolMetaData.java b/infra/datasource/type/druid/src/main/java/org/apache/shardingsphere/infra/datasource/druid/metadata/DruidDataSourcePoolMetaData.java deleted file mode 100644 index 4ff4697479f00..0000000000000 --- a/infra/datasource/type/druid/src/main/java/org/apache/shardingsphere/infra/datasource/druid/metadata/DruidDataSourcePoolMetaData.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.infra.datasource.druid.metadata; - -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolPropertiesValidator; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DefaultDataSourcePoolPropertiesValidator; - -import java.util.Collection; -import java.util.Collections; -import java.util.LinkedList; -import java.util.Map; - -/** - * Druid data source pool meta data. - */ -public final class DruidDataSourcePoolMetaData implements DataSourcePoolMetaData { - - private static final Collection TRANSIENT_FIELD_NAMES = new LinkedList<>(); - - static { - buildTransientFieldNames(); - } - - private static void buildTransientFieldNames() { - TRANSIENT_FIELD_NAMES.add("closed"); - } - - @Override - public Map getDefaultProperties() { - return Collections.emptyMap(); - } - - @Override - public Map getInvalidProperties() { - return Collections.emptyMap(); - } - - @Override - public Map getPropertySynonyms() { - return Collections.emptyMap(); - } - - @Override - public Collection getTransientFieldNames() { - return TRANSIENT_FIELD_NAMES; - } - - @Override - public DruidDataSourcePoolFieldMetaData getFieldMetaData() { - return new DruidDataSourcePoolFieldMetaData(); - } - - @Override - public DataSourcePoolPropertiesValidator getDataSourcePoolPropertiesValidator() { - return new DefaultDataSourcePoolPropertiesValidator(); - } - - @Override - public String getType() { - return 
"com.alibaba.druid.pool.DruidDataSource"; - } -} diff --git a/infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/metadata/HikariDataSourcePoolPropertiesValidator.java b/infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/metadata/HikariDataSourcePoolPropertiesValidator.java deleted file mode 100644 index d8fb04757e25e..0000000000000 --- a/infra/datasource/type/hikari/src/main/java/org/apache/shardingsphere/infra/datasource/hikari/metadata/HikariDataSourcePoolPropertiesValidator.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.infra.datasource.hikari.metadata; - -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolPropertiesValidator; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; - -import java.util.concurrent.TimeUnit; - -/** - * Hikari data source pool properties validator. 
- */ -public final class HikariDataSourcePoolPropertiesValidator implements DataSourcePoolPropertiesValidator { - - private static final long CONNECTION_TIMEOUT_FLOOR = 250L; - - private static final long MAX_LIFETIME_FLOOR = TimeUnit.SECONDS.toMillis(30); - - private static final long KEEP_ALIVE_TIME_FLOOR = TimeUnit.SECONDS.toMillis(30); - - @Override - public void validateProperties(final DataSourceProperties dataSourceProps) { - validateConnectionTimeout(dataSourceProps); - validateIdleTimeout(dataSourceProps); - validateMaxLifetime(dataSourceProps); - validateMaximumPoolSize(dataSourceProps); - validateMinimumIdle(dataSourceProps); - validateKeepAliveTime(dataSourceProps); - } - - private void validateConnectionTimeout(final DataSourceProperties dataSourceProps) { - if (!checkValueExist(dataSourceProps, "connectionTimeout")) { - return; - } - long connectionTimeout = Long.parseLong(dataSourceProps.getAllLocalProperties().get("connectionTimeout").toString()); - ShardingSpherePreconditions.checkState(connectionTimeout >= CONNECTION_TIMEOUT_FLOOR, - () -> new IllegalArgumentException(String.format("connectionTimeout cannot be less than %sms", CONNECTION_TIMEOUT_FLOOR))); - } - - private void validateIdleTimeout(final DataSourceProperties dataSourceProps) { - if (!checkValueExist(dataSourceProps, "idleTimeout")) { - return; - } - long idleTimeout = Long.parseLong(dataSourceProps.getAllLocalProperties().get("idleTimeout").toString()); - ShardingSpherePreconditions.checkState(idleTimeout >= 0, () -> new IllegalArgumentException("idleTimeout cannot be negative")); - } - - private void validateMaxLifetime(final DataSourceProperties dataSourceProps) { - if (!checkValueExist(dataSourceProps, "maxLifetime")) { - return; - } - long maxLifetime = Long.parseLong(dataSourceProps.getAllLocalProperties().get("maxLifetime").toString()); - ShardingSpherePreconditions.checkState(maxLifetime >= MAX_LIFETIME_FLOOR, () -> new IllegalArgumentException(String.format("maxLifetime 
cannot be less than %sms", MAX_LIFETIME_FLOOR))); - } - - private void validateMaximumPoolSize(final DataSourceProperties dataSourceProps) { - if (!checkValueExist(dataSourceProps, "maximumPoolSize")) { - return; - } - int maximumPoolSize = Integer.parseInt(dataSourceProps.getAllLocalProperties().get("maximumPoolSize").toString()); - ShardingSpherePreconditions.checkState(maximumPoolSize >= 1, () -> new IllegalArgumentException("maxPoolSize cannot be less than 1")); - } - - private void validateMinimumIdle(final DataSourceProperties dataSourceProps) { - if (!checkValueExist(dataSourceProps, "minimumIdle")) { - return; - } - int minimumIdle = Integer.parseInt(dataSourceProps.getAllLocalProperties().get("minimumIdle").toString()); - ShardingSpherePreconditions.checkState(minimumIdle >= 0, () -> new IllegalArgumentException("minimumIdle cannot be negative")); - } - - private void validateKeepAliveTime(final DataSourceProperties dataSourceProps) { - if (!checkValueExist(dataSourceProps, "keepaliveTime")) { - return; - } - int keepAliveTime = Integer.parseInt(dataSourceProps.getAllLocalProperties().get("keepaliveTime").toString()); - if (keepAliveTime == 0) { - return; - } - ShardingSpherePreconditions.checkState(keepAliveTime >= KEEP_ALIVE_TIME_FLOOR, - () -> new IllegalArgumentException(String.format("keepaliveTime cannot be less than %sms", KEEP_ALIVE_TIME_FLOOR))); - } - - private boolean checkValueExist(final DataSourceProperties dataSourceProps, final String key) { - return dataSourceProps.getAllLocalProperties().containsKey(key) && null != dataSourceProps.getAllLocalProperties().get(key); - } -} diff --git a/infra/datasource/type/hikari/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.destroyer.detector.DataSourcePoolActiveDetector b/infra/datasource/type/hikari/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.destroyer.detector.DataSourcePoolActiveDetector deleted file mode 100644 index 
26580fd1777f3..0000000000000 --- a/infra/datasource/type/hikari/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.destroyer.detector.DataSourcePoolActiveDetector +++ /dev/null @@ -1,18 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -org.apache.shardingsphere.infra.datasource.hikari.detector.HikariDataSourcePoolActiveDetector diff --git a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/validate/DataSourcePropertiesValidateHandler.java b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/validate/DataSourcePoolPropertiesValidateHandler.java similarity index 61% rename from infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/validate/DataSourcePropertiesValidateHandler.java rename to infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/validate/DataSourcePoolPropertiesValidateHandler.java index 7841a3f0f4759..a81c0ddd4e523 100644 --- a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/validate/DataSourcePropertiesValidateHandler.java +++ b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/validate/DataSourcePoolPropertiesValidateHandler.java @@ -18,27 +18,26 @@ package org.apache.shardingsphere.distsql.handler.validate; import org.apache.shardingsphere.distsql.handler.exception.storageunit.InvalidStorageUnitsException; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesValidator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.validator.DataSourcePoolPropertiesValidator; +import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import java.util.Collection; import java.util.Map; /** - * Data source properties validate handler. + * Data source pool properties validate handler. */ -public final class DataSourcePropertiesValidateHandler { +public final class DataSourcePoolPropertiesValidateHandler { /** * Validate data source properties map. 
* - * @param dataSourcePropertiesMap data source properties map + * @param propsMap data source pool properties map * @throws InvalidStorageUnitsException invalid storage units exception */ - public void validate(final Map dataSourcePropertiesMap) { - Collection errorMessages = new DataSourcePropertiesValidator().validate(dataSourcePropertiesMap); - if (!errorMessages.isEmpty()) { - throw new InvalidStorageUnitsException(errorMessages); - } + public void validate(final Map propsMap) { + Collection errorMessages = DataSourcePoolPropertiesValidator.validate(propsMap); + ShardingSpherePreconditions.checkState(errorMessages.isEmpty(), () -> new InvalidStorageUnitsException(errorMessages)); } } diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/InvalidDataSourcePropertiesException.java b/infra/exception/core/src/main/java/org/apache/shardingsphere/infra/exception/core/external/sql/type/generic/ServerSQLException.java similarity index 63% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/InvalidDataSourcePropertiesException.java rename to infra/exception/core/src/main/java/org/apache/shardingsphere/infra/exception/core/external/sql/type/generic/ServerSQLException.java index 00fcf4e26cd6b..3ffe3a108d302 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/props/InvalidDataSourcePropertiesException.java +++ b/infra/exception/core/src/main/java/org/apache/shardingsphere/infra/exception/core/external/sql/type/generic/ServerSQLException.java @@ -15,16 +15,18 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource.props; +package org.apache.shardingsphere.infra.exception.core.external.sql.type.generic; + +import org.apache.shardingsphere.infra.exception.core.external.sql.sqlstate.XOpenSQLState; /** - * Invalid data source properties exception. + * Server SQL exception. 
*/ -public final class InvalidDataSourcePropertiesException extends Exception { +public final class ServerSQLException extends GenericSQLException { - private static final long serialVersionUID = -7221138369057943935L; + private static final long serialVersionUID = -4072647406344887711L; - public InvalidDataSourcePropertiesException(final String dataSourceName, final String errorMessage) { - super(String.format("Invalid data source `%s`, error message is: %s", dataSourceName, errorMessage)); + public ServerSQLException(final Exception cause) { + super(cause.getMessage(), cause, XOpenSQLState.GENERAL_ERROR, 0); } } diff --git a/infra/exception/dialect/core/pom.xml b/infra/exception/dialect/core/pom.xml index c75642dc8d0a3..07ccc70a24aad 100644 --- a/infra/exception/dialect/core/pom.xml +++ b/infra/exception/dialect/core/pom.xml @@ -32,5 +32,12 @@ shardingsphere-infra-common ${project.version} + + + org.apache.shardingsphere + shardingsphere-test-fixture-database + ${project.version} + test + diff --git a/infra/exception/dialect/core/src/main/java/org/apache/shardingsphere/infra/exception/dialect/SQLExceptionTransformEngine.java b/infra/exception/dialect/core/src/main/java/org/apache/shardingsphere/infra/exception/dialect/SQLExceptionTransformEngine.java index 9b4a5b625806f..2e46f6c3f94aa 100644 --- a/infra/exception/dialect/core/src/main/java/org/apache/shardingsphere/infra/exception/dialect/SQLExceptionTransformEngine.java +++ b/infra/exception/dialect/core/src/main/java/org/apache/shardingsphere/infra/exception/dialect/SQLExceptionTransformEngine.java @@ -19,14 +19,16 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; -import org.apache.shardingsphere.infra.exception.dialect.exception.SQLDialectException; -import org.apache.shardingsphere.infra.exception.dialect.exception.protocol.DatabaseProtocolException; -import org.apache.shardingsphere.infra.exception.dialect.mapper.SQLDialectExceptionMapper; -import 
org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.exception.core.external.server.ShardingSphereServerException; import org.apache.shardingsphere.infra.exception.core.external.sql.ShardingSphereSQLException; import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.DatabaseProtocolSQLException; +import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.ServerSQLException; import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnknownSQLException; +import org.apache.shardingsphere.infra.exception.dialect.exception.SQLDialectException; +import org.apache.shardingsphere.infra.exception.dialect.exception.protocol.DatabaseProtocolException; +import org.apache.shardingsphere.infra.exception.dialect.mapper.SQLDialectExceptionMapper; import java.sql.SQLException; import java.util.Optional; @@ -60,6 +62,9 @@ public static SQLException toSQLException(final Exception cause, final DatabaseT return dialectExceptionMapper.get().convert((SQLDialectException) cause); } } + if (cause instanceof ShardingSphereServerException) { + return new ServerSQLException(cause).toSQLException(); + } return new UnknownSQLException(cause).toSQLException(); } } diff --git a/infra/exception/dialect/core/src/test/java/org/apache/shardingsphere/infra/exception/dialect/SQLExceptionTransformEngineTest.java b/infra/exception/dialect/core/src/test/java/org/apache/shardingsphere/infra/exception/dialect/SQLExceptionTransformEngineTest.java new file mode 100644 index 0000000000000..841379c7f425e --- /dev/null +++ b/infra/exception/dialect/core/src/test/java/org/apache/shardingsphere/infra/exception/dialect/SQLExceptionTransformEngineTest.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + 
* contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.exception.dialect; + +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.exception.core.external.server.ShardingSphereServerException; +import org.apache.shardingsphere.infra.exception.core.external.sql.ShardingSphereSQLException; +import org.apache.shardingsphere.infra.exception.dialect.exception.SQLDialectException; +import org.apache.shardingsphere.infra.exception.dialect.exception.protocol.DatabaseProtocolException; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.junit.jupiter.api.Test; + +import java.sql.SQLException; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +class SQLExceptionTransformEngineTest { + + private final DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, "FIXTURE"); + + @Test + void assertToSQLExceptionWithSQLException() { + SQLException cause = new SQLException(); + assertThat(SQLExceptionTransformEngine.toSQLException(cause, databaseType), is(cause)); + } + + @Test + void assertToSQLExceptionWithShardingSphereSQLException() { 
+ ShardingSphereSQLException cause = mock(ShardingSphereSQLException.class); + SQLException expected = new SQLException(); + when(cause.toSQLException()).thenReturn(expected); + assertThat(SQLExceptionTransformEngine.toSQLException(cause, databaseType), is(expected)); + } + + @Test + void assertToSQLExceptionWithDatabaseProtocolException() { + DatabaseProtocolException cause = mock(DatabaseProtocolException.class); + when(cause.getMessage()).thenReturn("No reason"); + assertThat(SQLExceptionTransformEngine.toSQLException(cause, databaseType).getMessage(), is("Database protocol exception: No reason")); + } + + @Test + void assertToSQLExceptionWithSQLDialectException() { + assertThat(SQLExceptionTransformEngine.toSQLException(mock(SQLDialectException.class), databaseType).getMessage(), is("Dialect exception")); + } + + @Test + void assertToSQLExceptionWithShardingSphereServerException() { + ShardingSphereServerException cause = mock(ShardingSphereServerException.class); + when(cause.getMessage()).thenReturn("No reason"); + assertThat(SQLExceptionTransformEngine.toSQLException(cause, databaseType).getMessage(), is("No reason")); + } + + @Test + void assertToSQLExceptionWithOtherException() { + assertThat(SQLExceptionTransformEngine.toSQLException(new Exception("No reason"), databaseType).getMessage(), is("Unknown exception: No reason")); + } +} diff --git a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/RangeOrderRepositoryImpl.java b/infra/exception/dialect/core/src/test/java/org/apache/shardingsphere/infra/exception/dialect/fixture/SQLDialectExceptionMapperFixture.java similarity index 61% rename from examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/RangeOrderRepositoryImpl.java rename to infra/exception/dialect/core/src/test/java/org/apache/shardingsphere/infra/exception/dialect/fixture/SQLDialectExceptionMapperFixture.java index 
4dd0e48ef8beb..052b09e640413 100644 --- a/examples/example-core/example-raw-jdbc/src/main/java/org/apache/shardingsphere/example/core/jdbc/repository/RangeOrderRepositoryImpl.java +++ b/infra/exception/dialect/core/src/test/java/org/apache/shardingsphere/infra/exception/dialect/fixture/SQLDialectExceptionMapperFixture.java @@ -15,23 +15,22 @@ * limitations under the License. */ -package org.apache.shardingsphere.example.core.jdbc.repository; +package org.apache.shardingsphere.infra.exception.dialect.fixture; -import org.apache.shardingsphere.example.core.api.entity.Order; +import org.apache.shardingsphere.infra.exception.dialect.exception.SQLDialectException; +import org.apache.shardingsphere.infra.exception.dialect.mapper.SQLDialectExceptionMapper; -import javax.sql.DataSource; import java.sql.SQLException; -import java.util.List; -public final class RangeOrderRepositoryImpl extends OrderRepositoryImpl { +public final class SQLDialectExceptionMapperFixture implements SQLDialectExceptionMapper { - public RangeOrderRepositoryImpl(final DataSource dataSource) { - super(dataSource); + @Override + public SQLException convert(final SQLDialectException sqlDialectException) { + return new SQLException("Dialect exception"); } @Override - public List selectAll() throws SQLException { - String sql = "SELECT * FROM t_order WHERE order_id BETWEEN 200000000000000000 AND 400000000000000000"; - return getOrders(sql); + public String getDatabaseType() { + return "FIXTURE"; } } diff --git a/examples/example-core/config-utility/src/main/resources/META-INF/services/org.apache.shardingsphere.encrypt.spi.EncryptAlgorithm b/infra/exception/dialect/core/src/test/resources/META-INF/services/org.apache.shardingsphere.infra.exception.dialect.mapper.SQLDialectExceptionMapper similarity index 89% rename from examples/example-core/config-utility/src/main/resources/META-INF/services/org.apache.shardingsphere.encrypt.spi.EncryptAlgorithm rename to 
infra/exception/dialect/core/src/test/resources/META-INF/services/org.apache.shardingsphere.infra.exception.dialect.mapper.SQLDialectExceptionMapper index 39a143c52f4cf..50ecded00b7ab 100644 --- a/examples/example-core/config-utility/src/main/resources/META-INF/services/org.apache.shardingsphere.encrypt.spi.EncryptAlgorithm +++ b/infra/exception/dialect/core/src/test/resources/META-INF/services/org.apache.shardingsphere.infra.exception.dialect.mapper.SQLDialectExceptionMapper @@ -15,4 +15,4 @@ # limitations under the License. # -org.apache.shardingsphere.example.fixture.TestQueryAssistedShardingEncryptAlgorithm \ No newline at end of file +org.apache.shardingsphere.infra.exception.dialect.fixture.SQLDialectExceptionMapperFixture diff --git a/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/kernel/ExecutorEngine.java b/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/kernel/ExecutorEngine.java index 5e2c679bc911d..2e5f2c7e849d5 100644 --- a/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/kernel/ExecutorEngine.java +++ b/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/kernel/ExecutorEngine.java @@ -39,8 +39,6 @@ @Getter public final class ExecutorEngine implements AutoCloseable { - private static final int CPU_CORES = Runtime.getRuntime().availableProcessors(); - private final ExecutorServiceManager executorServiceManager; private ExecutorEngine(final int executorSize) { @@ -57,30 +55,6 @@ public static ExecutorEngine createExecutorEngineWithSize(final int executorSize return new ExecutorEngine(executorSize); } - /** - * Create executor engine with CPU. - * - * @return created executor engine - */ - public static ExecutorEngine createExecutorEngineWithCPU() { - int cpuThreadCount = CPU_CORES * 2 - 1; - return new ExecutorEngine(cpuThreadCount); - } - - /** - * Execute. 
- * - * @param executionGroupContext execution group context - * @param callback executor callback - * @param type of input value - * @param type of return value - * @return execute result - * @throws SQLException throw if execute failure - */ - public List execute(final ExecutionGroupContext executionGroupContext, final ExecutorCallback callback) throws SQLException { - return execute(executionGroupContext, null, callback, false); - } - /** * Execute. * diff --git a/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/prepare/driver/DriverExecutionPrepareEngine.java b/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/prepare/driver/DriverExecutionPrepareEngine.java index 8212310f6b7b9..848b7efd42a0a 100644 --- a/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/prepare/driver/DriverExecutionPrepareEngine.java +++ b/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/prepare/driver/DriverExecutionPrepareEngine.java @@ -24,6 +24,7 @@ import org.apache.shardingsphere.infra.executor.sql.execute.engine.ConnectionMode; import org.apache.shardingsphere.infra.executor.sql.execute.engine.driver.DriverExecutionUnit; import org.apache.shardingsphere.infra.executor.sql.prepare.AbstractExecutionPrepareEngine; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnitMetaData; import org.apache.shardingsphere.infra.rule.ShardingSphereRule; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; @@ -54,17 +55,17 @@ public final class DriverExecutionPrepareEngine @SuppressWarnings("rawtypes") private final SQLExecutionUnitBuilder sqlExecutionUnitBuilder; - private final Map databaseTypes; + private final StorageUnitMetaData storageUnitMetaData; public DriverExecutionPrepareEngine(final String type, final int maxConnectionsSizePerQuery, final DatabaseConnectionManager databaseConnectionManager, final ExecutorStatementManager statementManager, final 
StorageResourceOption option, final Collection rules, - final Map databaseTypes) { + final StorageUnitMetaData storageUnitMetaData) { super(maxConnectionsSizePerQuery, rules); this.databaseConnectionManager = databaseConnectionManager; this.statementManager = statementManager; this.option = option; sqlExecutionUnitBuilder = getCachedSqlExecutionUnitBuilder(type); - this.databaseTypes = databaseTypes; + this.storageUnitMetaData = storageUnitMetaData; } /** @@ -95,10 +96,11 @@ protected List> group(final String dataSourceName, final int c @SuppressWarnings("unchecked") private ExecutionGroup createExecutionGroup(final String dataSourceName, final List sqlUnits, final C connection, final ConnectionMode connectionMode) throws SQLException { - List result = new LinkedList<>(); + List inputs = new LinkedList<>(); + DatabaseType databaseType = storageUnitMetaData.getStorageUnits().get(dataSourceName).getStorageType(); for (SQLUnit each : sqlUnits) { - result.add((T) sqlExecutionUnitBuilder.build(new ExecutionUnit(dataSourceName, each), statementManager, connection, connectionMode, option, databaseTypes.get(dataSourceName))); + inputs.add((T) sqlExecutionUnitBuilder.build(new ExecutionUnit(dataSourceName, each), statementManager, connection, connectionMode, option, databaseType)); } - return new ExecutionGroup<>(result); + return new ExecutionGroup<>(inputs); } } diff --git a/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/Process.java b/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/Process.java index a5f43d2ec3ade..8597a563fc44a 100644 --- a/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/Process.java +++ b/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/Process.java @@ -57,15 +57,17 @@ public final class Process { private final boolean idle; - public Process(final ExecutionGroupContext executionGroupContext) { - this("", 
executionGroupContext, true); + private final boolean heldByConnection; + + public Process(final ExecutionGroupContext executionGroupContext, final boolean heldByConnection) { + this("", executionGroupContext, true, heldByConnection); } - public Process(final String sql, final ExecutionGroupContext executionGroupContext) { - this(sql, executionGroupContext, false); + public Process(final String sql, final ExecutionGroupContext executionGroupContext, final boolean heldByConnection) { + this(sql, executionGroupContext, false, heldByConnection); } - private Process(final String sql, final ExecutionGroupContext executionGroupContext, final boolean idle) { + private Process(final String sql, final ExecutionGroupContext executionGroupContext, final boolean idle, final boolean heldByConnection) { id = executionGroupContext.getReportContext().getProcessId(); startMillis = System.currentTimeMillis(); this.sql = sql; @@ -77,6 +79,7 @@ private Process(final String sql, final ExecutionGroupContext executionGroupContext) { diff --git a/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/ProcessEngine.java b/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/ProcessEngine.java index 154b83f3aa097..d7b501fa7e9e3 100644 --- a/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/ProcessEngine.java +++ b/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/ProcessEngine.java @@ -17,11 +17,11 @@ package org.apache.shardingsphere.infra.executor.sql.process; -import org.apache.shardingsphere.infra.session.query.QueryContext; import org.apache.shardingsphere.infra.executor.kernel.model.ExecutionGroupContext; import org.apache.shardingsphere.infra.executor.kernel.model.ExecutionGroupReportContext; import org.apache.shardingsphere.infra.executor.sql.execute.engine.SQLExecutionUnit; import org.apache.shardingsphere.infra.metadata.user.Grantee; +import 
org.apache.shardingsphere.infra.session.query.QueryContext; import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.DDLStatement; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.DMLStatement; @@ -43,7 +43,7 @@ public final class ProcessEngine { */ public String connect(final Grantee grantee, final String databaseName) { ExecutionGroupContext executionGroupContext = new ExecutionGroupContext<>(Collections.emptyList(), new ExecutionGroupReportContext(databaseName, grantee)); - Process process = new Process(executionGroupContext); + Process process = new Process(executionGroupContext, true); ProcessRegistry.getInstance().add(process); return executionGroupContext.getReportContext().getProcessId(); } @@ -66,9 +66,11 @@ public void disconnect(final String processId) { */ public void executeSQL(final ExecutionGroupContext executionGroupContext, final QueryContext queryContext) { if (isMySQLDDLOrDMLStatement(queryContext.getSqlStatementContext().getSqlStatement())) { - ProcessIdContext.set(executionGroupContext.getReportContext().getProcessId()); - Process process = new Process(queryContext.getSql(), executionGroupContext); - ProcessRegistry.getInstance().add(process); + String processId = executionGroupContext.getReportContext().getProcessId(); + // TODO remove heldByConnection when jdbc connection support generate processId and call connect and disconnect + boolean heldByConnection = null != ProcessRegistry.getInstance().get(processId) && ProcessRegistry.getInstance().get(processId).isHeldByConnection(); + ProcessIdContext.set(processId); + ProcessRegistry.getInstance().add(new Process(queryContext.getSql(), executionGroupContext, heldByConnection)); } } @@ -95,7 +97,11 @@ public void completeSQLExecution() { } ExecutionGroupContext executionGroupContext = new ExecutionGroupContext<>( Collections.emptyList(), new 
ExecutionGroupReportContext(ProcessIdContext.get(), process.getDatabaseName(), new Grantee(process.getUsername(), process.getHostname()))); - ProcessRegistry.getInstance().add(new Process(executionGroupContext)); + if (process.isHeldByConnection()) { + ProcessRegistry.getInstance().add(new Process(executionGroupContext, true)); + } else { + ProcessRegistry.getInstance().remove(ProcessIdContext.get()); + } ProcessIdContext.remove(); } diff --git a/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/YamlProcess.java b/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/YamlProcess.java index d72828dff963d..47dba29e7c6de 100644 --- a/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/YamlProcess.java +++ b/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/YamlProcess.java @@ -45,4 +45,6 @@ public final class YamlProcess implements YamlConfiguration { private int completedUnitCount; private boolean idle; + + private boolean heldByConnection; } diff --git a/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/swapper/YamlProcessSwapper.java b/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/swapper/YamlProcessSwapper.java index fd98124fd532d..e32f9e63b4eab 100644 --- a/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/swapper/YamlProcessSwapper.java +++ b/infra/executor/src/main/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/swapper/YamlProcessSwapper.java @@ -41,12 +41,13 @@ public YamlProcess swapToYamlConfiguration(final Process data) { result.setTotalUnitCount(data.getTotalUnitCount()); result.setCompletedUnitCount(data.getCompletedUnitCount()); result.setIdle(data.isIdle()); + result.setHeldByConnection(data.isHeldByConnection()); return result; } @Override public Process swapToObject(final YamlProcess 
yamlConfig) { return new Process(yamlConfig.getId(), yamlConfig.getStartMillis(), yamlConfig.getSql(), yamlConfig.getDatabaseName(), yamlConfig.getUsername(), yamlConfig.getHostname(), - yamlConfig.getTotalUnitCount(), Collections.emptyList(), new AtomicInteger(yamlConfig.getCompletedUnitCount()), yamlConfig.isIdle()); + yamlConfig.getTotalUnitCount(), Collections.emptyList(), new AtomicInteger(yamlConfig.getCompletedUnitCount()), yamlConfig.isIdle(), yamlConfig.isHeldByConnection()); } } diff --git a/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/kernel/ExecutorEngineTest.java b/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/kernel/ExecutorEngineTest.java index 3e7914bbe472f..cda1bbe24b25d 100644 --- a/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/kernel/ExecutorEngineTest.java +++ b/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/kernel/ExecutorEngineTest.java @@ -33,7 +33,6 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; class ExecutorEngineTest { @@ -76,13 +75,6 @@ private List createMockedInputs(final int size) { return result; } - @Test - void assertParallelExecuteWithoutFirstCallback() throws SQLException, InterruptedException { - List actual = executorEngine.execute(executionGroupContext, callback); - latch.await(); - assertThat(actual.size(), is(4)); - } - @Test void assertParallelExecuteWithFirstCallback() throws SQLException, InterruptedException { List actual = executorEngine.execute(executionGroupContext, firstCallback, callback, false); @@ -96,12 +88,4 @@ void assertSerialExecute() throws SQLException, InterruptedException { latch.await(); assertThat(actual.size(), is(4)); } - - @Test - void assertExecutionGroupIsEmpty() throws SQLException { - CountDownLatch latch = new CountDownLatch(1); - List actual = 
executorEngine.execute(new ExecutionGroupContext<>(new LinkedList<>(), mock(ExecutionGroupReportContext.class)), new ExecutorCallbackFixture(latch)); - latch.countDown(); - assertTrue(actual.isEmpty()); - } } diff --git a/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/swapper/YamlProcessListSwapperTest.java b/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/swapper/YamlProcessListSwapperTest.java index c1ab8e8ed0643..ab6a9f0935e45 100644 --- a/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/swapper/YamlProcessListSwapperTest.java +++ b/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/swapper/YamlProcessListSwapperTest.java @@ -42,7 +42,7 @@ class YamlProcessListSwapperTest { void assertSwapToYamlConfiguration() { ExecutionGroupReportContext reportContext = new ExecutionGroupReportContext("foo_db", new Grantee("root", "localhost")); ExecutionGroupContext executionGroupContext = new ExecutionGroupContext<>(Collections.emptyList(), reportContext); - Process process = new Process("SELECT 1", executionGroupContext); + Process process = new Process("SELECT 1", executionGroupContext, false); YamlProcessList actual = new YamlProcessListSwapper().swapToYamlConfiguration(Collections.singleton(process)); assertThat(actual.getProcesses().size(), is(1)); assertYamlProcessContext(actual.getProcesses().iterator().next()); @@ -57,6 +57,7 @@ private void assertYamlProcessContext(final YamlProcess actual) { assertThat(actual.getHostname(), is("localhost")); assertThat(actual.getCompletedUnitCount(), is(0)); assertThat(actual.getTotalUnitCount(), is(0)); + assertThat(actual.isHeldByConnection(), is(false)); assertFalse(actual.isIdle()); } @@ -80,6 +81,7 @@ private YamlProcess createYamlProcess() { result.setTotalUnitCount(10); result.setCompletedUnitCount(5); result.setIdle(true); + result.setHeldByConnection(true); return result; 
} @@ -92,6 +94,7 @@ private void assertProcess(final Process actual) { assertThat(actual.getHostname(), is("localhost")); assertThat(actual.getTotalUnitCount(), is(10)); assertThat(actual.getCompletedUnitCount(), is(5)); + assertThat(actual.isHeldByConnection(), is(true)); assertTrue(actual.isIdle()); } } diff --git a/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/swapper/YamlProcessSwapperTest.java b/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/swapper/YamlProcessSwapperTest.java index 6d6fe1361e644..2debede78b8f7 100644 --- a/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/swapper/YamlProcessSwapperTest.java +++ b/infra/executor/src/test/java/org/apache/shardingsphere/infra/executor/sql/process/yaml/swapper/YamlProcessSwapperTest.java @@ -40,7 +40,7 @@ class YamlProcessSwapperTest { void assertSwapToYamlConfiguration() { ExecutionGroupReportContext reportContext = new ExecutionGroupReportContext("foo_db", new Grantee("root", "localhost")); ExecutionGroupContext executionGroupContext = new ExecutionGroupContext<>(Collections.emptyList(), reportContext); - Process process = new Process("SELECT 1", executionGroupContext); + Process process = new Process("SELECT 1", executionGroupContext, true); YamlProcess actual = new YamlProcessSwapper().swapToYamlConfiguration(process); assertNotNull(actual.getId()); assertThat(actual.getStartMillis(), lessThanOrEqualTo(System.currentTimeMillis())); @@ -50,6 +50,7 @@ void assertSwapToYamlConfiguration() { assertThat(actual.getHostname(), is("localhost")); assertThat(actual.getCompletedUnitCount(), is(0)); assertThat(actual.getTotalUnitCount(), is(0)); + assertThat(actual.isHeldByConnection(), is(true)); assertFalse(actual.isIdle()); } @@ -64,6 +65,7 @@ void assertSwapToObject() { assertThat(actual.getHostname(), is("localhost")); assertThat(actual.getTotalUnitCount(), is(10)); 
assertThat(actual.getCompletedUnitCount(), is(5)); + assertThat(actual.isHeldByConnection(), is(false)); assertTrue(actual.isIdle()); } @@ -78,6 +80,7 @@ private YamlProcess createYamlProcess() { result.setTotalUnitCount(10); result.setCompletedUnitCount(5); result.setIdle(true); + result.setHeldByConnection(false); return result; } } diff --git a/infra/expr/core/pom.xml b/infra/expr/core/pom.xml index fdc41cd673c0d..84f274b57cf14 100644 --- a/infra/expr/core/pom.xml +++ b/infra/expr/core/pom.xml @@ -39,7 +39,7 @@ org.apache.shardingsphere - shardingsphere-infra-expr-espresso + shardingsphere-infra-expr-purelist ${project.version} diff --git a/infra/expr/core/src/main/java/org/apache/shardingsphere/infra/expr/core/InlineExpressionParserFactory.java b/infra/expr/core/src/main/java/org/apache/shardingsphere/infra/expr/core/InlineExpressionParserFactory.java index 3cde01672e179..11a03cd4948e4 100644 --- a/infra/expr/core/src/main/java/org/apache/shardingsphere/infra/expr/core/InlineExpressionParserFactory.java +++ b/infra/expr/core/src/main/java/org/apache/shardingsphere/infra/expr/core/InlineExpressionParserFactory.java @@ -28,7 +28,7 @@ @NoArgsConstructor(access = AccessLevel.PRIVATE) public final class InlineExpressionParserFactory { - // workaround for https://junit.org/junit5/docs/current/api/org.junit.jupiter.api/org/junit/jupiter/api/condition/EnabledInNativeImage.html + // workaround for https://junit.org/junit5/docs/5.10.0/api/org.junit.jupiter.api/org/junit/jupiter/api/condition/EnabledInNativeImage.html private static final boolean IS_SUBSTRATE_VM = "runtime".equals(System.getProperty("org.graalvm.nativeimage.imagecode")); /** @@ -37,6 +37,6 @@ public final class InlineExpressionParserFactory { * @return created instance */ public static InlineExpressionParser newInstance() { - return TypedSPILoader.getService(InlineExpressionParser.class, IS_SUBSTRATE_VM ? 
"ESPRESSO" : "HOTSPOT"); + return TypedSPILoader.getService(InlineExpressionParser.class, IS_SUBSTRATE_VM ? "PURELIST" : "HOTSPOT"); } } diff --git a/infra/expr/espresso/pom.xml b/infra/expr/espresso/pom.xml index fead9afddea14..653fe6e1653d7 100644 --- a/infra/expr/espresso/pom.xml +++ b/infra/expr/espresso/pom.xml @@ -42,12 +42,6 @@ shardingsphere-infra-util ${project.version} - - - org.apache.shardingsphere - shardingsphere-infra-expr-hotsopt - ${project.version} - org.graalvm.truffle @@ -67,30 +61,9 @@ copy - prepare-package + process-test-classes - - org.apache.shardingsphere - shardingsphere-infra-expr-spi - ${project.version} - jar - true - - - org.apache.shardingsphere - shardingsphere-infra-util - ${project.version} - jar - true - - - org.apache.shardingsphere - shardingsphere-infra-expr-hotsopt - ${project.version} - jar - true - org.apache.groovy groovy @@ -98,13 +71,6 @@ jar true - - com.google.guava - guava - ${guava.version} - jar - true - true ${project.build.outputDirectory}/espresso-need-libs diff --git a/infra/expr/espresso/src/main/java/org/apache/shardingsphere/infra/expr/espresso/EspressoInlineExpressionParser.java b/infra/expr/espresso/src/main/java/org/apache/shardingsphere/infra/expr/espresso/EspressoInlineExpressionParser.java index 32e95cb916a89..bfe5201bb08be 100644 --- a/infra/expr/espresso/src/main/java/org/apache/shardingsphere/infra/expr/espresso/EspressoInlineExpressionParser.java +++ b/infra/expr/espresso/src/main/java/org/apache/shardingsphere/infra/expr/espresso/EspressoInlineExpressionParser.java @@ -17,20 +17,23 @@ package org.apache.shardingsphere.infra.expr.espresso; +import com.google.common.base.Strings; +import com.google.common.collect.Sets; import groovy.lang.Closure; -import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; -import org.apache.shardingsphere.infra.expr.hotsopt.HotspotInlineExpressionParser; +import groovy.lang.GroovyShell; import 
org.apache.shardingsphere.infra.expr.spi.InlineExpressionParser; import org.graalvm.polyglot.Context; -import org.graalvm.polyglot.TypeLiteral; import org.graalvm.polyglot.Value; import java.io.File; import java.net.URL; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Set; import java.util.stream.Collectors; -import java.util.stream.Stream; /** * Espresso inline expression parser. @@ -39,56 +42,154 @@ public final class EspressoInlineExpressionParser implements InlineExpressionPar private static final String JAVA_CLASSPATH; - private static final String JAVA_HOME; + private static final char SPLITTER = ','; static { - JAVA_HOME = System.getenv("JAVA_HOME"); URL resource = Thread.currentThread().getContextClassLoader().getResource("espresso-need-libs"); - String dir = null != resource ? resource.getPath() : null; - JAVA_CLASSPATH = Stream.of("groovy.jar", "guava.jar", "shardingsphere-infra-expr-hotsopt.jar", "shardingsphere-infra-expr-spi.jar", "shardingsphere-infra-util.jar") - .map(each -> dir + File.separator + each).collect(Collectors.joining(":")); + String dir = null == resource ? null : resource.getPath(); + JAVA_CLASSPATH = dir + File.separator + "groovy.jar"; } @Override public String handlePlaceHolder(final String inlineExpression) { - try (Context context = createContext()) { - return createInlineExpressionParser(context).invokeMember("handlePlaceHolder", inlineExpression).asString(); - } + return inlineExpression.contains("$->{") ? 
inlineExpression.replaceAll("\\$->\\{", "\\$\\{") : inlineExpression; } @Override public List splitAndEvaluate(final String inlineExpression) { try (Context context = createContext()) { - List listProjection = createInlineExpressionParser(context).invokeMember("splitAndEvaluate", inlineExpression) - .as(new TypeLiteral>() { - }); - // org.graalvm.polyglot.Value#as only creates projections for classes in Truffle Context - return new ArrayList<>(listProjection); + return Strings.isNullOrEmpty(inlineExpression) ? Collections.emptyList() : flatten(evaluate(split(inlineExpression), context)); } } @Override public Closure evaluateClosure(final String inlineExpression) { - try (Context context = createContext()) { - return createInlineExpressionParser(context).invokeMember("evaluateClosure", inlineExpression).as(Closure.class); - } - } - - private Value createInlineExpressionParser(final Context context) { - return context.getBindings("java").getMember(HotspotInlineExpressionParser.class.getName()).newInstance(); + throw new UnsupportedOperationException("GraalVM Truffle's Espresso implementation cannot return an instance of `groovy.lang.Closure` to the Host JVM."); } private Context createContext() { // TODO https://github.com/oracle/graal/issues/4555 not yet closed - ShardingSpherePreconditions.checkNotNull(JAVA_HOME, () -> new RuntimeException("Failed to determine the system's environment variable JAVA_HOME!")); return Context.newBuilder() .allowAllAccess(true) - .option("java.Properties.org.graalvm.home", JAVA_HOME) - .option("java.MultiThreaded", Boolean.TRUE.toString()) + .option("java.Properties.org.graalvm.home", System.getenv("JAVA_HOME")) .option("java.Classpath", JAVA_CLASSPATH) .build(); } + private List split(final String inlineExpression) { + List result = new ArrayList<>(); + StringBuilder segment = new StringBuilder(); + int bracketsDepth = 0; + for (int i = 0; i < inlineExpression.length(); i++) { + char each = inlineExpression.charAt(i); + switch (each) 
{ + case SPLITTER: + if (bracketsDepth > 0) { + segment.append(each); + } else { + result.add(segment.toString().trim()); + segment.setLength(0); + } + break; + case '$': + if ('{' == inlineExpression.charAt(i + 1)) { + bracketsDepth++; + } + if ("->{".equals(inlineExpression.substring(i + 1, i + 4))) { + bracketsDepth++; + } + segment.append(each); + break; + case '}': + if (bracketsDepth > 0) { + bracketsDepth--; + } + segment.append(each); + break; + default: + segment.append(each); + break; + } + } + if (segment.length() > 0) { + result.add(segment.toString().trim()); + } + return result; + } + + private List evaluate(final List inlineExpressions, final Context context) { + List result = new ArrayList<>(inlineExpressions.size()); + for (String each : inlineExpressions) { + StringBuilder expression = new StringBuilder(handlePlaceHolder(each)); + if (!each.startsWith("\"")) { + expression.insert(0, '"'); + } + if (!each.endsWith("\"")) { + expression.append('"'); + } + result.add(evaluate(expression.toString(), context)); + } + return result; + } + + private Value evaluate(final String expression, final Context context) { + return context.getBindings("java") + .getMember(GroovyShell.class.getName()) + .newInstance() + .invokeMember("parse", expression) + .invokeMember("run"); + } + + private List flatten(final List segments) { + List result = new ArrayList<>(); + for (Value each : segments) { + if (!each.isString()) { + result.addAll(assemblyCartesianSegments(each)); + } else { + result.add(each.toString()); + } + } + return result; + } + + private List assemblyCartesianSegments(final Value segment) { + Set> cartesianValues = getCartesianValues(segment); + List result = new ArrayList<>(cartesianValues.size()); + for (List each : cartesianValues) { + result.add(assemblySegment(each, segment)); + } + return result; + } + + @SuppressWarnings("unchecked") + private Set> getCartesianValues(final Value segment) { + Object[] temp = 
segment.invokeMember("getValues").as(Object[].class); + List> result = new ArrayList<>(temp.length); + for (Object each : temp) { + if (null == each) { + continue; + } + if (each instanceof Collection) { + result.add(((Collection) each).stream().map(Object::toString).collect(Collectors.toCollection(LinkedHashSet::new))); + } else { + result.add(Sets.newHashSet(each.toString())); + } + } + return Sets.cartesianProduct(result); + } + + private String assemblySegment(final List cartesianValue, final Value segment) { + String[] temp = segment.invokeMember("getStrings").as(String[].class); + StringBuilder result = new StringBuilder(); + for (int i = 0; i < temp.length; i++) { + result.append(temp[i]); + if (i < cartesianValue.size()) { + result.append(cartesianValue.get(i)); + } + } + return result.toString(); + } + @Override public String getType() { return "ESPRESSO"; diff --git a/infra/expr/espresso/src/test/java/org/apache/shardingsphere/infra/expr/espresso/EspressoInlineExpressionParserTest.java b/infra/expr/espresso/src/test/java/org/apache/shardingsphere/infra/expr/espresso/EspressoInlineExpressionParserTest.java index b389c5a2f2d72..3f7999f5e4cad 100644 --- a/infra/expr/espresso/src/test/java/org/apache/shardingsphere/infra/expr/espresso/EspressoInlineExpressionParserTest.java +++ b/infra/expr/espresso/src/test/java/org/apache/shardingsphere/infra/expr/espresso/EspressoInlineExpressionParserTest.java @@ -119,8 +119,8 @@ void assertHandlePlaceHolder() { } /* - * TODO This method needs to avoid returning a groovy.lang.Closure class instance, and instead return the result of `Closure#call`. Because `org.graalvm.polyglot.Value#as` does not allow this type - * to be returned from the guest JVM. + * TODO This method needs to avoid returning a `groovy.lang.Closure` class instance, and instead return the result of `groovy.lang.Closure#call`. Because `org.graalvm.polyglot.Value#as` does not + * allow this type to be returned from the guest JVM. 
*/ @Test @Disabled("See java doc") diff --git a/infra/expr/pom.xml b/infra/expr/pom.xml index ee51962feb40d..14aa380d873ee 100644 --- a/infra/expr/pom.xml +++ b/infra/expr/pom.xml @@ -32,5 +32,6 @@ core hotsopt espresso + purelist diff --git a/features/sharding/plugin/nanoid/pom.xml b/infra/expr/purelist/pom.xml similarity index 78% rename from features/sharding/plugin/nanoid/pom.xml rename to infra/expr/purelist/pom.xml index e7f7fd6fd3387..b45ff489b3770 100644 --- a/features/sharding/plugin/nanoid/pom.xml +++ b/infra/expr/purelist/pom.xml @@ -20,27 +20,22 @@ 4.0.0 org.apache.shardingsphere - shardingsphere-sharding-plugin + shardingsphere-infra-expr 5.4.1-SNAPSHOT - shardingsphere-sharding-nanoid + shardingsphere-infra-expr-purelist ${project.artifactId} - - 2.0.0 - - org.apache.shardingsphere - shardingsphere-sharding-api + shardingsphere-infra-expr-spi ${project.version} - com.aventrix.jnanoid - jnanoid - ${jnanoid.version} + org.apache.groovy + groovy diff --git a/infra/expr/purelist/src/main/java/org/apache/shardingsphere/infra/expr/purelist/PureListInlineExpressionParser.java b/infra/expr/purelist/src/main/java/org/apache/shardingsphere/infra/expr/purelist/PureListInlineExpressionParser.java new file mode 100644 index 0000000000000..347b6d4f3e219 --- /dev/null +++ b/infra/expr/purelist/src/main/java/org/apache/shardingsphere/infra/expr/purelist/PureListInlineExpressionParser.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.infra.expr.purelist; + +import com.google.common.base.Strings; +import groovy.lang.Closure; +import org.apache.shardingsphere.infra.expr.spi.InlineExpressionParser; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +/** + * Pure List inline expression parser. + */ +public final class PureListInlineExpressionParser implements InlineExpressionParser { + + private static final char SPLITTER = ','; + + @Override + public String handlePlaceHolder(final String inlineExpression) { + return inlineExpression.contains("$->{") ? inlineExpression.replaceAll("\\$->\\{", "\\$\\{") : inlineExpression; + } + + @Override + public List splitAndEvaluate(final String inlineExpression) { + return Strings.isNullOrEmpty(inlineExpression) ? 
Collections.emptyList() : split(inlineExpression); + } + + @Override + public Closure evaluateClosure(final String inlineExpression) { + throw new UnsupportedOperationException("Groovy classes cannot be used directly within GraalVM Native Image."); + } + + private List split(final String inlineExpression) { + List result = new ArrayList<>(); + StringBuilder segment = new StringBuilder(); + for (int i = 0; i < inlineExpression.length(); i++) { + char each = inlineExpression.charAt(i); + if (each == SPLITTER) { + result.add(segment.toString().trim()); + segment.setLength(0); + } else { + segment.append(each); + } + } + if (segment.length() > 0) { + result.add(segment.toString().trim()); + } + return result; + } + + @Override + public String getType() { + return "PURELIST"; + } +} diff --git a/agent/core/src/main/resources/META-INF/services/ch.qos.logback.classic.spi.Configurator b/infra/expr/purelist/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.expr.spi.InlineExpressionParser similarity index 91% rename from agent/core/src/main/resources/META-INF/services/ch.qos.logback.classic.spi.Configurator rename to infra/expr/purelist/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.expr.spi.InlineExpressionParser index 8983dc323f80b..9f038f1b9fb55 100644 --- a/agent/core/src/main/resources/META-INF/services/ch.qos.logback.classic.spi.Configurator +++ b/infra/expr/purelist/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.expr.spi.InlineExpressionParser @@ -15,4 +15,4 @@ # limitations under the License. 
# -org.apache.shardingsphere.agent.core.log.config.LogbackConfiguration +org.apache.shardingsphere.infra.expr.purelist.PureListInlineExpressionParser diff --git a/infra/expr/purelist/src/test/java/org/apache/shardingsphere/infra/expr/purelist/PureListInlineExpressionParserTest.java b/infra/expr/purelist/src/test/java/org/apache/shardingsphere/infra/expr/purelist/PureListInlineExpressionParserTest.java new file mode 100644 index 0000000000000..bb8ebbf8855ce --- /dev/null +++ b/infra/expr/purelist/src/test/java/org/apache/shardingsphere/infra/expr/purelist/PureListInlineExpressionParserTest.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.infra.expr.purelist; + +import org.junit.jupiter.api.Test; + +import java.util.Collections; +import java.util.List; + +import static org.hamcrest.CoreMatchers.hasItems; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; + +class PureListInlineExpressionParserTest { + + @Test + void assertEvaluateForExpressionIsNull() { + List expected = new PureListInlineExpressionParser().splitAndEvaluate(null); + assertThat(expected, is(Collections.emptyList())); + } + + @Test + void assertEvaluateForSimpleString() { + List expected = new PureListInlineExpressionParser().splitAndEvaluate(" t_order_0, t_order_1 "); + assertThat(expected.size(), is(2)); + assertThat(expected, hasItems("t_order_0", "t_order_1")); + } + + @Test + void assertEvaluateForLong() { + StringBuilder expression = new StringBuilder(); + for (int i = 0; i < 1024; i++) { + expression.append("ds_"); + expression.append(i / 64); + expression.append(".t_user_"); + expression.append(i); + if (i != 1023) { + expression.append(","); + } + } + List expected = new PureListInlineExpressionParser().splitAndEvaluate(expression.toString()); + assertThat(expected.size(), is(1024)); + assertThat(expected, hasItems("ds_0.t_user_0", "ds_15.t_user_1023")); + } +} diff --git a/infra/merge/src/test/java/org/apache/shardingsphere/infra/merge/fixture/rule/DecoratorRuleFixture.java b/infra/merge/src/test/java/org/apache/shardingsphere/infra/merge/fixture/rule/DecoratorRuleFixture.java index 8873285157c8d..6e2b3cdaa8c7b 100644 --- a/infra/merge/src/test/java/org/apache/shardingsphere/infra/merge/fixture/rule/DecoratorRuleFixture.java +++ b/infra/merge/src/test/java/org/apache/shardingsphere/infra/merge/fixture/rule/DecoratorRuleFixture.java @@ -28,9 +28,4 @@ public final class DecoratorRuleFixture implements ShardingSphereRule { public RuleConfiguration getConfiguration() { return mock(RuleConfiguration.class); } - - @Override - public String 
getType() { - return DecoratorRuleFixture.class.getSimpleName(); - } } diff --git a/infra/merge/src/test/java/org/apache/shardingsphere/infra/merge/fixture/rule/MergerRuleFixture.java b/infra/merge/src/test/java/org/apache/shardingsphere/infra/merge/fixture/rule/MergerRuleFixture.java index de9916edddfbb..6d15a2d293bd7 100644 --- a/infra/merge/src/test/java/org/apache/shardingsphere/infra/merge/fixture/rule/MergerRuleFixture.java +++ b/infra/merge/src/test/java/org/apache/shardingsphere/infra/merge/fixture/rule/MergerRuleFixture.java @@ -28,9 +28,4 @@ public final class MergerRuleFixture implements ShardingSphereRule { public RuleConfiguration getConfiguration() { return mock(RuleConfiguration.class); } - - @Override - public String getType() { - return MergerRuleFixture.class.getSimpleName(); - } } diff --git a/infra/pom.xml b/infra/pom.xml index 8641830e9affe..9019fc66ef9a5 100644 --- a/infra/pom.xml +++ b/infra/pom.xml @@ -31,7 +31,7 @@ spi exception database - datasource + data-source-pool binder common context diff --git a/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/SQLRewriteEntry.java b/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/SQLRewriteEntry.java index 661389d4c3564..22933bf9d115f 100644 --- a/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/SQLRewriteEntry.java +++ b/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/SQLRewriteEntry.java @@ -22,6 +22,7 @@ import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.hint.HintValueContext; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.rewrite.context.SQLRewriteContext; import 
org.apache.shardingsphere.infra.rewrite.context.SQLRewriteContextDecorator; @@ -76,10 +77,10 @@ public SQLRewriteResult rewrite(final String sql, final List params, fin SQLRewriteContext sqlRewriteContext = createSQLRewriteContext(sql, params, sqlStatementContext, routeContext, connectionContext, hintValueContext); SQLTranslatorRule rule = globalRuleMetaData.getSingleRule(SQLTranslatorRule.class); DatabaseType protocolType = database.getProtocolType(); - Map storageTypes = database.getResourceMetaData().getStorageTypes(); + Map storageUnits = database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits(); return routeContext.getRouteUnits().isEmpty() - ? new GenericSQLRewriteEngine(rule, protocolType, storageTypes).rewrite(sqlRewriteContext) - : new RouteSQLRewriteEngine(rule, protocolType, storageTypes).rewrite(sqlRewriteContext, routeContext); + ? new GenericSQLRewriteEngine(rule, protocolType, storageUnits).rewrite(sqlRewriteContext) + : new RouteSQLRewriteEngine(rule, protocolType, storageUnits).rewrite(sqlRewriteContext, routeContext); } private SQLRewriteContext createSQLRewriteContext(final String sql, final List params, final SQLStatementContext sqlStatementContext, diff --git a/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/context/SQLRewriteContext.java b/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/context/SQLRewriteContext.java index 3ea33d8eddeab..7625e227500a1 100644 --- a/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/context/SQLRewriteContext.java +++ b/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/context/SQLRewriteContext.java @@ -69,12 +69,18 @@ public SQLRewriteContext(final ShardingSphereDatabase database, final SQLStateme if (!hintValueContext.isSkipSQLRewrite()) { addSQLTokenGenerators(new DefaultTokenGeneratorBuilder(sqlStatementContext).getSQLTokenGenerators()); } - parameterBuilder = sqlStatementContext instanceof InsertStatementContext && 
null == ((InsertStatementContext) sqlStatementContext).getInsertSelectContext() - ? new GroupedParameterBuilder( - ((InsertStatementContext) sqlStatementContext).getGroupedParameters(), ((InsertStatementContext) sqlStatementContext).getOnDuplicateKeyUpdateParameters()) + parameterBuilder = containsInsertValues(sqlStatementContext) + ? new GroupedParameterBuilder(((InsertStatementContext) sqlStatementContext).getGroupedParameters(), ((InsertStatementContext) sqlStatementContext).getOnDuplicateKeyUpdateParameters()) : new StandardParameterBuilder(params); } + private boolean containsInsertValues(final SQLStatementContext sqlStatementContext) { + if (!(sqlStatementContext instanceof InsertStatementContext)) { + return false; + } + return null == ((InsertStatementContext) sqlStatementContext).getInsertSelectContext(); + } + /** * Add SQL token generators. * diff --git a/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/engine/GenericSQLRewriteEngine.java b/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/engine/GenericSQLRewriteEngine.java index e7740f3c95f62..c66539eff4297 100644 --- a/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/engine/GenericSQLRewriteEngine.java +++ b/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/engine/GenericSQLRewriteEngine.java @@ -19,6 +19,7 @@ import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.rewrite.context.SQLRewriteContext; import org.apache.shardingsphere.infra.rewrite.engine.result.GenericSQLRewriteResult; import org.apache.shardingsphere.infra.rewrite.engine.result.SQLRewriteUnit; @@ -37,7 +38,7 @@ public final class GenericSQLRewriteEngine { private final DatabaseType protocolType; - private final Map storageTypes; + private final Map storageUnits; /** * Rewrite SQL and 
parameters. @@ -48,7 +49,7 @@ public final class GenericSQLRewriteEngine { public GenericSQLRewriteResult rewrite(final SQLRewriteContext sqlRewriteContext) { String sql = translatorRule.translate( new DefaultSQLBuilder(sqlRewriteContext).toSQL(), sqlRewriteContext.getSqlStatementContext().getSqlStatement(), protocolType, - storageTypes.isEmpty() ? protocolType : storageTypes.values().iterator().next()); + storageUnits.isEmpty() ? protocolType : storageUnits.values().iterator().next().getStorageType()); return new GenericSQLRewriteResult(new SQLRewriteUnit(sql, sqlRewriteContext.getParameterBuilder().getParameters())); } } diff --git a/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/engine/RouteSQLRewriteEngine.java b/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/engine/RouteSQLRewriteEngine.java index 527ca579b0be4..a7857c8697761 100644 --- a/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/engine/RouteSQLRewriteEngine.java +++ b/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/engine/RouteSQLRewriteEngine.java @@ -22,6 +22,7 @@ import org.apache.shardingsphere.infra.binder.context.statement.dml.SelectStatementContext; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.datanode.DataNode; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.rewrite.context.SQLRewriteContext; import org.apache.shardingsphere.infra.rewrite.engine.result.RouteSQLRewriteResult; import org.apache.shardingsphere.infra.rewrite.engine.result.SQLRewriteUnit; @@ -53,7 +54,7 @@ public final class RouteSQLRewriteEngine { private final DatabaseType protocolType; - private final Map storageTypes; + private final Map storageUnits; /** * Rewrite SQL and parameters. 
@@ -156,7 +157,7 @@ private boolean isInSameDataNode(final Collection dataNodes, final Rou private Map translate(final SQLStatement sqlStatement, final Map sqlRewriteUnits) { Map result = new LinkedHashMap<>(sqlRewriteUnits.size(), 1F); for (Entry entry : sqlRewriteUnits.entrySet()) { - DatabaseType storageType = storageTypes.get(entry.getKey().getDataSourceMapper().getActualName()); + DatabaseType storageType = storageUnits.get(entry.getKey().getDataSourceMapper().getActualName()).getStorageType(); String sql = translatorRule.translate(entry.getValue().getSql(), sqlStatement, protocolType, storageType); SQLRewriteUnit sqlRewriteUnit = new SQLRewriteUnit(sql, entry.getValue().getParameters()); result.put(entry.getKey(), sqlRewriteUnit); diff --git a/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/sql/token/generator/SQLTokenGenerators.java b/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/sql/token/generator/SQLTokenGenerators.java index 7be04c6e527f5..342a479f0131c 100644 --- a/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/sql/token/generator/SQLTokenGenerators.java +++ b/infra/rewrite/src/main/java/org/apache/shardingsphere/infra/rewrite/sql/token/generator/SQLTokenGenerators.java @@ -18,16 +18,15 @@ package org.apache.shardingsphere.infra.rewrite.sql.token.generator; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; -import org.apache.shardingsphere.infra.session.connection.ConnectionContext; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.rewrite.sql.token.generator.aware.ConnectionContextAware; import org.apache.shardingsphere.infra.rewrite.sql.token.generator.aware.ParametersAware; import org.apache.shardingsphere.infra.rewrite.sql.token.generator.aware.PreviousSQLTokensAware; import org.apache.shardingsphere.infra.rewrite.sql.token.generator.aware.SchemaMetaDataAware; import 
org.apache.shardingsphere.infra.rewrite.sql.token.pojo.SQLToken; +import org.apache.shardingsphere.infra.session.connection.ConnectionContext; import java.util.Collection; -import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -37,7 +36,7 @@ */ public final class SQLTokenGenerators { - private final Map, SQLTokenGenerator> generators = new LinkedHashMap<>(); + private final Collection generators = new LinkedList<>(); /** * Add all SQL token generators. @@ -45,9 +44,7 @@ public final class SQLTokenGenerators { * @param sqlTokenGenerators SQL token generators */ public void addAll(final Collection sqlTokenGenerators) { - for (SQLTokenGenerator each : sqlTokenGenerators) { - generators.putIfAbsent(each.getClass(), each); - } + generators.addAll(sqlTokenGenerators); } /** @@ -64,7 +61,7 @@ public void addAll(final Collection sqlTokenGenerators) { public List generateSQLTokens(final String databaseName, final Map schemas, final SQLStatementContext sqlStatementContext, final List params, final ConnectionContext connectionContext) { List result = new LinkedList<>(); - for (SQLTokenGenerator each : generators.values()) { + for (SQLTokenGenerator each : generators) { setUpSQLTokenGenerator(each, params, databaseName, schemas, result, connectionContext); if (each instanceof OptionalSQLTokenGenerator) { SQLToken sqlToken = ((OptionalSQLTokenGenerator) each).generateSQLToken(sqlStatementContext); diff --git a/infra/rewrite/src/test/java/org/apache/shardingsphere/infra/rewrite/SQLRewriteEntryTest.java b/infra/rewrite/src/test/java/org/apache/shardingsphere/infra/rewrite/SQLRewriteEntryTest.java index e7318c161106a..c6eae555fee61 100644 --- a/infra/rewrite/src/test/java/org/apache/shardingsphere/infra/rewrite/SQLRewriteEntryTest.java +++ b/infra/rewrite/src/test/java/org/apache/shardingsphere/infra/rewrite/SQLRewriteEntryTest.java @@ -24,6 +24,7 @@ import org.apache.shardingsphere.infra.hint.HintValueContext; import 
org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.rewrite.engine.result.GenericSQLRewriteResult; @@ -45,6 +46,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -52,7 +54,7 @@ class SQLRewriteEntryTest { @Test void assertRewriteForGenericSQLRewriteResult() { - ShardingSphereDatabase database = new ShardingSphereDatabase(DefaultDatabase.LOGIC_NAME, TypedSPILoader.getService(DatabaseType.class, "H2"), mockResource(), + ShardingSphereDatabase database = new ShardingSphereDatabase(DefaultDatabase.LOGIC_NAME, TypedSPILoader.getService(DatabaseType.class, "H2"), mockResourceMetaData(), mock(RuleMetaData.class), Collections.singletonMap("test", mock(ShardingSphereSchema.class))); SQLRewriteEntry sqlRewriteEntry = new SQLRewriteEntry( database, new RuleMetaData(Collections.singleton(new SQLTranslatorRule(new SQLTranslatorRuleConfiguration()))), new ConfigurationProperties(new Properties())); @@ -65,7 +67,7 @@ void assertRewriteForGenericSQLRewriteResult() { @Test void assertRewriteForRouteSQLRewriteResult() { - ShardingSphereDatabase database = new ShardingSphereDatabase(DefaultDatabase.LOGIC_NAME, TypedSPILoader.getService(DatabaseType.class, "H2"), mockResource(), + ShardingSphereDatabase database = new ShardingSphereDatabase(DefaultDatabase.LOGIC_NAME, TypedSPILoader.getService(DatabaseType.class, "H2"), mockResourceMetaData(), mock(RuleMetaData.class), Collections.singletonMap("test", 
mock(ShardingSphereSchema.class))); SQLRewriteEntry sqlRewriteEntry = new SQLRewriteEntry( database, new RuleMetaData(Collections.singleton(mock(SQLTranslatorRule.class))), new ConfigurationProperties(new Properties())); @@ -80,12 +82,16 @@ void assertRewriteForRouteSQLRewriteResult() { assertThat(sqlRewriteResult.getSqlRewriteUnits().size(), is(2)); } - private ResourceMetaData mockResource() { - ResourceMetaData result = mock(ResourceMetaData.class); - Map databaseTypes = new LinkedHashMap<>(2, 1F); - databaseTypes.put("ds_0", TypedSPILoader.getService(DatabaseType.class, "H2")); - databaseTypes.put("ds_1", TypedSPILoader.getService(DatabaseType.class, "MySQL")); - when(result.getStorageTypes()).thenReturn(databaseTypes); + private ResourceMetaData mockResourceMetaData() { + Map storageUnits = new LinkedHashMap<>(2, 1F); + StorageUnit storageUnit1 = mock(StorageUnit.class); + when(storageUnit1.getStorageType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "H2")); + StorageUnit storageUnit2 = mock(StorageUnit.class); + when(storageUnit2.getStorageType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "MySQL")); + storageUnits.put("ds_0", storageUnit1); + storageUnits.put("ds_1", storageUnit2); + ResourceMetaData result = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS); + when(result.getStorageUnitMetaData().getStorageUnits()).thenReturn(storageUnits); return result; } } diff --git a/infra/rewrite/src/test/java/org/apache/shardingsphere/infra/rewrite/engine/GenericSQLRewriteEngineTest.java b/infra/rewrite/src/test/java/org/apache/shardingsphere/infra/rewrite/engine/GenericSQLRewriteEngineTest.java index 1ecfdcee6d7e2..ee5dd4bfea48d 100644 --- a/infra/rewrite/src/test/java/org/apache/shardingsphere/infra/rewrite/engine/GenericSQLRewriteEngineTest.java +++ b/infra/rewrite/src/test/java/org/apache/shardingsphere/infra/rewrite/engine/GenericSQLRewriteEngineTest.java @@ -22,6 +22,7 @@ import 
org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.hint.HintValueContext; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.rewrite.context.SQLRewriteContext; import org.apache.shardingsphere.infra.rewrite.engine.result.GenericSQLRewriteResult; @@ -43,7 +44,7 @@ class GenericSQLRewriteEngineTest { void assertRewrite() { DatabaseType databaseType = mock(DatabaseType.class); SQLTranslatorRule rule = new SQLTranslatorRule(new SQLTranslatorRuleConfiguration()); - GenericSQLRewriteResult actual = new GenericSQLRewriteEngine(rule, databaseType, Collections.singletonMap("ds_0", databaseType)) + GenericSQLRewriteResult actual = new GenericSQLRewriteEngine(rule, databaseType, Collections.singletonMap("ds_0", mockStorageUnit(databaseType))) .rewrite(new SQLRewriteContext(mockDatabase(), mock(CommonSQLStatementContext.class), "SELECT 1", Collections.emptyList(), mock(ConnectionContext.class), new HintValueContext())); assertThat(actual.getSqlRewriteUnit().getSql(), is("SELECT 1")); @@ -60,6 +61,12 @@ void assertRewriteStorageTypeIsEmpty() { assertThat(actual.getSqlRewriteUnit().getParameters(), is(Collections.emptyList())); } + private StorageUnit mockStorageUnit(final DatabaseType databaseType) { + StorageUnit result = mock(StorageUnit.class); + when(result.getStorageType()).thenReturn(databaseType); + return result; + } + private ShardingSphereDatabase mockDatabase() { ShardingSphereDatabase result = mock(ShardingSphereDatabase.class); when(result.getName()).thenReturn(DefaultDatabase.LOGIC_NAME); diff --git a/infra/rewrite/src/test/java/org/apache/shardingsphere/infra/rewrite/engine/RouteSQLRewriteEngineTest.java 
b/infra/rewrite/src/test/java/org/apache/shardingsphere/infra/rewrite/engine/RouteSQLRewriteEngineTest.java index 29b0192852d73..9cb4985b2be8d 100644 --- a/infra/rewrite/src/test/java/org/apache/shardingsphere/infra/rewrite/engine/RouteSQLRewriteEngineTest.java +++ b/infra/rewrite/src/test/java/org/apache/shardingsphere/infra/rewrite/engine/RouteSQLRewriteEngineTest.java @@ -26,6 +26,7 @@ import org.apache.shardingsphere.infra.datanode.DataNode; import org.apache.shardingsphere.infra.hint.HintValueContext; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.rewrite.context.SQLRewriteContext; import org.apache.shardingsphere.infra.rewrite.engine.result.RouteSQLRewriteResult; @@ -39,6 +40,7 @@ import java.util.Arrays; import java.util.Collections; +import java.util.Map; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; @@ -57,8 +59,8 @@ void assertRewriteWithStandardParameterBuilder() { RouteContext routeContext = new RouteContext(); routeContext.getRouteUnits().add(routeUnit); DatabaseType databaseType = mock(DatabaseType.class); - RouteSQLRewriteResult actual = new RouteSQLRewriteEngine(new SQLTranslatorRule(new SQLTranslatorRuleConfiguration()), databaseType, Collections.singletonMap("ds_0", databaseType)) - .rewrite(sqlRewriteContext, routeContext); + RouteSQLRewriteResult actual = new RouteSQLRewriteEngine( + new SQLTranslatorRule(new SQLTranslatorRuleConfiguration()), databaseType, mockStorageUnits(databaseType)).rewrite(sqlRewriteContext, routeContext); assertThat(actual.getSqlRewriteUnits().size(), is(1)); assertThat(actual.getSqlRewriteUnits().get(routeUnit).getSql(), is("SELECT ?")); assertThat(actual.getSqlRewriteUnits().get(routeUnit).getParameters(), 
is(Collections.singletonList(1))); @@ -76,8 +78,8 @@ void assertRewriteWithStandardParameterBuilderWhenNeedAggregateRewrite() { routeContext.getRouteUnits().add(firstRouteUnit); routeContext.getRouteUnits().add(secondRouteUnit); DatabaseType databaseType = mock(DatabaseType.class); - RouteSQLRewriteResult actual = new RouteSQLRewriteEngine(new SQLTranslatorRule(new SQLTranslatorRuleConfiguration()), databaseType, Collections.singletonMap("ds_0", databaseType)) - .rewrite(sqlRewriteContext, routeContext); + RouteSQLRewriteResult actual = new RouteSQLRewriteEngine( + new SQLTranslatorRule(new SQLTranslatorRuleConfiguration()), databaseType, mockStorageUnits(databaseType)).rewrite(sqlRewriteContext, routeContext); assertThat(actual.getSqlRewriteUnits().size(), is(1)); assertThat(actual.getSqlRewriteUnits().get(firstRouteUnit).getSql(), is("SELECT ? UNION ALL SELECT ?")); assertThat(actual.getSqlRewriteUnits().get(firstRouteUnit).getParameters(), is(Arrays.asList(1, 1))); @@ -87,15 +89,17 @@ void assertRewriteWithStandardParameterBuilderWhenNeedAggregateRewrite() { void assertRewriteWithGroupedParameterBuilderForBroadcast() { InsertStatementContext statementContext = mock(InsertStatementContext.class, RETURNS_DEEP_STUBS); when(((TableAvailable) statementContext).getTablesContext().getDatabaseName().isPresent()).thenReturn(false); + when(statementContext.getInsertSelectContext()).thenReturn(null); when(statementContext.getGroupedParameters()).thenReturn(Collections.singletonList(Collections.singletonList(1))); + when(statementContext.getOnDuplicateKeyUpdateParameters()).thenReturn(Collections.emptyList()); SQLRewriteContext sqlRewriteContext = new SQLRewriteContext(mockDatabase(), statementContext, "INSERT INTO tbl VALUES (?)", Collections.singletonList(1), mock(ConnectionContext.class), new HintValueContext()); RouteUnit routeUnit = new RouteUnit(new RouteMapper("ds", "ds_0"), Collections.singletonList(new RouteMapper("tbl", "tbl_0"))); RouteContext routeContext = new 
RouteContext(); routeContext.getRouteUnits().add(routeUnit); DatabaseType databaseType = mock(DatabaseType.class); - RouteSQLRewriteResult actual = new RouteSQLRewriteEngine(new SQLTranslatorRule(new SQLTranslatorRuleConfiguration()), databaseType, Collections.singletonMap("ds_0", databaseType)) - .rewrite(sqlRewriteContext, routeContext); + RouteSQLRewriteResult actual = new RouteSQLRewriteEngine( + new SQLTranslatorRule(new SQLTranslatorRuleConfiguration()), databaseType, mockStorageUnits(databaseType)).rewrite(sqlRewriteContext, routeContext); assertThat(actual.getSqlRewriteUnits().size(), is(1)); assertThat(actual.getSqlRewriteUnits().get(routeUnit).getSql(), is("INSERT INTO tbl VALUES (?)")); assertThat(actual.getSqlRewriteUnits().get(routeUnit).getParameters(), is(Collections.singletonList(1))); @@ -105,7 +109,9 @@ void assertRewriteWithGroupedParameterBuilderForBroadcast() { void assertRewriteWithGroupedParameterBuilderForRouteWithSameDataNode() { InsertStatementContext statementContext = mock(InsertStatementContext.class, RETURNS_DEEP_STUBS); when(((TableAvailable) statementContext).getTablesContext().getDatabaseName().isPresent()).thenReturn(false); + when(statementContext.getInsertSelectContext()).thenReturn(null); when(statementContext.getGroupedParameters()).thenReturn(Collections.singletonList(Collections.singletonList(1))); + when(statementContext.getOnDuplicateKeyUpdateParameters()).thenReturn(Collections.emptyList()); SQLRewriteContext sqlRewriteContext = new SQLRewriteContext(mockDatabase(), statementContext, "INSERT INTO tbl VALUES (?)", Collections.singletonList(1), mock(ConnectionContext.class), new HintValueContext()); RouteUnit routeUnit = new RouteUnit(new RouteMapper("ds", "ds_0"), Collections.singletonList(new RouteMapper("tbl", "tbl_0"))); @@ -114,8 +120,8 @@ void assertRewriteWithGroupedParameterBuilderForRouteWithSameDataNode() { // TODO check why data node is "ds.tbl_0", not "ds_0.tbl_0" 
routeContext.getOriginalDataNodes().add(Collections.singletonList(new DataNode("ds.tbl_0"))); DatabaseType databaseType = mock(DatabaseType.class); - RouteSQLRewriteResult actual = new RouteSQLRewriteEngine(new SQLTranslatorRule(new SQLTranslatorRuleConfiguration()), databaseType, Collections.singletonMap("ds_0", databaseType)) - .rewrite(sqlRewriteContext, routeContext); + RouteSQLRewriteResult actual = new RouteSQLRewriteEngine( + new SQLTranslatorRule(new SQLTranslatorRuleConfiguration()), databaseType, mockStorageUnits(databaseType)).rewrite(sqlRewriteContext, routeContext); assertThat(actual.getSqlRewriteUnits().size(), is(1)); assertThat(actual.getSqlRewriteUnits().get(routeUnit).getSql(), is("INSERT INTO tbl VALUES (?)")); assertThat(actual.getSqlRewriteUnits().get(routeUnit).getParameters(), is(Collections.singletonList(1))); @@ -125,7 +131,9 @@ void assertRewriteWithGroupedParameterBuilderForRouteWithSameDataNode() { void assertRewriteWithGroupedParameterBuilderForRouteWithEmptyDataNode() { InsertStatementContext statementContext = mock(InsertStatementContext.class, RETURNS_DEEP_STUBS); when(((TableAvailable) statementContext).getTablesContext().getDatabaseName().isPresent()).thenReturn(false); + when(statementContext.getInsertSelectContext()).thenReturn(null); when(statementContext.getGroupedParameters()).thenReturn(Collections.singletonList(Collections.singletonList(1))); + when(statementContext.getOnDuplicateKeyUpdateParameters()).thenReturn(Collections.emptyList()); SQLRewriteContext sqlRewriteContext = new SQLRewriteContext(mockDatabase(), statementContext, "INSERT INTO tbl VALUES (?)", Collections.singletonList(1), mock(ConnectionContext.class), new HintValueContext()); RouteUnit routeUnit = new RouteUnit(new RouteMapper("ds", "ds_0"), Collections.singletonList(new RouteMapper("tbl", "tbl_0"))); @@ -133,8 +141,8 @@ void assertRewriteWithGroupedParameterBuilderForRouteWithEmptyDataNode() { routeContext.getRouteUnits().add(routeUnit); 
routeContext.getOriginalDataNodes().add(Collections.emptyList()); DatabaseType databaseType = mock(DatabaseType.class); - RouteSQLRewriteResult actual = new RouteSQLRewriteEngine(new SQLTranslatorRule(new SQLTranslatorRuleConfiguration()), databaseType, Collections.singletonMap("ds_0", databaseType)) - .rewrite(sqlRewriteContext, routeContext); + RouteSQLRewriteResult actual = new RouteSQLRewriteEngine( + new SQLTranslatorRule(new SQLTranslatorRuleConfiguration()), databaseType, mockStorageUnits(databaseType)).rewrite(sqlRewriteContext, routeContext); assertThat(actual.getSqlRewriteUnits().size(), is(1)); assertThat(actual.getSqlRewriteUnits().get(routeUnit).getSql(), is("INSERT INTO tbl VALUES (?)")); assertThat(actual.getSqlRewriteUnits().get(routeUnit).getParameters(), is(Collections.singletonList(1))); @@ -154,8 +162,8 @@ void assertRewriteWithGroupedParameterBuilderForRouteWithNotSameDataNode() { routeContext.getRouteUnits().add(routeUnit); routeContext.getOriginalDataNodes().add(Collections.singletonList(new DataNode("ds_1.tbl_1"))); DatabaseType databaseType = mock(DatabaseType.class); - RouteSQLRewriteResult actual = new RouteSQLRewriteEngine(new SQLTranslatorRule(new SQLTranslatorRuleConfiguration()), databaseType, Collections.singletonMap("ds_0", databaseType)) - .rewrite(sqlRewriteContext, routeContext); + RouteSQLRewriteResult actual = new RouteSQLRewriteEngine( + new SQLTranslatorRule(new SQLTranslatorRuleConfiguration()), databaseType, mockStorageUnits(databaseType)).rewrite(sqlRewriteContext, routeContext); assertThat(actual.getSqlRewriteUnits().size(), is(1)); assertThat(actual.getSqlRewriteUnits().get(routeUnit).getSql(), is("INSERT INTO tbl VALUES (?)")); assertTrue(actual.getSqlRewriteUnits().get(routeUnit).getParameters().isEmpty()); @@ -167,4 +175,10 @@ private ShardingSphereDatabase mockDatabase() { when(result.getSchemas()).thenReturn(Collections.singletonMap("test", mock(ShardingSphereSchema.class))); return result; } + + private Map 
mockStorageUnits(final DatabaseType databaseType) { + StorageUnit result = mock(StorageUnit.class); + when(result.getStorageType()).thenReturn(databaseType); + return Collections.singletonMap("ds_0", result); + } } diff --git a/infra/route/src/main/java/org/apache/shardingsphere/infra/route/engine/impl/AllSQLRouteExecutor.java b/infra/route/src/main/java/org/apache/shardingsphere/infra/route/engine/impl/AllSQLRouteExecutor.java index ad5381e5689a3..cc22cbd17fcf5 100644 --- a/infra/route/src/main/java/org/apache/shardingsphere/infra/route/engine/impl/AllSQLRouteExecutor.java +++ b/infra/route/src/main/java/org/apache/shardingsphere/infra/route/engine/impl/AllSQLRouteExecutor.java @@ -36,7 +36,7 @@ public final class AllSQLRouteExecutor implements SQLRouteExecutor { @Override public RouteContext route(final ConnectionContext connectionContext, final QueryContext queryContext, final RuleMetaData globalRuleMetaData, final ShardingSphereDatabase database) { RouteContext result = new RouteContext(); - for (String each : database.getResourceMetaData().getDataSources().keySet()) { + for (String each : database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().keySet()) { result.getRouteUnits().add(new RouteUnit(new RouteMapper(each, each), Collections.emptyList())); } return result; diff --git a/infra/route/src/main/java/org/apache/shardingsphere/infra/route/engine/impl/PartialSQLRouteExecutor.java b/infra/route/src/main/java/org/apache/shardingsphere/infra/route/engine/impl/PartialSQLRouteExecutor.java index e755297f9cf24..27df0a9d3b4fe 100644 --- a/infra/route/src/main/java/org/apache/shardingsphere/infra/route/engine/impl/PartialSQLRouteExecutor.java +++ b/infra/route/src/main/java/org/apache/shardingsphere/infra/route/engine/impl/PartialSQLRouteExecutor.java @@ -22,6 +22,7 @@ import org.apache.shardingsphere.infra.hint.HintValueContext; import org.apache.shardingsphere.infra.hint.SQLHintDataSourceNotExistsException; import 
org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.route.SQLRouter; import org.apache.shardingsphere.infra.route.context.RouteContext; @@ -33,7 +34,6 @@ import org.apache.shardingsphere.infra.session.query.QueryContext; import org.apache.shardingsphere.infra.spi.type.ordered.OrderedSPILoader; -import javax.sql.DataSource; import java.util.Collection; import java.util.Collections; import java.util.Map; @@ -59,7 +59,7 @@ public PartialSQLRouteExecutor(final Collection rules, final @SuppressWarnings({"unchecked", "rawtypes"}) public RouteContext route(final ConnectionContext connectionContext, final QueryContext queryContext, final RuleMetaData globalRuleMetaData, final ShardingSphereDatabase database) { RouteContext result = new RouteContext(); - Optional dataSourceName = findDataSourceByHint(queryContext.getHintValueContext(), database.getResourceMetaData().getDataSources()); + Optional dataSourceName = findDataSourceByHint(queryContext.getHintValueContext(), database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()); if (dataSourceName.isPresent()) { result.getRouteUnits().add(new RouteUnit(new RouteMapper(dataSourceName.get(), dataSourceName.get()), Collections.emptyList())); return result; @@ -71,21 +71,16 @@ public RouteContext route(final ConnectionContext connectionContext, final Query entry.getValue().decorateRouteContext(result, queryContext, database, entry.getKey(), props, connectionContext); } } - if (result.getRouteUnits().isEmpty() && 1 == database.getResourceMetaData().getDataSources().size()) { - String singleDataSourceName = database.getResourceMetaData().getDataSources().keySet().iterator().next(); + if (result.getRouteUnits().isEmpty() && 1 == 
database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().size()) { + String singleDataSourceName = database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().keySet().iterator().next(); result.getRouteUnits().add(new RouteUnit(new RouteMapper(singleDataSourceName, singleDataSourceName), Collections.emptyList())); } return result; } - private Optional findDataSourceByHint(final HintValueContext hintValueContext, final Map dataSources) { - Optional result; - if (HintManager.isInstantiated() && HintManager.getDataSourceName().isPresent()) { - result = HintManager.getDataSourceName(); - } else { - result = hintValueContext.findHintDataSourceName(); - } - if (result.isPresent() && !dataSources.containsKey(result.get())) { + private Optional findDataSourceByHint(final HintValueContext hintValueContext, final Map storageUnits) { + Optional result = HintManager.isInstantiated() && HintManager.getDataSourceName().isPresent() ? HintManager.getDataSourceName() : hintValueContext.findHintDataSourceName(); + if (result.isPresent() && !storageUnits.containsKey(result.get())) { throw new SQLHintDataSourceNotExistsException(result.get()); } return result; diff --git a/infra/route/src/test/java/org/apache/shardingsphere/infra/route/engine/AllSQLRouteExecutorTest.java b/infra/route/src/test/java/org/apache/shardingsphere/infra/route/engine/AllSQLRouteExecutorTest.java index 00c213bd5b3b5..cce673b123904 100644 --- a/infra/route/src/test/java/org/apache/shardingsphere/infra/route/engine/AllSQLRouteExecutorTest.java +++ b/infra/route/src/test/java/org/apache/shardingsphere/infra/route/engine/AllSQLRouteExecutorTest.java @@ -42,7 +42,7 @@ class AllSQLRouteExecutorTest { void assertRouteSuccess() { String name = "test"; ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); - when(database.getResourceMetaData().getDataSources().keySet()).thenReturn(Stream.of(name).collect(Collectors.toSet())); + 
when(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().keySet()).thenReturn(Stream.of(name).collect(Collectors.toSet())); AllSQLRouteExecutor allSQLRouteExecutor = new AllSQLRouteExecutor(); RouteContext actual = allSQLRouteExecutor.route(new ConnectionContext(), mock(QueryContext.class), mock(RuleMetaData.class), database); assertThat(actual.getRouteUnits().size(), is(1)); diff --git a/infra/route/src/test/java/org/apache/shardingsphere/infra/route/engine/impl/PartialSQLRouteExecutorTest.java b/infra/route/src/test/java/org/apache/shardingsphere/infra/route/engine/impl/PartialSQLRouteExecutorTest.java index 03f58de8a9e5d..66ce7718922ff 100644 --- a/infra/route/src/test/java/org/apache/shardingsphere/infra/route/engine/impl/PartialSQLRouteExecutorTest.java +++ b/infra/route/src/test/java/org/apache/shardingsphere/infra/route/engine/impl/PartialSQLRouteExecutorTest.java @@ -23,6 +23,7 @@ import org.apache.shardingsphere.infra.hint.HintValueContext; import org.apache.shardingsphere.infra.hint.SQLHintDataSourceNotExistsException; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.route.context.RouteContext; import org.apache.shardingsphere.infra.session.connection.ConnectionContext; @@ -34,7 +35,6 @@ import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; -import javax.sql.DataSource; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -65,10 +65,10 @@ class PartialSQLRouteExecutorTest { @BeforeEach void setup() { - Map dataSourceMap = new HashMap<>(); - dataSourceMap.put("ds_0", null); - dataSourceMap.put("ds_1", null); - when(database.getResourceMetaData().getDataSources()).thenReturn(dataSourceMap); + Map storageUnits = new HashMap<>(); + storageUnits.put("ds_0", 
null); + storageUnits.put("ds_1", null); + when(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(storageUnits); } @Test diff --git a/infra/route/src/test/java/org/apache/shardingsphere/infra/route/fixture/rule/RouteFailureRuleFixture.java b/infra/route/src/test/java/org/apache/shardingsphere/infra/route/fixture/rule/RouteFailureRuleFixture.java index 123c1f4b38429..3e0c24fea42ac 100644 --- a/infra/route/src/test/java/org/apache/shardingsphere/infra/route/fixture/rule/RouteFailureRuleFixture.java +++ b/infra/route/src/test/java/org/apache/shardingsphere/infra/route/fixture/rule/RouteFailureRuleFixture.java @@ -28,9 +28,4 @@ public final class RouteFailureRuleFixture implements ShardingSphereRule { public RuleConfiguration getConfiguration() { return mock(RuleConfiguration.class); } - - @Override - public String getType() { - return RouteFailureRuleFixture.class.getSimpleName(); - } } diff --git a/infra/route/src/test/java/org/apache/shardingsphere/infra/route/fixture/rule/RouteRuleFixture.java b/infra/route/src/test/java/org/apache/shardingsphere/infra/route/fixture/rule/RouteRuleFixture.java index 5b7552395be73..edb1c534dcf4f 100644 --- a/infra/route/src/test/java/org/apache/shardingsphere/infra/route/fixture/rule/RouteRuleFixture.java +++ b/infra/route/src/test/java/org/apache/shardingsphere/infra/route/fixture/rule/RouteRuleFixture.java @@ -28,9 +28,4 @@ public final class RouteRuleFixture implements ShardingSphereRule { public RuleConfiguration getConfiguration() { return mock(RuleConfiguration.class); } - - @Override - public String getType() { - return RouteRuleFixture.class.getSimpleName(); - } } diff --git a/infra/session/src/main/java/org/apache/shardingsphere/infra/session/connection/ConnectionContext.java b/infra/session/src/main/java/org/apache/shardingsphere/infra/session/connection/ConnectionContext.java index 2e5c7fc6b29ef..02692a8a04350 100644 --- 
a/infra/session/src/main/java/org/apache/shardingsphere/infra/session/connection/ConnectionContext.java +++ b/infra/session/src/main/java/org/apache/shardingsphere/infra/session/connection/ConnectionContext.java @@ -27,6 +27,7 @@ import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.Optional; /** @@ -43,6 +44,8 @@ public final class ConnectionContext implements AutoCloseable { @Getter(AccessLevel.NONE) private final UsedDataSourceProvider usedDataSourceProvider; + private String databaseName; + @Setter private String trafficInstanceId; @@ -56,7 +59,11 @@ public ConnectionContext() { * @return used data source names */ public Collection getUsedDataSourceNames() { - return usedDataSourceProvider.getNames(); + Collection result = new HashSet<>(usedDataSourceProvider.getNames().size(), 1L); + for (String each : usedDataSourceProvider.getNames()) { + result.add(each.contains(".") ? each.split("\\.")[1] : each); + } + return result; } /** @@ -82,6 +89,26 @@ public void clearTransactionConnectionContext() { transactionContext.close(); } + /** + * Set current database name. + * + * @param databaseName database name + */ + public void setCurrentDatabase(final String databaseName) { + if (null != databaseName && !databaseName.equals(this.databaseName)) { + this.databaseName = databaseName; + } + } + + /** + * Get database name. 
+ * + * @return database name + */ + public Optional getDatabaseName() { + return Optional.ofNullable(databaseName); + } + @Override public void close() { trafficInstanceId = null; diff --git a/infra/util/src/main/java/org/apache/shardingsphere/infra/util/json/JsonUtils.java b/infra/util/src/main/java/org/apache/shardingsphere/infra/util/json/JsonUtils.java index c230a8693493f..4f45b9376725d 100644 --- a/infra/util/src/main/java/org/apache/shardingsphere/infra/util/json/JsonUtils.java +++ b/infra/util/src/main/java/org/apache/shardingsphere/infra/util/json/JsonUtils.java @@ -19,6 +19,7 @@ import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; @@ -50,26 +51,39 @@ private static ObjectMapper initDefaultMapper() { } /** - * Parse data to json string. + * Serialize object as json string. * - * @param data data + * @param obj object * @return json string */ @SneakyThrows(JsonProcessingException.class) - public static String toJsonString(final Object data) { - return MAPPER.writeValueAsString(data); + public static String toJsonString(final Object obj) { + return MAPPER.writeValueAsString(obj); } /** - * Deserialize to Object from json string. + * Deserialize from json string to object. 
* * @param value json string - * @param clazz target Object - * @param the type of return Object data - * @return target Object data + * @param clazz target object type + * @param the type of target object + * @return object */ @SneakyThrows(JsonProcessingException.class) - public static T readValue(final String value, final Class clazz) { + public static T fromJsonString(final String value, final Class clazz) { return MAPPER.readValue(value, clazz); } + + /** + * Deserialize from json string to object. + * + * @param value json string + * @param typeReference target object type reference + * @param the type of target object + * @return object + */ + @SneakyThrows(JsonProcessingException.class) + public static T fromJsonString(final String value, final TypeReference typeReference) { + return MAPPER.readValue(value, typeReference); + } } diff --git a/infra/util/src/main/java/org/apache/shardingsphere/infra/util/reflection/ReflectionUtils.java b/infra/util/src/main/java/org/apache/shardingsphere/infra/util/reflection/ReflectionUtils.java index de338d0bc5b91..c99d85ede5c58 100644 --- a/infra/util/src/main/java/org/apache/shardingsphere/infra/util/reflection/ReflectionUtils.java +++ b/infra/util/src/main/java/org/apache/shardingsphere/infra/util/reflection/ReflectionUtils.java @@ -17,6 +17,7 @@ package org.apache.shardingsphere.infra.util.reflection; +import com.google.common.base.CaseFormat; import lombok.AccessLevel; import lombok.NoArgsConstructor; import lombok.SneakyThrows; @@ -31,6 +32,8 @@ @NoArgsConstructor(access = AccessLevel.PRIVATE) public final class ReflectionUtils { + private static final String GETTER_PREFIX = "get"; + /** * Get field value. * @@ -134,4 +137,35 @@ public static T invokeMethod(final Method method, final Object target, final } return result; } + + /** + * Get field value by get method. 
+ * + * @param target target + * @param fieldName field name + * @param type of field value + * @return field value + */ + public static Optional getFieldValueByGetMethod(final Object target, final String fieldName) { + String getterName = GETTER_PREFIX + CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_CAMEL, fieldName); + final Optional method = findMethod(target.getClass(), getterName); + if (method.isPresent()) { + T value = invokeMethod(method.get(), target); + return Optional.ofNullable(value); + } else { + return Optional.empty(); + } + } + + private static Optional findMethod(final Class clazz, final String methodName, final Class... parameterTypes) { + try { + return Optional.of(clazz.getMethod(methodName, parameterTypes)); + } catch (final NoSuchMethodException ex) { + Class superclass = clazz.getSuperclass(); + if (null != superclass && Object.class != superclass) { + return findMethod(superclass, methodName, parameterTypes); + } + } + return Optional.empty(); + } } diff --git a/jdbc/core/pom.xml b/jdbc/core/pom.xml index 54ba0c104385a..bae493cf8ccf2 100644 --- a/jdbc/core/pom.xml +++ b/jdbc/core/pom.xml @@ -139,11 +139,6 @@ shardingsphere-encrypt-core ${project.version} - - org.apache.shardingsphere - shardingsphere-encrypt-sm - ${project.version} - org.apache.shardingsphere shardingsphere-mask-core diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/api/yaml/YamlShardingSphereDataSourceFactory.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/api/yaml/YamlShardingSphereDataSourceFactory.java index 6c74393d90d07..a89aa183ddb64 100644 --- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/api/yaml/YamlShardingSphereDataSourceFactory.java +++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/api/yaml/YamlShardingSphereDataSourceFactory.java @@ -167,31 +167,31 @@ private static DataSource createDataSource(final Map dataSou return ShardingSphereDataSourceFactory.createDataSource(jdbcConfig.getDatabaseName(), 
modeConfig, dataSourceMap, ruleConfigs, jdbcConfig.getProps()); } - private static void rebuildGlobalRuleConfiguration(final YamlJDBCConfiguration jdbcConfiguration) { - jdbcConfiguration.getRules().removeIf(YamlGlobalRuleConfiguration.class::isInstance); - if (null != jdbcConfiguration.getAuthority()) { - jdbcConfiguration.getRules().add(jdbcConfiguration.getAuthority()); + private static void rebuildGlobalRuleConfiguration(final YamlJDBCConfiguration jdbcConfig) { + jdbcConfig.getRules().removeIf(YamlGlobalRuleConfiguration.class::isInstance); + if (null != jdbcConfig.getAuthority()) { + jdbcConfig.getRules().add(jdbcConfig.getAuthority()); } - if (null != jdbcConfiguration.getTransaction()) { - jdbcConfiguration.getRules().add(jdbcConfiguration.getTransaction()); + if (null != jdbcConfig.getTransaction()) { + jdbcConfig.getRules().add(jdbcConfig.getTransaction()); } - if (null != jdbcConfiguration.getGlobalClock()) { - jdbcConfiguration.getRules().add(jdbcConfiguration.getGlobalClock()); + if (null != jdbcConfig.getGlobalClock()) { + jdbcConfig.getRules().add(jdbcConfig.getGlobalClock()); } - if (null != jdbcConfiguration.getSqlParser()) { - jdbcConfiguration.getRules().add(jdbcConfiguration.getSqlParser()); + if (null != jdbcConfig.getSqlParser()) { + jdbcConfig.getRules().add(jdbcConfig.getSqlParser()); } - if (null != jdbcConfiguration.getSqlTranslator()) { - jdbcConfiguration.getRules().add(jdbcConfiguration.getSqlTranslator()); + if (null != jdbcConfig.getSqlTranslator()) { + jdbcConfig.getRules().add(jdbcConfig.getSqlTranslator()); } - if (null != jdbcConfiguration.getTraffic()) { - jdbcConfiguration.getRules().add(jdbcConfiguration.getTraffic()); + if (null != jdbcConfig.getTraffic()) { + jdbcConfig.getRules().add(jdbcConfig.getTraffic()); } - if (null != jdbcConfiguration.getLogging()) { - jdbcConfiguration.getRules().add(jdbcConfiguration.getLogging()); + if (null != jdbcConfig.getLogging()) { + jdbcConfig.getRules().add(jdbcConfig.getLogging()); } - if 
(null != jdbcConfiguration.getSqlFederation()) { - jdbcConfiguration.getRules().add(jdbcConfiguration.getSqlFederation()); + if (null != jdbcConfig.getSqlFederation()) { + jdbcConfig.getRules().add(jdbcConfig.getSqlFederation()); } } } diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/executor/DriverJDBCExecutor.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/executor/DriverJDBCExecutor.java index 8ab8af641c663..5ee5584e95758 100644 --- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/executor/DriverJDBCExecutor.java +++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/executor/DriverJDBCExecutor.java @@ -115,7 +115,7 @@ private boolean isNeedAccumulate(final Collection rules, fin private int accumulate(final List updateResults) { int result = 0; for (Integer each : updateResults) { - result += null != each ? each : 0; + result += null == each ? 0 : each; } return result; } diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManager.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManager.java index ec68ac680594e..c1f56fc7f2c85 100644 --- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManager.java +++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManager.java @@ -26,13 +26,14 @@ import org.apache.shardingsphere.driver.jdbc.adapter.invocation.MethodInvocationRecorder; import org.apache.shardingsphere.driver.jdbc.core.ShardingSphereSavepoint; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import 
org.apache.shardingsphere.infra.exception.OverallConnectionNotEnoughException; import org.apache.shardingsphere.infra.executor.sql.execute.engine.ConnectionMode; import org.apache.shardingsphere.infra.executor.sql.prepare.driver.DatabaseConnectionManager; import org.apache.shardingsphere.infra.instance.metadata.InstanceMetaData; import org.apache.shardingsphere.infra.instance.metadata.InstanceType; import org.apache.shardingsphere.infra.instance.metadata.proxy.ProxyInstanceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.user.ShardingSphereUser; import org.apache.shardingsphere.infra.session.connection.ConnectionContext; import org.apache.shardingsphere.infra.session.connection.transaction.TransactionConnectionContext; @@ -54,6 +55,7 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Optional; import java.util.Random; @@ -66,6 +68,8 @@ public final class DriverDatabaseConnectionManager implements DatabaseConnection private final Map physicalDataSourceMap = new LinkedHashMap<>(); + private final Map trafficDataSourceMap = new LinkedHashMap<>(); + @Getter private final ConnectionTransaction connectionTransaction; @@ -80,12 +84,27 @@ public final class DriverDatabaseConnectionManager implements DatabaseConnection @Getter private final ConnectionContext connectionContext; + private final ContextManager contextManager; + + private final String databaseName; + public DriverDatabaseConnectionManager(final String databaseName, final ContextManager contextManager) { - dataSourceMap.putAll(contextManager.getDataSourceMap(databaseName)); - dataSourceMap.putAll(getTrafficDataSourceMap(databaseName, contextManager)); - physicalDataSourceMap.putAll(contextManager.getDataSourceMap(databaseName)); - connectionTransaction = createConnectionTransaction(databaseName, contextManager); + for (Entry entry : 
contextManager.getStorageUnits(databaseName).entrySet()) { + DataSource dataSource = entry.getValue().getDataSource(); + String cacheKey = getKey(databaseName, entry.getKey()); + dataSourceMap.put(cacheKey, dataSource); + physicalDataSourceMap.put(cacheKey, dataSource); + } + for (Entry entry : getTrafficDataSourceMap(databaseName, contextManager).entrySet()) { + String cacheKey = getKey(databaseName, entry.getKey()); + dataSourceMap.put(cacheKey, entry.getValue()); + trafficDataSourceMap.put(cacheKey, entry.getValue()); + } + connectionTransaction = createConnectionTransaction(contextManager); connectionContext = new ConnectionContext(cachedConnections::keySet); + connectionContext.setCurrentDatabase(databaseName); + this.contextManager = contextManager; + this.databaseName = databaseName; } private Map getTrafficDataSourceMap(final String databaseName, final ContextManager contextManager) { @@ -95,31 +114,31 @@ private Map getTrafficDataSourceMap(final String databaseNam } MetaDataBasedPersistService persistService = contextManager.getMetaDataContexts().getPersistService(); String actualDatabaseName = contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(); - Map dataSourcePropsMap = persistService.getDataSourceUnitService().load(actualDatabaseName); - Preconditions.checkState(!dataSourcePropsMap.isEmpty(), "Can not get data source properties from meta data."); - DataSourceProperties dataSourcePropsSample = dataSourcePropsMap.values().iterator().next(); + Map propsMap = persistService.getDataSourceUnitService().load(actualDatabaseName); + Preconditions.checkState(!propsMap.isEmpty(), "Can not get data source properties from meta data."); + DataSourcePoolProperties propsSample = propsMap.values().iterator().next(); Collection users = persistService.getGlobalRuleService().loadUsers(); Collection instances = contextManager.getInstanceContext().getAllClusterInstances(InstanceType.PROXY, rule.getLabels()); - return 
DataSourcePoolCreator.create(createDataSourcePropertiesMap(instances, users, dataSourcePropsSample, actualDatabaseName)); + return DataSourcePoolCreator.create(createDataSourcePoolPropertiesMap(instances, users, propsSample, actualDatabaseName), true); } - private Map createDataSourcePropertiesMap(final Collection instances, final Collection users, - final DataSourceProperties dataSourcePropsSample, final String schema) { - Map result = new LinkedHashMap<>(); + private Map createDataSourcePoolPropertiesMap(final Collection instances, final Collection users, + final DataSourcePoolProperties propsSample, final String schema) { + Map result = new LinkedHashMap<>(); for (InstanceMetaData each : instances) { - result.put(each.getId(), createDataSourceProperties((ProxyInstanceMetaData) each, users, dataSourcePropsSample, schema)); + result.put(each.getId(), createDataSourcePoolProperties((ProxyInstanceMetaData) each, users, propsSample, schema)); } return result; } - private DataSourceProperties createDataSourceProperties(final ProxyInstanceMetaData instanceMetaData, final Collection users, - final DataSourceProperties dataSourcePropsSample, final String schema) { - Map props = dataSourcePropsSample.getAllLocalProperties(); + private DataSourcePoolProperties createDataSourcePoolProperties(final ProxyInstanceMetaData instanceMetaData, final Collection users, + final DataSourcePoolProperties propsSample, final String schema) { + Map props = propsSample.getAllLocalProperties(); props.put("jdbcUrl", createJdbcUrl(instanceMetaData, schema, props)); ShardingSphereUser user = users.iterator().next(); props.put("username", user.getGrantee().getUsername()); props.put("password", user.getPassword()); - return new DataSourceProperties("com.zaxxer.hikari.HikariDataSource", props); + return new DataSourcePoolProperties("com.zaxxer.hikari.HikariDataSource", props); } private String createJdbcUrl(final ProxyInstanceMetaData instanceMetaData, final String schema, final Map props) { @@ 
-129,9 +148,9 @@ private String createJdbcUrl(final ProxyInstanceMetaData instanceMetaData, final return String.format("%s//%s:%s/%s%s", jdbcUrlPrefix, instanceMetaData.getIp(), instanceMetaData.getPort(), schema, jdbcUrlSuffix); } - private ConnectionTransaction createConnectionTransaction(final String databaseName, final ContextManager contextManager) { + private ConnectionTransaction createConnectionTransaction(final ContextManager contextManager) { TransactionRule rule = contextManager.getMetaDataContexts().getMetaData().getGlobalRuleMetaData().getSingleRule(TransactionRule.class); - return new ConnectionTransaction(databaseName, rule); + return new ConnectionTransaction(rule); } /** @@ -294,9 +313,13 @@ public boolean isValid(final int timeout) throws SQLException { * @return random physical data source name */ public String getRandomPhysicalDataSourceName() { + return getRandomPhysicalDatabaseAndDataSourceName()[1]; + } + + private String[] getRandomPhysicalDatabaseAndDataSourceName() { Collection cachedPhysicalDataSourceNames = Sets.intersection(physicalDataSourceMap.keySet(), cachedConnections.keySet()); - Collection datasourceNames = cachedPhysicalDataSourceNames.isEmpty() ? physicalDataSourceMap.keySet() : cachedPhysicalDataSourceNames; - return new ArrayList<>(datasourceNames).get(random.nextInt(datasourceNames.size())); + Collection databaseAndDatasourceNames = cachedPhysicalDataSourceNames.isEmpty() ? 
physicalDataSourceMap.keySet() : cachedPhysicalDataSourceNames; + return new ArrayList<>(databaseAndDatasourceNames).get(random.nextInt(databaseAndDatasourceNames.size())).split("\\."); } /** @@ -306,61 +329,73 @@ public String getRandomPhysicalDataSourceName() { * @throws SQLException SQL exception */ public Connection getRandomConnection() throws SQLException { - return getConnections(getRandomPhysicalDataSourceName(), 0, 1, ConnectionMode.MEMORY_STRICTLY).get(0); + String[] databaseAndDataSourceName = getRandomPhysicalDatabaseAndDataSourceName(); + return getConnections(databaseAndDataSourceName[0], databaseAndDataSourceName[1], 0, 1, ConnectionMode.MEMORY_STRICTLY).get(0); } @Override public List getConnections(final String dataSourceName, final int connectionOffset, final int connectionSize, final ConnectionMode connectionMode) throws SQLException { - DataSource dataSource = dataSourceMap.get(dataSourceName); + return getConnections(connectionContext.getDatabaseName().orElse(databaseName), dataSourceName, connectionOffset, connectionSize, connectionMode); + } + + private List getConnections(final String currentDatabaseName, final String dataSourceName, final int connectionOffset, final int connectionSize, + final ConnectionMode connectionMode) throws SQLException { + String cacheKey = getKey(currentDatabaseName, dataSourceName); + DataSource dataSource = databaseName.equals(currentDatabaseName) ? 
dataSourceMap.get(cacheKey) : contextManager.getStorageUnits(currentDatabaseName).get(dataSourceName).getDataSource(); Preconditions.checkNotNull(dataSource, "Missing the data source name: '%s'", dataSourceName); Collection connections; synchronized (cachedConnections) { - connections = cachedConnections.get(dataSourceName); + connections = cachedConnections.get(cacheKey); } List result; int maxConnectionSize = connectionOffset + connectionSize; if (connections.size() >= maxConnectionSize) { result = new ArrayList<>(connections).subList(connectionOffset, maxConnectionSize); } else if (connections.isEmpty()) { - Collection newConnections = createConnections(dataSourceName, dataSource, maxConnectionSize, connectionMode); + Collection newConnections = createConnections(currentDatabaseName, dataSourceName, dataSource, maxConnectionSize, connectionMode); result = new ArrayList<>(newConnections).subList(connectionOffset, maxConnectionSize); synchronized (cachedConnections) { - cachedConnections.putAll(dataSourceName, newConnections); + cachedConnections.putAll(cacheKey, newConnections); } } else { List allConnections = new ArrayList<>(maxConnectionSize); allConnections.addAll(connections); - Collection newConnections = createConnections(dataSourceName, dataSource, maxConnectionSize - connections.size(), connectionMode); + Collection newConnections = createConnections(currentDatabaseName, dataSourceName, dataSource, maxConnectionSize - connections.size(), connectionMode); allConnections.addAll(newConnections); result = allConnections.subList(connectionOffset, maxConnectionSize); synchronized (cachedConnections) { - cachedConnections.putAll(dataSourceName, newConnections); + cachedConnections.putAll(cacheKey, newConnections); } } return result; } + private String getKey(final String databaseName, final String dataSourceName) { + return databaseName.toLowerCase() + "." 
+ dataSourceName; + } + @SuppressWarnings("SynchronizationOnLocalVariableOrMethodParameter") - private List createConnections(final String dataSourceName, final DataSource dataSource, final int connectionSize, final ConnectionMode connectionMode) throws SQLException { + private List createConnections(final String databaseName, final String dataSourceName, final DataSource dataSource, final int connectionSize, + final ConnectionMode connectionMode) throws SQLException { if (1 == connectionSize) { - Connection connection = createConnection(dataSourceName, dataSource, connectionContext.getTransactionContext()); + Connection connection = createConnection(databaseName, dataSourceName, dataSource, connectionContext.getTransactionContext()); methodInvocationRecorder.replay(connection); return Collections.singletonList(connection); } if (ConnectionMode.CONNECTION_STRICTLY == connectionMode) { - return createConnections(dataSourceName, dataSource, connectionSize, connectionContext.getTransactionContext()); + return createConnections(databaseName, dataSourceName, dataSource, connectionSize, connectionContext.getTransactionContext()); } synchronized (dataSource) { - return createConnections(dataSourceName, dataSource, connectionSize, connectionContext.getTransactionContext()); + return createConnections(databaseName, dataSourceName, dataSource, connectionSize, connectionContext.getTransactionContext()); } } - private List createConnections(final String dataSourceName, final DataSource dataSource, final int connectionSize, + private List createConnections(final String databaseName, final String dataSourceName, final DataSource dataSource, final int connectionSize, final TransactionConnectionContext transactionConnectionContext) throws SQLException { List result = new ArrayList<>(connectionSize); for (int i = 0; i < connectionSize; i++) { try { - Connection connection = createConnection(dataSourceName, dataSource, transactionConnectionContext); + Connection connection = 
createConnection(databaseName, dataSourceName, dataSource, transactionConnectionContext); methodInvocationRecorder.replay(connection); result.add(connection); } catch (final SQLException ignored) { @@ -373,13 +408,15 @@ private List createConnections(final String dataSourceName, final Da return result; } - private Connection createConnection(final String dataSourceName, final DataSource dataSource, final TransactionConnectionContext transactionConnectionContext) throws SQLException { - Optional connectionInTransaction = isRawJdbcDataSource(dataSourceName) ? connectionTransaction.getConnection(dataSourceName, transactionConnectionContext) : Optional.empty(); + private Connection createConnection(final String databaseName, final String dataSourceName, final DataSource dataSource, + final TransactionConnectionContext transactionConnectionContext) throws SQLException { + Optional connectionInTransaction = + isRawJdbcDataSource(databaseName, dataSourceName) ? connectionTransaction.getConnection(databaseName, dataSourceName, transactionConnectionContext) : Optional.empty(); return connectionInTransaction.isPresent() ? 
connectionInTransaction.get() : dataSource.getConnection(); } - private boolean isRawJdbcDataSource(final String dataSourceName) { - return physicalDataSourceMap.containsKey(dataSourceName); + private boolean isRawJdbcDataSource(final String databaseName, final String dataSourceName) { + return !trafficDataSourceMap.containsKey(getKey(databaseName, dataSourceName)); } @Override diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/datasource/ShardingSphereDataSource.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/datasource/ShardingSphereDataSource.java index 7004f2f60752e..3cd1eef41b77b 100644 --- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/datasource/ShardingSphereDataSource.java +++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/datasource/ShardingSphereDataSource.java @@ -26,6 +26,7 @@ import org.apache.shardingsphere.infra.instance.metadata.InstanceMetaData; import org.apache.shardingsphere.infra.instance.metadata.InstanceMetaDataBuilder; import org.apache.shardingsphere.infra.instance.metadata.InstanceType; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.spi.ShardingSphereServiceLoader; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.mode.manager.ContextManager; @@ -98,17 +99,11 @@ public Connection getConnection(final String username, final String password) { return getConnection(); } - /** - * Close data sources. - * - * @param dataSourceNames data source names to be closed - * @throws SQLException SQL exception - */ - // TODO Replace public to private? 
- public void close(final Collection dataSourceNames) throws SQLException { - Map dataSourceMap = contextManager.getDataSourceMap(databaseName); - for (String each : dataSourceNames) { - close(dataSourceMap.get(each)); + @Override + public void close() throws SQLException { + contextManagerDestroyedCallback(databaseName); + for (StorageUnit each : contextManager.getStorageUnits(databaseName).values()) { + close(each.getDataSource()); } contextManager.close(); } @@ -125,12 +120,6 @@ private void close(final DataSource dataSource) throws SQLException { } } - @Override - public void close() throws SQLException { - contextManagerDestroyedCallback(databaseName); - close(contextManager.getDataSourceMap(databaseName).keySet()); - } - private void contextManagerDestroyedCallback(final String databaseName) { for (ContextManagerLifecycleListener each : ShardingSphereServiceLoader.getServiceInstances(ContextManagerLifecycleListener.class)) { try { @@ -144,14 +133,14 @@ private void contextManagerDestroyedCallback(final String databaseName) { @Override public int getLoginTimeout() throws SQLException { - Map dataSourceMap = contextManager.getDataSourceMap(databaseName); - return dataSourceMap.isEmpty() ? 0 : dataSourceMap.values().iterator().next().getLoginTimeout(); + Map storageUnits = contextManager.getStorageUnits(databaseName); + return storageUnits.isEmpty() ? 
0 : storageUnits.values().iterator().next().getDataSource().getLoginTimeout(); } @Override public void setLoginTimeout(final int seconds) throws SQLException { - for (DataSource each : contextManager.getDataSourceMap(databaseName).values()) { - each.setLoginTimeout(seconds); + for (StorageUnit each : contextManager.getStorageUnits(databaseName).values()) { + each.getDataSource().setLoginTimeout(seconds); } } } diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/datasource/metadata/ShardingSphereDatabaseMetaData.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/datasource/metadata/ShardingSphereDatabaseMetaData.java index e3771090da887..4cf8b2dbec1b7 100644 --- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/datasource/metadata/ShardingSphereDatabaseMetaData.java +++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/datasource/metadata/ShardingSphereDatabaseMetaData.java @@ -224,13 +224,13 @@ private ResultSet createDatabaseMetaDataResultSet(final ResultSet resultSet) thr private String getActualCatalog(final String catalog) { ConnectionProperties connectionProps = connection.getContextManager() .getMetaDataContexts().getMetaData().getDatabase(connection.getDatabaseName()).getResourceMetaData().getConnectionProperties(getDataSourceName()); - return null != catalog && catalog.contains(DefaultDatabase.LOGIC_NAME) ? connectionProps.getCatalog() : catalog; + return null == catalog || !catalog.contains(DefaultDatabase.LOGIC_NAME) ? catalog : connectionProps.getCatalog(); } private String getActualSchema(final String schema) { ConnectionProperties connectionProps = connection.getContextManager() .getMetaDataContexts().getMetaData().getDatabase(connection.getDatabaseName()).getResourceMetaData().getConnectionProperties(getDataSourceName()); - return null != schema && schema.contains(DefaultDatabase.LOGIC_NAME) ? 
connectionProps.getSchema() : schema; + return null == schema || !schema.contains(DefaultDatabase.LOGIC_NAME) ? schema : connectionProps.getSchema(); } private String getDataSourceName() { diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/DriverDataSourceCache.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/DriverDataSourceCache.java index 6bc221bda34cd..0550bc8fc8a0e 100644 --- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/DriverDataSourceCache.java +++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/DriverDataSourceCache.java @@ -49,7 +49,7 @@ public DataSource get(final String url, final String urlPrefix) { @SuppressWarnings("unchecked") private DataSource createDataSource(final String url, final String urlPrefix) throws T { try { - return YamlShardingSphereDataSourceFactory.createDataSource(ShardingSphereDriverURLManager.getContent(url, urlPrefix)); + return YamlShardingSphereDataSourceFactory.createDataSource(ShardingSphereURLManager.getContent(url, urlPrefix)); } catch (final IOException ex) { throw (T) new SQLException(ex); } catch (final SQLException ex) { diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereDriverURLManager.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereURLManager.java similarity index 74% rename from jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereDriverURLManager.java rename to jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereURLManager.java index 01d9f06498f87..10deed6cb25fb 100644 --- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereDriverURLManager.java +++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereURLManager.java @@ -19,14 +19,14 @@ import 
lombok.AccessLevel; import lombok.NoArgsConstructor; -import org.apache.shardingsphere.driver.jdbc.exception.syntax.DriverURLProviderNotFoundException; +import org.apache.shardingsphere.driver.jdbc.exception.syntax.URLProviderNotFoundException; import org.apache.shardingsphere.infra.spi.ShardingSphereServiceLoader; /** - * ShardingSphere driver URL manager. + * ShardingSphere URL manager. */ @NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class ShardingSphereDriverURLManager { +public final class ShardingSphereURLManager { /** * Get config content from URL. @@ -34,14 +34,14 @@ public final class ShardingSphereDriverURLManager { * @param url driver URL * @param urlPrefix url prefix * @return configuration content - * @throws DriverURLProviderNotFoundException driver URL provider not found exception + * @throws URLProviderNotFoundException driver URL provider not found exception */ public static byte[] getContent(final String url, final String urlPrefix) { - for (ShardingSphereDriverURLProvider each : ShardingSphereServiceLoader.getServiceInstances(ShardingSphereDriverURLProvider.class)) { + for (ShardingSphereURLProvider each : ShardingSphereServiceLoader.getServiceInstances(ShardingSphereURLProvider.class)) { if (each.accept(url)) { return each.getContent(url, urlPrefix); } } - throw new DriverURLProviderNotFoundException(url); + throw new URLProviderNotFoundException(url); } } diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereDriverURLProvider.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereURLProvider.java similarity index 93% rename from jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereDriverURLProvider.java rename to jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereURLProvider.java index 6237b6cffb063..0529181e2607e 100644 --- 
a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereDriverURLProvider.java +++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereURLProvider.java @@ -18,9 +18,9 @@ package org.apache.shardingsphere.driver.jdbc.core.driver; /** - * ShardingSphere driver URL provider. + * ShardingSphere URL provider. */ -public interface ShardingSphereDriverURLProvider { +public interface ShardingSphereURLProvider { /** * Check if the url is suitable for this provider. diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/AbsolutePathDriverURLProvider.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/AbsolutePathURLProvider.java similarity index 87% rename from jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/AbsolutePathDriverURLProvider.java rename to jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/AbsolutePathURLProvider.java index f5b5ca30c33d2..3d95c8fda88ce 100644 --- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/AbsolutePathDriverURLProvider.java +++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/AbsolutePathURLProvider.java @@ -18,9 +18,9 @@ package org.apache.shardingsphere.driver.jdbc.core.driver.spi; import com.google.common.base.Preconditions; +import com.google.common.base.Strings; import lombok.SneakyThrows; -import org.apache.commons.lang3.StringUtils; -import org.apache.shardingsphere.driver.jdbc.core.driver.ShardingSphereDriverURLProvider; +import org.apache.shardingsphere.driver.jdbc.core.driver.ShardingSphereURLProvider; import java.io.BufferedReader; import java.io.File; @@ -31,15 +31,15 @@ import java.nio.file.Files; /** - * Absolute path driver URL provider. + * Absolute path URL provider. 
*/ -public final class AbsolutePathDriverURLProvider implements ShardingSphereDriverURLProvider { +public final class AbsolutePathURLProvider implements ShardingSphereURLProvider { private static final String PATH_TYPE = "absolutepath:"; @Override public boolean accept(final String url) { - return StringUtils.isNotBlank(url) && url.contains(PATH_TYPE); + return !Strings.isNullOrEmpty(url) && url.contains(PATH_TYPE); } @Override @@ -47,7 +47,7 @@ public boolean accept(final String url) { public byte[] getContent(final String url, final String urlPrefix) { String configuredFile = url.substring(urlPrefix.length(), url.contains("?") ? url.indexOf('?') : url.length()); String file = configuredFile.substring(PATH_TYPE.length()); - Preconditions.checkArgument(!file.isEmpty(), "Configuration file is required in ShardingSphere driver URL."); + Preconditions.checkArgument(!file.isEmpty(), "Configuration file is required in ShardingSphere URL."); try ( InputStream stream = Files.newInputStream(new File(file).toPath()); BufferedReader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) { diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/ApolloDriverURLProvider.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/ApolloURLProvider.java similarity index 85% rename from jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/ApolloDriverURLProvider.java rename to jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/ApolloURLProvider.java index 751b618e74bcb..f267cf12f72a7 100644 --- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/ApolloDriverURLProvider.java +++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/ApolloURLProvider.java @@ -21,28 +21,28 @@ import com.ctrip.framework.apollo.ConfigService; import com.ctrip.framework.apollo.core.enums.ConfigFileFormat; import 
com.google.common.base.Preconditions; -import org.apache.commons.lang3.StringUtils; -import org.apache.shardingsphere.driver.jdbc.core.driver.ShardingSphereDriverURLProvider; +import com.google.common.base.Strings; +import org.apache.shardingsphere.driver.jdbc.core.driver.ShardingSphereURLProvider; import java.nio.charset.StandardCharsets; /** - * Apollo driver URL provider. + * Apollo URL provider. */ -public final class ApolloDriverURLProvider implements ShardingSphereDriverURLProvider { +public final class ApolloURLProvider implements ShardingSphereURLProvider { private static final String APOLLO_TYPE = "apollo:"; @Override public boolean accept(final String url) { - return StringUtils.isNotBlank(url) && url.contains(APOLLO_TYPE); + return !Strings.isNullOrEmpty(url) && url.contains(APOLLO_TYPE); } @Override public byte[] getContent(final String url, final String urlPrefix) { String configPath = url.substring(urlPrefix.length(), url.contains("?") ? url.indexOf('?') : url.length()); String namespace = configPath.substring(APOLLO_TYPE.length()); - Preconditions.checkArgument(!namespace.isEmpty(), "Apollo namespace is required in ShardingSphere driver URL."); + Preconditions.checkArgument(!namespace.isEmpty(), "Apollo namespace is required in ShardingSphere URL."); ConfigFile configFile = ConfigService.getConfigFile(namespace, ConfigFileFormat.YAML); return configFile.getContent().getBytes(StandardCharsets.UTF_8); } diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/ClasspathDriverURLProvider.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/ClasspathURLProvider.java similarity index 89% rename from jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/ClasspathDriverURLProvider.java rename to jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/ClasspathURLProvider.java index c4a5fa56b6ce8..d34e395c37c62 100644 --- 
a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/ClasspathDriverURLProvider.java +++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/driver/spi/ClasspathURLProvider.java @@ -18,9 +18,9 @@ package org.apache.shardingsphere.driver.jdbc.core.driver.spi; import com.google.common.base.Preconditions; +import com.google.common.base.Strings; import lombok.SneakyThrows; -import org.apache.commons.lang3.StringUtils; -import org.apache.shardingsphere.driver.jdbc.core.driver.ShardingSphereDriverURLProvider; +import org.apache.shardingsphere.driver.jdbc.core.driver.ShardingSphereURLProvider; import java.io.BufferedReader; import java.io.IOException; @@ -29,15 +29,15 @@ import java.nio.charset.StandardCharsets; /** - * Classpath driver URL provider. + * Classpath URL provider. */ -public final class ClasspathDriverURLProvider implements ShardingSphereDriverURLProvider { +public final class ClasspathURLProvider implements ShardingSphereURLProvider { private static final String CLASSPATH_TYPE = "classpath:"; @Override public boolean accept(final String url) { - return StringUtils.isNotBlank(url) && url.contains(CLASSPATH_TYPE); + return !Strings.isNullOrEmpty(url) && url.contains(CLASSPATH_TYPE); } @Override @@ -45,7 +45,7 @@ public boolean accept(final String url) { public byte[] getContent(final String url, final String urlPrefix) { String configuredFile = url.substring(urlPrefix.length(), url.contains("?") ? 
url.indexOf('?') : url.length()); String file = configuredFile.substring(CLASSPATH_TYPE.length()); - Preconditions.checkArgument(!file.isEmpty(), "Configuration file is required in ShardingSphere driver URL."); + Preconditions.checkArgument(!file.isEmpty(), "Configuration file is required in ShardingSphere URL."); try ( InputStream stream = getResourceAsStream(file); BufferedReader reader = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8))) { diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/statement/ShardingSpherePreparedStatement.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/statement/ShardingSpherePreparedStatement.java index d0d4daa6b926c..c2bba63790112 100644 --- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/statement/ShardingSpherePreparedStatement.java +++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/statement/ShardingSpherePreparedStatement.java @@ -161,6 +161,8 @@ public final class ShardingSpherePreparedStatement extends AbstractPreparedState private final HintValueContext hintValueContext; + private ResultSet currentBatchGeneratedKeysResultSet; + public ShardingSpherePreparedStatement(final ShardingSphereConnection connection, final String sql) throws SQLException { this(connection, sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT, false, null); } @@ -195,11 +197,11 @@ private ShardingSpherePreparedStatement(final ShardingSphereConnection connectio this.sql = sqlParserRule.isSqlCommentParseEnabled() ? 
sql : SQLHintUtils.removeHint(sql); statements = new ArrayList<>(); parameterSets = new ArrayList<>(); - DatabaseType protocolType = metaDataContexts.getMetaData().getDatabase(connection.getDatabaseName()).getProtocolType(); - SQLParserEngine sqlParserEngine = sqlParserRule.getSQLParserEngine(protocolType.getTrunkDatabaseType().orElse(protocolType)); + SQLParserEngine sqlParserEngine = sqlParserRule.getSQLParserEngine(getDatabaseType(connection)); sqlStatement = sqlParserEngine.parse(this.sql, true); - sqlStatementContext = new SQLBindEngine(metaDataContexts.getMetaData(), connection.getDatabaseName()).bind(sqlStatement, Collections.emptyList()); + sqlStatementContext = new SQLBindEngine(metaDataContexts.getMetaData(), connection.getDatabaseName(), hintValueContext).bind(sqlStatement, Collections.emptyList()); databaseName = sqlStatementContext.getTablesContext().getDatabaseName().orElse(connection.getDatabaseName()); + connection.getDatabaseConnectionManager().getConnectionContext().setCurrentDatabase(databaseName); parameterMetaData = new ShardingSphereParameterMetaData(sqlStatement); statementOption = returnGeneratedKeys ? 
new StatementOption(true, columns) : new StatementOption(resultSetType, resultSetConcurrency, resultSetHoldability); executor = new DriverExecutor(connection); @@ -212,6 +214,11 @@ private ShardingSpherePreparedStatement(final ShardingSphereConnection connectio statementManager = new StatementManager(); } + private DatabaseType getDatabaseType(final ShardingSphereConnection connection) { + DatabaseType protocolType = metaDataContexts.getMetaData().getDatabase(connection.getDatabaseName()).getProtocolType(); + return protocolType.getTrunkDatabaseType().orElse(protocolType); + } + private boolean isStatementsCacheable(final RuleMetaData databaseRuleMetaData) { return databaseRuleMetaData.findRules(StorageConnectorReusableRule.class).size() == databaseRuleMetaData.getRules().size() && !HintManager.isInstantiated(); } @@ -264,7 +271,7 @@ private JDBCExecutionUnit createTrafficExecutionUnit(final String trafficInstanc DriverExecutionPrepareEngine prepareEngine = createDriverExecutionPrepareEngine(); ExecutionUnit executionUnit = new ExecutionUnit(trafficInstanceId, new SQLUnit(queryContext.getSql(), queryContext.getParameters())); ExecutionGroupContext context = - prepareEngine.prepare(new RouteContext(), Collections.singletonList(executionUnit), new ExecutionGroupReportContext(databaseName)); + prepareEngine.prepare(new RouteContext(), Collections.singleton(executionUnit), new ExecutionGroupReportContext(databaseName)); if (context.getInputGroups().isEmpty() || context.getInputGroups().iterator().next().getInputs().isEmpty()) { throw new EmptyTrafficExecutionUnitException(); } @@ -319,7 +326,7 @@ private DriverExecutionPrepareEngine createDriver int maxConnectionsSizePerQuery = metaDataContexts.getMetaData().getProps().getValue(ConfigurationPropertyKey.MAX_CONNECTIONS_SIZE_PER_QUERY); return new DriverExecutionPrepareEngine<>(JDBCDriverType.PREPARED_STATEMENT, maxConnectionsSizePerQuery, connection.getDatabaseConnectionManager(), statementManager, statementOption, 
metaDataContexts.getMetaData().getDatabase(databaseName).getRuleMetaData().getRules(), - metaDataContexts.getMetaData().getDatabase(databaseName).getResourceMetaData().getStorageTypes()); + metaDataContexts.getMetaData().getDatabase(databaseName).getResourceMetaData().getStorageUnitMetaData()); } @Override @@ -443,7 +450,7 @@ private boolean executeWithImplicitCommitTransaction() throws SQLException { result = useDriverToExecute(); connection.commit(); // CHECKSTYLE:OFF - } catch (final RuntimeException ex) { + } catch (final Exception ex) { // CHECKSTYLE:ON connection.rollback(); throw SQLExceptionTransformEngine.toSQLException(ex, metaDataContexts.getMetaData().getDatabase(databaseName).getProtocolType()); @@ -606,6 +613,9 @@ private Optional findGeneratedKey(final ExecutionContext ex @Override public ResultSet getGeneratedKeys() throws SQLException { + if (null != currentBatchGeneratedKeysResultSet) { + return currentBatchGeneratedKeysResultSet; + } Optional generatedKey = findGeneratedKey(executionContext); if (generatedKey.isPresent() && statementOption.isReturnGeneratedKeys() && !generatedValues.isEmpty()) { return new GeneratedKeysResultSet(getGeneratedKeysColumnName(generatedKey.get().getColumnName()), generatedValues.iterator(), this); @@ -629,7 +639,7 @@ public void addBatch() { try { QueryContext queryContext = createQueryContext(); trafficInstanceId = getInstanceIdAndSet(queryContext).orElse(null); - executionContext = null != trafficInstanceId ? createExecutionContext(queryContext, trafficInstanceId) : createExecutionContext(queryContext); + executionContext = null == trafficInstanceId ? 
createExecutionContext(queryContext) : createExecutionContext(queryContext, trafficInstanceId); batchPreparedStatementExecutor.addBatchForExecutionUnits(executionContext.getExecutionUnits()); } finally { currentResultSet = null; @@ -645,7 +655,16 @@ public int[] executeBatch() throws SQLException { try { // TODO add raw SQL executor initBatchPreparedStatementExecutor(); - return batchPreparedStatementExecutor.executeBatch(executionContext.getSqlStatementContext()); + int[] results = batchPreparedStatementExecutor.executeBatch(executionContext.getSqlStatementContext()); + if (statementOption.isReturnGeneratedKeys() && generatedValues.isEmpty()) { + List batchPreparedStatementExecutorStatements = batchPreparedStatementExecutor.getStatements(); + for (Statement statement : batchPreparedStatementExecutorStatements) { + statements.add((PreparedStatement) statement); + } + currentBatchGeneratedKeysResultSet = getGeneratedKeys(); + statements.clear(); + } + return results; // CHECKSTYLE:OFF } catch (final RuntimeException ex) { // CHECKSTYLE:ON @@ -660,7 +679,7 @@ private void initBatchPreparedStatementExecutor() throws SQLException { DriverExecutionPrepareEngine prepareEngine = new DriverExecutionPrepareEngine<>(JDBCDriverType.PREPARED_STATEMENT, metaDataContexts.getMetaData().getProps() .getValue(ConfigurationPropertyKey.MAX_CONNECTIONS_SIZE_PER_QUERY), connection.getDatabaseConnectionManager(), statementManager, statementOption, metaDataContexts.getMetaData().getDatabase(databaseName).getRuleMetaData().getRules(), - metaDataContexts.getMetaData().getDatabase(databaseName).getResourceMetaData().getStorageTypes()); + metaDataContexts.getMetaData().getDatabase(databaseName).getResourceMetaData().getStorageUnitMetaData()); List executionUnits = new ArrayList<>(batchPreparedStatementExecutor.getBatchExecutionUnits().size()); for (BatchExecutionUnit each : batchPreparedStatementExecutor.getBatchExecutionUnits()) { ExecutionUnit executionUnit = each.getExecutionUnit(); diff 
--git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/statement/ShardingSphereStatement.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/statement/ShardingSphereStatement.java index a908328d1ab00..fe78fd1d84388 100644 --- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/statement/ShardingSphereStatement.java +++ b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/core/statement/ShardingSphereStatement.java @@ -20,7 +20,6 @@ import com.google.common.base.Strings; import lombok.AccessLevel; import lombok.Getter; -import org.apache.shardingsphere.infra.exception.dialect.SQLExceptionTransformEngine; import org.apache.shardingsphere.driver.executor.DriverExecutor; import org.apache.shardingsphere.driver.executor.batch.BatchStatementExecutor; import org.apache.shardingsphere.driver.executor.callback.ExecuteCallback; @@ -31,17 +30,16 @@ import org.apache.shardingsphere.driver.jdbc.core.resultset.GeneratedKeysResultSet; import org.apache.shardingsphere.driver.jdbc.core.resultset.ShardingSphereResultSet; import org.apache.shardingsphere.driver.jdbc.exception.syntax.EmptySQLException; -import org.apache.shardingsphere.driver.jdbc.exception.transaction.JDBCTransactionAcrossDatabasesException; -import org.apache.shardingsphere.infra.binder.engine.SQLBindEngine; import org.apache.shardingsphere.infra.binder.context.segment.insert.keygen.GeneratedKeyContext; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.dml.InsertStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.dml.SelectStatementContext; -import org.apache.shardingsphere.infra.binder.context.type.TableAvailable; +import org.apache.shardingsphere.infra.binder.engine.SQLBindEngine; import org.apache.shardingsphere.infra.config.props.ConfigurationPropertyKey; import 
org.apache.shardingsphere.infra.connection.kernel.KernelProcessor; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.mysql.type.MySQLDatabaseType; +import org.apache.shardingsphere.infra.exception.dialect.SQLExceptionTransformEngine; import org.apache.shardingsphere.infra.executor.audit.SQLAuditEngine; import org.apache.shardingsphere.infra.executor.kernel.model.ExecutionGroup; import org.apache.shardingsphere.infra.executor.kernel.model.ExecutionGroupContext; @@ -164,7 +162,7 @@ public ResultSet executeQuery(final String sql) throws SQLException { try { QueryContext queryContext = createQueryContext(sql); databaseName = queryContext.getDatabaseNameFromSQLStatement().orElse(connection.getDatabaseName()); - checkSameDatabaseNameInTransaction(queryContext.getSqlStatementContext(), databaseName); + connection.getDatabaseConnectionManager().getConnectionContext().setCurrentDatabase(databaseName); trafficInstanceId = getInstanceIdAndSet(queryContext).orElse(null); if (null != trafficInstanceId) { JDBCExecutionUnit executionUnit = createTrafficExecutionUnit(trafficInstanceId, queryContext); @@ -240,7 +238,7 @@ private DriverExecutionPrepareEngine createDriver int maxConnectionsSizePerQuery = metaDataContexts.getMetaData().getProps().getValue(ConfigurationPropertyKey.MAX_CONNECTIONS_SIZE_PER_QUERY); return new DriverExecutionPrepareEngine<>(JDBCDriverType.STATEMENT, maxConnectionsSizePerQuery, connection.getDatabaseConnectionManager(), statementManager, statementOption, metaDataContexts.getMetaData().getDatabase(databaseName).getRuleMetaData().getRules(), - metaDataContexts.getMetaData().getDatabase(databaseName).getResourceMetaData().getStorageTypes()); + metaDataContexts.getMetaData().getDatabase(databaseName).getResourceMetaData().getStorageUnitMetaData()); } @Override @@ -313,7 +311,7 @@ private int executeUpdate(final ExecuteUpdateCallback updateCallback, final SQLS private int executeUpdate0(final 
String sql, final ExecuteUpdateCallback updateCallback, final TrafficExecutorCallback trafficCallback) throws SQLException { QueryContext queryContext = createQueryContext(sql); databaseName = queryContext.getDatabaseNameFromSQLStatement().orElse(connection.getDatabaseName()); - checkSameDatabaseNameInTransaction(queryContext.getSqlStatementContext(), databaseName); + connection.getDatabaseConnectionManager().getConnectionContext().setCurrentDatabase(databaseName); trafficInstanceId = getInstanceIdAndSet(queryContext).orElse(null); if (null != trafficInstanceId) { JDBCExecutionUnit executionUnit = createTrafficExecutionUnit(trafficInstanceId, queryContext); @@ -431,7 +429,7 @@ private boolean execute0(final String sql, final ExecuteCallback executeCallback try { QueryContext queryContext = createQueryContext(sql); databaseName = queryContext.getDatabaseNameFromSQLStatement().orElse(connection.getDatabaseName()); - checkSameDatabaseNameInTransaction(queryContext.getSqlStatementContext(), databaseName); + connection.getDatabaseConnectionManager().getConnectionContext().setCurrentDatabase(databaseName); trafficInstanceId = getInstanceIdAndSet(queryContext).orElse(null); if (null != trafficInstanceId) { JDBCExecutionUnit executionUnit = createTrafficExecutionUnit(trafficInstanceId, queryContext); @@ -454,19 +452,6 @@ private boolean execute0(final String sql, final ExecuteCallback executeCallback } } - private void checkSameDatabaseNameInTransaction(final SQLStatementContext sqlStatementContext, final String connectionDatabaseName) { - if (!connection.getDatabaseConnectionManager().getConnectionContext().getTransactionContext().isInTransaction()) { - return; - } - if (sqlStatementContext instanceof TableAvailable) { - ((TableAvailable) sqlStatementContext).getTablesContext().getDatabaseName().ifPresent(optional -> { - if (!optional.equals(connectionDatabaseName)) { - throw new JDBCTransactionAcrossDatabasesException(); - } - }); - } - } - private JDBCExecutionUnit 
createTrafficExecutionUnit(final String trafficInstanceId, final QueryContext queryContext) throws SQLException { DriverExecutionPrepareEngine prepareEngine = createDriverExecutionPrepareEngine(); ExecutionUnit executionUnit = new ExecutionUnit(trafficInstanceId, new SQLUnit(queryContext.getSql(), queryContext.getParameters())); @@ -500,13 +485,17 @@ public int[] executeBatch() throws SQLException { private QueryContext createQueryContext(final String originSQL) { SQLParserRule sqlParserRule = metaDataContexts.getMetaData().getGlobalRuleMetaData().getSingleRule(SQLParserRule.class); String sql = sqlParserRule.isSqlCommentParseEnabled() ? originSQL : SQLHintUtils.removeHint(originSQL); - DatabaseType protocolType = metaDataContexts.getMetaData().getDatabase(databaseName).getProtocolType(); - SQLStatement sqlStatement = sqlParserRule.getSQLParserEngine(protocolType.getTrunkDatabaseType().orElse(protocolType)).parse(sql, false); - SQLStatementContext sqlStatementContext = new SQLBindEngine(metaDataContexts.getMetaData(), databaseName).bind(sqlStatement, Collections.emptyList()); + SQLStatement sqlStatement = sqlParserRule.getSQLParserEngine(getDatabaseType()).parse(sql, false); HintValueContext hintValueContext = sqlParserRule.isSqlCommentParseEnabled() ? 
new HintValueContext() : SQLHintUtils.extractHint(originSQL).orElseGet(HintValueContext::new); + SQLStatementContext sqlStatementContext = new SQLBindEngine(metaDataContexts.getMetaData(), databaseName, hintValueContext).bind(sqlStatement, Collections.emptyList()); return new QueryContext(sqlStatementContext, sql, Collections.emptyList(), hintValueContext); } + private DatabaseType getDatabaseType() { + DatabaseType protocolType = metaDataContexts.getMetaData().getDatabase(databaseName).getProtocolType(); + return protocolType.getTrunkDatabaseType().orElse(protocolType); + } + private ExecutionContext createExecutionContext(final QueryContext queryContext) throws SQLException { clearStatements(); RuleMetaData globalRuleMetaData = metaDataContexts.getMetaData().getGlobalRuleMetaData(); @@ -534,7 +523,7 @@ private boolean executeWithImplicitCommitTransaction(final ExecuteCallback callb result = useDriverToExecute(callback); connection.commit(); // CHECKSTYLE:OFF - } catch (final RuntimeException ex) { + } catch (final Exception ex) { // CHECKSTYLE:ON connection.rollback(); throw SQLExceptionTransformEngine.toSQLException(ex, metaDataContexts.getMetaData().getDatabase(databaseName).getProtocolType()); diff --git a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/exception/syntax/DriverURLProviderNotFoundException.java b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/exception/syntax/URLProviderNotFoundException.java similarity index 80% rename from jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/exception/syntax/DriverURLProviderNotFoundException.java rename to jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/exception/syntax/URLProviderNotFoundException.java index 841837e11c63e..2f952ec96d4ca 100644 --- a/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/exception/syntax/DriverURLProviderNotFoundException.java +++ 
b/jdbc/core/src/main/java/org/apache/shardingsphere/driver/jdbc/exception/syntax/URLProviderNotFoundException.java @@ -21,13 +21,13 @@ import org.apache.shardingsphere.infra.exception.core.external.sql.sqlstate.XOpenSQLState; /** - * Driver URL provider not found exception. + * URL provider not found exception. */ -public final class DriverURLProviderNotFoundException extends SyntaxSQLException { +public final class URLProviderNotFoundException extends SyntaxSQLException { private static final long serialVersionUID = 1017115393560838384L; - public DriverURLProviderNotFoundException(final String url) { - super(XOpenSQLState.NOT_FOUND, 12, "Can not find driver url provider for `%s`.", url); + public URLProviderNotFoundException(final String url) { + super(XOpenSQLState.NOT_FOUND, 12, "Can not find url provider for `%s`.", url); } } diff --git a/jdbc/core/src/main/resources/META-INF/services/org.apache.shardingsphere.driver.jdbc.core.driver.ShardingSphereDriverURLProvider b/jdbc/core/src/main/resources/META-INF/services/org.apache.shardingsphere.driver.jdbc.core.driver.ShardingSphereURLProvider similarity index 82% rename from jdbc/core/src/main/resources/META-INF/services/org.apache.shardingsphere.driver.jdbc.core.driver.ShardingSphereDriverURLProvider rename to jdbc/core/src/main/resources/META-INF/services/org.apache.shardingsphere.driver.jdbc.core.driver.ShardingSphereURLProvider index 5a03d04716e78..5ccd96e77e8cd 100644 --- a/jdbc/core/src/main/resources/META-INF/services/org.apache.shardingsphere.driver.jdbc.core.driver.ShardingSphereDriverURLProvider +++ b/jdbc/core/src/main/resources/META-INF/services/org.apache.shardingsphere.driver.jdbc.core.driver.ShardingSphereURLProvider @@ -15,6 +15,6 @@ # limitations under the License. 
# -org.apache.shardingsphere.driver.jdbc.core.driver.spi.AbsolutePathDriverURLProvider -org.apache.shardingsphere.driver.jdbc.core.driver.spi.ClasspathDriverURLProvider -org.apache.shardingsphere.driver.jdbc.core.driver.spi.ApolloDriverURLProvider +org.apache.shardingsphere.driver.jdbc.core.driver.spi.AbsolutePathURLProvider +org.apache.shardingsphere.driver.jdbc.core.driver.spi.ClasspathURLProvider +org.apache.shardingsphere.driver.jdbc.core.driver.spi.ApolloURLProvider diff --git a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/executor/batch/BatchPreparedStatementExecutorTest.java b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/executor/batch/BatchPreparedStatementExecutorTest.java index 1b9a51255177a..5a8e8716a303b 100644 --- a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/executor/batch/BatchPreparedStatementExecutorTest.java +++ b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/executor/batch/BatchPreparedStatementExecutorTest.java @@ -21,7 +21,6 @@ import org.apache.shardingsphere.driver.jdbc.core.connection.ShardingSphereConnection; import org.apache.shardingsphere.infra.binder.context.segment.table.TablesContext; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.executor.kernel.ExecutorEngine; import org.apache.shardingsphere.infra.executor.kernel.model.ExecutionGroup; import org.apache.shardingsphere.infra.executor.kernel.model.ExecutionGroupContext; @@ -33,7 +32,6 @@ import org.apache.shardingsphere.infra.executor.sql.execute.engine.driver.jdbc.JDBCExecutionUnit; import org.apache.shardingsphere.infra.executor.sql.execute.engine.driver.jdbc.JDBCExecutor; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; -import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.mode.manager.ContextManager; import 
org.apache.shardingsphere.mode.metadata.MetaDataContexts; import org.apache.shardingsphere.sharding.rule.ShardingRule; @@ -52,16 +50,13 @@ import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; -import javax.sql.DataSource; import java.sql.PreparedStatement; import java.sql.SQLException; import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; -import java.util.Map; import java.util.Properties; import static org.hamcrest.CoreMatchers.is; @@ -79,7 +74,7 @@ class BatchPreparedStatementExecutorTest { private static final String SQL = "DELETE FROM table_x WHERE id=?"; - private final ExecutorEngine executorEngine = ExecutorEngine.createExecutorEngineWithCPU(); + private final ExecutorEngine executorEngine = ExecutorEngine.createExecutorEngineWithSize(Runtime.getRuntime().availableProcessors() * 2 - 1); private BatchPreparedStatementExecutor executor; @@ -99,7 +94,6 @@ private ContextManager mockContextManager() { ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); MetaDataContexts metaDataContexts = mockMetaDataContexts(); when(result.getMetaDataContexts()).thenReturn(metaDataContexts); - when(result.getDataSourceMap("foo_db")).thenReturn(mockDataSourceMap()); return result; } @@ -107,8 +101,6 @@ private MetaDataContexts mockMetaDataContexts() { MetaDataContexts result = mock(MetaDataContexts.class, RETURNS_DEEP_STUBS); RuleMetaData globalRuleMetaData = new RuleMetaData(Arrays.asList(mockTransactionRule(), new TrafficRule(new DefaultTrafficRuleConfigurationBuilder().build()))); when(result.getMetaData().getGlobalRuleMetaData()).thenReturn(globalRuleMetaData); - when(result.getMetaData().getDatabase("foo_db").getResourceMetaData().getStorageTypes()) - .thenReturn(Collections.singletonMap("ds_0", TypedSPILoader.getService(DatabaseType.class, "H2"))); RuleMetaData databaseRuleMetaData = new 
RuleMetaData(Collections.singleton(mockShardingRule())); when(result.getMetaData().getDatabase("foo_db").getRuleMetaData()).thenReturn(databaseRuleMetaData); return result; @@ -124,14 +116,6 @@ private ShardingRule mockShardingRule() { return result; } - private Map mockDataSourceMap() { - Map result = new LinkedHashMap<>(2, 1F); - DataSource dataSource = mock(DataSource.class, RETURNS_DEEP_STUBS); - result.put("ds_0", dataSource); - result.put("ds_1", dataSource); - return result; - } - @AfterEach void tearDown() { executorEngine.close(); @@ -141,7 +125,7 @@ void tearDown() { void assertNoPreparedStatement() throws SQLException { PreparedStatement preparedStatement = getPreparedStatement(); when(preparedStatement.executeBatch()).thenReturn(new int[]{0, 0}); - setExecutionGroups(Collections.singletonList(preparedStatement)); + setExecutionGroups(Collections.singleton(preparedStatement)); assertThat(executor.executeBatch(sqlStatementContext), is(new int[]{0, 0})); } @@ -149,7 +133,7 @@ void assertNoPreparedStatement() throws SQLException { void assertExecuteBatchForSinglePreparedStatementSuccess() throws SQLException { PreparedStatement preparedStatement = getPreparedStatement(); when(preparedStatement.executeBatch()).thenReturn(new int[]{10, 20}); - setExecutionGroups(Collections.singletonList(preparedStatement)); + setExecutionGroups(Collections.singleton(preparedStatement)); assertThat(executor.executeBatch(sqlStatementContext), is(new int[]{10, 20})); verify(preparedStatement).executeBatch(); } @@ -171,7 +155,7 @@ void assertExecuteBatchForSinglePreparedStatementFailure() throws SQLException { PreparedStatement preparedStatement = getPreparedStatement(); SQLException ex = new SQLException(""); when(preparedStatement.executeBatch()).thenThrow(ex); - setExecutionGroups(Collections.singletonList(preparedStatement)); + setExecutionGroups(Collections.singleton(preparedStatement)); assertThrows(SQLException.class, () -> executor.executeBatch(sqlStatementContext)); 
verify(preparedStatement).executeBatch(); } @@ -192,7 +176,7 @@ private PreparedStatement getPreparedStatement() throws SQLException { return result; } - private void setExecutionGroups(final List preparedStatements) { + private void setExecutionGroups(final Collection preparedStatements) { Collection> executionGroups = new LinkedList<>(); List executionUnits = new LinkedList<>(); executionGroups.add(new ExecutionGroup<>(executionUnits)); diff --git a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/adapter/PreparedStatementAdapterTest.java b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/adapter/PreparedStatementAdapterTest.java index e699ce0644e86..d242a0d1a38fb 100644 --- a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/adapter/PreparedStatementAdapterTest.java +++ b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/adapter/PreparedStatementAdapterTest.java @@ -75,8 +75,6 @@ void setUp() throws SQLException { when(connection.getContextManager().getMetaDataContexts().getMetaData().getProps()).thenReturn(new ConfigurationProperties(new Properties())); when(connection.getContextManager().getMetaDataContexts().getMetaData().getDatabase( connection.getDatabaseName()).getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "MySQL")); - when(connection.getContextManager().getMetaDataContexts().getMetaData().getDatabase(connection.getDatabaseName()).getResourceMetaData().getStorageTypes()) - .thenReturn(Collections.singletonMap("ds_0", TypedSPILoader.getService(DatabaseType.class, "MySQL"))); shardingSpherePreparedStatement = new ShardingSpherePreparedStatement(connection, "SELECT 1"); } diff --git a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManagerTest.java b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManagerTest.java index 8cbc47fdfb3d8..654d6f56bfd75 100644 --- 
a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManagerTest.java +++ b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/DriverDatabaseConnectionManagerTest.java @@ -20,10 +20,11 @@ import com.zaxxer.hikari.HikariDataSource; import org.apache.shardingsphere.infra.database.core.DefaultDatabase; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.executor.sql.execute.engine.ConnectionMode; import org.apache.shardingsphere.infra.instance.metadata.InstanceType; import org.apache.shardingsphere.infra.instance.metadata.proxy.ProxyInstanceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.metadata.user.ShardingSphereUser; import org.apache.shardingsphere.metadata.persist.MetaDataPersistService; @@ -55,6 +56,7 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -71,11 +73,10 @@ void setUp() throws SQLException { databaseConnectionManager = new DriverDatabaseConnectionManager(DefaultDatabase.LOGIC_NAME, mockContextManager()); } - @SuppressWarnings({"unchecked", "rawtypes"}) private ContextManager mockContextManager() throws SQLException { ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); - Map dataSourceMap = mockDataSourceMap(); - 
when(result.getDataSourceMap(DefaultDatabase.LOGIC_NAME)).thenReturn(dataSourceMap); + Map storageUnits = mockStorageUnits(); + when(result.getStorageUnits(DefaultDatabase.LOGIC_NAME)).thenReturn(storageUnits); MetaDataPersistService persistService = mockMetaDataPersistService(); when(result.getMetaDataContexts().getPersistService()).thenReturn(persistService); when(result.getMetaDataContexts().getMetaData().getGlobalRuleMetaData()).thenReturn( @@ -83,23 +84,27 @@ private ContextManager mockContextManager() throws SQLException { when(result.getInstanceContext().getAllClusterInstances(InstanceType.PROXY, Arrays.asList("OLTP", "OLAP"))).thenReturn( Collections.singletonList(new ProxyInstanceMetaData("foo_id", "127.0.0.1@3307", "foo_version"))); Map trafficDataSourceMap = mockTrafficDataSourceMap(); - when(DataSourcePoolCreator.create((Map) any())).thenReturn(trafficDataSourceMap); + when(DataSourcePoolCreator.create(any(), eq(true))).thenReturn(trafficDataSourceMap); return result; } - private Map mockDataSourceMap() throws SQLException { - Map result = new HashMap<>(2, 1F); - result.put("ds", new MockedDataSource()); + private Map mockStorageUnits() throws SQLException { + Map result = new HashMap<>(2, 1F); + StorageUnit validStorageUnit = mock(StorageUnit.class); + when(validStorageUnit.getDataSource()).thenReturn(new MockedDataSource()); + result.put("ds", validStorageUnit); + StorageUnit invalidStorageUnit = mock(StorageUnit.class); DataSource invalidDataSource = mock(DataSource.class); when(invalidDataSource.getConnection()).thenThrow(new SQLException()); - result.put("invalid_ds", invalidDataSource); + when(invalidStorageUnit.getDataSource()).thenReturn(invalidDataSource); + result.put("invalid_ds", invalidStorageUnit); return result; } private MetaDataPersistService mockMetaDataPersistService() { MetaDataPersistService result = mock(MetaDataPersistService.class, RETURNS_DEEP_STUBS); when(result.getDataSourceUnitService().load(DefaultDatabase.LOGIC_NAME)) - 
.thenReturn(Collections.singletonMap(DefaultDatabase.LOGIC_NAME, new DataSourceProperties(HikariDataSource.class.getName(), createProperties()))); + .thenReturn(Collections.singletonMap(DefaultDatabase.LOGIC_NAME, new DataSourcePoolProperties(HikariDataSource.class.getName(), createProperties()))); when(result.getGlobalRuleService().loadUsers()).thenReturn(Collections.singletonList(new ShardingSphereUser("root", "root", "localhost"))); return result; } @@ -131,8 +136,9 @@ void assertGetRandomPhysicalDataSourceNameFromContextManager() { @Test void assertGetRandomPhysicalDataSourceNameFromCache() throws SQLException { databaseConnectionManager.getConnections("ds", 0, 1, ConnectionMode.MEMORY_STRICTLY); - String actual = databaseConnectionManager.getRandomPhysicalDataSourceName(); - assertThat(actual, is("ds")); + assertThat(databaseConnectionManager.getRandomPhysicalDataSourceName(), is("ds")); + assertThat(databaseConnectionManager.getRandomPhysicalDataSourceName(), is("ds")); + assertThat(databaseConnectionManager.getRandomPhysicalDataSourceName(), is("ds")); } @Test diff --git a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/ShardingSphereConnectionTest.java b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/ShardingSphereConnectionTest.java index 2ef4191b23781..b8fb0254e1718 100644 --- a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/ShardingSphereConnectionTest.java +++ b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/connection/ShardingSphereConnectionTest.java @@ -20,6 +20,7 @@ import lombok.SneakyThrows; import org.apache.shardingsphere.infra.database.core.DefaultDatabase; import org.apache.shardingsphere.infra.executor.sql.execute.engine.ConnectionMode; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import 
org.apache.shardingsphere.infra.session.connection.ConnectionContext; import org.apache.shardingsphere.mode.manager.ContextManager; @@ -29,12 +30,9 @@ import org.apache.shardingsphere.transaction.api.TransactionType; import org.apache.shardingsphere.transaction.config.TransactionRuleConfiguration; import org.apache.shardingsphere.transaction.rule.TransactionRule; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.internal.configuration.plugins.Plugins; -import javax.sql.DataSource; import java.sql.CallableStatement; import java.sql.Connection; import java.sql.SQLException; @@ -54,52 +52,33 @@ class ShardingSphereConnectionTest { - private ShardingSphereConnection connection; - - @BeforeEach - void setUp() { - connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager()); - } - - private ContextManager mockContextManager() { - ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); - when(result.getDataSourceMap(DefaultDatabase.LOGIC_NAME)).thenReturn(Collections.singletonMap("ds", mock(DataSource.class, RETURNS_DEEP_STUBS))); - when(result.getMetaDataContexts().getMetaData().getGlobalRuleMetaData()) - .thenReturn(new RuleMetaData(Arrays.asList(mockTransactionRule(), mock(TrafficRule.class)))); - return result; - } - - private TransactionRule mockTransactionRule() { - return new TransactionRule(new TransactionRuleConfiguration(TransactionType.LOCAL.name(), "", new Properties()), Collections.emptyMap()); - } - - @AfterEach - void clear() { - try { - connection.close(); - } catch (final SQLException ignored) { - } - } - @Test void assertIsHoldTransaction() throws SQLException { - connection.setAutoCommit(false); - assertTrue(connection.isHoldTransaction()); + try (ShardingSphereConnection connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager())) { + connection.setAutoCommit(false); + 
assertTrue(connection.isHoldTransaction()); + } } @Test void assertIsNotHoldTransaction() throws SQLException { - connection.setAutoCommit(true); - assertFalse(connection.isHoldTransaction()); + try (ShardingSphereConnection connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager())) { + connection.setAutoCommit(true); + assertFalse(connection.isHoldTransaction()); + } + } @Test void assertSetAutoCommitWithLocalTransaction() throws SQLException { Connection physicalConnection = mock(Connection.class); - when(connection.getContextManager().getDataSourceMap(DefaultDatabase.LOGIC_NAME).get("ds").getConnection()).thenReturn(physicalConnection); - connection.getDatabaseConnectionManager().getConnections("ds", 0, 1, ConnectionMode.MEMORY_STRICTLY); - connection.setAutoCommit(true); - assertTrue(connection.getAutoCommit()); + StorageUnit storageUnit = mock(StorageUnit.class, RETURNS_DEEP_STUBS); + when(storageUnit.getDataSource().getConnection()).thenReturn(physicalConnection); + try (ShardingSphereConnection connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager(storageUnit))) { + connection.getDatabaseConnectionManager().getConnections("ds", 0, 1, ConnectionMode.MEMORY_STRICTLY); + connection.setAutoCommit(true); + assertTrue(connection.getAutoCommit()); + } verify(physicalConnection).setAutoCommit(true); } @@ -108,24 +87,29 @@ void assertSetAutoCommitWithDistributedTransaction() throws SQLException { ConnectionTransaction connectionTransaction = mock(ConnectionTransaction.class); when(connectionTransaction.getDistributedTransactionOperationType(true)).thenReturn(DistributedTransactionOperationType.COMMIT); when(connectionTransaction.getTransactionType()).thenReturn(TransactionType.XA); - mockConnectionManager(connectionTransaction); - connection.setAutoCommit(true); - assertTrue(connection.getAutoCommit()); + try (ShardingSphereConnection connection = new 
ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager())) { + mockConnectionManager(connection, connectionTransaction); + connection.setAutoCommit(true); + assertTrue(connection.getAutoCommit()); + } verify(connectionTransaction).commit(); } @Test void assertCommitWithLocalTransaction() throws SQLException { Connection physicalConnection = mock(Connection.class); - when(connection.getContextManager().getDataSourceMap(DefaultDatabase.LOGIC_NAME).get("ds").getConnection()).thenReturn(physicalConnection); - connection.getDatabaseConnectionManager().getConnections("ds", 0, 1, ConnectionMode.MEMORY_STRICTLY); - connection.setAutoCommit(false); - assertFalse(connection.getAutoCommit()); - assertTrue(connection.getDatabaseConnectionManager().getConnectionContext().getTransactionContext().isInTransaction()); - verify(physicalConnection).setAutoCommit(false); - connection.commit(); - assertFalse(connection.getDatabaseConnectionManager().getConnectionContext().getTransactionContext().isInTransaction()); - verify(physicalConnection).commit(); + StorageUnit storageUnit = mock(StorageUnit.class, RETURNS_DEEP_STUBS); + when(storageUnit.getDataSource().getConnection()).thenReturn(physicalConnection); + try (ShardingSphereConnection connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager(storageUnit))) { + connection.getDatabaseConnectionManager().getConnections("ds", 0, 1, ConnectionMode.MEMORY_STRICTLY); + connection.setAutoCommit(false); + assertFalse(connection.getAutoCommit()); + assertTrue(connection.getDatabaseConnectionManager().getConnectionContext().getTransactionContext().isInTransaction()); + verify(physicalConnection).setAutoCommit(false); + connection.commit(); + assertFalse(connection.getDatabaseConnectionManager().getConnectionContext().getTransactionContext().isInTransaction()); + verify(physicalConnection).commit(); + } } @Test @@ -133,25 +117,30 @@ void assertCommitWithDistributedTransaction() throws 
SQLException { ConnectionTransaction connectionTransaction = mock(ConnectionTransaction.class); when(connectionTransaction.getDistributedTransactionOperationType(false)).thenReturn(DistributedTransactionOperationType.BEGIN); when(connectionTransaction.getTransactionType()).thenReturn(TransactionType.XA); - DriverDatabaseConnectionManager databaseConnectionManager = mockConnectionManager(connectionTransaction); - connection.setAutoCommit(false); - assertTrue(databaseConnectionManager.getConnectionContext().getTransactionContext().isInTransaction()); - assertFalse(connection.getAutoCommit()); - assertTrue(connection.getDatabaseConnectionManager().getConnectionContext().getTransactionContext().isInTransaction()); - verify(connectionTransaction).begin(); - connection.commit(); - assertFalse(connection.getDatabaseConnectionManager().getConnectionContext().getTransactionContext().isInTransaction()); - verify(databaseConnectionManager).commit(); + try (ShardingSphereConnection connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager())) { + DriverDatabaseConnectionManager databaseConnectionManager = mockConnectionManager(connection, connectionTransaction); + connection.setAutoCommit(false); + assertTrue(databaseConnectionManager.getConnectionContext().getTransactionContext().isInTransaction()); + assertFalse(connection.getAutoCommit()); + assertTrue(connection.getDatabaseConnectionManager().getConnectionContext().getTransactionContext().isInTransaction()); + verify(connectionTransaction).begin(); + connection.commit(); + assertFalse(connection.getDatabaseConnectionManager().getConnectionContext().getTransactionContext().isInTransaction()); + verify(databaseConnectionManager).commit(); + } } @Test void assertRollbackWithLocalTransaction() throws SQLException { Connection physicalConnection = mock(Connection.class); - 
when(connection.getContextManager().getDataSourceMap(DefaultDatabase.LOGIC_NAME).get("ds").getConnection()).thenReturn(physicalConnection); - connection.getDatabaseConnectionManager().getConnections("ds", 0, 1, ConnectionMode.MEMORY_STRICTLY); - connection.setAutoCommit(false); - assertFalse(connection.getAutoCommit()); - connection.rollback(); + StorageUnit storageUnit = mock(StorageUnit.class, RETURNS_DEEP_STUBS); + when(storageUnit.getDataSource().getConnection()).thenReturn(physicalConnection); + try (ShardingSphereConnection connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager(storageUnit))) { + connection.getDatabaseConnectionManager().getConnections("ds", 0, 1, ConnectionMode.MEMORY_STRICTLY); + connection.setAutoCommit(false); + assertFalse(connection.getAutoCommit()); + connection.rollback(); + } verify(physicalConnection).rollback(); } @@ -160,18 +149,20 @@ void assertRollbackWithDistributedTransaction() throws SQLException { ConnectionTransaction connectionTransaction = mock(ConnectionTransaction.class); when(connectionTransaction.getDistributedTransactionOperationType(false)).thenReturn(DistributedTransactionOperationType.BEGIN); when(connectionTransaction.getTransactionType()).thenReturn(TransactionType.XA); - final DriverDatabaseConnectionManager databaseConnectionManager = mockConnectionManager(connectionTransaction); - connection.setAutoCommit(false); - assertFalse(connection.getAutoCommit()); - assertTrue(connection.getDatabaseConnectionManager().getConnectionContext().getTransactionContext().isInTransaction()); - verify(connectionTransaction).begin(); - connection.rollback(); - assertFalse(connection.getDatabaseConnectionManager().getConnectionContext().getTransactionContext().isInTransaction()); - verify(databaseConnectionManager).rollback(); + try (ShardingSphereConnection connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager())) { + final DriverDatabaseConnectionManager 
databaseConnectionManager = mockConnectionManager(connection, connectionTransaction); + connection.setAutoCommit(false); + assertFalse(connection.getAutoCommit()); + assertTrue(connection.getDatabaseConnectionManager().getConnectionContext().getTransactionContext().isInTransaction()); + verify(connectionTransaction).begin(); + connection.rollback(); + assertFalse(connection.getDatabaseConnectionManager().getConnectionContext().getTransactionContext().isInTransaction()); + verify(databaseConnectionManager).rollback(); + } } @SneakyThrows(ReflectiveOperationException.class) - private DriverDatabaseConnectionManager mockConnectionManager(final ConnectionTransaction connectionTransaction) { + private DriverDatabaseConnectionManager mockConnectionManager(final ShardingSphereConnection connection, final ConnectionTransaction connectionTransaction) { DriverDatabaseConnectionManager result = mock(DriverDatabaseConnectionManager.class); when(result.getConnectionTransaction()).thenReturn(connectionTransaction); when(result.getConnectionContext()).thenReturn(new ConnectionContext()); @@ -181,42 +172,56 @@ private DriverDatabaseConnectionManager mockConnectionManager(final ConnectionTr @Test void assertIsValidWhenEmptyConnection() throws SQLException { - assertTrue(connection.isValid(0)); + try (ShardingSphereConnection connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager())) { + assertTrue(connection.isValid(0)); + } } @Test void assertIsInvalid() throws SQLException { - connection.getDatabaseConnectionManager().getConnections("ds", 0, 1, ConnectionMode.MEMORY_STRICTLY); - assertFalse(connection.isValid(0)); + try (ShardingSphereConnection connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager())) { + connection.getDatabaseConnectionManager().getConnections("ds", 0, 1, ConnectionMode.MEMORY_STRICTLY); + assertFalse(connection.isValid(0)); + } } @Test void assertSetReadOnly() throws SQLException { - 
assertFalse(connection.isReadOnly()); - Connection physicalConnection = connection.getDatabaseConnectionManager().getConnections("ds", 0, 1, ConnectionMode.MEMORY_STRICTLY).get(0); - connection.setReadOnly(true); - assertTrue(connection.isReadOnly()); - verify(physicalConnection).setReadOnly(true); + try (ShardingSphereConnection connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager())) { + assertFalse(connection.isReadOnly()); + Connection physicalConnection = connection.getDatabaseConnectionManager().getConnections("ds", 0, 1, ConnectionMode.MEMORY_STRICTLY).get(0); + connection.setReadOnly(true); + assertTrue(connection.isReadOnly()); + verify(physicalConnection).setReadOnly(true); + } } @Test void assertGetTransactionIsolationWithoutCachedConnections() throws SQLException { - assertThat(connection.getTransactionIsolation(), is(Connection.TRANSACTION_READ_UNCOMMITTED)); + try (ShardingSphereConnection connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager())) { + assertThat(connection.getTransactionIsolation(), is(Connection.TRANSACTION_READ_UNCOMMITTED)); + } + } @Test void assertSetTransactionIsolation() throws SQLException { - Connection physicalConnection = connection.getDatabaseConnectionManager().getConnections("ds", 0, 1, ConnectionMode.MEMORY_STRICTLY).get(0); - connection.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE); - verify(physicalConnection).setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE); + try (ShardingSphereConnection connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager())) { + Connection physicalConnection = connection.getDatabaseConnectionManager().getConnections("ds", 0, 1, ConnectionMode.MEMORY_STRICTLY).get(0); + connection.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE); + verify(physicalConnection).setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE); + } } @Test void 
assertCreateArrayOf() throws SQLException { Connection physicalConnection = mock(Connection.class); - when(connection.getContextManager().getDataSourceMap(DefaultDatabase.LOGIC_NAME).get("ds").getConnection()).thenReturn(physicalConnection); - connection.getDatabaseConnectionManager().getConnections("ds", 0, 1, ConnectionMode.MEMORY_STRICTLY); - assertNull(connection.createArrayOf("int", null)); + StorageUnit storageUnit = mock(StorageUnit.class, RETURNS_DEEP_STUBS); + when(storageUnit.getDataSource().getConnection()).thenReturn(physicalConnection); + try (ShardingSphereConnection connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager(storageUnit))) { + connection.getDatabaseConnectionManager().getConnections("ds", 0, 1, ConnectionMode.MEMORY_STRICTLY); + assertNull(connection.createArrayOf("int", null)); + } verify(physicalConnection).createArrayOf("int", null); } @@ -225,14 +230,33 @@ void assertPrepareCall() throws SQLException { CallableStatement expected = mock(CallableStatement.class); Connection physicalConnection = mock(Connection.class); when(physicalConnection.prepareCall("")).thenReturn(expected); - when(connection.getContextManager().getDataSourceMap(DefaultDatabase.LOGIC_NAME).get("ds").getConnection()).thenReturn(physicalConnection); - CallableStatement actual = connection.prepareCall(""); - assertThat(actual, is(expected)); + StorageUnit storageUnit = mock(StorageUnit.class, RETURNS_DEEP_STUBS); + when(storageUnit.getDataSource().getConnection()).thenReturn(physicalConnection); + try (ShardingSphereConnection connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager(storageUnit))) { + assertThat(connection.prepareCall(""), is(expected)); + } } @Test void assertClose() throws SQLException { - connection.close(); - assertTrue(connection.isClosed()); + try (ShardingSphereConnection connection = new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager())) { + 
connection.close(); + assertTrue(connection.isClosed()); + } + } + + private ContextManager mockContextManager() { + return mockContextManager(mock(StorageUnit.class, RETURNS_DEEP_STUBS)); + } + + private ContextManager mockContextManager(final StorageUnit storageUnit) { + ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); + when(result.getStorageUnits(DefaultDatabase.LOGIC_NAME)).thenReturn(Collections.singletonMap("ds", storageUnit)); + when(result.getMetaDataContexts().getMetaData().getGlobalRuleMetaData()).thenReturn(new RuleMetaData(Arrays.asList(mockTransactionRule(), mock(TrafficRule.class)))); + return result; + } + + private TransactionRule mockTransactionRule() { + return new TransactionRule(new TransactionRuleConfiguration(TransactionType.LOCAL.name(), "", new Properties()), Collections.emptyMap()); } } diff --git a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/datasource/ShardingSphereDataSourceTest.java b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/datasource/ShardingSphereDataSourceTest.java index ebee0d67c9e09..380119975e8d1 100644 --- a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/datasource/ShardingSphereDataSourceTest.java +++ b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/datasource/ShardingSphereDataSourceTest.java @@ -22,6 +22,7 @@ import org.apache.shardingsphere.driver.jdbc.core.connection.ShardingSphereConnection; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.database.core.DefaultDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode; import org.apache.shardingsphere.infra.executor.sql.execute.engine.ConnectionMode; import org.apache.shardingsphere.infra.state.cluster.ClusterState; import org.apache.shardingsphere.infra.state.instance.InstanceState; @@ -58,7 +59,7 @@ void assertNewConstructorWithModeConfigurationOnly() throws 
Exception { assertNotNull(contextManager.getMetaDataContexts().getMetaData().getDatabase(DefaultDatabase.LOGIC_NAME)); assertThat(contextManager.getClusterStateContext().getCurrentState(), is(ClusterState.OK)); assertThat(contextManager.getInstanceContext().getInstance().getState().getCurrentState(), is(InstanceState.OK)); - assertTrue(contextManager.getDataSourceMap(DefaultDatabase.LOGIC_NAME).isEmpty()); + assertTrue(contextManager.getStorageUnits(DefaultDatabase.LOGIC_NAME).isEmpty()); } } @@ -71,8 +72,8 @@ void assertNewConstructorWithAllArguments() throws Exception { assertNotNull(contextManager.getMetaDataContexts().getMetaData().getDatabase(DefaultDatabase.LOGIC_NAME)); assertThat(contextManager.getClusterStateContext().getCurrentState(), is(ClusterState.OK)); assertThat(contextManager.getInstanceContext().getInstance().getState().getCurrentState(), is(InstanceState.OK)); - assertThat(contextManager.getDataSourceMap(DefaultDatabase.LOGIC_NAME).size(), is(1)); - assertThat(contextManager.getDataSourceMap(DefaultDatabase.LOGIC_NAME).get("ds").getConnection().getMetaData().getURL(), is("jdbc:mock://127.0.0.1/foo_ds")); + assertThat(contextManager.getStorageUnits(DefaultDatabase.LOGIC_NAME).size(), is(1)); + assertThat(contextManager.getStorageUnits(DefaultDatabase.LOGIC_NAME).get("ds").getDataSource().getConnection().getMetaData().getURL(), is("jdbc:mock://127.0.0.1/foo_ds")); } } @@ -105,7 +106,7 @@ private ShardingSphereDataSource createShardingSphereDataSource(final DataSource @Test void assertEmptyDataSourceMap() throws Exception { try (ShardingSphereDataSource actual = new ShardingSphereDataSource(DefaultDatabase.LOGIC_NAME, null)) { - assertTrue(getContextManager(actual).getDataSourceMap(DefaultDatabase.LOGIC_NAME).isEmpty()); + assertTrue(getContextManager(actual).getStorageUnits(DefaultDatabase.LOGIC_NAME).isEmpty()); assertThat(actual.getLoginTimeout(), is(0)); } } @@ -113,7 +114,7 @@ void assertEmptyDataSourceMap() throws Exception { @Test void 
assertNotEmptyDataSourceMap() throws Exception { try (ShardingSphereDataSource actual = createShardingSphereDataSource(createHikariDataSource())) { - assertThat(getContextManager(actual).getDataSourceMap(DefaultDatabase.LOGIC_NAME).size(), is(1)); + assertThat(getContextManager(actual).getStorageUnits(DefaultDatabase.LOGIC_NAME).size(), is(1)); assertThat(actual.getLoginTimeout(), is(15)); } } @@ -131,20 +132,9 @@ void assertClose() throws Exception { try (HikariDataSource dataSource = createHikariDataSource()) { ShardingSphereDataSource actual = createShardingSphereDataSource(dataSource); actual.close(); - Map dataSourceMap = getContextManager(actual).getMetaDataContexts().getMetaData() - .getDatabase(DefaultDatabase.LOGIC_NAME).getResourceMetaData().getStorageNodeMetaData().getDataSources(); - assertTrue(((HikariDataSource) dataSourceMap.get("ds")).isClosed()); - } - } - - @Test - void assertCloseWithDataSourceNames() throws SQLException { - try (HikariDataSource dataSource = createHikariDataSource()) { - ShardingSphereDataSource actual = createShardingSphereDataSource(dataSource); - actual.close(Collections.singleton("ds")); - Map dataSourceMap = getContextManager(actual).getMetaDataContexts().getMetaData() - .getDatabase(DefaultDatabase.LOGIC_NAME).getResourceMetaData().getStorageNodeMetaData().getDataSources(); - assertTrue(((HikariDataSource) dataSourceMap.get("ds")).isClosed()); + Map dataSourceMap = getContextManager(actual).getMetaDataContexts().getMetaData() + .getDatabase(DefaultDatabase.LOGIC_NAME).getResourceMetaData().getStorageNodeDataSources(); + assertTrue(((HikariDataSource) dataSourceMap.get(new StorageNode("ds"))).isClosed()); } } diff --git a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/datasource/metadata/ShardingSphereDatabaseMetaDataTest.java b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/datasource/metadata/ShardingSphereDatabaseMetaDataTest.java index 13c319343e8e4..037a7dfce3e3f 100644 --- 
a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/datasource/metadata/ShardingSphereDatabaseMetaDataTest.java +++ b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/datasource/metadata/ShardingSphereDatabaseMetaDataTest.java @@ -43,8 +43,6 @@ import java.sql.SQLException; import java.sql.Types; import java.util.Collections; -import java.util.HashMap; -import java.util.Map; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; @@ -80,20 +78,16 @@ class ShardingSphereDatabaseMetaDataTest { @Mock(answer = Answers.RETURNS_DEEP_STUBS) private MetaDataContexts metaDataContexts; - private final Map dataSourceMap = new HashMap<>(); - private ShardingSphereDatabaseMetaData shardingSphereDatabaseMetaData; @BeforeEach void setUp() throws SQLException { - dataSourceMap.put(DATA_SOURCE_NAME, dataSource); when(dataSource.getConnection()).thenReturn(connection); when(connection.getMetaData()).thenReturn(databaseMetaData); when(resultSet.getMetaData()).thenReturn(mock(ResultSetMetaData.class)); when(shardingSphereConnection.getDatabaseConnectionManager().getRandomPhysicalDataSourceName()).thenReturn(DATA_SOURCE_NAME); when(shardingSphereConnection.getDatabaseConnectionManager().getRandomConnection()).thenReturn(connection); when(shardingSphereConnection.getContextManager().getMetaDataContexts()).thenReturn(metaDataContexts); - when(shardingSphereConnection.getContextManager().getDataSourceMap(DefaultDatabase.LOGIC_NAME)).thenReturn(dataSourceMap); when(shardingSphereConnection.getDatabaseName()).thenReturn(DefaultDatabase.LOGIC_NAME); ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); when(metaDataContexts.getMetaData().getDatabase(shardingSphereConnection.getDatabaseName())).thenReturn(database); @@ -105,8 +99,8 @@ void setUp() throws SQLException { private ShardingRule mockShardingRule() { ShardingRuleConfiguration ruleConfig = new ShardingRuleConfiguration(); 
ShardingTableRuleConfiguration shardingTableRuleConfig = new ShardingTableRuleConfiguration(TABLE_NAME, DATA_SOURCE_NAME + "." + TABLE_NAME); - ruleConfig.setTables(Collections.singletonList(shardingTableRuleConfig)); - return new ShardingRule(ruleConfig, Collections.singletonList(DATA_SOURCE_NAME), mock(InstanceContext.class)); + ruleConfig.setTables(Collections.singleton(shardingTableRuleConfig)); + return new ShardingRule(ruleConfig, Collections.singleton(DATA_SOURCE_NAME), mock(InstanceContext.class)); } @Test diff --git a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereDriverURLManagerTest.java b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereURLManagerTest.java similarity index 81% rename from jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereDriverURLManagerTest.java rename to jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereURLManagerTest.java index b67572a775902..ce12dec26a0b5 100644 --- a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereDriverURLManagerTest.java +++ b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/driver/ShardingSphereURLManagerTest.java @@ -20,7 +20,7 @@ import com.ctrip.framework.apollo.ConfigFile; import com.ctrip.framework.apollo.ConfigService; import com.ctrip.framework.apollo.core.enums.ConfigFileFormat; -import org.apache.shardingsphere.driver.jdbc.exception.syntax.DriverURLProviderNotFoundException; +import org.apache.shardingsphere.driver.jdbc.exception.syntax.URLProviderNotFoundException; import org.apache.shardingsphere.test.mock.AutoMockExtension; import org.apache.shardingsphere.test.mock.StaticMockSettings; import org.junit.jupiter.api.Test; @@ -39,7 +39,7 @@ @ExtendWith(AutoMockExtension.class) @StaticMockSettings(ConfigService.class) -class ShardingSphereDriverURLManagerTest { +class 
ShardingSphereURLManagerTest { private final int fooDriverConfigLength = 999; @@ -47,19 +47,19 @@ class ShardingSphereDriverURLManagerTest { @Test void assertNewConstructorWithEmptyURL() { - assertThrows(DriverURLProviderNotFoundException.class, () -> ShardingSphereDriverURLManager.getContent("jdbc:shardingsphere:", urlPrefix)); + assertThrows(URLProviderNotFoundException.class, () -> ShardingSphereURLManager.getContent("jdbc:shardingsphere:", urlPrefix)); } @Test void assertToClasspathConfigurationFile() { - byte[] actual = ShardingSphereDriverURLManager.getContent("jdbc:shardingsphere:classpath:config/driver/foo-driver-fixture.yaml", urlPrefix); + byte[] actual = ShardingSphereURLManager.getContent("jdbc:shardingsphere:classpath:config/driver/foo-driver-fixture.yaml", urlPrefix); assertThat(actual.length, is(fooDriverConfigLength)); } @Test void assertToAbsolutePathConfigurationFile() { String absolutePath = Objects.requireNonNull(Thread.currentThread().getContextClassLoader().getResource("config/driver/foo-driver-fixture.yaml")).getPath(); - byte[] actual = ShardingSphereDriverURLManager.getContent("jdbc:shardingsphere:absolutepath:" + absolutePath, urlPrefix); + byte[] actual = ShardingSphereURLManager.getContent("jdbc:shardingsphere:absolutepath:" + absolutePath, urlPrefix); assertThat(actual.length, is(fooDriverConfigLength)); } @@ -69,7 +69,7 @@ void assertToApolloConfigurationFile() { when(configFile.getContent()).thenReturn("config content"); when(ConfigService.getConfigFile(anyString(), any(ConfigFileFormat.class))).thenReturn(configFile); String url = "jdbc:shardingsphere:apollo:namespace"; - byte[] content = ShardingSphereDriverURLManager.getContent(url, urlPrefix); + byte[] content = ShardingSphereURLManager.getContent(url, urlPrefix); assertThat("config content".getBytes(StandardCharsets.UTF_8), is(content)); } } diff --git a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/resultset/ShardingSphereResultSetTest.java 
b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/resultset/ShardingSphereResultSetTest.java index 8327f715fcd4d..0f179ddea2467 100644 --- a/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/resultset/ShardingSphereResultSetTest.java +++ b/jdbc/core/src/test/java/org/apache/shardingsphere/driver/jdbc/core/resultset/ShardingSphereResultSetTest.java @@ -537,7 +537,7 @@ void assertGetObjectWithDouble() throws SQLException { @Test void assertGetObjectWithFloat() throws SQLException { - float result = 0.0f; + float result = 0.0F; when(mergeResultSet.getValue(1, float.class)).thenReturn(result); assertThat(shardingSphereResultSet.getObject(1, float.class), is(result)); when(mergeResultSet.getValue(1, Float.class)).thenReturn(result); diff --git a/kernel/authority/api/src/main/java/org/apache/shardingsphere/authority/spi/AuthorityRegistryProvider.java b/kernel/authority/api/src/main/java/org/apache/shardingsphere/authority/spi/AuthorityRegistryProvider.java index 14a854d86cf81..7678600a6170c 100644 --- a/kernel/authority/api/src/main/java/org/apache/shardingsphere/authority/spi/AuthorityRegistryProvider.java +++ b/kernel/authority/api/src/main/java/org/apache/shardingsphere/authority/spi/AuthorityRegistryProvider.java @@ -18,12 +18,10 @@ package org.apache.shardingsphere.authority.spi; import org.apache.shardingsphere.authority.model.AuthorityRegistry; -import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.user.ShardingSphereUser; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPI; import java.util.Collection; -import java.util.Map; /** * Authority registry provider. @@ -33,9 +31,8 @@ public interface AuthorityRegistryProvider extends TypedSPI { /** * Build authority registry. 
* - * @param databases databases * @param users users * @return built authority registry */ - AuthorityRegistry build(Map databases, Collection users); + AuthorityRegistry build(Collection users); } diff --git a/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/provider/database/DatabasePermittedAuthorityRegistryProvider.java b/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/provider/database/DatabasePermittedAuthorityRegistryProvider.java index f3367c9208886..a2c77eaf2e52d 100644 --- a/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/provider/database/DatabasePermittedAuthorityRegistryProvider.java +++ b/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/provider/database/DatabasePermittedAuthorityRegistryProvider.java @@ -21,12 +21,10 @@ import org.apache.shardingsphere.authority.provider.database.builder.DatabasePrivilegeBuilder; import org.apache.shardingsphere.authority.registry.UserPrivilegeMapAuthorityRegistry; import org.apache.shardingsphere.authority.spi.AuthorityRegistryProvider; -import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.user.ShardingSphereUser; import java.util.Collection; import java.util.Collections; -import java.util.Map; import java.util.Properties; /** @@ -44,7 +42,7 @@ public void init(final Properties props) { } @Override - public AuthorityRegistry build(final Map databases, final Collection users) { + public AuthorityRegistry build(final Collection users) { return new UserPrivilegeMapAuthorityRegistry(DatabasePrivilegeBuilder.build(users, props)); } diff --git a/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/provider/simple/AllPermittedAuthorityRegistryProvider.java b/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/provider/simple/AllPermittedAuthorityRegistryProvider.java index 203e59ef8190c..4714cbf1cac5a 100644 --- 
a/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/provider/simple/AllPermittedAuthorityRegistryProvider.java +++ b/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/provider/simple/AllPermittedAuthorityRegistryProvider.java @@ -20,12 +20,10 @@ import org.apache.shardingsphere.authority.model.AuthorityRegistry; import org.apache.shardingsphere.authority.registry.AllPermittedAuthorityRegistry; import org.apache.shardingsphere.authority.spi.AuthorityRegistryProvider; -import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.user.ShardingSphereUser; import java.util.Collection; import java.util.Collections; -import java.util.Map; /** * All permitted authority registry provider. @@ -33,7 +31,7 @@ public final class AllPermittedAuthorityRegistryProvider implements AuthorityRegistryProvider { @Override - public AuthorityRegistry build(final Map databases, final Collection users) { + public AuthorityRegistry build(final Collection users) { return new AllPermittedAuthorityRegistry(); } diff --git a/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/rule/AuthorityRule.java b/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/rule/AuthorityRule.java index 667b9831365fa..9f1fdb0a82a74 100644 --- a/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/rule/AuthorityRule.java +++ b/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/rule/AuthorityRule.java @@ -22,13 +22,11 @@ import org.apache.shardingsphere.authority.model.AuthorityRegistry; import org.apache.shardingsphere.authority.model.ShardingSpherePrivileges; import org.apache.shardingsphere.authority.spi.AuthorityRegistryProvider; -import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.user.Grantee; import 
org.apache.shardingsphere.infra.metadata.user.ShardingSphereUser; import org.apache.shardingsphere.infra.rule.identifier.scope.GlobalRule; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; -import java.util.Map; import java.util.Optional; /** @@ -41,10 +39,10 @@ public final class AuthorityRule implements GlobalRule { private final AuthorityRegistry authorityRegistry; - public AuthorityRule(final AuthorityRuleConfiguration ruleConfig, final Map databases) { + public AuthorityRule(final AuthorityRuleConfiguration ruleConfig) { configuration = ruleConfig; AuthorityRegistryProvider provider = TypedSPILoader.getService(AuthorityRegistryProvider.class, ruleConfig.getAuthorityProvider().getType(), ruleConfig.getAuthorityProvider().getProps()); - authorityRegistry = provider.build(databases, ruleConfig.getUsers()); + authorityRegistry = provider.build(ruleConfig.getUsers()); } /** @@ -78,9 +76,4 @@ public Optional findUser(final Grantee grantee) { public Optional findPrivileges(final Grantee grantee) { return authorityRegistry.findPrivileges(grantee); } - - @Override - public String getType() { - return AuthorityRule.class.getSimpleName(); - } } diff --git a/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/rule/builder/AuthorityRuleBuilder.java b/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/rule/builder/AuthorityRuleBuilder.java index b6209ef7e3c54..e85c789113420 100644 --- a/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/rule/builder/AuthorityRuleBuilder.java +++ b/kernel/authority/core/src/main/java/org/apache/shardingsphere/authority/rule/builder/AuthorityRuleBuilder.java @@ -33,7 +33,7 @@ public final class AuthorityRuleBuilder implements GlobalRuleBuilder databases, final ConfigurationProperties props) { - return new AuthorityRule(ruleConfig, databases); + return new AuthorityRule(ruleConfig); } @Override diff --git 
a/kernel/authority/core/src/test/java/org/apache/shardingsphere/authority/checker/AuthorityCheckerTest.java b/kernel/authority/core/src/test/java/org/apache/shardingsphere/authority/checker/AuthorityCheckerTest.java index f10307cd19b11..bb7d9a1e9be93 100644 --- a/kernel/authority/core/src/test/java/org/apache/shardingsphere/authority/checker/AuthorityCheckerTest.java +++ b/kernel/authority/core/src/test/java/org/apache/shardingsphere/authority/checker/AuthorityCheckerTest.java @@ -40,13 +40,13 @@ class AuthorityCheckerTest { void assertCheckIsAuthorizedDatabase() { Collection users = Collections.singleton(new ShardingSphereUser("root", "", "localhost")); AuthorityRuleConfiguration ruleConfig = new AuthorityRuleConfiguration(users, new AlgorithmConfiguration("ALL_PERMITTED", new Properties()), null); - assertTrue(new AuthorityChecker(new AuthorityRule(ruleConfig, Collections.emptyMap()), new Grantee("root", "localhost")).isAuthorized("db0")); + assertTrue(new AuthorityChecker(new AuthorityRule(ruleConfig), new Grantee("root", "localhost")).isAuthorized("db0")); } @Test void assertCheckPrivileges() { Collection users = Collections.singleton(new ShardingSphereUser("root", "", "localhost")); - AuthorityRule rule = new AuthorityRule(new AuthorityRuleConfiguration(users, new AlgorithmConfiguration("ALL_PERMITTED", new Properties()), null), Collections.emptyMap()); + AuthorityRule rule = new AuthorityRule(new AuthorityRuleConfiguration(users, new AlgorithmConfiguration("ALL_PERMITTED", new Properties()), null)); AuthorityChecker authorityChecker = new AuthorityChecker(rule, new Grantee("root", "localhost")); authorityChecker.checkPrivileges(null, mock(SelectStatement.class)); authorityChecker.checkPrivileges(null, mock(InsertStatement.class)); diff --git a/kernel/authority/core/src/test/java/org/apache/shardingsphere/authority/provider/database/DatabasePermittedAuthorityRegistryProviderTest.java 
b/kernel/authority/core/src/test/java/org/apache/shardingsphere/authority/provider/database/DatabasePermittedAuthorityRegistryProviderTest.java index 7b2fe39409852..13a1ff928a4d4 100644 --- a/kernel/authority/core/src/test/java/org/apache/shardingsphere/authority/provider/database/DatabasePermittedAuthorityRegistryProviderTest.java +++ b/kernel/authority/core/src/test/java/org/apache/shardingsphere/authority/provider/database/DatabasePermittedAuthorityRegistryProviderTest.java @@ -40,7 +40,7 @@ void assertBuild() { Properties props = PropertiesBuilder.build( new Property(DatabasePermittedAuthorityRegistryProvider.PROP_USER_DATABASE_MAPPINGS, "root@localhost=test, user1@127.0.0.1=db_dal_admin, user1@=test, user1@=test1, user1@=*")); AuthorityRegistryProvider provider = TypedSPILoader.getService(AuthorityRegistryProvider.class, "DATABASE_PERMITTED", props); - AuthorityRegistry actual = provider.build(Collections.emptyMap(), Collections.singletonList(new ShardingSphereUser("user1", "", "127.0.0.2"))); + AuthorityRegistry actual = provider.build(Collections.singletonList(new ShardingSphereUser("user1", "", "127.0.0.2"))); Optional privileges = actual.findPrivileges(new Grantee("user1", "127.0.0.2")); assertTrue(privileges.isPresent()); assertTrue(privileges.get().hasPrivileges("test")); diff --git a/kernel/authority/core/src/test/java/org/apache/shardingsphere/authority/rule/AuthorityRuleTest.java b/kernel/authority/core/src/test/java/org/apache/shardingsphere/authority/rule/AuthorityRuleTest.java index 582e9ac25c752..ad8d7078f7183 100644 --- a/kernel/authority/core/src/test/java/org/apache/shardingsphere/authority/rule/AuthorityRuleTest.java +++ b/kernel/authority/core/src/test/java/org/apache/shardingsphere/authority/rule/AuthorityRuleTest.java @@ -24,14 +24,13 @@ import org.junit.jupiter.api.Test; import java.util.Collection; -import java.util.Collections; import java.util.LinkedList; import java.util.Optional; import java.util.Properties; import static 
org.hamcrest.CoreMatchers.is; -import static org.junit.jupiter.api.Assertions.assertFalse; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; class AuthorityRuleTest { @@ -59,6 +58,6 @@ private AuthorityRule createAuthorityRule() { Collection users = new LinkedList<>(); users.add(new ShardingSphereUser("root", "root", "localhost")); users.add(new ShardingSphereUser("admin", "123456", "localhost")); - return new AuthorityRule(new AuthorityRuleConfiguration(users, new AlgorithmConfiguration("ALL_PERMITTED", new Properties()), null), Collections.emptyMap()); + return new AuthorityRule(new AuthorityRuleConfiguration(users, new AlgorithmConfiguration("ALL_PERMITTED", new Properties()), null)); } } diff --git a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/TableNameSchemaNameMapping.java b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/TableNameSchemaNameMapping.java index 32dbc1620ef72..5b37485ce61bc 100644 --- a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/TableNameSchemaNameMapping.java +++ b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/TableNameSchemaNameMapping.java @@ -33,11 +33,6 @@ public final class TableNameSchemaNameMapping { private final Map mapping; - /** - * Convert table name and schema name mapping from schemas. - * - * @param tableSchemaMap table name and schema name map - */ public TableNameSchemaNameMapping(final Map tableSchemaMap) { mapping = null == tableSchemaMap ? 
Collections.emptyMap() : getLogicTableNameMap(tableSchemaMap); } diff --git a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/ingest/DumperConfiguration.java b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/ingest/DumperConfiguration.java index 044658d2aa93a..474ea9cc8be2d 100644 --- a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/ingest/DumperConfiguration.java +++ b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/ingest/DumperConfiguration.java @@ -113,7 +113,7 @@ public String getSchemaName(final ActualTableName actualTableName) { * @return column names */ public Optional> getColumnNames(final LogicTableName logicTableName) { - Set columnNames = null != targetTableColumnsMap ? targetTableColumnsMap.get(logicTableName) : null; + Set columnNames = null == targetTableColumnsMap ? null : targetTableColumnsMap.get(logicTableName); if (null == columnNames) { return Optional.empty(); } @@ -127,7 +127,7 @@ public Optional> getColumnNames(final LogicTableName logicTableName * @return column names of table */ public Optional> getColumnNameSet(final String actualTableName) { - Set result = null != targetTableColumnsMap ? targetTableColumnsMap.get(getLogicTableName(actualTableName)) : null; + Set result = null == targetTableColumnsMap ? 
null : targetTableColumnsMap.get(getLogicTableName(actualTableName)); return Optional.ofNullable(result); } } diff --git a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/ingest/InventoryDumperConfiguration.java b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/ingest/InventoryDumperConfiguration.java index b53da97d11765..6663e20af4a5e 100644 --- a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/ingest/InventoryDumperConfiguration.java +++ b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/ingest/InventoryDumperConfiguration.java @@ -46,7 +46,7 @@ public final class InventoryDumperConfiguration extends DumperConfiguration { private Integer transactionIsolation; - private Integer shardingItem; + private int shardingItem; private int batchSize = 1000; diff --git a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/ShardingSpherePipelineDataSourceConfiguration.java b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/ShardingSpherePipelineDataSourceConfiguration.java index 796900fd0f099..8180e30b9c6e2 100644 --- a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/ShardingSpherePipelineDataSourceConfiguration.java +++ b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/ShardingSpherePipelineDataSourceConfiguration.java @@ -66,7 +66,7 @@ public ShardingSpherePipelineDataSourceConfiguration(final String param) { Map props = rootConfig.getDataSources().values().iterator().next(); databaseType = DatabaseTypeFactory.get(getJdbcUrl(props)); appendJdbcQueryProperties(databaseType); - adjustDataSourceProperties(rootConfig.getDataSources()); + 
adjustDataSourcePoolProperties(rootConfig.getDataSources()); } public ShardingSpherePipelineDataSourceConfiguration(final YamlRootConfiguration rootConfig) { @@ -96,13 +96,13 @@ private void appendJdbcQueryProperties(final DatabaseType databaseType) { rootConfig.getDataSources().forEach((key, value) -> { String jdbcUrlKey = value.containsKey("url") ? "url" : "jdbcUrl"; String jdbcUrl = value.get(jdbcUrlKey).toString(); - Properties queryProperties = standardJdbcUrlParser.parseQueryProperties(jdbcUrl.contains("?") ? jdbcUrl.substring(jdbcUrl.indexOf("?") + 1) : ""); - extension.get().extendQueryProperties(queryProperties); - value.replace(jdbcUrlKey, new JdbcUrlAppender().appendQueryProperties(jdbcUrl, queryProperties)); + Properties queryProps = standardJdbcUrlParser.parseQueryProperties(jdbcUrl.contains("?") ? jdbcUrl.substring(jdbcUrl.indexOf("?") + 1) : ""); + extension.get().extendQueryProperties(queryProps); + value.replace(jdbcUrlKey, new JdbcUrlAppender().appendQueryProperties(jdbcUrl, queryProps)); }); } - private void adjustDataSourceProperties(final Map> dataSources) { + private void adjustDataSourcePoolProperties(final Map> dataSources) { for (Map queryProps : dataSources.values()) { for (String each : Arrays.asList("minPoolSize", "minimumIdle")) { queryProps.put(each, "1"); diff --git a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/StandardPipelineDataSourceConfiguration.java b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/StandardPipelineDataSourceConfiguration.java index a4b9dc207a671..39b30d6bd266f 100644 --- a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/StandardPipelineDataSourceConfiguration.java +++ b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/StandardPipelineDataSourceConfiguration.java @@ -26,7 +26,7 @@ 
import org.apache.shardingsphere.infra.database.core.connector.url.StandardJdbcUrlParser; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeFactory; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; @@ -51,7 +51,7 @@ public final class StandardPipelineDataSourceConfiguration implements PipelineDa @Getter private final String parameter; - private final DataSourceProperties dataSourceProperties; + private final DataSourcePoolProperties dataSourcePoolProps; @Getter private final YamlJdbcConfiguration jdbcConfig; @@ -64,8 +64,8 @@ public StandardPipelineDataSourceConfiguration(final String param) { this(param, YamlEngine.unmarshal(param, Map.class)); } - public StandardPipelineDataSourceConfiguration(final Map yamlDataSourceConfig) { - this(YamlEngine.marshal(yamlDataSourceConfig), new HashMap<>(yamlDataSourceConfig)); + public StandardPipelineDataSourceConfiguration(final Map poolProps) { + this(YamlEngine.marshal(poolProps), new HashMap<>(poolProps)); } private StandardPipelineDataSourceConfiguration(final String param, final Map yamlConfig) { @@ -82,7 +82,7 @@ private StandardPipelineDataSourceConfiguration(final String param, final Map columns; @@ -47,11 +49,18 @@ public final class DataRecord extends Record { private final List oldUniqueKeyValues = new ArrayList<>(); + private String actualTableName; + private Long csn; public DataRecord(final String type, final String tableName, final IngestPosition position, final int columnCount) { + this(type, null, tableName, position, columnCount); + } + + public 
DataRecord(final String type, final String schemaName, final String tableName, final IngestPosition position, final int columnCount) { super(position); this.type = type; + this.schemaName = schemaName; this.tableName = tableName; columns = new ArrayList<>(columnCount); } diff --git a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/IdentifierName.java b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/IdentifierName.java index b6a2776da0622..9407dcfe52b02 100644 --- a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/IdentifierName.java +++ b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/IdentifierName.java @@ -35,8 +35,8 @@ public class IdentifierName { private final String lowercase; public IdentifierName(final String identifierName) { - this.original = identifierName; - this.lowercase = null != identifierName ? identifierName.toLowerCase() : null; + original = identifierName; + lowercase = null == identifierName ? null : identifierName.toLowerCase(); } @Override diff --git a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/SchemaTableName.java b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/SchemaTableName.java index c85372ab79bcf..985ab7607a206 100644 --- a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/SchemaTableName.java +++ b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/SchemaTableName.java @@ -37,4 +37,19 @@ public class SchemaTableName { @NonNull private final TableName tableName; + + public SchemaTableName(final String schemaName, final String tableName) { + this.schemaName = new SchemaName(schemaName); + this.tableName = new TableName(tableName); + } + + /** + * Marshal to text. 
+ * + * @return text + */ + public String marshal() { + String schemaName = this.schemaName.getOriginal(); + return null == schemaName ? tableName.getOriginal() : schemaName + "." + tableName.getOriginal(); + } } diff --git a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/model/PipelineTableMetaData.java b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/model/PipelineTableMetaData.java index 17df4192867e3..e593a95b7515b 100644 --- a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/model/PipelineTableMetaData.java +++ b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/model/PipelineTableMetaData.java @@ -17,9 +17,11 @@ package org.apache.shardingsphere.data.pipeline.api.metadata.model; +import lombok.AccessLevel; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.NonNull; +import lombok.RequiredArgsConstructor; import lombok.ToString; import lombok.extern.slf4j.Slf4j; @@ -33,6 +35,7 @@ /** * Pipeline table meta data. 
*/ +@RequiredArgsConstructor(access = AccessLevel.PRIVATE) @Slf4j @EqualsAndHashCode(of = "name") @ToString diff --git a/kernel/data-pipeline/api/src/test/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/StandardPipelineDataSourceConfigurationTest.java b/kernel/data-pipeline/api/src/test/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/StandardPipelineDataSourceConfigurationTest.java index a5b0c7f148eb7..88c7cc36025ac 100644 --- a/kernel/data-pipeline/api/src/test/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/StandardPipelineDataSourceConfigurationTest.java +++ b/kernel/data-pipeline/api/src/test/java/org/apache/shardingsphere/data/pipeline/api/datasource/config/impl/StandardPipelineDataSourceConfigurationTest.java @@ -18,7 +18,7 @@ package org.apache.shardingsphere.data.pipeline.api.datasource.config.impl; import org.apache.shardingsphere.data.pipeline.api.datasource.config.yaml.YamlJdbcConfiguration; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; import org.junit.jupiter.api.Test; @@ -76,10 +76,10 @@ private void assertParameterUnchanged(final Map backup, final Ma private void assertGetConfig(final StandardPipelineDataSourceConfiguration actual) { assertThat(actual.getDatabaseType().getType(), is("MySQL")); assertThat(actual.getType(), is(StandardPipelineDataSourceConfiguration.TYPE)); - DataSourceProperties dataSourceProps = (DataSourceProperties) actual.getDataSourceConfiguration(); - assertThat(dataSourceProps.getDataSourceClassName(), is("com.zaxxer.hikari.HikariDataSource")); + DataSourcePoolProperties props = (DataSourcePoolProperties) actual.getDataSourceConfiguration(); + assertThat(props.getPoolClassName(), is("com.zaxxer.hikari.HikariDataSource")); 
assertGetJdbcConfig(actual.getJdbcConfig()); - assertDataSourceProperties(dataSourceProps); + assertDataSourcePoolProperties(props); } private void assertGetJdbcConfig(final YamlJdbcConfiguration actual) { @@ -88,8 +88,8 @@ private void assertGetJdbcConfig(final YamlJdbcConfiguration actual) { assertThat(actual.getPassword(), is(PASSWORD)); } - private void assertDataSourceProperties(final DataSourceProperties dataSourceProps) { - Map actual = new YamlDataSourceConfigurationSwapper().swapToMap(dataSourceProps); + private void assertDataSourcePoolProperties(final DataSourcePoolProperties props) { + Map actual = new YamlDataSourceConfigurationSwapper().swapToMap(props); assertThat(actual.get("minPoolSize"), is("1")); } } diff --git a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolPropertiesValidator.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/config/ingest/IncrementalDumperConfigurationCreator.java similarity index 60% rename from infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolPropertiesValidator.java rename to kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/config/ingest/IncrementalDumperConfigurationCreator.java index f7507dbb9563a..2934a8f142f34 100644 --- a/infra/datasource/core/src/main/java/org/apache/shardingsphere/infra/datasource/pool/metadata/DataSourcePoolPropertiesValidator.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/config/ingest/IncrementalDumperConfigurationCreator.java @@ -15,19 +15,21 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.infra.datasource.pool.metadata; +package org.apache.shardingsphere.data.pipeline.common.config.ingest; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.data.pipeline.api.config.ingest.DumperConfiguration; +import org.apache.shardingsphere.data.pipeline.common.datanode.JobDataNodeLine; /** - * Data source pool properties validator. + * Incremental dumper configuration creator. */ -public interface DataSourcePoolPropertiesValidator { +public interface IncrementalDumperConfigurationCreator { /** - * Check properties. - * - * @param dataSourceProps Data source properties + * Create dumper configuration. + * + * @param jobDataNodeLine job data node line + * @return dumper configuration */ - void validateProperties(DataSourceProperties dataSourceProps); + DumperConfiguration createDumperConfiguration(JobDataNodeLine jobDataNodeLine); } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/config/process/PipelineProcessConfigurationUtils.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/config/process/PipelineProcessConfigurationUtils.java index a9f59d0ccbb75..03ed066825cfd 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/config/process/PipelineProcessConfigurationUtils.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/config/process/PipelineProcessConfigurationUtils.java @@ -42,7 +42,7 @@ public final class PipelineProcessConfigurationUtils { * @return process configuration */ public static PipelineProcessConfiguration convertWithDefaultValue(final PipelineProcessConfiguration originalConfig) { - YamlPipelineProcessConfiguration yamlConfig = null != originalConfig ? 
SWAPPER.swapToYamlConfiguration(originalConfig) : new YamlPipelineProcessConfiguration(); + YamlPipelineProcessConfiguration yamlConfig = null == originalConfig ? new YamlPipelineProcessConfiguration() : SWAPPER.swapToYamlConfiguration(originalConfig); fillInDefaultValue(yamlConfig); return SWAPPER.swapToObject(yamlConfig); } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/context/PipelineContextKey.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/context/PipelineContextKey.java index 1e946606dde9d..179095afb5537 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/context/PipelineContextKey.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/context/PipelineContextKey.java @@ -70,7 +70,7 @@ public boolean equals(final Object o) { if (this == o) { return true; } - if (o == null || getClass() != o.getClass()) { + if (null == o || getClass() != o.getClass()) { return false; } final PipelineContextKey that = (PipelineContextKey) o; diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datanode/JobDataNodeLineConvertUtils.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datanode/JobDataNodeLineConvertUtils.java index 6285d3a23b732..5cf249adc3099 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datanode/JobDataNodeLineConvertUtils.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datanode/JobDataNodeLineConvertUtils.java @@ -20,6 +20,8 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; import org.apache.commons.lang3.tuple.Pair; +import org.apache.shardingsphere.data.pipeline.api.metadata.ActualTableName; +import org.apache.shardingsphere.data.pipeline.api.metadata.LogicTableName; 
import org.apache.shardingsphere.infra.datanode.DataNode; import java.util.LinkedHashMap; @@ -65,4 +67,20 @@ private static Map>> groupDataSourceDataNodes } return result; } + + /** + * Build table name map. + * + * @param dataNodeLine data node line + * @return actual table and logic table map + */ + public static Map buildTableNameMap(final JobDataNodeLine dataNodeLine) { + Map result = new LinkedHashMap<>(); + for (JobDataNodeEntry each : dataNodeLine.getEntries()) { + for (DataNode dataNode : each.getDataNodes()) { + result.put(new ActualTableName(dataNode.getTableName()), new LogicTableName(each.getLogicTableName())); + } + } + return result; + } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/DefaultPipelineDataSourceManager.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/DefaultPipelineDataSourceManager.java index fee03ff045726..c637ca71d8fe1 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/DefaultPipelineDataSourceManager.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/DefaultPipelineDataSourceManager.java @@ -41,7 +41,11 @@ public PipelineDataSourceWrapper getDataSource(final PipelineDataSourceConfigura synchronized (cachedDataSources) { result = cachedDataSources.get(dataSourceConfig); if (null != result) { - return result; + if (!result.isClosed()) { + return result; + } else { + log.warn("{} is already closed, create again", result); + } } result = PipelineDataSourceFactory.newInstance(dataSourceConfig); cachedDataSources.put(dataSourceConfig, result); @@ -49,10 +53,12 @@ public PipelineDataSourceWrapper getDataSource(final PipelineDataSourceConfigura } } - // TODO monitor each DataSource close @Override public void close() { for (PipelineDataSourceWrapper each : cachedDataSources.values()) { + if 
(each.isClosed()) { + continue; + } try { each.close(); } catch (final SQLException ex) { diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/PipelineDataSourceWrapper.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/PipelineDataSourceWrapper.java index c5bbda09c0f1c..8fdac2e2d3537 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/PipelineDataSourceWrapper.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/PipelineDataSourceWrapper.java @@ -21,12 +21,14 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolDestroyer; import javax.sql.DataSource; import java.io.PrintWriter; import java.sql.Connection; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.logging.Logger; /** @@ -41,6 +43,17 @@ public final class PipelineDataSourceWrapper implements DataSource, AutoCloseabl private final DatabaseType databaseType; + private final AtomicBoolean closed = new AtomicBoolean(false); + + /** + * Whether underlying data source is closed or not. 
+ * + * @return true if closed + */ + public boolean isClosed() { + return closed.get(); + } + @Override public Connection getConnection() throws SQLException { return dataSource.getConnection(); @@ -88,21 +101,20 @@ public Logger getParentLogger() throws SQLFeatureNotSupportedException { @Override public void close() throws SQLException { - if (null == dataSource) { + if (closed.get()) { + return; + } + if (!(dataSource instanceof AutoCloseable)) { + log.warn("Data source is not closed, it might cause connection leak, data source: {}", dataSource); return; } - if (dataSource instanceof AutoCloseable) { - try { - ((AutoCloseable) dataSource).close(); - } catch (final SQLException ex) { - throw ex; - // CHECKSTYLE:OFF - } catch (final Exception ex) { - // CHECKSTYLE:ON - throw new SQLException("data source close failed.", ex); - } - } else { - log.warn("dataSource is not closed, it might cause connection leak, dataSource={}", dataSource); + try { + new DataSourcePoolDestroyer(dataSource).asyncDestroy(); + closed.set(true); + // CHECKSTYLE:OFF + } catch (final RuntimeException ex) { + // CHECKSTYLE:ON + throw new SQLException("Data source close failed.", ex); } } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/creator/StandardPipelineDataSourceCreator.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/creator/StandardPipelineDataSourceCreator.java index 31332bb66f212..42710de0b7b4a 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/creator/StandardPipelineDataSourceCreator.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/datasource/creator/StandardPipelineDataSourceCreator.java @@ -20,7 +20,7 @@ import org.apache.shardingsphere.data.pipeline.api.datasource.config.impl.StandardPipelineDataSourceConfiguration; import 
org.apache.shardingsphere.data.pipeline.spi.datasource.creator.PipelineDataSourceCreator; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import javax.sql.DataSource; @@ -31,7 +31,7 @@ public final class StandardPipelineDataSourceCreator implements PipelineDataSour @Override public DataSource createPipelineDataSource(final Object dataSourceConfig) { - return DataSourcePoolCreator.create((DataSourceProperties) dataSourceConfig); + return DataSourcePoolCreator.create((DataSourcePoolProperties) dataSourceConfig); } @Override diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/execute/ExecuteEngine.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/execute/ExecuteEngine.java index 118be941f41dd..a8c49be753982 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/execute/ExecuteEngine.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/execute/ExecuteEngine.java @@ -43,6 +43,8 @@ public final class ExecuteEngine { private static final String THREAD_SUFFIX = "-%d"; + private static final ExecutorService CALLBACK_EXECUTOR = Executors.newSingleThreadScheduledExecutor(ExecutorThreadFactoryBuilder.build(THREAD_PREFIX + "callback" + THREAD_SUFFIX)); + private final ExecutorService executorService; /** @@ -118,14 +120,14 @@ public void shutdown() { public static void trigger(final Collection> futures, final ExecuteCallback executeCallback) { BlockingQueue> futureQueue = new LinkedBlockingQueue<>(); for (CompletableFuture each : futures) { - each.whenComplete(new BiConsumer() { + each.whenCompleteAsync(new BiConsumer() { @SneakyThrows(InterruptedException.class) @Override public void 
accept(final Object unused, final Throwable throwable) { futureQueue.put(each); } - }); + }, CALLBACK_EXECUTOR); } for (int i = 1, count = futures.size(); i <= count; i++) { CompletableFuture future = futureQueue.take(); diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/ingest/channel/memory/SimpleMemoryPipelineChannel.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/ingest/channel/memory/SimpleMemoryPipelineChannel.java index d60d04be55762..e7cd89705f872 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/ingest/channel/memory/SimpleMemoryPipelineChannel.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/ingest/channel/memory/SimpleMemoryPipelineChannel.java @@ -75,13 +75,13 @@ public List fetchRecords(final int batchSize, final long timeout, final @Override public List peekRecords() { List result = queue.peek(); - return null != result ? result : Collections.emptyList(); + return null == result ? Collections.emptyList() : result; } @Override public List pollRecords() { List result = queue.poll(); - return null != result ? result : Collections.emptyList(); + return null == result ? 
Collections.emptyList() : result; } @Override diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/ConsistencyCheckJobItemProgress.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/ConsistencyCheckJobItemProgress.java index 7cd910f65eae2..f43db99d70f47 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/ConsistencyCheckJobItemProgress.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/ConsistencyCheckJobItemProgress.java @@ -28,10 +28,10 @@ /** * Data consistency check job item progress. */ -// TODO move package -@Getter @RequiredArgsConstructor +@Getter @ToString +// TODO Refactor structure, List public final class ConsistencyCheckJobItemProgress implements PipelineJobItemProgress { @Setter @@ -49,5 +49,9 @@ public final class ConsistencyCheckJobItemProgress implements PipelineJobItemPro private final Long checkEndTimeMillis; - private final Map tableCheckPositions; + private final Map sourceTableCheckPositions; + + private final Map targetTableCheckPositions; + + private final String sourceDatabaseType; } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/JobItemInventoryTasksProgress.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/JobItemInventoryTasksProgress.java index cad206aba767e..962021ea3395a 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/JobItemInventoryTasksProgress.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/JobItemInventoryTasksProgress.java @@ -43,7 +43,7 @@ public final class JobItemInventoryTasksProgress { * @return inventory position */ public Map 
getInventoryPosition(final String tableName) { - Pattern pattern = Pattern.compile(String.format("%s(#\\d+)?", tableName)); + Pattern pattern = Pattern.compile(String.format("\\.%s#(\\d+)?", tableName)); return progresses.entrySet().stream().filter(entry -> pattern.matcher(entry.getKey()).find()).collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getPosition())); } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgress.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgress.java index 3e5b90864845f..2ed595408b3ad 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgress.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgress.java @@ -21,6 +21,7 @@ import lombok.Setter; import org.apache.shardingsphere.infra.util.yaml.YamlConfiguration; +import java.util.LinkedHashMap; import java.util.Map; /** @@ -44,5 +45,9 @@ public final class YamlConsistencyCheckJobItemProgress implements YamlConfigurat private Long checkEndTimeMillis; - private Map tableCheckPositions; + private Map sourceTableCheckPositions = new LinkedHashMap<>(); + + private Map targetTableCheckPositions = new LinkedHashMap<>(); + + private String sourceDatabaseType; } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgressSwapper.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgressSwapper.java index 96a28986c9d1c..c399505edf2bb 100644 --- 
a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgressSwapper.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlConsistencyCheckJobItemProgressSwapper.java @@ -21,9 +21,6 @@ import org.apache.shardingsphere.data.pipeline.common.job.progress.ConsistencyCheckJobItemProgress; import org.apache.shardingsphere.infra.util.yaml.swapper.YamlConfigurationSwapper; -import java.util.LinkedHashMap; -import java.util.Map; - /** * YAML data check job item progress swapper. */ @@ -39,18 +36,17 @@ public YamlConsistencyCheckJobItemProgress swapToYamlConfiguration(final Consist result.setRecordsCount(data.getRecordsCount()); result.setCheckBeginTimeMillis(data.getCheckBeginTimeMillis()); result.setCheckEndTimeMillis(data.getCheckEndTimeMillis()); - result.setTableCheckPositions(data.getTableCheckPositions()); + result.setSourceTableCheckPositions(data.getSourceTableCheckPositions()); + result.setTargetTableCheckPositions(data.getTargetTableCheckPositions()); + result.setSourceDatabaseType(data.getSourceDatabaseType()); return result; } @Override public ConsistencyCheckJobItemProgress swapToObject(final YamlConsistencyCheckJobItemProgress yamlConfig) { - Map tableCheckPositions = new LinkedHashMap<>(); - if (null != yamlConfig.getTableCheckPositions()) { - tableCheckPositions.putAll(yamlConfig.getTableCheckPositions()); - } ConsistencyCheckJobItemProgress result = new ConsistencyCheckJobItemProgress(yamlConfig.getTableNames(), yamlConfig.getIgnoredTableNames(), yamlConfig.getCheckedRecordsCount(), - yamlConfig.getRecordsCount(), yamlConfig.getCheckBeginTimeMillis(), yamlConfig.getCheckEndTimeMillis(), tableCheckPositions); + yamlConfig.getRecordsCount(), yamlConfig.getCheckBeginTimeMillis(), yamlConfig.getCheckEndTimeMillis(), + yamlConfig.getSourceTableCheckPositions(), yamlConfig.getTargetTableCheckPositions(), 
yamlConfig.getSourceDatabaseType()); result.setStatus(JobStatus.valueOf(yamlConfig.getStatus())); return result; } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/metadata/generator/PipelineDDLGenerator.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/metadata/generator/PipelineDDLGenerator.java index 86ca680a962d0..07e770e28ff08 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/metadata/generator/PipelineDDLGenerator.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/metadata/generator/PipelineDDLGenerator.java @@ -17,10 +17,9 @@ package org.apache.shardingsphere.data.pipeline.common.metadata.generator; +import com.google.common.base.Strings; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.StringUtils; import org.apache.shardingsphere.data.pipeline.spi.ddlgenerator.CreateTableSQLGenerator; -import org.apache.shardingsphere.infra.binder.engine.SQLBindEngine; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.ddl.AlterTableStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.ddl.CommentStatementContext; @@ -29,6 +28,7 @@ import org.apache.shardingsphere.infra.binder.context.type.ConstraintAvailable; import org.apache.shardingsphere.infra.binder.context.type.IndexAvailable; import org.apache.shardingsphere.infra.binder.context.type.TableAvailable; +import org.apache.shardingsphere.infra.binder.engine.SQLBindEngine; import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.metadata.database.schema.util.IndexMetaDataUtils; @@ -87,7 +87,7 @@ public String generateLogicDDL(final DatabaseType databaseType, 
final DataSource private Optional decorate(final DatabaseType databaseType, final DataSource dataSource, final String schemaName, final String targetTableName, final SQLParserEngine parserEngine, final String sql) throws SQLException { - if (StringUtils.isBlank(sql)) { + if (Strings.isNullOrEmpty(sql)) { return Optional.empty(); } String databaseName; diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/metadata/node/PipelineMetaDataNodeWatcher.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/metadata/node/PipelineMetaDataNodeWatcher.java index 3b748b4d159cc..c02c902aaf98b 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/metadata/node/PipelineMetaDataNodeWatcher.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/metadata/node/PipelineMetaDataNodeWatcher.java @@ -50,7 +50,7 @@ public final class PipelineMetaDataNodeWatcher { private PipelineMetaDataNodeWatcher(final PipelineContextKey contextKey) { listenerMap.putAll(ShardingSphereServiceLoader.getServiceInstances(PipelineMetaDataChangedEventHandler.class) - .stream().collect(Collectors.toMap(PipelineMetaDataChangedEventHandler::getKeyPattern, each -> each, (key, value) -> value))); + .stream().collect(Collectors.toMap(PipelineMetaDataChangedEventHandler::getKeyPattern, each -> each))); PipelineAPIFactory.getGovernanceRepositoryAPI(contextKey).watch(DataPipelineConstants.DATA_PIPELINE_ROOT, this::dispatchEvent); } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/pojo/ConsistencyCheckJobItemInfo.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/pojo/ConsistencyCheckJobItemInfo.java index 7478a6a3622d3..6768dfd352caf 100644 --- 
a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/pojo/ConsistencyCheckJobItemInfo.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/pojo/ConsistencyCheckJobItemInfo.java @@ -29,15 +29,19 @@ @Setter public final class ConsistencyCheckJobItemInfo { + private boolean active; + private String tableNames; private Boolean checkSuccess; private String checkFailedTableNames; - private int finishedPercentage; + private int inventoryFinishedPercentage; + + private long inventoryRemainingSeconds; - private long remainingSeconds; + private String incrementalIdleSeconds = ""; private String checkBeginTime; @@ -45,5 +49,9 @@ public final class ConsistencyCheckJobItemInfo { private long durationSeconds; + private String algorithmType; + + private String algorithmProps; + private String errorMessage; } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/pojo/DataConsistencyCheckAlgorithmInfo.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/pojo/DataConsistencyCheckAlgorithmInfo.java index cf93fe1df212f..4517de99e726c 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/pojo/DataConsistencyCheckAlgorithmInfo.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/pojo/DataConsistencyCheckAlgorithmInfo.java @@ -32,6 +32,8 @@ public final class DataConsistencyCheckAlgorithmInfo { private final String type; + private final String typeAliases; + private final Collection supportedDatabaseTypes; private final String description; diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/registrycenter/repository/GovernanceRepositoryAPI.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/registrycenter/repository/GovernanceRepositoryAPI.java 
index 813b169c9f9ab..dff2369cb601f 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/registrycenter/repository/GovernanceRepositoryAPI.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/registrycenter/repository/GovernanceRepositoryAPI.java @@ -18,7 +18,7 @@ package org.apache.shardingsphere.data.pipeline.common.registrycenter.repository; import org.apache.shardingsphere.data.pipeline.common.job.type.JobType; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; import org.apache.shardingsphere.mode.repository.cluster.listener.DataChangedEventListener; import java.util.Collection; @@ -112,7 +112,7 @@ public interface GovernanceRepositoryAPI { * @param checkJobId check job id * @return check job result */ - Map getCheckJobResult(String parentJobId, String checkJobId); + Map getCheckJobResult(String parentJobId, String checkJobId); /** * Persist check job result. @@ -121,7 +121,7 @@ public interface GovernanceRepositoryAPI { * @param checkJobId check job id * @param checkResultMap check result map */ - void persistCheckJobResult(String parentJobId, String checkJobId, Map checkResultMap); + void persistCheckJobResult(String parentJobId, String checkJobId, Map checkResultMap); /** * Delete check job result. 
diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/registrycenter/repository/GovernanceRepositoryAPIImpl.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/registrycenter/repository/GovernanceRepositoryAPIImpl.java index 5250a7d8dd476..1e7b9f50b001e 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/registrycenter/repository/GovernanceRepositoryAPIImpl.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/registrycenter/repository/GovernanceRepositoryAPIImpl.java @@ -22,9 +22,9 @@ import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.data.pipeline.common.job.type.JobType; import org.apache.shardingsphere.data.pipeline.common.metadata.node.PipelineMetaDataNode; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckResult; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.yaml.YamlDataConsistencyCheckResult; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.yaml.YamlDataConsistencyCheckResultSwapper; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.yaml.YamlTableDataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.yaml.YamlTableDataConsistencyCheckResultSwapper; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.mode.repository.cluster.ClusterPersistRepository; import org.apache.shardingsphere.mode.repository.cluster.listener.DataChangedEventListener; @@ -98,14 +98,14 @@ public void deleteLatestCheckJobId(final String parentJobId) { @SuppressWarnings("unchecked") @Override - public Map getCheckJobResult(final String parentJobId, final String 
checkJobId) { + public Map getCheckJobResult(final String parentJobId, final String checkJobId) { String yamlCheckResultMapText = repository.getDirectly(PipelineMetaDataNode.getCheckJobResultPath(parentJobId, checkJobId)); if (Strings.isNullOrEmpty(yamlCheckResultMapText)) { return Collections.emptyMap(); } - YamlDataConsistencyCheckResultSwapper swapper = new YamlDataConsistencyCheckResultSwapper(); + YamlTableDataConsistencyCheckResultSwapper swapper = new YamlTableDataConsistencyCheckResultSwapper(); Map yamlCheckResultMap = YamlEngine.unmarshal(yamlCheckResultMapText, Map.class, true); - Map result = new HashMap<>(yamlCheckResultMap.size(), 1F); + Map result = new HashMap<>(yamlCheckResultMap.size(), 1F); for (Entry entry : yamlCheckResultMap.entrySet()) { result.put(entry.getKey(), swapper.swapToObject(entry.getValue())); } @@ -113,13 +113,13 @@ public Map getCheckJobResult(final String pa } @Override - public void persistCheckJobResult(final String parentJobId, final String checkJobId, final Map checkResultMap) { + public void persistCheckJobResult(final String parentJobId, final String checkJobId, final Map checkResultMap) { if (null == checkResultMap) { return; } Map yamlCheckResultMap = new LinkedHashMap<>(); - for (Entry entry : checkResultMap.entrySet()) { - YamlDataConsistencyCheckResult yamlCheckResult = new YamlDataConsistencyCheckResultSwapper().swapToYamlConfiguration(entry.getValue()); + for (Entry entry : checkResultMap.entrySet()) { + YamlTableDataConsistencyCheckResult yamlCheckResult = new YamlTableDataConsistencyCheckResultSwapper().swapToYamlConfiguration(entry.getValue()); yamlCheckResultMap.put(entry.getKey(), YamlEngine.marshal(yamlCheckResult)); } repository.persist(PipelineMetaDataNode.getCheckJobResultPath(parentJobId, checkJobId), YamlEngine.marshal(yamlCheckResultMap)); diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/util/PipelineLazyInitializer.java 
b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/util/PipelineLazyInitializer.java index f21d0adfd3e0e..df9927ecd62ff 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/util/PipelineLazyInitializer.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/util/PipelineLazyInitializer.java @@ -38,6 +38,11 @@ protected final T initialize() throws ConcurrentException { return result; } + /** + * Is initialized. + * + * @return initialized or not + */ public boolean isInitialized() { return initialized.get(); } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/util/ShardingColumnsExtractor.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/util/ShardingColumnsExtractor.java index 78f0b3b8991ab..1fe9015f34329 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/util/ShardingColumnsExtractor.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/util/ShardingColumnsExtractor.java @@ -49,15 +49,14 @@ public final class ShardingColumnsExtractor { * @return sharding columns map */ public Map> getShardingColumnsMap(final Collection yamlRuleConfigs, final Set logicTableNames) { - Optional shardingRuleConfigOptional = ShardingRuleConfigurationConverter.findAndConvertShardingRuleConfiguration(yamlRuleConfigs); - if (!shardingRuleConfigOptional.isPresent()) { + Optional shardingRuleConfig = ShardingRuleConfigurationConverter.findAndConvertShardingRuleConfiguration(yamlRuleConfigs); + if (!shardingRuleConfig.isPresent()) { return Collections.emptyMap(); } - ShardingRuleConfiguration shardingRuleConfig = shardingRuleConfigOptional.get(); - Set defaultDatabaseShardingColumns = extractShardingColumns(shardingRuleConfig.getDefaultDatabaseShardingStrategy()); - Set 
defaultTableShardingColumns = extractShardingColumns(shardingRuleConfig.getDefaultTableShardingStrategy()); + Set defaultDatabaseShardingColumns = extractShardingColumns(shardingRuleConfig.get().getDefaultDatabaseShardingStrategy()); + Set defaultTableShardingColumns = extractShardingColumns(shardingRuleConfig.get().getDefaultTableShardingStrategy()); Map> result = new ConcurrentHashMap<>(); - for (ShardingTableRuleConfiguration each : shardingRuleConfig.getTables()) { + for (ShardingTableRuleConfiguration each : shardingRuleConfig.get().getTables()) { LogicTableName logicTableName = new LogicTableName(each.getLogicTable()); if (!logicTableNames.contains(logicTableName)) { continue; @@ -67,7 +66,7 @@ public Map> getShardingColumnsMap(final Collection tableCheckPositions = new ConcurrentHashMap<>(); + private final Map sourceTableCheckPositions = new ConcurrentHashMap<>(); + + private final Map targetTableCheckPositions = new ConcurrentHashMap<>(); + + private final String sourceDatabaseType; @Override public void onProgressUpdated(final PipelineJobProgressUpdatedParameter param) { diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataConsistencyCheckUtils.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataConsistencyCheckUtils.java index d02af2542253f..adb79c3255405 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataConsistencyCheckUtils.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataConsistencyCheckUtils.java @@ -19,6 +19,7 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.builder.EqualsBuilder; @@ -27,8 +28,9 @@ import java.sql.Array; import java.sql.SQLException; import java.sql.SQLXML; -import 
java.util.Collection; import java.util.Iterator; +import java.util.Map; +import java.util.Map.Entry; import java.util.Objects; /** @@ -45,16 +47,15 @@ public final class DataConsistencyCheckUtils { * @param thatRecord that record * @param equalsBuilder equals builder * @return true if records equals, otherwise false - * @throws SQLException if getting special value failed */ - public static boolean recordsEquals(final Collection thisRecord, final Collection thatRecord, final EqualsBuilder equalsBuilder) throws SQLException { - Iterator thisRecordIterator = thisRecord.iterator(); - Iterator thatRecordIterator = thatRecord.iterator(); + public static boolean recordsEquals(final Map thisRecord, final Map thatRecord, final EqualsBuilder equalsBuilder) { + Iterator> thisRecordIterator = thisRecord.entrySet().iterator(); + Iterator> thatRecordIterator = thatRecord.entrySet().iterator(); int columnIndex = 0; while (thisRecordIterator.hasNext() && thatRecordIterator.hasNext()) { ++columnIndex; - Object thisColumnValue = thisRecordIterator.next(); - Object thatColumnValue = thatRecordIterator.next(); + Object thisColumnValue = thisRecordIterator.next().getValue(); + Object thatColumnValue = thatRecordIterator.next().getValue(); if (!isMatched(equalsBuilder, thisColumnValue, thatColumnValue)) { log.warn("Record column value not match, columnIndex={}, value1={}, value2={}, value1.class={}, value2.class={}.", columnIndex, thisColumnValue, thatColumnValue, null != thisColumnValue ? thisColumnValue.getClass().getName() : "", null == thatColumnValue ? 
"" : thatColumnValue.getClass().getName()); @@ -64,7 +65,9 @@ public static boolean recordsEquals(final Collection thisRecord, final C return true; } - private static boolean isMatched(final EqualsBuilder equalsBuilder, final Object thisColumnValue, final Object thatColumnValue) throws SQLException { + @SneakyThrows(SQLException.class) + private static boolean isMatched(final EqualsBuilder equalsBuilder, final Object thisColumnValue, final Object thatColumnValue) { + equalsBuilder.reset(); if (thisColumnValue instanceof SQLXML && thatColumnValue instanceof SQLXML) { return ((SQLXML) thisColumnValue).getString().equals(((SQLXML) thatColumnValue).getString()); } diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/StorageNodeMetaData.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/PipelineCancellable.java similarity index 70% rename from infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/StorageNodeMetaData.java rename to kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/PipelineCancellable.java index 7fca766ca1b81..193fb6e2d0358 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/StorageNodeMetaData.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/PipelineCancellable.java @@ -15,20 +15,22 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.metadata.database.resource; - -import lombok.Getter; -import lombok.RequiredArgsConstructor; - -import javax.sql.DataSource; -import java.util.Map; +package org.apache.shardingsphere.data.pipeline.core.consistencycheck; /** - * Storage node meta data. + * Pipeline cancellable interface. 
*/ -@RequiredArgsConstructor -@Getter -public final class StorageNodeMetaData { +public interface PipelineCancellable { + + /** + * Cancel calculation. + */ + void cancel(); - private final Map dataSources; + /** + * Is calculation canceling. + * + * @return canceling or not + */ + boolean isCanceling(); } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/PipelineDataConsistencyChecker.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/PipelineDataConsistencyChecker.java index da3e179765b52..8fc4972c04a81 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/PipelineDataConsistencyChecker.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/PipelineDataConsistencyChecker.java @@ -17,21 +17,22 @@ package org.apache.shardingsphere.data.pipeline.core.consistencycheck; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; import java.util.Map; +import java.util.Properties; /** * Pipeline data consistency checker. */ -public interface PipelineDataConsistencyChecker { +public interface PipelineDataConsistencyChecker extends PipelineCancellable { /** * Data consistency check. * - * @param calculateAlgorithm calculate algorithm + * @param algorithmType algorithm type of {@link org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyChecker} + * @param algorithmProps algorithm properties of {@link org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyChecker} * @return check results. 
key is logic table name, value is check result. */ - Map check(DataConsistencyCalculateAlgorithm calculateAlgorithm); + Map check(String algorithmType, Properties algorithmProps); } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/SingleTableInventoryDataConsistencyChecker.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/SingleTableInventoryDataConsistencyChecker.java deleted file mode 100644 index 032ff5f2fbea9..0000000000000 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/SingleTableInventoryDataConsistencyChecker.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.data.pipeline.core.consistencycheck; - -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.apache.shardingsphere.data.pipeline.api.job.JobOperationType; -import org.apache.shardingsphere.data.pipeline.api.metadata.SchemaTableName; -import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineColumnMetaData; -import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceWrapper; -import org.apache.shardingsphere.data.pipeline.common.job.progress.listener.PipelineJobProgressUpdatedParameter; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCalculatedResult; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckResult; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyContentCheckResult; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCountCheckResult; -import org.apache.shardingsphere.infra.exception.core.external.sql.type.kernel.category.PipelineSQLException; -import org.apache.shardingsphere.data.pipeline.spi.ratelimit.JobRateLimitAlgorithm; -import org.apache.shardingsphere.infra.executor.kernel.thread.ExecutorThreadFactoryBuilder; -import org.apache.shardingsphere.infra.util.close.QuietlyCloser; -import org.apache.shardingsphere.infra.exception.core.external.sql.type.wrapper.SQLWrapperException; - -import java.sql.SQLException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.ThreadPoolExecutor; -import 
java.util.concurrent.TimeUnit; - -/** - * Single table inventory data consistency checker. - */ -@Slf4j -@RequiredArgsConstructor -public final class SingleTableInventoryDataConsistencyChecker { - - private final String jobId; - - private final PipelineDataSourceWrapper sourceDataSource; - - private final PipelineDataSourceWrapper targetDataSource; - - private final SchemaTableName sourceTable; - - private final SchemaTableName targetTable; - - private final List columnNames; - - private final PipelineColumnMetaData uniqueKey; - - private final JobRateLimitAlgorithm readRateLimitAlgorithm; - - private final ConsistencyCheckJobItemProgressContext progressContext; - - /** - * Data consistency check. - * - * @param calculateAlgorithm calculate algorithm - * @return data consistency check result - */ - public DataConsistencyCheckResult check(final DataConsistencyCalculateAlgorithm calculateAlgorithm) { - ThreadFactory threadFactory = ExecutorThreadFactoryBuilder.build("job-" + getJobIdDigest(jobId) + "-check-%d"); - ThreadPoolExecutor executor = new ThreadPoolExecutor(2, 2, 60, TimeUnit.SECONDS, new ArrayBlockingQueue<>(2), threadFactory); - try { - return check(calculateAlgorithm, executor); - } finally { - executor.shutdown(); - executor.shutdownNow(); - } - } - - private DataConsistencyCheckResult check(final DataConsistencyCalculateAlgorithm calculateAlgorithm, final ThreadPoolExecutor executor) { - String schemaName = sourceTable.getSchemaName().getOriginal(); - String sourceTableName = sourceTable.getTableName().getOriginal(); - Map tableCheckPositions = progressContext.getTableCheckPositions(); - DataConsistencyCalculateParameter sourceParam = new DataConsistencyCalculateParameter(sourceDataSource, - schemaName, sourceTableName, columnNames, sourceDataSource.getDatabaseType(), uniqueKey, tableCheckPositions.get(sourceTableName)); - String targetTableName = targetTable.getTableName().getOriginal(); - DataConsistencyCalculateParameter targetParam = new 
DataConsistencyCalculateParameter(targetDataSource, - targetTable.getSchemaName().getOriginal(), targetTableName, columnNames, targetDataSource.getDatabaseType(), uniqueKey, tableCheckPositions.get(targetTableName)); - Iterator sourceCalculatedResults = waitFuture(executor.submit(() -> calculateAlgorithm.calculate(sourceParam))).iterator(); - Iterator targetCalculatedResults = waitFuture(executor.submit(() -> calculateAlgorithm.calculate(targetParam))).iterator(); - try { - return check(sourceCalculatedResults, targetCalculatedResults, executor); - } finally { - QuietlyCloser.close(sourceParam.getCalculationContext()); - QuietlyCloser.close(targetParam.getCalculationContext()); - } - } - - private DataConsistencyCheckResult check(final Iterator sourceCalculatedResults, - final Iterator targetCalculatedResults, final ThreadPoolExecutor executor) { - long sourceRecordsCount = 0; - long targetRecordsCount = 0; - boolean contentMatched = true; - while (sourceCalculatedResults.hasNext() && targetCalculatedResults.hasNext()) { - if (null != readRateLimitAlgorithm) { - readRateLimitAlgorithm.intercept(JobOperationType.SELECT, 1); - } - DataConsistencyCalculatedResult sourceCalculatedResult = waitFuture(executor.submit(sourceCalculatedResults::next)); - DataConsistencyCalculatedResult targetCalculatedResult = waitFuture(executor.submit(targetCalculatedResults::next)); - sourceRecordsCount += sourceCalculatedResult.getRecordsCount(); - targetRecordsCount += targetCalculatedResult.getRecordsCount(); - contentMatched = Objects.equals(sourceCalculatedResult, targetCalculatedResult); - if (!contentMatched) { - log.info("content matched false, jobId={}, sourceTable={}, targetTable={}, uniqueKey={}", jobId, sourceTable, targetTable, uniqueKey); - break; - } - if (sourceCalculatedResult.getMaxUniqueKeyValue().isPresent()) { - progressContext.getTableCheckPositions().put(sourceTable.getTableName().getOriginal(), sourceCalculatedResult.getMaxUniqueKeyValue().get()); - } - if 
(targetCalculatedResult.getMaxUniqueKeyValue().isPresent()) { - progressContext.getTableCheckPositions().put(targetTable.getTableName().getOriginal(), targetCalculatedResult.getMaxUniqueKeyValue().get()); - } - progressContext.onProgressUpdated(new PipelineJobProgressUpdatedParameter(sourceCalculatedResult.getRecordsCount())); - } - if (sourceCalculatedResults.hasNext()) { - // TODO Refactor DataConsistencyCalculatedResult to represent inaccurate number - return new DataConsistencyCheckResult(new DataConsistencyCountCheckResult(sourceRecordsCount + 1, targetRecordsCount), new DataConsistencyContentCheckResult(false)); - } - if (targetCalculatedResults.hasNext()) { - return new DataConsistencyCheckResult(new DataConsistencyCountCheckResult(sourceRecordsCount, targetRecordsCount + 1), new DataConsistencyContentCheckResult(false)); - } - return new DataConsistencyCheckResult(new DataConsistencyCountCheckResult(sourceRecordsCount, targetRecordsCount), new DataConsistencyContentCheckResult(contentMatched)); - } - - // TODO use digest (crc32, murmurhash) - private String getJobIdDigest(final String jobId) { - return jobId.length() <= 6 ? 
jobId : jobId.substring(0, 6); - } - - private T waitFuture(final Future future) { - try { - return future.get(); - } catch (final InterruptedException ex) { - Thread.currentThread().interrupt(); - throw new SQLWrapperException(new SQLException(ex)); - } catch (final ExecutionException ex) { - if (ex.getCause() instanceof PipelineSQLException) { - throw (PipelineSQLException) ex.getCause(); - } - throw new SQLWrapperException(new SQLException(ex)); - } - } -} diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataMatchCalculatedResult.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/RecordSingleTableInventoryCalculatedResult.java similarity index 74% rename from kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataMatchCalculatedResult.java rename to kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/RecordSingleTableInventoryCalculatedResult.java index d1bf1e67708ee..1db10f4352b37 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataMatchCalculatedResult.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/RecordSingleTableInventoryCalculatedResult.java @@ -17,35 +17,32 @@ package org.apache.shardingsphere.data.pipeline.core.consistencycheck.result; -import lombok.AccessLevel; import lombok.Getter; -import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.shardingsphere.data.pipeline.core.consistencycheck.DataConsistencyCheckUtils; -import java.sql.SQLException; -import java.util.Collection; import java.util.Iterator; +import java.util.List; 
+import java.util.Map; import java.util.Objects; import java.util.Optional; /** - * Data match calculated result. + * Record single table inventory calculated result. */ @Getter @Slf4j -public final class DataMatchCalculatedResult implements DataConsistencyCalculatedResult { +public final class RecordSingleTableInventoryCalculatedResult implements SingleTableInventoryCalculatedResult { private final Object maxUniqueKeyValue; private final int recordsCount; - @Getter(AccessLevel.NONE) - private final Collection> records; + private final List> records; - public DataMatchCalculatedResult(final Object maxUniqueKeyValue, final Collection> records) { + public RecordSingleTableInventoryCalculatedResult(final Object maxUniqueKeyValue, final List> records) { this.maxUniqueKeyValue = maxUniqueKeyValue; recordsCount = records.size(); this.records = records; @@ -56,7 +53,6 @@ public Optional getMaxUniqueKeyValue() { return Optional.of(maxUniqueKeyValue); } - @SneakyThrows(SQLException.class) @Override public boolean equals(final Object o) { if (null == o) { @@ -65,23 +61,22 @@ public boolean equals(final Object o) { if (this == o) { return true; } - if (!(o instanceof DataMatchCalculatedResult)) { - log.warn("DataMatchCalculatedResult type not match, o.className={}.", o.getClass().getName()); + if (!(o instanceof RecordSingleTableInventoryCalculatedResult)) { + log.warn("RecordSingleTableInventoryCalculatedResult type not match, o.className={}.", o.getClass().getName()); return false; } - final DataMatchCalculatedResult that = (DataMatchCalculatedResult) o; + final RecordSingleTableInventoryCalculatedResult that = (RecordSingleTableInventoryCalculatedResult) o; if (recordsCount != that.recordsCount || !Objects.equals(maxUniqueKeyValue, that.maxUniqueKeyValue)) { log.warn("Record count or max unique key value not match, recordCount1={}, recordCount2={}, maxUniqueKeyValue1={}, maxUniqueKeyValue2={}.", recordsCount, that.recordsCount, maxUniqueKeyValue, that.maxUniqueKeyValue); 
return false; } EqualsBuilder equalsBuilder = new EqualsBuilder(); - Iterator> thisRecordsIterator = records.iterator(); - Iterator> thatRecordsIterator = that.records.iterator(); + Iterator> thisRecordsIterator = records.iterator(); + Iterator> thatRecordsIterator = that.records.iterator(); while (thisRecordsIterator.hasNext() && thatRecordsIterator.hasNext()) { - equalsBuilder.reset(); - Collection thisRecord = thisRecordsIterator.next(); - Collection thatRecord = thatRecordsIterator.next(); + Map thisRecord = thisRecordsIterator.next(); + Map thatRecord = thatRecordsIterator.next(); if (thisRecord.size() != thatRecord.size()) { log.warn("Record column size not match, size1={}, size2={}, record1={}, record2={}.", thisRecord.size(), thatRecord.size(), thisRecord, thatRecord); return false; diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataConsistencyCalculatedResult.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/SingleTableInventoryCalculatedResult.java similarity index 92% rename from kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataConsistencyCalculatedResult.java rename to kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/SingleTableInventoryCalculatedResult.java index 109aaee536892..69c9ca840859e 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataConsistencyCalculatedResult.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/SingleTableInventoryCalculatedResult.java @@ -20,9 +20,9 @@ import java.util.Optional; /** - * Data consistency calculated result. + * Single table inventory calculated result. 
*/ -public interface DataConsistencyCalculatedResult { +public interface SingleTableInventoryCalculatedResult { /** * Get max unique key value. diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataConsistencyCheckIgnoredType.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/TableDataConsistencyCheckIgnoredType.java similarity index 84% rename from kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataConsistencyCheckIgnoredType.java rename to kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/TableDataConsistencyCheckIgnoredType.java index a590b79c81fa9..037b2dc3e6af9 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataConsistencyCheckIgnoredType.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/TableDataConsistencyCheckIgnoredType.java @@ -21,13 +21,13 @@ import lombok.RequiredArgsConstructor; /** - * Data consistency check ignored type. + * Table data consistency check ignored type. 
*/ @RequiredArgsConstructor @Getter -public enum DataConsistencyCheckIgnoredType { +public enum TableDataConsistencyCheckIgnoredType { - NO_UNIQUE_KEY("Data consistency check are not supported for tables without unique key"); + NO_UNIQUE_KEY("Table data consistency check are not supported for tables without unique key"); private final String message; } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataConsistencyCheckResult.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/TableDataConsistencyCheckResult.java similarity index 68% rename from kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataConsistencyCheckResult.java rename to kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/TableDataConsistencyCheckResult.java index 4dc56bc2d9da9..7eec6445fbc35 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataConsistencyCheckResult.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/TableDataConsistencyCheckResult.java @@ -21,28 +21,28 @@ import lombok.ToString; /** - * Data consistency check result. + * Table data consistency check result. 
*/ @Getter @ToString -public final class DataConsistencyCheckResult { +public final class TableDataConsistencyCheckResult { - private final DataConsistencyCheckIgnoredType ignoredType; + private final TableDataConsistencyCountCheckResult countCheckResult; - private final DataConsistencyCountCheckResult countCheckResult; + private final TableDataConsistencyContentCheckResult contentCheckResult; - private final DataConsistencyContentCheckResult contentCheckResult; + private final TableDataConsistencyCheckIgnoredType ignoredType; - public DataConsistencyCheckResult(final DataConsistencyCountCheckResult countCheckResult, final DataConsistencyContentCheckResult contentCheckResult) { - ignoredType = null; + public TableDataConsistencyCheckResult(final TableDataConsistencyCountCheckResult countCheckResult, final TableDataConsistencyContentCheckResult contentCheckResult) { this.countCheckResult = countCheckResult; this.contentCheckResult = contentCheckResult; + ignoredType = null; } - public DataConsistencyCheckResult(final DataConsistencyCheckIgnoredType ignoredType) { + public TableDataConsistencyCheckResult(final TableDataConsistencyCheckIgnoredType ignoredType) { + countCheckResult = new TableDataConsistencyCountCheckResult(-1, -1); + contentCheckResult = new TableDataConsistencyContentCheckResult(false); this.ignoredType = ignoredType; - countCheckResult = null; - contentCheckResult = null; } /** diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataConsistencyContentCheckResult.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/TableDataConsistencyContentCheckResult.java similarity index 90% rename from kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataConsistencyContentCheckResult.java rename to 
kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/TableDataConsistencyContentCheckResult.java index 407f2ad7f3b0b..3995f5ad4b695 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataConsistencyContentCheckResult.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/TableDataConsistencyContentCheckResult.java @@ -22,12 +22,12 @@ import lombok.ToString; /** - * Data consistency content check result. + * Table data consistency content check result. */ @RequiredArgsConstructor @Getter @ToString -public final class DataConsistencyContentCheckResult { +public final class TableDataConsistencyContentCheckResult { private final boolean matched; } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataConsistencyCountCheckResult.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/TableDataConsistencyCountCheckResult.java similarity index 72% rename from kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataConsistencyCountCheckResult.java rename to kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/TableDataConsistencyCountCheckResult.java index 5557d61cd599e..aaf2b5ef02ccb 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/DataConsistencyCountCheckResult.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/TableDataConsistencyCountCheckResult.java @@ -18,24 +18,27 @@ package org.apache.shardingsphere.data.pipeline.core.consistencycheck.result; import lombok.Getter; +import 
lombok.RequiredArgsConstructor; import lombok.ToString; /** - * Data consistency count check result. + * Table data consistency count check result. */ +@RequiredArgsConstructor @Getter @ToString -public final class DataConsistencyCountCheckResult { +public final class TableDataConsistencyCountCheckResult { private final long sourceRecordsCount; private final long targetRecordsCount; - private final boolean matched; - - public DataConsistencyCountCheckResult(final long sourceRecordsCount, final long targetRecordsCount) { - this.sourceRecordsCount = sourceRecordsCount; - this.targetRecordsCount = targetRecordsCount; - matched = sourceRecordsCount == targetRecordsCount; + /** + * Is matched. + * + * @return true if records count equals between source and target + */ + public boolean isMatched() { + return sourceRecordsCount == targetRecordsCount && sourceRecordsCount >= 0; } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/yaml/YamlDataConsistencyCheckResult.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/yaml/YamlDataConsistencyCheckResult.java deleted file mode 100644 index fc1f8813e146a..0000000000000 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/yaml/YamlDataConsistencyCheckResult.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.yaml; - -import lombok.Getter; -import lombok.Setter; -import org.apache.shardingsphere.infra.util.yaml.YamlConfiguration; - -/** - * Yaml data consistency check result config. - */ -@Getter -@Setter -public final class YamlDataConsistencyCheckResult implements YamlConfiguration { - - private YamlDataConsistencyCountCheckResult countCheckResult; - - private YamlDataConsistencyContentCheckResult contentCheckResult; - - private String ignoredType; - - /** - * YAML data consistency count result. - */ - @Getter - @Setter - public static class YamlDataConsistencyCountCheckResult implements YamlConfiguration { - - private long sourceRecordsCount; - - private long targetRecordsCount; - - private boolean matched; - } - - /** - * YAML data consistency content result. 
- */ - @Getter - @Setter - public static class YamlDataConsistencyContentCheckResult implements YamlConfiguration { - - private boolean matched; - } -} diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/yaml/YamlTableDataConsistencyCheckResult.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/yaml/YamlTableDataConsistencyCheckResult.java new file mode 100644 index 0000000000000..6dc803ec3edef --- /dev/null +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/yaml/YamlTableDataConsistencyCheckResult.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.yaml; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import org.apache.shardingsphere.infra.util.yaml.YamlConfiguration; + +/** + * Yaml table data consistency check result config. 
+ */ +@NoArgsConstructor +@Getter +@Setter +public final class YamlTableDataConsistencyCheckResult implements YamlConfiguration { + + private YamlTableDataConsistencyCountCheckResult countCheckResult; + + private YamlTableDataConsistencyContentCheckResult contentCheckResult; + + private String ignoredType; + + public YamlTableDataConsistencyCheckResult(final YamlTableDataConsistencyCountCheckResult countCheckResult, final YamlTableDataConsistencyContentCheckResult contentCheckResult) { + this.countCheckResult = countCheckResult; + this.contentCheckResult = contentCheckResult; + } + + /** + * YAML table data consistency count result. + */ + @Getter + @Setter + public static class YamlTableDataConsistencyCountCheckResult implements YamlConfiguration { + + private long sourceRecordsCount; + + private long targetRecordsCount; + + /** + * Add records count. + * + * @param delta delta count + * @param onSource add on source or target + */ + public void addRecordsCount(final long delta, final boolean onSource) { + if (onSource) { + sourceRecordsCount += delta; + } else { + targetRecordsCount += delta; + } + } + } + + /** + * YAML table data consistency content result. 
+ */ + @NoArgsConstructor + @AllArgsConstructor + @Getter + @Setter + public static class YamlTableDataConsistencyContentCheckResult implements YamlConfiguration { + + private boolean matched; + } +} diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/yaml/YamlDataConsistencyCheckResultSwapper.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/yaml/YamlTableDataConsistencyCheckResultSwapper.java similarity index 53% rename from kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/yaml/YamlDataConsistencyCheckResultSwapper.java rename to kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/yaml/YamlTableDataConsistencyCheckResultSwapper.java index d1b0d59d455bd..4003f1018134c 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/yaml/YamlDataConsistencyCheckResultSwapper.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/yaml/YamlTableDataConsistencyCheckResultSwapper.java @@ -18,50 +18,49 @@ package org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.yaml; import com.google.common.base.Strings; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckIgnoredType; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckResult; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyContentCheckResult; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCountCheckResult; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.yaml.YamlDataConsistencyCheckResult.YamlDataConsistencyContentCheckResult; 
-import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.yaml.YamlDataConsistencyCheckResult.YamlDataConsistencyCountCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckIgnoredType; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyContentCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCountCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.yaml.YamlTableDataConsistencyCheckResult.YamlTableDataConsistencyContentCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.yaml.YamlTableDataConsistencyCheckResult.YamlTableDataConsistencyCountCheckResult; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.util.yaml.swapper.YamlConfigurationSwapper; /** - * Yaml data consistency check result swapper. + * Yaml table data consistency check result swapper. 
*/ -public final class YamlDataConsistencyCheckResultSwapper implements YamlConfigurationSwapper { +public final class YamlTableDataConsistencyCheckResultSwapper implements YamlConfigurationSwapper { @Override - public YamlDataConsistencyCheckResult swapToYamlConfiguration(final DataConsistencyCheckResult data) { - YamlDataConsistencyCheckResult result = new YamlDataConsistencyCheckResult(); + public YamlTableDataConsistencyCheckResult swapToYamlConfiguration(final TableDataConsistencyCheckResult data) { + YamlTableDataConsistencyCheckResult result = new YamlTableDataConsistencyCheckResult(); if (data.isIgnored()) { result.setIgnoredType(data.getIgnoredType().name()); return result; } - YamlDataConsistencyCountCheckResult countCheckResult = new YamlDataConsistencyCountCheckResult(); + YamlTableDataConsistencyCountCheckResult countCheckResult = new YamlTableDataConsistencyCountCheckResult(); countCheckResult.setSourceRecordsCount(data.getCountCheckResult().getSourceRecordsCount()); countCheckResult.setTargetRecordsCount(data.getCountCheckResult().getTargetRecordsCount()); - countCheckResult.setMatched(data.getContentCheckResult().isMatched()); result.setCountCheckResult(countCheckResult); - YamlDataConsistencyContentCheckResult contentCheckResult = new YamlDataConsistencyContentCheckResult(); + YamlTableDataConsistencyContentCheckResult contentCheckResult = new YamlTableDataConsistencyContentCheckResult(); contentCheckResult.setMatched(data.getContentCheckResult().isMatched()); result.setContentCheckResult(contentCheckResult); return result; } @Override - public DataConsistencyCheckResult swapToObject(final YamlDataConsistencyCheckResult yamlConfig) { + public TableDataConsistencyCheckResult swapToObject(final YamlTableDataConsistencyCheckResult yamlConfig) { if (null == yamlConfig) { return null; } if (!Strings.isNullOrEmpty(yamlConfig.getIgnoredType())) { - return new DataConsistencyCheckResult(DataConsistencyCheckIgnoredType.valueOf(yamlConfig.getIgnoredType())); 
+ return new TableDataConsistencyCheckResult(TableDataConsistencyCheckIgnoredType.valueOf(yamlConfig.getIgnoredType())); } - YamlDataConsistencyCountCheckResult yamlCountCheck = yamlConfig.getCountCheckResult(); - DataConsistencyCountCheckResult countCheckResult = new DataConsistencyCountCheckResult(yamlCountCheck.getSourceRecordsCount(), yamlCountCheck.getTargetRecordsCount()); - DataConsistencyContentCheckResult contentCheckResult = new DataConsistencyContentCheckResult(yamlConfig.getContentCheckResult().isMatched()); - return new DataConsistencyCheckResult(countCheckResult, contentCheckResult); + YamlTableDataConsistencyCountCheckResult yamlCountCheck = yamlConfig.getCountCheckResult(); + TableDataConsistencyCountCheckResult countCheckResult = new TableDataConsistencyCountCheckResult(yamlCountCheck.getSourceRecordsCount(), yamlCountCheck.getTargetRecordsCount()); + TableDataConsistencyContentCheckResult contentCheckResult = new TableDataConsistencyContentCheckResult(yamlConfig.getContentCheckResult().isMatched()); + return new TableDataConsistencyCheckResult(countCheckResult, contentCheckResult); } /** @@ -70,7 +69,7 @@ public DataConsistencyCheckResult swapToObject(final YamlDataConsistencyCheckRes * @param param parameter * @return data consistency check result */ - public DataConsistencyCheckResult swapToObject(final String param) { - return swapToObject(YamlEngine.unmarshal(param, YamlDataConsistencyCheckResult.class, true)); + public TableDataConsistencyCheckResult swapToObject(final String param) { + return swapToObject(YamlEngine.unmarshal(param, YamlTableDataConsistencyCheckResult.class, true)); } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/CRC32MatchTableDataConsistencyChecker.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/CRC32MatchTableDataConsistencyChecker.java new file mode 100644 index 
0000000000000..7f07d1fb81ee1 --- /dev/null +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/CRC32MatchTableDataConsistencyChecker.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table; + +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator.CRC32SingleTableInventoryCalculator; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator.SingleTableInventoryCalculator; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; +import org.apache.shardingsphere.infra.spi.annotation.SPIDescription; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; + +import java.util.Collection; +import java.util.LinkedList; + +/** + * CRC32 match table data consistency checker. 
+ */ +@SPIDescription("Match CRC32 of records.") +public final class CRC32MatchTableDataConsistencyChecker implements TableDataConsistencyChecker { + + @Override + public TableInventoryChecker buildTableInventoryChecker(final TableInventoryCheckParameter param) { + return new CRC32MatchTableInventoryChecker(param); + } + + @Override + public Collection getSupportedDatabaseTypes() { + Collection result = new LinkedList<>(); + DatabaseType supportedDatabaseType = TypedSPILoader.getService(DatabaseType.class, "MySQL"); + result.add(supportedDatabaseType); + result.addAll(new DatabaseTypeRegistry(supportedDatabaseType).getAllBranchDatabaseTypes()); + return result; + } + + @Override + public void close() { + } + + @Override + public String getType() { + return "CRC32_MATCH"; + } + + private static final class CRC32MatchTableInventoryChecker extends MatchingTableInventoryChecker { + + CRC32MatchTableInventoryChecker(final TableInventoryCheckParameter param) { + super(param); + } + + @Override + protected SingleTableInventoryCalculator buildSingleTableInventoryCalculator() { + return new CRC32SingleTableInventoryCalculator(); + } + } +} diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/DataMatchTableDataConsistencyChecker.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/DataMatchTableDataConsistencyChecker.java new file mode 100644 index 0000000000000..410c64c4af2fe --- /dev/null +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/DataMatchTableDataConsistencyChecker.java @@ -0,0 +1,100 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table; + +import com.google.common.base.Strings; +import lombok.extern.slf4j.Slf4j; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator.RecordSingleTableInventoryCalculator; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator.SingleTableInventoryCalculator; +import org.apache.shardingsphere.data.pipeline.core.exception.param.PipelineInvalidParameterException; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.spi.ShardingSphereServiceLoader; +import org.apache.shardingsphere.infra.spi.annotation.SPIDescription; + +import java.util.Collection; +import java.util.Properties; + +/** + * Data match table data consistency checker. 
+ */ +@SPIDescription("Match raw data of records.") +@Slf4j +public final class DataMatchTableDataConsistencyChecker implements TableDataConsistencyChecker { + + private static final String CHUNK_SIZE_KEY = "chunk-size"; + + private static final int DEFAULT_CHUNK_SIZE = 1000; + + private int chunkSize; + + @Override + public void init(final Properties props) { + chunkSize = getChunkSize(props); + } + + private int getChunkSize(final Properties props) { + String chunkSizeText = props.getProperty(CHUNK_SIZE_KEY); + if (Strings.isNullOrEmpty(chunkSizeText)) { + return DEFAULT_CHUNK_SIZE; + } + int result; + try { + result = Integer.parseInt(chunkSizeText); + } catch (final NumberFormatException ignore) { + throw new PipelineInvalidParameterException("'chunk-size' is not a valid number: `" + chunkSizeText + "`"); + } + if (result <= 0) { + throw new PipelineInvalidParameterException("Invalid 'chunk-size': " + result); + } + return result; + } + + @Override + public TableInventoryChecker buildTableInventoryChecker(final TableInventoryCheckParameter param) { + return new DataMatchTableInventoryChecker(param, chunkSize); + } + + @Override + public Collection getSupportedDatabaseTypes() { + return ShardingSphereServiceLoader.getServiceInstances(DatabaseType.class); + } + + @Override + public void close() { + } + + @Override + public String getType() { + return "DATA_MATCH"; + } + + private static final class DataMatchTableInventoryChecker extends MatchingTableInventoryChecker { + + private final int chunkSize; + + DataMatchTableInventoryChecker(final TableInventoryCheckParameter param, final int chunkSize) { + super(param); + this.chunkSize = chunkSize; + } + + @Override + protected SingleTableInventoryCalculator buildSingleTableInventoryCalculator() { + return new RecordSingleTableInventoryCalculator(chunkSize); + } + } +} diff --git 
a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/MatchingTableInventoryChecker.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/MatchingTableInventoryChecker.java new file mode 100644 index 0000000000000..3e485e97f52ad --- /dev/null +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/MatchingTableInventoryChecker.java @@ -0,0 +1,164 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.apache.shardingsphere.data.pipeline.api.job.JobOperationType; +import org.apache.shardingsphere.data.pipeline.common.job.progress.listener.PipelineJobProgressUpdatedParameter; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.SingleTableInventoryCalculatedResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.yaml.YamlTableDataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.yaml.YamlTableDataConsistencyCheckResult.YamlTableDataConsistencyContentCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.yaml.YamlTableDataConsistencyCheckResult.YamlTableDataConsistencyCountCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.yaml.YamlTableDataConsistencyCheckResultSwapper; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator.SingleTableInventoryCalculateParameter; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator.SingleTableInventoryCalculator; +import org.apache.shardingsphere.infra.exception.core.external.sql.type.kernel.category.PipelineSQLException; +import org.apache.shardingsphere.infra.exception.core.external.sql.type.wrapper.SQLWrapperException; +import org.apache.shardingsphere.infra.executor.kernel.thread.ExecutorThreadFactoryBuilder; +import org.apache.shardingsphere.infra.util.close.QuietlyCloser; + +import java.sql.SQLException; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.ExecutionException; 
+import java.util.concurrent.Future; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; + +/** + * Matching table inventory checker. + */ +@Slf4j +@RequiredArgsConstructor +public abstract class MatchingTableInventoryChecker implements TableInventoryChecker { + + private final TableInventoryCheckParameter param; + + private final Set calculators = new HashSet<>(); + + @Override + public TableDataConsistencyCheckResult checkSingleTableInventoryData() { + ThreadFactory threadFactory = ExecutorThreadFactoryBuilder.build("job-" + getJobIdDigest(param.getJobId()) + "-matching-check-%d"); + ThreadPoolExecutor executor = new ThreadPoolExecutor(2, 2, 60, TimeUnit.SECONDS, new ArrayBlockingQueue<>(2), threadFactory); + try { + return checkSingleTableInventoryData(param, executor); + } finally { + executor.shutdown(); + executor.shutdownNow(); + } + } + + private TableDataConsistencyCheckResult checkSingleTableInventoryData(final TableInventoryCheckParameter param, final ThreadPoolExecutor executor) { + SingleTableInventoryCalculateParameter sourceParam = new SingleTableInventoryCalculateParameter(param.getSourceDataSource(), param.getSourceTable(), + param.getColumnNames(), param.getUniqueKeys(), param.getProgressContext().getSourceTableCheckPositions().get(param.getSourceTable().getTableName().getOriginal())); + SingleTableInventoryCalculateParameter targetParam = new SingleTableInventoryCalculateParameter(param.getTargetDataSource(), param.getTargetTable(), + param.getColumnNames(), param.getUniqueKeys(), param.getProgressContext().getTargetTableCheckPositions().get(param.getTargetTable().getTableName().getOriginal())); + SingleTableInventoryCalculator sourceCalculator = buildSingleTableInventoryCalculator(); + calculators.add(sourceCalculator); + SingleTableInventoryCalculator targetCalculator = buildSingleTableInventoryCalculator(); + calculators.add(targetCalculator); + Iterator 
sourceCalculatedResults = waitFuture(executor.submit(() -> sourceCalculator.calculate(sourceParam))).iterator(); + Iterator targetCalculatedResults = waitFuture(executor.submit(() -> targetCalculator.calculate(targetParam))).iterator(); + try { + return checkSingleTableInventoryData(sourceCalculatedResults, targetCalculatedResults, param, executor); + } finally { + QuietlyCloser.close(sourceParam.getCalculationContext()); + QuietlyCloser.close(targetParam.getCalculationContext()); + calculators.remove(sourceCalculator); + calculators.remove(targetCalculator); + } + } + + private TableDataConsistencyCheckResult checkSingleTableInventoryData(final Iterator sourceCalculatedResults, + final Iterator targetCalculatedResults, + final TableInventoryCheckParameter param, final ThreadPoolExecutor executor) { + YamlTableDataConsistencyCheckResult checkResult = new YamlTableDataConsistencyCheckResult(new YamlTableDataConsistencyCountCheckResult(), new YamlTableDataConsistencyContentCheckResult(true)); + while (sourceCalculatedResults.hasNext() && targetCalculatedResults.hasNext()) { + if (null != param.getReadRateLimitAlgorithm()) { + param.getReadRateLimitAlgorithm().intercept(JobOperationType.SELECT, 1); + } + SingleTableInventoryCalculatedResult sourceCalculatedResult = waitFuture(executor.submit(sourceCalculatedResults::next)); + SingleTableInventoryCalculatedResult targetCalculatedResult = waitFuture(executor.submit(targetCalculatedResults::next)); + checkResult.getCountCheckResult().addRecordsCount(sourceCalculatedResult.getRecordsCount(), true); + checkResult.getCountCheckResult().addRecordsCount(targetCalculatedResult.getRecordsCount(), false); + if (!Objects.equals(sourceCalculatedResult, targetCalculatedResult)) { + checkResult.getContentCheckResult().setMatched(false); + log.info("content matched false, jobId={}, sourceTable={}, targetTable={}, uniqueKeys={}", param.getJobId(), param.getSourceTable(), param.getTargetTable(), param.getUniqueKeys()); + break; + } + 
if (sourceCalculatedResult.getMaxUniqueKeyValue().isPresent()) { + param.getProgressContext().getSourceTableCheckPositions().put(param.getSourceTable().getTableName().getOriginal(), sourceCalculatedResult.getMaxUniqueKeyValue().get()); + } + if (targetCalculatedResult.getMaxUniqueKeyValue().isPresent()) { + param.getProgressContext().getTargetTableCheckPositions().put(param.getTargetTable().getTableName().getOriginal(), targetCalculatedResult.getMaxUniqueKeyValue().get()); + } + param.getProgressContext().onProgressUpdated(new PipelineJobProgressUpdatedParameter(sourceCalculatedResult.getRecordsCount())); + } + if (sourceCalculatedResults.hasNext()) { + // TODO Refactor SingleTableInventoryCalculatedResult to represent inaccurate number + checkResult.getCountCheckResult().addRecordsCount(1, true); + checkResult.getContentCheckResult().setMatched(false); + return new YamlTableDataConsistencyCheckResultSwapper().swapToObject(checkResult); + } + if (targetCalculatedResults.hasNext()) { + checkResult.getCountCheckResult().addRecordsCount(1, false); + checkResult.getContentCheckResult().setMatched(false); + return new YamlTableDataConsistencyCheckResultSwapper().swapToObject(checkResult); + } + return new YamlTableDataConsistencyCheckResultSwapper().swapToObject(checkResult); + } + + // TODO use digest (crc32, murmurhash) + private String getJobIdDigest(final String jobId) { + return jobId.length() <= 6 ? 
jobId : jobId.substring(0, 6); + } + + private T waitFuture(final Future future) { + try { + return future.get(); + } catch (final InterruptedException ex) { + Thread.currentThread().interrupt(); + throw new SQLWrapperException(new SQLException(ex)); + } catch (final ExecutionException ex) { + if (ex.getCause() instanceof PipelineSQLException) { + throw (PipelineSQLException) ex.getCause(); + } + throw new SQLWrapperException(new SQLException(ex)); + } + } + + protected abstract SingleTableInventoryCalculator buildSingleTableInventoryCalculator(); + + @Override + public void cancel() { + for (SingleTableInventoryCalculator each : calculators) { + each.cancel(); + } + } + + @Override + public boolean isCanceling() { + return calculators.stream().anyMatch(SingleTableInventoryCalculator::isCanceling); + } +} diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/DataConsistencyCalculateAlgorithm.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableDataConsistencyChecker.java similarity index 59% rename from kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/DataConsistencyCalculateAlgorithm.java rename to kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableDataConsistencyChecker.java index 3de4ae04a9c3c..010e504f66d3a 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/DataConsistencyCalculateAlgorithm.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableDataConsistencyChecker.java @@ -15,47 +15,42 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm; +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.DataConsistencyCalculateParameter; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCalculatedResult; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.algorithm.ShardingSphereAlgorithm; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import java.sql.SQLException; import java.util.Collection; /** - * Data consistency calculate algorithm. + * Table data consistency checker. */ -public interface DataConsistencyCalculateAlgorithm extends ShardingSphereAlgorithm { - - /** - * Calculate data for consistency check. - * - * @param param data consistency calculate parameter - * @return calculated result - */ - Iterable calculate(DataConsistencyCalculateParameter param); +public interface TableDataConsistencyChecker extends ShardingSphereAlgorithm, AutoCloseable { /** - * Cancel calculation. + * Build table inventory checker. * - * @throws SQLException SQL exception if cancel underlying SQL execution failure + * @param param check parameter + * @return table inventory checker */ - void cancel() throws SQLException; + TableInventoryChecker buildTableInventoryChecker(TableInventoryCheckParameter param); /** - * Is calculation canceling. + * Is break on inventory check not matched. * - * @return canceling or not + * @return break or not */ - boolean isCanceling(); + default boolean isBreakOnInventoryCheckNotMatched() { + return true; + } /** * Get supported database types. 
- * + * * @return supported database types */ Collection getSupportedDatabaseTypes(); + + @Override + void close(); } diff --git a/features/sharding/plugin/nanoid/src/test/java/org/apache/shardingsphere/sharding/nanoid/algorithm/keygen/NanoIdKeyGenerateAlgorithmTest.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableDataConsistencyCheckerFactory.java similarity index 52% rename from features/sharding/plugin/nanoid/src/test/java/org/apache/shardingsphere/sharding/nanoid/algorithm/keygen/NanoIdKeyGenerateAlgorithmTest.java rename to kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableDataConsistencyCheckerFactory.java index f6620118d01b9..42cec80ad6abf 100644 --- a/features/sharding/plugin/nanoid/src/test/java/org/apache/shardingsphere/sharding/nanoid/algorithm/keygen/NanoIdKeyGenerateAlgorithmTest.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableDataConsistencyCheckerFactory.java @@ -15,19 +15,28 @@ * limitations under the License. */ -package org.apache.shardingsphere.sharding.nanoid.algorithm.keygen; +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table; +import lombok.AccessLevel; +import lombok.NoArgsConstructor; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; -import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; -import org.junit.jupiter.api.Test; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.MatcherAssert.assertThat; +import java.util.Properties; -class NanoIdKeyGenerateAlgorithmTest { +/** + * Table data consistency checker factory. 
+ */ +@NoArgsConstructor(access = AccessLevel.NONE) +public final class TableDataConsistencyCheckerFactory { - @Test - void assertGenerateKey() { - assertThat(TypedSPILoader.getService(KeyGenerateAlgorithm.class, "NANOID").generateKey().toString().length(), is(21)); + /** + * Build table data consistency checker. + * + * @param algorithmType algorithm type + * @param algorithmProps algorithm properties + * @return table data consistency checker + */ + public static TableDataConsistencyChecker newInstance(final String algorithmType, final Properties algorithmProps) { + return TypedSPILoader.getService(TableDataConsistencyChecker.class, null == algorithmType ? "DATA_MATCH" : algorithmType, algorithmProps); } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableInventoryCheckParameter.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableInventoryCheckParameter.java new file mode 100644 index 0000000000000..c89fe51a300b1 --- /dev/null +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableInventoryCheckParameter.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table; + +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.data.pipeline.api.metadata.SchemaTableName; +import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineColumnMetaData; +import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceWrapper; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.ConsistencyCheckJobItemProgressContext; +import org.apache.shardingsphere.data.pipeline.spi.ratelimit.JobRateLimitAlgorithm; + +import java.util.List; + +/** + * Table inventory check parameter. + */ +@RequiredArgsConstructor +@Getter +public final class TableInventoryCheckParameter { + + private final String jobId; + + private final PipelineDataSourceWrapper sourceDataSource; + + private final PipelineDataSourceWrapper targetDataSource; + + private final SchemaTableName sourceTable; + + private final SchemaTableName targetTable; + + private final List columnNames; + + private final List uniqueKeys; + + private final JobRateLimitAlgorithm readRateLimitAlgorithm; + + private final ConsistencyCheckJobItemProgressContext progressContext; +} diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableInventoryChecker.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableInventoryChecker.java new file mode 100644 index 0000000000000..5248d0aa14a5d --- /dev/null +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/TableInventoryChecker.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table; + +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.PipelineCancellable; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; + +/** + * Table inventory checker. + */ +public interface TableInventoryChecker extends PipelineCancellable { + + /** + * Data consistency check for single table inventory data. 
+ * + * @return check result + */ + TableDataConsistencyCheckResult checkSingleTableInventoryData(); +} diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/AbstractDataConsistencyCalculateAlgorithm.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/AbstractSingleTableInventoryCalculator.java similarity index 85% rename from kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/AbstractDataConsistencyCalculateAlgorithm.java rename to kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/AbstractSingleTableInventoryCalculator.java index 20c4fc4adec41..631c1d3a11de1 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/AbstractDataConsistencyCalculateAlgorithm.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/AbstractSingleTableInventoryCalculator.java @@ -15,8 +15,9 @@ * limitations under the License. */ -package org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm; +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator; +import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import java.sql.SQLException; @@ -26,10 +27,10 @@ import java.util.concurrent.atomic.AtomicReference; /** - * Abstract data consistency calculate algorithm. + * Abstract single table inventory calculator. 
*/ @Slf4j -public abstract class AbstractDataConsistencyCalculateAlgorithm implements DataConsistencyCalculateAlgorithm { +public abstract class AbstractSingleTableInventoryCalculator implements SingleTableInventoryCalculator { private final AtomicBoolean canceling = new AtomicBoolean(false); @@ -39,15 +40,15 @@ protected final void setCurrentStatement(final Statement statement) { currentStatement.set(statement); } + @SneakyThrows(SQLException.class) @Override - public void cancel() throws SQLException { + public void cancel() { canceling.set(true); Statement statement = currentStatement.get(); if (null == statement || statement.isClosed()) { log.info("cancel, statement is null or closed"); return; } - long startTimeMillis = System.currentTimeMillis(); try { statement.cancel(); } catch (final SQLFeatureNotSupportedException ex) { @@ -57,12 +58,11 @@ public void cancel() throws SQLException { // CHECKSTYLE:ON log.info("cancel failed: {}", ex.getMessage()); } - log.info("cancel cost {} ms", System.currentTimeMillis() - startTimeMillis); } /** * Is canceling. 
- * + * * @return is canceling or not */ public final boolean isCanceling() { diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/AbstractStreamingDataConsistencyCalculateAlgorithm.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/AbstractStreamingSingleTableInventoryCalculator.java similarity index 67% rename from kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/AbstractStreamingDataConsistencyCalculateAlgorithm.java rename to kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/AbstractStreamingSingleTableInventoryCalculator.java index e51743e1a0728..d9f945de1726c 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/AbstractStreamingDataConsistencyCalculateAlgorithm.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/AbstractStreamingSingleTableInventoryCalculator.java @@ -15,13 +15,12 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm; +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.DataConsistencyCalculateParameter; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCalculatedResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.SingleTableInventoryCalculatedResult; import java.util.Iterator; import java.util.NoSuchElementException; @@ -30,14 +29,14 @@ import java.util.concurrent.atomic.AtomicReference; /** - * Streaming data consistency calculate algorithm. + * Abstract streaming single table inventory calculator. */ @Getter @Slf4j -public abstract class AbstractStreamingDataConsistencyCalculateAlgorithm extends AbstractDataConsistencyCalculateAlgorithm { +public abstract class AbstractStreamingSingleTableInventoryCalculator extends AbstractSingleTableInventoryCalculator { @Override - public final Iterable calculate(final DataConsistencyCalculateParameter param) { + public final Iterable calculate(final SingleTableInventoryCalculateParameter param) { return new ResultIterable(param); } @@ -47,30 +46,30 @@ public final Iterable calculate(final DataConsi * @param param data consistency calculate parameter * @return optional calculated result, empty means there's no more result */ - protected abstract Optional calculateChunk(DataConsistencyCalculateParameter param); + protected abstract Optional calculateChunk(SingleTableInventoryCalculateParameter param); /** * It's not thread-safe, it should be executed in only one thread at the same time. 
*/ @RequiredArgsConstructor - private final class ResultIterable implements Iterable { + private final class ResultIterable implements Iterable { - private final DataConsistencyCalculateParameter param; + private final SingleTableInventoryCalculateParameter param; @Override - public Iterator iterator() { + public Iterator iterator() { return new ResultIterator(param); } } @RequiredArgsConstructor - private final class ResultIterator implements Iterator { + private final class ResultIterator implements Iterator { private final AtomicBoolean currentChunkCalculated = new AtomicBoolean(); - private final AtomicReference> nextResult = new AtomicReference<>(); + private final AtomicReference> nextResult = new AtomicReference<>(); - private final DataConsistencyCalculateParameter param; + private final SingleTableInventoryCalculateParameter param; @Override public boolean hasNext() { @@ -79,9 +78,9 @@ public boolean hasNext() { } @Override - public DataConsistencyCalculatedResult next() { + public SingleTableInventoryCalculatedResult next() { calculateIfNecessary(); - Optional result = nextResult.get(); + Optional result = nextResult.get(); nextResult.set(null); currentChunkCalculated.set(false); return result.orElseThrow(NoSuchElementException::new); diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/CRC32MatchDataConsistencyCalculateAlgorithm.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/CRC32SingleTableInventoryCalculator.java similarity index 72% rename from kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/CRC32MatchDataConsistencyCalculateAlgorithm.java rename to kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/CRC32SingleTableInventoryCalculator.java index 99c0ff4191971..cc448a1069bde 
100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/CRC32MatchDataConsistencyCalculateAlgorithm.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/CRC32SingleTableInventoryCalculator.java @@ -15,21 +15,16 @@ * limitations under the License. */ -package org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm; +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.data.pipeline.common.sqlbuilder.PipelineDataConsistencyCalculateSQLBuilder; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.DataConsistencyCalculateParameter; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCalculatedResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.SingleTableInventoryCalculatedResult; import org.apache.shardingsphere.data.pipeline.core.exception.data.PipelineTableDataConsistencyCheckLoadingFailedException; -import org.apache.shardingsphere.data.pipeline.core.exception.data.UnsupportedCRC32DataConsistencyCalculateAlgorithmException; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; +import org.apache.shardingsphere.data.pipeline.core.exception.data.UnsupportedCRC32SingleTableInventoryCalculatorException; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; -import org.apache.shardingsphere.infra.spi.annotation.SPIDescription; -import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import java.sql.Connection; import java.sql.PreparedStatement; @@ -37,28 +32,26 @@ import java.sql.SQLException; import java.util.Collection; 
import java.util.Collections; -import java.util.LinkedList; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; /** - * CRC32 match data consistency calculate algorithm. + * CRC32 single table inventory calculator. */ -@SPIDescription("Match CRC32 of records.") @Slf4j -public final class CRC32MatchDataConsistencyCalculateAlgorithm extends AbstractDataConsistencyCalculateAlgorithm { +public final class CRC32SingleTableInventoryCalculator extends AbstractSingleTableInventoryCalculator { @Override - public Iterable calculate(final DataConsistencyCalculateParameter param) { + public Iterable calculate(final SingleTableInventoryCalculateParameter param) { PipelineDataConsistencyCalculateSQLBuilder pipelineSQLBuilder = new PipelineDataConsistencyCalculateSQLBuilder(param.getDatabaseType()); List calculatedItems = param.getColumnNames().stream().map(each -> calculateCRC32(pipelineSQLBuilder, param, each)).collect(Collectors.toList()); return Collections.singletonList(new CalculatedResult(calculatedItems.get(0).getRecordsCount(), calculatedItems.stream().map(CalculatedItem::getCrc32).collect(Collectors.toList()))); } - private CalculatedItem calculateCRC32(final PipelineDataConsistencyCalculateSQLBuilder pipelineSQLBuilder, final DataConsistencyCalculateParameter param, final String columnName) { + private CalculatedItem calculateCRC32(final PipelineDataConsistencyCalculateSQLBuilder pipelineSQLBuilder, final SingleTableInventoryCalculateParameter param, final String columnName) { Optional sql = pipelineSQLBuilder.buildCRC32SQL(param.getSchemaName(), param.getLogicTableName(), columnName); - ShardingSpherePreconditions.checkState(sql.isPresent(), () -> new UnsupportedCRC32DataConsistencyCalculateAlgorithmException(param.getDatabaseType())); + ShardingSpherePreconditions.checkState(sql.isPresent(), () -> new UnsupportedCRC32SingleTableInventoryCalculatorException(param.getDatabaseType())); try ( Connection connection = 
param.getDataSource().getConnection(); PreparedStatement preparedStatement = connection.prepareStatement(sql.get()); @@ -73,20 +66,6 @@ private CalculatedItem calculateCRC32(final PipelineDataConsistencyCalculateSQLB } } - @Override - public String getType() { - return "CRC32_MATCH"; - } - - @Override - public Collection getSupportedDatabaseTypes() { - Collection result = new LinkedList<>(); - DatabaseType supportedDatabaseType = TypedSPILoader.getService(DatabaseType.class, "MySQL"); - result.add(supportedDatabaseType); - result.addAll(new DatabaseTypeRegistry(supportedDatabaseType).getAllBranchDatabaseTypes()); - return result; - } - @RequiredArgsConstructor @Getter private static final class CalculatedItem { @@ -98,7 +77,7 @@ private static final class CalculatedItem { @RequiredArgsConstructor @Getter - private static final class CalculatedResult implements DataConsistencyCalculatedResult { + private static final class CalculatedResult implements SingleTableInventoryCalculatedResult { private final int recordsCount; @@ -113,7 +92,7 @@ public boolean equals(final Object o) { return true; } if (getClass() != o.getClass()) { - log.warn("DataMatchCalculatedResult type not match, o.className={}", o.getClass().getName()); + log.warn("RecordSingleTableInventoryCalculatedResult type not match, o.className={}", o.getClass().getName()); return false; } final CalculatedResult that = (CalculatedResult) o; diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/CalculationContext.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/CalculationContext.java new file mode 100644 index 0000000000000..a1183457834ba --- /dev/null +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/CalculationContext.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software 
Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator; + +import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.infra.util.close.QuietlyCloser; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.util.concurrent.atomic.AtomicReference; + +/** + * Calculation context. + */ +@RequiredArgsConstructor +public final class CalculationContext implements AutoCloseable { + + private final AtomicReference connection = new AtomicReference<>(); + + private final AtomicReference preparedStatement = new AtomicReference<>(); + + private final AtomicReference resultSet = new AtomicReference<>(); + + /** + * Get connection. + * + * @return connection + */ + public Connection getConnection() { + return connection.get(); + } + + /** + * Set connection. + * + * @param connection connection + */ + public void setConnection(final Connection connection) { + this.connection.set(connection); + } + + /** + * Get result set. + * + * @return result set + */ + public ResultSet getResultSet() { + return resultSet.get(); + } + + /** + * Set prepared statement. 
+ * + * @param preparedStatement prepared statement + */ + public void setPreparedStatement(final PreparedStatement preparedStatement) { + this.preparedStatement.set(preparedStatement); + } + + /** + * Set result set. + * + * @param resultSet result set + */ + public void setResultSet(final ResultSet resultSet) { + this.resultSet.set(resultSet); + } + + @Override + public void close() { + QuietlyCloser.close(resultSet.get()); + QuietlyCloser.close(preparedStatement.get()); + QuietlyCloser.close(connection.get()); + } +} diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/DataMatchDataConsistencyCalculateAlgorithm.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/RecordSingleTableInventoryCalculator.java similarity index 56% rename from kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/DataMatchDataConsistencyCalculateAlgorithm.java rename to kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/RecordSingleTableInventoryCalculator.java index 4741d555a7236..9892cdf5ea125 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/DataMatchDataConsistencyCalculateAlgorithm.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/RecordSingleTableInventoryCalculator.java @@ -15,25 +15,20 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm; +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator; -import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.data.pipeline.common.query.JDBCStreamQueryBuilder; import org.apache.shardingsphere.data.pipeline.common.sqlbuilder.PipelineDataConsistencyCalculateSQLBuilder; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.DataConsistencyCalculateParameter; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCalculatedResult; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataMatchCalculatedResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.RecordSingleTableInventoryCalculatedResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.SingleTableInventoryCalculatedResult; import org.apache.shardingsphere.data.pipeline.core.dumper.ColumnValueReaderEngine; -import org.apache.shardingsphere.infra.exception.core.external.sql.type.kernel.category.PipelineSQLException; import org.apache.shardingsphere.data.pipeline.core.exception.data.PipelineTableDataConsistencyCheckLoadingFailedException; import org.apache.shardingsphere.infra.database.mysql.type.MySQLDatabaseType; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.util.close.QuietlyCloser; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; -import org.apache.shardingsphere.infra.spi.ShardingSphereServiceLoader; -import org.apache.shardingsphere.infra.spi.annotation.SPIDescription; +import org.apache.shardingsphere.infra.exception.core.external.sql.type.kernel.category.PipelineSQLException; +import org.apache.shardingsphere.infra.util.close.QuietlyCloser; import java.sql.Connection; import 
java.sql.PreparedStatement; @@ -42,62 +37,38 @@ import java.sql.SQLException; import java.util.Collection; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.LinkedList; +import java.util.List; +import java.util.Map; import java.util.Optional; -import java.util.Properties; -import java.util.concurrent.atomic.AtomicReference; /** - * Data match data consistency calculate algorithm. + * Record single table inventory calculator. */ -@SPIDescription("Match raw data of records.") +@RequiredArgsConstructor @Slf4j -public final class DataMatchDataConsistencyCalculateAlgorithm extends AbstractStreamingDataConsistencyCalculateAlgorithm { - - private static final String CHUNK_SIZE_KEY = "chunk-size"; - - private static final int DEFAULT_CHUNK_SIZE = 1000; - - private int chunkSize; - - @Override - public void init(final Properties props) { - chunkSize = getChunkSize(props); - } +public final class RecordSingleTableInventoryCalculator extends AbstractStreamingSingleTableInventoryCalculator { - private int getChunkSize(final Properties props) { - int result; - try { - result = Integer.parseInt(props.getProperty(CHUNK_SIZE_KEY, Integer.toString(DEFAULT_CHUNK_SIZE))); - } catch (final NumberFormatException ignore) { - log.warn("'chunk-size' is not a valid number, use default value {}", DEFAULT_CHUNK_SIZE); - return DEFAULT_CHUNK_SIZE; - } - if (result <= 0) { - log.warn("Invalid 'chunk-size': {}, use default value {}", result, DEFAULT_CHUNK_SIZE); - return DEFAULT_CHUNK_SIZE; - } - return result; - } + private final int chunkSize; @Override - public Optional calculateChunk(final DataConsistencyCalculateParameter param) { + public Optional calculateChunk(final SingleTableInventoryCalculateParameter param) { CalculationContext calculationContext = getOrCreateCalculationContext(param); try { - Collection> records = new LinkedList<>(); + List> records = new LinkedList<>(); Object maxUniqueKeyValue = null; ColumnValueReaderEngine columnValueReaderEngine = 
new ColumnValueReaderEngine(param.getDatabaseType()); ResultSet resultSet = calculationContext.getResultSet(); + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); while (resultSet.next()) { ShardingSpherePreconditions.checkState(!isCanceling(), () -> new PipelineTableDataConsistencyCheckLoadingFailedException(param.getSchemaName(), param.getLogicTableName())); - ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - int columnCount = resultSetMetaData.getColumnCount(); - Collection columnRecord = new LinkedList<>(); - for (int columnIndex = 1; columnIndex <= columnCount; columnIndex++) { - columnRecord.add(columnValueReaderEngine.read(resultSet, resultSetMetaData, columnIndex)); + Map columnRecord = new LinkedHashMap<>(); + for (int columnIndex = 1, columnCount = resultSetMetaData.getColumnCount(); columnIndex <= columnCount; columnIndex++) { + columnRecord.put(resultSetMetaData.getColumnLabel(columnIndex), columnValueReaderEngine.read(resultSet, resultSetMetaData, columnIndex)); } records.add(columnRecord); - maxUniqueKeyValue = columnValueReaderEngine.read(resultSet, resultSetMetaData, param.getUniqueKey().getOrdinalPosition()); + maxUniqueKeyValue = columnValueReaderEngine.read(resultSet, resultSetMetaData, param.getFirstUniqueKey().getOrdinalPosition()); if (records.size() == chunkSize) { break; } @@ -105,7 +76,7 @@ public Optional calculateChunk(final DataConsis if (records.isEmpty()) { calculationContext.close(); } - return records.isEmpty() ? Optional.empty() : Optional.of(new DataMatchCalculatedResult(maxUniqueKeyValue, records)); + return records.isEmpty() ? 
Optional.empty() : Optional.of(new RecordSingleTableInventoryCalculatedResult(maxUniqueKeyValue, records)); } catch (final PipelineSQLException ex) { calculationContext.close(); throw ex; @@ -117,7 +88,7 @@ public Optional calculateChunk(final DataConsis } } - private CalculationContext getOrCreateCalculationContext(final DataConsistencyCalculateParameter param) { + private CalculationContext getOrCreateCalculationContext(final SingleTableInventoryCalculateParameter param) { CalculationContext result = (CalculationContext) param.getCalculationContext(); if (null != result) { return result; @@ -134,14 +105,15 @@ private CalculationContext getOrCreateCalculationContext(final DataConsistencyCa return result; } - private CalculationContext createCalculationContext(final DataConsistencyCalculateParameter param) throws SQLException { + private CalculationContext createCalculationContext(final SingleTableInventoryCalculateParameter param) throws SQLException { Connection connection = param.getDataSource().getConnection(); - CalculationContext result = new CalculationContext(connection); + CalculationContext result = new CalculationContext(); + result.setConnection(connection); param.setCalculationContext(result); return result; } - private void fulfillCalculationContext(final CalculationContext calculationContext, final DataConsistencyCalculateParameter param) throws SQLException { + private void fulfillCalculationContext(final CalculationContext calculationContext, final SingleTableInventoryCalculateParameter param) throws SQLException { String sql = getQuerySQL(param); PreparedStatement preparedStatement = JDBCStreamQueryBuilder.build(param.getDatabaseType(), calculationContext.getConnection(), sql); setCurrentStatement(preparedStatement); @@ -149,76 +121,25 @@ private void fulfillCalculationContext(final CalculationContext calculationConte preparedStatement.setFetchSize(chunkSize); } calculationContext.setPreparedStatement(preparedStatement); - Object 
tableCheckPosition = param.getTableCheckPosition(); - if (null != tableCheckPosition) { - preparedStatement.setObject(1, tableCheckPosition); - } + setParameters(preparedStatement, param); ResultSet resultSet = preparedStatement.executeQuery(); calculationContext.setResultSet(resultSet); } - private String getQuerySQL(final DataConsistencyCalculateParameter param) { - if (null == param.getUniqueKey()) { + private String getQuerySQL(final SingleTableInventoryCalculateParameter param) { + if (null == param.getFirstUniqueKey()) { throw new UnsupportedOperationException("Data consistency of DATA_MATCH type not support table without unique key and primary key now"); } PipelineDataConsistencyCalculateSQLBuilder pipelineSQLBuilder = new PipelineDataConsistencyCalculateSQLBuilder(param.getDatabaseType()); Collection columnNames = param.getColumnNames().isEmpty() ? Collections.singleton("*") : param.getColumnNames(); boolean firstQuery = null == param.getTableCheckPosition(); - return pipelineSQLBuilder.buildQueryAllOrderingSQL(param.getSchemaName(), param.getLogicTableName(), columnNames, param.getUniqueKey().getName(), firstQuery); - } - - @Override - public String getType() { - return "DATA_MATCH"; + return pipelineSQLBuilder.buildQueryAllOrderingSQL(param.getSchemaName(), param.getLogicTableName(), columnNames, param.getFirstUniqueKey().getName(), firstQuery); } - @Override - public Collection getSupportedDatabaseTypes() { - return ShardingSphereServiceLoader.getServiceInstances(DatabaseType.class); - } - - @RequiredArgsConstructor - private static final class CalculationContext implements AutoCloseable { - - @Getter - private final Connection connection; - - private final AtomicReference preparedStatement = new AtomicReference<>(); - - private final AtomicReference resultSet = new AtomicReference<>(); - - /** - * Get result set. - * - * @return result set - */ - public ResultSet getResultSet() { - return resultSet.get(); - } - - /** - * Set prepared statement. 
- * - * @param preparedStatement prepared statement - */ - public void setPreparedStatement(final PreparedStatement preparedStatement) { - this.preparedStatement.set(preparedStatement); - } - - /** - * Set result set. - * - * @param resultSet result set - */ - public void setResultSet(final ResultSet resultSet) { - this.resultSet.set(resultSet); - } - - @Override - public void close() { - QuietlyCloser.close(resultSet.get()); - QuietlyCloser.close(preparedStatement.get()); - QuietlyCloser.close(connection); + private void setParameters(final PreparedStatement preparedStatement, final SingleTableInventoryCalculateParameter param) throws SQLException { + Object tableCheckPosition = param.getTableCheckPosition(); + if (null != tableCheckPosition) { + preparedStatement.setObject(1, tableCheckPosition); } } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataConsistencyCalculateParameter.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/SingleTableInventoryCalculateParameter.java similarity index 68% rename from kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataConsistencyCalculateParameter.java rename to kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/SingleTableInventoryCalculateParameter.java index a98f966685fbf..72440ce712587 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataConsistencyCalculateParameter.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/SingleTableInventoryCalculateParameter.java @@ -15,10 +15,11 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.data.pipeline.core.consistencycheck; +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator; import lombok.Getter; import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.data.pipeline.api.metadata.SchemaTableName; import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineColumnMetaData; import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceWrapper; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; @@ -27,11 +28,11 @@ import java.util.concurrent.atomic.AtomicReference; /** - * Data consistency calculate parameter. + * Single table inventory calculate parameter. */ @RequiredArgsConstructor @Getter -public final class DataConsistencyCalculateParameter { +public final class SingleTableInventoryCalculateParameter { /** * Data source of source side or target side. @@ -39,24 +40,56 @@ public final class DataConsistencyCalculateParameter { */ private final PipelineDataSourceWrapper dataSource; - private final String schemaName; - - private final String logicTableName; + private final SchemaTableName table; private final List columnNames; - private final DatabaseType databaseType; - /** * It could be primary key. * It could be used in order by clause. */ - private final PipelineColumnMetaData uniqueKey; + private final List uniqueKeys; private final Object tableCheckPosition; private final AtomicReference calculationContext = new AtomicReference<>(); + /** + * Get database type. + * + * @return database type + */ + public DatabaseType getDatabaseType() { + return dataSource.getDatabaseType(); + } + + /** + * Get schema name. + * + * @return schema name + */ + public String getSchemaName() { + return table.getSchemaName().getOriginal(); + } + + /** + * Get logic table name. 
+ * + * @return logic table name + */ + public String getLogicTableName() { + return table.getTableName().getOriginal(); + } + + /** + * Get first unique key. + * + * @return first unique key + */ + public PipelineColumnMetaData getFirstUniqueKey() { + return uniqueKeys.get(0); + } + /** * Get calculation context. * diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/SingleTableInventoryCalculator.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/SingleTableInventoryCalculator.java new file mode 100644 index 0000000000000..b108da56e6147 --- /dev/null +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/SingleTableInventoryCalculator.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator; + +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.PipelineCancellable; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.SingleTableInventoryCalculatedResult; + +/** + * Single table inventory calculator. + */ +public interface SingleTableInventoryCalculator extends PipelineCancellable { + + /** + * Calculate for single table inventory data. + * + * @param param calculate parameter + * @return calculated result + */ + Iterable calculate(SingleTableInventoryCalculateParameter param); +} diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/dumper/ColumnValueReaderEngine.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/dumper/ColumnValueReaderEngine.java index efd2fc274bec9..0756e622e251a 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/dumper/ColumnValueReaderEngine.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/dumper/ColumnValueReaderEngine.java @@ -18,8 +18,8 @@ package org.apache.shardingsphere.data.pipeline.core.dumper; import org.apache.shardingsphere.data.pipeline.spi.ingest.dumper.DialectColumnValueReader; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import java.sql.ResultSet; import java.sql.ResultSetMetaData; @@ -63,13 +63,29 @@ private static Object readStandardValue(final ResultSet resultSet, final ResultS case Types.BOOLEAN: return resultSet.getBoolean(columnIndex); case Types.TINYINT: - return metaData.isSigned(columnIndex) ? 
resultSet.getByte(columnIndex) : resultSet.getShort(columnIndex); + if (isSigned(metaData, columnIndex)) { + return resultSet.getByte(columnIndex); + } else { + return resultSet.getShort(columnIndex); + } case Types.SMALLINT: - return metaData.isSigned(columnIndex) ? resultSet.getShort(columnIndex) : resultSet.getInt(columnIndex); + if (isSigned(metaData, columnIndex)) { + return resultSet.getShort(columnIndex); + } else { + return resultSet.getInt(columnIndex); + } case Types.INTEGER: - return metaData.isSigned(columnIndex) ? resultSet.getInt(columnIndex) : resultSet.getLong(columnIndex); + if (isSigned(metaData, columnIndex)) { + return resultSet.getInt(columnIndex); + } else { + return resultSet.getLong(columnIndex); + } case Types.BIGINT: - return metaData.isSigned(columnIndex) ? resultSet.getLong(columnIndex) : resultSet.getBigDecimal(columnIndex); + if (isSigned(metaData, columnIndex)) { + return resultSet.getLong(columnIndex); + } else { + return resultSet.getBigDecimal(columnIndex); + } case Types.NUMERIC: case Types.DECIMAL: return resultSet.getBigDecimal(columnIndex); @@ -107,4 +123,8 @@ private static Object readStandardValue(final ResultSet resultSet, final ResultS return resultSet.getObject(columnIndex); } } + + private static boolean isSigned(final ResultSetMetaData metaData, final int columnIndex) throws SQLException { + return metaData.isSigned(columnIndex); + } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/data/UnsupportedCRC32DataConsistencyCalculateAlgorithmException.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/data/UnsupportedCRC32SingleTableInventoryCalculatorException.java similarity index 83% rename from kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/data/UnsupportedCRC32DataConsistencyCalculateAlgorithmException.java rename to 
kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/data/UnsupportedCRC32SingleTableInventoryCalculatorException.java index 9d3970143973e..79ea2048815d2 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/data/UnsupportedCRC32DataConsistencyCalculateAlgorithmException.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/data/UnsupportedCRC32SingleTableInventoryCalculatorException.java @@ -22,13 +22,13 @@ import org.apache.shardingsphere.infra.exception.core.external.sql.sqlstate.XOpenSQLState; /** - * Unsupported CRC32 data consistency calculate algorithm exception. + * Unsupported CRC32 single table inventory calculator exception. */ -public final class UnsupportedCRC32DataConsistencyCalculateAlgorithmException extends PipelineSQLException { +public final class UnsupportedCRC32SingleTableInventoryCalculatorException extends PipelineSQLException { private static final long serialVersionUID = 580323508713524816L; - public UnsupportedCRC32DataConsistencyCalculateAlgorithmException(final DatabaseType databaseType) { + public UnsupportedCRC32SingleTableInventoryCalculatorException(final DatabaseType databaseType) { super(XOpenSQLState.FEATURE_NOT_SUPPORTED, 53, String.format("Unsupported CRC32 data consistency calculate algorithm with database type `%s`.", databaseType.getType())); } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/importer/sink/PipelineDataSourceSink.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/importer/sink/PipelineDataSourceSink.java index d4d8774c7908a..b193b5324f5cf 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/importer/sink/PipelineDataSourceSink.java +++ 
b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/importer/sink/PipelineDataSourceSink.java @@ -240,7 +240,7 @@ private void executeUpdate(final Connection connection, final DataRecord dataRec for (int i = 0; i < conditionColumns.size(); i++) { Column keyColumn = conditionColumns.get(i); // TODO There to be compatible with PostgreSQL before value is null except primary key and unsupported updating sharding value now. - if (shardingColumns.contains(keyColumn.getName()) && keyColumn.getOldValue() == null) { + if (shardingColumns.contains(keyColumn.getName()) && null == keyColumn.getOldValue()) { preparedStatement.setObject(setColumns.size() + i + 1, keyColumn.getValue()); continue; } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/progress/PipelineJobProgressDetector.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/progress/PipelineJobProgressDetector.java index 2dabee73cf16d..36f8cf3b46ef2 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/progress/PipelineJobProgressDetector.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/progress/PipelineJobProgressDetector.java @@ -21,9 +21,11 @@ import lombok.NoArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.data.pipeline.common.ingest.position.FinishedPosition; +import org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress; import org.apache.shardingsphere.data.pipeline.core.task.PipelineTask; import java.util.Collection; +import java.util.Objects; /** * Pipeline job progress detector. @@ -33,15 +35,34 @@ public final class PipelineJobProgressDetector { /** - * All inventory tasks is finished. + * Whether all inventory tasks is finished. 
* * @param inventoryTasks to check inventory tasks - * @return is finished + * @return finished or not */ - public static boolean allInventoryTasksFinished(final Collection inventoryTasks) { + public static boolean isAllInventoryTasksFinished(final Collection inventoryTasks) { if (inventoryTasks.isEmpty()) { log.warn("inventoryTasks is empty"); } return inventoryTasks.stream().allMatch(each -> each.getTaskProgress().getPosition() instanceof FinishedPosition); } + + /** + * Whether inventory is finished or not. + * + * @param jobShardingCount job sharding count + * @param jobItemProgresses job item progresses + * @return finished or not + */ + public static boolean isInventoryFinished(final int jobShardingCount, final Collection jobItemProgresses) { + return isAllProgressesFilled(jobShardingCount, jobItemProgresses) && isAllInventoryTasksCompleted(jobItemProgresses); + } + + private static boolean isAllProgressesFilled(final int jobShardingCount, final Collection jobItemProgresses) { + return jobShardingCount == jobItemProgresses.size() && jobItemProgresses.stream().allMatch(Objects::nonNull); + } + + private static boolean isAllInventoryTasksCompleted(final Collection jobItemProgresses) { + return jobItemProgresses.stream().flatMap(each -> each.getInventory().getProgresses().values().stream()).allMatch(each -> each.getPosition() instanceof FinishedPosition); + } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/progress/persist/PipelineJobProgressPersistService.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/progress/persist/PipelineJobProgressPersistService.java index c071aaed9c01a..478cd13bb6c93 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/progress/persist/PipelineJobProgressPersistService.java +++ 
b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/progress/persist/PipelineJobProgressPersistService.java @@ -80,11 +80,29 @@ public static void remove(final String jobId) { * @param shardingItem sharding item */ public static void notifyPersist(final String jobId, final int shardingItem) { + getPersistContext(jobId, shardingItem).ifPresent(PipelineJobProgressPersistService::notifyPersist); + } + + private static void notifyPersist(final PipelineJobProgressPersistContext persistContext) { + persistContext.getHasNewEvents().set(true); + } + + private static Optional getPersistContext(final String jobId, final int shardingItem) { Map persistContextMap = JOB_PROGRESS_PERSIST_MAP.getOrDefault(jobId, Collections.emptyMap()); - PipelineJobProgressPersistContext persistContext = persistContextMap.get(shardingItem); - if (null != persistContext) { - persistContext.getHasNewEvents().set(true); - } + return Optional.ofNullable(persistContextMap.get(shardingItem)); + } + + /** + * Persist now. 
+ * + * @param jobId job ID + * @param shardingItem sharding item + */ + public static void persistNow(final String jobId, final int shardingItem) { + getPersistContext(jobId, shardingItem).ifPresent(persistContext -> { + notifyPersist(persistContext); + PersistJobContextRunnable.persist(jobId, shardingItem, persistContext); + }); } private static final class PersistJobContextRunnable implements Runnable { @@ -96,7 +114,7 @@ public void run() { } } - private void persist(final String jobId, final int shardingItem, final PipelineJobProgressPersistContext persistContext) { + private static synchronized void persist(final String jobId, final int shardingItem, final PipelineJobProgressPersistContext persistContext) { Long beforePersistingProgressMillis = persistContext.getBeforePersistingProgressMillis().get(); if ((null == beforePersistingProgressMillis || System.currentTimeMillis() - beforePersistingProgressMillis < TimeUnit.SECONDS.toMillis(DELAY_SECONDS)) && !persistContext.getHasNewEvents().get()) { diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/InventoryIncrementalJobAPI.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/InventoryIncrementalJobAPI.java index e0c1413d86639..523b9e8997704 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/InventoryIncrementalJobAPI.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/InventoryIncrementalJobAPI.java @@ -19,27 +19,30 @@ import org.apache.shardingsphere.data.pipeline.common.config.job.PipelineJobConfiguration; import org.apache.shardingsphere.data.pipeline.common.config.process.PipelineProcessConfiguration; +import org.apache.shardingsphere.data.pipeline.common.context.InventoryIncrementalProcessContext; import org.apache.shardingsphere.data.pipeline.common.context.PipelineContextKey; import 
org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress; import org.apache.shardingsphere.data.pipeline.common.job.progress.JobOffsetInfo; import org.apache.shardingsphere.data.pipeline.common.pojo.DataConsistencyCheckAlgorithmInfo; import org.apache.shardingsphere.data.pipeline.common.pojo.InventoryIncrementalJobItemInfo; import org.apache.shardingsphere.data.pipeline.core.consistencycheck.ConsistencyCheckJobItemProgressContext; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.PipelineDataConsistencyChecker; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; import java.sql.SQLException; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.Properties; /** * Inventory incremental job API. */ public interface InventoryIncrementalJobAPI extends PipelineJobAPI { + @Override + InventoryIncrementalProcessContext buildPipelineProcessContext(PipelineJobConfiguration pipelineJobConfig); + /** * Alter process configuration. * @@ -99,24 +102,15 @@ public interface InventoryIncrementalJobAPI extends PipelineJobAPI { Collection listDataConsistencyCheckAlgorithms(); /** - * Build data consistency calculate algorithm. - * - * @param algorithmType algorithm type - * @param algorithmProps algorithm properties - * @return calculate algorithm - */ - DataConsistencyCalculateAlgorithm buildDataConsistencyCalculateAlgorithm(String algorithmType, Properties algorithmProps); - - /** - * Do data consistency check. + * Build pipeline data consistency checker. 
* * @param pipelineJobConfig job configuration - * @param calculateAlgorithm calculate algorithm + * @param processContext process context * @param progressContext consistency check job item progress context - * @return each logic table check result + * @return all logic tables check result */ - Map dataConsistencyCheck(PipelineJobConfiguration pipelineJobConfig, DataConsistencyCalculateAlgorithm calculateAlgorithm, - ConsistencyCheckJobItemProgressContext progressContext); + PipelineDataConsistencyChecker buildPipelineDataConsistencyChecker(PipelineJobConfiguration pipelineJobConfig, InventoryIncrementalProcessContext processContext, + ConsistencyCheckJobItemProgressContext progressContext); /** * Aggregate data consistency check results. @@ -125,14 +119,15 @@ Map dataConsistencyCheck(PipelineJobConfigur * @param checkResults check results * @return check success or not */ - boolean aggregateDataConsistencyCheckResults(String jobId, Map checkResults); + boolean aggregateDataConsistencyCheckResults(String jobId, Map checkResults); /** * Commit pipeline job. * * @param jobId job ID + * @throws SQLException sql exception */ - void commit(String jobId); + void commit(String jobId) throws SQLException; /** * Rollback pipeline job. 
diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/impl/AbstractInventoryIncrementalJobAPIImpl.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/impl/AbstractInventoryIncrementalJobAPIImpl.java index 8c312688b5cad..ecf481e03cb9a 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/impl/AbstractInventoryIncrementalJobAPIImpl.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/job/service/impl/AbstractInventoryIncrementalJobAPIImpl.java @@ -24,7 +24,6 @@ import org.apache.shardingsphere.data.pipeline.common.config.process.PipelineProcessConfiguration; import org.apache.shardingsphere.data.pipeline.common.config.process.PipelineProcessConfigurationUtils; import org.apache.shardingsphere.data.pipeline.common.context.InventoryIncrementalJobItemContext; -import org.apache.shardingsphere.data.pipeline.common.context.InventoryIncrementalProcessContext; import org.apache.shardingsphere.data.pipeline.common.context.PipelineContextKey; import org.apache.shardingsphere.data.pipeline.common.context.PipelineJobItemContext; import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; @@ -41,10 +40,8 @@ import org.apache.shardingsphere.data.pipeline.common.pojo.TableBasedPipelineJobInfo; import org.apache.shardingsphere.data.pipeline.common.task.progress.IncrementalTaskProgress; import org.apache.shardingsphere.data.pipeline.common.task.progress.InventoryTaskProgress; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.ConsistencyCheckJobItemProgressContext; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.PipelineDataConsistencyChecker; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm; -import 
org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyChecker; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobAPI; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory; @@ -54,7 +51,6 @@ import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.spi.ShardingSphereServiceLoader; import org.apache.shardingsphere.infra.spi.annotation.SPIDescription; -import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import java.util.Collection; @@ -65,7 +61,7 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Optional; -import java.util.Properties; +import java.util.stream.Collectors; import java.util.stream.IntStream; /** @@ -81,9 +77,6 @@ public abstract class AbstractInventoryIncrementalJobAPIImpl extends AbstractPip private final YamlJobOffsetInfoSwapper jobOffsetInfoSwapper = new YamlJobOffsetInfoSwapper(); - @Override - public abstract InventoryIncrementalProcessContext buildPipelineProcessContext(PipelineJobConfiguration pipelineJobConfig); - @Override public void alterProcessConfiguration(final PipelineContextKey contextKey, final PipelineProcessConfiguration processConfig) { // TODO check rateLimiter type match or not @@ -126,7 +119,7 @@ public List getJobItemInfos(final String jobId) continue; } int inventoryFinishedPercentage = 0; - if (JobStatus.EXECUTE_INCREMENTAL_TASK == jobItemProgress.getStatus()) { + if (JobStatus.EXECUTE_INCREMENTAL_TASK == jobItemProgress.getStatus() || JobStatus.FINISHED == jobItemProgress.getStatus()) { 
inventoryFinishedPercentage = 100; } else if (0 != jobItemProgress.getProcessedRecordsCount() && 0 != jobItemProgress.getInventoryRecordsCount()) { inventoryFinishedPercentage = (int) Math.min(100, jobItemProgress.getProcessedRecordsCount() * 100 / jobItemProgress.getInventoryRecordsCount()); @@ -209,9 +202,10 @@ public void updateJobItemStatus(final String jobId, final int shardingItem, fina @Override public Collection listDataConsistencyCheckAlgorithms() { Collection result = new LinkedList<>(); - for (DataConsistencyCalculateAlgorithm each : ShardingSphereServiceLoader.getServiceInstances(DataConsistencyCalculateAlgorithm.class)) { + for (TableDataConsistencyChecker each : ShardingSphereServiceLoader.getServiceInstances(TableDataConsistencyChecker.class)) { SPIDescription description = each.getClass().getAnnotation(SPIDescription.class); - result.add(new DataConsistencyCheckAlgorithmInfo(each.getType(), getSupportedDatabaseTypes(each.getSupportedDatabaseTypes()), null == description ? "" : description.value())); + String typeAliases = each.getTypeAliases().stream().map(Object::toString).collect(Collectors.joining(",")); + result.add(new DataConsistencyCheckAlgorithmInfo(each.getType(), typeAliases, getSupportedDatabaseTypes(each.getSupportedDatabaseTypes()), null == description ? 
"" : description.value())); } return result; } @@ -221,30 +215,12 @@ private Collection getSupportedDatabaseTypes(final Collection dataConsistencyCheck(final PipelineJobConfiguration jobConfig, final DataConsistencyCalculateAlgorithm calculateAlgorithm, - final ConsistencyCheckJobItemProgressContext progressContext) { - String jobId = jobConfig.getJobId(); - PipelineDataConsistencyChecker dataConsistencyChecker = buildPipelineDataConsistencyChecker(jobConfig, buildPipelineProcessContext(jobConfig), progressContext); - Map result = dataConsistencyChecker.check(calculateAlgorithm); - log.info("job {} with check algorithm '{}' data consistency checker result {}", jobId, calculateAlgorithm.getType(), result); - return result; - } - - protected abstract PipelineDataConsistencyChecker buildPipelineDataConsistencyChecker(PipelineJobConfiguration pipelineJobConfig, InventoryIncrementalProcessContext processContext, - ConsistencyCheckJobItemProgressContext progressContext); - - @Override - public boolean aggregateDataConsistencyCheckResults(final String jobId, final Map checkResults) { + public boolean aggregateDataConsistencyCheckResults(final String jobId, final Map checkResults) { if (checkResults.isEmpty()) { throw new IllegalArgumentException("checkResults empty, jobId:" + jobId); } - for (Entry entry : checkResults.entrySet()) { - DataConsistencyCheckResult checkResult = entry.getValue(); + for (Entry entry : checkResults.entrySet()) { + TableDataConsistencyCheckResult checkResult = entry.getValue(); if (!checkResult.isMatched()) { return false; } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/listener/ShardingSphereStatisticsScheduleCollector.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/listener/ShardingSphereStatisticsScheduleCollector.java index 3aca819ae99f6..478a972202d26 100644 --- 
a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/listener/ShardingSphereStatisticsScheduleCollector.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/listener/ShardingSphereStatisticsScheduleCollector.java @@ -21,19 +21,19 @@ import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.infra.executor.kernel.thread.ExecutorThreadFactoryBuilder; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; -import org.apache.shardingsphere.infra.metadata.statistics.ShardingSphereStatistics; +import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; import org.apache.shardingsphere.infra.metadata.statistics.ShardingSphereDatabaseData; import org.apache.shardingsphere.infra.metadata.statistics.ShardingSphereRowData; import org.apache.shardingsphere.infra.metadata.statistics.ShardingSphereSchemaData; +import org.apache.shardingsphere.infra.metadata.statistics.ShardingSphereStatistics; import org.apache.shardingsphere.infra.metadata.statistics.ShardingSphereTableData; import org.apache.shardingsphere.infra.metadata.statistics.collector.ShardingSphereStatisticsCollector; -import org.apache.shardingsphere.infra.metadata.statistics.event.ShardingSphereSchemaDataAlteredEvent; -import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; -import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; -import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.infra.yaml.data.swapper.YamlShardingSphereRowDataSwapper; import org.apache.shardingsphere.mode.manager.ContextManager; +import 
org.apache.shardingsphere.mode.manager.cluster.coordinator.registry.data.event.ShardingSphereSchemaDataAlteredEvent; import java.sql.SQLException; import java.util.ArrayList; diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/metadata/PipelineDataSourcePersistService.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/metadata/PipelineDataSourcePersistService.java index 07781bd456d38..ad7ed42c00707 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/metadata/PipelineDataSourcePersistService.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/metadata/PipelineDataSourcePersistService.java @@ -21,7 +21,7 @@ import org.apache.shardingsphere.data.pipeline.common.context.PipelineContextKey; import org.apache.shardingsphere.data.pipeline.common.job.type.JobType; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; @@ -33,27 +33,27 @@ /** * Pipeline data source persist service. 
*/ -public final class PipelineDataSourcePersistService implements PipelineMetaDataPersistService> { +public final class PipelineDataSourcePersistService implements PipelineMetaDataPersistService> { private final YamlDataSourceConfigurationSwapper swapper = new YamlDataSourceConfigurationSwapper(); @Override @SuppressWarnings("unchecked") - public Map load(final PipelineContextKey contextKey, final JobType jobType) { + public Map load(final PipelineContextKey contextKey, final JobType jobType) { String dataSourcesProps = PipelineAPIFactory.getGovernanceRepositoryAPI(contextKey).getMetaDataDataSources(jobType); if (Strings.isNullOrEmpty(dataSourcesProps)) { return Collections.emptyMap(); } Map> yamlDataSources = YamlEngine.unmarshal(dataSourcesProps, Map.class); - Map result = new LinkedHashMap<>(yamlDataSources.size(), 1F); - yamlDataSources.forEach((key, value) -> result.put(key, swapper.swapToDataSourceProperties(value))); + Map result = new LinkedHashMap<>(yamlDataSources.size(), 1F); + yamlDataSources.forEach((key, value) -> result.put(key, swapper.swapToDataSourcePoolProperties(value))); return result; } @Override - public void persist(final PipelineContextKey contextKey, final JobType jobType, final Map dataSourcePropsMap) { - Map> dataSourceMap = new LinkedHashMap<>(dataSourcePropsMap.size(), 1F); - for (Entry entry : dataSourcePropsMap.entrySet()) { + public void persist(final PipelineContextKey contextKey, final JobType jobType, final Map propsMap) { + Map> dataSourceMap = new LinkedHashMap<>(propsMap.size(), 1F); + for (Entry entry : propsMap.entrySet()) { dataSourceMap.put(entry.getKey(), swapper.swapToMap(entry.getValue())); } PipelineAPIFactory.getGovernanceRepositoryAPI(contextKey).persistMetaDataDataSources(jobType, YamlEngine.marshal(dataSourceMap)); diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/preparer/PipelineJobPreparerUtils.java 
b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/preparer/PipelineJobPreparerUtils.java index 900561d08c3c7..02f46a3992859 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/preparer/PipelineJobPreparerUtils.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/preparer/PipelineJobPreparerUtils.java @@ -38,7 +38,7 @@ import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.parser.SQLParserEngine; @@ -183,7 +183,7 @@ public static void destroyPosition(final String jobId, final PipelineDataSourceC log.info("Cleanup database type:{}, data source type:{}", databaseType.getType(), pipelineDataSourceConfig.getType()); if (pipelineDataSourceConfig instanceof ShardingSpherePipelineDataSourceConfiguration) { ShardingSpherePipelineDataSourceConfiguration dataSourceConfig = (ShardingSpherePipelineDataSourceConfiguration) pipelineDataSourceConfig; - for (DataSourceProperties each : new YamlDataSourceConfigurationSwapper().getDataSourcePropertiesMap(dataSourceConfig.getRootConfig()).values()) { + for (DataSourcePoolProperties each : new YamlDataSourceConfigurationSwapper().getDataSourcePoolPropertiesMap(dataSourceConfig.getRootConfig()).values()) { try (PipelineDataSourceWrapper dataSource = new PipelineDataSourceWrapper(DataSourcePoolCreator.create(each), databaseType)) { positionInitializer.destroy(dataSource, 
jobId); } @@ -193,7 +193,7 @@ public static void destroyPosition(final String jobId, final PipelineDataSourceC StandardPipelineDataSourceConfiguration dataSourceConfig = (StandardPipelineDataSourceConfiguration) pipelineDataSourceConfig; try ( PipelineDataSourceWrapper dataSource = new PipelineDataSourceWrapper( - DataSourcePoolCreator.create((DataSourceProperties) dataSourceConfig.getDataSourceConfiguration()), databaseType)) { + DataSourcePoolCreator.create((DataSourcePoolProperties) dataSourceConfig.getDataSourceConfiguration()), databaseType)) { positionInitializer.destroy(dataSource, jobId); } } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/PipelineTaskUtils.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/PipelineTaskUtils.java index 8850c511de096..c271c1f86d650 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/PipelineTaskUtils.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/PipelineTaskUtils.java @@ -44,21 +44,21 @@ public final class PipelineTaskUtils { */ public static String generateInventoryTaskId(final InventoryDumperConfiguration inventoryDumperConfig) { String result = String.format("%s.%s", inventoryDumperConfig.getDataSourceName(), inventoryDumperConfig.getActualTableName()); - return null == inventoryDumperConfig.getShardingItem() ? result : result + "#" + inventoryDumperConfig.getShardingItem(); + return result + "#" + inventoryDumperConfig.getShardingItem(); } /** * Create incremental task progress. 
* * @param position ingest position - * @param jobItemProgress job item progress + * @param initProgress initial job item progress * @return incremental task progress */ - public static IncrementalTaskProgress createIncrementalTaskProgress(final IngestPosition position, final InventoryIncrementalJobItemProgress jobItemProgress) { + public static IncrementalTaskProgress createIncrementalTaskProgress(final IngestPosition position, final InventoryIncrementalJobItemProgress initProgress) { IncrementalTaskProgress result = new IncrementalTaskProgress(position); - if (null != jobItemProgress && null != jobItemProgress.getIncremental()) { - Optional.ofNullable(jobItemProgress.getIncremental().getIncrementalTaskProgress()) - .ifPresent(optional -> result.setIncrementalTaskDelay(jobItemProgress.getIncremental().getIncrementalTaskProgress().getIncrementalTaskDelay())); + if (null != initProgress && null != initProgress.getIncremental()) { + Optional.ofNullable(initProgress.getIncremental().getIncrementalTaskProgress()) + .ifPresent(optional -> result.setIncrementalTaskDelay(initProgress.getIncremental().getIncrementalTaskProgress().getIncrementalTaskDelay())); } return result; } diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/TaskExecuteCallback.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/TaskExecuteCallback.java index c4e88b5a79d53..82b5ec4259a15 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/TaskExecuteCallback.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/TaskExecuteCallback.java @@ -37,7 +37,7 @@ public void onSuccess() { @Override public void onFailure(final Throwable throwable) { - log.error("onFailure, task ID={}", task.getTaskId()); + log.error("onFailure, task ID={}", task.getTaskId(), throwable); task.stop(); IOUtils.closeQuietly(task); } diff --git 
a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/runner/InventoryIncrementalTasksRunner.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/runner/InventoryIncrementalTasksRunner.java index 064a88f958870..d1af5e08d900c 100644 --- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/runner/InventoryIncrementalTasksRunner.java +++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/task/runner/InventoryIncrementalTasksRunner.java @@ -25,6 +25,7 @@ import org.apache.shardingsphere.data.pipeline.common.execute.ExecuteEngine; import org.apache.shardingsphere.data.pipeline.common.ingest.position.FinishedPosition; import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; +import org.apache.shardingsphere.data.pipeline.core.job.progress.persist.PipelineJobProgressPersistService; import org.apache.shardingsphere.infra.util.close.QuietlyCloser; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; import org.apache.shardingsphere.data.pipeline.core.job.progress.PipelineJobProgressDetector; @@ -78,7 +79,7 @@ public void start() { return; } TypedSPILoader.getService(PipelineJobAPI.class, PipelineJobIdUtils.parseJobType(jobItemContext.getJobId()).getType()).persistJobItemProgress(jobItemContext); - if (PipelineJobProgressDetector.allInventoryTasksFinished(inventoryTasks)) { + if (PipelineJobProgressDetector.isAllInventoryTasksFinished(inventoryTasks)) { log.info("All inventory tasks finished."); executeIncrementalTask(); } else { @@ -124,8 +125,9 @@ private synchronized void executeIncrementalTask() { } protected void inventorySuccessCallback() { - if (PipelineJobProgressDetector.allInventoryTasksFinished(inventoryTasks)) { + if (PipelineJobProgressDetector.isAllInventoryTasksFinished(inventoryTasks)) { log.info("onSuccess, all inventory tasks finished."); + 
PipelineJobProgressPersistService.persistNow(jobItemContext.getJobId(), jobItemContext.getShardingItem()); executeIncrementalTask(); } else { log.info("onSuccess, inventory tasks not finished"); diff --git a/kernel/data-pipeline/core/src/main/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm b/kernel/data-pipeline/core/src/main/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm deleted file mode 100644 index 3a6d8e369e21f..0000000000000 --- a/kernel/data-pipeline/core/src/main/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm +++ /dev/null @@ -1,19 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataMatchDataConsistencyCalculateAlgorithm -org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.CRC32MatchDataConsistencyCalculateAlgorithm diff --git a/features/sharding/plugin/cosid/src/main/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm b/kernel/data-pipeline/core/src/main/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyChecker similarity index 78% rename from features/sharding/plugin/cosid/src/main/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm rename to kernel/data-pipeline/core/src/main/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyChecker index 7ada14153e1fa..7b79dba22e430 100644 --- a/features/sharding/plugin/cosid/src/main/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm +++ b/kernel/data-pipeline/core/src/main/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyChecker @@ -15,5 +15,5 @@ # limitations under the License. 
# -org.apache.shardingsphere.sharding.cosid.algorithm.keygen.CosIdKeyGenerateAlgorithm -org.apache.shardingsphere.sharding.cosid.algorithm.keygen.CosIdSnowflakeKeyGenerateAlgorithm +org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.CRC32MatchTableDataConsistencyChecker +org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.DataMatchTableDataConsistencyChecker diff --git a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/datasource/PipelineDataSourceWrapperTest.java b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/datasource/PipelineDataSourceWrapperTest.java index 7a61e3bc6ea77..455a2b0dc2d59 100644 --- a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/datasource/PipelineDataSourceWrapperTest.java +++ b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/datasource/PipelineDataSourceWrapperTest.java @@ -110,16 +110,4 @@ void assertSetLogWriterFailure() throws SQLException { doThrow(new SQLException("")).when(dataSource).setLogWriter(printWriter); assertThrows(SQLException.class, () -> new PipelineDataSourceWrapper(dataSource, TypedSPILoader.getService(DatabaseType.class, "FIXTURE")).setLogWriter(printWriter)); } - - @Test - void assertCloseExceptionFailure() throws Exception { - doThrow(new Exception("")).when((AutoCloseable) dataSource).close(); - assertThrows(SQLException.class, () -> new PipelineDataSourceWrapper(dataSource, TypedSPILoader.getService(DatabaseType.class, "FIXTURE")).close()); - } - - @Test - void assertCloseSQLExceptionFailure() throws Exception { - doThrow(new SQLException("")).when((AutoCloseable) dataSource).close(); - assertThrows(SQLException.class, () -> new PipelineDataSourceWrapper(dataSource, TypedSPILoader.getService(DatabaseType.class, "FIXTURE")).close()); - } } diff --git 
a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/datasource/config/yaml/YamlJdbcConfigurationTest.java b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/datasource/config/yaml/YamlJdbcConfigurationTest.java index 61e7871021b53..a90da6b619166 100644 --- a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/datasource/config/yaml/YamlJdbcConfigurationTest.java +++ b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/datasource/config/yaml/YamlJdbcConfigurationTest.java @@ -37,10 +37,10 @@ class YamlJdbcConfigurationTest { @Test void assertConstructionWithUrl() { - assertYamlJdbcConfiguration(YamlEngine.unmarshal(YamlEngine.marshal(getDataSourcePropsWithUrl()), YamlJdbcConfiguration.class)); + assertYamlJdbcConfiguration(YamlEngine.unmarshal(YamlEngine.marshal(getDataSourcePoolPropertiesWithUrl()), YamlJdbcConfiguration.class)); } - private Map getDataSourcePropsWithUrl() { + private Map getDataSourcePoolPropertiesWithUrl() { Map result = new HashMap<>(3, 1F); result.put("url", JDBC_URL); result.put("username", USERNAME); diff --git a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/job/progress/InventoryIncrementalJobItemProgressTest.java b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/job/progress/InventoryIncrementalJobItemProgressTest.java index 823b7aa50d65b..c19e7a466cfaf 100644 --- a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/job/progress/InventoryIncrementalJobItemProgressTest.java +++ b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/job/progress/InventoryIncrementalJobItemProgressTest.java @@ -19,15 +19,20 @@ import org.apache.shardingsphere.data.pipeline.api.ingest.position.IngestPosition; import 
org.apache.shardingsphere.data.pipeline.common.ingest.position.FinishedPosition; -import org.apache.shardingsphere.data.pipeline.common.ingest.position.pk.type.IntegerPrimaryKeyPosition; import org.apache.shardingsphere.data.pipeline.common.ingest.position.PlaceholderPosition; +import org.apache.shardingsphere.data.pipeline.common.ingest.position.pk.type.IntegerPrimaryKeyPosition; +import org.apache.shardingsphere.data.pipeline.common.ingest.position.pk.type.StringPrimaryKeyPosition; +import org.apache.shardingsphere.data.pipeline.common.ingest.position.pk.type.UnsupportedKeyPosition; import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; import org.apache.shardingsphere.data.pipeline.common.job.progress.yaml.YamlInventoryIncrementalJobItemProgress; import org.apache.shardingsphere.data.pipeline.common.job.progress.yaml.YamlInventoryIncrementalJobItemProgressSwapper; +import org.apache.shardingsphere.data.pipeline.common.task.progress.InventoryTaskProgress; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.test.util.ConfigurationFileUtils; import org.junit.jupiter.api.Test; +import java.util.HashMap; +import java.util.Map; import java.util.Optional; import static org.hamcrest.CoreMatchers.instanceOf; @@ -60,10 +65,10 @@ void assertGetIncrementalPosition() { @Test void assertGetInventoryPosition() { InventoryIncrementalJobItemProgress actual = getJobItemProgress(ConfigurationFileUtils.readFile("job-progress.yaml")); - assertThat(actual.getInventory().getInventoryPosition("ds0").size(), is(2)); - assertThat(actual.getInventory().getInventoryPosition("ds0").get("ds0.t_1"), instanceOf(FinishedPosition.class)); - assertThat(actual.getInventory().getInventoryPosition("ds1").get("ds1.t_1"), instanceOf(PlaceholderPosition.class)); - assertThat(actual.getInventory().getInventoryPosition("ds1").get("ds1.t_2"), instanceOf(IntegerPrimaryKeyPosition.class)); + 
assertThat(actual.getInventory().getInventoryPosition("t_1").get("ds0.t_1#1"), instanceOf(FinishedPosition.class)); + assertThat(actual.getInventory().getInventoryPosition("t_1").get("ds1.t_1#1"), instanceOf(PlaceholderPosition.class)); + assertThat(actual.getInventory().getInventoryPosition("t_2").get("ds0.t_2#2"), instanceOf(FinishedPosition.class)); + assertThat(actual.getInventory().getInventoryPosition("t_2").get("ds1.t_2#2"), instanceOf(IntegerPrimaryKeyPosition.class)); } @Test @@ -76,6 +81,26 @@ void assertGetIncrementalDataLatestActiveTimeMillis() { assertThat(getJobItemProgress(ConfigurationFileUtils.readFile("job-progress-all-finished.yaml")).getIncremental().getIncrementalLatestActiveTimeMillis(), is(50L)); } + @Test + void assertGetProgressesCorrectly() { + Map progresses = new HashMap<>(); + progresses.put("ds.order_item#0", new InventoryTaskProgress(new IntegerPrimaryKeyPosition(1, 100))); + progresses.put("ds.order_item#1", new InventoryTaskProgress(new UnsupportedKeyPosition())); + progresses.put("ds.order#0", new InventoryTaskProgress(new FinishedPosition())); + progresses.put("ds.test_order#0", new InventoryTaskProgress(new StringPrimaryKeyPosition("1", "100"))); + JobItemInventoryTasksProgress progress = new JobItemInventoryTasksProgress(progresses); + Map orderPosition = progress.getInventoryPosition("order"); + assertThat(orderPosition.size(), is(1)); + assertThat(orderPosition.get("ds.order#0"), instanceOf(FinishedPosition.class)); + Map testOrderPosition = progress.getInventoryPosition("test_order"); + assertThat(testOrderPosition.size(), is(1)); + assertThat(testOrderPosition.get("ds.test_order#0"), instanceOf(StringPrimaryKeyPosition.class)); + Map orderItemPosition = progress.getInventoryPosition("order_item"); + assertThat(orderItemPosition.size(), is(2)); + assertThat(orderItemPosition.get("ds.order_item#0"), instanceOf(IntegerPrimaryKeyPosition.class)); + assertThat(orderItemPosition.get("ds.order_item#1"), 
instanceOf(UnsupportedKeyPosition.class)); + } + private InventoryIncrementalJobItemProgress getJobItemProgress(final String data) { return SWAPPER.swapToObject(YamlEngine.unmarshal(data, YamlInventoryIncrementalJobItemProgress.class)); } diff --git a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlInventoryIncrementalJobItemProgressSwapperTest.java b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlInventoryIncrementalJobItemProgressSwapperTest.java index 109080d638a27..31eac91f4f885 100644 --- a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlInventoryIncrementalJobItemProgressSwapperTest.java +++ b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/common/job/progress/yaml/YamlInventoryIncrementalJobItemProgressSwapperTest.java @@ -38,10 +38,10 @@ void assertFullSwapToYamlConfiguration() { assertThat(actual.getSourceDatabaseType(), is("H2")); assertThat(actual.getDataSourceName(), is("ds_0")); assertThat(actual.getInventory().getFinished().length, is(2)); - assertThat(actual.getInventory().getFinished(), is(new String[]{"ds0.t_2", "ds0.t_1"})); + assertThat(actual.getInventory().getFinished(), is(new String[]{"ds0.t_2#2", "ds0.t_1#1"})); assertThat(actual.getInventory().getUnfinished().size(), is(2)); - assertThat(actual.getInventory().getUnfinished().get("ds1.t_2"), is("i,1,2")); - assertThat(actual.getInventory().getUnfinished().get("ds1.t_1"), is("")); + assertThat(actual.getInventory().getUnfinished().get("ds1.t_2#2"), is("i,1,2")); + assertThat(actual.getInventory().getUnfinished().get("ds1.t_1#1"), is("")); assertThat(actual.getIncremental().getPosition().length(), is(0)); } diff --git a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/ConsistencyCheckDataBuilder.java 
b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/ConsistencyCheckDataBuilder.java new file mode 100644 index 0000000000000..9284bdff825cc --- /dev/null +++ b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/ConsistencyCheckDataBuilder.java @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.data.pipeline.core.consistencycheck; + +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.LinkedHashMap; +import java.util.Map; + +public final class ConsistencyCheckDataBuilder { + + /** + * Build fixed full type record. 
+ * + * @param id id + * @return built record + */ + public static Map buildFixedFullTypeRecord(final int id) { + Map result = new LinkedHashMap<>(); + result.put("id", id); + result.put("c_bool", true); + result.put("c_int1", Byte.MAX_VALUE); + result.put("c_int2", Short.MAX_VALUE); + result.put("c_int4", Integer.MAX_VALUE); + result.put("c_int8", Long.MAX_VALUE); + result.put("c_float", 1.23F); + result.put("c_double", 2.3456D); + result.put("c_decimal", BigDecimal.valueOf(1.23456789D)); + result.put("c_varchar", "ok"); + result.put("c_time", new Time(123456789L)); + result.put("c_date", new Date(123456789L)); + result.put("c_timestamp", new Timestamp(123456789L)); + result.put("c_array", new int[]{1, 2, 3}); + result.put("c_blob", null); + return result; + } + + /** + * Modify column value randomly. + * + * @param record record + * @param key which key will be modified + * @return original record + */ + public static Map modifyColumnValueRandomly(final Map record, final String key) { + Object value = record.get(key); + record.put(key, getModifiedValue(value)); + return record; + } + + private static Object getModifiedValue(final Object value) { + if (null == value) { + return new Object(); + } + if (value instanceof Boolean) { + return !((Boolean) value); + } + if (value instanceof Byte) { + return (byte) ((Byte) value - 1); + } + if (value instanceof Short) { + return (short) ((Short) value - 1); + } + if (value instanceof Integer) { + return (Integer) value - 1; + } + if (value instanceof Long) { + return (Long) value - 1L; + } + if (value instanceof Float) { + return (Float) value - 1F; + } + if (value instanceof Double) { + return (Double) value - 1D; + } + if (value instanceof BigDecimal) { + return ((BigDecimal) value).subtract(BigDecimal.ONE); + } + if (value instanceof String) { + return value + "-"; + } + if (value instanceof Time) { + return new Time(((Time) value).getTime() - 1); + } + if (value instanceof Date) { + return new Date(((Date) 
value).getTime() - 1); + } + if (value instanceof Timestamp) { + return new Timestamp(((Timestamp) value).getTime() - 1); + } + if (value instanceof int[]) { + int[] result = ((int[]) value).clone(); + result[0] = result[0] - 1; + return result; + } + return value; + } +} diff --git a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataMatchCalculatedResultTest.java b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataMatchCalculatedResultTest.java deleted file mode 100644 index 49948981762a2..0000000000000 --- a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/DataMatchCalculatedResultTest.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.data.pipeline.core.consistencycheck; - -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataMatchCalculatedResult; -import org.junit.jupiter.api.Test; - -import java.math.BigDecimal; -import java.math.RoundingMode; -import java.sql.Date; -import java.sql.Time; -import java.sql.Timestamp; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertNotEquals; - -class DataMatchCalculatedResultTest { - - @Test - void assertEmptyRecordsEquals() { - DataMatchCalculatedResult actual = new DataMatchCalculatedResult(0, Collections.emptyList()); - DataMatchCalculatedResult expected = new DataMatchCalculatedResult(0, Collections.emptyList()); - assertThat(actual, is(expected)); - } - - @Test - void assertFullTypeRecordsEquals() { - DataMatchCalculatedResult actual = new DataMatchCalculatedResult(1000, Arrays.asList(buildFixedFullTypeRecord(), buildFixedFullTypeRecord())); - DataMatchCalculatedResult expected = new DataMatchCalculatedResult(1000, Arrays.asList(buildFixedFullTypeRecord(), buildFixedFullTypeRecord())); - assertThat(actual, is(expected)); - } - - private List buildFixedFullTypeRecord() { - return Arrays.asList(true, Byte.MAX_VALUE, Short.MAX_VALUE, Integer.MAX_VALUE, Long.MAX_VALUE, 1.23F, 2.3456D, BigDecimal.valueOf(1.23456789D), "ok", - new Time(123456789L), new Date(123456789L), new Timestamp(123456789L), new int[]{1, 2, 3}, null); - } - - @Test - void assertFullTypeRecordsEqualsWithDifferentDecimalScale() { - DataMatchCalculatedResult expected = new DataMatchCalculatedResult(1000, Collections.singleton(buildFixedFullTypeRecord())); - List record = buildFixedFullTypeRecord(); - for (int index = 0; index < record.size(); index++) { - if (record.get(index) instanceof BigDecimal) { - BigDecimal decimal = (BigDecimal) 
record.get(index); - record.set(index, decimal.setScale(decimal.scale() + 1, RoundingMode.CEILING)); - } - } - DataMatchCalculatedResult actual = new DataMatchCalculatedResult(1000, Collections.singleton(record)); - assertThat(actual, is(expected)); - } - - @Test - void assertRecordsCountNotEquals() { - DataMatchCalculatedResult result1 = new DataMatchCalculatedResult(1000, Collections.singleton(Collections.singleton(buildFixedFullTypeRecord()))); - DataMatchCalculatedResult result2 = new DataMatchCalculatedResult(1000, Collections.emptyList()); - assertNotEquals(result1, result2); - } - - @Test - void assertMaxUniqueKeyValueNotEquals() { - DataMatchCalculatedResult result1 = new DataMatchCalculatedResult(1000, Collections.singleton(Collections.singleton(buildFixedFullTypeRecord()))); - DataMatchCalculatedResult result2 = new DataMatchCalculatedResult(1001, Collections.singleton(Collections.singleton(buildFixedFullTypeRecord()))); - assertNotEquals(result1, result2); - } - - @Test - void assertRandomColumnValueNotEquals() { - List record = buildFixedFullTypeRecord(); - DataMatchCalculatedResult result1 = new DataMatchCalculatedResult(1000, Collections.singleton(record)); - for (int index = 0; index < record.size(); index++) { - DataMatchCalculatedResult result2 = new DataMatchCalculatedResult(1000, Collections.singleton(modifyColumnValueRandomly(buildFixedFullTypeRecord(), index))); - assertNotEquals(result1, result2); - } - } - - private List modifyColumnValueRandomly(final List record, final int index) { - Object value = record.get(index); - record.set(index, getModifiedValue(value)); - return record; - } - - private Object getModifiedValue(final Object value) { - if (null == value) { - return new Object(); - } - if (value instanceof Boolean) { - return !((Boolean) value); - } - if (value instanceof Byte) { - return (Byte) value - 1; - } - if (value instanceof Short) { - return (Short) value - 1; - } - if (value instanceof Integer) { - return (Integer) value - 1; 
- } - if (value instanceof Long) { - return (Long) value - 1; - } - if (value instanceof Float) { - return (Float) value - 1; - } - if (value instanceof Double) { - return (Double) value - 1; - } - if (value instanceof BigDecimal) { - return ((BigDecimal) value).subtract(BigDecimal.ONE); - } - if (value instanceof String) { - return value + "-"; - } - if (value instanceof Time) { - return new Time(((Time) value).getTime() - 1); - } - if (value instanceof Date) { - return new Date(((Date) value).getTime() - 1); - } - if (value instanceof Timestamp) { - return new Timestamp(((Timestamp) value).getTime() - 1); - } - if (value instanceof int[]) { - int[] result = ((int[]) value).clone(); - result[0] = result[0] - 1; - return result; - } - return value; - } -} diff --git a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/RecordSingleTableInventoryCalculatedResultTest.java b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/RecordSingleTableInventoryCalculatedResultTest.java new file mode 100644 index 0000000000000..7603b8de76256 --- /dev/null +++ b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/result/RecordSingleTableInventoryCalculatedResultTest.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.result; + +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.ConsistencyCheckDataBuilder; +import org.junit.jupiter.api.Test; + +import java.math.BigDecimal; +import java.math.RoundingMode; +import java.util.Arrays; +import java.util.Collections; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertNotEquals; + +class RecordSingleTableInventoryCalculatedResultTest { + + @Test + void assertEmptyRecordsEquals() { + RecordSingleTableInventoryCalculatedResult actual = new RecordSingleTableInventoryCalculatedResult(0, Collections.emptyList()); + RecordSingleTableInventoryCalculatedResult expected = new RecordSingleTableInventoryCalculatedResult(0, Collections.emptyList()); + assertThat(actual, is(expected)); + } + + @Test + void assertFullTypeRecordsEquals() { + RecordSingleTableInventoryCalculatedResult actual = new RecordSingleTableInventoryCalculatedResult(1000, Arrays.asList(buildFixedFullTypeRecord(), buildFixedFullTypeRecord())); + RecordSingleTableInventoryCalculatedResult expected = new RecordSingleTableInventoryCalculatedResult(1000, Arrays.asList(buildFixedFullTypeRecord(), buildFixedFullTypeRecord())); + assertThat(actual, is(expected)); + } + + @Test + void assertFullTypeRecordsEqualsWithDifferentDecimalScale() { + RecordSingleTableInventoryCalculatedResult expected = new RecordSingleTableInventoryCalculatedResult(1000, 
Collections.singletonList(buildFixedFullTypeRecord())); + Map record = buildFixedFullTypeRecord(); + record.forEach((key, value) -> { + if (value instanceof BigDecimal) { + BigDecimal decimal = (BigDecimal) value; + record.put(key, decimal.setScale(decimal.scale() + 1, RoundingMode.CEILING)); + } + }); + RecordSingleTableInventoryCalculatedResult actual = new RecordSingleTableInventoryCalculatedResult(1000, Collections.singletonList(record)); + assertThat(actual, is(expected)); + } + + @Test + void assertRecordsCountNotEquals() { + RecordSingleTableInventoryCalculatedResult result1 = new RecordSingleTableInventoryCalculatedResult(1000, Collections.singletonList(buildFixedFullTypeRecord())); + RecordSingleTableInventoryCalculatedResult result2 = new RecordSingleTableInventoryCalculatedResult(1000, Collections.emptyList()); + assertNotEquals(result1, result2); + } + + @Test + void assertMaxUniqueKeyValueNotEquals() { + RecordSingleTableInventoryCalculatedResult result1 = new RecordSingleTableInventoryCalculatedResult(1000, Collections.singletonList(buildFixedFullTypeRecord())); + RecordSingleTableInventoryCalculatedResult result2 = new RecordSingleTableInventoryCalculatedResult(1001, Collections.singletonList(buildFixedFullTypeRecord())); + assertNotEquals(result1, result2); + } + + @Test + void assertRandomColumnValueNotEquals() { + Map record = buildFixedFullTypeRecord(); + RecordSingleTableInventoryCalculatedResult result1 = new RecordSingleTableInventoryCalculatedResult(1000, Collections.singletonList(record)); + record.forEach((key, value) -> { + RecordSingleTableInventoryCalculatedResult result2 = new RecordSingleTableInventoryCalculatedResult(1000, + Collections.singletonList(modifyColumnValueRandomly(buildFixedFullTypeRecord(), key))); + assertNotEquals(result1, result2); + }); + } + + private Map buildFixedFullTypeRecord() { + return ConsistencyCheckDataBuilder.buildFixedFullTypeRecord(1); + } + + private Map modifyColumnValueRandomly(final Map record, final 
String key) { + return ConsistencyCheckDataBuilder.modifyColumnValueRandomly(record, key); + } +} diff --git a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/CRC32MatchDataConsistencyCalculateAlgorithmTest.java b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/CRC32SingleTableInventoryCalculatorTest.java similarity index 80% rename from kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/CRC32MatchDataConsistencyCalculateAlgorithmTest.java rename to kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/CRC32SingleTableInventoryCalculatorTest.java index 7a3d27a723eaa..425cb081a4539 100644 --- a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/algorithm/CRC32MatchDataConsistencyCalculateAlgorithmTest.java +++ b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/CRC32SingleTableInventoryCalculatorTest.java @@ -15,12 +15,12 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm; +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator; +import org.apache.shardingsphere.data.pipeline.api.metadata.SchemaTableName; import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineColumnMetaData; import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceWrapper; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.DataConsistencyCalculateParameter; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCalculatedResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.SingleTableInventoryCalculatedResult; import org.apache.shardingsphere.data.pipeline.core.exception.data.PipelineTableDataConsistencyCheckLoadingFailedException; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; @@ -39,6 +39,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.Iterator; +import java.util.List; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; @@ -50,9 +51,9 @@ import static org.mockito.Mockito.when; @ExtendWith(MockitoExtension.class) -class CRC32MatchDataConsistencyCalculateAlgorithmTest { +class CRC32SingleTableInventoryCalculatorTest { - private DataConsistencyCalculateParameter parameter; + private SingleTableInventoryCalculateParameter parameter; @Mock private PipelineDataSourceWrapper pipelineDataSource; @@ -63,8 +64,9 @@ class CRC32MatchDataConsistencyCalculateAlgorithmTest { @BeforeEach void setUp() throws SQLException { DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, "FIXTURE"); - PipelineColumnMetaData uniqueKey = new PipelineColumnMetaData(1, "id", Types.INTEGER, "integer", false, true, true); - parameter = new 
DataConsistencyCalculateParameter(pipelineDataSource, null, "foo_tbl", Arrays.asList("foo_col", "bar_col"), databaseType, uniqueKey, Collections.emptyMap()); + List uniqueKeys = Collections.singletonList(new PipelineColumnMetaData(1, "id", Types.INTEGER, "integer", false, true, true)); + parameter = new SingleTableInventoryCalculateParameter(pipelineDataSource, new SchemaTableName(null, "foo_tbl"), Arrays.asList("foo_col", "bar_col"), uniqueKeys, Collections.emptyMap()); + when(pipelineDataSource.getDatabaseType()).thenReturn(databaseType); when(pipelineDataSource.getConnection()).thenReturn(connection); } @@ -74,7 +76,7 @@ void assertCalculateSuccess() throws SQLException { when(connection.prepareStatement("SELECT CRC32(foo_col) FROM foo_tbl")).thenReturn(preparedStatement0); PreparedStatement preparedStatement1 = mockPreparedStatement(456L, 10); when(connection.prepareStatement("SELECT CRC32(bar_col) FROM foo_tbl")).thenReturn(preparedStatement1); - Iterator actual = new CRC32MatchDataConsistencyCalculateAlgorithm().calculate(parameter).iterator(); + Iterator actual = new CRC32SingleTableInventoryCalculator().calculate(parameter).iterator(); assertThat(actual.next().getRecordsCount(), is(10)); assertFalse(actual.hasNext()); } @@ -91,6 +93,6 @@ private PreparedStatement mockPreparedStatement(final long expectedCRC32Result, @Test void assertCalculateFailed() throws SQLException { when(connection.prepareStatement(anyString())).thenThrow(new SQLException()); - assertThrows(PipelineTableDataConsistencyCheckLoadingFailedException.class, () -> new CRC32MatchDataConsistencyCalculateAlgorithm().calculate(parameter)); + assertThrows(PipelineTableDataConsistencyCheckLoadingFailedException.class, () -> new CRC32SingleTableInventoryCalculator().calculate(parameter)); } } diff --git a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/DataMatchTableDataConsistencyCheckerTest.java 
b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/DataMatchTableDataConsistencyCheckerTest.java new file mode 100644 index 0000000000000..01a3dddc40ec2 --- /dev/null +++ b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/DataMatchTableDataConsistencyCheckerTest.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator; + +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.DataMatchTableDataConsistencyChecker; +import org.apache.shardingsphere.data.pipeline.core.exception.param.PipelineInvalidParameterException; +import org.junit.jupiter.api.Test; + +import java.util.Arrays; +import java.util.Properties; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +class DataMatchTableDataConsistencyCheckerTest { + + @Test + void assertInitSuccess() { + for (String each : Arrays.asList("1", "1000")) { + new DataMatchTableDataConsistencyChecker().init(buildAlgorithmProperties(each)); + } + } + + @Test + void assertInitFailure() { + assertThrows(PipelineInvalidParameterException.class, () -> new DataMatchTableDataConsistencyChecker().init(buildAlgorithmProperties("xyz"))); + for (String each : Arrays.asList("0", "-1")) { + assertThrows(PipelineInvalidParameterException.class, () -> new DataMatchTableDataConsistencyChecker().init(buildAlgorithmProperties(each))); + } + } + + private Properties buildAlgorithmProperties(final String chunkSize) { + Properties result = new Properties(); + result.put("chunk-size", chunkSize); + return result; + } +} diff --git a/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/TableDataConsistencyCheckerFactoryTest.java b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/TableDataConsistencyCheckerFactoryTest.java new file mode 100644 index 0000000000000..0db13255e7fe7 --- /dev/null +++ b/kernel/data-pipeline/core/src/test/java/org/apache/shardingsphere/data/pipeline/core/consistencycheck/table/calculator/TableDataConsistencyCheckerFactoryTest.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator; + +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.CRC32MatchTableDataConsistencyChecker; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.DataMatchTableDataConsistencyChecker; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyChecker; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyCheckerFactory; +import org.junit.jupiter.api.Test; + +import java.util.Properties; + +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertNotEquals; + +class TableDataConsistencyCheckerFactoryTest { + + @Test + void assertNewInstanceTypeMatched() { + assertInstanceOf(DataMatchTableDataConsistencyChecker.class, TableDataConsistencyCheckerFactory.newInstance(null, new Properties())); + assertInstanceOf(DataMatchTableDataConsistencyChecker.class, TableDataConsistencyCheckerFactory.newInstance("DATA_MATCH", new Properties())); + assertInstanceOf(CRC32MatchTableDataConsistencyChecker.class, TableDataConsistencyCheckerFactory.newInstance("CRC32_MATCH", new Properties())); + } + + @Test + void 
assertNewInstancesDifferent() { + TableDataConsistencyChecker actual1 = TableDataConsistencyCheckerFactory.newInstance("DATA_MATCH", new Properties()); + TableDataConsistencyChecker actual2 = TableDataConsistencyCheckerFactory.newInstance("DATA_MATCH", new Properties()); + assertNotEquals(actual1, actual2); + } +} diff --git a/kernel/data-pipeline/core/src/test/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm b/kernel/data-pipeline/core/src/test/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm deleted file mode 100644 index 4cd6bf66a43b5..0000000000000 --- a/kernel/data-pipeline/core/src/test/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm +++ /dev/null @@ -1,19 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.CRC32MatchDataConsistencyCalculateAlgorithm -org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataMatchDataConsistencyCalculateAlgorithm diff --git a/kernel/data-pipeline/core/src/test/resources/job-progress.yaml b/kernel/data-pipeline/core/src/test/resources/job-progress.yaml index fa82043980c47..b9267ec1f2db6 100644 --- a/kernel/data-pipeline/core/src/test/resources/job-progress.yaml +++ b/kernel/data-pipeline/core/src/test/resources/job-progress.yaml @@ -23,10 +23,10 @@ incremental: position: '' inventory: finished: - - ds0.t_2 - - ds0.t_1 + - ds0.t_2#2 + - ds0.t_1#1 unfinished: - ds1.t_2: i,1,2 - ds1.t_1: '' + ds1.t_1#1: '' + ds1.t_2#2: i,1,2 sourceDatabaseType: H2 status: RUNNING diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/H2PipelineSQLBuilder.java b/kernel/data-pipeline/dialect/h2/src/main/java/org/apache/shardingsphere/data/pipeline/h2/sqlbuilder/H2PipelineSQLBuilder.java similarity index 94% rename from test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/H2PipelineSQLBuilder.java rename to kernel/data-pipeline/dialect/h2/src/main/java/org/apache/shardingsphere/data/pipeline/h2/sqlbuilder/H2PipelineSQLBuilder.java index e50a85f8c896b..6025f61e06bc2 100644 --- a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/H2PipelineSQLBuilder.java +++ b/kernel/data-pipeline/dialect/h2/src/main/java/org/apache/shardingsphere/data/pipeline/h2/sqlbuilder/H2PipelineSQLBuilder.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.test.it.data.pipeline.core.fixture; +package org.apache.shardingsphere.data.pipeline.h2.sqlbuilder; import org.apache.shardingsphere.data.pipeline.spi.sqlbuilder.DialectPipelineSQLBuilder; diff --git a/test/it/pipeline/src/test/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.spi.sqlbuilder.DialectPipelineSQLBuilder b/kernel/data-pipeline/dialect/h2/src/main/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.spi.sqlbuilder.DialectPipelineSQLBuilder similarity index 90% rename from test/it/pipeline/src/test/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.spi.sqlbuilder.DialectPipelineSQLBuilder rename to kernel/data-pipeline/dialect/h2/src/main/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.spi.sqlbuilder.DialectPipelineSQLBuilder index 582509d7fc440..525cc3bb50722 100644 --- a/test/it/pipeline/src/test/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.spi.sqlbuilder.DialectPipelineSQLBuilder +++ b/kernel/data-pipeline/dialect/h2/src/main/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.spi.sqlbuilder.DialectPipelineSQLBuilder @@ -15,4 +15,4 @@ # limitations under the License. 
# -org.apache.shardingsphere.test.it.data.pipeline.core.fixture.H2PipelineSQLBuilder +org.apache.shardingsphere.data.pipeline.h2.sqlbuilder.H2PipelineSQLBuilder diff --git a/kernel/data-pipeline/dialect/mysql/src/main/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLIncrementalDumper.java b/kernel/data-pipeline/dialect/mysql/src/main/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLIncrementalDumper.java index 0814054fa88d4..6c5cde7993a4c 100644 --- a/kernel/data-pipeline/dialect/mysql/src/main/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLIncrementalDumper.java +++ b/kernel/data-pipeline/dialect/mysql/src/main/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLIncrementalDumper.java @@ -47,10 +47,10 @@ import org.apache.shardingsphere.data.pipeline.mysql.ingest.client.MySQLClient; import org.apache.shardingsphere.data.pipeline.mysql.ingest.column.value.MySQLDataTypeHandler; import org.apache.shardingsphere.db.protocol.mysql.packet.binlog.row.column.value.string.MySQLBinaryString; -import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.core.connector.ConnectionPropertiesParser; +import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import java.io.Serializable; @@ -88,10 +88,10 @@ public MySQLIncrementalDumper(final DumperConfiguration dumperConfig, final Inge this.channel = channel; this.metaDataLoader = metaDataLoader; YamlJdbcConfiguration jdbcConfig = ((StandardPipelineDataSourceConfiguration) dumperConfig.getDataSourceConfig()).getJdbcConfig(); - log.info("incremental dump, jdbcUrl={}", jdbcConfig.getUrl()); 
ConnectionPropertiesParser parser = DatabaseTypedSPILoader.getService(ConnectionPropertiesParser.class, TypedSPILoader.getService(DatabaseType.class, "MySQL")); ConnectionProperties connectionProps = parser.parse(jdbcConfig.getUrl(), null, null); ConnectInfo connectInfo = new ConnectInfo(generateServerId(), connectionProps.getHostname(), connectionProps.getPort(), jdbcConfig.getUsername(), jdbcConfig.getPassword()); + log.info("incremental dump, jdbcUrl={}, serverId={}, hostname={}, port={}", jdbcConfig.getUrl(), connectInfo.getServerId(), connectInfo.getHost(), connectInfo.getPort()); client = new MySQLClient(connectInfo, dumperConfig.isDecodeWithTX()); catalog = connectionProps.getCatalog(); } @@ -237,6 +237,7 @@ private DataRecord createDataRecord(final String type, final AbstractRowsEvent r String tableName = dumperConfig.getLogicTableName(rowsEvent.getTableName()).getOriginal(); IngestPosition position = new BinlogPosition(rowsEvent.getFileName(), rowsEvent.getPosition(), rowsEvent.getServerId()); DataRecord result = new DataRecord(type, tableName, position, columnCount); + result.setActualTableName(rowsEvent.getTableName()); result.setCommitTime(rowsEvent.getTimestamp() * 1000); return result; } diff --git a/kernel/data-pipeline/dialect/mysql/src/main/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/client/PasswordEncryption.java b/kernel/data-pipeline/dialect/mysql/src/main/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/client/PasswordEncryption.java index ef3ac6086b7ab..7fa8fd3028f58 100644 --- a/kernel/data-pipeline/dialect/mysql/src/main/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/client/PasswordEncryption.java +++ b/kernel/data-pipeline/dialect/mysql/src/main/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/client/PasswordEncryption.java @@ -87,7 +87,7 @@ public static byte[] encryptWithSha2(final byte[] password, final byte[] seed) t */ @SneakyThrows(GeneralSecurityException.class) public static byte[] 
encryptWithRSAPublicKey(final String password, final byte[] seed, final String transformation, final String publicKey) { - byte[] formattedPassword = password != null ? Bytes.concat(password.getBytes(), new byte[]{0}) : new byte[]{0}; + byte[] formattedPassword = null == password ? new byte[]{0} : Bytes.concat(password.getBytes(), new byte[]{0}); return encryptWithRSAPublicKey(xor(formattedPassword, seed, formattedPassword.length), parseRSAPublicKey(publicKey), transformation); } diff --git a/kernel/data-pipeline/dialect/mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLIncrementalDumperTest.java b/kernel/data-pipeline/dialect/mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLIncrementalDumperTest.java index 9a8c137f5bbab..1426037ebdd50 100644 --- a/kernel/data-pipeline/dialect/mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLIncrementalDumperTest.java +++ b/kernel/data-pipeline/dialect/mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLIncrementalDumperTest.java @@ -31,6 +31,7 @@ import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineTableMetaData; import org.apache.shardingsphere.data.pipeline.common.datasource.DefaultPipelineDataSourceManager; import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceManager; +import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceWrapper; import org.apache.shardingsphere.data.pipeline.common.ingest.IngestDataChangeType; import org.apache.shardingsphere.data.pipeline.common.ingest.channel.EmptyAckCallback; import org.apache.shardingsphere.data.pipeline.common.ingest.channel.memory.SimpleMemoryPipelineChannel; @@ -40,7 +41,6 @@ import org.apache.shardingsphere.data.pipeline.mysql.ingest.binlog.event.PlaceholderEvent; import org.apache.shardingsphere.data.pipeline.mysql.ingest.binlog.event.UpdateRowsEvent; import 
org.apache.shardingsphere.data.pipeline.mysql.ingest.binlog.event.WriteRowsEvent; -import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -49,7 +49,6 @@ import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; -import javax.sql.DataSource; import java.io.Serializable; import java.lang.reflect.Method; import java.sql.Connection; @@ -76,8 +75,6 @@ @SuppressWarnings("unchecked") class MySQLIncrementalDumperTest { - private final PipelineDataSourceManager dataSourceManager = new DefaultPipelineDataSourceManager(); - private DumperConfiguration dumperConfig; private MySQLIncrementalDumper incrementalDumper; @@ -107,8 +104,9 @@ private DumperConfiguration mockDumperConfiguration() { @SneakyThrows(SQLException.class) private void initTableData(final DumperConfiguration dumperConfig) { - DataSource dataSource = new DefaultPipelineDataSourceManager().getDataSource(dumperConfig.getDataSourceConfig()); try ( + PipelineDataSourceManager dataSourceManager = new DefaultPipelineDataSourceManager(); + PipelineDataSourceWrapper dataSource = dataSourceManager.getDataSource(dumperConfig.getDataSourceConfig()); Connection connection = dataSource.getConnection(); Statement statement = connection.createStatement()) { statement.execute("DROP TABLE IF EXISTS t_order"); @@ -129,11 +127,6 @@ private List mockOrderColumnsMetaDataList() { return result; } - @AfterEach - void tearDown() { - dataSourceManager.close(); - } - @Test void assertWriteRowsEventWithoutCustomColumns() throws ReflectiveOperationException { assertWriteRowsEvent0(null, 3); diff --git a/kernel/data-pipeline/dialect/mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLPositionInitializerTest.java b/kernel/data-pipeline/dialect/mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLPositionInitializerTest.java index 
6b9d5c6e434f6..10b7976f75998 100644 --- a/kernel/data-pipeline/dialect/mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLPositionInitializerTest.java +++ b/kernel/data-pipeline/dialect/mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/MySQLPositionInitializerTest.java @@ -61,8 +61,8 @@ void setUp() throws SQLException { @Test void assertGetCurrentPosition() throws SQLException { - MySQLPositionInitializer mySQLPositionInitializer = new MySQLPositionInitializer(); - BinlogPosition actual = mySQLPositionInitializer.init(dataSource, ""); + MySQLPositionInitializer positionInitializer = new MySQLPositionInitializer(); + BinlogPosition actual = positionInitializer.init(dataSource, ""); assertThat(actual.getServerId(), is(SERVER_ID)); assertThat(actual.getFilename(), is(LOG_FILE_NAME)); assertThat(actual.getPosition(), is(LOG_POSITION)); diff --git a/kernel/data-pipeline/dialect/mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/client/netty/MySQLNegotiateHandlerTest.java b/kernel/data-pipeline/dialect/mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/client/netty/MySQLNegotiateHandlerTest.java index 87a71a1d68765..08da85668ea94 100644 --- a/kernel/data-pipeline/dialect/mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/client/netty/MySQLNegotiateHandlerTest.java +++ b/kernel/data-pipeline/dialect/mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/ingest/client/netty/MySQLNegotiateHandlerTest.java @@ -40,10 +40,12 @@ import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; +import java.sql.SQLException; + import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import static 
org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -101,7 +103,8 @@ void assertChannelReadOkPacket() throws ReflectiveOperationException { @Test void assertChannelReadErrorPacket() { - MySQLErrPacket errorPacket = new MySQLErrPacket(MySQLVendorError.ER_NO_DB_ERROR); + MySQLErrPacket errorPacket = new MySQLErrPacket( + new SQLException(MySQLVendorError.ER_NO_DB_ERROR.getReason(), MySQLVendorError.ER_NO_DB_ERROR.getSqlState().getValue(), MySQLVendorError.ER_NO_DB_ERROR.getVendorCode())); assertThrows(RuntimeException.class, () -> mysqlNegotiateHandler.channelRead(channelHandlerContext, errorPacket)); } } diff --git a/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/ingest/wal/decode/MppTableData.java b/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/ingest/wal/decode/MppTableData.java index 9743cd785c4e6..0e5ec3717a3cc 100644 --- a/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/ingest/wal/decode/MppTableData.java +++ b/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/ingest/wal/decode/MppTableData.java @@ -22,7 +22,7 @@ import lombok.Setter; /** - * Mppdb decoding Gson related class. + * Mppdb decoding json related class. 
*/ @Setter @Getter diff --git a/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/ingest/wal/decode/MppdbDecodingPlugin.java b/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/ingest/wal/decode/MppdbDecodingPlugin.java index b33f2b2e733c1..19fca8ed5a979 100644 --- a/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/ingest/wal/decode/MppdbDecodingPlugin.java +++ b/kernel/data-pipeline/dialect/opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/ingest/wal/decode/MppdbDecodingPlugin.java @@ -96,7 +96,7 @@ private AbstractWALEvent decodeDataIgnoreTX(final String dataText) { private AbstractRowEvent readTableEvent(final String mppData) { MppTableData mppTableData; - mppTableData = JsonUtils.readValue(mppData, MppTableData.class); + mppTableData = JsonUtils.fromJsonString(mppData, MppTableData.class); AbstractRowEvent result; String rowEventType = mppTableData.getOpType(); switch (rowEventType) { @@ -113,7 +113,7 @@ private AbstractRowEvent readTableEvent(final String mppData) { throw new IngestException("Unknown rowEventType: " + rowEventType); } String[] tableMetaData = mppTableData.getTableName().split("\\."); - result.setDatabaseName(tableMetaData[0]); + result.setSchemaName(tableMetaData[0]); result.setTableName(tableMetaData[1]); return result; } diff --git a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ddlgenerator/PostgreSQLColumnPropertiesAppender.java b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ddlgenerator/PostgreSQLColumnPropertiesAppender.java index 01824305842da..ea99972f0bd0d 100644 --- a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ddlgenerator/PostgreSQLColumnPropertiesAppender.java +++ 
b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ddlgenerator/PostgreSQLColumnPropertiesAppender.java @@ -203,7 +203,7 @@ private String getFullDataType(final Map column) { String namespace = (String) column.get("typnspname"); String typeName = (String) column.get("typname"); Integer numdims = (Integer) column.get("attndims"); - String schema = null != namespace ? namespace : ""; + String schema = null == namespace ? "" : namespace; String name = checkSchemaInName(typeName, schema); if (name.startsWith("_")) { if (null == numdims || 0 == numdims) { diff --git a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ddlgenerator/PostgreSQLIndexSQLGenerator.java b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ddlgenerator/PostgreSQLIndexSQLGenerator.java index f9f1a44d19c4b..775e327961c1b 100644 --- a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ddlgenerator/PostgreSQLIndexSQLGenerator.java +++ b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ddlgenerator/PostgreSQLIndexSQLGenerator.java @@ -80,8 +80,8 @@ private String doGenerateIndexSql(final Map indexData) { } private Map getIndexData(final Map context, final Map indexNode) { - Collection> indexProperties = fetchIndexProperties(context, indexNode); - Map result = indexProperties.iterator().next(); + Collection> indexProps = fetchIndexProperties(context, indexNode); + Map result = indexProps.iterator().next(); result.put("schema", context.get("schema")); result.put("table", context.get("name")); return result; diff --git a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/WALEventConverter.java 
b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/WALEventConverter.java index fc909f1eec01c..bd261596024ef 100644 --- a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/WALEventConverter.java +++ b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/WALEventConverter.java @@ -121,7 +121,8 @@ private DataRecord handleDeleteRowEvent(final DeleteRowEvent event, final Pipeli private DataRecord createDataRecord(final String type, final AbstractRowEvent rowsEvent, final int columnCount) { String tableName = dumperConfig.getLogicTableName(rowsEvent.getTableName()).getOriginal(); - DataRecord result = new DataRecord(type, tableName, new WALPosition(rowsEvent.getLogSequenceNumber()), columnCount); + DataRecord result = new DataRecord(type, rowsEvent.getSchemaName(), tableName, new WALPosition(rowsEvent.getLogSequenceNumber()), columnCount); + result.setActualTableName(rowsEvent.getTableName()); result.setCsn(rowsEvent.getCsn()); return result; } diff --git a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/decode/TestDecodingPlugin.java b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/decode/TestDecodingPlugin.java index 07bf1e4338432..a8c5499c6586b 100644 --- a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/decode/TestDecodingPlugin.java +++ b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/decode/TestDecodingPlugin.java @@ -81,7 +81,7 @@ private AbstractRowEvent readTableEvent(final ByteBuffer data) { throw new IngestException("Unknown rowEventType: " + rowEventType); } String[] tableMetaData = 
tableName.split("\\."); - result.setDatabaseName(tableMetaData[0]); + result.setSchemaName(tableMetaData[0]); result.setTableName(tableMetaData[1].substring(0, tableMetaData[1].length() - 1)); return result; } diff --git a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/event/AbstractRowEvent.java b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/event/AbstractRowEvent.java index 6cba665d907e9..bc57fbf4c1851 100644 --- a/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/event/AbstractRowEvent.java +++ b/kernel/data-pipeline/dialect/postgresql/src/main/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/event/AbstractRowEvent.java @@ -29,7 +29,7 @@ @ToString(callSuper = true) public abstract class AbstractRowEvent extends AbstractWALEvent { - private String databaseName; + private String schemaName; private String tableName; diff --git a/kernel/data-pipeline/dialect/postgresql/src/test/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/WALEventConverterTest.java b/kernel/data-pipeline/dialect/postgresql/src/test/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/WALEventConverterTest.java index 252e22a8e1d2f..1cd12fdb8b872 100644 --- a/kernel/data-pipeline/dialect/postgresql/src/test/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/WALEventConverterTest.java +++ b/kernel/data-pipeline/dialect/postgresql/src/test/java/org/apache/shardingsphere/data/pipeline/postgresql/ingest/wal/WALEventConverterTest.java @@ -31,6 +31,7 @@ import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineTableMetaData; import org.apache.shardingsphere.data.pipeline.common.datasource.DefaultPipelineDataSourceManager; import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceManager; +import 
org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceWrapper; import org.apache.shardingsphere.data.pipeline.common.ingest.IngestDataChangeType; import org.apache.shardingsphere.data.pipeline.common.metadata.loader.StandardPipelineTableMetaDataLoader; import org.apache.shardingsphere.data.pipeline.postgresql.ingest.wal.decode.PostgreSQLLogSequenceNumber; @@ -42,13 +43,11 @@ import org.apache.shardingsphere.data.pipeline.postgresql.ingest.wal.event.UpdateRowEvent; import org.apache.shardingsphere.data.pipeline.postgresql.ingest.wal.event.WriteRowEvent; import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnsupportedSQLOperationException; -import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.internal.configuration.plugins.Plugins; import org.postgresql.replication.LogSequenceNumber; -import javax.sql.DataSource; import java.lang.reflect.Method; import java.sql.Connection; import java.sql.SQLException; @@ -76,8 +75,6 @@ class WALEventConverterTest { private WALEventConverter walEventConverter; - private final PipelineDataSourceManager dataSourceManager = new DefaultPipelineDataSourceManager(); - private final LogSequenceNumber logSequenceNumber = LogSequenceNumber.valueOf("0/14EFDB8"); private PipelineTableMetaData pipelineTableMetaData; @@ -85,16 +82,12 @@ class WALEventConverterTest { @BeforeEach void setUp() { dumperConfig = mockDumperConfiguration(); + PipelineDataSourceManager dataSourceManager = new DefaultPipelineDataSourceManager(); walEventConverter = new WALEventConverter(dumperConfig, new StandardPipelineTableMetaDataLoader(dataSourceManager.getDataSource(dumperConfig.getDataSourceConfig()))); initTableData(dumperConfig); pipelineTableMetaData = new PipelineTableMetaData("t_order", mockOrderColumnsMetaDataMap(), Collections.emptyList()); } - @AfterEach - void tearDown() { - dataSourceManager.close(); - } - private 
DumperConfiguration mockDumperConfiguration() { DumperConfiguration result = new DumperConfiguration(); result.setDataSourceConfig(new StandardPipelineDataSourceConfiguration("jdbc:h2:mem:test;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false;MODE=PostgreSQL", "root", "root")); @@ -105,8 +98,9 @@ private DumperConfiguration mockDumperConfiguration() { @SneakyThrows(SQLException.class) private void initTableData(final DumperConfiguration dumperConfig) { - DataSource dataSource = new DefaultPipelineDataSourceManager().getDataSource(dumperConfig.getDataSourceConfig()); try ( + PipelineDataSourceManager dataSourceManager = new DefaultPipelineDataSourceManager(); + PipelineDataSourceWrapper dataSource = dataSourceManager.getDataSource(dumperConfig.getDataSourceConfig()); Connection connection = dataSource.getConnection(); Statement statement = connection.createStatement()) { statement.execute("DROP TABLE IF EXISTS t_order"); @@ -140,7 +134,7 @@ void assertWriteRowEventWithCustomColumns() throws ReflectiveOperationException private void assertWriteRowEvent0(final Map> targetTableColumnsMap, final int expectedColumnCount) throws ReflectiveOperationException { dumperConfig.setTargetTableColumnsMap(targetTableColumnsMap); WriteRowEvent rowsEvent = new WriteRowEvent(); - rowsEvent.setDatabaseName(""); + rowsEvent.setSchemaName(""); rowsEvent.setTableName("t_order"); rowsEvent.setAfterRow(Arrays.asList(101, 1, "OK")); Method method = WALEventConverter.class.getDeclaredMethod("handleWriteRowEvent", WriteRowEvent.class, PipelineTableMetaData.class); @@ -207,14 +201,14 @@ void assertUnknownTable() { void assertConvertFailure() { AbstractRowEvent event = new AbstractRowEvent() { }; - event.setDatabaseName(""); + event.setSchemaName(""); event.setTableName("t_order"); assertThrows(UnsupportedSQLOperationException.class, () -> walEventConverter.convert(event)); } private AbstractRowEvent mockWriteRowEvent() { WriteRowEvent result = new WriteRowEvent(); - result.setDatabaseName(""); + 
result.setSchemaName(""); result.setTableName("t_order"); result.setAfterRow(Arrays.asList("id", "user_id")); return result; @@ -222,7 +216,7 @@ private AbstractRowEvent mockWriteRowEvent() { private AbstractRowEvent mockUpdateRowEvent() { UpdateRowEvent result = new UpdateRowEvent(); - result.setDatabaseName(""); + result.setSchemaName(""); result.setTableName("t_order"); result.setAfterRow(Arrays.asList("id", "user_id")); return result; @@ -230,7 +224,7 @@ private AbstractRowEvent mockUpdateRowEvent() { private AbstractRowEvent mockDeleteRowEvent() { DeleteRowEvent result = new DeleteRowEvent(); - result.setDatabaseName(""); + result.setSchemaName(""); result.setTableName("t_order"); result.setPrimaryKeys(Collections.singletonList("id")); return result; @@ -238,7 +232,7 @@ private AbstractRowEvent mockDeleteRowEvent() { private AbstractRowEvent mockUnknownTableEvent() { WriteRowEvent result = new WriteRowEvent(); - result.setDatabaseName(""); + result.setSchemaName(""); result.setTableName("t_other"); return result; } diff --git a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckAlgorithmsExecutor.java b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckAlgorithmsExecutor.java index 5828efe938154..6b99ab543536c 100644 --- a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckAlgorithmsExecutor.java +++ b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckAlgorithmsExecutor.java @@ -38,14 +38,14 @@ public final class ShowMigrationCheckAlgorithmsExecutor implements QueryableRALE public Collection getRows(final ShowMigrationCheckAlgorithmsStatement sqlStatement) { InventoryIncrementalJobAPI jobAPI = (InventoryIncrementalJobAPI) 
TypedSPILoader.getService(PipelineJobAPI.class, "MIGRATION"); return jobAPI.listDataConsistencyCheckAlgorithms().stream().map( - each -> new LocalDataQueryResultRow(each.getType(), + each -> new LocalDataQueryResultRow(each.getType(), each.getTypeAliases(), each.getSupportedDatabaseTypes().stream().map(DatabaseType::getType).collect(Collectors.joining(",")), each.getDescription())) .collect(Collectors.toList()); } @Override public Collection getColumnNames() { - return Arrays.asList("type", "supported_database_types", "description"); + return Arrays.asList("type", "type_aliases", "supported_database_types", "description"); } @Override diff --git a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckStatusExecutor.java b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckStatusExecutor.java index 6b27af0a606a2..c5b117fd9f7db 100644 --- a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckStatusExecutor.java +++ b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckStatusExecutor.java @@ -49,14 +49,16 @@ public Collection getRows(final ShowMigrationCheckStatu private LocalDataQueryResultRow convert(final ConsistencyCheckJobItemInfo info) { String checkResult = null == info.getCheckSuccess() ? 
"" : info.getCheckSuccess().toString(); return new LocalDataQueryResultRow(Optional.ofNullable(info.getTableNames()).orElse(""), checkResult, Optional.ofNullable(info.getCheckFailedTableNames()).orElse(""), - String.valueOf(info.getFinishedPercentage()), info.getRemainingSeconds(), - Optional.ofNullable(info.getCheckBeginTime()).orElse(""), Optional.ofNullable(info.getCheckEndTime()).orElse(""), - info.getDurationSeconds(), Optional.ofNullable(info.getErrorMessage()).orElse("")); + info.isActive() ? Boolean.TRUE.toString() : Boolean.FALSE.toString(), + String.valueOf(info.getInventoryFinishedPercentage()), info.getInventoryRemainingSeconds(), info.getIncrementalIdleSeconds(), + Optional.ofNullable(info.getCheckBeginTime()).orElse(""), Optional.ofNullable(info.getCheckEndTime()).orElse(""), info.getDurationSeconds(), + info.getAlgorithmType(), Optional.ofNullable(info.getAlgorithmProps()).orElse(""), Optional.ofNullable(info.getErrorMessage()).orElse("")); } @Override public Collection getColumnNames() { - return Arrays.asList("tables", "result", "check_failed_tables", "finished_percentage", "remaining_seconds", "check_begin_time", "check_end_time", "duration_seconds", "error_message"); + return Arrays.asList("tables", "result", "check_failed_tables", "active", "inventory_finished_percentage", "inventory_remaining_seconds", "incremental_idle_seconds", + "check_begin_time", "check_end_time", "duration_seconds", "algorithm_type", "algorithm_props", "error_message"); } @Override diff --git a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/CheckMigrationJobUpdater.java b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/CheckMigrationJobUpdater.java index b18055ded38dc..0c2ec012b5f6f 100644 --- a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/CheckMigrationJobUpdater.java +++ 
b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/CheckMigrationJobUpdater.java @@ -17,10 +17,15 @@ package org.apache.shardingsphere.migration.distsql.handler.update; +import org.apache.shardingsphere.data.pipeline.core.exception.param.PipelineInvalidParameterException; +import org.apache.shardingsphere.data.pipeline.core.job.progress.PipelineJobProgressDetector; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.api.impl.ConsistencyCheckJobAPI; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.api.pojo.CreateConsistencyCheckJobParameter; +import org.apache.shardingsphere.data.pipeline.scenario.migration.api.impl.MigrationJobAPI; +import org.apache.shardingsphere.data.pipeline.scenario.migration.config.MigrationJobConfiguration; import org.apache.shardingsphere.distsql.handler.ral.update.RALUpdater; import org.apache.shardingsphere.distsql.parser.segment.AlgorithmSegment; +import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.migration.distsql.statement.CheckMigrationStatement; import java.sql.SQLException; @@ -31,14 +36,24 @@ */ public final class CheckMigrationJobUpdater implements RALUpdater { - private final ConsistencyCheckJobAPI jobAPI = new ConsistencyCheckJobAPI(); + private final ConsistencyCheckJobAPI checkJobAPI = new ConsistencyCheckJobAPI(); + + private final MigrationJobAPI migrationJobAPI = new MigrationJobAPI(); @Override public void executeUpdate(final String databaseName, final CheckMigrationStatement sqlStatement) throws SQLException { AlgorithmSegment typeStrategy = sqlStatement.getTypeStrategy(); String algorithmTypeName = null == typeStrategy ? null : typeStrategy.getName(); Properties algorithmProps = null == typeStrategy ? 
null : typeStrategy.getProps(); - jobAPI.createJobAndStart(new CreateConsistencyCheckJobParameter(sqlStatement.getJobId(), algorithmTypeName, algorithmProps)); + String jobId = sqlStatement.getJobId(); + MigrationJobConfiguration jobConfig = migrationJobAPI.getJobConfiguration(jobId); + verifyInventoryFinished(jobConfig); + checkJobAPI.createJobAndStart(new CreateConsistencyCheckJobParameter(jobId, algorithmTypeName, algorithmProps, jobConfig.getSourceDatabaseType(), jobConfig.getTargetDatabaseType())); + } + + private void verifyInventoryFinished(final MigrationJobConfiguration jobConfig) { + ShardingSpherePreconditions.checkState(PipelineJobProgressDetector.isInventoryFinished(jobConfig.getJobShardingCount(), migrationJobAPI.getJobProgress(jobConfig).values()), + () -> new PipelineInvalidParameterException("Inventory is not finished.")); } @Override diff --git a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/RegisterMigrationSourceStorageUnitUpdater.java b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/RegisterMigrationSourceStorageUnitUpdater.java index c62fa995f6603..3f89bbe8794ec 100644 --- a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/RegisterMigrationSourceStorageUnitUpdater.java +++ b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/RegisterMigrationSourceStorageUnitUpdater.java @@ -20,14 +20,14 @@ import org.apache.shardingsphere.data.pipeline.common.context.PipelineContextKey; import org.apache.shardingsphere.data.pipeline.scenario.migration.api.impl.MigrationJobAPI; import org.apache.shardingsphere.distsql.handler.ral.update.RALUpdater; -import org.apache.shardingsphere.distsql.handler.validate.DataSourcePropertiesValidateHandler; +import 
org.apache.shardingsphere.distsql.handler.validate.DataSourcePoolPropertiesValidateHandler; import org.apache.shardingsphere.distsql.parser.segment.DataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.HostnameAndPortBasedDataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.URLBasedDataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.converter.DataSourceSegmentsConverter; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeFactory; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnsupportedSQLOperationException; import org.apache.shardingsphere.migration.distsql.statement.RegisterMigrationSourceStorageUnitStatement; @@ -43,7 +43,7 @@ public final class RegisterMigrationSourceStorageUnitUpdater implements RALUpdat private final MigrationJobAPI jobAPI = new MigrationJobAPI(); - private final DataSourcePropertiesValidateHandler validateHandler = new DataSourcePropertiesValidateHandler(); + private final DataSourcePoolPropertiesValidateHandler validateHandler = new DataSourcePoolPropertiesValidateHandler(); @Override public void executeUpdate(final String databaseName, final RegisterMigrationSourceStorageUnitStatement sqlStatement) { @@ -52,9 +52,9 @@ public void executeUpdate(final String databaseName, final RegisterMigrationSour () -> new UnsupportedSQLOperationException("Not currently support add hostname and port, please use url")); URLBasedDataSourceSegment urlBasedDataSourceSegment = (URLBasedDataSourceSegment) dataSources.get(0); DatabaseType databaseType = DatabaseTypeFactory.get(urlBasedDataSourceSegment.getUrl()); - Map 
sourcePropertiesMap = DataSourceSegmentsConverter.convert(databaseType, dataSources); - validateHandler.validate(sourcePropertiesMap); - jobAPI.addMigrationSourceResources(PipelineContextKey.buildForProxy(), sourcePropertiesMap); + Map propsMap = DataSourceSegmentsConverter.convert(databaseType, dataSources); + validateHandler.validate(propsMap); + jobAPI.addMigrationSourceResources(PipelineContextKey.buildForProxy(), propsMap); } @Override diff --git a/kernel/data-pipeline/distsql/handler/src/test/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckAlgorithmsExecutorTest.java b/kernel/data-pipeline/distsql/handler/src/test/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckAlgorithmsExecutorTest.java index c458446260a75..2cca2b0748650 100644 --- a/kernel/data-pipeline/distsql/handler/src/test/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckAlgorithmsExecutorTest.java +++ b/kernel/data-pipeline/distsql/handler/src/test/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckAlgorithmsExecutorTest.java @@ -31,9 +31,10 @@ class ShowMigrationCheckAlgorithmsExecutorTest { void assertGetColumnNames() { ShowMigrationCheckAlgorithmsExecutor executor = new ShowMigrationCheckAlgorithmsExecutor(); Collection columns = executor.getColumnNames(); - assertThat(columns.size(), is(3)); + assertThat(columns.size(), is(4)); Iterator iterator = columns.iterator(); assertThat(iterator.next(), is("type")); + assertThat(iterator.next(), is("type_aliases")); assertThat(iterator.next(), is("supported_database_types")); assertThat(iterator.next(), is("description")); } diff --git a/kernel/data-pipeline/distsql/handler/src/test/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckStatusExecutorTest.java 
b/kernel/data-pipeline/distsql/handler/src/test/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckStatusExecutorTest.java index 64deb6be45942..4f2c4b66145f1 100644 --- a/kernel/data-pipeline/distsql/handler/src/test/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckStatusExecutorTest.java +++ b/kernel/data-pipeline/distsql/handler/src/test/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckStatusExecutorTest.java @@ -32,16 +32,20 @@ class ShowMigrationCheckStatusExecutorTest { @Test void assertGetColumnNames() { Collection columns = executor.getColumnNames(); - assertThat(columns.size(), is(9)); + assertThat(columns.size(), is(13)); Iterator iterator = columns.iterator(); assertThat(iterator.next(), is("tables")); assertThat(iterator.next(), is("result")); assertThat(iterator.next(), is("check_failed_tables")); - assertThat(iterator.next(), is("finished_percentage")); - assertThat(iterator.next(), is("remaining_seconds")); + assertThat(iterator.next(), is("active")); + assertThat(iterator.next(), is("inventory_finished_percentage")); + assertThat(iterator.next(), is("inventory_remaining_seconds")); + assertThat(iterator.next(), is("incremental_idle_seconds")); assertThat(iterator.next(), is("check_begin_time")); assertThat(iterator.next(), is("check_end_time")); assertThat(iterator.next(), is("duration_seconds")); + assertThat(iterator.next(), is("algorithm_type")); + assertThat(iterator.next(), is("algorithm_props")); assertThat(iterator.next(), is("error_message")); } } diff --git a/kernel/data-pipeline/distsql/parser/src/main/java/org/apache/shardingsphere/migration/distsql/parser/core/MigrationDistSQLStatementVisitor.java b/kernel/data-pipeline/distsql/parser/src/main/java/org/apache/shardingsphere/migration/distsql/parser/core/MigrationDistSQLStatementVisitor.java index 995d434f187d9..cef053312ebfc 100644 --- 
a/kernel/data-pipeline/distsql/parser/src/main/java/org/apache/shardingsphere/migration/distsql/parser/core/MigrationDistSQLStatementVisitor.java +++ b/kernel/data-pipeline/distsql/parser/src/main/java/org/apache/shardingsphere/migration/distsql/parser/core/MigrationDistSQLStatementVisitor.java @@ -36,11 +36,13 @@ import org.apache.shardingsphere.distsql.parser.autogen.MigrationDistSQLStatementParser.ShowMigrationListContext; import org.apache.shardingsphere.distsql.parser.autogen.MigrationDistSQLStatementParser.ShowMigrationSourceStorageUnitsContext; import org.apache.shardingsphere.distsql.parser.autogen.MigrationDistSQLStatementParser.ShowMigrationStatusContext; +import org.apache.shardingsphere.distsql.parser.autogen.MigrationDistSQLStatementParser.SourceTableNameContext; import org.apache.shardingsphere.distsql.parser.autogen.MigrationDistSQLStatementParser.StartMigrationCheckContext; import org.apache.shardingsphere.distsql.parser.autogen.MigrationDistSQLStatementParser.StartMigrationContext; import org.apache.shardingsphere.distsql.parser.autogen.MigrationDistSQLStatementParser.StopMigrationCheckContext; import org.apache.shardingsphere.distsql.parser.autogen.MigrationDistSQLStatementParser.StopMigrationContext; import org.apache.shardingsphere.distsql.parser.autogen.MigrationDistSQLStatementParser.StorageUnitDefinitionContext; +import org.apache.shardingsphere.distsql.parser.autogen.MigrationDistSQLStatementParser.TargetTableNameContext; import org.apache.shardingsphere.distsql.parser.autogen.MigrationDistSQLStatementParser.UnregisterMigrationSourceStorageUnitContext; import org.apache.shardingsphere.distsql.parser.segment.AlgorithmSegment; import org.apache.shardingsphere.distsql.parser.segment.DataSourceSegment; @@ -82,16 +84,21 @@ public final class MigrationDistSQLStatementVisitor extends MigrationDistSQLStat @Override public ASTNode visitMigrateTable(final MigrateTableContext ctx) { - List source = 
Splitter.on('.').splitToList(getRequiredIdentifierValue(ctx.sourceTableName())); - List target = Splitter.on('.').splitToList(getRequiredIdentifierValue(ctx.targetTableName())); + SourceTargetEntry sourceTargetEntry = buildSourceTargetEntry(ctx.sourceTableName(), ctx.targetTableName()); + return new MigrateTableStatement(Collections.singletonList(sourceTargetEntry), sourceTargetEntry.getTargetDatabaseName()); + } + + private SourceTargetEntry buildSourceTargetEntry(final SourceTableNameContext sourceContext, final TargetTableNameContext targetContext) { + List source = Splitter.on('.').splitToList(getRequiredIdentifierValue(sourceContext)); + List target = Splitter.on('.').splitToList(getRequiredIdentifierValue(targetContext)); String sourceResourceName = source.get(0); String sourceSchemaName = 3 == source.size() ? source.get(1) : null; String sourceTableName = source.get(source.size() - 1); String targetDatabaseName = target.size() > 1 ? target.get(0) : null; String targetTableName = target.get(target.size() - 1); - SourceTargetEntry sourceTargetEntry = new SourceTargetEntry(targetDatabaseName, new DataNode(sourceResourceName, sourceTableName), targetTableName); - sourceTargetEntry.getSource().setSchemaName(sourceSchemaName); - return new MigrateTableStatement(Collections.singletonList(sourceTargetEntry), targetDatabaseName); + SourceTargetEntry result = new SourceTargetEntry(targetDatabaseName, new DataNode(sourceResourceName, sourceTableName), targetTableName); + result.getSource().setSchemaName(sourceSchemaName); + return result; } private String getRequiredIdentifierValue(final ParseTree context) { @@ -152,10 +159,10 @@ public ASTNode visitStorageUnitDefinition(final MigrationDistSQLStatementParser. String user = getIdentifierValue(ctx.user()); String password = null == ctx.password() ? "" : getPassword(ctx.password()); Properties props = getProperties(ctx.propertiesDefinition()); - return null != ctx.urlSource() ? 
new URLBasedDataSourceSegment(getIdentifierValue(ctx.storageUnitName()), - getIdentifierValue(ctx.urlSource().url()), user, password, props) - : new HostnameAndPortBasedDataSourceSegment(getIdentifierValue(ctx.storageUnitName()), - getIdentifierValue(ctx.simpleSource().hostname()), ctx.simpleSource().port().getText(), getIdentifierValue(ctx.simpleSource().dbName()), user, password, props); + return null == ctx.urlSource() + ? new HostnameAndPortBasedDataSourceSegment(getIdentifierValue(ctx.storageUnitName()), + getIdentifierValue(ctx.simpleSource().hostname()), ctx.simpleSource().port().getText(), getIdentifierValue(ctx.simpleSource().dbName()), user, password, props) + : new URLBasedDataSourceSegment(getIdentifierValue(ctx.storageUnitName()), getIdentifierValue(ctx.urlSource().url()), user, password, props); } private String getPassword(final PasswordContext ctx) { diff --git a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java index 27a80862a2ff0..3830feca91c56 100644 --- a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java +++ b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/api/impl/CDCJobAPI.java @@ -76,12 +76,11 @@ import org.apache.shardingsphere.data.pipeline.spi.ratelimit.JobRateLimitAlgorithm; import org.apache.shardingsphere.elasticjob.infra.pojo.JobConfigurationPOJO; import org.apache.shardingsphere.elasticjob.lite.api.bootstrap.impl.OneOffJobBootstrap; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; +import 
org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.yaml.config.pojo.YamlRootConfiguration; -import org.apache.shardingsphere.infra.yaml.config.pojo.rule.YamlRuleConfiguration; import org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; import org.apache.shardingsphere.infra.yaml.config.swapper.rule.YamlRuleConfigurationSwapperEngine; @@ -160,15 +159,14 @@ private YamlCDCJobConfiguration getYamlCDCJobConfiguration(final StreamDataParam } private ShardingSpherePipelineDataSourceConfiguration getDataSourceConfiguration(final ShardingSphereDatabase database) { - Map> dataSourceProps = new HashMap<>(); - for (Entry entry : database.getResourceMetaData().getDataSourcePropsMap().entrySet()) { - dataSourceProps.put(entry.getKey(), dataSourceConfigSwapper.swapToMap(entry.getValue())); + Map> dataSourcePoolProps = new HashMap<>(); + for (Entry entry : database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().entrySet()) { + dataSourcePoolProps.put(entry.getKey(), dataSourceConfigSwapper.swapToMap(entry.getValue().getDataSourcePoolProperties())); } YamlRootConfiguration targetRootConfig = new YamlRootConfiguration(); targetRootConfig.setDatabaseName(database.getName()); - targetRootConfig.setDataSources(dataSourceProps); - Collection yamlRuleConfigurations = ruleConfigSwapperEngine.swapToYamlRuleConfigurations(database.getRuleMetaData().getConfigurations()); - targetRootConfig.setRules(yamlRuleConfigurations); + targetRootConfig.setDataSources(dataSourcePoolProps); + targetRootConfig.setRules(ruleConfigSwapperEngine.swapToYamlRuleConfigurations(database.getRuleMetaData().getConfigurations())); return new ShardingSpherePipelineDataSourceConfiguration(targetRootConfig); } @@ -368,8 +366,8 @@ public void 
rollback(final String jobId) throws SQLException { } @Override - protected PipelineDataConsistencyChecker buildPipelineDataConsistencyChecker(final PipelineJobConfiguration pipelineJobConfig, final InventoryIncrementalProcessContext processContext, - final ConsistencyCheckJobItemProgressContext progressContext) { + public PipelineDataConsistencyChecker buildPipelineDataConsistencyChecker(final PipelineJobConfiguration pipelineJobConfig, final InventoryIncrementalProcessContext processContext, + final ConsistencyCheckJobItemProgressContext progressContext) { throw new UnsupportedOperationException(); } diff --git a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/prepare/CDCJobPreparer.java b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/prepare/CDCJobPreparer.java index dfa41e4f924f3..f91a67dc1dff0 100644 --- a/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/prepare/CDCJobPreparer.java +++ b/kernel/data-pipeline/scenario/cdc/core/src/main/java/org/apache/shardingsphere/data/pipeline/cdc/core/prepare/CDCJobPreparer.java @@ -25,13 +25,14 @@ import org.apache.shardingsphere.data.pipeline.api.ingest.position.IngestPosition; import org.apache.shardingsphere.data.pipeline.cdc.api.impl.CDCJobAPI; import org.apache.shardingsphere.data.pipeline.cdc.config.task.CDCTaskConfiguration; -import org.apache.shardingsphere.data.pipeline.cdc.context.CDCProcessContext; import org.apache.shardingsphere.data.pipeline.cdc.context.CDCJobItemContext; +import org.apache.shardingsphere.data.pipeline.cdc.context.CDCProcessContext; import org.apache.shardingsphere.data.pipeline.cdc.core.importer.CDCChannelProgressPair; import org.apache.shardingsphere.data.pipeline.cdc.core.importer.CDCImporter; import org.apache.shardingsphere.data.pipeline.cdc.core.task.CDCIncrementalTask; import 
org.apache.shardingsphere.data.pipeline.cdc.core.task.CDCInventoryTask; import org.apache.shardingsphere.data.pipeline.common.config.ImporterConfiguration; +import org.apache.shardingsphere.data.pipeline.common.ingest.position.FinishedPosition; import org.apache.shardingsphere.data.pipeline.common.job.progress.InventoryIncrementalJobItemProgress; import org.apache.shardingsphere.data.pipeline.common.job.progress.JobItemIncrementalTasksProgress; import org.apache.shardingsphere.data.pipeline.common.task.progress.IncrementalTaskProgress; @@ -126,7 +127,9 @@ private void initInventoryTasks(final CDCJobItemContext jobItemContext, final At importerConfig.getRateLimitAlgorithm()); jobItemContext.getInventoryTasks().add(new CDCInventoryTask(PipelineTaskUtils.generateInventoryTaskId(each), processContext.getInventoryDumperExecuteEngine(), processContext.getInventoryImporterExecuteEngine(), dumper, importer, position)); - importerUsed.set(true); + if (!(each.getPosition() instanceof FinishedPosition)) { + importerUsed.set(true); + } } log.info("initInventoryTasks cost {} ms", System.currentTimeMillis() - startTimeMillis); } diff --git a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPI.java b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPI.java index d2d441e2f5e2d..8077fb46dff0f 100644 --- a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPI.java +++ b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPI.java @@ -31,13 +31,15 @@ import org.apache.shardingsphere.data.pipeline.common.job.progress.ConsistencyCheckJobItemProgress; import 
org.apache.shardingsphere.data.pipeline.common.job.progress.yaml.YamlConsistencyCheckJobItemProgress; import org.apache.shardingsphere.data.pipeline.common.job.progress.yaml.YamlConsistencyCheckJobItemProgressSwapper; -import org.apache.shardingsphere.data.pipeline.common.job.type.JobType; import org.apache.shardingsphere.data.pipeline.common.job.type.JobCodeRegistry; +import org.apache.shardingsphere.data.pipeline.common.job.type.JobType; import org.apache.shardingsphere.data.pipeline.common.pojo.ConsistencyCheckJobItemInfo; import org.apache.shardingsphere.data.pipeline.common.pojo.PipelineJobInfo; import org.apache.shardingsphere.data.pipeline.common.registrycenter.repository.GovernanceRepositoryAPI; import org.apache.shardingsphere.data.pipeline.core.consistencycheck.ConsistencyCheckJobItemProgressContext; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyCheckerFactory; +import org.apache.shardingsphere.data.pipeline.core.exception.data.UnsupportedPipelineDatabaseTypeException; import org.apache.shardingsphere.data.pipeline.core.exception.job.ConsistencyCheckJobNotFoundException; import org.apache.shardingsphere.data.pipeline.core.exception.job.UncompletedConsistencyCheckJobExistsException; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; @@ -55,6 +57,7 @@ import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.context.ConsistencyCheckJobItemContext; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.util.ConsistencyCheckSequence; import org.apache.shardingsphere.elasticjob.infra.pojo.JobConfigurationPOJO; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import 
org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; @@ -108,6 +111,7 @@ public String createJobAndStart(final CreateConsistencyCheckJobParameter param) throw new UncompletedConsistencyCheckJobExistsException(latestCheckJobId.get()); } } + verifyPipelineDatabaseType(param); PipelineContextKey contextKey = PipelineJobIdUtils.parseContextKey(parentJobId); String result = marshalJobId(latestCheckJobId.map(s -> new ConsistencyCheckJobId(contextKey, parentJobId, s)).orElseGet(() -> new ConsistencyCheckJobId(contextKey, parentJobId))); repositoryAPI.persistLatestCheckJobId(parentJobId, result); @@ -118,17 +122,24 @@ public String createJobAndStart(final CreateConsistencyCheckJobParameter param) yamlConfig.setParentJobId(parentJobId); yamlConfig.setAlgorithmTypeName(param.getAlgorithmTypeName()); yamlConfig.setAlgorithmProps(param.getAlgorithmProps()); + yamlConfig.setSourceDatabaseType(param.getSourceDatabaseType().getType()); start(new YamlConsistencyCheckJobConfigurationSwapper().swapToObject(yamlConfig)); return result; } + private void verifyPipelineDatabaseType(final CreateConsistencyCheckJobParameter param) { + Collection supportedDatabaseTypes = TableDataConsistencyCheckerFactory.newInstance(param.getAlgorithmTypeName(), param.getAlgorithmProps()).getSupportedDatabaseTypes(); + ShardingSpherePreconditions.checkState(supportedDatabaseTypes.contains(param.getSourceDatabaseType()), () -> new UnsupportedPipelineDatabaseTypeException(param.getSourceDatabaseType())); + ShardingSpherePreconditions.checkState(supportedDatabaseTypes.contains(param.getTargetDatabaseType()), () -> new UnsupportedPipelineDatabaseTypeException(param.getTargetDatabaseType())); + } + /** * Get latest data consistency check result. 
* * @param parentJobId parent job id * @return latest data consistency check result */ - public Map getLatestDataConsistencyCheckResult(final String parentJobId) { + public Map getLatestDataConsistencyCheckResult(final String parentJobId) { GovernanceRepositoryAPI governanceRepositoryAPI = PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(parentJobId)); Optional latestCheckJobId = governanceRepositoryAPI.getLatestCheckJobId(parentJobId); if (!latestCheckJobId.isPresent()) { @@ -149,7 +160,8 @@ private String convertJobItemProgress(final PipelineJobItemContext jobItemContex String tableNames = String.join(",", progressContext.getTableNames()); String ignoredTableNames = String.join(",", progressContext.getIgnoredTableNames()); ConsistencyCheckJobItemProgress jobItemProgress = new ConsistencyCheckJobItemProgress(tableNames, ignoredTableNames, progressContext.getCheckedRecordsCount().get(), - progressContext.getRecordsCount(), progressContext.getCheckBeginTimeMillis(), progressContext.getCheckEndTimeMillis(), progressContext.getTableCheckPositions()); + progressContext.getRecordsCount(), progressContext.getCheckBeginTimeMillis(), progressContext.getCheckEndTimeMillis(), + progressContext.getSourceTableCheckPositions(), progressContext.getTargetTableCheckPositions(), progressContext.getSourceDatabaseType()); jobItemProgress.setStatus(context.getStatus()); return YamlEngine.marshal(swapper.swapToYamlConfiguration(jobItemProgress)); } @@ -246,14 +258,14 @@ public List getJobItemInfos(final String parentJobI Optional latestCheckJobId = governanceRepositoryAPI.getLatestCheckJobId(parentJobId); ShardingSpherePreconditions.checkState(latestCheckJobId.isPresent(), () -> new ConsistencyCheckJobNotFoundException(parentJobId)); String checkJobId = latestCheckJobId.get(); - Optional progressOptional = getJobItemProgress(checkJobId, 0); - if (!progressOptional.isPresent()) { + Optional progress = getJobItemProgress(checkJobId, 0); + if 
(!progress.isPresent()) { return Collections.emptyList(); } List result = new LinkedList<>(); - ConsistencyCheckJobItemProgress jobItemProgress = progressOptional.get(); + ConsistencyCheckJobItemProgress jobItemProgress = progress.get(); if (!Strings.isNullOrEmpty(jobItemProgress.getIgnoredTableNames())) { - Map checkJobResult = governanceRepositoryAPI.getCheckJobResult(parentJobId, latestCheckJobId.get()); + Map checkJobResult = governanceRepositoryAPI.getCheckJobResult(parentJobId, latestCheckJobId.get()); result.addAll(buildIgnoredTableInfo(jobItemProgress.getIgnoredTableNames().split(","), checkJobResult)); } if (Objects.equals(jobItemProgress.getIgnoredTableNames(), jobItemProgress.getTableNames())) { @@ -263,7 +275,7 @@ public List getJobItemInfos(final String parentJobI return result; } - private List buildIgnoredTableInfo(final String[] ignoredTables, final Map checkJobResult) { + private List buildIgnoredTableInfo(final String[] ignoredTables, final Map checkJobResult) { if (null == ignoredTables) { return Collections.emptyList(); } @@ -272,7 +284,7 @@ private List buildIgnoredTableInfo(final String[] i ConsistencyCheckJobItemInfo info = new ConsistencyCheckJobItemInfo(); info.setTableNames(each); info.setCheckSuccess(null); - DataConsistencyCheckResult checkResult = checkJobResult.get(each); + TableDataConsistencyCheckResult checkResult = checkJobResult.get(each); if (null != checkResult && checkResult.isIgnored()) { info.setErrorMessage(checkResult.getIgnoredType().getMessage()); } @@ -286,14 +298,16 @@ private ConsistencyCheckJobItemInfo getJobItemInfo(final String parentJobId) { Optional latestCheckJobId = governanceRepositoryAPI.getLatestCheckJobId(parentJobId); ShardingSpherePreconditions.checkState(latestCheckJobId.isPresent(), () -> new ConsistencyCheckJobNotFoundException(parentJobId)); String checkJobId = latestCheckJobId.get(); - Optional progressOptional = getJobItemProgress(checkJobId, 0); + Optional progress = getJobItemProgress(checkJobId, 
0); ConsistencyCheckJobItemInfo result = new ConsistencyCheckJobItemInfo(); - if (!progressOptional.isPresent()) { + JobConfigurationPOJO jobConfigPOJO = getElasticJobConfigPOJO(checkJobId); + result.setActive(!jobConfigPOJO.isDisabled()); + if (!progress.isPresent()) { return result; } - ConsistencyCheckJobItemProgress jobItemProgress = progressOptional.get(); + ConsistencyCheckJobItemProgress jobItemProgress = progress.get(); if (null == jobItemProgress.getRecordsCount() || null == jobItemProgress.getCheckedRecordsCount()) { - result.setFinishedPercentage(0); + result.setInventoryFinishedPercentage(0); result.setCheckSuccess(null); return result; } @@ -301,15 +315,14 @@ private ConsistencyCheckJobItemInfo getJobItemInfo(final String parentJobId) { long recordsCount = jobItemProgress.getRecordsCount(); long checkedRecordsCount = Math.min(jobItemProgress.getCheckedRecordsCount(), recordsCount); if (JobStatus.FINISHED == jobItemProgress.getStatus()) { - result.setFinishedPercentage(100); + result.setInventoryFinishedPercentage(100); LocalDateTime checkEndTime = new Timestamp(jobItemProgress.getCheckEndTimeMillis()).toLocalDateTime(); Duration duration = Duration.between(checkBeginTime, checkEndTime); result.setDurationSeconds(duration.getSeconds()); result.setCheckEndTime(DATE_TIME_FORMATTER.format(checkEndTime)); - result.setRemainingSeconds(0L); + result.setInventoryRemainingSeconds(0L); } else if (0 != recordsCount && 0 != checkedRecordsCount) { - result.setFinishedPercentage((int) (checkedRecordsCount * 100 / recordsCount)); - JobConfigurationPOJO jobConfigPOJO = getElasticJobConfigPOJO(checkJobId); + result.setInventoryFinishedPercentage((int) (checkedRecordsCount * 100 / recordsCount)); Long stopTimeMillis = jobConfigPOJO.isDisabled() ? Long.parseLong(jobConfigPOJO.getProps().getProperty("stop_time_millis")) : null; long durationMillis = (null != stopTimeMillis ? 
stopTimeMillis : System.currentTimeMillis()) - jobItemProgress.getCheckBeginTimeMillis(); result.setDurationSeconds(TimeUnit.MILLISECONDS.toSeconds(durationMillis)); @@ -317,13 +330,18 @@ private ConsistencyCheckJobItemInfo getJobItemInfo(final String parentJobId) { result.setCheckEndTime(DATE_TIME_FORMATTER.format(new Timestamp(stopTimeMillis).toLocalDateTime())); } long remainingMills = Math.max(0, (long) ((recordsCount - checkedRecordsCount) * 1.0D / checkedRecordsCount * durationMillis)); - result.setRemainingSeconds(remainingMills / 1000); + result.setInventoryRemainingSeconds(remainingMills / 1000); } String tableNames = jobItemProgress.getTableNames(); result.setTableNames(Optional.ofNullable(tableNames).orElse("")); result.setCheckBeginTime(DATE_TIME_FORMATTER.format(checkBeginTime)); + ConsistencyCheckJobConfiguration jobConfig = getJobConfiguration(checkJobId); + result.setAlgorithmType(jobConfig.getAlgorithmTypeName()); + if (null != jobConfig.getAlgorithmProps()) { + result.setAlgorithmProps(jobConfig.getAlgorithmProps().entrySet().stream().map(entry -> String.format("'%s'='%s'", entry.getKey(), entry.getValue())).collect(Collectors.joining(","))); + } result.setErrorMessage(getJobItemErrorMessage(checkJobId, 0)); - Map checkJobResult = governanceRepositoryAPI.getCheckJobResult(parentJobId, checkJobId); + Map checkJobResult = governanceRepositoryAPI.getCheckJobResult(parentJobId, checkJobId); if (checkJobResult.isEmpty()) { result.setCheckSuccess(null); } else { diff --git a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/pojo/CreateConsistencyCheckJobParameter.java b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/pojo/CreateConsistencyCheckJobParameter.java index f48442b2ada1f..92413dad07988 100644 --- 
a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/pojo/CreateConsistencyCheckJobParameter.java +++ b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/pojo/CreateConsistencyCheckJobParameter.java @@ -19,6 +19,7 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import java.util.Properties; @@ -34,4 +35,8 @@ public final class CreateConsistencyCheckJobParameter { private final String algorithmTypeName; private final Properties algorithmProps; + + private final DatabaseType sourceDatabaseType; + + private final DatabaseType targetDatabaseType; } diff --git a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/ConsistencyCheckJobConfiguration.java b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/ConsistencyCheckJobConfiguration.java index c0aeda4475860..a969417d96fc9 100644 --- a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/ConsistencyCheckJobConfiguration.java +++ b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/ConsistencyCheckJobConfiguration.java @@ -41,10 +41,7 @@ public final class ConsistencyCheckJobConfiguration implements PipelineJobConfig private final Properties algorithmProps; - @Override - public DatabaseType getSourceDatabaseType() { - throw new UnsupportedOperationException(""); - } + private final DatabaseType sourceDatabaseType; /** * Get job sharding count. 
diff --git a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/yaml/YamlConsistencyCheckJobConfiguration.java b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/yaml/YamlConsistencyCheckJobConfiguration.java index 06e840d24381d..3bf659400d3f9 100644 --- a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/yaml/YamlConsistencyCheckJobConfiguration.java +++ b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/yaml/YamlConsistencyCheckJobConfiguration.java @@ -38,6 +38,8 @@ public final class YamlConsistencyCheckJobConfiguration implements YamlPipelineJ private Properties algorithmProps; + private String sourceDatabaseType; + @Override public String getDatabaseName() { throw new UnsupportedOperationException(""); diff --git a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/yaml/YamlConsistencyCheckJobConfigurationSwapper.java b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/yaml/YamlConsistencyCheckJobConfigurationSwapper.java index 66736fc384f2e..5431c935f213a 100644 --- a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/yaml/YamlConsistencyCheckJobConfigurationSwapper.java +++ b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/config/yaml/YamlConsistencyCheckJobConfigurationSwapper.java @@ -18,6 +18,8 @@ package org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.config.yaml; import 
org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.config.ConsistencyCheckJobConfiguration; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.util.yaml.swapper.YamlConfigurationSwapper; @@ -33,12 +35,14 @@ public YamlConsistencyCheckJobConfiguration swapToYamlConfiguration(final Consis result.setParentJobId(data.getParentJobId()); result.setAlgorithmTypeName(data.getAlgorithmTypeName()); result.setAlgorithmProps(data.getAlgorithmProps()); + result.setSourceDatabaseType(null == data.getSourceDatabaseType() ? null : data.getSourceDatabaseType().getType()); return result; } @Override public ConsistencyCheckJobConfiguration swapToObject(final YamlConsistencyCheckJobConfiguration yamlConfig) { - return new ConsistencyCheckJobConfiguration(yamlConfig.getJobId(), yamlConfig.getParentJobId(), yamlConfig.getAlgorithmTypeName(), yamlConfig.getAlgorithmProps()); + DatabaseType databaseType = null == yamlConfig.getSourceDatabaseType() ? 
null : TypedSPILoader.getService(DatabaseType.class, yamlConfig.getSourceDatabaseType()); + return new ConsistencyCheckJobConfiguration(yamlConfig.getJobId(), yamlConfig.getParentJobId(), yamlConfig.getAlgorithmTypeName(), yamlConfig.getAlgorithmProps(), databaseType); } /** diff --git a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/context/ConsistencyCheckJobItemContext.java b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/context/ConsistencyCheckJobItemContext.java index fa2bce7cdbad7..5dd7865398b8a 100644 --- a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/context/ConsistencyCheckJobItemContext.java +++ b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/context/ConsistencyCheckJobItemContext.java @@ -56,10 +56,11 @@ public ConsistencyCheckJobItemContext(final ConsistencyCheckJobConfiguration job jobId = jobConfig.getJobId(); this.shardingItem = shardingItem; this.status = status; - progressContext = new ConsistencyCheckJobItemProgressContext(jobId, shardingItem); + progressContext = new ConsistencyCheckJobItemProgressContext(jobId, shardingItem, jobConfig.getSourceDatabaseType().getType()); if (null != jobItemProgress) { progressContext.getCheckedRecordsCount().set(Optional.ofNullable(jobItemProgress.getCheckedRecordsCount()).orElse(0L)); - Optional.ofNullable(jobItemProgress.getTableCheckPositions()).ifPresent(progressContext.getTableCheckPositions()::putAll); + Optional.ofNullable(jobItemProgress.getSourceTableCheckPositions()).ifPresent(progressContext.getSourceTableCheckPositions()::putAll); + Optional.ofNullable(jobItemProgress.getTargetTableCheckPositions()).ifPresent(progressContext.getTargetTableCheckPositions()::putAll); } processContext = 
new ConsistencyCheckProcessContext(jobId); } diff --git a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunner.java b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunner.java index ec36925cb8907..bfb36635f800d 100644 --- a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunner.java +++ b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/task/ConsistencyCheckTasksRunner.java @@ -26,8 +26,8 @@ import org.apache.shardingsphere.data.pipeline.common.execute.ExecuteEngine; import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; import org.apache.shardingsphere.data.pipeline.common.job.type.JobType; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.PipelineDataConsistencyChecker; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; import org.apache.shardingsphere.data.pipeline.core.job.service.InventoryIncrementalJobAPI; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory; @@ -38,7 +38,6 @@ import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.context.ConsistencyCheckJobItemContext; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; -import java.sql.SQLException; import java.util.Collections; import java.util.Map; import 
java.util.concurrent.CompletableFuture; @@ -63,7 +62,7 @@ public final class ConsistencyCheckTasksRunner implements PipelineTasksRunner { private final LifecycleExecutor checkExecutor; - private final AtomicReference calculateAlgorithm = new AtomicReference<>(); + private final AtomicReference consistencyChecker = new AtomicReference<>(); public ConsistencyCheckTasksRunner(final ConsistencyCheckJobItemContext jobItemContext) { this.jobItemContext = jobItemContext; @@ -97,22 +96,23 @@ protected void runBlocking() { JobType jobType = PipelineJobIdUtils.parseJobType(parentJobId); InventoryIncrementalJobAPI jobAPI = (InventoryIncrementalJobAPI) TypedSPILoader.getService(PipelineJobAPI.class, jobType.getType()); PipelineJobConfiguration parentJobConfig = jobAPI.getJobConfiguration(parentJobId); - DataConsistencyCalculateAlgorithm calculateAlgorithm = jobAPI.buildDataConsistencyCalculateAlgorithm(checkJobConfig.getAlgorithmTypeName(), checkJobConfig.getAlgorithmProps()); - ConsistencyCheckTasksRunner.this.calculateAlgorithm.set(calculateAlgorithm); - Map dataConsistencyCheckResult; try { - dataConsistencyCheckResult = jobAPI.dataConsistencyCheck(parentJobConfig, calculateAlgorithm, jobItemContext.getProgressContext()); - PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(parentJobId)).persistCheckJobResult(parentJobId, checkJobId, dataConsistencyCheckResult); + PipelineDataConsistencyChecker checker = jobAPI.buildPipelineDataConsistencyChecker( + parentJobConfig, jobAPI.buildPipelineProcessContext(parentJobConfig), jobItemContext.getProgressContext()); + consistencyChecker.set(checker); + Map checkResultMap = checker.check(checkJobConfig.getAlgorithmTypeName(), checkJobConfig.getAlgorithmProps()); + log.info("job {} with check algorithm '{}' data consistency checker result: {}", parentJobId, checkJobConfig.getAlgorithmTypeName(), checkResultMap); + 
PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineJobIdUtils.parseContextKey(parentJobId)).persistCheckJobResult(parentJobId, checkJobId, checkResultMap); } finally { jobItemContext.getProgressContext().setCheckEndTimeMillis(System.currentTimeMillis()); } } @Override - protected void doStop() throws SQLException { - DataConsistencyCalculateAlgorithm algorithm = calculateAlgorithm.get(); - if (null != algorithm) { - algorithm.cancel(); + protected void doStop() { + PipelineDataConsistencyChecker checker = consistencyChecker.get(); + if (null != checker) { + checker.cancel(); } } } @@ -129,8 +129,8 @@ public void onSuccess() { @Override public void onFailure(final Throwable throwable) { - DataConsistencyCalculateAlgorithm algorithm = calculateAlgorithm.get(); - if (null != algorithm && algorithm.isCanceling()) { + PipelineDataConsistencyChecker checker = consistencyChecker.get(); + if (null != checker && checker.isCanceling()) { log.info("onFailure, canceling, check job id: {}, parent job id: {}", checkJobId, parentJobId); checkJobAPI.stop(checkJobId); return; diff --git a/kernel/data-pipeline/scenario/consistencycheck/src/test/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/context/ConsistencyCheckJobItemContextTest.java b/kernel/data-pipeline/scenario/consistencycheck/src/test/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/context/ConsistencyCheckJobItemContextTest.java new file mode 100644 index 0000000000000..b84f69bff9e6e --- /dev/null +++ b/kernel/data-pipeline/scenario/consistencycheck/src/test/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/context/ConsistencyCheckJobItemContextTest.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.context; + +import com.google.common.collect.ImmutableMap; +import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; +import org.apache.shardingsphere.data.pipeline.common.job.progress.ConsistencyCheckJobItemProgress; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.ConsistencyCheckJobItemProgressContext; +import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.config.ConsistencyCheckJobConfiguration; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.junit.jupiter.api.Test; + +import java.util.Collections; +import java.util.Map; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.junit.jupiter.api.Assertions.assertNotSame; + +class ConsistencyCheckJobItemContextTest { + + private static final String TABLE = "t_order"; + + private final DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, "H2"); + + @Test + void assertConstructWithoutTableCheckPositions() { + Map sourceTableCheckPositions = Collections.emptyMap(); + Map targetTableCheckPositions = Collections.emptyMap(); + ConsistencyCheckJobItemProgress jobItemProgress = new ConsistencyCheckJobItemProgress(TABLE, null, 0L, 
10L, null, null, sourceTableCheckPositions, targetTableCheckPositions, "H2"); + ConsistencyCheckJobItemContext actual = new ConsistencyCheckJobItemContext(new ConsistencyCheckJobConfiguration("", "", "DATA_MATCH", null, databaseType), + 0, JobStatus.RUNNING, jobItemProgress); + verifyProgressContext(actual.getProgressContext(), 0, sourceTableCheckPositions, targetTableCheckPositions); + } + + @Test + void assertConstructWithTableCheckPositions() { + Map sourceTableCheckPositions = ImmutableMap.of(TABLE, 6); + Map targetTableCheckPositions = ImmutableMap.of(TABLE, 5); + ConsistencyCheckJobItemProgress jobItemProgress = new ConsistencyCheckJobItemProgress(TABLE, null, 0L, 10L, null, null, sourceTableCheckPositions, targetTableCheckPositions, "H2"); + ConsistencyCheckJobItemContext actual = new ConsistencyCheckJobItemContext(new ConsistencyCheckJobConfiguration("", "", "DATA_MATCH", null, databaseType), + 0, JobStatus.RUNNING, jobItemProgress); + verifyProgressContext(actual.getProgressContext(), 1, sourceTableCheckPositions, targetTableCheckPositions); + assertThat(actual.getProgressContext().getSourceTableCheckPositions().get(TABLE), is(6)); + assertThat(actual.getProgressContext().getTargetTableCheckPositions().get(TABLE), is(5)); + } + + private void verifyProgressContext(final ConsistencyCheckJobItemProgressContext progressContext, final int expectedSize, + final Map sourceTableCheckPositions, final Map targetTableCheckPositions) { + assertThat(progressContext.getSourceTableCheckPositions().size(), is(expectedSize)); + assertThat(progressContext.getTargetTableCheckPositions().size(), is(expectedSize)); + assertNotSame(progressContext.getSourceTableCheckPositions(), sourceTableCheckPositions); + assertNotSame(progressContext.getTargetTableCheckPositions(), targetTableCheckPositions); + } +} diff --git a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java 
b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java index d04386d1587a2..e6fb8ca124f42 100644 --- a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java +++ b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/impl/MigrationJobAPI.java @@ -17,7 +17,6 @@ package org.apache.shardingsphere.data.pipeline.scenario.migration.api.impl; -import com.google.gson.Gson; import lombok.extern.slf4j.Slf4j; import org.apache.commons.codec.digest.DigestUtils; import org.apache.shardingsphere.data.pipeline.api.config.TableNameSchemaNameMapping; @@ -26,11 +25,8 @@ import org.apache.shardingsphere.data.pipeline.api.datasource.config.impl.ShardingSpherePipelineDataSourceConfiguration; import org.apache.shardingsphere.data.pipeline.api.datasource.config.impl.StandardPipelineDataSourceConfiguration; import org.apache.shardingsphere.data.pipeline.api.datasource.config.yaml.YamlPipelineDataSourceConfiguration; -import org.apache.shardingsphere.data.pipeline.api.metadata.ActualTableName; import org.apache.shardingsphere.data.pipeline.api.metadata.LogicTableName; -import org.apache.shardingsphere.data.pipeline.api.metadata.SchemaName; import org.apache.shardingsphere.data.pipeline.api.metadata.SchemaTableName; -import org.apache.shardingsphere.data.pipeline.api.metadata.TableName; import org.apache.shardingsphere.data.pipeline.common.config.CreateTableConfiguration; import org.apache.shardingsphere.data.pipeline.common.config.CreateTableConfiguration.CreateTableEntry; import org.apache.shardingsphere.data.pipeline.common.config.ImporterConfiguration; @@ -71,6 +67,7 @@ import org.apache.shardingsphere.data.pipeline.scenario.migration.check.consistency.MigrationDataConsistencyChecker; import 
org.apache.shardingsphere.data.pipeline.scenario.migration.config.MigrationJobConfiguration; import org.apache.shardingsphere.data.pipeline.scenario.migration.config.MigrationTaskConfiguration; +import org.apache.shardingsphere.data.pipeline.scenario.migration.config.ingest.MigrationIncrementalDumperConfigurationCreator; import org.apache.shardingsphere.data.pipeline.scenario.migration.context.MigrationProcessContext; import org.apache.shardingsphere.data.pipeline.spi.ratelimit.JobRateLimitAlgorithm; import org.apache.shardingsphere.data.pipeline.yaml.job.YamlMigrationJobConfiguration; @@ -85,13 +82,13 @@ import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeFactory; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; import org.apache.shardingsphere.infra.datanode.DataNode; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; -import org.apache.shardingsphere.infra.util.json.JsonUtils; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.apache.shardingsphere.infra.util.json.JsonUtils; import org.apache.shardingsphere.infra.yaml.config.pojo.YamlRootConfiguration; -import org.apache.shardingsphere.infra.yaml.config.pojo.rule.YamlRuleConfiguration; import org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; import org.apache.shardingsphere.infra.yaml.config.swapper.rule.YamlRuleConfigurationSwapperEngine; import org.apache.shardingsphere.migration.distsql.statement.MigrateTableStatement; @@ -139,7 +136,7 @@ public String createJobAndStart(final PipelineContextKey contextKey, final 
Migra private YamlMigrationJobConfiguration buildYamlJobConfiguration(final PipelineContextKey contextKey, final MigrateTableStatement param) { YamlMigrationJobConfiguration result = new YamlMigrationJobConfiguration(); result.setTargetDatabaseName(param.getTargetDatabaseName()); - Map metaDataDataSource = dataSourcePersistService.load(contextKey, new MigrationJobType()); + Map metaDataDataSource = dataSourcePersistService.load(contextKey, new MigrationJobType()); Map> sourceDataNodes = new LinkedHashMap<>(); Map configSources = new LinkedHashMap<>(); List sourceTargetEntries = new ArrayList<>(new HashSet<>(param.getSourceTargetEntries())).stream().sorted(Comparator.comparing(SourceTargetEntry::getTargetTableName) @@ -155,8 +152,8 @@ private YamlMigrationJobConfiguration buildYamlJobConfiguration(final PipelineCo } ShardingSpherePreconditions.checkState(metaDataDataSource.containsKey(dataSourceName), () -> new PipelineInvalidParameterException(dataSourceName + " doesn't exist. Run `SHOW MIGRATION SOURCE STORAGE UNITS;` to verify it.")); - Map sourceDataSourceProps = dataSourceConfigSwapper.swapToMap(metaDataDataSource.get(dataSourceName)); - StandardPipelineDataSourceConfiguration sourceDataSourceConfig = new StandardPipelineDataSourceConfiguration(sourceDataSourceProps); + Map sourceDataSourcePoolProps = dataSourceConfigSwapper.swapToMap(metaDataDataSource.get(dataSourceName)); + StandardPipelineDataSourceConfiguration sourceDataSourceConfig = new StandardPipelineDataSourceConfiguration(sourceDataSourcePoolProps); configSources.put(dataSourceName, buildYamlPipelineDataSourceConfiguration(sourceDataSourceConfig.getType(), sourceDataSourceConfig.getParameter())); DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(sourceDataSourceConfig.getDatabaseType()).getDialectDatabaseMetaData(); if (null == each.getSource().getSchemaName() && dialectDatabaseMetaData.isSchemaAvailable()) { @@ -192,13 +189,12 @@ private YamlPipelineDataSourceConfiguration 
buildYamlPipelineDataSourceConfigura } private PipelineDataSourceConfiguration buildTargetPipelineDataSourceConfiguration(final ShardingSphereDatabase targetDatabase) { - Map> targetDataSourceProps = new HashMap<>(); + Map> targetPoolProps = new HashMap<>(); YamlDataSourceConfigurationSwapper dataSourceConfigSwapper = new YamlDataSourceConfigurationSwapper(); - for (Entry entry : targetDatabase.getResourceMetaData().getDataSourcePropsMap().entrySet()) { - Map dataSourceProps = dataSourceConfigSwapper.swapToMap(entry.getValue()); - targetDataSourceProps.put(entry.getKey(), dataSourceProps); + for (Entry entry : targetDatabase.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().entrySet()) { + targetPoolProps.put(entry.getKey(), dataSourceConfigSwapper.swapToMap(entry.getValue().getDataSourcePoolProperties())); } - YamlRootConfiguration targetRootConfig = buildYamlRootConfiguration(targetDatabase.getName(), targetDataSourceProps, targetDatabase.getRuleMetaData().getConfigurations()); + YamlRootConfiguration targetRootConfig = buildYamlRootConfiguration(targetDatabase.getName(), targetPoolProps, targetDatabase.getRuleMetaData().getConfigurations()); return new ShardingSpherePipelineDataSourceConfiguration(targetRootConfig); } @@ -209,8 +205,7 @@ private YamlRootConfiguration buildYamlRootConfiguration(final String databaseNa YamlRootConfiguration result = new YamlRootConfiguration(); result.setDatabaseName(databaseName); result.setDataSources(yamlDataSources); - Collection yamlRuleConfigurations = new YamlRuleConfigurationSwapperEngine().swapToYamlRuleConfigurations(rules); - result.setRules(yamlRuleConfigurations); + result.setRules(new YamlRuleConfigurationSwapperEngine().swapToYamlRuleConfigurations(rules)); return result; } @@ -267,31 +262,17 @@ protected MigrationJobConfiguration getJobConfiguration(final JobConfigurationPO @Override public MigrationTaskConfiguration buildTaskConfiguration(final PipelineJobConfiguration pipelineJobConfig, final int 
jobShardingItem, final PipelineProcessConfiguration pipelineProcessConfig) { MigrationJobConfiguration jobConfig = (MigrationJobConfiguration) pipelineJobConfig; - JobDataNodeLine dataNodeLine = jobConfig.getJobShardingDataNodes().get(jobShardingItem); - Map tableNameMap = buildTableNameMap(dataNodeLine); - TableNameSchemaNameMapping tableNameSchemaNameMapping = new TableNameSchemaNameMapping(jobConfig.getTargetTableSchemaMap()); - CreateTableConfiguration createTableConfig = buildCreateTableConfiguration(jobConfig, tableNameSchemaNameMapping); - String dataSourceName = dataNodeLine.getEntries().get(0).getDataNodes().get(0).getDataSourceName(); - DumperConfiguration dumperConfig = buildDumperConfiguration(jobConfig.getJobId(), dataSourceName, jobConfig.getSources().get(dataSourceName), tableNameMap, tableNameSchemaNameMapping); + DumperConfiguration dumperConfig = new MigrationIncrementalDumperConfigurationCreator(jobConfig).createDumperConfiguration(jobConfig.getJobShardingDataNodes().get(jobShardingItem)); + CreateTableConfiguration createTableConfig = buildCreateTableConfiguration(jobConfig, dumperConfig.getTableNameSchemaNameMapping()); Set targetTableNames = jobConfig.getTargetTableNames().stream().map(LogicTableName::new).collect(Collectors.toSet()); Map> shardingColumnsMap = new ShardingColumnsExtractor().getShardingColumnsMap( ((ShardingSpherePipelineDataSourceConfiguration) jobConfig.getTarget()).getRootConfig().getRules(), targetTableNames); - ImporterConfiguration importerConfig = buildImporterConfiguration(jobConfig, pipelineProcessConfig, shardingColumnsMap, tableNameSchemaNameMapping); - MigrationTaskConfiguration result = new MigrationTaskConfiguration(dataSourceName, createTableConfig, dumperConfig, importerConfig); + ImporterConfiguration importerConfig = buildImporterConfiguration(jobConfig, pipelineProcessConfig, shardingColumnsMap, dumperConfig.getTableNameSchemaNameMapping()); + MigrationTaskConfiguration result = new 
MigrationTaskConfiguration(dumperConfig.getDataSourceName(), createTableConfig, dumperConfig, importerConfig); log.info("buildTaskConfiguration, result={}", result); return result; } - private Map buildTableNameMap(final JobDataNodeLine dataNodeLine) { - Map result = new LinkedHashMap<>(); - for (JobDataNodeEntry each : dataNodeLine.getEntries()) { - for (DataNode dataNode : each.getDataNodes()) { - result.put(new ActualTableName(dataNode.getTableName()), new LogicTableName(each.getLogicTableName())); - } - } - return result; - } - private CreateTableConfiguration buildCreateTableConfiguration(final MigrationJobConfiguration jobConfig, final TableNameSchemaNameMapping tableNameSchemaNameMapping) { Collection createTableEntries = new LinkedList<>(); @@ -302,8 +283,8 @@ private CreateTableConfiguration buildCreateTableConfiguration(final MigrationJo DataNode dataNode = each.getDataNodes().get(0); PipelineDataSourceConfiguration sourceDataSourceConfig = jobConfig.getSources().get(dataNode.getDataSourceName()); CreateTableEntry createTableEntry = new CreateTableEntry( - sourceDataSourceConfig, new SchemaTableName(new SchemaName(sourceSchemaName), new TableName(dataNode.getTableName())), - jobConfig.getTarget(), new SchemaTableName(new SchemaName(targetSchemaName), new TableName(each.getLogicTableName()))); + sourceDataSourceConfig, new SchemaTableName(sourceSchemaName, dataNode.getTableName()), + jobConfig.getTarget(), new SchemaTableName(targetSchemaName, each.getLogicTableName())); createTableEntries.add(createTableEntry); } CreateTableConfiguration result = new CreateTableConfiguration(createTableEntries); @@ -311,17 +292,6 @@ sourceDataSourceConfig, new SchemaTableName(new SchemaName(sourceSchemaName), ne return result; } - private DumperConfiguration buildDumperConfiguration(final String jobId, final String dataSourceName, final PipelineDataSourceConfiguration sourceDataSource, - final Map tableNameMap, final TableNameSchemaNameMapping tableNameSchemaNameMapping) 
{ - DumperConfiguration result = new DumperConfiguration(); - result.setJobId(jobId); - result.setDataSourceName(dataSourceName); - result.setDataSourceConfig(sourceDataSource); - result.setTableNameMap(tableNameMap); - result.setTableNameSchemaNameMapping(tableNameSchemaNameMapping); - return result; - } - private ImporterConfiguration buildImporterConfiguration(final MigrationJobConfiguration jobConfig, final PipelineProcessConfiguration pipelineProcessConfig, final Map> shardingColumnsMap, final TableNameSchemaNameMapping tableNameSchemaNameMapping) { MigrationProcessContext processContext = new MigrationProcessContext(jobConfig.getJobId(), pipelineProcessConfig); @@ -339,8 +309,8 @@ public MigrationProcessContext buildPipelineProcessContext(final PipelineJobConf } @Override - protected PipelineDataConsistencyChecker buildPipelineDataConsistencyChecker(final PipelineJobConfiguration pipelineJobConfig, final InventoryIncrementalProcessContext processContext, - final ConsistencyCheckJobItemProgressContext progressContext) { + public PipelineDataConsistencyChecker buildPipelineDataConsistencyChecker(final PipelineJobConfiguration pipelineJobConfig, final InventoryIncrementalProcessContext processContext, + final ConsistencyCheckJobItemProgressContext progressContext) { return new MigrationDataConsistencyChecker((MigrationJobConfiguration) pipelineJobConfig, processContext, progressContext); } @@ -432,19 +402,19 @@ public void commit(final String jobId) { * Add migration source resources. 
* * @param contextKey context key - * @param dataSourcePropsMap data source properties map + * @param propsMap data source pool properties map */ - public void addMigrationSourceResources(final PipelineContextKey contextKey, final Map dataSourcePropsMap) { - Map existDataSources = dataSourcePersistService.load(contextKey, getJobType()); - Collection duplicateDataSourceNames = new HashSet<>(dataSourcePropsMap.size(), 1F); - for (Entry entry : dataSourcePropsMap.entrySet()) { + public void addMigrationSourceResources(final PipelineContextKey contextKey, final Map propsMap) { + Map existDataSources = dataSourcePersistService.load(contextKey, getJobType()); + Collection duplicateDataSourceNames = new HashSet<>(propsMap.size(), 1F); + for (Entry entry : propsMap.entrySet()) { if (existDataSources.containsKey(entry.getKey())) { duplicateDataSourceNames.add(entry.getKey()); } } ShardingSpherePreconditions.checkState(duplicateDataSourceNames.isEmpty(), () -> new RegisterMigrationSourceStorageUnitException(duplicateDataSourceNames)); - Map result = new LinkedHashMap<>(existDataSources); - result.putAll(dataSourcePropsMap); + Map result = new LinkedHashMap<>(existDataSources); + result.putAll(propsMap); dataSourcePersistService.persist(contextKey, getJobType(), result); } @@ -455,7 +425,7 @@ public void addMigrationSourceResources(final PipelineContextKey contextKey, fin * @param resourceNames resource names */ public void dropMigrationSourceResources(final PipelineContextKey contextKey, final Collection resourceNames) { - Map metaDataDataSource = dataSourcePersistService.load(contextKey, getJobType()); + Map metaDataDataSource = dataSourcePersistService.load(contextKey, getJobType()); List noExistResources = resourceNames.stream().filter(each -> !metaDataDataSource.containsKey(each)).collect(Collectors.toList()); ShardingSpherePreconditions.checkState(noExistResources.isEmpty(), () -> new UnregisterMigrationSourceStorageUnitException(noExistResources)); for (String each : 
resourceNames) { @@ -471,11 +441,11 @@ public void dropMigrationSourceResources(final PipelineContextKey contextKey, fi * @return migration source resources */ public Collection> listMigrationSourceResources(final PipelineContextKey contextKey) { - Map dataSourcePropertiesMap = dataSourcePersistService.load(contextKey, getJobType()); - Collection> result = new ArrayList<>(dataSourcePropertiesMap.size()); - for (Entry entry : dataSourcePropertiesMap.entrySet()) { + Map propsMap = dataSourcePersistService.load(contextKey, getJobType()); + Collection> result = new ArrayList<>(propsMap.size()); + for (Entry entry : propsMap.entrySet()) { String dataSourceName = entry.getKey(); - DataSourceProperties value = entry.getValue(); + DataSourcePoolProperties value = entry.getValue(); Collection props = new LinkedList<>(); props.add(dataSourceName); String url = String.valueOf(value.getConnectionPropertySynonyms().getStandardProperties().get("url")); @@ -492,8 +462,8 @@ public Collection> listMigrationSourceResources(final Pipelin props.add(getStandardProperty(standardProps, "maxPoolSize")); props.add(getStandardProperty(standardProps, "minPoolSize")); props.add(getStandardProperty(standardProps, "readOnly")); - Map otherProps = value.getCustomDataSourceProperties().getProperties(); - props.add(otherProps.isEmpty() ? "" : new Gson().toJson(otherProps)); + Map otherProps = value.getCustomProperties().getProperties(); + props.add(otherProps.isEmpty() ? 
"" : JsonUtils.toJsonString(otherProps)); result.add(props); } return result; diff --git a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/check/consistency/MigrationDataConsistencyChecker.java b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/check/consistency/MigrationDataConsistencyChecker.java index 7c53af5f9fc73..846f9b430e0db 100644 --- a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/check/consistency/MigrationDataConsistencyChecker.java +++ b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/check/consistency/MigrationDataConsistencyChecker.java @@ -18,10 +18,7 @@ package org.apache.shardingsphere.data.pipeline.scenario.migration.check.consistency; import lombok.extern.slf4j.Slf4j; -import org.apache.shardingsphere.data.pipeline.api.datasource.config.PipelineDataSourceConfiguration; -import org.apache.shardingsphere.data.pipeline.api.metadata.SchemaName; import org.apache.shardingsphere.data.pipeline.api.metadata.SchemaTableName; -import org.apache.shardingsphere.data.pipeline.api.metadata.TableName; import org.apache.shardingsphere.data.pipeline.api.metadata.loader.PipelineTableMetaDataLoader; import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineColumnMetaData; import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineTableMetaData; @@ -38,11 +35,12 @@ import org.apache.shardingsphere.data.pipeline.common.metadata.loader.StandardPipelineTableMetaDataLoader; import org.apache.shardingsphere.data.pipeline.core.consistencycheck.ConsistencyCheckJobItemProgressContext; import org.apache.shardingsphere.data.pipeline.core.consistencycheck.PipelineDataConsistencyChecker; -import 
org.apache.shardingsphere.data.pipeline.core.consistencycheck.SingleTableInventoryDataConsistencyChecker; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyChecker; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyCheckerFactory; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableInventoryCheckParameter; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableInventoryChecker; import org.apache.shardingsphere.data.pipeline.core.exception.data.PipelineTableDataConsistencyCheckLoadingFailedException; -import org.apache.shardingsphere.data.pipeline.core.exception.data.UnsupportedPipelineDatabaseTypeException; import org.apache.shardingsphere.data.pipeline.scenario.migration.api.impl.MigrationJobAPI; import org.apache.shardingsphere.data.pipeline.scenario.migration.config.MigrationJobConfiguration; import org.apache.shardingsphere.data.pipeline.spi.ratelimit.JobRateLimitAlgorithm; @@ -53,8 +51,11 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Objects; -import java.util.concurrent.atomic.AtomicBoolean; +import java.util.Properties; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; /** * Data consistency checker for migration job. 
@@ -68,6 +69,8 @@ public final class MigrationDataConsistencyChecker implements PipelineDataConsis private final ConsistencyCheckJobItemProgressContext progressContext; + private final AtomicReference currentTableInventoryChecker = new AtomicReference<>(); + public MigrationDataConsistencyChecker(final MigrationJobConfiguration jobConfig, final InventoryIncrementalProcessContext processContext, final ConsistencyCheckJobItemProgressContext progressContext) { this.jobConfig = jobConfig; @@ -76,66 +79,78 @@ public MigrationDataConsistencyChecker(final MigrationJobConfiguration jobConfig } @Override - public Map check(final DataConsistencyCalculateAlgorithm calculateAlgorithm) { - verifyPipelineDatabaseType(calculateAlgorithm, jobConfig.getSources().values().iterator().next()); - verifyPipelineDatabaseType(calculateAlgorithm, jobConfig.getTarget()); + public Map check(final String algorithmType, final Properties algorithmProps) { List sourceTableNames = new LinkedList<>(); jobConfig.getJobShardingDataNodes().forEach(each -> each.getEntries().forEach(entry -> entry.getDataNodes() .forEach(dataNode -> sourceTableNames.add(DataNodeUtils.formatWithSchema(dataNode))))); progressContext.setRecordsCount(getRecordsCount()); progressContext.getTableNames().addAll(sourceTableNames); progressContext.onProgressUpdated(new PipelineJobProgressUpdatedParameter(0)); - Map result = new LinkedHashMap<>(); - PipelineDataSourceManager dataSourceManager = new DefaultPipelineDataSourceManager(); - try { - AtomicBoolean checkFailed = new AtomicBoolean(false); + Map result = new LinkedHashMap<>(); + try ( + PipelineDataSourceManager dataSourceManager = new DefaultPipelineDataSourceManager(); + TableDataConsistencyChecker tableChecker = TableDataConsistencyCheckerFactory.newInstance(algorithmType, algorithmProps)) { for (JobDataNodeLine each : jobConfig.getJobShardingDataNodes()) { - each.getEntries().forEach(entry -> entry.getDataNodes().forEach(dataNode -> check(calculateAlgorithm, result, 
dataSourceManager, checkFailed, each, entry, dataNode))); + checkTableInventoryData(each, tableChecker, result, dataSourceManager); } - } finally { - dataSourceManager.close(); } - return result; + return result.entrySet().stream().collect(Collectors.toMap(entry -> entry.getKey().marshal(), Entry::getValue)); } - private void check(final DataConsistencyCalculateAlgorithm calculateAlgorithm, final Map checkResults, final PipelineDataSourceManager dataSourceManager, - final AtomicBoolean checkFailed, final JobDataNodeLine jobDataNodeLine, final JobDataNodeEntry entry, final DataNode dataNode) { - if (checkFailed.get()) { - return; - } - DataConsistencyCheckResult checkResult = checkSingleTable(entry.getLogicTableName(), dataNode, calculateAlgorithm, dataSourceManager); - checkResults.put(DataNodeUtils.formatWithSchema(dataNode), checkResult); - if (!checkResult.isMatched()) { - log.info("unmatched on table '{}', ignore left tables", jobDataNodeLine); - checkFailed.set(true); + private long getRecordsCount() { + Map jobProgress = new MigrationJobAPI().getJobProgress(jobConfig); + return jobProgress.values().stream().filter(Objects::nonNull).mapToLong(InventoryIncrementalJobItemProgress::getProcessedRecordsCount).sum(); + } + + private void checkTableInventoryData(final JobDataNodeLine jobDataNodeLine, final TableDataConsistencyChecker tableChecker, + final Map checkResultMap, final PipelineDataSourceManager dataSourceManager) { + for (JobDataNodeEntry entry : jobDataNodeLine.getEntries()) { + for (DataNode each : entry.getDataNodes()) { + TableDataConsistencyCheckResult checkResult = checkSingleTableInventoryData(entry.getLogicTableName(), each, tableChecker, dataSourceManager); + checkResultMap.put(new SchemaTableName(each.getSchemaName(), each.getTableName()), checkResult); + if (!checkResult.isMatched() && tableChecker.isBreakOnInventoryCheckNotMatched()) { + log.info("Unmatched on table '{}', ignore left tables", DataNodeUtils.formatWithSchema(each)); + return; + 
} + } } } - private DataConsistencyCheckResult checkSingleTable(final String targetTableName, final DataNode dataNode, - final DataConsistencyCalculateAlgorithm calculateAlgorithm, final PipelineDataSourceManager dataSourceManager) { - SchemaTableName sourceTable = new SchemaTableName(new SchemaName(dataNode.getSchemaName()), new TableName(dataNode.getTableName())); - SchemaTableName targetTable = new SchemaTableName(new SchemaName(dataNode.getSchemaName()), new TableName(targetTableName)); + private TableDataConsistencyCheckResult checkSingleTableInventoryData(final String targetTableName, final DataNode dataNode, + final TableDataConsistencyChecker tableChecker, final PipelineDataSourceManager dataSourceManager) { + SchemaTableName sourceTable = new SchemaTableName(dataNode.getSchemaName(), dataNode.getTableName()); + SchemaTableName targetTable = new SchemaTableName(dataNode.getSchemaName(), targetTableName); PipelineDataSourceWrapper sourceDataSource = dataSourceManager.getDataSource(jobConfig.getSources().get(dataNode.getDataSourceName())); PipelineDataSourceWrapper targetDataSource = dataSourceManager.getDataSource(jobConfig.getTarget()); PipelineTableMetaDataLoader metaDataLoader = new StandardPipelineTableMetaDataLoader(sourceDataSource); PipelineTableMetaData tableMetaData = metaDataLoader.getTableMetaData(dataNode.getSchemaName(), dataNode.getTableName()); ShardingSpherePreconditions.checkNotNull(tableMetaData, () -> new PipelineTableDataConsistencyCheckLoadingFailedException(dataNode.getSchemaName(), dataNode.getTableName())); List columnNames = tableMetaData.getColumnNames(); - List uniqueKeyColumns = PipelineTableMetaDataUtils.getUniqueKeyColumns( + List uniqueKeys = PipelineTableMetaDataUtils.getUniqueKeyColumns( sourceTable.getSchemaName().getOriginal(), sourceTable.getTableName().getOriginal(), metaDataLoader); - PipelineColumnMetaData uniqueKey = uniqueKeyColumns.isEmpty() ? 
null : uniqueKeyColumns.get(0); - SingleTableInventoryDataConsistencyChecker singleTableInventoryChecker = new SingleTableInventoryDataConsistencyChecker( - jobConfig.getJobId(), sourceDataSource, targetDataSource, sourceTable, targetTable, columnNames, uniqueKey, readRateLimitAlgorithm, progressContext); - return singleTableInventoryChecker.check(calculateAlgorithm); + TableInventoryCheckParameter param = new TableInventoryCheckParameter( + jobConfig.getJobId(), sourceDataSource, targetDataSource, sourceTable, targetTable, columnNames, uniqueKeys, readRateLimitAlgorithm, progressContext); + TableInventoryChecker tableInventoryChecker = tableChecker.buildTableInventoryChecker(param); + currentTableInventoryChecker.set(tableInventoryChecker); + TableDataConsistencyCheckResult result = tableInventoryChecker.checkSingleTableInventoryData(); + currentTableInventoryChecker.set(null); + return result; } - private void verifyPipelineDatabaseType(final DataConsistencyCalculateAlgorithm calculateAlgorithm, final PipelineDataSourceConfiguration dataSourceConfig) { - ShardingSpherePreconditions.checkState(calculateAlgorithm.getSupportedDatabaseTypes().contains(dataSourceConfig.getDatabaseType()), - () -> new UnsupportedPipelineDatabaseTypeException(dataSourceConfig.getDatabaseType())); + @Override + public void cancel() { + TableInventoryChecker checker = currentTableInventoryChecker.get(); + if (null != checker) { + checker.cancel(); + } } - private long getRecordsCount() { - Map jobProgress = new MigrationJobAPI().getJobProgress(jobConfig); - return jobProgress.values().stream().filter(Objects::nonNull).mapToLong(InventoryIncrementalJobItemProgress::getProcessedRecordsCount).sum(); + @Override + public boolean isCanceling() { + TableInventoryChecker checker = currentTableInventoryChecker.get(); + if (null == checker) { + return false; + } + return checker.isCanceling(); } } diff --git 
a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/config/ingest/MigrationIncrementalDumperConfigurationCreator.java b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/config/ingest/MigrationIncrementalDumperConfigurationCreator.java new file mode 100644 index 0000000000000..4763f25aa040a --- /dev/null +++ b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/config/ingest/MigrationIncrementalDumperConfigurationCreator.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.data.pipeline.scenario.migration.config.ingest; + +import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.data.pipeline.api.config.TableNameSchemaNameMapping; +import org.apache.shardingsphere.data.pipeline.api.config.ingest.DumperConfiguration; +import org.apache.shardingsphere.data.pipeline.api.datasource.config.PipelineDataSourceConfiguration; +import org.apache.shardingsphere.data.pipeline.api.metadata.ActualTableName; +import org.apache.shardingsphere.data.pipeline.api.metadata.LogicTableName; +import org.apache.shardingsphere.data.pipeline.common.config.ingest.IncrementalDumperConfigurationCreator; +import org.apache.shardingsphere.data.pipeline.common.datanode.JobDataNodeLine; +import org.apache.shardingsphere.data.pipeline.common.datanode.JobDataNodeLineConvertUtils; +import org.apache.shardingsphere.data.pipeline.scenario.migration.config.MigrationJobConfiguration; + +import java.util.Map; + +/** + * Migration incremental dumper configuration creator. 
+ */ +@RequiredArgsConstructor +public final class MigrationIncrementalDumperConfigurationCreator implements IncrementalDumperConfigurationCreator { + + private final MigrationJobConfiguration jobConfig; + + @Override + public DumperConfiguration createDumperConfiguration(final JobDataNodeLine jobDataNodeLine) { + Map tableNameMap = JobDataNodeLineConvertUtils.buildTableNameMap(jobDataNodeLine); + TableNameSchemaNameMapping tableNameSchemaNameMapping = new TableNameSchemaNameMapping(jobConfig.getTargetTableSchemaMap()); + String dataSourceName = jobDataNodeLine.getEntries().get(0).getDataNodes().get(0).getDataSourceName(); + return buildDumperConfiguration(jobConfig.getJobId(), dataSourceName, jobConfig.getSources().get(dataSourceName), tableNameMap, tableNameSchemaNameMapping); + } + + private DumperConfiguration buildDumperConfiguration(final String jobId, final String dataSourceName, final PipelineDataSourceConfiguration sourceDataSource, + final Map tableNameMap, final TableNameSchemaNameMapping tableNameSchemaNameMapping) { + DumperConfiguration result = new DumperConfiguration(); + result.setJobId(jobId); + result.setDataSourceName(dataSourceName); + result.setDataSourceConfig(sourceDataSource); + result.setTableNameMap(tableNameMap); + result.setTableNameSchemaNameMapping(tableNameSchemaNameMapping); + return result; + } +} diff --git a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/yaml/job/YamlMigrationJobConfigurationSwapper.java b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/yaml/job/YamlMigrationJobConfigurationSwapper.java index 18da66b0bb0d4..33a5d97ff3cd5 100644 --- a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/yaml/job/YamlMigrationJobConfigurationSwapper.java +++ 
b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/yaml/job/YamlMigrationJobConfigurationSwapper.java @@ -44,7 +44,7 @@ public YamlMigrationJobConfiguration swapToYamlConfiguration(final MigrationJobC result.setSourceDatabaseType(data.getSourceDatabaseType().getType()); result.setTargetDatabaseType(data.getTargetDatabaseType().getType()); result.setSources(data.getSources().entrySet().stream().collect(Collectors.toMap(Entry::getKey, - entry -> dataSourceConfigSwapper.swapToYamlConfiguration(entry.getValue()), (key, value) -> value, LinkedHashMap::new))); + entry -> dataSourceConfigSwapper.swapToYamlConfiguration(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new))); result.setTarget(dataSourceConfigSwapper.swapToYamlConfiguration(data.getTarget())); result.setTargetTableNames(data.getTargetTableNames()); result.setTargetTableSchemaMap(data.getTargetTableSchemaMap()); @@ -60,7 +60,7 @@ public MigrationJobConfiguration swapToObject(final YamlMigrationJobConfiguratio return new MigrationJobConfiguration(yamlConfig.getJobId(), yamlConfig.getDatabaseName(), TypedSPILoader.getService(DatabaseType.class, yamlConfig.getSourceDatabaseType()), TypedSPILoader.getService(DatabaseType.class, yamlConfig.getTargetDatabaseType()), yamlConfig.getSources().entrySet().stream().collect(Collectors.toMap(Entry::getKey, - entry -> dataSourceConfigSwapper.swapToObject(entry.getValue()), (key, value) -> value, LinkedHashMap::new)), + entry -> dataSourceConfigSwapper.swapToObject(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)), dataSourceConfigSwapper.swapToObject(yamlConfig.getTarget()), yamlConfig.getTargetTableNames(), yamlConfig.getTargetTableSchemaMap(), JobDataNodeLine.unmarshal(yamlConfig.getTablesFirstDataNodes()), yamlConfig.getJobShardingDataNodes().stream().map(JobDataNodeLine::unmarshal).collect(Collectors.toList()), diff --git 
a/kernel/global-clock/core/src/main/java/org/apache/shardingsphere/globalclock/core/rule/GlobalClockRule.java b/kernel/global-clock/core/src/main/java/org/apache/shardingsphere/globalclock/core/rule/GlobalClockRule.java index 88966cdde00cc..7703ae0af3dcc 100644 --- a/kernel/global-clock/core/src/main/java/org/apache/shardingsphere/globalclock/core/rule/GlobalClockRule.java +++ b/kernel/global-clock/core/src/main/java/org/apache/shardingsphere/globalclock/core/rule/GlobalClockRule.java @@ -23,15 +23,16 @@ import org.apache.shardingsphere.infra.database.DatabaseTypeEngine; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.rule.identifier.scope.GlobalRule; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.transaction.spi.TransactionHook; -import javax.sql.DataSource; import java.util.Collection; +import java.util.Collections; import java.util.Map; +import java.util.Optional; import java.util.Properties; -import java.util.stream.Collectors; /** * Global clock rule. 
@@ -45,13 +46,13 @@ public GlobalClockRule(final GlobalClockRuleConfiguration ruleConfig, final Map< configuration = ruleConfig; if (ruleConfig.isEnabled()) { TypedSPILoader.getService(GlobalClockProvider.class, getGlobalClockProviderType(), configuration.getProps()); - TypedSPILoader.getService(TransactionHook.class, "GLOBAL_CLOCK", getProps(databases)); + TypedSPILoader.getService(TransactionHook.class, "GLOBAL_CLOCK", createProperties(databases)); } } - private Properties getProps(final Map databases) { + private Properties createProperties(final Map databases) { Properties result = new Properties(); - DatabaseType storageType = DatabaseTypeEngine.getStorageType(getDataSources(databases)); + DatabaseType storageType = findStorageType(databases.values()).orElseGet(() -> DatabaseTypeEngine.getStorageType(Collections.emptyList())); result.setProperty("trunkType", storageType.getTrunkDatabaseType().orElse(storageType).getType()); result.setProperty("enabled", String.valueOf(configuration.isEnabled())); result.setProperty("type", configuration.getType()); @@ -59,9 +60,8 @@ private Properties getProps(final Map databases) return result; } - private Collection getDataSources(final Map databases) { - return databases.values().stream().filter(each -> !each.getResourceMetaData().getDataSources().isEmpty()) - .flatMap(each -> each.getResourceMetaData().getDataSources().values().stream()).collect(Collectors.toList()); + private Optional findStorageType(final Collection databases) { + return databases.stream().flatMap(each -> each.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().values().stream()).findFirst().map(StorageUnit::getStorageType); } /** @@ -72,9 +72,4 @@ private Collection getDataSources(final Map getSQLLogger(final RuleMetaData glo /** * Get ShardingSphere-SQL logger. 
* - * @param loggingRuleConfiguration logging global rule configuration + * @param loggingRuleConfig logging global rule configuration * @return ShardingSphere-SQL logger */ - public static Optional getSQLLogger(final LoggingRuleConfiguration loggingRuleConfiguration) { - return loggingRuleConfiguration.getLoggers().stream() + public static Optional getSQLLogger(final LoggingRuleConfiguration loggingRuleConfig) { + return loggingRuleConfig.getLoggers().stream() .filter(each -> LoggingConstants.SQL_LOG_TOPIC.equalsIgnoreCase(each.getLoggerName())).findFirst(); } @@ -60,32 +60,32 @@ public static Optional getSQLLogger(final LoggingRuleConfi * Synchronize the log-related configuration in logging rule and props. * Use the configuration in the logging rule first. * - * @param loggingRuleConfiguration logging global rule configuration + * @param loggingRuleConfig logging global rule configuration * @param props configuration properties */ - public static void syncLoggingConfig(final LoggingRuleConfiguration loggingRuleConfiguration, final ConfigurationProperties props) { - LoggingUtils.getSQLLogger(loggingRuleConfiguration).ifPresent(option -> { - Properties loggerProperties = option.getProps(); - syncPropsToLoggingRule(loggerProperties, props); - syncLoggingRuleToProps(loggerProperties, props); + public static void syncLoggingConfig(final LoggingRuleConfiguration loggingRuleConfig, final ConfigurationProperties props) { + LoggingUtils.getSQLLogger(loggingRuleConfig).ifPresent(option -> { + Properties loggerProps = option.getProps(); + syncPropsToLoggingRule(loggerProps, props); + syncLoggingRuleToProps(loggerProps, props); }); } - private static void syncPropsToLoggingRule(final Properties loggerProperties, final ConfigurationProperties props) { - if (!loggerProperties.containsKey(LoggingConstants.SQL_LOG_ENABLE) && props.getProps().containsKey(LoggingConstants.SQL_SHOW)) { - loggerProperties.setProperty(LoggingConstants.SQL_LOG_ENABLE, 
props.getProps().get(LoggingConstants.SQL_SHOW).toString()); + private static void syncPropsToLoggingRule(final Properties loggerProps, final ConfigurationProperties props) { + if (!loggerProps.containsKey(LoggingConstants.SQL_LOG_ENABLE) && props.getProps().containsKey(LoggingConstants.SQL_SHOW)) { + loggerProps.setProperty(LoggingConstants.SQL_LOG_ENABLE, props.getProps().get(LoggingConstants.SQL_SHOW).toString()); } - if (!loggerProperties.containsKey(LoggingConstants.SQL_LOG_SIMPLE) && props.getProps().containsKey(LoggingConstants.SQL_SIMPLE)) { - loggerProperties.setProperty(LoggingConstants.SQL_LOG_SIMPLE, props.getProps().get(LoggingConstants.SQL_SIMPLE).toString()); + if (!loggerProps.containsKey(LoggingConstants.SQL_LOG_SIMPLE) && props.getProps().containsKey(LoggingConstants.SQL_SIMPLE)) { + loggerProps.setProperty(LoggingConstants.SQL_LOG_SIMPLE, props.getProps().get(LoggingConstants.SQL_SIMPLE).toString()); } } - private static void syncLoggingRuleToProps(final Properties loggerProperties, final ConfigurationProperties props) { - if (loggerProperties.containsKey(LoggingConstants.SQL_LOG_ENABLE)) { - props.getProps().setProperty(LoggingConstants.SQL_SHOW, loggerProperties.get(LoggingConstants.SQL_LOG_ENABLE).toString()); + private static void syncLoggingRuleToProps(final Properties loggerProps, final ConfigurationProperties props) { + if (loggerProps.containsKey(LoggingConstants.SQL_LOG_ENABLE)) { + props.getProps().setProperty(LoggingConstants.SQL_SHOW, loggerProps.get(LoggingConstants.SQL_LOG_ENABLE).toString()); } - if (loggerProperties.containsKey(LoggingConstants.SQL_LOG_SIMPLE)) { - props.getProps().setProperty(LoggingConstants.SQL_SIMPLE, loggerProperties.get(LoggingConstants.SQL_LOG_SIMPLE).toString()); + if (loggerProps.containsKey(LoggingConstants.SQL_LOG_SIMPLE)) { + props.getProps().setProperty(LoggingConstants.SQL_SIMPLE, loggerProps.get(LoggingConstants.SQL_LOG_SIMPLE).toString()); } } } diff --git 
a/kernel/logging/core/src/test/java/org/apache/shardingsphere/logging/rule/LoggingRuleTest.java b/kernel/logging/core/src/test/java/org/apache/shardingsphere/logging/rule/LoggingRuleTest.java index ce0031467e17b..17ff3eb3a79b5 100644 --- a/kernel/logging/core/src/test/java/org/apache/shardingsphere/logging/rule/LoggingRuleTest.java +++ b/kernel/logging/core/src/test/java/org/apache/shardingsphere/logging/rule/LoggingRuleTest.java @@ -38,11 +38,6 @@ void setup() { Collections.singleton(new ShardingSphereAppender("console", "ch.qos.logback.core.ConsoleAppender", "[%-5level] %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %logger{36} - %msg%n")))); } - @Test - void assertGetType() { - assertThat(loggingRule.getType(), is(LoggingRule.class.getSimpleName())); - } - @Test void assertFields() { assertThat(loggingRule.getConfiguration().getLoggers().size(), is(1)); diff --git a/kernel/logging/core/src/test/java/org/apache/shardingsphere/logging/yaml/swapper/YamlLoggingRuleConfigurationSwapperTest.java b/kernel/logging/core/src/test/java/org/apache/shardingsphere/logging/yaml/swapper/YamlLoggingRuleConfigurationSwapperTest.java index f69cc5433c3de..a15e73b75e830 100644 --- a/kernel/logging/core/src/test/java/org/apache/shardingsphere/logging/yaml/swapper/YamlLoggingRuleConfigurationSwapperTest.java +++ b/kernel/logging/core/src/test/java/org/apache/shardingsphere/logging/yaml/swapper/YamlLoggingRuleConfigurationSwapperTest.java @@ -38,9 +38,9 @@ class YamlLoggingRuleConfigurationSwapperTest { @Test void assertSwapToYamlConfiguration() { - YamlLoggingRuleConfiguration yamlLoggingRuleConfiguration = swapper.swapToYamlConfiguration(createLoggingRuleConfiguration()); - assertThat(yamlLoggingRuleConfiguration.getLoggers().size(), is(1)); - assertThat(yamlLoggingRuleConfiguration.getAppenders().size(), is(1)); + YamlLoggingRuleConfiguration yamlLoggingRuleConfig = swapper.swapToYamlConfiguration(createLoggingRuleConfiguration()); + assertThat(yamlLoggingRuleConfig.getLoggers().size(), 
is(1)); + assertThat(yamlLoggingRuleConfig.getAppenders().size(), is(1)); } private LoggingRuleConfiguration createLoggingRuleConfiguration() { @@ -50,9 +50,9 @@ private LoggingRuleConfiguration createLoggingRuleConfiguration() { @Test void assertSwapToObject() { - LoggingRuleConfiguration loggingRuleConfiguration = swapper.swapToObject(createYamlLoggingRuleConfiguration()); - assertThat(loggingRuleConfiguration.getLoggers().size(), is(1)); - assertThat(loggingRuleConfiguration.getAppenders().size(), is(1)); + LoggingRuleConfiguration loggingRuleConfig = swapper.swapToObject(createYamlLoggingRuleConfiguration()); + assertThat(loggingRuleConfig.getLoggers().size(), is(1)); + assertThat(loggingRuleConfig.getAppenders().size(), is(1)); } private YamlLoggingRuleConfiguration createYamlLoggingRuleConfiguration() { diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataBasedPersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataBasedPersistService.java index 5a83744f7d359..f4b6d6966595f 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataBasedPersistService.java +++ b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataBasedPersistService.java @@ -19,8 +19,8 @@ import org.apache.shardingsphere.infra.config.database.DatabaseConfiguration; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.config.DataSourceConfiguration; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.rule.ShardingSphereRule; import 
org.apache.shardingsphere.metadata.persist.data.ShardingSphereDataBasedPersistService; import org.apache.shardingsphere.metadata.persist.service.config.database.DatabaseBasedPersistService; @@ -52,14 +52,7 @@ public interface MetaDataBasedPersistService { * * @return persist service */ - DatabaseBasedPersistService> getDataSourceUnitService(); - - /** - * Get data source node service. - * - * @return persist service - */ - DatabaseBasedPersistService> getDataSourceNodeService(); + DatabaseBasedPersistService> getDataSourceUnitService(); /** * Get database meta data service. diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataPersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataPersistService.java index c824bf27693f6..9bb4a1961827b 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataPersistService.java +++ b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/MetaDataPersistService.java @@ -22,9 +22,10 @@ import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.config.rule.decorator.RuleConfigurationDecorator; import org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolDestroyer; -import org.apache.shardingsphere.infra.datasource.config.DataSourceConfiguration; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode; import 
org.apache.shardingsphere.infra.rule.ShardingSphereRule; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.metadata.persist.data.ShardingSphereDataPersistService; @@ -92,11 +93,11 @@ public void persistGlobalRuleConfiguration(final Collection g @Override public void persistConfigurations(final String databaseName, final DatabaseConfiguration databaseConfigs, final Map dataSources, final Collection rules) { - Map dataSourcePropertiesMap = getDataSourcePropertiesMap(databaseConfigs); - if (dataSourcePropertiesMap.isEmpty() && databaseConfigs.getRuleConfigurations().isEmpty()) { + Map propsMap = getDataSourcePoolPropertiesMap(databaseConfigs); + if (propsMap.isEmpty() && databaseConfigs.getRuleConfigurations().isEmpty()) { databaseMetaDataService.addDatabase(databaseName); } else { - dataSourceUnitService.persist(databaseName, dataSourcePropertiesMap); + dataSourceUnitService.persist(databaseName, propsMap); databaseRulePersistService.persist(databaseName, decorateRuleConfigs(databaseName, dataSources, rules)); } } @@ -112,28 +113,28 @@ private Collection decorateRuleConfigs(final String databaseN return result; } - private Map getDataSourcePropertiesMap(final DatabaseConfiguration databaseConfigs) { - if (!databaseConfigs.getDataSources().isEmpty() && databaseConfigs.getDataSourcePropsMap().isEmpty()) { - return getDataSourcePropertiesMap(databaseConfigs.getStorageResource().getStorageNodes()); + private Map getDataSourcePoolPropertiesMap(final DatabaseConfiguration databaseConfigs) { + if (!databaseConfigs.getDataSources().isEmpty() && databaseConfigs.getDataSourcePoolPropertiesMap().isEmpty()) { + return getDataSourcePoolPropertiesMap(databaseConfigs.getStorageResource().getStorageNodeDataSources()); } - return databaseConfigs.getDataSourcePropsMap(); + return databaseConfigs.getDataSourcePoolPropertiesMap(); } - private Map getDataSourcePropertiesMap(final Map dataSourceMap) { - Map result = new 
LinkedHashMap<>(dataSourceMap.size(), 1F); - for (Entry entry : dataSourceMap.entrySet()) { - result.put(entry.getKey(), DataSourcePropertiesCreator.create(entry.getValue())); + private Map getDataSourcePoolPropertiesMap(final Map storageNodeDataSources) { + Map result = new LinkedHashMap<>(storageNodeDataSources.size(), 1F); + for (Entry entry : storageNodeDataSources.entrySet()) { + result.put(entry.getKey().getName(), DataSourcePoolPropertiesCreator.create(entry.getValue())); } return result; } @Override public Map getEffectiveDataSources(final String databaseName, final Map databaseConfigs) { - Map persistedDataPropsMap = dataSourceUnitService.load(databaseName); + Map propsMap = dataSourceUnitService.load(databaseName); if (databaseConfigs.containsKey(databaseName) && !databaseConfigs.get(databaseName).getDataSources().isEmpty()) { - databaseConfigs.get(databaseName).getStorageResource().getStorageNodes().values().forEach(each -> new DataSourcePoolDestroyer(each).asyncDestroy()); + databaseConfigs.get(databaseName).getStorageResource().getStorageNodeDataSources().values().forEach(each -> new DataSourcePoolDestroyer(each).asyncDestroy()); } - return persistedDataPropsMap.entrySet().stream().collect(Collectors.toMap(Entry::getKey, - entry -> DataSourcePropertiesCreator.createConfiguration(entry.getValue()), (key, value) -> value, LinkedHashMap::new)); + return propsMap.entrySet().stream().collect(Collectors.toMap(Entry::getKey, + entry -> DataSourcePoolPropertiesCreator.createConfiguration(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); } } diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/NewMetaDataPersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/NewMetaDataPersistService.java index 40aa0000a6b5c..460be290390c0 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/NewMetaDataPersistService.java +++ 
b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/NewMetaDataPersistService.java @@ -21,10 +21,11 @@ import org.apache.shardingsphere.infra.config.database.DatabaseConfiguration; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.config.rule.decorator.RuleConfigurationDecorator; -import org.apache.shardingsphere.infra.datasource.config.DataSourceConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; import org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolDestroyer; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode; import org.apache.shardingsphere.infra.rule.ShardingSphereRule; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.metadata.persist.data.ShardingSphereDataPersistService; @@ -97,13 +98,12 @@ public void persistGlobalRuleConfiguration(final Collection g } @Override - public void persistConfigurations(final String databaseName, final DatabaseConfiguration databaseConfigs, - final Map dataSources, final Collection rules) { - Map dataSourcePropertiesMap = getDataSourcePropertiesMap(databaseConfigs); - if (dataSourcePropertiesMap.isEmpty() && databaseConfigs.getRuleConfigurations().isEmpty()) { + public void persistConfigurations(final String databaseName, final DatabaseConfiguration databaseConfigs, final Map dataSources, final Collection rules) { + Map propsMap = getDataSourcePoolPropertiesMap(databaseConfigs); + if (propsMap.isEmpty() && 
databaseConfigs.getRuleConfigurations().isEmpty()) { databaseMetaDataService.addDatabase(databaseName); } else { - dataSourceUnitService.persist(databaseName, dataSourcePropertiesMap); + dataSourceUnitService.persist(databaseName, propsMap); databaseRulePersistService.persist(databaseName, decorateRuleConfigs(databaseName, dataSources, rules)); } } @@ -119,17 +119,17 @@ private Collection decorateRuleConfigs(final String databaseN return result; } - private Map getDataSourcePropertiesMap(final DatabaseConfiguration databaseConfigs) { - if (!databaseConfigs.getDataSources().isEmpty() && databaseConfigs.getDataSourcePropsMap().isEmpty()) { - return getDataSourcePropertiesMap(databaseConfigs.getStorageResource().getStorageNodes()); + private Map getDataSourcePoolPropertiesMap(final DatabaseConfiguration databaseConfigs) { + if (!databaseConfigs.getDataSources().isEmpty() && databaseConfigs.getDataSourcePoolPropertiesMap().isEmpty()) { + return getDataSourcePoolPropertiesMap(databaseConfigs.getStorageResource().getStorageNodeDataSources()); } - return databaseConfigs.getDataSourcePropsMap(); + return databaseConfigs.getDataSourcePoolPropertiesMap(); } - private Map getDataSourcePropertiesMap(final Map dataSourceMap) { - Map result = new LinkedHashMap<>(dataSourceMap.size(), 1F); - for (Entry entry : dataSourceMap.entrySet()) { - result.put(entry.getKey(), DataSourcePropertiesCreator.create(entry.getValue())); + private Map getDataSourcePoolPropertiesMap(final Map storageNodeDataSources) { + Map result = new LinkedHashMap<>(storageNodeDataSources.size(), 1F); + for (Entry entry : storageNodeDataSources.entrySet()) { + result.put(entry.getKey().getName(), DataSourcePoolPropertiesCreator.create(entry.getValue())); } return result; } @@ -143,11 +143,11 @@ private Map getDataSourcePropertiesMap(final Map getEffectiveDataSources(final String databaseName, final Map databaseConfigs) { - Map persistedDataPropsMap = dataSourceUnitService.load(databaseName); + Map propsMap = 
dataSourceUnitService.load(databaseName); if (databaseConfigs.containsKey(databaseName) && !databaseConfigs.get(databaseName).getDataSources().isEmpty()) { - databaseConfigs.get(databaseName).getStorageResource().getStorageNodes().values().forEach(each -> new DataSourcePoolDestroyer(each).asyncDestroy()); + databaseConfigs.get(databaseName).getStorageResource().getStorageNodeDataSources().values().forEach(each -> new DataSourcePoolDestroyer(each).asyncDestroy()); } - return persistedDataPropsMap.entrySet().stream().collect(Collectors.toMap(Entry::getKey, - entry -> DataSourcePropertiesCreator.createConfiguration(entry.getValue()), (key, value) -> value, LinkedHashMap::new)); + return propsMap.entrySet().stream().collect(Collectors.toMap(Entry::getKey, + entry -> DataSourcePoolPropertiesCreator.createConfiguration(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); } } diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/DatabaseBasedPersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/DatabaseBasedPersistService.java index 3fe9cb3255006..0341199192633 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/DatabaseBasedPersistService.java +++ b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/DatabaseBasedPersistService.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.metadata.persist.service.config.database; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.version.MetaDataVersion; import java.util.Collection; @@ -92,8 +92,8 @@ default Collection persistConfig(String databaseName, T configs * Append data source 
properties map. * * @param databaseName database name - * @param toBeAppendedDataSourcePropsMap data source properties map to be appended + * @param toBeAppendedPropsMap data source pool properties map to be appended */ - default void append(final String databaseName, final Map toBeAppendedDataSourcePropsMap) { + default void append(final String databaseName, final Map toBeAppendedPropsMap) { } } diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceNodePersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceNodePersistService.java index 1e0e6884907ea..f706213e71ad0 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceNodePersistService.java +++ b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceNodePersistService.java @@ -19,7 +19,7 @@ import com.google.common.base.Strings; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; import org.apache.shardingsphere.metadata.persist.node.DatabaseMetaDataNode; @@ -35,14 +35,14 @@ * Data source node persist service. 
*/ @RequiredArgsConstructor -public final class DataSourceNodePersistService implements DatabaseBasedPersistService> { +public final class DataSourceNodePersistService implements DatabaseBasedPersistService> { private static final String DEFAULT_VERSION = "0"; private final PersistRepository repository; @Override - public void persist(final String databaseName, final Map dataSourceConfigs) { + public void persist(final String databaseName, final Map dataSourceConfigs) { if (Strings.isNullOrEmpty(getDatabaseActiveVersion(databaseName))) { repository.persist(DatabaseMetaDataNode.getActiveVersionPath(databaseName), DEFAULT_VERSION); } @@ -50,14 +50,14 @@ public void persist(final String databaseName, final Map> swapYamlDataSourceConfiguration(final Map dataSourcePropsMap) { - return dataSourcePropsMap.entrySet().stream() + private Map> swapYamlDataSourceConfiguration(final Map propsMap) { + return propsMap.entrySet().stream() .collect(Collectors.toMap(Entry::getKey, entry -> new YamlDataSourceConfigurationSwapper().swapToMap(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); } @Override - public Map load(final String databaseName) { - return isExisted(databaseName) ? getDataSourceProperties(repository.getDirectly( + public Map load(final String databaseName) { + return isExisted(databaseName) ? 
getDataSourcePoolProperties(repository.getDirectly( DatabaseMetaDataNode.getMetaDataDataSourceNodesPath(databaseName, getDatabaseActiveVersion(databaseName)))) : new LinkedHashMap<>(); } @@ -67,13 +67,13 @@ private boolean isExisted(final String databaseName) { } @SuppressWarnings("unchecked") - private Map getDataSourceProperties(final String yamlContent) { + private Map getDataSourcePoolProperties(final String yamlContent) { Map> yamlDataSources = YamlEngine.unmarshal(yamlContent, Map.class); if (yamlDataSources.isEmpty()) { return new LinkedHashMap<>(); } - Map result = new LinkedHashMap<>(yamlDataSources.size()); - yamlDataSources.forEach((key, value) -> result.put(key, new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(value))); + Map result = new LinkedHashMap<>(yamlDataSources.size()); + yamlDataSources.forEach((key, value) -> result.put(key, new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(value))); return result; } @@ -81,12 +81,12 @@ private Map getDataSourceProperties(final String y * Append data source properties map. 
* * @param databaseName database name - * @param toBeAppendedDataSourcePropsMap data source properties map to be appended + * @param toBeAppendedPropsMap data source properties map to be appended */ @Override - public void append(final String databaseName, final Map toBeAppendedDataSourcePropsMap) { - Map dataSourceConfigs = load(databaseName); - dataSourceConfigs.putAll(toBeAppendedDataSourcePropsMap); + public void append(final String databaseName, final Map toBeAppendedPropsMap) { + Map dataSourceConfigs = load(databaseName); + dataSourceConfigs.putAll(toBeAppendedPropsMap); persist(databaseName, dataSourceConfigs); } diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceUnitPersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceUnitPersistService.java index e8edd7e613b10..1aadc20d0bdec 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceUnitPersistService.java +++ b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/DataSourceUnitPersistService.java @@ -19,7 +19,7 @@ import com.google.common.base.Strings; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; import org.apache.shardingsphere.metadata.persist.node.DatabaseMetaDataNode; @@ -35,14 +35,14 @@ * Data source unit persist service. 
*/ @RequiredArgsConstructor -public final class DataSourceUnitPersistService implements DatabaseBasedPersistService> { +public final class DataSourceUnitPersistService implements DatabaseBasedPersistService> { private static final String DEFAULT_VERSION = "0"; private final PersistRepository repository; @Override - public void persist(final String databaseName, final Map dataSourceConfigs) { + public void persist(final String databaseName, final Map dataSourceConfigs) { if (Strings.isNullOrEmpty(getDatabaseActiveVersion(databaseName))) { repository.persist(DatabaseMetaDataNode.getActiveVersionPath(databaseName), DEFAULT_VERSION); } @@ -50,14 +50,14 @@ public void persist(final String databaseName, final Map> swapYamlDataSourceConfiguration(final Map dataSourcePropsMap) { - return dataSourcePropsMap.entrySet().stream() + private Map> swapYamlDataSourceConfiguration(final Map propsMap) { + return propsMap.entrySet().stream() .collect(Collectors.toMap(Entry::getKey, entry -> new YamlDataSourceConfigurationSwapper().swapToMap(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); } @Override - public Map load(final String databaseName) { - return isExisted(databaseName) ? getDataSourceProperties(repository.getDirectly( + public Map load(final String databaseName) { + return isExisted(databaseName) ? 
getDataSourcePoolProperties(repository.getDirectly( DatabaseMetaDataNode.getMetaDataDataSourceUnitsPath(databaseName, getDatabaseActiveVersion(databaseName)))) : new LinkedHashMap<>(); } @@ -67,13 +67,13 @@ private boolean isExisted(final String databaseName) { } @SuppressWarnings("unchecked") - private Map getDataSourceProperties(final String yamlContent) { + private Map getDataSourcePoolProperties(final String yamlContent) { Map> yamlDataSources = YamlEngine.unmarshal(yamlContent, Map.class); if (yamlDataSources.isEmpty()) { return new LinkedHashMap<>(); } - Map result = new LinkedHashMap<>(yamlDataSources.size()); - yamlDataSources.forEach((key, value) -> result.put(key, new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(value))); + Map result = new LinkedHashMap<>(yamlDataSources.size()); + yamlDataSources.forEach((key, value) -> result.put(key, new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(value))); return result; } @@ -81,12 +81,12 @@ private Map getDataSourceProperties(final String y * Append data source properties map. 
* * @param databaseName database name - * @param toBeAppendedDataSourcePropsMap data source properties map to be appended + * @param toBeAppendedPropsMap data source properties map to be appended */ @Override - public void append(final String databaseName, final Map toBeAppendedDataSourcePropsMap) { - Map dataSourceConfigs = load(databaseName); - dataSourceConfigs.putAll(toBeAppendedDataSourcePropsMap); + public void append(final String databaseName, final Map toBeAppendedPropsMap) { + Map dataSourceConfigs = load(databaseName); + dataSourceConfigs.putAll(toBeAppendedPropsMap); persist(databaseName, dataSourceConfigs); } diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceNodePersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceNodePersistService.java index d3cdcb5ee315c..03b716ffee0fd 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceNodePersistService.java +++ b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceNodePersistService.java @@ -19,7 +19,7 @@ import com.google.common.base.Strings; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.version.MetaDataVersion; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; @@ -39,15 +39,15 @@ * New Data source node persist service. 
*/ @RequiredArgsConstructor -public final class NewDataSourceNodePersistService implements DatabaseBasedPersistService> { +public final class NewDataSourceNodePersistService implements DatabaseBasedPersistService> { private static final String DEFAULT_VERSION = "0"; private final PersistRepository repository; @Override - public void persist(final String databaseName, final Map dataSourceConfigs) { - for (Entry entry : dataSourceConfigs.entrySet()) { + public void persist(final String databaseName, final Map dataSourceConfigs) { + for (Entry entry : dataSourceConfigs.entrySet()) { String activeVersion = getDataSourceActiveVersion(databaseName, entry.getKey()); List versions = repository.getChildrenKeys(NewDatabaseMetaDataNode.getDataSourceNodeVersionsNode(databaseName, entry.getKey())); repository.persist(NewDatabaseMetaDataNode.getDataSourceNodeWithVersion(databaseName, entry.getKey(), versions.isEmpty() @@ -60,16 +60,16 @@ public void persist(final String databaseName, final Map dataSourceConfigs) { - for (Entry entry : dataSourceConfigs.entrySet()) { + public void delete(final String databaseName, final Map dataSourceConfigs) { + for (Entry entry : dataSourceConfigs.entrySet()) { repository.delete(NewDatabaseMetaDataNode.getDataSourceNode(databaseName, entry.getKey())); } } @Override - public Collection persistConfig(final String databaseName, final Map dataSourceConfigs) { + public Collection persistConfig(final String databaseName, final Map dataSourceConfigs) { Collection result = new LinkedList<>(); - for (Entry entry : dataSourceConfigs.entrySet()) { + for (Entry entry : dataSourceConfigs.entrySet()) { List versions = repository.getChildrenKeys(NewDatabaseMetaDataNode.getDataSourceNodeVersionsNode(databaseName, entry.getKey())); String nextActiveVersion = versions.isEmpty() ? 
DEFAULT_VERSION : String.valueOf(Integer.parseInt(versions.get(0)) + 1); repository.persist(NewDatabaseMetaDataNode.getDataSourceNodeWithVersion(databaseName, entry.getKey(), nextActiveVersion), @@ -85,12 +85,12 @@ public Collection persistConfig(final String databaseName, fina @SuppressWarnings("unchecked") @Override - public Map load(final String databaseName) { - Map result = new LinkedHashMap<>(); + public Map load(final String databaseName) { + Map result = new LinkedHashMap<>(); for (String each : repository.getChildrenKeys(NewDatabaseMetaDataNode.getDataSourceNodesNode(databaseName))) { String dataSourceValue = repository.getDirectly(NewDatabaseMetaDataNode.getDataSourceNodeWithVersion(databaseName, each, getDataSourceActiveVersion(databaseName, each))); if (!Strings.isNullOrEmpty(dataSourceValue)) { - result.put(each, new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); + result.put(each, new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); } } return result; @@ -98,18 +98,18 @@ public Map load(final String databaseName) { @SuppressWarnings("unchecked") @Override - public Map load(final String databaseName, final String name) { - Map result = new LinkedHashMap<>(); + public Map load(final String databaseName, final String name) { + Map result = new LinkedHashMap<>(); String dataSourceValue = repository.getDirectly(NewDatabaseMetaDataNode.getDataSourceNodeWithVersion(databaseName, name, getDataSourceActiveVersion(databaseName, name))); if (!Strings.isNullOrEmpty(dataSourceValue)) { - result.put(name, new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); + result.put(name, new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); } return result; } @Override - public void append(final String 
databaseName, final Map toBeAppendedDataSourcePropsMap) { - persist(databaseName, toBeAppendedDataSourcePropsMap); + public void append(final String databaseName, final Map toBeAppendedPropsMap) { + persist(databaseName, toBeAppendedPropsMap); } private String getDataSourceActiveVersion(final String databaseName, final String dataSourceName) { diff --git a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceUnitPersistService.java b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceUnitPersistService.java index 266a716a23b9e..eac8654fe27ef 100644 --- a/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceUnitPersistService.java +++ b/kernel/metadata/core/src/main/java/org/apache/shardingsphere/metadata/persist/service/config/database/datasource/NewDataSourceUnitPersistService.java @@ -19,7 +19,7 @@ import com.google.common.base.Strings; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.version.MetaDataVersion; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; @@ -39,15 +39,15 @@ * New Data source unit persist service. 
*/ @RequiredArgsConstructor -public final class NewDataSourceUnitPersistService implements DatabaseBasedPersistService> { +public final class NewDataSourceUnitPersistService implements DatabaseBasedPersistService> { private static final String DEFAULT_VERSION = "0"; private final PersistRepository repository; @Override - public void persist(final String databaseName, final Map dataSourceConfigs) { - for (Entry entry : dataSourceConfigs.entrySet()) { + public void persist(final String databaseName, final Map dataSourceConfigs) { + for (Entry entry : dataSourceConfigs.entrySet()) { String activeVersion = getDataSourceActiveVersion(databaseName, entry.getKey()); List versions = repository.getChildrenKeys(NewDatabaseMetaDataNode.getDataSourceUnitVersionsNode(databaseName, entry.getKey())); repository.persist(NewDatabaseMetaDataNode.getDataSourceUnitNodeWithVersion(databaseName, entry.getKey(), versions.isEmpty() @@ -60,16 +60,16 @@ public void persist(final String databaseName, final Map dataSourceConfigs) { - for (Entry entry : dataSourceConfigs.entrySet()) { + public void delete(final String databaseName, final Map dataSourceConfigs) { + for (Entry entry : dataSourceConfigs.entrySet()) { repository.delete(NewDatabaseMetaDataNode.getDataSourceUnitNode(databaseName, entry.getKey())); } } @Override - public Collection persistConfig(final String databaseName, final Map dataSourceConfigs) { + public Collection persistConfig(final String databaseName, final Map dataSourceConfigs) { Collection result = new LinkedList<>(); - for (Entry entry : dataSourceConfigs.entrySet()) { + for (Entry entry : dataSourceConfigs.entrySet()) { List versions = repository.getChildrenKeys(NewDatabaseMetaDataNode.getDataSourceUnitVersionsNode(databaseName, entry.getKey())); String nextActiveVersion = versions.isEmpty() ? 
DEFAULT_VERSION : String.valueOf(Integer.parseInt(versions.get(0)) + 1); repository.persist(NewDatabaseMetaDataNode.getDataSourceUnitNodeWithVersion(databaseName, entry.getKey(), nextActiveVersion), @@ -85,12 +85,12 @@ public Collection persistConfig(final String databaseName, fina @SuppressWarnings("unchecked") @Override - public Map load(final String databaseName) { - Map result = new LinkedHashMap<>(); + public Map load(final String databaseName) { + Map result = new LinkedHashMap<>(); for (String each : repository.getChildrenKeys(NewDatabaseMetaDataNode.getDataSourceUnitsNode(databaseName))) { String dataSourceValue = repository.getDirectly(NewDatabaseMetaDataNode.getDataSourceUnitNodeWithVersion(databaseName, each, getDataSourceActiveVersion(databaseName, each))); if (!Strings.isNullOrEmpty(dataSourceValue)) { - result.put(each, new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); + result.put(each, new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); } } return result; @@ -98,18 +98,18 @@ public Map load(final String databaseName) { @SuppressWarnings("unchecked") @Override - public Map load(final String databaseName, final String name) { - Map result = new LinkedHashMap<>(); + public Map load(final String databaseName, final String name) { + Map result = new LinkedHashMap<>(); String dataSourceValue = repository.getDirectly(NewDatabaseMetaDataNode.getDataSourceUnitNodeWithVersion(databaseName, name, getDataSourceActiveVersion(databaseName, name))); if (!Strings.isNullOrEmpty(dataSourceValue)) { - result.put(name, new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); + result.put(name, new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(YamlEngine.unmarshal(dataSourceValue, Map.class))); } return result; } @Override - public void 
append(final String databaseName, final Map toBeAppendedDataSourcePropsMap) { - persist(databaseName, toBeAppendedDataSourcePropsMap); + public void append(final String databaseName, final Map toBeAppendedPropsMap) { + persist(databaseName, toBeAppendedPropsMap); } private String getDataSourceActiveVersion(final String databaseName, final String dataSourceName) { diff --git a/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/factory/ExternalMetaDataFactoryTest.java b/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/factory/ExternalMetaDataFactoryTest.java index b43415650495e..f5a5b39bce5b4 100644 --- a/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/factory/ExternalMetaDataFactoryTest.java +++ b/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/factory/ExternalMetaDataFactoryTest.java @@ -41,7 +41,7 @@ void assertCreateSingleDatabase() throws SQLException { DatabaseConfiguration databaseConfig = new DataSourceProvidedDatabaseConfiguration(Collections.emptyMap(), Collections.emptyList()); ShardingSphereDatabase actual = ExternalMetaDataFactory.create("foo_db", databaseConfig, new ConfigurationProperties(new Properties()), mock(InstanceContext.class)); assertThat(actual.getName(), is("foo_db")); - assertTrue(actual.getResourceMetaData().getDataSources().isEmpty()); + assertTrue(actual.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().isEmpty()); } @Test @@ -50,7 +50,7 @@ void assertCreateDatabaseMap() throws SQLException { Map actual = ExternalMetaDataFactory.create( Collections.singletonMap("foo_db", databaseConfig), new ConfigurationProperties(new Properties()), mock(InstanceContext.class)); assertTrue(actual.containsKey("foo_db")); - assertTrue(actual.get("foo_db").getResourceMetaData().getDataSources().isEmpty()); + assertTrue(actual.get("foo_db").getResourceMetaData().getStorageUnitMetaData().getStorageUnits().isEmpty()); } @Test @@ -59,6 +59,6 @@ void 
assertCreateDatabaseMapWhenConfigUppercaseDatabaseName() throws SQLExceptio Map actual = ExternalMetaDataFactory.create( Collections.singletonMap("FOO_DB", databaseConfig), new ConfigurationProperties(new Properties()), mock(InstanceContext.class)); assertTrue(actual.containsKey("foo_db")); - assertTrue(actual.get("foo_db").getResourceMetaData().getDataSources().isEmpty()); + assertTrue(actual.get("foo_db").getResourceMetaData().getStorageUnitMetaData().getStorageUnits().isEmpty()); } } diff --git a/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/persist/MetaDataPersistServiceTest.java b/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/persist/MetaDataPersistServiceTest.java index 09016255a32f6..aabe0b3b9c83b 100644 --- a/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/persist/MetaDataPersistServiceTest.java +++ b/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/persist/MetaDataPersistServiceTest.java @@ -21,7 +21,7 @@ import org.apache.shardingsphere.infra.config.database.DatabaseConfiguration; import org.apache.shardingsphere.infra.config.database.impl.DataSourceProvidedDatabaseConfiguration; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.config.DataSourceConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.yaml.config.swapper.rule.YamlRuleConfigurationSwapperEngine; import org.apache.shardingsphere.metadata.persist.service.config.database.datasource.DataSourceUnitPersistService; @@ -87,7 +87,7 @@ private void setField(final String name, final Object value) throws ReflectiveOp @Test void assertGetEffectiveDataSources() { Map dataSourceMap = createDataSourceMap(); - Collection ruleConfigs = new 
YamlRuleConfigurationSwapperEngine().swapToRuleConfigurations(YamlEngine.unmarshal(readYAML(SCHEMA_RULE_YAML), Collection.class)); + Collection ruleConfigs = new YamlRuleConfigurationSwapperEngine().swapToRuleConfigurations(YamlEngine.unmarshal(readYAML(), Collection.class)); Map databaseConfigs = Collections.singletonMap("foo_db", new DataSourceProvidedDatabaseConfiguration(dataSourceMap, ruleConfigs)); Map resultEffectiveDataSources = metaDataPersistService.getEffectiveDataSources("foo_db", databaseConfigs); assertTrue(resultEffectiveDataSources.isEmpty()); @@ -110,7 +110,7 @@ private DataSource createDataSource(final String name) { } @SneakyThrows({IOException.class, URISyntaxException.class}) - private String readYAML(final String yamlFile) { - return Files.readAllLines(Paths.get(ClassLoader.getSystemResource(yamlFile).toURI())).stream().map(each -> each + System.lineSeparator()).collect(Collectors.joining()); + private String readYAML() { + return Files.readAllLines(Paths.get(ClassLoader.getSystemResource(SCHEMA_RULE_YAML).toURI())).stream().map(each -> each + System.lineSeparator()).collect(Collectors.joining()); } } diff --git a/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/persist/service/config/database/DataSourceUnitPersistServiceTest.java b/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/persist/service/config/database/DataSourceUnitPersistServiceTest.java index 290a871c7df76..eb502feb2b879 100644 --- a/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/persist/service/config/database/DataSourceUnitPersistServiceTest.java +++ b/kernel/metadata/core/src/test/java/org/apache/shardingsphere/metadata/persist/service/config/database/DataSourceUnitPersistServiceTest.java @@ -18,8 +18,8 @@ package org.apache.shardingsphere.metadata.persist.service.config.database; import lombok.SneakyThrows; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import 
org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.metadata.persist.service.config.database.datasource.DataSourceUnitPersistService; import org.apache.shardingsphere.mode.spi.PersistRepository; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; @@ -54,10 +54,10 @@ class DataSourceUnitPersistServiceTest { void assertLoad() { when(repository.getDirectly("/metadata/foo_db/active_version")).thenReturn("0"); when(repository.getDirectly("/metadata/foo_db/versions/0/data_sources/units")).thenReturn(readDataSourceYaml("yaml/persist/data-source.yaml")); - Map actual = new DataSourceUnitPersistService(repository).load("foo_db"); + Map actual = new DataSourceUnitPersistService(repository).load("foo_db"); assertThat(actual.size(), is(2)); - assertDataSourceProperties(actual.get("ds_0"), DataSourcePropertiesCreator.create(createDataSource("ds_0"))); - assertDataSourceProperties(actual.get("ds_1"), DataSourcePropertiesCreator.create(createDataSource("ds_1"))); + assertDataSourcePoolProperties(actual.get("ds_0"), DataSourcePoolPropertiesCreator.create(createDataSource("ds_0"))); + assertDataSourcePoolProperties(actual.get("ds_1"), DataSourcePoolPropertiesCreator.create(createDataSource("ds_1"))); } @SneakyThrows({IOException.class, URISyntaxException.class}) @@ -66,8 +66,8 @@ private String readDataSourceYaml(final String path) { .stream().filter(each -> !"".equals(each.trim()) && !each.startsWith("#")).map(each -> each + System.lineSeparator()).collect(Collectors.joining()); } - private void assertDataSourceProperties(final DataSourceProperties actual, final DataSourceProperties expected) { - assertThat(actual.getDataSourceClassName(), is(expected.getDataSourceClassName())); + private void 
assertDataSourcePoolProperties(final DataSourcePoolProperties actual, final DataSourcePoolProperties expected) { + assertThat(actual.getPoolClassName(), is(expected.getPoolClassName())); assertThat(actual.getAllLocalProperties().get("url"), is(expected.getAllLocalProperties().get("url"))); assertThat(actual.getAllLocalProperties().get("username"), is(expected.getAllLocalProperties().get("username"))); assertThat(actual.getAllLocalProperties().get("password"), is(expected.getAllLocalProperties().get("password"))); @@ -77,14 +77,14 @@ private void assertDataSourceProperties(final DataSourceProperties actual, final @Test void assertLoadWithoutPath() { when(repository.getDirectly("/metadata/foo_db/active_version")).thenReturn("0"); - Map actual = new DataSourceUnitPersistService(repository).load("foo_db"); + Map actual = new DataSourceUnitPersistService(repository).load("foo_db"); assertTrue(actual.isEmpty()); } @Test void assertAppend() { when(repository.getDirectly("/metadata/foo_db/active_version")).thenReturn("0"); - new DataSourceUnitPersistService(repository).append("foo_db", Collections.singletonMap("foo_ds", DataSourcePropertiesCreator.create(createDataSource("foo_ds")))); + new DataSourceUnitPersistService(repository).append("foo_db", Collections.singletonMap("foo_ds", DataSourcePoolPropertiesCreator.create(createDataSource("foo_ds")))); String expected = readDataSourceYaml("yaml/persist/data-source-foo.yaml"); verify(repository).persist("/metadata/foo_db/versions/0/data_sources/units", expected); } diff --git a/kernel/metadata/core/src/test/resources/yaml/persist/data-source-foo.yaml b/kernel/metadata/core/src/test/resources/yaml/persist/data-source-foo.yaml index 00e18cb08dbcc..00107256be7db 100644 --- a/kernel/metadata/core/src/test/resources/yaml/persist/data-source-foo.yaml +++ b/kernel/metadata/core/src/test/resources/yaml/persist/data-source-foo.yaml @@ -17,6 +17,7 @@ foo_ds: password: root + closed: false dataSourceClassName: 
org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource connectionInitSqls: # No spaces after conversion diff --git a/kernel/single/core/src/main/java/org/apache/shardingsphere/single/datanode/SingleTableDataNodeLoader.java b/kernel/single/core/src/main/java/org/apache/shardingsphere/single/datanode/SingleTableDataNodeLoader.java index 4b4c75bdc5ec1..bb5d3ac96c5bc 100644 --- a/kernel/single/core/src/main/java/org/apache/shardingsphere/single/datanode/SingleTableDataNodeLoader.java +++ b/kernel/single/core/src/main/java/org/apache/shardingsphere/single/datanode/SingleTableDataNodeLoader.java @@ -127,7 +127,7 @@ private static Map> loadSpecifiedDataNodes(final Ma private static Collection loadSpecifiedDataNode(final Collection dataNodes, final Collection featureRequiredSingleTables, final Map>> configuredTableMap) { - for (final DataNode each : dataNodes) { + for (DataNode each : dataNodes) { if (featureRequiredSingleTables.contains(each.getTableName())) { return getSingleDataNodeCollection(each); } @@ -142,7 +142,7 @@ private static Collection loadSpecifiedDataNode(final Collection { @Override public RouteContext createRouteContext(final QueryContext queryContext, final RuleMetaData globalRuleMetaData, final ShardingSphereDatabase database, final SingleRule rule, final ConfigurationProperties props, final ConnectionContext connectionContext) { - if (1 == database.getResourceMetaData().getDataSources().size()) { - return createSingleDataSourceRouteContext(rule, database); + if (1 == database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().size()) { + return createSingleDataSourceRouteContext(rule, database, queryContext); } RouteContext result = new RouteContext(); SQLStatementContext sqlStatementContext = queryContext.getSqlStatementContext(); @@ -80,11 +80,19 @@ public void decorateRouteContext(final RouteContext routeContext, final QueryCon SingleRouteEngineFactory.newInstance(singleTables, 
sqlStatementContext.getSqlStatement()).ifPresent(optional -> optional.route(routeContext, rule)); } - private RouteContext createSingleDataSourceRouteContext(final SingleRule rule, final ShardingSphereDatabase database) { + private RouteContext createSingleDataSourceRouteContext(final SingleRule rule, final ShardingSphereDatabase database, final QueryContext queryContext) { String logicDataSource = rule.getDataSourceNames().iterator().next(); - String actualDataSource = database.getResourceMetaData().getDataSources().keySet().iterator().next(); + String actualDataSource = database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().keySet().iterator().next(); RouteContext result = new RouteContext(); - result.getRouteUnits().add(new RouteUnit(new RouteMapper(logicDataSource, actualDataSource), Collections.emptyList())); + result.getRouteUnits().add(new RouteUnit(new RouteMapper(logicDataSource, actualDataSource), createTableMappers(queryContext.getSqlStatementContext().getTablesContext().getTableNames()))); + return result; + } + + private Collection createTableMappers(final Collection tableNames) { + Collection result = new LinkedList<>(); + for (String each : tableNames) { + result.add(new RouteMapper(each, each)); + } return result; } diff --git a/kernel/single/core/src/main/java/org/apache/shardingsphere/single/route/engine/SingleStandardRouteEngine.java b/kernel/single/core/src/main/java/org/apache/shardingsphere/single/route/engine/SingleStandardRouteEngine.java index 1b40e868e9aa3..75740ecca3259 100644 --- a/kernel/single/core/src/main/java/org/apache/shardingsphere/single/route/engine/SingleStandardRouteEngine.java +++ b/kernel/single/core/src/main/java/org/apache/shardingsphere/single/route/engine/SingleStandardRouteEngine.java @@ -85,12 +85,12 @@ private Collection getDataNodes(final RouteContext routeContext) { private void routeDDLStatement(final RouteContext routeContext, final SingleRule rule) { if (sqlStatement instanceof 
CreateTableStatement) { QualifiedTable table = singleTables.iterator().next(); - Optional dataNodeOptional = rule.findTableDataNode(table.getSchemaName(), table.getTableName()); + Optional dataNode = rule.findTableDataNode(table.getSchemaName(), table.getTableName()); boolean containsIfNotExists = CreateTableStatementHandler.ifNotExists((CreateTableStatement) sqlStatement); - if (dataNodeOptional.isPresent() && containsIfNotExists) { - String dataSourceName = dataNodeOptional.map(DataNode::getDataSourceName).orElse(null); + if (dataNode.isPresent() && containsIfNotExists) { + String dataSourceName = dataNode.map(DataNode::getDataSourceName).orElse(null); routeContext.getRouteUnits().add(new RouteUnit(new RouteMapper(dataSourceName, dataSourceName), Collections.singleton(new RouteMapper(table.getTableName(), table.getTableName())))); - } else if (dataNodeOptional.isPresent()) { + } else if (dataNode.isPresent()) { throw new TableExistsException(table.getTableName()); } else { String dataSourceName = rule.assignNewDataSourceName(); @@ -105,9 +105,7 @@ private void fillRouteContext(final SingleRule singleRule, final RouteContext ro for (QualifiedTable each : logicTables) { String tableName = each.getTableName(); Optional dataNode = singleRule.findTableDataNode(each.getSchemaName(), tableName); - if (!dataNode.isPresent()) { - throw new SingleTableNotFoundException(tableName); - } + ShardingSpherePreconditions.checkState(dataNode.isPresent(), () -> new SingleTableNotFoundException(tableName)); String dataSource = dataNode.get().getDataSourceName(); routeContext.putRouteUnit(new RouteMapper(dataSource, dataSource), Collections.singletonList(new RouteMapper(tableName, tableName))); } diff --git a/kernel/single/core/src/main/java/org/apache/shardingsphere/single/rule/SingleRule.java b/kernel/single/core/src/main/java/org/apache/shardingsphere/single/rule/SingleRule.java index c3b7908fdd125..9394b796bcb16 100644 --- 
a/kernel/single/core/src/main/java/org/apache/shardingsphere/single/rule/SingleRule.java +++ b/kernel/single/core/src/main/java/org/apache/shardingsphere/single/rule/SingleRule.java @@ -309,9 +309,4 @@ public TableNamesMapper getEnhancedTableMapper() { public Map getExportData() { return Collections.singletonMap(ExportableConstants.EXPORT_SINGLE_TABLES, tableNamesMapper.getTableNames()); } - - @Override - public String getType() { - return SingleRule.class.getSimpleName(); - } } diff --git a/kernel/single/core/src/test/java/org/apache/shardingsphere/single/route/SingleSQLRouterTest.java b/kernel/single/core/src/test/java/org/apache/shardingsphere/single/route/SingleSQLRouterTest.java index 0554299ba310b..421483fe3a69c 100644 --- a/kernel/single/core/src/test/java/org/apache/shardingsphere/single/route/SingleSQLRouterTest.java +++ b/kernel/single/core/src/test/java/org/apache/shardingsphere/single/route/SingleSQLRouterTest.java @@ -24,6 +24,7 @@ import org.apache.shardingsphere.infra.datanode.DataNode; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.route.SQLRouter; @@ -57,7 +58,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; @@ -77,12 +78,12 @@ void assertCreateRouteContextWithSingleDataSource() throws SQLException { RouteUnit routeUnit = 
actual.getRouteUnits().iterator().next(); assertThat(routeUnit.getDataSourceMapper().getLogicName(), is("foo_ds")); assertThat(routeUnit.getDataSourceMapper().getActualName(), is("foo_ds")); - assertTrue(routeUnit.getTableMappers().isEmpty()); + assertFalse(routeUnit.getTableMappers().isEmpty()); } private ShardingSphereDatabase mockSingleDatabase() { ShardingSphereDatabase result = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); - when(result.getResourceMetaData().getDataSources()).thenReturn(Collections.singletonMap("foo_ds", new MockedDataSource())); + when(result.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("foo_ds", mock(StorageUnit.class))); return result; } @@ -98,13 +99,13 @@ void assertCreateRouteContextWithReadwriteSplittingDataSource() throws SQLExcept RouteUnit routeUnit = actual.getRouteUnits().iterator().next(); assertThat(routeUnit.getDataSourceMapper().getLogicName(), is("readwrite_ds")); assertThat(routeUnit.getDataSourceMapper().getActualName(), is("write_ds")); - assertTrue(routeUnit.getTableMappers().isEmpty()); + assertFalse(routeUnit.getTableMappers().isEmpty()); } private ShardingSphereDatabase mockReadwriteSplittingDatabase() { ShardingSphereDatabase result = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); when(result.getName()).thenReturn(" db_schema"); - when(result.getResourceMetaData().getDataSources()).thenReturn(Collections.singletonMap("write_ds", new MockedDataSource())); + when(result.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("write_ds", mock(StorageUnit.class))); return result; } @@ -148,7 +149,7 @@ private ShardingSphereDatabase mockDatabaseWithMultipleResources() { Map dataSourceMap = new HashMap<>(2, 1F); dataSourceMap.put("ds_0", new MockedDataSource()); dataSourceMap.put("ds_1", new MockedDataSource()); - when(result.getResourceMetaData().getDataSources()).thenReturn(dataSourceMap); + 
when(result.getResourceMetaData().getStorageUnitMetaData().getDataSources()).thenReturn(dataSourceMap); when(result.getName()).thenReturn(DefaultDatabase.LOGIC_NAME); ShardingSphereSchema schema = mock(ShardingSphereSchema.class); when(schema.containsTable("t_order")).thenReturn(true); diff --git a/kernel/single/core/src/test/java/org/apache/shardingsphere/single/route/engine/SingleStandardRouteEngineTest.java b/kernel/single/core/src/test/java/org/apache/shardingsphere/single/route/engine/SingleStandardRouteEngineTest.java index 414193b9e3cca..55c072e28cbe7 100644 --- a/kernel/single/core/src/test/java/org/apache/shardingsphere/single/route/engine/SingleStandardRouteEngineTest.java +++ b/kernel/single/core/src/test/java/org/apache/shardingsphere/single/route/engine/SingleStandardRouteEngineTest.java @@ -61,8 +61,8 @@ class SingleStandardRouteEngineTest { void assertRouteInSameDataSource() throws SQLException { SingleStandardRouteEngine engine = new SingleStandardRouteEngine(mockQualifiedTables(), null); SingleRule singleRule = new SingleRule(new SingleRuleConfiguration(), DefaultDatabase.LOGIC_NAME, createDataSourceMap(), Collections.emptyList()); - singleRule.getSingleTableDataNodes().put("t_order", Collections.singletonList(mockDataNode("t_order"))); - singleRule.getSingleTableDataNodes().put("t_order_item", Collections.singletonList(mockDataNode("t_order_item"))); + singleRule.getSingleTableDataNodes().put("t_order", Collections.singleton(mockDataNode("t_order"))); + singleRule.getSingleTableDataNodes().put("t_order_item", Collections.singleton(mockDataNode("t_order_item"))); RouteContext routeContext = new RouteContext(); engine.route(routeContext, singleRule); List routeUnits = new ArrayList<>(routeContext.getRouteUnits()); @@ -130,13 +130,13 @@ private Map createDataSourceMap() throws SQLException { @Test void assertRouteDuplicateSingleTable() { - SingleStandardRouteEngine engine = new SingleStandardRouteEngine(Collections.singletonList(new 
QualifiedTable(DefaultDatabase.LOGIC_NAME, "t_order")), mockStatement(false)); + SingleStandardRouteEngine engine = new SingleStandardRouteEngine(Collections.singleton(new QualifiedTable(DefaultDatabase.LOGIC_NAME, "t_order")), mockStatement(false)); assertThrows(TableExistsException.class, () -> engine.route(new RouteContext(), mockSingleRule())); } @Test void assertRouteIfNotExistsDuplicateSingleTable() { - SingleStandardRouteEngine engine = new SingleStandardRouteEngine(Collections.singletonList(new QualifiedTable(DefaultDatabase.LOGIC_NAME, "t_order")), mockStatement(true)); + SingleStandardRouteEngine engine = new SingleStandardRouteEngine(Collections.singleton(new QualifiedTable(DefaultDatabase.LOGIC_NAME, "t_order")), mockStatement(true)); assertDoesNotThrow(() -> engine.route(new RouteContext(), mockSingleRule())); } diff --git a/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/query/ShowDefaultSingleTableStorageUnitExecutor.java b/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/query/ShowDefaultSingleTableStorageUnitExecutor.java index ddeb668309181..ff60858f03c10 100644 --- a/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/query/ShowDefaultSingleTableStorageUnitExecutor.java +++ b/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/query/ShowDefaultSingleTableStorageUnitExecutor.java @@ -38,9 +38,9 @@ public Collection getColumnNames() { } @Override - public Collection getRows(final ShardingSphereDatabase shardingSphereDatabase, final ShowDefaultSingleTableStorageUnitStatement sqlStatement) { + public Collection getRows(final ShardingSphereDatabase database, final ShowDefaultSingleTableStorageUnitStatement sqlStatement) { Collection result = new LinkedList<>(); - SingleRule rule = shardingSphereDatabase.getRuleMetaData().getSingleRule(SingleRule.class); + SingleRule rule = 
database.getRuleMetaData().getSingleRule(SingleRule.class); result.add(new LocalDataQueryResultRow(rule.getConfiguration().getDefaultDataSource().orElse("RANDOM"))); return result; } diff --git a/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/query/ShowUnloadedSingleTableExecutor.java b/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/query/ShowUnloadedSingleTableExecutor.java index d3d23805cdc62..8f4897a99ee43 100644 --- a/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/query/ShowUnloadedSingleTableExecutor.java +++ b/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/query/ShowUnloadedSingleTableExecutor.java @@ -60,7 +60,8 @@ public Collection getRows(final ShardingSphereDatabase private Map> getActualDataNodes(final ShardingSphereDatabase database) { ResourceMetaData resourceMetaData = database.getResourceMetaData(); - Map aggregateDataSourceMap = SingleTableLoadUtils.getAggregatedDataSourceMap(resourceMetaData.getDataSources(), database.getRuleMetaData().getRules()); + Map aggregateDataSourceMap = SingleTableLoadUtils.getAggregatedDataSourceMap( + resourceMetaData.getStorageUnitMetaData().getDataSources(), database.getRuleMetaData().getRules()); Collection excludedTables = SingleTableLoadUtils.getExcludedTables(database.getRuleMetaData().getRules()); return SingleTableDataNodeLoader.load(database.getName(), database.getProtocolType(), aggregateDataSourceMap, excludedTables); } diff --git a/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/update/LoadSingleTableStatementUpdater.java b/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/update/LoadSingleTableStatementUpdater.java index ece0779d4afb3..63a134642e384 100644 --- 
a/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/update/LoadSingleTableStatementUpdater.java +++ b/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/update/LoadSingleTableStatementUpdater.java @@ -112,7 +112,8 @@ private void checkActualTableExist(final ShardingSphereDatabase database, final return; } ResourceMetaData resourceMetaData = database.getResourceMetaData(); - Map aggregateDataSourceMap = SingleTableLoadUtils.getAggregatedDataSourceMap(resourceMetaData.getDataSources(), database.getRuleMetaData().getRules()); + Map aggregateDataSourceMap = SingleTableLoadUtils.getAggregatedDataSourceMap( + resourceMetaData.getStorageUnitMetaData().getDataSources(), database.getRuleMetaData().getRules()); Map>> actualTableNodes = new LinkedHashMap<>(); for (String each : requiredDataSources) { Map> schemaTableNames = SingleTableDataNodeLoader.loadSchemaTableNames(database.getName(), database.getProtocolType(), aggregateDataSourceMap.get(each), each); diff --git a/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/update/SetDefaultSingleTableStorageUnitStatementUpdater.java b/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/update/SetDefaultSingleTableStorageUnitStatementUpdater.java index 6a1dfbc2f86a2..f353939f28650 100644 --- a/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/update/SetDefaultSingleTableStorageUnitStatementUpdater.java +++ b/kernel/single/distsql/handler/src/main/java/org/apache/shardingsphere/single/distsql/handler/update/SetDefaultSingleTableStorageUnitStatementUpdater.java @@ -40,7 +40,7 @@ public void checkSQLStatement(final ShardingSphereDatabase database, final SetDe private void checkStorageUnitExist(final ShardingSphereDatabase database, final SetDefaultSingleTableStorageUnitStatement sqlStatement) { if 
(!Strings.isNullOrEmpty(sqlStatement.getDefaultStorageUnit())) { - Collection storageUnitNames = database.getResourceMetaData().getDataSources().keySet(); + Collection storageUnitNames = database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().keySet(); ShardingSpherePreconditions.checkState(storageUnitNames.contains(sqlStatement.getDefaultStorageUnit()), () -> new MissingRequiredStorageUnitsException(database.getName(), Collections.singleton(sqlStatement.getDefaultStorageUnit()))); } diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/SQLNodeConverterEngine.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/SQLNodeConverterEngine.java index 756e92dede074..eb8a934fa9f64 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/SQLNodeConverterEngine.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/SQLNodeConverterEngine.java @@ -21,9 +21,15 @@ import lombok.NoArgsConstructor; import org.apache.calcite.sql.SqlNode; import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dal.ExplainStatement; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.DeleteStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.UpdateStatement; import org.apache.shardingsphere.sqlfederation.compiler.converter.statement.delete.DeleteStatementConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.statement.explain.ExplainStatementConverter; +import 
org.apache.shardingsphere.sqlfederation.compiler.converter.statement.insert.InsertStatementConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.statement.update.UpdateStatementConverter; import org.apache.shardingsphere.sqlfederation.exception.OptimizationSQLNodeConvertException; import org.apache.shardingsphere.sqlfederation.compiler.converter.statement.select.SelectStatementConverter; @@ -47,6 +53,15 @@ public static SqlNode convert(final SQLStatement statement) { if (statement instanceof DeleteStatement) { return new DeleteStatementConverter().convert((DeleteStatement) statement); } + if (statement instanceof ExplainStatement) { + return new ExplainStatementConverter().convert((ExplainStatement) statement); + } + if (statement instanceof UpdateStatement) { + return new UpdateStatementConverter().convert((UpdateStatement) statement); + } + if (statement instanceof InsertStatement) { + return new InsertStatementConverter().convert((InsertStatement) statement); + } throw new OptimizationSQLNodeConvertException(statement); } } diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/ExpressionConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/ExpressionConverter.java index 5077462596d82..248283a17c973 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/ExpressionConverter.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/ExpressionConverter.java @@ -19,6 +19,7 @@ import org.apache.calcite.sql.SqlNode; import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnsupportedSQLOperationException; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dal.VariableSegment; import 
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BetweenExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; @@ -37,12 +38,16 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.MatchAgainstExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.AggregationProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.NotExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.CollateExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.RowExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.UnaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.DataTypeSegment; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.SQLSegmentConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.BetweenExpressionConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.BinaryOperationExpressionConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.CaseWhenExpressionConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.ColumnConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.CollateExpressionConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.ExistsSubqueryExpressionConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.ExtractArgExpressionConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.FunctionConverter; @@ -52,10 +57,13 
@@ import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.ParameterMarkerExpressionConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.SubqueryExpressionConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.TypeCastExpressionConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.UnaryOperationExpressionConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.projection.impl.AggregationProjectionConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.projection.impl.DataTypeConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.NotExpressionConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.MatchExpressionConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.RowExpressionConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.VariableSegmentConverter; import java.util.Optional; @@ -124,6 +132,18 @@ public Optional convert(final ExpressionSegment segment) { if (segment instanceof MatchAgainstExpression) { return new MatchExpressionConverter().convert((MatchAgainstExpression) segment); } + if (segment instanceof CollateExpression) { + return new CollateExpressionConverter().convert((CollateExpression) segment); + } + if (segment instanceof RowExpression) { + return new RowExpressionConverter().convert((RowExpression) segment); + } + if (segment instanceof VariableSegment) { + return new VariableSegmentConverter().convert((VariableSegment) segment); + } + if (segment instanceof UnaryOperationExpression) { + return new UnaryOperationExpressionConverter().convert((UnaryOperationExpression) segment); + } throw new 
UnsupportedSQLOperationException("unsupported TableSegment type: " + segment.getClass()); } } diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/BinaryOperationExpressionConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/BinaryOperationExpressionConverter.java index cd4b2f43a86bd..729913874f219 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/BinaryOperationExpressionConverter.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/BinaryOperationExpressionConverter.java @@ -79,9 +79,11 @@ private static void register() { register(SQLExtensionOperatorTable.SIGNED_LEFT_SHIFT); register(SQLExtensionOperatorTable.XOR); register(SQLExtensionOperatorTable.LOGICAL_AND); + register(SQLExtensionOperatorTable.REGEXP); register(SQLExtensionOperatorTable.NOT_REGEXP); register(SQLExtensionOperatorTable.SOUNDS_LIKE); register(SQLExtensionOperatorTable.NULL_SAFE); + register(SQLExtensionOperatorTable.ASSIGNMENT); } private static void register(final SqlOperator sqlOperator) { diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/CollateExpressionConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/CollateExpressionConverter.java new file mode 100644 index 0000000000000..630a5f3c4a276 --- /dev/null +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/CollateExpressionConverter.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license 
agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl; + +import org.apache.calcite.sql.SqlBasicCall; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlNodeList; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.CollateExpression; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.SQLSegmentConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.ExpressionConverter; + +import java.util.LinkedList; +import java.util.List; +import java.util.Optional; + +/** + * Collate expression converter. 
+ */ +public final class CollateExpressionConverter implements SQLSegmentConverter { + + @Override + public Optional convert(final CollateExpression segment) { + List sqlNodes = new LinkedList<>(); + sqlNodes.add(new ExpressionConverter().convert(segment.getExpr().get()).orElse(SqlNodeList.EMPTY)); + sqlNodes.add(new ExpressionConverter().convert(segment.getCollateName()).orElse(SqlNodeList.EMPTY)); + return Optional.of(new SqlBasicCall(SQLExtensionOperatorTable.COLLATE, sqlNodes, SqlParserPos.ZERO)); + } +} diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/ColumnConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/ColumnConverter.java index 37ef187ace46b..0668a96ac8b04 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/ColumnConverter.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/ColumnConverter.java @@ -24,7 +24,8 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerSegment; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.SQLSegmentConverter; -import java.util.Arrays; +import java.util.ArrayList; +import java.util.List; import java.util.Optional; /** @@ -34,10 +35,14 @@ public final class ColumnConverter implements SQLSegmentConverter convert(final ColumnSegment segment) { - Optional owner = segment.getOwner(); - String columnName = segment.getIdentifier().getValue(); - SqlIdentifier sqlIdentifier = owner.map(optional -> new SqlIdentifier(Arrays.asList(optional.getIdentifier().getValue(), columnName), SqlParserPos.ZERO)) - .orElseGet(() -> new SqlIdentifier(columnName, SqlParserPos.ZERO)); - return Optional.of(sqlIdentifier); + List names = new ArrayList<>(); + 
segment.getOwner().ifPresent(optional -> addOwnerNames(names, optional)); + names.add(segment.getIdentifier().getValue()); + return Optional.of(new SqlIdentifier(names, SqlParserPos.ZERO)); + } + + private void addOwnerNames(final List names, final OwnerSegment owner) { + owner.getOwner().ifPresent(optional -> addOwnerNames(names, optional)); + names.add(owner.getIdentifier().getValue()); } } diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/FunctionConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/FunctionConverter.java index 89e964dc3a9f2..3cb9c041603ab 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/FunctionConverter.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/FunctionConverter.java @@ -21,6 +21,7 @@ import org.apache.calcite.sql.SqlFunctionCategory; import org.apache.calcite.sql.SqlIdentifier; import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlNodeList; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.SqlSyntax; import org.apache.calcite.sql.SqlUnresolvedFunction; @@ -33,6 +34,7 @@ import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.ExpressionConverter; import java.util.Collection; +import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Optional; @@ -52,6 +54,9 @@ public Optional convert(final FunctionSegment segment) { if ("TRIM".equalsIgnoreCase(functionName.getSimple())) { return new TrimFunctionConverter().convert(segment); } + if ("OVER".equalsIgnoreCase(functionName.getSimple())) { + return new WindowFunctionConverter().convert(segment); + } List functions = new LinkedList<>(); 
SqlStdOperatorTable.instance().lookupOperatorOverloads(functionName, null, SqlSyntax.FUNCTION, functions, SqlNameMatchers.withCaseSensitive(false)); return Optional.of(functions.isEmpty() @@ -62,8 +67,9 @@ public Optional convert(final FunctionSegment segment) { private List getFunctionParameters(final Collection sqlSegments) { List result = new LinkedList<>(); + ExpressionConverter expressionConverter = new ExpressionConverter(); for (ExpressionSegment each : sqlSegments) { - new ExpressionConverter().convert(each).ifPresent(result::add); + expressionConverter.convert(each).ifPresent(optional -> result.addAll(optional instanceof SqlNodeList ? ((SqlNodeList) optional).getList() : Collections.singleton(optional))); } return result; } diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/MatchExpressionConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/MatchExpressionConverter.java index bd2d59a039834..e633ad288f0a3 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/MatchExpressionConverter.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/MatchExpressionConverter.java @@ -23,9 +23,11 @@ import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.MatchAgainstExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerSegment; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.SQLSegmentConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.ExpressionConverter; +import java.util.ArrayList; import java.util.LinkedList; import 
java.util.List; import java.util.Optional; @@ -38,10 +40,22 @@ public final class MatchExpressionConverter implements SQLSegmentConverter convert(final MatchAgainstExpression segment) { List sqlNodes = new LinkedList<>(); - sqlNodes.add(new SqlIdentifier(segment.getColumnName().getIdentifier().getValue(), SqlParserPos.ZERO)); + List names = new ArrayList<>(); + if (segment.getColumnName().getOwner().isPresent()) { + addOwnerNames(names, segment.getColumnName().getOwner().get()); + } + names.add(segment.getColumnName().getIdentifier().getValue()); + sqlNodes.add(new SqlIdentifier(names, SqlParserPos.ZERO)); new ExpressionConverter().convert(segment.getExpr()).ifPresent(sqlNodes::add); SqlNode searchModifier = SqlLiteral.createCharString(segment.getSearchModifier(), SqlParserPos.ZERO); sqlNodes.add(searchModifier); return Optional.of(new SqlBasicCall(SQLExtensionOperatorTable.MATCH_AGAINST, sqlNodes, SqlParserPos.ZERO)); } + + private void addOwnerNames(final List names, final OwnerSegment owner) { + if (null != owner) { + addOwnerNames(names, owner.getOwner().orElse(null)); + names.add(owner.getIdentifier().getValue()); + } + } } diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/RowExpressionConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/RowExpressionConverter.java new file mode 100644 index 0000000000000..928b68b74205f --- /dev/null +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/RowExpressionConverter.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl; + +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.fun.SqlStdOperatorTable; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.RowExpression; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.SQLSegmentConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.ExpressionConverter; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +/** + * Row expression converter. 
+ */ +public final class RowExpressionConverter implements SQLSegmentConverter { + + @Override + public Optional convert(final RowExpression segment) { + List sqlNodes = new ArrayList<>(); + ExpressionConverter expressionConverter = new ExpressionConverter(); + for (ExpressionSegment each : segment.getItems()) { + expressionConverter.convert(each).ifPresent(sqlNodes::add); + } + return Optional.of(SqlStdOperatorTable.ROW.createCall(SqlParserPos.ZERO, sqlNodes)); + } +} diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/SQLExtensionOperatorTable.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/SQLExtensionOperatorTable.java index 119989e0a4fa8..946b4058c3619 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/SQLExtensionOperatorTable.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/SQLExtensionOperatorTable.java @@ -48,9 +48,17 @@ public final class SQLExtensionOperatorTable { public static final SqlBinaryOperator LOGICAL_AND = new SqlBinaryOperator("&&", SqlKind.OTHER, 24, true, null, null, null); + public static final SqlBinaryOperator REGEXP = new SqlBinaryOperator("REGEXP", SqlKind.OTHER, 30, true, null, null, null); + public static final SqlBinaryOperator NOT_REGEXP = new SqlBinaryOperator("NOT REGEXP", SqlKind.OTHER, 30, true, null, null, null); public static final SqlBinaryOperator SOUNDS_LIKE = new SqlBinaryOperator("SOUNDS LIKE", SqlKind.OTHER, 30, true, null, null, null); public static final MySQLMatchAgainstFunction MATCH_AGAINST = new MySQLMatchAgainstFunction(); + + public static final SqlBinaryOperator COLLATE = new SqlBinaryOperator("COLLATE", SqlKind.OTHER, 30, true, null, null, null); + + public static final 
SqlBinaryOperator ASSIGNMENT = new SqlBinaryOperator(":=", SqlKind.OTHER, 30, true, null, null, null); + + public static final SqlPrefixOperator TILDE = new SqlPrefixOperator("~", SqlKind.OTHER, 26, null, null, null); } diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/TypeCastExpressionConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/TypeCastExpressionConverter.java index 837c2173296e2..97387b0542ac0 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/TypeCastExpressionConverter.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/TypeCastExpressionConverter.java @@ -24,10 +24,10 @@ import org.apache.calcite.sql.SqlTypeNameSpec; import org.apache.calcite.sql.fun.SqlCastFunction; import org.apache.calcite.sql.parser.SqlParserPos; -import org.apache.calcite.sql.type.SqlTypeName; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.TypeCastExpression; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.SQLSegmentConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.ExpressionConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.type.DataTypeConverter; import java.util.Arrays; import java.util.Optional; @@ -43,7 +43,7 @@ public Optional convert(final TypeCastExpression segment) { if (!expression.isPresent()) { return Optional.empty(); } - SqlTypeNameSpec sqlTypeName = new SqlBasicTypeNameSpec(SqlTypeName.valueOf(segment.getDataType().toUpperCase()), SqlParserPos.ZERO); + SqlTypeNameSpec sqlTypeName = new SqlBasicTypeNameSpec(DataTypeConverter.convert(segment.getDataType().toUpperCase()), SqlParserPos.ZERO); return 
Optional.of(new SqlBasicCall(new SqlCastFunction(), Arrays.asList(expression.get(), new SqlDataTypeSpec(sqlTypeName, SqlParserPos.ZERO)), SqlParserPos.ZERO)); } } diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/UnaryOperationExpressionConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/UnaryOperationExpressionConverter.java new file mode 100644 index 0000000000000..cc95a87b6bd2d --- /dev/null +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/UnaryOperationExpressionConverter.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl; + +import com.google.common.base.Preconditions; +import org.apache.calcite.sql.SqlBasicCall; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlOperator; +import org.apache.calcite.sql.fun.SqlStdOperatorTable; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.UnaryOperationExpression; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.SQLSegmentConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.ExpressionConverter; + +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.TreeMap; + +/** + * Unary operation expression converter. + */ +public final class UnaryOperationExpressionConverter implements SQLSegmentConverter { + + private static final Map REGISTRY = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); + + static { + register(); + } + + private static void register() { + register(SqlStdOperatorTable.UNARY_PLUS); + register(SqlStdOperatorTable.UNARY_MINUS); + register(SQLExtensionOperatorTable.TILDE); + } + + private static void register(final SqlOperator sqlOperator) { + REGISTRY.put(sqlOperator.getName(), sqlOperator); + } + + @Override + public Optional convert(final UnaryOperationExpression segment) { + SqlOperator operator = convertOperator(segment); + List sqlNodes = convertSqlNodes(segment); + return Optional.of(new SqlBasicCall(operator, sqlNodes, SqlParserPos.ZERO)); + } + + private SqlOperator convertOperator(final UnaryOperationExpression segment) { + String operator = segment.getOperator(); + Preconditions.checkState(REGISTRY.containsKey(operator), "Unsupported SQL operator: %s", operator); + return REGISTRY.get(operator); + } + + private List convertSqlNodes(final UnaryOperationExpression segment) { + SqlNode expression = 
new ExpressionConverter().convert(segment.getExpression()).orElseThrow(IllegalStateException::new); + List result = new LinkedList<>(); + result.add(expression); + return result; + } +} diff --git a/infra/datasource/type/druid/src/main/java/org/apache/shardingsphere/infra/datasource/druid/metadata/DruidDataSourcePoolFieldMetaData.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/VariableSegmentConverter.java similarity index 53% rename from infra/datasource/type/druid/src/main/java/org/apache/shardingsphere/infra/datasource/druid/metadata/DruidDataSourcePoolFieldMetaData.java rename to kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/VariableSegmentConverter.java index 41124f6bf30be..ed4a5436331de 100644 --- a/infra/datasource/type/druid/src/main/java/org/apache/shardingsphere/infra/datasource/druid/metadata/DruidDataSourcePoolFieldMetaData.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/VariableSegmentConverter.java @@ -15,32 +15,23 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.datasource.druid.metadata; +package org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolFieldMetaData; +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dal.VariableSegment; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.SQLSegmentConverter; + +import java.util.Optional; /** - * Druid data source pool field meta data. + * Variable segment converter. 
*/ -public final class DruidDataSourcePoolFieldMetaData implements DataSourcePoolFieldMetaData { - - @Override - public String getUsernameFieldName() { - return "username"; - } - - @Override - public String getPasswordFieldName() { - return "password"; - } - - @Override - public String getJdbcUrlFieldName() { - return "url"; - } +public final class VariableSegmentConverter implements SQLSegmentConverter { @Override - public String getJdbcUrlPropertiesFieldName() { - return "connectionProperties"; + public Optional convert(final VariableSegment segment) { + return Optional.of(new SqlIdentifier(segment.getText(), SqlParserPos.ZERO)); } } diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/WindowFunctionConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/WindowFunctionConverter.java new file mode 100644 index 0000000000000..9cb96482071c0 --- /dev/null +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/expression/impl/WindowFunctionConverter.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl; + +import org.apache.calcite.sql.SqlBasicCall; +import org.apache.calcite.sql.SqlIdentifier; +import org.apache.calcite.sql.SqlLiteral; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlNodeList; +import org.apache.calcite.sql.SqlOperator; +import org.apache.calcite.sql.SqlSyntax; +import org.apache.calcite.sql.SqlWindow; +import org.apache.calcite.sql.fun.SqlStdOperatorTable; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.calcite.sql.validate.SqlNameMatchers; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.FunctionSegment; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.ExpressionConverter; + +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Optional; + +/** + * Window function converter. 
+ */ +public final class WindowFunctionConverter extends FunctionConverter { + + @Override + public Optional convert(final FunctionSegment segment) { + SqlIdentifier functionName = new SqlIdentifier(segment.getFunctionName(), SqlParserPos.ZERO); + List functions = new LinkedList<>(); + SqlStdOperatorTable.instance().lookupOperatorOverloads(functionName, null, SqlSyntax.BINARY, functions, SqlNameMatchers.withCaseSensitive(false)); + return Optional.of(new SqlBasicCall(functions.iterator().next(), getWindowFunctionParameters(segment.getParameters()), SqlParserPos.ZERO)); + } + + private List getWindowFunctionParameters(final Collection sqlSegments) { + List result = new LinkedList<>(); + for (ExpressionSegment each : sqlSegments) { + new ExpressionConverter().convert(each).ifPresent(result::add); + } + if (1 == result.size()) { + result.add(new SqlWindow(SqlParserPos.ZERO, null, null, new SqlNodeList(SqlParserPos.ZERO), new SqlNodeList(SqlParserPos.ZERO), SqlLiteral.createBoolean(false, SqlParserPos.ZERO), null, + null, null)); + } + return result; + } +} diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/from/impl/JoinTableConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/from/impl/JoinTableConverter.java index 1e907504aa131..869b42556d20d 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/from/impl/JoinTableConverter.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/from/impl/JoinTableConverter.java @@ -54,7 +54,7 @@ private SqlLiteral convertConditionType(final JoinTableSegment segment) { if (!segment.getUsing().isEmpty()) { return JoinConditionType.USING.symbol(SqlParserPos.ZERO); } - return null != segment.getCondition() ? 
JoinConditionType.ON.symbol(SqlParserPos.ZERO) : JoinConditionType.NONE.symbol(SqlParserPos.ZERO); + return null == segment.getCondition() ? JoinConditionType.NONE.symbol(SqlParserPos.ZERO) : JoinConditionType.ON.symbol(SqlParserPos.ZERO); } private Optional convertJoinCondition(final JoinTableSegment segment) { diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/projection/impl/DataTypeConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/projection/impl/DataTypeConverter.java index f2b84bd5d6772..1e94cb16ebf66 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/projection/impl/DataTypeConverter.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/projection/impl/DataTypeConverter.java @@ -19,11 +19,13 @@ import org.apache.calcite.sql.SqlDataTypeSpec; import org.apache.calcite.sql.SqlNode; -import org.apache.calcite.sql.SqlUserDefinedTypeNameSpec; +import org.apache.calcite.sql.SqlBasicTypeNameSpec; import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.calcite.sql.type.SqlTypeName; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.DataTypeSegment; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.SQLSegmentConverter; +import java.util.Objects; import java.util.Optional; /** @@ -36,6 +38,7 @@ public Optional convert(final DataTypeSegment segment) { if (null == segment) { return Optional.empty(); } - return Optional.of(new SqlDataTypeSpec(new SqlUserDefinedTypeNameSpec(segment.getDataTypeName(), SqlParserPos.ZERO), SqlParserPos.ZERO)); + return Optional.of(new SqlDataTypeSpec(new SqlBasicTypeNameSpec(Objects.requireNonNull(SqlTypeName.get(segment.getDataTypeName())), segment.getDataLength().getPrecision(), SqlParserPos.ZERO), + 
SqlParserPos.ZERO)); } } diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/projection/impl/ShorthandProjectionConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/projection/impl/ShorthandProjectionConverter.java index 78812c7a9d57b..6a0c1e8fe7573 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/projection/impl/ShorthandProjectionConverter.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/projection/impl/ShorthandProjectionConverter.java @@ -22,10 +22,13 @@ import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ShorthandProjectionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerSegment; import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.SQLSegmentConverter; -import java.util.Arrays; +import java.util.ArrayList; +import java.util.List; import java.util.Optional; +import java.util.stream.IntStream; /** * Shorthand projection converter. 
@@ -38,8 +41,16 @@ public Optional convert(final ShorthandProjectionSegment segment) { return Optional.empty(); } if (segment.getOwner().isPresent()) { - return Optional.of(SqlIdentifier.star(Arrays.asList(segment.getOwner().get().getIdentifier().getValue(), ""), SqlParserPos.ZERO, ImmutableList.of(SqlParserPos.ZERO, SqlParserPos.ZERO))); + List names = new ArrayList<>(); + addOwnerNames(names, segment.getOwner().get()); + names.add(""); + return Optional.of(SqlIdentifier.star(names, SqlParserPos.ZERO, IntStream.range(0, names.size()).mapToObj(i -> SqlParserPos.ZERO).collect(ImmutableList.toImmutableList()))); } return Optional.of(SqlIdentifier.star(SqlParserPos.ZERO)); } + + private void addOwnerNames(final List names, final OwnerSegment owner) { + owner.getOwner().ifPresent(optional -> addOwnerNames(names, optional)); + names.add(owner.getIdentifier().getValue()); + } } diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/projection/impl/SubqueryProjectionConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/projection/impl/SubqueryProjectionConverter.java index 52adc8d0e45b2..653749ce6f739 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/projection/impl/SubqueryProjectionConverter.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/segment/projection/impl/SubqueryProjectionConverter.java @@ -22,12 +22,14 @@ import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.shardingsphere.sql.parser.sql.common.enums.SubqueryType; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.SubqueryProjectionSegment; import 
org.apache.shardingsphere.sqlfederation.compiler.converter.segment.SQLSegmentConverter; import org.apache.shardingsphere.sqlfederation.compiler.converter.statement.select.SelectStatementConverter; -import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.SubqueryProjectionSegment; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.LinkedList; import java.util.Optional; @@ -42,13 +44,18 @@ public Optional convert(final SubqueryProjectionSegment segment) { return Optional.empty(); } SqlNode sqlNode = new SelectStatementConverter().convert(segment.getSubquery().getSelect()); - return segment.getAliasName().isPresent() ? convertToSQLStatement(sqlNode, segment.getAliasName().get()) : Optional.of(sqlNode); + if (segment.getAliasName().isPresent()) { + sqlNode = convertWithAlias(sqlNode, segment.getAliasName().get()); + } + return SubqueryType.EXISTS_SUBQUERY == segment.getSubquery().getSubqueryType() + ? Optional.of(new SqlBasicCall(SqlStdOperatorTable.EXISTS, Collections.singletonList(sqlNode), SqlParserPos.ZERO)) + : Optional.of(sqlNode); } - private Optional convertToSQLStatement(final SqlNode sqlNode, final String alias) { + private SqlNode convertWithAlias(final SqlNode sqlNode, final String alias) { Collection sqlNodes = new LinkedList<>(); sqlNodes.add(sqlNode); sqlNodes.add(new SqlIdentifier(alias, SqlParserPos.ZERO)); - return Optional.of(new SqlBasicCall(SqlStdOperatorTable.AS, new ArrayList<>(sqlNodes), SqlParserPos.ZERO)); + return new SqlBasicCall(SqlStdOperatorTable.AS, new ArrayList<>(sqlNodes), SqlParserPos.ZERO); } } diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/statement/explain/ExplainStatementConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/statement/explain/ExplainStatementConverter.java new file mode 100644 index 0000000000000..6bdd4e2d1b70a --- 
/dev/null +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/statement/explain/ExplainStatementConverter.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sqlfederation.compiler.converter.statement.explain; + +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlExplain; +import org.apache.calcite.sql.SqlExplainLevel; +import org.apache.calcite.sql.SqlExplainFormat; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dal.ExplainStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.DeleteStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.UpdateStatement; +import org.apache.shardingsphere.sqlfederation.compiler.converter.statement.SQLStatementConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.statement.delete.DeleteStatementConverter; +import 
org.apache.shardingsphere.sqlfederation.compiler.converter.statement.select.SelectStatementConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.statement.update.UpdateStatementConverter; + +/** + * Explain statement converter. + */ +public final class ExplainStatementConverter implements SQLStatementConverter { + + @Override + public SqlNode convert(final ExplainStatement explainStatement) { + return new SqlExplain(SqlParserPos.ZERO, convertSQLStatement(explainStatement), SqlExplainLevel.ALL_ATTRIBUTES.symbol(SqlParserPos.ZERO), + SqlExplain.Depth.TYPE.symbol(SqlParserPos.ZERO), SqlExplainFormat.TEXT.symbol(SqlParserPos.ZERO), 0); + } + + private SqlNode convertSQLStatement(final ExplainStatement explainStatement) { + return explainStatement.getStatement().map(this::convertSqlNode).orElseThrow(IllegalStateException::new); + } + + private SqlNode convertSqlNode(final SQLStatement sqlStatement) { + if (sqlStatement instanceof SelectStatement) { + return new SelectStatementConverter().convert((SelectStatement) sqlStatement); + } else if (sqlStatement instanceof DeleteStatement) { + return new DeleteStatementConverter().convert((DeleteStatement) sqlStatement); + } else if (sqlStatement instanceof UpdateStatement) { + return new UpdateStatementConverter().convert((UpdateStatement) sqlStatement); + } + // TODO other statement converter. 
+ return null; + } +} diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/statement/insert/InsertStatementConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/statement/insert/InsertStatementConverter.java new file mode 100644 index 0000000000000..2a2cdc88129ca --- /dev/null +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/statement/insert/InsertStatementConverter.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.sqlfederation.compiler.converter.statement.insert; + +import org.apache.calcite.sql.SqlBasicCall; +import org.apache.calcite.sql.SqlInsert; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlNodeList; +import org.apache.calcite.sql.SqlSelect; +import org.apache.calcite.sql.SqlValuesOperator; +import org.apache.calcite.sql.fun.SqlRowOperator; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.InsertValuesSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.handler.dml.SelectStatementHandler; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.ExpressionConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.ColumnConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.from.TableConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.groupby.GroupByConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.groupby.HavingConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.projection.DistinctConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.projection.ProjectionsConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.where.WhereConverter; +import 
org.apache.shardingsphere.sqlfederation.compiler.converter.segment.window.WindowConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.statement.SQLStatementConverter; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; + +/** + * Insert statement converter. + */ +public final class InsertStatementConverter implements SQLStatementConverter { + + @Override + public SqlNode convert(final InsertStatement insertStatement) { + return convertInsert(insertStatement); + } + + private SqlInsert convertInsert(final InsertStatement insertStatement) { + SqlNode table = new TableConverter().convert(insertStatement.getTable()).orElseThrow(IllegalStateException::new); + SqlParserPos position = SqlParserPos.ZERO; + SqlNodeList keywords = new SqlNodeList(position); + SqlNode source; + if (insertStatement.getInsertSelect().isPresent()) { + source = convertSelect(insertStatement.getInsertSelect().get()); + } else { + source = convertValues(insertStatement.getValues()); + } + SqlNodeList columnList = convertColumn(insertStatement.getColumns()); + return new SqlInsert(SqlParserPos.ZERO, keywords, table, source, columnList); + } + + private SqlNode convertSelect(final SubquerySegment subquerySegment) { + SelectStatement selectStatement = subquerySegment.getSelect(); + SqlNodeList distinct = new DistinctConverter().convert(selectStatement.getProjections()).orElse(null); + SqlNodeList projection = new ProjectionsConverter().convert(selectStatement.getProjections()).orElseThrow(IllegalStateException::new); + SqlNode from = new TableConverter().convert(selectStatement.getFrom()).orElse(null); + SqlNode where = selectStatement.getWhere().flatMap(optional -> new WhereConverter().convert(optional)).orElse(null); + SqlNodeList groupBy = selectStatement.getGroupBy().flatMap(optional -> new GroupByConverter().convert(optional)).orElse(null); + SqlNode having = selectStatement.getHaving().flatMap(optional 
-> new HavingConverter().convert(optional)).orElse(null); + SqlNodeList window = SelectStatementHandler.getWindowSegment(selectStatement).flatMap(new WindowConverter()::convert).orElse(SqlNodeList.EMPTY); + return new SqlSelect(SqlParserPos.ZERO, distinct, projection, from, where, groupBy, having, window, null, null, null, null, SqlNodeList.EMPTY); + } + + private SqlNode convertValues(final Collection insertValuesSegments) { + List values = new ArrayList<>(); + for (InsertValuesSegment each : insertValuesSegments) { + for (ExpressionSegment value : each.getValues()) { + values.add(convertExpression(value)); + } + } + List operands = new ArrayList<>(); + operands.add(new SqlBasicCall(new SqlRowOperator("ROW"), values, SqlParserPos.ZERO)); + return new SqlBasicCall(new SqlValuesOperator(), operands, SqlParserPos.ZERO); + } + + private SqlNodeList convertColumn(final Collection columnSegments) { + List columns = columnSegments.stream().map(each -> new ColumnConverter().convert(each).orElseThrow(IllegalStateException::new)).collect(Collectors.toList()); + if (columns.isEmpty()) { + return SqlNodeList.EMPTY; + } + return new SqlNodeList(columns, SqlParserPos.ZERO); + } + + private SqlNode convertExpression(final ExpressionSegment expressionSegment) { + return new ExpressionConverter().convert(expressionSegment).orElseThrow(IllegalStateException::new); + } +} diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/statement/select/SelectStatementConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/statement/select/SelectStatementConverter.java index bab0b9feb325e..86685b01eac32 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/statement/select/SelectStatementConverter.java +++ 
b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/statement/select/SelectStatementConverter.java @@ -69,7 +69,7 @@ private SqlSelect convertSelect(final SelectStatement selectStatement) { SqlNodeList groupBy = selectStatement.getGroupBy().flatMap(optional -> new GroupByConverter().convert(optional)).orElse(null); SqlNode having = selectStatement.getHaving().flatMap(optional -> new HavingConverter().convert(optional)).orElse(null); SqlNodeList window = SelectStatementHandler.getWindowSegment(selectStatement).flatMap(new WindowConverter()::convert).orElse(SqlNodeList.EMPTY); - return new SqlSelect(SqlParserPos.ZERO, distinct, projection, from, where, groupBy, having, window, null, null, null, SqlNodeList.EMPTY); + return new SqlSelect(SqlParserPos.ZERO, distinct, projection, from, where, groupBy, having, window, null, null, null, null, SqlNodeList.EMPTY); } private SqlNode convertCombine(final SqlNode sqlNode, final SelectStatement selectStatement) { diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/statement/update/UpdateStatementConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/statement/update/UpdateStatementConverter.java new file mode 100644 index 0000000000000..b4efca5e2bd9d --- /dev/null +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/statement/update/UpdateStatementConverter.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sqlfederation.compiler.converter.statement.update; + +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.SqlNodeList; +import org.apache.calcite.sql.SqlOrderBy; +import org.apache.calcite.sql.SqlUpdate; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.AssignmentSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.pagination.limit.LimitSegment; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.UpdateStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.handler.dml.UpdateStatementHandler; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.ExpressionConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.expression.impl.ColumnConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.from.TableConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.limit.PaginationValueSQLConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.orderby.OrderByConverter; +import org.apache.shardingsphere.sqlfederation.compiler.converter.segment.where.WhereConverter; +import 
org.apache.shardingsphere.sqlfederation.compiler.converter.statement.SQLStatementConverter; + +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; + +/** + * Update statement converter. + */ +public final class UpdateStatementConverter implements SQLStatementConverter { + + @Override + public SqlNode convert(final UpdateStatement updateStatement) { + SqlUpdate sqlUpdate = convertUpdate(updateStatement); + SqlNodeList orderBy = UpdateStatementHandler.getOrderBySegment(updateStatement).flatMap(optional -> new OrderByConverter().convert(optional)).orElse(SqlNodeList.EMPTY); + Optional limit = UpdateStatementHandler.getLimitSegment(updateStatement); + if (limit.isPresent()) { + SqlNode offset = limit.get().getOffset().flatMap(optional -> new PaginationValueSQLConverter().convert(optional)).orElse(null); + SqlNode rowCount = limit.get().getRowCount().flatMap(optional -> new PaginationValueSQLConverter().convert(optional)).orElse(null); + return new SqlOrderBy(SqlParserPos.ZERO, sqlUpdate, orderBy, offset, rowCount); + } + return orderBy.isEmpty() ? 
sqlUpdate : new SqlOrderBy(SqlParserPos.ZERO, sqlUpdate, orderBy, null, null); + } + + private SqlUpdate convertUpdate(final UpdateStatement updateStatement) { + SqlNode table = new TableConverter().convert(updateStatement.getTable()).orElseThrow(IllegalStateException::new); + SqlNode condition = updateStatement.getWhere().flatMap(optional -> new WhereConverter().convert(optional)).orElse(null); + SqlNodeList columns = new SqlNodeList(SqlParserPos.ZERO); + SqlNodeList expressions = new SqlNodeList(SqlParserPos.ZERO); + for (AssignmentSegment each : updateStatement.getAssignmentSegment().orElseThrow(IllegalStateException::new).getAssignments()) { + columns.addAll(convertColumn(each.getColumns())); + expressions.add(convertExpression(each.getValue())); + } + return new SqlUpdate(SqlParserPos.ZERO, table, columns, expressions, condition, null, null); + } + + private List convertColumn(final List columnSegments) { + return columnSegments.stream().map(each -> new ColumnConverter().convert(each).orElseThrow(IllegalStateException::new)).collect(Collectors.toList()); + } + + private SqlNode convertExpression(final ExpressionSegment expressionSegment) { + return new ExpressionConverter().convert(expressionSegment).orElseThrow(IllegalStateException::new); + } +} diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/type/DataTypeConverter.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/type/DataTypeConverter.java new file mode 100644 index 0000000000000..34e9b381505c2 --- /dev/null +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/converter/type/DataTypeConverter.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sqlfederation.compiler.converter.type; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.apache.calcite.sql.type.SqlTypeName; + +import java.util.HashMap; +import java.util.Map; + +/** + * Data type converter. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class DataTypeConverter { + + private static final Map REGISTRY = new HashMap<>(); + + static { + registerDataType(); + } + + private static void registerDataType() { + REGISTRY.put("INT", SqlTypeName.INTEGER); + REGISTRY.put("INT2", SqlTypeName.SMALLINT); + REGISTRY.put("INT4", SqlTypeName.INTEGER); + REGISTRY.put("INT8", SqlTypeName.BIGINT); + REGISTRY.put("MONEY", SqlTypeName.DECIMAL); + } + + /** + * Convert to SQL operator. 
+ * + * @param dataType data type to be converted + * @return converted SQL operator + */ + public static SqlTypeName convert(final String dataType) { + if (!REGISTRY.containsKey(dataType)) { + return SqlTypeName.valueOf(dataType); + } + return REGISTRY.get(dataType); + } +} diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/metadata/schema/SQLFederationTable.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/metadata/schema/SQLFederationTable.java index 9d877a3a627a7..c3ed10d7ad528 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/metadata/schema/SQLFederationTable.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/compiler/metadata/schema/SQLFederationTable.java @@ -20,7 +20,9 @@ import lombok.RequiredArgsConstructor; import lombok.Setter; import org.apache.calcite.DataContext; +import org.apache.calcite.linq4j.AbstractEnumerable; import org.apache.calcite.linq4j.Enumerable; +import org.apache.calcite.linq4j.Enumerator; import org.apache.calcite.linq4j.QueryProvider; import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.linq4j.tree.Expression; @@ -43,6 +45,7 @@ import org.apache.shardingsphere.sqlfederation.compiler.statistic.SQLFederationStatistic; import org.apache.shardingsphere.sqlfederation.executor.enumerable.EnumerableScanExecutor; import org.apache.shardingsphere.sqlfederation.executor.enumerable.EnumerableScanExecutorContext; +import org.apache.shardingsphere.sqlfederation.executor.row.EmptyRowEnumerator; import java.lang.reflect.Type; import java.util.Collections; @@ -96,9 +99,22 @@ public RelNode toRel(final ToRelContext context, final RelOptTable relOptTable) * @return enumerable result */ public Enumerable execute(final DataContext root, final String sql, final int[] paramIndexes) { + if (null == scanExecutor) { + return createEmptyEnumerable(); 
+ } return scanExecutor.execute(table, new EnumerableScanExecutorContext(root, sql, paramIndexes)); } + private AbstractEnumerable createEmptyEnumerable() { + return new AbstractEnumerable() { + + @Override + public Enumerator enumerator() { + return new EmptyRowEnumerator(); + } + }; + } + @Override public String toString() { return "SQLFederationTable"; diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/executor/enumerable/EnumerableScanExecutor.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/executor/enumerable/EnumerableScanExecutor.java index c6d74f2919900..6eb80eff97d63 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/executor/enumerable/EnumerableScanExecutor.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/executor/enumerable/EnumerableScanExecutor.java @@ -29,6 +29,7 @@ import org.apache.shardingsphere.infra.database.core.metadata.database.system.SystemDatabase; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.opengauss.type.OpenGaussDatabaseType; +import org.apache.shardingsphere.infra.exception.core.external.sql.type.wrapper.SQLWrapperException; import org.apache.shardingsphere.infra.executor.kernel.model.ExecutionGroup; import org.apache.shardingsphere.infra.executor.kernel.model.ExecutionGroupContext; import org.apache.shardingsphere.infra.executor.kernel.model.ExecutionGroupReportContext; @@ -58,7 +59,6 @@ import org.apache.shardingsphere.infra.parser.sql.SQLStatementParserEngine; import org.apache.shardingsphere.infra.session.connection.ConnectionContext; import org.apache.shardingsphere.infra.session.query.QueryContext; -import org.apache.shardingsphere.infra.exception.core.external.sql.type.wrapper.SQLWrapperException; import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement; import 
org.apache.shardingsphere.sqlfederation.compiler.context.OptimizerContext; import org.apache.shardingsphere.sqlfederation.executor.SQLFederationExecutorContext; @@ -283,8 +283,9 @@ private QueryContext createQueryContext(final ShardingSphereMetaData metaData, f optimizerContext.getSqlParserRule().getSqlStatementCache(), optimizerContext.getSqlParserRule().getParseTreeCache(), optimizerContext.getSqlParserRule().isSqlCommentParseEnabled()).parse(sql, useCache); List params = getParameters(sqlString.getParamIndexes()); - SQLStatementContext sqlStatementContext = new SQLBindEngine(metaData, executorContext.getDatabaseName()).bind(sqlStatement, params); - return new QueryContext(sqlStatementContext, sql, params, new HintValueContext(), useCache); + HintValueContext hintValueContext = new HintValueContext(); + SQLStatementContext sqlStatementContext = new SQLBindEngine(metaData, executorContext.getDatabaseName(), hintValueContext).bind(sqlStatement, params); + return new QueryContext(sqlStatementContext, sql, params, hintValueContext, useCache); } private List getParameters(final int[] paramIndexes) { diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/resultset/SQLFederationResultSetMetaData.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/resultset/SQLFederationResultSetMetaData.java index 9e122e7236f59..a91af4933a974 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/resultset/SQLFederationResultSetMetaData.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/resultset/SQLFederationResultSetMetaData.java @@ -181,7 +181,7 @@ private Optional findTableName(final int column) { expandProjections.size() < column ? 
new ColumnProjection(null, resultColumnType.getFieldList().get(column - 1).getName(), null, selectStatementContext.getDatabaseType()) : expandProjections.get(column - 1); if (projection instanceof ColumnProjection) { - return Optional.of(((ColumnProjection) projection).getOriginalTable().getValue()); + return Optional.ofNullable(((ColumnProjection) projection).getOriginalTable().getValue()); } return Optional.empty(); } diff --git a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/rule/SQLFederationRule.java b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/rule/SQLFederationRule.java index b2ba12064d61d..af866624c598a 100644 --- a/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/rule/SQLFederationRule.java +++ b/kernel/sql-federation/core/src/main/java/org/apache/shardingsphere/sqlfederation/rule/SQLFederationRule.java @@ -62,9 +62,4 @@ public void dropDatabase(final String databaseName) { optimizerContext.removeParserContext(databaseName); optimizerContext.removePlannerContext(databaseName); } - - @Override - public String getType() { - return SQLFederationRule.class.getSimpleName(); - } } diff --git a/kernel/sql-federation/core/src/test/java/org/apache/shardingsphere/sqlfederation/engine/fixture/rule/SQLFederationDeciderRuleMatchFixture.java b/kernel/sql-federation/core/src/test/java/org/apache/shardingsphere/sqlfederation/engine/fixture/rule/SQLFederationDeciderRuleMatchFixture.java index 25dad8dbcd9fa..fdbce39cfc6ef 100644 --- a/kernel/sql-federation/core/src/test/java/org/apache/shardingsphere/sqlfederation/engine/fixture/rule/SQLFederationDeciderRuleMatchFixture.java +++ b/kernel/sql-federation/core/src/test/java/org/apache/shardingsphere/sqlfederation/engine/fixture/rule/SQLFederationDeciderRuleMatchFixture.java @@ -30,9 +30,4 @@ public final class SQLFederationDeciderRuleMatchFixture implements ShardingSpher public RuleConfiguration getConfiguration() { return 
new SQLFederationRuleConfiguration(true, mock(CacheOption.class)); } - - @Override - public String getType() { - return SQLFederationDeciderRuleMatchFixture.class.getSimpleName(); - } } diff --git a/kernel/sql-federation/core/src/test/java/org/apache/shardingsphere/sqlfederation/engine/fixture/rule/SQLFederationDeciderRuleNotMatchFixture.java b/kernel/sql-federation/core/src/test/java/org/apache/shardingsphere/sqlfederation/engine/fixture/rule/SQLFederationDeciderRuleNotMatchFixture.java index 36788a0f27b78..e32f797264ace 100644 --- a/kernel/sql-federation/core/src/test/java/org/apache/shardingsphere/sqlfederation/engine/fixture/rule/SQLFederationDeciderRuleNotMatchFixture.java +++ b/kernel/sql-federation/core/src/test/java/org/apache/shardingsphere/sqlfederation/engine/fixture/rule/SQLFederationDeciderRuleNotMatchFixture.java @@ -30,9 +30,4 @@ public final class SQLFederationDeciderRuleNotMatchFixture implements ShardingSp public RuleConfiguration getConfiguration() { return new SQLFederationRuleConfiguration(false, mock(CacheOption.class)); } - - @Override - public String getType() { - return SQLFederationDeciderRuleNotMatchFixture.class.getSimpleName(); - } } diff --git a/kernel/sql-federation/core/src/test/resources/cases/federation-query-sql-cases.xml b/kernel/sql-federation/core/src/test/resources/cases/federation-query-sql-cases.xml index 9585657c32280..2438f44d052ee 100644 --- a/kernel/sql-federation/core/src/test/resources/cases/federation-query-sql-cases.xml +++ b/kernel/sql-federation/core/src/test/resources/cases/federation-query-sql-cases.xml @@ -46,15 +46,15 @@ - + - + - + @@ -398,7 +398,7 @@ - + diff --git a/kernel/sql-parser/core/src/main/java/org/apache/shardingsphere/parser/rule/SQLParserRule.java b/kernel/sql-parser/core/src/main/java/org/apache/shardingsphere/parser/rule/SQLParserRule.java index 6d8cdb7002ee1..d9b1655285435 100644 --- a/kernel/sql-parser/core/src/main/java/org/apache/shardingsphere/parser/rule/SQLParserRule.java +++ 
b/kernel/sql-parser/core/src/main/java/org/apache/shardingsphere/parser/rule/SQLParserRule.java @@ -61,9 +61,4 @@ public SQLParserEngine getSQLParserEngine(final DatabaseType databaseType) { ? new ShardingSphereSQLParserEngine(databaseType, sqlStatementCache, parseTreeCache, sqlCommentParseEnabled) : new SimpleSQLParserEngine(); } - - @Override - public String getType() { - return SQLParserRule.class.getSimpleName(); - } } diff --git a/kernel/sql-parser/core/src/test/java/org/apache/shardingsphere/parser/rule/SQLParserRuleTest.java b/kernel/sql-parser/core/src/test/java/org/apache/shardingsphere/parser/rule/SQLParserRuleTest.java index 9c5734c13e723..e4592a973d721 100644 --- a/kernel/sql-parser/core/src/test/java/org/apache/shardingsphere/parser/rule/SQLParserRuleTest.java +++ b/kernel/sql-parser/core/src/test/java/org/apache/shardingsphere/parser/rule/SQLParserRuleTest.java @@ -43,11 +43,6 @@ void assertGetSQLParserEngine() { assertNotNull(sqlParserRule.getSQLParserEngine(TypedSPILoader.getService(DatabaseType.class, "H2"))); } - @Test - void assertGetType() { - assertThat(sqlParserRule.getType(), is(SQLParserRule.class.getSimpleName())); - } - @Test void assertFields() { assertTrue(sqlParserRule.isSqlCommentParseEnabled()); diff --git a/kernel/sql-translator/core/src/main/java/org/apache/shardingsphere/sqltranslator/rule/SQLTranslatorRule.java b/kernel/sql-translator/core/src/main/java/org/apache/shardingsphere/sqltranslator/rule/SQLTranslatorRule.java index 272eea1efc900..3355e010626b9 100644 --- a/kernel/sql-translator/core/src/main/java/org/apache/shardingsphere/sqltranslator/rule/SQLTranslatorRule.java +++ b/kernel/sql-translator/core/src/main/java/org/apache/shardingsphere/sqltranslator/rule/SQLTranslatorRule.java @@ -66,9 +66,4 @@ public String translate(final String sql, final SQLStatement sqlStatement, final throw ex; } } - - @Override - public String getType() { - return SQLTranslatorRule.class.getSimpleName(); - } } diff --git 
a/kernel/sql-translator/core/src/test/java/org/apache/shardingsphere/sqltranslator/rule/SQLTranslatorRuleTest.java b/kernel/sql-translator/core/src/test/java/org/apache/shardingsphere/sqltranslator/rule/SQLTranslatorRuleTest.java index b4d6aece840b0..6d847167a0844 100644 --- a/kernel/sql-translator/core/src/test/java/org/apache/shardingsphere/sqltranslator/rule/SQLTranslatorRuleTest.java +++ b/kernel/sql-translator/core/src/test/java/org/apache/shardingsphere/sqltranslator/rule/SQLTranslatorRuleTest.java @@ -75,9 +75,4 @@ void assertGetConfiguration() { SQLTranslatorRuleConfiguration expected = new SQLTranslatorRuleConfiguration("CONVERT_TO_UPPER_CASE", false); assertThat(new SQLTranslatorRule(expected).getConfiguration(), is(expected)); } - - @Test - void assertGetType() { - assertThat(new SQLTranslatorRule(new SQLTranslatorRuleConfiguration("CONVERT_TO_UPPER_CASE", false)).getType(), is(SQLTranslatorRule.class.getSimpleName())); - } } diff --git a/kernel/time-service/core/src/main/java/org/apache/shardingsphere/timeservice/core/rule/TimestampServiceRule.java b/kernel/time-service/core/src/main/java/org/apache/shardingsphere/timeservice/core/rule/TimestampServiceRule.java index 8a3df4f71a432..87fb65f3f7708 100644 --- a/kernel/time-service/core/src/main/java/org/apache/shardingsphere/timeservice/core/rule/TimestampServiceRule.java +++ b/kernel/time-service/core/src/main/java/org/apache/shardingsphere/timeservice/core/rule/TimestampServiceRule.java @@ -49,9 +49,4 @@ public TimestampServiceRule(final TimestampServiceRuleConfiguration ruleConfig) public Timestamp getTimestamp() { return timestampService.getTimestamp(); } - - @Override - public String getType() { - return TimestampServiceRule.class.getSimpleName(); - } } diff --git a/kernel/time-service/type/database/src/main/java/org/apache/shardingsphere/timeservice/type/database/DatabaseTimestampService.java 
b/kernel/time-service/type/database/src/main/java/org/apache/shardingsphere/timeservice/type/database/DatabaseTimestampService.java index 88fd42269a0a0..5dea456923c49 100644 --- a/kernel/time-service/type/database/src/main/java/org/apache/shardingsphere/timeservice/type/database/DatabaseTimestampService.java +++ b/kernel/time-service/type/database/src/main/java/org/apache/shardingsphere/timeservice/type/database/DatabaseTimestampService.java @@ -48,8 +48,8 @@ public final class DatabaseTimestampService implements TimestampService { @Override public void init(final Properties props) { - dataSource = DataSourcePoolCreator.create(new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties( - props.entrySet().stream().collect(Collectors.toMap(entry -> entry.getKey().toString(), Entry::getValue, (key, value) -> value)))); + dataSource = DataSourcePoolCreator.create(new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties( + props.entrySet().stream().collect(Collectors.toMap(entry -> entry.getKey().toString(), Entry::getValue)))); storageType = DatabaseTypeEngine.getStorageType(Collections.singleton(dataSource)); } diff --git a/kernel/traffic/api/src/main/java/org/apache/shardingsphere/traffic/spi/TrafficLoadBalanceAlgorithm.java b/kernel/traffic/api/src/main/java/org/apache/shardingsphere/traffic/spi/TrafficLoadBalanceAlgorithm.java index 824a8a66ac815..c84bf2c6b064b 100644 --- a/kernel/traffic/api/src/main/java/org/apache/shardingsphere/traffic/spi/TrafficLoadBalanceAlgorithm.java +++ b/kernel/traffic/api/src/main/java/org/apache/shardingsphere/traffic/spi/TrafficLoadBalanceAlgorithm.java @@ -28,11 +28,11 @@ public interface TrafficLoadBalanceAlgorithm extends ShardingSphereAlgorithm { /** - * Get instance definition. + * Get instance meta data. 
* * @param name traffic strategy name - * @param instances instance collection - * @return instance definition + * @param instances instances + * @return got meta data */ - InstanceMetaData getInstanceId(String name, List instances); + InstanceMetaData getInstanceMetaData(String name, List instances); } diff --git a/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RandomTrafficLoadBalanceAlgorithm.java b/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RandomTrafficLoadBalanceAlgorithm.java index 2e9b2d1e7e80f..5918ec50bac4b 100644 --- a/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RandomTrafficLoadBalanceAlgorithm.java +++ b/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RandomTrafficLoadBalanceAlgorithm.java @@ -29,7 +29,7 @@ public final class RandomTrafficLoadBalanceAlgorithm implements TrafficLoadBalanceAlgorithm { @Override - public InstanceMetaData getInstanceId(final String name, final List instances) { + public InstanceMetaData getInstanceMetaData(final String name, final List instances) { return instances.get(ThreadLocalRandom.current().nextInt(instances.size())); } diff --git a/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RoundRobinTrafficLoadBalanceAlgorithm.java b/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RoundRobinTrafficLoadBalanceAlgorithm.java index f216aad5857f0..6ecb74cc2741e 100644 --- a/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RoundRobinTrafficLoadBalanceAlgorithm.java +++ b/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RoundRobinTrafficLoadBalanceAlgorithm.java @@ -31,7 +31,7 @@ public final class RoundRobinTrafficLoadBalanceAlgorithm implements TrafficLoadB private final AtomicInteger count = 
new AtomicInteger(0); @Override - public InstanceMetaData getInstanceId(final String name, final List instances) { + public InstanceMetaData getInstanceMetaData(final String name, final List instances) { return instances.get(Math.abs(count.getAndIncrement()) % instances.size()); } diff --git a/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/engine/TrafficEngine.java b/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/engine/TrafficEngine.java index 2190083a2209b..fae5374577a89 100644 --- a/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/engine/TrafficEngine.java +++ b/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/engine/TrafficEngine.java @@ -54,7 +54,7 @@ public Optional dispatch(final QueryContext queryContext, final boolean List instances = instanceContext.getAllClusterInstances(InstanceType.PROXY, strategyRule.get().getLabels()); if (!instances.isEmpty()) { TrafficLoadBalanceAlgorithm loadBalancer = strategyRule.get().getLoadBalancer(); - InstanceMetaData instanceMetaData = 1 == instances.size() ? instances.iterator().next() : loadBalancer.getInstanceId(strategyRule.get().getName(), instances); + InstanceMetaData instanceMetaData = 1 == instances.size() ? 
instances.iterator().next() : loadBalancer.getInstanceMetaData(strategyRule.get().getName(), instances); return Optional.of(instanceMetaData.getId()); } return Optional.empty(); diff --git a/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/rule/TrafficRule.java b/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/rule/TrafficRule.java index 24b5a47374aae..cdca0f4e8a731 100644 --- a/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/rule/TrafficRule.java +++ b/kernel/traffic/core/src/main/java/org/apache/shardingsphere/traffic/rule/TrafficRule.java @@ -189,9 +189,4 @@ public Collection getLabels() { } return result; } - - @Override - public String getType() { - return TrafficRule.class.getSimpleName(); - } } diff --git a/kernel/traffic/core/src/test/java/org/apache/shardingsphere/traffic/algorithm/engine/TrafficEngineTest.java b/kernel/traffic/core/src/test/java/org/apache/shardingsphere/traffic/algorithm/engine/TrafficEngineTest.java index fa4644ce5a0d7..9f8878edb103f 100644 --- a/kernel/traffic/core/src/test/java/org/apache/shardingsphere/traffic/algorithm/engine/TrafficEngineTest.java +++ b/kernel/traffic/core/src/test/java/org/apache/shardingsphere/traffic/algorithm/engine/TrafficEngineTest.java @@ -92,7 +92,7 @@ void assertDispatchWhenExistTrafficStrategyRuleExistComputeNodeInstances() { when(strategyRule.getLabels()).thenReturn(Arrays.asList("OLTP", "OLAP")); TrafficLoadBalanceAlgorithm loadBalancer = mock(TrafficLoadBalanceAlgorithm.class); List instanceIds = mockComputeNodeInstances(); - when(loadBalancer.getInstanceId("traffic", instanceIds)).thenReturn(new ProxyInstanceMetaData("foo_id", 3307)); + when(loadBalancer.getInstanceMetaData("traffic", instanceIds)).thenReturn(new ProxyInstanceMetaData("foo_id", 3307)); when(strategyRule.getLoadBalancer()).thenReturn(loadBalancer); when(strategyRule.getName()).thenReturn("traffic"); when(instanceContext.getAllClusterInstances(InstanceType.PROXY, 
Arrays.asList("OLTP", "OLAP"))).thenReturn(instanceIds); diff --git a/kernel/traffic/core/src/test/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RandomTrafficLoadBalanceAlgorithmTest.java b/kernel/traffic/core/src/test/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RandomTrafficLoadBalanceAlgorithmTest.java index 4f63f3f442cc1..71ada02ede6ef 100644 --- a/kernel/traffic/core/src/test/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RandomTrafficLoadBalanceAlgorithmTest.java +++ b/kernel/traffic/core/src/test/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RandomTrafficLoadBalanceAlgorithmTest.java @@ -33,8 +33,8 @@ void assertGetInstanceId() { RandomTrafficLoadBalanceAlgorithm randomAlgorithm = new RandomTrafficLoadBalanceAlgorithm(); List instances = Arrays.asList(new ProxyInstanceMetaData("foo_id", "127.0.0.1@3307", "foo_verison"), new ProxyInstanceMetaData("bar_id", "127.0.0.1@3308", "foo_verison")); - assertTrue(instances.contains(randomAlgorithm.getInstanceId("simple_traffic", instances))); - assertTrue(instances.contains(randomAlgorithm.getInstanceId("simple_traffic", instances))); - assertTrue(instances.contains(randomAlgorithm.getInstanceId("simple_traffic", instances))); + assertTrue(instances.contains(randomAlgorithm.getInstanceMetaData("simple_traffic", instances))); + assertTrue(instances.contains(randomAlgorithm.getInstanceMetaData("simple_traffic", instances))); + assertTrue(instances.contains(randomAlgorithm.getInstanceMetaData("simple_traffic", instances))); } } diff --git a/kernel/traffic/core/src/test/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RoundRobinTrafficLoadBalanceAlgorithmTest.java b/kernel/traffic/core/src/test/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RoundRobinTrafficLoadBalanceAlgorithmTest.java index 2d91acc72e6e6..b36f75c047361 100644 --- 
a/kernel/traffic/core/src/test/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RoundRobinTrafficLoadBalanceAlgorithmTest.java +++ b/kernel/traffic/core/src/test/java/org/apache/shardingsphere/traffic/algorithm/loadbalance/RoundRobinTrafficLoadBalanceAlgorithmTest.java @@ -35,8 +35,8 @@ void assertGetInstanceId() { InstanceMetaData instance2 = new ProxyInstanceMetaData("127.0.0.1@3308", "127.0.0.1@3308", "foo_version"); List instances = Arrays.asList(instance1, instance2); RoundRobinTrafficLoadBalanceAlgorithm roundRobinAlgorithm = new RoundRobinTrafficLoadBalanceAlgorithm(); - assertThat(roundRobinAlgorithm.getInstanceId("simple_traffic", instances), is(instance1)); - assertThat(roundRobinAlgorithm.getInstanceId("simple_traffic", instances), is(instance2)); - assertThat(roundRobinAlgorithm.getInstanceId("simple_traffic", instances), is(instance1)); + assertThat(roundRobinAlgorithm.getInstanceMetaData("simple_traffic", instances), is(instance1)); + assertThat(roundRobinAlgorithm.getInstanceMetaData("simple_traffic", instances), is(instance2)); + assertThat(roundRobinAlgorithm.getInstanceMetaData("simple_traffic", instances), is(instance1)); } } diff --git a/kernel/traffic/distsql/handler/src/test/java/org/apache/shardingsphere/traffic/distsql/handler/fixture/DistSQLTrafficLoadBalanceAlgorithmFixture.java b/kernel/traffic/distsql/handler/src/test/java/org/apache/shardingsphere/traffic/distsql/handler/fixture/DistSQLTrafficLoadBalanceAlgorithmFixture.java index fc86ce430bfb2..c319474d92388 100644 --- a/kernel/traffic/distsql/handler/src/test/java/org/apache/shardingsphere/traffic/distsql/handler/fixture/DistSQLTrafficLoadBalanceAlgorithmFixture.java +++ b/kernel/traffic/distsql/handler/src/test/java/org/apache/shardingsphere/traffic/distsql/handler/fixture/DistSQLTrafficLoadBalanceAlgorithmFixture.java @@ -25,7 +25,7 @@ public final class DistSQLTrafficLoadBalanceAlgorithmFixture implements TrafficLoadBalanceAlgorithm { @Override - public InstanceMetaData 
getInstanceId(final String name, final List instances) { + public InstanceMetaData getInstanceMetaData(final String name, final List instances) { return null; } diff --git a/kernel/traffic/distsql/parser/src/main/java/org/apache/shardingsphere/traffic/distsql/parser/core/TrafficDistSQLStatementVisitor.java b/kernel/traffic/distsql/parser/src/main/java/org/apache/shardingsphere/traffic/distsql/parser/core/TrafficDistSQLStatementVisitor.java index b842f50c7f9d8..7951ff6a816eb 100644 --- a/kernel/traffic/distsql/parser/src/main/java/org/apache/shardingsphere/traffic/distsql/parser/core/TrafficDistSQLStatementVisitor.java +++ b/kernel/traffic/distsql/parser/src/main/java/org/apache/shardingsphere/traffic/distsql/parser/core/TrafficDistSQLStatementVisitor.java @@ -52,7 +52,7 @@ public ASTNode visitAlterTrafficRule(final AlterTrafficRuleContext ctx) { @Override public ASTNode visitTrafficRuleDefinition(final TrafficRuleDefinitionContext ctx) { - AlgorithmSegment loadBalancerSegment = null != ctx.loadBalancerDefinition() ? (AlgorithmSegment) visit(ctx.loadBalancerDefinition().algorithmDefinition()) : null; + AlgorithmSegment loadBalancerSegment = null == ctx.loadBalancerDefinition() ? 
null : (AlgorithmSegment) visit(ctx.loadBalancerDefinition().algorithmDefinition()); return new TrafficRuleSegment( getIdentifierValue(ctx.ruleName()), buildLabels(ctx.labelDefinition()), (AlgorithmSegment) visit(ctx.trafficAlgorithmDefinition().algorithmDefinition()), loadBalancerSegment); } diff --git a/kernel/transaction/core/src/main/java/org/apache/shardingsphere/transaction/ConnectionTransaction.java b/kernel/transaction/core/src/main/java/org/apache/shardingsphere/transaction/ConnectionTransaction.java index ec39c677f8721..f0d2e4fdd5ec6 100644 --- a/kernel/transaction/core/src/main/java/org/apache/shardingsphere/transaction/ConnectionTransaction.java +++ b/kernel/transaction/core/src/main/java/org/apache/shardingsphere/transaction/ConnectionTransaction.java @@ -36,20 +36,17 @@ public final class ConnectionTransaction { @Getter private final TransactionType transactionType; - private final String databaseName; - @Setter @Getter private volatile boolean rollbackOnly; private final ShardingSphereTransactionManager transactionManager; - public ConnectionTransaction(final String databaseName, final TransactionRule rule) { - this(databaseName, rule.getDefaultType(), rule); + public ConnectionTransaction(final TransactionRule rule) { + this(rule.getDefaultType(), rule); } - public ConnectionTransaction(final String databaseName, final TransactionType transactionType, final TransactionRule rule) { - this.databaseName = databaseName; + public ConnectionTransaction(final TransactionType transactionType, final TransactionRule rule) { this.transactionType = transactionType; transactionManager = rule.getResource().getTransactionManager(transactionType); } @@ -94,14 +91,15 @@ public boolean isHoldTransaction(final boolean autoCommit) { /** * Get connection in transaction. 
- * + * + * @param databaseName database name * @param dataSourceName data source name * @param transactionConnectionContext transaction connection context * @return connection in transaction * @throws SQLException SQL exception */ - public Optional getConnection(final String dataSourceName, final TransactionConnectionContext transactionConnectionContext) throws SQLException { - return isInTransaction(transactionConnectionContext) ? Optional.of(transactionManager.getConnection(this.databaseName, dataSourceName)) : Optional.empty(); + public Optional getConnection(final String databaseName, final String dataSourceName, final TransactionConnectionContext transactionConnectionContext) throws SQLException { + return isInTransaction(transactionConnectionContext) ? Optional.of(transactionManager.getConnection(databaseName, dataSourceName)) : Optional.empty(); } /** diff --git a/kernel/transaction/core/src/main/java/org/apache/shardingsphere/transaction/rule/TransactionRule.java b/kernel/transaction/core/src/main/java/org/apache/shardingsphere/transaction/rule/TransactionRule.java index ee40067667855..ce567379c569a 100644 --- a/kernel/transaction/core/src/main/java/org/apache/shardingsphere/transaction/rule/TransactionRule.java +++ b/kernel/transaction/core/src/main/java/org/apache/shardingsphere/transaction/rule/TransactionRule.java @@ -71,8 +71,8 @@ private synchronized ShardingSphereTransactionManagerEngine createTransactionMan Map databaseTypes = new LinkedHashMap<>(databases.size(), 1F); for (Entry entry : databases.entrySet()) { ShardingSphereDatabase database = entry.getValue(); - database.getResourceMetaData().getDataSources().forEach((key, value) -> dataSourceMap.put(database.getName() + "." + key, value)); - database.getResourceMetaData().getStorageTypes().forEach((key, value) -> databaseTypes.put(database.getName() + "." 
+ key, value)); + database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().forEach((key, value) -> dataSourceMap.put(database.getName() + "." + key, value.getDataSource())); + database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().forEach((key, value) -> databaseTypes.put(database.getName() + "." + key, value.getStorageType())); } if (dataSourceMap.isEmpty()) { return new ShardingSphereTransactionManagerEngine(defaultType); @@ -141,9 +141,4 @@ private void closeEngine(final ShardingSphereTransactionManagerEngine engine) { log.error("Close transaction engine failed", ex); } } - - @Override - public String getType() { - return TransactionRule.class.getSimpleName(); - } } diff --git a/kernel/transaction/core/src/test/java/org/apache/shardingsphere/transaction/ConnectionTransactionTest.java b/kernel/transaction/core/src/test/java/org/apache/shardingsphere/transaction/ConnectionTransactionTest.java index 71252a1b351c1..3134ffbf4f92b 100644 --- a/kernel/transaction/core/src/test/java/org/apache/shardingsphere/transaction/ConnectionTransactionTest.java +++ b/kernel/transaction/core/src/test/java/org/apache/shardingsphere/transaction/ConnectionTransactionTest.java @@ -17,7 +17,6 @@ package org.apache.shardingsphere.transaction; -import org.apache.shardingsphere.infra.database.core.DefaultDatabase; import org.apache.shardingsphere.transaction.ConnectionTransaction.DistributedTransactionOperationType; import org.apache.shardingsphere.transaction.config.TransactionRuleConfiguration; import org.apache.shardingsphere.transaction.rule.TransactionRule; @@ -37,34 +36,34 @@ class ConnectionTransactionTest { @Test void assertDistributedTransactionOperationTypeCommit() { - connectionTransaction = new ConnectionTransaction(DefaultDatabase.LOGIC_NAME, getXATransactionRule()); + connectionTransaction = new ConnectionTransaction(getXATransactionRule()); DistributedTransactionOperationType operationType = 
connectionTransaction.getDistributedTransactionOperationType(true); assertThat(operationType, is(DistributedTransactionOperationType.COMMIT)); } @Test void assertDistributedTransactionOperationTypeIgnore() { - connectionTransaction = new ConnectionTransaction(DefaultDatabase.LOGIC_NAME, getXATransactionRule()); + connectionTransaction = new ConnectionTransaction(getXATransactionRule()); DistributedTransactionOperationType operationType = connectionTransaction.getDistributedTransactionOperationType(false); assertThat(operationType, is(DistributedTransactionOperationType.IGNORE)); } @Test void assertIsLocalTransaction() { - connectionTransaction = new ConnectionTransaction(DefaultDatabase.LOGIC_NAME, getLocalTransactionRule()); + connectionTransaction = new ConnectionTransaction(getLocalTransactionRule()); assertTrue(connectionTransaction.isLocalTransaction()); - connectionTransaction = new ConnectionTransaction(DefaultDatabase.LOGIC_NAME, getXATransactionRule()); + connectionTransaction = new ConnectionTransaction(getXATransactionRule()); assertFalse(connectionTransaction.isLocalTransaction()); } @Test void assertIsHoldTransaction() { - connectionTransaction = new ConnectionTransaction(DefaultDatabase.LOGIC_NAME, getLocalTransactionRule()); + connectionTransaction = new ConnectionTransaction(getLocalTransactionRule()); assertTrue(connectionTransaction.isHoldTransaction(false)); - connectionTransaction = new ConnectionTransaction(DefaultDatabase.LOGIC_NAME, getXATransactionRule()); + connectionTransaction = new ConnectionTransaction(getXATransactionRule()); assertTrue(connectionTransaction.isInTransaction()); assertTrue(connectionTransaction.isHoldTransaction(true)); - connectionTransaction = new ConnectionTransaction(DefaultDatabase.LOGIC_NAME, getLocalTransactionRule()); + connectionTransaction = new ConnectionTransaction(getLocalTransactionRule()); assertFalse(connectionTransaction.isHoldTransaction(true)); } diff --git 
a/kernel/transaction/core/src/test/java/org/apache/shardingsphere/transaction/rule/TransactionRuleTest.java b/kernel/transaction/core/src/test/java/org/apache/shardingsphere/transaction/rule/TransactionRuleTest.java index a62016760ba1d..3165f0a0a6bb9 100644 --- a/kernel/transaction/core/src/test/java/org/apache/shardingsphere/transaction/rule/TransactionRuleTest.java +++ b/kernel/transaction/core/src/test/java/org/apache/shardingsphere/transaction/rule/TransactionRuleTest.java @@ -17,10 +17,8 @@ package org.apache.shardingsphere.transaction.rule; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; -import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.apache.shardingsphere.transaction.api.TransactionType; import org.apache.shardingsphere.transaction.config.TransactionRuleConfiguration; @@ -37,6 +35,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -61,16 +60,14 @@ void assertAddResource() { assertThat(actual.getDatabases().size(), is(2)); assertTrue(actual.getDatabases().containsKey(SHARDING_DB_1)); ResourceMetaData resourceMetaData1 = actual.getDatabases().get(SHARDING_DB_1).getResourceMetaData(); - assertThat(resourceMetaData1.getDataSources().size(), is(2)); - assertTrue(resourceMetaData1.getDataSources().containsKey("ds_0")); - assertTrue(resourceMetaData1.getDataSources().containsKey("ds_1")); - assertThat(resourceMetaData1.getStorageTypes().size(), is(2)); + 
assertThat(resourceMetaData1.getStorageUnitMetaData().getDataSources().size(), is(2)); + assertTrue(resourceMetaData1.getStorageUnitMetaData().getDataSources().containsKey("ds_0")); + assertTrue(resourceMetaData1.getStorageUnitMetaData().getDataSources().containsKey("ds_1")); assertTrue(actual.getDatabases().containsKey(SHARDING_DB_2)); ResourceMetaData resourceMetaData2 = actual.getDatabases().get(SHARDING_DB_2).getResourceMetaData(); - assertThat(resourceMetaData2.getDataSources().size(), is(2)); - assertTrue(resourceMetaData2.getDataSources().containsKey("ds_0")); - assertTrue(resourceMetaData2.getDataSources().containsKey("ds_1")); - assertThat(resourceMetaData2.getStorageTypes().size(), is(2)); + assertThat(resourceMetaData2.getStorageUnitMetaData().getDataSources().size(), is(2)); + assertTrue(resourceMetaData2.getStorageUnitMetaData().getDataSources().containsKey("ds_0")); + assertTrue(resourceMetaData2.getStorageUnitMetaData().getDataSources().containsKey("ds_1")); assertThat(actual.getResource().getTransactionManager(TransactionType.XA), instanceOf(ShardingSphereTransactionManagerFixture.class)); } @@ -99,15 +96,11 @@ private ShardingSphereDatabase createDatabase() { } private ResourceMetaData createResourceMetaData() { - ResourceMetaData result = mock(ResourceMetaData.class); + ResourceMetaData result = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS); Map dataSourceMap = new LinkedHashMap<>(2, 1F); dataSourceMap.put("ds_0", new MockedDataSource()); dataSourceMap.put("ds_1", new MockedDataSource()); - when(result.getDataSources()).thenReturn(dataSourceMap); - Map databaseTypes = new LinkedHashMap<>(2, 1F); - databaseTypes.put("ds_0", TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")); - databaseTypes.put("ds_1", TypedSPILoader.getService(DatabaseType.class, "openGauss")); - when(result.getStorageTypes()).thenReturn(databaseTypes); + when(result.getStorageUnitMetaData().getDataSources()).thenReturn(dataSourceMap); return result; } @@ -120,15 
+113,11 @@ private ShardingSphereDatabase createAddDatabase() { } private ResourceMetaData createAddResourceMetaData() { - ResourceMetaData result = mock(ResourceMetaData.class); + ResourceMetaData result = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS); Map dataSourceMap = new LinkedHashMap<>(2, 1F); dataSourceMap.put("ds_0", new MockedDataSource()); dataSourceMap.put("ds_1", new MockedDataSource()); - when(result.getDataSources()).thenReturn(dataSourceMap); - Map databaseTypes = new LinkedHashMap<>(2, 1F); - databaseTypes.put("ds_0", TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")); - databaseTypes.put("ds_1", TypedSPILoader.getService(DatabaseType.class, "openGauss")); - when(result.getStorageTypes()).thenReturn(databaseTypes); + when(result.getStorageUnitMetaData().getDataSources()).thenReturn(dataSourceMap); return result; } diff --git a/kernel/transaction/core/src/test/java/org/apache/shardingsphere/transaction/rule/builder/TransactionRuleBuilderTest.java b/kernel/transaction/core/src/test/java/org/apache/shardingsphere/transaction/rule/builder/TransactionRuleBuilderTest.java index fdff2da88c168..252f6d72b4d2e 100644 --- a/kernel/transaction/core/src/test/java/org/apache/shardingsphere/transaction/rule/builder/TransactionRuleBuilderTest.java +++ b/kernel/transaction/core/src/test/java/org/apache/shardingsphere/transaction/rule/builder/TransactionRuleBuilderTest.java @@ -49,7 +49,7 @@ void assertBuild() { new RuleMetaData(Collections.singletonList(mock(ShardingSphereRule.class))), Collections.singletonMap("test", mock(ShardingSphereSchema.class))); TransactionRule rule = new TransactionRuleBuilder().build(ruleConfig, Collections.singletonMap(DefaultDatabase.LOGIC_NAME, database), mock(ConfigurationProperties.class)); assertNotNull(rule.getConfiguration()); - assertThat(rule.getDatabases().get("logic_db").getResourceMetaData().getDataSources().size(), is(2)); + 
assertThat(rule.getDatabases().get("logic_db").getResourceMetaData().getStorageUnitMetaData().getStorageUnits().size(), is(2)); } private Map createDataSourceMap() { diff --git a/kernel/transaction/type/xa/core/src/main/java/org/apache/shardingsphere/transaction/xa/jta/datasource/swapper/DataSourceSwapper.java b/kernel/transaction/type/xa/core/src/main/java/org/apache/shardingsphere/transaction/xa/jta/datasource/swapper/DataSourceSwapper.java index c161758000afd..ed0400467e7ee 100644 --- a/kernel/transaction/type/xa/core/src/main/java/org/apache/shardingsphere/transaction/xa/jta/datasource/swapper/DataSourceSwapper.java +++ b/kernel/transaction/type/xa/core/src/main/java/org/apache/shardingsphere/transaction/xa/jta/datasource/swapper/DataSourceSwapper.java @@ -20,8 +20,8 @@ import com.google.common.base.CaseFormat; import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; -import org.apache.shardingsphere.infra.datasource.ShardingSphereStorageDataSourceWrapper; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.transaction.xa.jta.datasource.properties.XADataSourceDefinition; import org.apache.shardingsphere.transaction.xa.jta.exception.XADataSourceInitializeException; @@ -52,8 +52,7 @@ public final class DataSourceSwapper { */ public XADataSource swap(final DataSource dataSource) { XADataSource result = createXADataSource(); - DataSource readDataSource = dataSource instanceof ShardingSphereStorageDataSourceWrapper ? 
(ShardingSphereStorageDataSourceWrapper) dataSource : dataSource; - setProperties(result, getDatabaseAccessConfiguration(readDataSource)); + setProperties(result, getDatabaseAccessConfiguration(dataSource)); return result; } @@ -79,8 +78,9 @@ private XADataSource loadXADataSource(final String xaDataSourceClassName) throws private Map getDatabaseAccessConfiguration(final DataSource dataSource) { Map result = new HashMap<>(3, 1F); - Map standardProps = DataSourcePropertiesCreator.create(dataSource).getAllStandardProperties(); - result.put("url", standardProps.get("url")); + Map standardProps = DataSourcePoolPropertiesCreator.create( + dataSource instanceof CatalogSwitchableDataSource ? ((CatalogSwitchableDataSource) dataSource).getDataSource() : dataSource).getAllStandardProperties(); + result.put("url", dataSource instanceof CatalogSwitchableDataSource ? ((CatalogSwitchableDataSource) dataSource).getUrl() : standardProps.get("url")); result.put("user", standardProps.get("username")); result.put("password", standardProps.get("password")); return result; diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/ContextManager.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/ContextManager.java index 3c9a88277d8e7..55a1b9f777f19 100644 --- a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/ContextManager.java +++ b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/ContextManager.java @@ -21,12 +21,13 @@ import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.config.props.ConfigurationPropertyKey; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.executor.kernel.ExecutorEngine; import org.apache.shardingsphere.infra.instance.InstanceContext; import 
org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.metadata.database.schema.SchemaManager; import org.apache.shardingsphere.infra.metadata.database.schema.builder.GenericSchemaBuilder; @@ -44,7 +45,6 @@ import org.apache.shardingsphere.mode.manager.switcher.SwitchingResource; import org.apache.shardingsphere.mode.metadata.MetaDataContexts; -import javax.sql.DataSource; import java.sql.SQLException; import java.util.Collections; import java.util.Map; @@ -99,13 +99,13 @@ public synchronized void renewMetaDataContexts(final MetaDataContexts metaDataCo } /** - * Get data source map. - * + * Get storage units. + * * @param databaseName database name - * @return data source map + * @return storage units */ - public Map getDataSourceMap(final String databaseName) { - return metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData().getDataSources(); + public Map getStorageUnits(final String databaseName) { + return metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData().getStorageUnitMetaData().getStorageUnits(); } /** @@ -117,8 +117,8 @@ public void reloadDatabaseMetaData(final String databaseName) { try { ShardingSphereDatabase database = metaDataContexts.get().getMetaData().getDatabase(databaseName); ResourceMetaData currentResourceMetaData = database.getResourceMetaData(); - Map dataSourceProps = metaDataContexts.get().getPersistService().getDataSourceUnitService().load(databaseName); - SwitchingResource switchingResource = new ResourceSwitchManager().createByAlterDataSourceProps(currentResourceMetaData, dataSourceProps); + Map props = 
metaDataContexts.get().getPersistService().getDataSourceUnitService().load(databaseName); + SwitchingResource switchingResource = new ResourceSwitchManager().createByAlterDataSourcePoolProperties(currentResourceMetaData, props); metaDataContexts.get().getMetaData().getDatabases().putAll(configurationContextManager.renewDatabase(database, switchingResource)); MetaDataContexts reloadedMetaDataContexts = createMetaDataContexts(databaseName, switchingResource); deletedSchemaNames(databaseName, reloadedMetaDataContexts.getMetaData().getDatabase(databaseName), database); @@ -181,8 +181,9 @@ public void reloadSchema(final String databaseName, final String schemaName, fin private ShardingSphereSchema loadSchema(final String databaseName, final String schemaName, final String dataSourceName) throws SQLException { ShardingSphereDatabase database = metaDataContexts.get().getMetaData().getDatabase(databaseName); database.reloadRules(MutableDataNodeRule.class); - GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(database.getProtocolType(), database.getResourceMetaData().getStorageTypes(), - Collections.singletonMap(dataSourceName, database.getResourceMetaData().getDataSources().get(dataSourceName)), + GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(database.getProtocolType(), + Collections.singletonMap(dataSourceName, database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().get(dataSourceName).getStorageType()), + Collections.singletonMap(dataSourceName, database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().get(dataSourceName).getDataSource()), database.getRuleMetaData().getRules(), metaDataContexts.get().getMetaData().getProps(), schemaName); ShardingSphereSchema result = GenericSchemaBuilder.build(material).get(schemaName); result.getViews().putAll(metaDataContexts.get().getPersistService().getDatabaseMetaDataService().getViewMetaDataPersistService().load(database.getName(), schemaName)); @@ 
-197,9 +198,11 @@ private ShardingSphereSchema loadSchema(final String databaseName, final String * @param tableName to be reloaded table name */ public void reloadTable(final String databaseName, final String schemaName, final String tableName) { - Map dataSourceMap = metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData().getDataSources(); + ShardingSphereDatabase database = metaDataContexts.get().getMetaData().getDatabase(databaseName); + GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(database.getProtocolType(), + database.getResourceMetaData().getStorageUnitMetaData(), database.getRuleMetaData().getRules(), metaDataContexts.get().getMetaData().getProps(), schemaName); try { - reloadTable(databaseName, schemaName, tableName, dataSourceMap); + persistTable(database, schemaName, tableName, material); } catch (final SQLException ex) { log.error("Reload table: {} meta data of database: {} schema: {} failed", tableName, databaseName, schemaName, ex); } @@ -214,20 +217,19 @@ public void reloadTable(final String databaseName, final String schemaName, fina * @param tableName to be reloaded table name */ public void reloadTable(final String databaseName, final String schemaName, final String dataSourceName, final String tableName) { - Map dataSourceMap = Collections.singletonMap( - dataSourceName, metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData().getDataSources().get(dataSourceName)); + ShardingSphereDatabase database = metaDataContexts.get().getMetaData().getDatabase(databaseName); + StorageUnit storageUnit = database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().get(dataSourceName); + GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(database.getProtocolType(), Collections.singletonMap(dataSourceName, storageUnit.getStorageType()), + Collections.singletonMap(dataSourceName, storageUnit.getDataSource()), 
database.getRuleMetaData().getRules(), metaDataContexts.get().getMetaData().getProps(), schemaName); try { - reloadTable(databaseName, schemaName, tableName, dataSourceMap); + persistTable(database, schemaName, tableName, material); } catch (final SQLException ex) { log.error("Reload table: {} meta data of database: {} schema: {} with data source: {} failed", tableName, databaseName, schemaName, dataSourceName, ex); } } - private void reloadTable(final String databaseName, final String schemaName, final String tableName, final Map dataSourceMap) throws SQLException { - ShardingSphereDatabase database = metaDataContexts.get().getMetaData().getDatabase(databaseName); - GenericSchemaBuilderMaterial material = new GenericSchemaBuilderMaterial(database.getProtocolType(), - database.getResourceMetaData().getStorageTypes(), dataSourceMap, database.getRuleMetaData().getRules(), metaDataContexts.get().getMetaData().getProps(), schemaName); - ShardingSphereSchema schema = GenericSchemaBuilder.build(Collections.singletonList(tableName), material).getOrDefault(schemaName, new ShardingSphereSchema()); + private void persistTable(final ShardingSphereDatabase database, final String schemaName, final String tableName, final GenericSchemaBuilderMaterial material) throws SQLException { + ShardingSphereSchema schema = GenericSchemaBuilder.build(Collections.singleton(tableName), material).getOrDefault(schemaName, new ShardingSphereSchema()); metaDataContexts.get().getPersistService().getDatabaseMetaDataService().getTableMetaDataPersistService() .persist(database.getName(), schemaName, Collections.singletonMap(tableName, schema.getTable(tableName))); } diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/context/ConfigurationContextManager.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/context/ConfigurationContextManager.java index 330d00695fbb8..c8a30ac444738 100644 --- 
a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/context/ConfigurationContextManager.java +++ b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/context/ConfigurationContextManager.java @@ -24,9 +24,11 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.config.rule.scope.DatabaseRuleConfiguration; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.storage.StorageResource; -import org.apache.shardingsphere.infra.datasource.storage.StorageUnit; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResource; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnitNodeMapper; import org.apache.shardingsphere.infra.instance.InstanceContext; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; @@ -57,12 +59,13 @@ import java.util.Optional; import java.util.Properties; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; /** * Configuration context manager. */ -@Slf4j @RequiredArgsConstructor +@Slf4j public final class ConfigurationContextManager { private final AtomicReference metaDataContexts; @@ -73,15 +76,15 @@ public final class ConfigurationContextManager { * Register storage unit. 
* * @param databaseName database name - * @param dataSourceProps data source properties + * @param propsMap data source pool properties map */ @SuppressWarnings("rawtypes") - public synchronized void registerStorageUnit(final String databaseName, final Map dataSourceProps) { + public synchronized void registerStorageUnit(final String databaseName, final Map propsMap) { try { Collection staleResourceHeldRules = getStaleResourceHeldRules(databaseName); staleResourceHeldRules.forEach(ResourceHeldRule::closeStaleResource); SwitchingResource switchingResource = - new NewResourceSwitchManager().registerStorageUnit(metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(), dataSourceProps); + new NewResourceSwitchManager().registerStorageUnit(metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(), propsMap); buildNewMetaDataContext(databaseName, switchingResource); } catch (final SQLException ex) { log.error("Alter database: {} register storage unit failed", databaseName, ex); @@ -92,15 +95,15 @@ public synchronized void registerStorageUnit(final String databaseName, final Ma * Alter storage unit. 
* * @param databaseName database name - * @param dataSourceProps data source properties + * @param propsMap data source pool properties map */ @SuppressWarnings("rawtypes") - public synchronized void alterStorageUnit(final String databaseName, final Map dataSourceProps) { + public synchronized void alterStorageUnit(final String databaseName, final Map propsMap) { try { Collection staleResourceHeldRules = getStaleResourceHeldRules(databaseName); staleResourceHeldRules.forEach(ResourceHeldRule::closeStaleResource); SwitchingResource switchingResource = - new NewResourceSwitchManager().alterStorageUnit(metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(), dataSourceProps); + new NewResourceSwitchManager().alterStorageUnit(metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(), propsMap); buildNewMetaDataContext(databaseName, switchingResource); } catch (final SQLException ex) { log.error("Alter database: {} register storage unit failed", databaseName, ex); @@ -171,7 +174,8 @@ public synchronized void alterRuleConfiguration(final String databaseName, final ShardingSphereDatabase database = metaDataContexts.get().getMetaData().getDatabase(databaseName); Collection rules = new LinkedList<>(database.getRuleMetaData().getRules()); rules.removeIf(each -> each.getConfiguration().getClass().isAssignableFrom(ruleConfig.getClass())); - rules.addAll(DatabaseRulesBuilder.build(databaseName, database.getResourceMetaData().getDataSources(), database.getRuleMetaData().getRules(), ruleConfig, instanceContext)); + rules.addAll(DatabaseRulesBuilder.build(databaseName, + database.getResourceMetaData().getStorageUnitMetaData().getDataSources(), database.getRuleMetaData().getRules(), ruleConfig, instanceContext)); refreshMetadata(databaseName, database, rules); } catch (final SQLException ex) { log.error("Alter database: {} rule configurations failed", databaseName, ex); @@ -190,7 +194,8 @@ public synchronized void 
dropRuleConfiguration(final String databaseName, final Collection rules = new LinkedList<>(database.getRuleMetaData().getRules()); rules.removeIf(each -> each.getConfiguration().getClass().isAssignableFrom(ruleConfig.getClass())); if (isNotEmptyConfig(ruleConfig)) { - rules.addAll(DatabaseRulesBuilder.build(databaseName, database.getResourceMetaData().getDataSources(), database.getRuleMetaData().getRules(), ruleConfig, instanceContext)); + rules.addAll(DatabaseRulesBuilder.build(databaseName, + database.getResourceMetaData().getStorageUnitMetaData().getDataSources(), database.getRuleMetaData().getRules(), ruleConfig, instanceContext)); } refreshMetadata(databaseName, database, rules); } catch (final SQLException ex) { @@ -221,15 +226,15 @@ private MetaDataContexts createMetaDataContextsByAlterRule(final String database * Alter data source units configuration. * * @param databaseName database name - * @param dataSourcePropsMap altered data source properties map + * @param propsMap altered data source pool properties map */ @SuppressWarnings("rawtypes") - public synchronized void alterDataSourceUnitsConfiguration(final String databaseName, final Map dataSourcePropsMap) { + public synchronized void alterDataSourceUnitsConfiguration(final String databaseName, final Map propsMap) { try { Collection staleResourceHeldRules = getStaleResourceHeldRules(databaseName); staleResourceHeldRules.forEach(ResourceHeldRule::closeStaleResource); SwitchingResource switchingResource = - new ResourceSwitchManager().createByAlterDataSourceProps(metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(), dataSourcePropsMap); + new ResourceSwitchManager().createByAlterDataSourcePoolProperties(metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(), propsMap); metaDataContexts.get().getMetaData().getDatabases().putAll(renewDatabase(metaDataContexts.get().getMetaData().getDatabase(databaseName), switchingResource)); // TODO Remove this 
logic when issue #22887 are finished. MetaDataContexts reloadMetaDataContexts = createMetaDataContexts(databaseName, false, switchingResource, null); @@ -269,30 +274,30 @@ public void alterSchemaMetaData(final String databaseName, final ShardingSphereD * @return ShardingSphere databases */ public Map renewDatabase(final ShardingSphereDatabase database, final SwitchingResource resource) { - Map newStorageNodes = getNewStorageNodes(database.getResourceMetaData().getStorageNodeMetaData().getDataSources(), resource); - Map newStorageUnits = getNewStorageUnits(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits(), resource); - StorageResource newStorageResource = new StorageResource(newStorageNodes, newStorageUnits); - return Collections.singletonMap(database.getName().toLowerCase(), - new ShardingSphereDatabase(database.getName(), database.getProtocolType(), - new ResourceMetaData(database.getName(), newStorageResource, database.getResourceMetaData().getDataSourcePropsMap()), - database.getRuleMetaData(), database.getSchemas())); + Map newStorageNodes = getNewStorageNodes(database.getResourceMetaData().getStorageNodeDataSources(), resource); + Map newStorageUnitNodeMappers = getNewStorageUnitNodeMappers(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits(), resource); + StorageResource newStorageResource = new StorageResource(newStorageNodes, newStorageUnitNodeMappers); + Map propsMap = database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().entrySet().stream() + .collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getDataSourcePoolProperties(), (oldValue, currentValue) -> currentValue, LinkedHashMap::new)); + return Collections.singletonMap(database.getName().toLowerCase(), new ShardingSphereDatabase( + database.getName(), database.getProtocolType(), new ResourceMetaData(database.getName(), newStorageResource, propsMap), database.getRuleMetaData(), database.getSchemas())); } - private Map 
getNewStorageNodes(final Map currentStorageNodes, final SwitchingResource resource) { - Map result = new LinkedHashMap<>(); - for (Entry entry : currentStorageNodes.entrySet()) { - if (!resource.getStaleStorageResource().getStorageNodes().containsKey(entry.getKey())) { + private Map getNewStorageNodes(final Map currentStorageNodes, final SwitchingResource resource) { + Map result = new LinkedHashMap<>(); + for (Entry entry : currentStorageNodes.entrySet()) { + if (!resource.getStaleStorageResource().getStorageNodeDataSources().containsKey(entry.getKey())) { result.put(entry.getKey(), entry.getValue()); } } return result; } - private Map getNewStorageUnits(final Map currentStorageUnits, final SwitchingResource resource) { - Map result = new LinkedHashMap<>(); + private Map getNewStorageUnitNodeMappers(final Map currentStorageUnits, final SwitchingResource resource) { + Map result = new LinkedHashMap<>(currentStorageUnits.size(), 1F); for (Entry entry : currentStorageUnits.entrySet()) { - if (!resource.getStaleStorageResource().getStorageUnits().containsKey(entry.getKey())) { - result.put(entry.getKey(), entry.getValue()); + if (!resource.getStaleStorageResource().getStorageUnitNodeMappers().containsKey(entry.getKey())) { + result.put(entry.getKey(), entry.getValue().getUnitNodeMapper()); } } return result; @@ -337,18 +342,11 @@ public MetaDataContexts createMetaDataContexts(final String databaseName, final */ public synchronized Map createChangedDatabases(final String databaseName, final boolean internalLoadMetaData, final SwitchingResource switchingResource, final Collection ruleConfigs) throws SQLException { - ResourceMetaData resourceMetaData = metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(); - if (null != switchingResource && null != switchingResource.getNewStorageResource() && !switchingResource.getNewStorageResource().getStorageNodes().isEmpty()) { - 
resourceMetaData.getStorageNodeMetaData().getDataSources().putAll(switchingResource.getNewStorageResource().getStorageNodes()); - } - if (null != switchingResource && null != switchingResource.getNewStorageResource() && !switchingResource.getNewStorageResource().getStorageUnits().isEmpty()) { - resourceMetaData.getStorageUnitMetaData().getStorageUnits().putAll(switchingResource.getNewStorageResource().getStorageUnits()); - } Collection toBeCreatedRuleConfigs = null == ruleConfigs ? metaDataContexts.get().getMetaData().getDatabase(databaseName).getRuleMetaData().getConfigurations() : ruleConfigs; - StorageResource storageResource = new StorageResource(resourceMetaData.getStorageNodeMetaData().getDataSources(), resourceMetaData.getStorageUnitMetaData().getStorageUnits()); - DatabaseConfiguration toBeCreatedDatabaseConfig = new DataSourceProvidedDatabaseConfiguration(storageResource, toBeCreatedRuleConfigs, resourceMetaData.getDataSourcePropsMap()); + DatabaseConfiguration toBeCreatedDatabaseConfig = getDatabaseConfiguration( + metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData(), switchingResource, toBeCreatedRuleConfigs); ShardingSphereDatabase changedDatabase = createChangedDatabase(metaDataContexts.get().getMetaData().getDatabase(databaseName).getName(), internalLoadMetaData, metaDataContexts.get().getPersistService(), toBeCreatedDatabaseConfig, metaDataContexts.get().getMetaData().getProps(), instanceContext); Map result = new LinkedHashMap<>(metaDataContexts.get().getMetaData().getDatabases()); @@ -357,6 +355,27 @@ public synchronized Map createChangedDatabases(f return result; } + private DatabaseConfiguration getDatabaseConfiguration(final ResourceMetaData resourceMetaData, final SwitchingResource switchingResource, + final Collection toBeCreatedRuleConfigs) { + StorageResource storageResource = getMergedStorageResource(resourceMetaData, switchingResource); + Map propsMap = null == switchingResource + ? 
resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap() + : switchingResource.getMergedDataSourcePoolPropertiesMap(); + return new DataSourceProvidedDatabaseConfiguration(storageResource, toBeCreatedRuleConfigs, propsMap); + } + + private StorageResource getMergedStorageResource(final ResourceMetaData currentResourceMetaData, final SwitchingResource switchingResource) { + Map storageNodeDataSources = currentResourceMetaData.getStorageNodeDataSources(); + Map storageUnitNodeMappers = currentResourceMetaData.getStorageUnitMetaData().getUnitNodeMappers(); + if (null != switchingResource && null != switchingResource.getNewStorageResource() && !switchingResource.getNewStorageResource().getStorageNodeDataSources().isEmpty()) { + storageNodeDataSources.putAll(switchingResource.getNewStorageResource().getStorageNodeDataSources()); + } + if (null != switchingResource && null != switchingResource.getNewStorageResource() && !switchingResource.getNewStorageResource().getStorageUnitNodeMappers().isEmpty()) { + storageUnitNodeMappers.putAll(switchingResource.getNewStorageResource().getStorageUnitNodeMappers()); + } + return new StorageResource(storageNodeDataSources, storageUnitNodeMappers); + } + private ShardingSphereDatabase createChangedDatabase(final String databaseName, final boolean internalLoadMetaData, final MetaDataBasedPersistService persistService, final DatabaseConfiguration databaseConfig, final ConfigurationProperties props, final InstanceContext instanceContext) throws SQLException { return internalLoadMetaData diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/NewResourceSwitchManager.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/NewResourceSwitchManager.java index 16eef1c5844a9..84fff446e03a3 100644 --- a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/NewResourceSwitchManager.java +++ 
b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/NewResourceSwitchManager.java @@ -18,14 +18,17 @@ package org.apache.shardingsphere.mode.manager.switcher; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.storage.StorageResource; -import org.apache.shardingsphere.infra.datasource.storage.StorageResourceWithProperties; -import org.apache.shardingsphere.infra.datasource.storage.StorageUnit; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResource; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResourceCreator; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResourceWithProperties; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnitNodeMapper; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; import javax.sql.DataSource; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; @@ -40,56 +43,58 @@ public final class NewResourceSwitchManager { * Register storage unit. 
* * @param resourceMetaData resource meta data - * @param dataSourceProps data source properties + * @param propsMap data source pool properties map * @return created switching resource */ - public SwitchingResource registerStorageUnit(final ResourceMetaData resourceMetaData, final Map dataSourceProps) { - resourceMetaData.getDataSourcePropsMap().putAll(dataSourceProps); - StorageResourceWithProperties toBeCreatedStorageResource = DataSourcePoolCreator.createStorageResourceWithoutDataSource(dataSourceProps); + public SwitchingResource registerStorageUnit(final ResourceMetaData resourceMetaData, final Map propsMap) { + Map mergedPropsMap = new HashMap<>(resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap()); + mergedPropsMap.putAll(propsMap); + StorageResourceWithProperties toBeCreatedStorageResource = StorageResourceCreator.createStorageResourceWithoutDataSource(propsMap); return new SwitchingResource(resourceMetaData, getRegisterNewStorageResource(resourceMetaData, toBeCreatedStorageResource), - new StorageResource(Collections.emptyMap(), Collections.emptyMap()), resourceMetaData.getDataSourcePropsMap()); + new StorageResource(Collections.emptyMap(), Collections.emptyMap()), mergedPropsMap); } private StorageResource getRegisterNewStorageResource(final ResourceMetaData resourceMetaData, final StorageResourceWithProperties toBeCreatedStorageResource) { - Map storageNodes = new LinkedHashMap<>(toBeCreatedStorageResource.getStorageNodes().size(), 1F); - for (String each : toBeCreatedStorageResource.getStorageNodes().keySet()) { - if (!resourceMetaData.getStorageNodeMetaData().getDataSources().containsKey(each)) { - storageNodes.put(each, DataSourcePoolCreator.create(toBeCreatedStorageResource.getDataSourcePropertiesMap().get(each))); + Map storageNodes = new LinkedHashMap<>(toBeCreatedStorageResource.getStorageNodeDataSources().size(), 1F); + for (StorageNode each : toBeCreatedStorageResource.getStorageNodeDataSources().keySet()) { + if 
(!resourceMetaData.getStorageNodeDataSources().containsKey(each)) { + storageNodes.put(each, DataSourcePoolCreator.create(toBeCreatedStorageResource.getDataSourcePoolPropertiesMap().get(each.getName()))); } } - return new StorageResource(storageNodes, toBeCreatedStorageResource.getStorageUnits()); + return new StorageResource(storageNodes, toBeCreatedStorageResource.getStorageUnitNodeMappers()); } /** * Alter storage unit. * * @param resourceMetaData resource meta data - * @param dataSourceProps data source properties + * @param propsMap data source pool properties map * @return created switching resource */ - public SwitchingResource alterStorageUnit(final ResourceMetaData resourceMetaData, final Map dataSourceProps) { - resourceMetaData.getDataSourcePropsMap().putAll(dataSourceProps); - StorageResourceWithProperties toBeAlteredStorageResource = DataSourcePoolCreator.createStorageResourceWithoutDataSource(dataSourceProps); + public SwitchingResource alterStorageUnit(final ResourceMetaData resourceMetaData, final Map propsMap) { + Map mergedDataSourcePoolPropertiesMap = new HashMap<>(resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap()); + mergedDataSourcePoolPropertiesMap.putAll(propsMap); + StorageResourceWithProperties toBeAlteredStorageResource = StorageResourceCreator.createStorageResourceWithoutDataSource(mergedDataSourcePoolPropertiesMap); return new SwitchingResource(resourceMetaData, getAlterNewStorageResource(toBeAlteredStorageResource), - getStaleStorageResource(resourceMetaData, toBeAlteredStorageResource), resourceMetaData.getDataSourcePropsMap()); + getStaleStorageResource(resourceMetaData, toBeAlteredStorageResource), mergedDataSourcePoolPropertiesMap); } private StorageResource getAlterNewStorageResource(final StorageResourceWithProperties toBeAlteredStorageResource) { - Map storageNodes = new LinkedHashMap<>(toBeAlteredStorageResource.getStorageNodes().size(), 1F); - for (String each : 
toBeAlteredStorageResource.getStorageNodes().keySet()) { - storageNodes.put(each, DataSourcePoolCreator.create(toBeAlteredStorageResource.getDataSourcePropertiesMap().get(each))); + Map storageNodes = new LinkedHashMap<>(toBeAlteredStorageResource.getStorageNodeDataSources().size(), 1F); + for (StorageNode each : toBeAlteredStorageResource.getStorageNodeDataSources().keySet()) { + storageNodes.put(each, DataSourcePoolCreator.create(toBeAlteredStorageResource.getDataSourcePoolPropertiesMap().get(each.getName()))); } - return new StorageResource(storageNodes, toBeAlteredStorageResource.getStorageUnits()); + return new StorageResource(storageNodes, toBeAlteredStorageResource.getStorageUnitNodeMappers()); } private StorageResource getStaleStorageResource(final ResourceMetaData resourceMetaData, final StorageResourceWithProperties toBeAlteredStorageResource) { - Map storageNodes = new LinkedHashMap<>(toBeAlteredStorageResource.getStorageNodes().size(), 1F); - for (Entry entry : resourceMetaData.getStorageNodeMetaData().getDataSources().entrySet()) { - if (toBeAlteredStorageResource.getStorageNodes().containsKey(entry.getKey())) { + Map storageNodes = new LinkedHashMap<>(toBeAlteredStorageResource.getStorageNodeDataSources().size(), 1F); + for (Entry entry : resourceMetaData.getStorageNodeDataSources().entrySet()) { + if (toBeAlteredStorageResource.getStorageNodeDataSources().containsKey(entry.getKey())) { storageNodes.put(entry.getKey(), entry.getValue()); } } - return new StorageResource(storageNodes, toBeAlteredStorageResource.getStorageUnits()); + return new StorageResource(storageNodes, toBeAlteredStorageResource.getStorageUnitNodeMappers()); } /** @@ -100,18 +105,20 @@ private StorageResource getStaleStorageResource(final ResourceMetaData resourceM * @return created switching resource */ public SwitchingResource unregisterStorageUnit(final ResourceMetaData resourceMetaData, final String storageUnitName) { - 
resourceMetaData.getDataSourcePropsMap().remove(storageUnitName); + Map mergedDataSourcePoolPropertiesMap = new HashMap<>(resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap()); + mergedDataSourcePoolPropertiesMap.keySet().removeIf(each -> each.equals(storageUnitName)); + resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap().remove(storageUnitName); return new SwitchingResource(resourceMetaData, new StorageResource(Collections.emptyMap(), Collections.emptyMap()), - getToBeRemovedStaleStorageResource(resourceMetaData, storageUnitName), resourceMetaData.getDataSourcePropsMap()); + getToBeRemovedStaleStorageResource(resourceMetaData, storageUnitName), mergedDataSourcePoolPropertiesMap); } private StorageResource getToBeRemovedStaleStorageResource(final ResourceMetaData resourceMetaData, final String storageUnitName) { - StorageUnit storageUnit = resourceMetaData.getStorageUnitMetaData().getStorageUnits().remove(storageUnitName); - Map reservedStorageUnits = resourceMetaData.getStorageUnitMetaData().getStorageUnits(); - Map storageNodes = new LinkedHashMap<>(1, 1F); - if (reservedStorageUnits.values().stream().noneMatch(each -> each.getNodeName().equals(storageUnit.getNodeName()))) { - storageNodes.put(storageUnit.getNodeName(), resourceMetaData.getStorageNodeMetaData().getDataSources().get(storageUnit.getNodeName())); + StorageUnitNodeMapper storageUnitNodeMapper = resourceMetaData.getStorageUnitMetaData().getUnitNodeMappers().remove(storageUnitName); + Map reservedStorageUnitNodeMappers = resourceMetaData.getStorageUnitMetaData().getUnitNodeMappers(); + Map storageNodes = new LinkedHashMap<>(1, 1F); + if (reservedStorageUnitNodeMappers.values().stream().noneMatch(each -> each.getStorageNode().equals(storageUnitNodeMapper.getStorageNode()))) { + storageNodes.put(storageUnitNodeMapper.getStorageNode(), resourceMetaData.getStorageNodeDataSources().get(storageUnitNodeMapper.getStorageNode())); } - return new 
StorageResource(storageNodes, Collections.singletonMap(storageUnitName, storageUnit)); + return new StorageResource(storageNodes, Collections.singletonMap(storageUnitName, storageUnitNodeMapper)); } } diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManager.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManager.java index 47463d3699def..e2b5da6e102a8 100644 --- a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManager.java +++ b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManager.java @@ -18,16 +18,20 @@ package org.apache.shardingsphere.mode.manager.switcher; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; -import org.apache.shardingsphere.infra.datasource.storage.StorageResource; -import org.apache.shardingsphere.infra.datasource.storage.StorageResourceWithProperties; -import org.apache.shardingsphere.infra.datasource.storage.StorageUnit; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResource; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResourceCreator; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResourceWithProperties; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; +import 
org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnitNodeMapper; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; import javax.sql.DataSource; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; @@ -42,133 +46,136 @@ public final class ResourceSwitchManager { * Create switching resource. * * @param resourceMetaData resource meta data - * @param toBeChangedDataSourceProps to be changed data source properties map + * @param toBeChangedPropsMap to be changed data source pool properties map * @return created switching resource */ - public SwitchingResource create(final ResourceMetaData resourceMetaData, final Map toBeChangedDataSourceProps) { - resourceMetaData.getDataSourcePropsMap().putAll(toBeChangedDataSourceProps); - StorageResourceWithProperties toBeChangedStorageResource = DataSourcePoolCreator.createStorageResourceWithoutDataSource(toBeChangedDataSourceProps); + public SwitchingResource create(final ResourceMetaData resourceMetaData, final Map toBeChangedPropsMap) { + Map mergedPropsMap = new HashMap<>(resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap()); + mergedPropsMap.putAll(toBeChangedPropsMap); + StorageResourceWithProperties toBeChangedStorageResource = StorageResourceCreator.createStorageResourceWithoutDataSource(toBeChangedPropsMap); return new SwitchingResource(resourceMetaData, createNewStorageResource(resourceMetaData, toBeChangedStorageResource), - getStaleDataSources(resourceMetaData, toBeChangedStorageResource), toBeChangedDataSourceProps); + getStaleDataSources(resourceMetaData, toBeChangedStorageResource), mergedPropsMap); } /** * Create switching resource by drop resource. 
* * @param resourceMetaData resource meta data - * @param toBeDeletedDataSourceProps to be deleted data source properties map + * @param toBeDeletedPropsMap to be deleted data source pool properties map * @return created switching resource */ - public SwitchingResource createByDropResource(final ResourceMetaData resourceMetaData, final Map toBeDeletedDataSourceProps) { - resourceMetaData.getDataSourcePropsMap().keySet().removeIf(toBeDeletedDataSourceProps::containsKey); - StorageResourceWithProperties toToBeRemovedStorageResource = DataSourcePoolCreator.createStorageResourceWithoutDataSource(toBeDeletedDataSourceProps); + public SwitchingResource createByDropResource(final ResourceMetaData resourceMetaData, final Map toBeDeletedPropsMap) { + Map mergedDataSourcePoolPropertiesMap = new HashMap<>(resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap()); + mergedDataSourcePoolPropertiesMap.keySet().removeIf(toBeDeletedPropsMap::containsKey); + StorageResourceWithProperties toToBeRemovedStorageResource = StorageResourceCreator.createStorageResourceWithoutDataSource(toBeDeletedPropsMap); return new SwitchingResource(resourceMetaData, new StorageResource(Collections.emptyMap(), Collections.emptyMap()), - getToBeRemovedStaleDataSources(resourceMetaData, toToBeRemovedStorageResource), - getToBeReversedDataSourcePropsMap(resourceMetaData.getDataSourcePropsMap(), toBeDeletedDataSourceProps.keySet())); - } - - private Map getToBeReversedDataSourcePropsMap(final Map dataSourcePropsMap, final Collection toBeDroppedResourceNames) { - return dataSourcePropsMap.entrySet().stream().filter(entry -> !toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + getToBeRemovedStaleDataSources(resourceMetaData, toToBeRemovedStorageResource), mergedDataSourcePoolPropertiesMap); } /** - * Create switching resource by alter data source props. + * Create switching resource by alter data source pool properties. 
* * @param resourceMetaData resource meta data - * @param toBeChangedDataSourceProps to be changed data source properties map + * @param toBeChangedPropsMap to be changed data source pool properties map * @return created switching resource */ - public SwitchingResource createByAlterDataSourceProps(final ResourceMetaData resourceMetaData, final Map toBeChangedDataSourceProps) { - resourceMetaData.getDataSourcePropsMap().keySet().removeIf(each -> !toBeChangedDataSourceProps.containsKey(each)); - resourceMetaData.getDataSourcePropsMap().putAll(toBeChangedDataSourceProps); - StorageResourceWithProperties toBeChangedStorageResource = DataSourcePoolCreator.createStorageResourceWithoutDataSource(toBeChangedDataSourceProps); + public SwitchingResource createByAlterDataSourcePoolProperties(final ResourceMetaData resourceMetaData, final Map toBeChangedPropsMap) { + Map mergedDataSourcePoolPropertiesMap = new HashMap<>(resourceMetaData.getStorageUnitMetaData().getDataSourcePoolPropertiesMap()); + mergedDataSourcePoolPropertiesMap.keySet().removeIf(each -> !toBeChangedPropsMap.containsKey(each)); + mergedDataSourcePoolPropertiesMap.putAll(toBeChangedPropsMap); + StorageResourceWithProperties toBeChangedStorageResource = StorageResourceCreator.createStorageResourceWithoutDataSource(toBeChangedPropsMap); StorageResource staleStorageResource = getStaleDataSources(resourceMetaData, toBeChangedStorageResource); - staleStorageResource.getStorageNodes().putAll(getToBeDeletedDataSources(resourceMetaData.getStorageNodeMetaData().getDataSources(), toBeChangedStorageResource.getStorageNodes().keySet())); - staleStorageResource.getStorageUnits().putAll(getToBeDeletedStorageUnits(resourceMetaData.getStorageUnitMetaData().getStorageUnits(), toBeChangedStorageResource.getStorageUnits().keySet())); - return new SwitchingResource(resourceMetaData, createNewStorageResource(resourceMetaData, toBeChangedStorageResource), staleStorageResource, toBeChangedDataSourceProps); + 
staleStorageResource.getStorageNodeDataSources() + .putAll(getToBeDeletedDataSources(resourceMetaData.getStorageNodeDataSources(), toBeChangedStorageResource.getStorageNodeDataSources().keySet())); + staleStorageResource.getStorageUnitNodeMappers().putAll( + getToBeDeletedStorageUnitNodeMappers(resourceMetaData.getStorageUnitMetaData().getStorageUnits(), toBeChangedStorageResource.getStorageUnitNodeMappers().keySet())); + return new SwitchingResource(resourceMetaData, createNewStorageResource(resourceMetaData, toBeChangedStorageResource), staleStorageResource, mergedDataSourcePoolPropertiesMap); } private StorageResource createNewStorageResource(final ResourceMetaData resourceMetaData, final StorageResourceWithProperties toBeChangedStorageResource) { - Map storageNodes = getNewStorageNodes(resourceMetaData, toBeChangedStorageResource.getStorageNodes(), toBeChangedStorageResource.getDataSourcePropertiesMap()); - Map storageUnits = getNewStorageUnits(resourceMetaData, toBeChangedStorageResource.getStorageUnits()); - return new StorageResource(storageNodes, storageUnits); - } - - private Map getNewStorageNodes(final ResourceMetaData resourceMetaData, final Map toBeChangedStorageNodes, - final Map dataSourcePropertiesMap) { - Map result = new LinkedHashMap<>(resourceMetaData.getStorageNodeMetaData().getDataSources()); - result.keySet().removeAll(getToBeDeletedDataSources(resourceMetaData.getStorageNodeMetaData().getDataSources(), toBeChangedStorageNodes.keySet()).keySet()); - result.putAll(getChangedDataSources(resourceMetaData.getStorageNodeMetaData().getDataSources(), toBeChangedStorageNodes, dataSourcePropertiesMap)); - result.putAll(getToBeAddedDataSources(resourceMetaData.getStorageNodeMetaData().getDataSources(), toBeChangedStorageNodes, dataSourcePropertiesMap)); + Map storageNodes = + getNewStorageNodes(resourceMetaData, toBeChangedStorageResource.getStorageNodeDataSources(), toBeChangedStorageResource.getDataSourcePoolPropertiesMap()); + Map 
storageUnitNodeMappers = getNewStorageUnitNodeMappers(resourceMetaData, toBeChangedStorageResource.getStorageUnitNodeMappers()); + return new StorageResource(storageNodes, storageUnitNodeMappers); + } + + private Map getNewStorageNodes(final ResourceMetaData resourceMetaData, + final Map toBeChangedStorageNodes, final Map propsMap) { + Map result = new LinkedHashMap<>(resourceMetaData.getStorageNodeDataSources()); + result.keySet().removeAll(getToBeDeletedDataSources(resourceMetaData.getStorageNodeDataSources(), toBeChangedStorageNodes.keySet()).keySet()); + result.putAll(getChangedDataSources(resourceMetaData.getStorageNodeDataSources(), toBeChangedStorageNodes, propsMap)); + result.putAll(getToBeAddedDataSources(resourceMetaData.getStorageNodeDataSources(), toBeChangedStorageNodes, propsMap)); return result; } - private Map getNewStorageUnits(final ResourceMetaData resourceMetaData, final Map toBeChangedStorageUnits) { - Map result = new LinkedHashMap<>(resourceMetaData.getStorageUnitMetaData().getStorageUnits()); - result.keySet().removeAll(getToBeDeletedStorageUnits(resourceMetaData.getStorageUnitMetaData().getStorageUnits(), toBeChangedStorageUnits.keySet()).keySet()); - result.putAll(getChangedStorageUnits(resourceMetaData.getStorageUnitMetaData().getStorageUnits(), toBeChangedStorageUnits)); - result.putAll(getToBeAddedStorageUnits(resourceMetaData.getStorageUnitMetaData().getStorageUnits(), toBeChangedStorageUnits)); + private Map getNewStorageUnitNodeMappers(final ResourceMetaData resourceMetaData, final Map toBeChangedStorageUnitNodeMappers) { + Map result = new LinkedHashMap<>(resourceMetaData.getStorageUnitMetaData().getUnitNodeMappers()); + result.keySet().removeAll(getToBeDeletedStorageUnitNodeMappers(resourceMetaData.getStorageUnitMetaData().getStorageUnits(), toBeChangedStorageUnitNodeMappers.keySet()).keySet()); + result.putAll(getChangedStorageUnitNodeMappers(resourceMetaData.getStorageUnitMetaData().getStorageUnits(), 
toBeChangedStorageUnitNodeMappers)); + result.putAll(getToBeAddedStorageUnitNodeMappers(resourceMetaData.getStorageUnitMetaData().getUnitNodeMappers(), toBeChangedStorageUnitNodeMappers)); return result; } - private Map getChangedDataSources(final Map storageNodes, final Map toBeChangedStorageNodes, - final Map dataSourcePropertiesMap) { - Collection toBeChangedDataSourceNames = toBeChangedStorageNodes.keySet().stream() - .filter(each -> isModifiedDataSource(storageNodes, each, dataSourcePropertiesMap.get(each))).collect(Collectors.toList()); - Map result = new LinkedHashMap<>(toBeChangedStorageNodes.size(), 1F); - for (String each : toBeChangedDataSourceNames) { - result.put(each, DataSourcePoolCreator.create(dataSourcePropertiesMap.get(each))); + private Map getChangedDataSources(final Map storageNodes, + final Map toBeChangedStorageNodes, final Map propsMap) { + Collection toBeChangedDataSourceNames = toBeChangedStorageNodes.keySet().stream() + .filter(each -> isModifiedDataSource(storageNodes, each, propsMap.get(each.getName()))).collect(Collectors.toList()); + Map result = new LinkedHashMap<>(toBeChangedStorageNodes.size(), 1F); + for (StorageNode each : toBeChangedDataSourceNames) { + result.put(each, DataSourcePoolCreator.create(propsMap.get(each.getName()))); } return result; } - private boolean isModifiedDataSource(final Map originalDataSources, final String dataSourceName, final DataSourceProperties dataSourceProps) { - return originalDataSources.containsKey(dataSourceName) && !dataSourceProps.equals(DataSourcePropertiesCreator.create(originalDataSources.get(dataSourceName))); + private boolean isModifiedDataSource(final Map originalDataSources, final StorageNode storageNode, final DataSourcePoolProperties propsMap) { + return originalDataSources.containsKey(storageNode) && !propsMap.equals(DataSourcePoolPropertiesCreator.create(originalDataSources.get(storageNode))); } - private Map getToBeAddedDataSources(final Map storageNodes, final Map 
toBeChangedStorageNodes, - final Map dataSourcePropertiesMap) { - Collection toBeAddedDataSourceNames = toBeChangedStorageNodes.keySet().stream().filter(each -> !storageNodes.containsKey(each)).collect(Collectors.toList()); - Map result = new LinkedHashMap<>(); - for (String each : toBeAddedDataSourceNames) { - result.put(each, DataSourcePoolCreator.create(dataSourcePropertiesMap.get(each))); + private Map getToBeAddedDataSources(final Map storageNodes, final Map toBeChangedStorageNodes, + final Map propsMap) { + Collection toBeAddedDataSourceNames = toBeChangedStorageNodes.keySet().stream().filter(each -> !storageNodes.containsKey(each)).collect(Collectors.toList()); + Map result = new LinkedHashMap<>(); + for (StorageNode each : toBeAddedDataSourceNames) { + result.put(each, DataSourcePoolCreator.create(propsMap.get(each.getName()))); } return result; } private StorageResource getToBeRemovedStaleDataSources(final ResourceMetaData resourceMetaData, final StorageResourceWithProperties toBeRemovedStorageResource) { - Map reservedStorageUnits = resourceMetaData.getStorageUnitMetaData().getStorageUnits().entrySet().stream() - .filter(entry -> !toBeRemovedStorageResource.getStorageUnits().containsKey(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); - Collection inUsedDataSourceNames = reservedStorageUnits.values().stream().map(StorageUnit::getNodeName).collect(Collectors.toSet()); - Map staleStorageNodes = resourceMetaData.getStorageNodeMetaData().getDataSources().entrySet().stream() - .filter(entry -> toBeRemovedStorageResource.getStorageNodes().containsKey(entry.getKey()) && !inUsedDataSourceNames.contains(entry.getKey())) + Map reservedStorageUnitNodeMappers = resourceMetaData.getStorageUnitMetaData().getStorageUnits().entrySet().stream() + .filter(entry -> !toBeRemovedStorageResource.getStorageUnitNodeMappers().containsKey(entry.getKey())) + .collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getUnitNodeMapper())); + 
Collection inUsedDataSourceNames = reservedStorageUnitNodeMappers.values().stream().map(StorageUnitNodeMapper::getStorageNode).collect(Collectors.toSet()); + Map staleStorageNodes = resourceMetaData.getStorageNodeDataSources().entrySet().stream() + .filter(entry -> toBeRemovedStorageResource.getStorageNodeDataSources().containsKey(entry.getKey()) && !inUsedDataSourceNames.contains(entry.getKey())) .collect(Collectors.toMap(Entry::getKey, Entry::getValue)); - Map staleStorageUnits = resourceMetaData.getStorageUnitMetaData().getStorageUnits().entrySet().stream() - .filter(entry -> !reservedStorageUnits.containsKey(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); - return new StorageResource(staleStorageNodes, staleStorageUnits); + Map staleStorageUnitNodeMappers = resourceMetaData.getStorageUnitMetaData().getStorageUnits().entrySet().stream() + .filter(entry -> !reservedStorageUnitNodeMappers.containsKey(entry.getKey())).collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getUnitNodeMapper())); + return new StorageResource(staleStorageNodes, staleStorageUnitNodeMappers); } private StorageResource getStaleDataSources(final ResourceMetaData resourceMetaData, final StorageResourceWithProperties toBeChangedStorageResource) { - Map storageNodes = new LinkedHashMap<>(resourceMetaData.getStorageNodeMetaData().getDataSources().size(), 1F); - Map storageUnits = new LinkedHashMap<>(resourceMetaData.getStorageUnitMetaData().getStorageUnits().size(), 1F); - storageNodes.putAll(getToBeChangedDataSources(resourceMetaData.getStorageNodeMetaData().getDataSources(), toBeChangedStorageResource.getDataSourcePropertiesMap())); - storageUnits.putAll(getChangedStorageUnits(resourceMetaData.getStorageUnitMetaData().getStorageUnits(), toBeChangedStorageResource.getStorageUnits())); - return new StorageResource(storageNodes, storageUnits); - } - - private Map getToBeChangedDataSources(final Map storageNodes, final Map dataSourcePropertiesMap) { - Map 
result = new LinkedHashMap<>(storageNodes.size(), 1F); - for (Entry entry : dataSourcePropertiesMap.entrySet()) { - if (isModifiedDataSource(storageNodes, entry.getKey(), entry.getValue())) { - result.put(entry.getKey(), storageNodes.get(entry.getKey())); + Map storageNodes = new LinkedHashMap<>(resourceMetaData.getStorageNodeDataSources().size(), 1F); + Map storageUnitNodeMappers = new LinkedHashMap<>(resourceMetaData.getStorageUnitMetaData().getUnitNodeMappers().size(), 1F); + storageNodes.putAll(getToBeChangedDataSources(resourceMetaData.getStorageNodeDataSources(), toBeChangedStorageResource.getDataSourcePoolPropertiesMap())); + storageUnitNodeMappers.putAll(getChangedStorageUnitNodeMappers(resourceMetaData.getStorageUnitMetaData().getStorageUnits(), toBeChangedStorageResource.getStorageUnitNodeMappers())); + return new StorageResource(storageNodes, storageUnitNodeMappers); + } + + private Map getToBeChangedDataSources(final Map storageNodes, final Map propsMap) { + Map result = new LinkedHashMap<>(storageNodes.size(), 1F); + for (Entry entry : propsMap.entrySet()) { + StorageNode storageNode = new StorageNode(entry.getKey()); + if (isModifiedDataSource(storageNodes, storageNode, entry.getValue())) { + result.put(storageNode, storageNodes.get(storageNode)); } } return result; } - private Map getToBeDeletedDataSources(final Map storageNodes, final Collection toBeChangedDataSourceNames) { - Map result = new LinkedHashMap<>(storageNodes.size(), 1F); - for (Entry entry : storageNodes.entrySet()) { + private Map getToBeDeletedDataSources(final Map storageNodes, final Collection toBeChangedDataSourceNames) { + Map result = new LinkedHashMap<>(storageNodes.size(), 1F); + for (Entry entry : storageNodes.entrySet()) { if (!toBeChangedDataSourceNames.contains(entry.getKey())) { result.put(entry.getKey(), entry.getValue()); } @@ -176,27 +183,30 @@ private Map getToBeDeletedDataSources(final Map getToBeDeletedStorageUnits(final Map storageUnits, final Collection 
toBeChangedStorageUnitNames) { - Map result = new LinkedHashMap<>(storageUnits.size(), 1F); + private Map getToBeDeletedStorageUnitNodeMappers(final Map storageUnits, + final Collection toBeChangedStorageUnitNames) { + Map result = new LinkedHashMap<>(storageUnits.size(), 1F); for (Entry entry : storageUnits.entrySet()) { if (!toBeChangedStorageUnitNames.contains(entry.getKey())) { - result.put(entry.getKey(), entry.getValue()); + result.put(entry.getKey(), entry.getValue().getUnitNodeMapper()); } } return result; } - private Map getChangedStorageUnits(final Map storageUnits, final Map toBeChangedStorageUnits) { - return toBeChangedStorageUnits.entrySet().stream().filter(entry -> isModifiedStorageUnit(storageUnits, entry.getKey(), entry.getValue())) + private Map getChangedStorageUnitNodeMappers(final Map storageUnits, + final Map toBeChangedStorageUnitNodeMappers) { + return toBeChangedStorageUnitNodeMappers.entrySet().stream().filter(entry -> isModifiedStorageUnitNodeMapper(storageUnits, entry.getKey(), entry.getValue())) .collect(Collectors.toMap(Entry::getKey, Entry::getValue, (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); } - private boolean isModifiedStorageUnit(final Map originalStorageUnits, final String dataSourceName, final StorageUnit storageUnit) { - return originalStorageUnits.containsKey(dataSourceName) && !storageUnit.equals(originalStorageUnits.get(dataSourceName)); + private boolean isModifiedStorageUnitNodeMapper(final Map originalStorageUnits, + final String dataSourceName, final StorageUnitNodeMapper storageUnitNodeMapper) { + return originalStorageUnits.containsKey(dataSourceName) && !storageUnitNodeMapper.equals(originalStorageUnits.get(dataSourceName).getUnitNodeMapper()); } - private Map getToBeAddedStorageUnits(final Map storageUnits, final Map toBeChangedStorageUnits) { - return toBeChangedStorageUnits.entrySet().stream() - .filter(entry -> !storageUnits.containsKey(entry.getKey())).collect(Collectors.toMap(Entry::getKey, 
Entry::getValue)); + private Map getToBeAddedStorageUnitNodeMappers(final Map storageUnitNodeMappers, + final Map toBeChangedStorageUnitNodeMappers) { + return toBeChangedStorageUnitNodeMappers.entrySet().stream().filter(entry -> !storageUnitNodeMappers.containsKey(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); } } diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResource.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResource.java index f5c49360ff165..3fe81345e195a 100644 --- a/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResource.java +++ b/mode/core/src/main/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResource.java @@ -17,11 +17,12 @@ package org.apache.shardingsphere.mode.manager.switcher; +import lombok.AccessLevel; import lombok.Getter; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.storage.StorageResource; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResource; import java.util.Map; import java.util.Objects; @@ -30,23 +31,22 @@ * Switching resource. */ @RequiredArgsConstructor +@Getter public final class SwitchingResource { + @Getter(AccessLevel.NONE) private final ResourceMetaData resourceMetaData; - @Getter private final StorageResource newStorageResource; - @Getter private final StorageResource staleStorageResource; - @Getter - private final Map dataSourcePropsMap; + private final Map mergedDataSourcePoolPropertiesMap; /** * Close stale data sources. 
*/ public void closeStaleDataSources() { - staleStorageResource.getStorageNodes().values().stream().filter(Objects::nonNull).forEach(resourceMetaData::close); + staleStorageResource.getStorageNodeDataSources().values().stream().filter(Objects::nonNull).forEach(resourceMetaData::close); } } diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/MetaDataContexts.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/MetaDataContexts.java index 36c21b964a297..aaad8956375f7 100644 --- a/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/MetaDataContexts.java +++ b/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/MetaDataContexts.java @@ -58,9 +58,6 @@ private ShardingSphereStatistics initStatistics(final ShardingSphereMetaData met return new ShardingSphereStatistics(); } DatabaseType protocolType = metaData.getDatabases().values().iterator().next().getProtocolType(); - if (null == protocolType) { - return new ShardingSphereStatistics(); - } DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(protocolType).getDialectDatabaseMetaData(); // TODO can `protocolType instanceof SchemaSupportedDatabaseType ? "PostgreSQL" : protocolType.getType()` replace to trunk database type? DatabaseType databaseType = dialectDatabaseMetaData.getDefaultSchema().isPresent() ? 
TypedSPILoader.getService(DatabaseType.class, "PostgreSQL") : protocolType; @@ -69,8 +66,7 @@ private ShardingSphereStatistics initStatistics(final ShardingSphereMetaData met return new ShardingSphereStatistics(); } ShardingSphereStatistics result = statisticsBuilder.get().build(metaData); - Optional loadedStatistics = Optional.ofNullable(persistService.getShardingSphereDataPersistService()) - .flatMap(shardingSphereDataPersistService -> shardingSphereDataPersistService.load(metaData)); + Optional loadedStatistics = persistService.getShardingSphereDataPersistService().load(metaData); loadedStatistics.ifPresent(optional -> useLoadedToReplaceInit(result, optional)); return result; } diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/MetaDataContextsFactory.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/MetaDataContextsFactory.java index 0da2c2f2dbe9e..7a7a2d8e0c311 100644 --- a/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/MetaDataContextsFactory.java +++ b/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/MetaDataContextsFactory.java @@ -25,7 +25,7 @@ import org.apache.shardingsphere.infra.config.database.impl.DataSourceGeneratedDatabaseConfiguration; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.config.DataSourceConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; import org.apache.shardingsphere.infra.state.datasource.DataSourceState; import org.apache.shardingsphere.infra.state.datasource.DataSourceStateManager; import org.apache.shardingsphere.infra.instance.InstanceContext; @@ -110,8 +110,7 @@ private static Collection getDatabaseNames(final InstanceContext instanc private static Map createEffectiveDatabaseConfigurations(final Collection databaseNames, final Map databaseConfigs, 
final MetaDataPersistService persistService) { - return databaseNames.stream().collect( - Collectors.toMap(each -> each, each -> createEffectiveDatabaseConfiguration(each, databaseConfigs, persistService), (a, b) -> b, () -> new HashMap<>(databaseNames.size(), 1F))); + return databaseNames.stream().collect(Collectors.toMap(each -> each, each -> createEffectiveDatabaseConfiguration(each, databaseConfigs, persistService))); } private static DatabaseConfiguration createEffectiveDatabaseConfiguration(final String databaseName, @@ -147,7 +146,7 @@ private static void persistDatabaseConfigurations(final MetaDataContexts metadat for (Entry entry : param.getDatabaseConfigs().entrySet()) { String databaseName = entry.getKey(); metadataContexts.getPersistService().persistConfigurations(entry.getKey(), entry.getValue(), - metadataContexts.getMetaData().getDatabase(databaseName).getResourceMetaData().getDataSources(), + metadataContexts.getMetaData().getDatabase(databaseName).getResourceMetaData().getStorageUnitMetaData().getDataSources(), metadataContexts.getMetaData().getDatabase(databaseName).getRuleMetaData().getRules()); } } diff --git a/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/NewMetaDataContextsFactory.java b/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/NewMetaDataContextsFactory.java index be5942efd6338..199911a512d2c 100644 --- a/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/NewMetaDataContextsFactory.java +++ b/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/NewMetaDataContextsFactory.java @@ -25,7 +25,7 @@ import org.apache.shardingsphere.infra.config.database.impl.DataSourceGeneratedDatabaseConfiguration; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.config.DataSourceConfiguration; +import 
org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; import org.apache.shardingsphere.infra.state.datasource.DataSourceState; import org.apache.shardingsphere.infra.state.datasource.DataSourceStateManager; import org.apache.shardingsphere.infra.instance.InstanceContext; @@ -112,8 +112,7 @@ private static Collection getDatabaseNames(final InstanceContext instanc private static Map createEffectiveDatabaseConfigurations(final Collection databaseNames, final Map databaseConfigs, final NewMetaDataPersistService persistService) { - return databaseNames.stream().collect( - Collectors.toMap(each -> each, each -> createEffectiveDatabaseConfiguration(each, databaseConfigs, persistService), (a, b) -> b, () -> new HashMap<>(databaseNames.size(), 1F))); + return databaseNames.stream().collect(Collectors.toMap(each -> each, each -> createEffectiveDatabaseConfiguration(each, databaseConfigs, persistService))); } private static DatabaseConfiguration createEffectiveDatabaseConfiguration(final String databaseName, @@ -149,7 +148,7 @@ private static void persistDatabaseConfigurations(final MetaDataContexts metadat for (Entry entry : param.getDatabaseConfigs().entrySet()) { String databaseName = entry.getKey(); metadataContexts.getPersistService().persistConfigurations(entry.getKey(), entry.getValue(), - metadataContexts.getMetaData().getDatabase(databaseName).getResourceMetaData().getDataSources(), + metadataContexts.getMetaData().getDatabase(databaseName).getResourceMetaData().getStorageUnitMetaData().getDataSources(), metadataContexts.getMetaData().getDatabase(databaseName).getRuleMetaData().getRules()); } } diff --git a/mode/core/src/test/java/org/apache/shardingsphere/mode/fixture/FixtureDatabaseRule.java b/mode/core/src/test/java/org/apache/shardingsphere/mode/fixture/FixtureDatabaseRule.java index 99e158cf50690..9529c462968d2 100644 --- a/mode/core/src/test/java/org/apache/shardingsphere/mode/fixture/FixtureDatabaseRule.java +++ 
b/mode/core/src/test/java/org/apache/shardingsphere/mode/fixture/FixtureDatabaseRule.java @@ -31,9 +31,4 @@ public final class FixtureDatabaseRule implements DatabaseRule { public RuleConfiguration getConfiguration() { return mock(RuleConfiguration.class); } - - @Override - public String getType() { - return FixtureDatabaseRule.class.getSimpleName(); - } } diff --git a/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/ContextManagerTest.java b/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/ContextManagerTest.java index c94bcdb919b72..f1ef8afd9b14d 100644 --- a/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/ContextManagerTest.java +++ b/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/ContextManagerTest.java @@ -23,12 +23,16 @@ import org.apache.shardingsphere.infra.database.core.DefaultDatabase; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.datanode.DataNode; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.storage.StorageUtils; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.instance.InstanceContext; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResourceUtils; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnitMetaData; +import 
org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnitNodeMapper; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; @@ -54,6 +58,7 @@ import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; +import java.util.Map.Entry; import java.util.Optional; import java.util.Properties; @@ -95,11 +100,13 @@ void setUp() { private ShardingSphereDatabase mockDatabase() { ShardingSphereDatabase result = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); when(result.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); - when(result.getResourceMetaData().getStorageTypes()).thenReturn(Collections.singletonMap("ds_0", TypedSPILoader.getService(DatabaseType.class, "FIXTURE"))); MutableDataNodeRule mutableDataNodeRule = mock(MutableDataNodeRule.class, RETURNS_DEEP_STUBS); when(mutableDataNodeRule.findTableDataNode("foo_schema", "foo_tbl")).thenReturn(Optional.of(mock(DataNode.class))); when(result.getRuleMetaData()).thenReturn(new RuleMetaData(Collections.singleton(mutableDataNodeRule))); when(result.getSchemas()).thenReturn(new HashMap<>(Collections.singletonMap("foo_schema", new ShardingSphereSchema()))); + StorageUnit storageUnit = mock(StorageUnit.class); + when(storageUnit.getStorageType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); + when(result.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("foo_ds", storageUnit)); return result; } @@ -109,7 +116,7 @@ void assertGetDataSourceMap() { ShardingSphereDatabase database = new ShardingSphereDatabase(DefaultDatabase.LOGIC_NAME, mock(DatabaseType.class), resourceMetaData, mock(RuleMetaData.class), Collections.emptyMap()); 
when(metaDataContexts.getMetaData().getDatabase(DefaultDatabase.LOGIC_NAME)).thenReturn(database); - assertThat(contextManager.getDataSourceMap(DefaultDatabase.LOGIC_NAME).size(), is(1)); + assertThat(contextManager.getStorageUnits(DefaultDatabase.LOGIC_NAME).size(), is(1)); } @Test @@ -213,9 +220,11 @@ private void assertAlteredDataSource(final MockedDataSource actual) { @Test void assertAlterRuleConfiguration() { ResourceMetaData resourceMetaData = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS); - Map dataSources = Collections.singletonMap("ds_0", new MockedDataSource()); - when(resourceMetaData.getStorageNodeMetaData().getDataSources()).thenReturn(dataSources); - when(resourceMetaData.getStorageUnitMetaData().getStorageUnits()).thenReturn(StorageUtils.getStorageUnits(dataSources)); + Map dataSources = Collections.singletonMap("foo_ds", new MockedDataSource()); + when(resourceMetaData.getStorageNodeDataSources()).thenReturn(StorageResourceUtils.getStorageNodeDataSources(dataSources)); + StorageUnitMetaData storageUnitMetaData = mock(StorageUnitMetaData.class); + when(resourceMetaData.getStorageUnitMetaData()).thenReturn(storageUnitMetaData); + when(storageUnitMetaData.getStorageUnits()).thenReturn(Collections.emptyMap()); ShardingSphereDatabase database = new ShardingSphereDatabase("foo_db", TypedSPILoader.getService(DatabaseType.class, "FIXTURE"), resourceMetaData, mock(RuleMetaData.class), Collections.emptyMap()); when(metaDataContexts.getMetaData().getDatabase("foo_db")).thenReturn(database); @@ -232,10 +241,10 @@ void assertAlterDataSourceConfiguration() { when(metaDataContexts.getMetaData().getDatabase("foo_db")).thenReturn(originalDatabaseMetaData); when(metaDataContexts.getMetaData().getGlobalRuleMetaData()).thenReturn(new RuleMetaData(Collections.emptyList())); contextManager.getConfigurationContextManager().alterDataSourceUnitsConfiguration("foo_db", - Collections.singletonMap("foo_ds", new DataSourceProperties(MockedDataSource.class.getName(), 
createProperties("test", "test")))); - assertThat(contextManager.getMetaDataContexts().getMetaData().getDatabase("foo_db").getResourceMetaData().getDataSources().size(), is(3)); + Collections.singletonMap("foo_ds", new DataSourcePoolProperties(MockedDataSource.class.getName(), createProperties("test", "test")))); + assertThat(contextManager.getMetaDataContexts().getMetaData().getDatabase("foo_db").getResourceMetaData().getStorageUnitMetaData().getStorageUnits().size(), is(3)); assertAlteredDataSource((MockedDataSource) contextManager.getMetaDataContexts().getMetaData().getDatabase("foo_db") - .getResourceMetaData().getStorageNodeMetaData().getDataSources().get("foo_ds")); + .getResourceMetaData().getStorageNodeDataSources().get(new StorageNode("foo_ds"))); } private ResourceMetaData createOriginalResource() { @@ -243,9 +252,16 @@ private ResourceMetaData createOriginalResource() { Map originalDataSources = new LinkedHashMap<>(2, 1F); originalDataSources.put("ds_1", new MockedDataSource()); originalDataSources.put("ds_2", new MockedDataSource()); - when(result.getDataSources()).thenReturn(originalDataSources); - when(result.getStorageNodeMetaData().getDataSources()).thenReturn(originalDataSources); - when(result.getStorageUnitMetaData().getStorageUnits()).thenReturn(StorageUtils.getStorageUnits(originalDataSources)); + when(result.getStorageUnitMetaData().getDataSources()).thenReturn(originalDataSources); + Map storageNodeDataSourceMap = StorageResourceUtils.getStorageNodeDataSources(originalDataSources); + Map storageUnits = new LinkedHashMap<>(2, 1F); + Map storageUnitNodeMappers = StorageResourceUtils.getStorageUnitNodeMappers(originalDataSources); + for (Entry entry : storageUnitNodeMappers.entrySet()) { + storageUnits.put(entry.getKey(), new StorageUnit("foo_db", storageNodeDataSourceMap, mock(DataSourcePoolProperties.class), entry.getValue())); + } + when(result.getStorageUnitMetaData().getStorageUnits()).thenReturn(storageUnits); + 
when(result.getStorageUnitMetaData().getUnitNodeMappers()).thenReturn(storageUnitNodeMappers); + when(result.getStorageNodeDataSources()).thenReturn(storageNodeDataSourceMap); return result; } @@ -270,8 +286,9 @@ void assertAlterProperties() { @Test void assertReloadSchema() { - when(metaDataContexts.getMetaData().getDatabase("foo_db").getResourceMetaData().getDataSources()).thenReturn(Collections.singletonMap("foo_ds", new MockedDataSource())); when(metaDataContexts.getMetaData().getDatabase("foo_db").getName()).thenReturn("foo_db"); + when(metaDataContexts.getMetaData().getDatabase("foo_db").getResourceMetaData() + .getStorageUnitMetaData().getStorageUnits().get("foo_ds").getStorageType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); DatabaseMetaDataPersistService databaseMetaDataPersistService = mock(DatabaseMetaDataPersistService.class, RETURNS_DEEP_STUBS); MetaDataPersistService persistService = mock(MetaDataPersistService.class); when(persistService.getDatabaseMetaDataService()).thenReturn(databaseMetaDataPersistService); @@ -282,15 +299,14 @@ void assertReloadSchema() { @Test void assertReloadTable() { - when(metaDataContexts.getMetaData().getDatabase("foo_db").getResourceMetaData().getDataSources()).thenReturn(Collections.singletonMap("foo_ds", new MockedDataSource())); - when(metaDataContexts.getMetaData().getDatabase("foo_db").getResourceMetaData().getStorageTypes()).thenReturn(Collections.singletonMap("foo_ds", - TypedSPILoader.getService(DatabaseType.class, "MySQL"))); + when(metaDataContexts.getMetaData().getDatabase("foo_db").getResourceMetaData().getStorageUnitMetaData().getDataSources()) + .thenReturn(Collections.singletonMap("foo_ds", new MockedDataSource())); DatabaseMetaDataPersistService databaseMetaDataPersistService = mock(DatabaseMetaDataPersistService.class, RETURNS_DEEP_STUBS); MetaDataPersistService persistService = mock(MetaDataPersistService.class); 
when(persistService.getDatabaseMetaDataService()).thenReturn(databaseMetaDataPersistService); when(metaDataContexts.getPersistService()).thenReturn(persistService); contextManager.reloadTable("foo_db", "foo_schema", "foo_table"); - assertTrue(contextManager.getMetaDataContexts().getMetaData().getDatabase("foo_db").getResourceMetaData().getDataSources().containsKey("foo_ds")); + assertTrue(contextManager.getMetaDataContexts().getMetaData().getDatabase("foo_db").getResourceMetaData().getStorageUnitMetaData().getDataSources().containsKey("foo_ds")); } private Map createProperties(final String username, final String password) { diff --git a/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManagerTest.java b/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManagerTest.java index c981dfd887bb6..73ec3bbd89c7e 100644 --- a/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManagerTest.java +++ b/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/ResourceSwitchManagerTest.java @@ -17,7 +17,8 @@ package org.apache.shardingsphere.mode.manager.switcher; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.awaitility.Awaitility; @@ -40,20 +41,20 @@ class ResourceSwitchManagerTest { @Test void assertCreate() { Map dataSourceMap = createDataSourceMap(); - SwitchingResource actual = new ResourceSwitchManager().create(new ResourceMetaData("sharding_db", dataSourceMap), createToBeChangedDataSourcePropsMap()); + SwitchingResource actual = new ResourceSwitchManager().create(new 
ResourceMetaData("sharding_db", dataSourceMap), createToBeChangedDataSourcePoolPropertiesMap()); assertNewDataSources(actual); actual.closeStaleDataSources(); assertStaleDataSources(dataSourceMap); } @Test - void assertCreateByAlterDataSourceProps() { + void assertCreateByAlterDataSourcePoolProperties() { Map dataSourceMap = new HashMap<>(3, 1F); dataSourceMap.put("ds_0", new MockedDataSource()); dataSourceMap.put("ds_1", new MockedDataSource()); - SwitchingResource actual = new ResourceSwitchManager().createByAlterDataSourceProps(new ResourceMetaData("sharding_db", dataSourceMap), Collections.emptyMap()); - assertTrue(actual.getNewStorageResource().getStorageNodes().isEmpty()); - assertThat(actual.getStaleStorageResource().getStorageNodes().size(), is(2)); + SwitchingResource actual = new ResourceSwitchManager().createByAlterDataSourcePoolProperties(new ResourceMetaData("sharding_db", dataSourceMap), Collections.emptyMap()); + assertTrue(actual.getNewStorageResource().getStorageNodeDataSources().isEmpty()); + assertThat(actual.getStaleStorageResource().getStorageNodeDataSources().size(), is(2)); actual.closeStaleDataSources(); assertStaleDataSource((MockedDataSource) dataSourceMap.get("ds_0")); assertStaleDataSource((MockedDataSource) dataSourceMap.get("ds_1")); @@ -66,17 +67,17 @@ private Map createDataSourceMap() { return result; } - private Map createToBeChangedDataSourcePropsMap() { - Map result = new HashMap<>(3, 1F); - result.put("new", new DataSourceProperties(MockedDataSource.class.getName(), getDataSourceProps(2))); - result.put("not_change", new DataSourceProperties(MockedDataSource.class.getName(), getDataSourceProps(2))); - Map replaceProps = getDataSourceProps(3); + private Map createToBeChangedDataSourcePoolPropertiesMap() { + Map result = new HashMap<>(3, 1F); + result.put("new", new DataSourcePoolProperties(MockedDataSource.class.getName(), getDataSourcePoolProperties(2))); + result.put("not_change", new 
DataSourcePoolProperties(MockedDataSource.class.getName(), getDataSourcePoolProperties(2))); + Map replaceProps = getDataSourcePoolProperties(3); replaceProps.put("password", "new_pwd"); - result.put("replace", new DataSourceProperties(MockedDataSource.class.getName(), replaceProps)); + result.put("replace", new DataSourcePoolProperties(MockedDataSource.class.getName(), replaceProps)); return result; } - private Map getDataSourceProps(final int initialCapacity) { + private Map getDataSourcePoolProperties(final int initialCapacity) { Map result = new LinkedHashMap<>(initialCapacity, 1F); result.put("url", new MockedDataSource().getUrl()); result.put("username", "root"); @@ -84,10 +85,10 @@ private Map getDataSourceProps(final int initialCapacity) { } private void assertNewDataSources(final SwitchingResource actual) { - assertThat(actual.getNewStorageResource().getStorageNodes().size(), is(3)); - assertTrue(actual.getNewStorageResource().getStorageNodes().containsKey("not_change")); - assertTrue(actual.getNewStorageResource().getStorageNodes().containsKey("new")); - assertTrue(actual.getNewStorageResource().getStorageNodes().containsKey("replace")); + assertThat(actual.getNewStorageResource().getStorageNodeDataSources().size(), is(3)); + assertTrue(actual.getNewStorageResource().getStorageNodeDataSources().containsKey(new StorageNode("not_change"))); + assertTrue(actual.getNewStorageResource().getStorageNodeDataSources().containsKey(new StorageNode("new"))); + assertTrue(actual.getNewStorageResource().getStorageNodeDataSources().containsKey(new StorageNode("replace"))); } private void assertStaleDataSources(final Map originalDataSourceMap) { diff --git a/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResourceTest.java b/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResourceTest.java index 6cc4f6f1b506d..63da6f7e4a103 100644 --- 
a/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResourceTest.java +++ b/mode/core/src/test/java/org/apache/shardingsphere/mode/manager/switcher/SwitchingResourceTest.java @@ -17,7 +17,8 @@ package org.apache.shardingsphere.mode.manager.switcher; -import org.apache.shardingsphere.infra.datasource.storage.StorageResource; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResource; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.junit.jupiter.api.Test; @@ -33,8 +34,8 @@ class SwitchingResourceTest { void assertCloseStaleDataSources() { MockedDataSource staleDataSource = new MockedDataSource(); ResourceMetaData resourceMetaData = mock(ResourceMetaData.class); - StorageResource newStorageResource = new StorageResource(Collections.singletonMap("new_ds", new MockedDataSource()), Collections.emptyMap()); - StorageResource staleStorageResource = new StorageResource(Collections.singletonMap("stale_ds", staleDataSource), Collections.emptyMap()); + StorageResource newStorageResource = new StorageResource(Collections.singletonMap(new StorageNode("new_ds"), new MockedDataSource()), Collections.emptyMap()); + StorageResource staleStorageResource = new StorageResource(Collections.singletonMap(new StorageNode("stale_ds"), staleDataSource), Collections.emptyMap()); new SwitchingResource(resourceMetaData, newStorageResource, staleStorageResource, Collections.emptyMap()).closeStaleDataSources(); verify(resourceMetaData).close(staleDataSource); } diff --git a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManager.java b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManager.java index 
1cfbc067d3bc3..2e25594130c5b 100644 --- a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManager.java +++ b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManager.java @@ -18,7 +18,7 @@ package org.apache.shardingsphere.mode.manager.cluster; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; @@ -88,23 +88,23 @@ public void alterSchemaMetaData(final AlterSchemaMetaDataPOJO alterSchemaMetaDat } @Override - public void registerStorageUnits(final String databaseName, final Map toBeRegisterStorageUnitProps) { - contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append(databaseName, toBeRegisterStorageUnitProps); + public void registerStorageUnits(final String databaseName, final Map toBeRegisteredProps) { + contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append(databaseName, toBeRegisteredProps); } @Override - public void alterStorageUnits(final String databaseName, final Map toBeUpdatedStorageUnitProps) { - contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append(databaseName, toBeUpdatedStorageUnitProps); + public void alterStorageUnits(final String databaseName, final Map toBeUpdatedProps) { + contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append(databaseName, toBeUpdatedProps); } @Override public void unregisterStorageUnits(final String databaseName, final Collection 
toBeDroppedStorageUnitNames) { contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().persist(databaseName, - getToBeReversedDataSourcePropsMap(contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().load(databaseName), toBeDroppedStorageUnitNames)); + getToBeReversedDataSourcePoolPropertiesMap(contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().load(databaseName), toBeDroppedStorageUnitNames)); } - private Map getToBeReversedDataSourcePropsMap(final Map dataSourcePropsMap, final Collection toBeDroppedResourceNames) { - return dataSourcePropsMap.entrySet().stream().filter(entry -> !toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); + private Map getToBeReversedDataSourcePoolPropertiesMap(final Map propsMap, final Collection toBeDroppedResourceNames) { + return propsMap.entrySet().stream().filter(entry -> !toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); } @Override diff --git a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/NewClusterModeContextManager.java b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/NewClusterModeContextManager.java index 4f87edd053054..c5bc6037617d1 100644 --- a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/NewClusterModeContextManager.java +++ b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/NewClusterModeContextManager.java @@ -18,7 +18,7 @@ package org.apache.shardingsphere.mode.manager.cluster; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import 
org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; @@ -98,27 +98,27 @@ public void alterSchemaMetaData(final AlterSchemaMetaDataPOJO alterSchemaMetaDat } @Override - public void registerStorageUnits(final String databaseName, final Map toBeRegisterStorageUnitProps) { - contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().persistConfig(databaseName, toBeRegisterStorageUnitProps); + public void registerStorageUnits(final String databaseName, final Map toBeRegisteredProps) { + contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().persistConfig(databaseName, toBeRegisteredProps); } @Override - public void alterStorageUnits(final String databaseName, final Map toBeUpdatedStorageUnitProps) { - DatabaseBasedPersistService> dataSourceService = contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService(); - contextManager.getMetaDataContexts().getPersistService().getMetaDataVersionPersistService().switchActiveVersion(dataSourceService.persistConfig(databaseName, toBeUpdatedStorageUnitProps)); + public void alterStorageUnits(final String databaseName, final Map toBeUpdatedProps) { + DatabaseBasedPersistService> dataSourceService = contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService(); + contextManager.getMetaDataContexts().getPersistService().getMetaDataVersionPersistService().switchActiveVersion(dataSourceService.persistConfig(databaseName, toBeUpdatedProps)); } @Override public void unregisterStorageUnits(final String databaseName, final Collection toBeDroppedStorageUnitNames) { contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().delete(databaseName, - 
getToBeDroppedDataSourcePropsMap(contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().load(databaseName), toBeDroppedStorageUnitNames)); + getToBeDroppedDataSourcePoolProperties(contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().load(databaseName), toBeDroppedStorageUnitNames)); } - private Map getToBeDroppedDataSourcePropsMap(final Map dataSourcePropsMap, final Collection toBeDroppedResourceNames) { - Map result = new LinkedHashMap<>(); + private Map getToBeDroppedDataSourcePoolProperties(final Map propsMap, final Collection toBeDroppedResourceNames) { + Map result = new LinkedHashMap<>(); for (String each : toBeDroppedResourceNames) { - if (dataSourcePropsMap.containsKey(each)) { - result.put(each, dataSourcePropsMap.get(each)); + if (propsMap.containsKey(each)) { + result.put(each, propsMap.get(each)); } } return result; diff --git a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceNodesChangedEvent.java b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceNodesChangedEvent.java index 40064ab0be40b..b2aca1cb13940 100644 --- a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceNodesChangedEvent.java +++ b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceNodesChangedEvent.java @@ -19,7 +19,7 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.rule.event.GovernanceEvent; import java.util.Map; @@ -35,5 +35,5 @@ public final class 
DataSourceNodesChangedEvent implements GovernanceEvent { private final String databaseVersion; - private final Map dataSourcePropertiesMap; + private final Map dataSourcePoolPropertiesMap; } diff --git a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceUnitsChangedEvent.java b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceUnitsChangedEvent.java index d9d1be503c6e9..dbba07e26858b 100644 --- a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceUnitsChangedEvent.java +++ b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/config/event/datasource/DataSourceUnitsChangedEvent.java @@ -19,8 +19,8 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.rule.event.GovernanceEvent; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; import java.util.Map; @@ -35,5 +35,5 @@ public final class DataSourceUnitsChangedEvent implements GovernanceEvent { private final String databaseVersion; - private final Map dataSourcePropertiesMap; + private final Map dataSourcePoolPropertiesMap; } diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/event/ShardingSphereSchemaDataAlteredEvent.java b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/data/event/ShardingSphereSchemaDataAlteredEvent.java similarity index 86% rename from infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/event/ShardingSphereSchemaDataAlteredEvent.java rename to 
mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/data/event/ShardingSphereSchemaDataAlteredEvent.java index 39d085f4d72d9..aa9ec92858113 100644 --- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/statistics/event/ShardingSphereSchemaDataAlteredEvent.java +++ b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/data/event/ShardingSphereSchemaDataAlteredEvent.java @@ -15,10 +15,11 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.metadata.statistics.event; +package org.apache.shardingsphere.mode.manager.cluster.coordinator.registry.data.event; import lombok.Getter; import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.infra.rule.event.GovernanceEvent; import org.apache.shardingsphere.infra.yaml.data.pojo.YamlShardingSphereRowData; import java.util.Collection; @@ -29,7 +30,7 @@ */ @RequiredArgsConstructor @Getter -public final class ShardingSphereSchemaDataAlteredEvent { +public final class ShardingSphereSchemaDataAlteredEvent implements GovernanceEvent { private final String databaseName; diff --git a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/metadata/subscriber/ShardingSphereSchemaDataRegistrySubscriber.java b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/metadata/subscriber/ShardingSphereSchemaDataRegistrySubscriber.java index 6c27d1e13743b..102ae795959ce 100644 --- a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/metadata/subscriber/ShardingSphereSchemaDataRegistrySubscriber.java +++ b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/metadata/subscriber/ShardingSphereSchemaDataRegistrySubscriber.java @@ -19,11 +19,11 @@ import 
com.google.common.eventbus.Subscribe; import org.apache.shardingsphere.infra.lock.GlobalLockNames; -import org.apache.shardingsphere.infra.metadata.statistics.event.ShardingSphereSchemaDataAlteredEvent; import org.apache.shardingsphere.infra.util.eventbus.EventBusContext; import org.apache.shardingsphere.metadata.persist.data.ShardingSphereDataPersistService; import org.apache.shardingsphere.mode.lock.GlobalLockDefinition; import org.apache.shardingsphere.mode.manager.cluster.coordinator.lock.GlobalLockPersistService; +import org.apache.shardingsphere.mode.manager.cluster.coordinator.registry.data.event.ShardingSphereSchemaDataAlteredEvent; import org.apache.shardingsphere.mode.repository.cluster.ClusterPersistRepository; /** diff --git a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/metadata/watcher/MetaDataChangedWatcher.java b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/metadata/watcher/MetaDataChangedWatcher.java index f188bc90b090b..60fbaae8e2108 100644 --- a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/metadata/watcher/MetaDataChangedWatcher.java +++ b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/metadata/watcher/MetaDataChangedWatcher.java @@ -20,7 +20,7 @@ import com.google.common.base.Preconditions; import com.google.common.base.Strings; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.database.schema.builder.SystemSchemaBuilderRule; import org.apache.shardingsphere.infra.yaml.config.pojo.rule.YamlRuleConfiguration; import 
org.apache.shardingsphere.infra.yaml.config.swapper.resource.YamlDataSourceConfigurationSwapper; @@ -160,21 +160,21 @@ private Optional createRuleAndDataSourceChangedEvent(final Data @SuppressWarnings("unchecked") private DataSourceUnitsChangedEvent createDataSourceUnitsChangedEvent(final String databaseName, final String databaseVersion, final DataChangedEvent event) { Map> yamlDataSources = YamlEngine.unmarshal(event.getValue(), Map.class); - Map dataSourcePropertiesMap = yamlDataSources.isEmpty() + Map propsMap = yamlDataSources.isEmpty() ? new HashMap<>() : yamlDataSources.entrySet().stream().collect(Collectors.toMap( - Entry::getKey, entry -> new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); - return new DataSourceUnitsChangedEvent(databaseName, databaseVersion, dataSourcePropertiesMap); + Entry::getKey, entry -> new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); + return new DataSourceUnitsChangedEvent(databaseName, databaseVersion, propsMap); } @SuppressWarnings("unchecked") private DataSourceNodesChangedEvent createDataSourceNodesChangedEvent(final String databaseName, final String databaseVersion, final DataChangedEvent event) { Map> yamlDataSources = YamlEngine.unmarshal(event.getValue(), Map.class); - Map dataSourcePropertiesMap = yamlDataSources.isEmpty() + Map propsMap = yamlDataSources.isEmpty() ? 
new HashMap<>() : yamlDataSources.entrySet().stream().collect(Collectors.toMap( - Entry::getKey, entry -> new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); - return new DataSourceNodesChangedEvent(databaseName, databaseVersion, dataSourcePropertiesMap); + Entry::getKey, entry -> new YamlDataSourceConfigurationSwapper().swapToDataSourcePoolProperties(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); + return new DataSourceNodesChangedEvent(databaseName, databaseVersion, propsMap); } @SuppressWarnings("unchecked") diff --git a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/workerid/node/WorkerIdNode.java b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/workerid/node/WorkerIdNode.java index 82811f7a91088..aa35e35a075f1 100644 --- a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/workerid/node/WorkerIdNode.java +++ b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/workerid/node/WorkerIdNode.java @@ -30,11 +30,11 @@ public final class WorkerIdNode { /** * Get worker id generator path. 
- * - * @param instanceId instance id + * + * @param workerId worker id * @return worker id generator path */ - public static String getWorkerIdGeneratorPath(final String instanceId) { - return String.join("/", "", ROOT_NODE, instanceId); + public static String getWorkerIdGeneratorPath(final String workerId) { + return String.join("/", "", ROOT_NODE, workerId); } } diff --git a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriber.java b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriber.java index a2ec8a703d563..c10adcd1f19f3 100644 --- a/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriber.java +++ b/mode/type/cluster/core/src/main/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriber.java @@ -60,7 +60,7 @@ public ConfigurationChangedSubscriber(final RegistryCenter registryCenter, final */ @Subscribe public synchronized void renew(final DataSourceUnitsChangedEvent event) { - contextManager.getConfigurationContextManager().alterDataSourceUnitsConfiguration(event.getDatabaseName(), event.getDataSourcePropertiesMap()); + contextManager.getConfigurationContextManager().alterDataSourceUnitsConfiguration(event.getDatabaseName(), event.getDataSourcePoolPropertiesMap()); disableDataSources(); } diff --git a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManagerTest.java b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManagerTest.java index 404922bad54e7..2c15d64164a89 100644 --- a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManagerTest.java +++ 
b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/ClusterModeContextManagerTest.java @@ -21,7 +21,7 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.instance.ComputeNodeInstance; import org.apache.shardingsphere.infra.instance.InstanceContext; import org.apache.shardingsphere.infra.instance.metadata.jdbc.JDBCInstanceMetaData; @@ -224,7 +224,7 @@ void assertAlterStorageUnitsWithProcessListClusterPerRepoFix() { } @Test - void assertAlterStorageUnitsWithDataSourceProperties() { + void assertAlterStorageUnitsWithDataSourcePoolProperties() { ClusterModeContextManager clusterModeContextManager = new ClusterModeContextManager(); MetaDataPersistService persistService = new MetaDataPersistService(new ClusterPersistRepositoryFixture()); MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, new ShardingSphereMetaData()); @@ -233,12 +233,11 @@ void assertAlterStorageUnitsWithDataSourceProperties() { ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Map stringDataSourcePropertiesMap = new HashMap<>(); - stringDataSourcePropertiesMap.put("active_version", new DataSourceProperties("active_version", new HashMap<>())); - assertDoesNotThrow(() -> 
clusterModeContextManager.alterStorageUnits("db", stringDataSourcePropertiesMap)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); + Map propsMap = new HashMap<>(); + propsMap.put("active_version", new DataSourcePoolProperties("active_version", new HashMap<>())); + assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", propsMap)); } @Test @@ -251,70 +250,60 @@ void assertAlterStorageUnitsInvalidName() { ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Map stringDataSourcePropertiesMap = new HashMap<>(); - stringDataSourcePropertiesMap.put("\n", new DataSourceProperties("\n", new HashMap<>())); - stringDataSourcePropertiesMap.put("active_version", new DataSourceProperties("active_version", new HashMap<>())); - assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", stringDataSourcePropertiesMap)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); + Map propsMap = new HashMap<>(); + propsMap.put("\n", new DataSourcePoolProperties("\n", new HashMap<>())); + propsMap.put("active_version", new DataSourcePoolProperties("active_version", new HashMap<>())); + assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", propsMap)); } @Test - void assertAlterStorageUnitsWithoutDataSourceProperties() { + void 
assertAlterStorageUnitsWithoutDataSourcePoolProperties() { ClusterModeContextManager clusterModeContextManager = new ClusterModeContextManager(); - MetaDataPersistService persistService = new MetaDataPersistService( - new ProcessListClusterPersistRepositoryFixture()); + MetaDataPersistService persistService = new MetaDataPersistService(new ProcessListClusterPersistRepositoryFixture()); Map databases = new HashMap<>(); RuleMetaData globalRuleMetaData = new RuleMetaData(new LinkedList<>()); - MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, - new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), globalRuleMetaData, new ConfigurationProperties(new Properties()))); + MetaDataContexts metaDataContexts = new MetaDataContexts( + persistService, new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), globalRuleMetaData, new ConfigurationProperties(new Properties()))); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", new TreeMap<>())); } @Test - 
void assertAlterStorageUnitsWithEmptyDataSourcePropertiesMap() { + void assertAlterStorageUnitsWithEmptyDataSourcePoolPropertiesMap() { ClusterModeContextManager clusterModeContextManager = new ClusterModeContextManager(); - MetaDataPersistService persistService = new MetaDataPersistService( - new ProcessListClusterPersistRepositoryFixture()); + MetaDataPersistService persistService = new MetaDataPersistService(new ProcessListClusterPersistRepositoryFixture()); MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, new ShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Map stringDataSourcePropertiesMap = new HashMap<>(new TreeMap<>()); - assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", stringDataSourcePropertiesMap)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); + Map propsMap = new HashMap<>(new TreeMap<>()); + assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", propsMap)); } @Test - void assertAlterStorageUnitsWithOneDataSourceProperties() { + void assertAlterStorageUnitsWithOneDataSourcePoolProperties() { 
ClusterModeContextManager clusterModeContextManager = new ClusterModeContextManager(); - MetaDataPersistService persistService = new MetaDataPersistService( - new ProcessListClusterPersistRepositoryFixture()); + MetaDataPersistService persistService = new MetaDataPersistService(new ProcessListClusterPersistRepositoryFixture()); MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, new ShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Map stringDataSourcePropertiesMap = new HashMap<>(); - stringDataSourcePropertiesMap.put("42", new DataSourceProperties("active_version", new HashMap<>())); - assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", stringDataSourcePropertiesMap)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); + Map propsMap = new HashMap<>(); + propsMap.put("42", new DataSourcePoolProperties("active_version", new HashMap<>())); + assertDoesNotThrow(() -> clusterModeContextManager.alterStorageUnits("db", propsMap)); } @Test @@ -324,29 +313,24 @@ void assertUnregisterStorageUnits() { MetaDataContexts metaDataContexts = new 
MetaDataContexts(persistService, createShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); assertDoesNotThrow(() -> clusterModeContextManager.unregisterStorageUnits("db", new LinkedList<>())); } @Test void assertUnregisterStorageUnitsWithProcessListClusterPersistRepoFixture() { ClusterModeContextManager clusterModeContextManager = new ClusterModeContextManager(); - MetaDataPersistService persistService = new MetaDataPersistService( - new ProcessListClusterPersistRepositoryFixture()); + MetaDataPersistService persistService = new MetaDataPersistService(new ProcessListClusterPersistRepositoryFixture()); MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, createShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + 
ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); assertDoesNotThrow(() -> clusterModeContextManager.unregisterStorageUnits("db", new LinkedList<>())); } @@ -357,11 +341,10 @@ void assertUnregisterStorageUnitsWithClusterPersistRepoFixture() { MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, createShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfiguration = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); clusterModeContextManager.setContextManagerAware( - new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfiguration, modeContextManager, null, new EventBusContext()))); + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); assertDoesNotThrow(() -> clusterModeContextManager.alterRuleConfiguration("db", new LinkedList<>())); } @@ -372,15 
+355,13 @@ void assertAlterRuleConfiguration() { MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, createShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Collection ruleConfigurationList = new LinkedList<>(); - ruleConfigurationList.add(new RuleConfigurationFixture()); - assertDoesNotThrow(() -> clusterModeContextManager.alterRuleConfiguration("db", ruleConfigurationList)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); + Collection ruleConfigs = new LinkedList<>(); + ruleConfigs.add(new RuleConfigurationFixture()); + assertDoesNotThrow(() -> clusterModeContextManager.alterRuleConfiguration("db", ruleConfigs)); } private ShardingSphereMetaData createShardingSphereMetaData() { @@ -396,12 +377,10 @@ void assertAlterRuleConfigurationMultiple() { MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, createShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new 
ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); Collection ruleConfigurationList = new LinkedList<>(); ruleConfigurationList.add(new RuleConfigurationFixture()); ruleConfigurationList.add(new RuleConfigurationFixture()); @@ -411,20 +390,17 @@ void assertAlterRuleConfigurationMultiple() { @Test void assertAlterRuleConfigurationWithPersistService() { ClusterModeContextManager clusterModeContextManager = new ClusterModeContextManager(); - MetaDataPersistService persistService = new MetaDataPersistService( - new ProcessListClusterPersistRepositoryFixture()); + MetaDataPersistService persistService = new MetaDataPersistService(new ProcessListClusterPersistRepositoryFixture()); MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, createShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new 
Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Collection ruleConfigurationList = new LinkedList<>(); - ruleConfigurationList.add(new RuleConfigurationFixture()); - assertDoesNotThrow(() -> clusterModeContextManager.alterRuleConfiguration("db", ruleConfigurationList)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); + Collection ruleConfigs = new LinkedList<>(); + ruleConfigs.add(new RuleConfigurationFixture()); + assertDoesNotThrow(() -> clusterModeContextManager.alterRuleConfiguration("db", ruleConfigs)); } @Test @@ -434,12 +410,10 @@ void assertAlterGlobalRuleConfigurationWithEmptyRuleConfigurations() { MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, createShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new 
InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); assertDoesNotThrow(() -> clusterModeContextManager.alterGlobalRuleConfiguration(new LinkedList<>())); } @@ -450,15 +424,13 @@ void assertAlterGlobalRuleConfigurationWithSingleRuleConfigurations() { MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, new ShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Collection ruleConfigurationList = new LinkedList<>(); - ruleConfigurationList.add(new RuleConfigurationFixture()); - assertDoesNotThrow(() -> clusterModeContextManager.alterGlobalRuleConfiguration(ruleConfigurationList)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); + Collection ruleConfigs = new LinkedList<>(); + ruleConfigs.add(new RuleConfigurationFixture()); + assertDoesNotThrow(() -> clusterModeContextManager.alterGlobalRuleConfiguration(ruleConfigs)); } @Test @@ -468,16 +440,14 @@ void assertAlterGlobalRuleConfigurationWithMultipleRuleConfigurations() { MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, new 
ShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); - Collection ruleConfigurationList = new LinkedList<>(); - ruleConfigurationList.add(new RuleConfigurationFixture()); - ruleConfigurationList.add(new RuleConfigurationFixture()); - assertDoesNotThrow(() -> clusterModeContextManager.alterGlobalRuleConfiguration(ruleConfigurationList)); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); + Collection ruleConfigs = new LinkedList<>(); + ruleConfigs.add(new RuleConfigurationFixture()); + ruleConfigs.add(new RuleConfigurationFixture()); + assertDoesNotThrow(() -> clusterModeContextManager.alterGlobalRuleConfiguration(ruleConfigs)); } @Test @@ -487,12 +457,10 @@ void assertAlterProperties() { MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, new ShardingSphereMetaData()); ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", 
new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); - clusterModeContextManager - .setContextManagerAware(new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, - modeConfig, modeContextManager, null, new EventBusContext()))); + clusterModeContextManager.setContextManagerAware( + new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext()))); assertDoesNotThrow(() -> clusterModeContextManager.alterProperties(new Properties())); } @@ -502,8 +470,7 @@ void assertConstructor() { try (MetaDataContexts metaDataContexts = new MetaDataContexts(persistService, new ShardingSphereMetaData())) { ComputeNodeInstance instance = new ComputeNodeInstance(new JDBCInstanceMetaData("42")); WorkerIdGenerator workerIdGenerator = mock(WorkerIdGenerator.class); - ModeConfiguration modeConfig = new ModeConfiguration("Type", - new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); + ModeConfiguration modeConfig = new ModeConfiguration("Type", new ClusterPersistRepositoryConfiguration("Type", "Namespace", "Server Lists", new Properties())); ClusterModeContextManager modeContextManager = new ClusterModeContextManager(); assertDoesNotThrow(() -> new ClusterModeContextManager().setContextManagerAware( new ContextManager(metaDataContexts, new InstanceContext(instance, workerIdGenerator, modeConfig, modeContextManager, null, new EventBusContext())))); diff --git a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/process/subscriber/ProcessListChangedSubscriberTest.java 
b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/process/subscriber/ProcessListChangedSubscriberTest.java index 66fc5fc329266..69cc6d4f5039a 100644 --- a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/process/subscriber/ProcessListChangedSubscriberTest.java +++ b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/process/subscriber/ProcessListChangedSubscriberTest.java @@ -30,8 +30,8 @@ import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.rule.identifier.type.ResourceHeldRule; -import org.apache.shardingsphere.infra.util.eventbus.EventBusContext; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.apache.shardingsphere.infra.util.eventbus.EventBusContext; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.mode.manager.ContextManagerBuilderParameter; import org.apache.shardingsphere.mode.manager.cluster.ClusterContextManagerBuilder; @@ -53,7 +53,6 @@ import java.sql.SQLException; import java.util.Collections; -import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.Map; import java.util.Properties; @@ -98,8 +97,6 @@ private ContextManagerBuilderParameter createContextManagerBuilderParameter() { } private Map createDatabases() { - when(database.getResourceMetaData().getDataSources()).thenReturn(new LinkedHashMap<>()); - when(database.getResourceMetaData().getStorageTypes()).thenReturn(Collections.singletonMap("ds_0", TypedSPILoader.getService(DatabaseType.class, "MySQL"))); when(database.getSchemas()).thenReturn(Collections.singletonMap("foo_schema", new ShardingSphereSchema())); 
when(database.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "MySQL")); when(database.getSchema("foo_schema")).thenReturn(mock(ShardingSphereSchema.class)); @@ -119,7 +116,8 @@ void assertReportLocalProcesses() { subscriber.reportLocalProcesses(new ReportLocalProcessesEvent(instanceId, processId)); ClusterPersistRepository repository = registryCenter.getRepository(); verify(repository).persist("/execution_nodes/foo_id/" + instanceId, - "processes:" + System.lineSeparator() + "- completedUnitCount: 0\n id: foo_id\n idle: false\n startMillis: 0\n totalUnitCount: 0" + System.lineSeparator()); + "processes:" + System.lineSeparator() + "- completedUnitCount: 0\n heldByConnection: false\n id: foo_id\n idle: false\n startMillis: 0\n totalUnitCount: 0" + + System.lineSeparator()); verify(repository).delete("/nodes/compute_nodes/show_process_list_trigger/" + instanceId + ":foo_id"); } diff --git a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/workerid/node/WorkerIdNodeTest.java b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/workerid/node/WorkerIdNodeTest.java index a30ad91db2ab1..8eb7cfb6d3ec6 100644 --- a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/workerid/node/WorkerIdNodeTest.java +++ b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/registry/workerid/node/WorkerIdNodeTest.java @@ -26,6 +26,6 @@ class WorkerIdNodeTest { @Test void assertGetWorkerIdGeneratorPath() { - assertThat(WorkerIdNode.getWorkerIdGeneratorPath("instanceId"), is("/worker_id/instanceId")); + assertThat(WorkerIdNode.getWorkerIdGeneratorPath("1"), is("/worker_id/1")); } } diff --git a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriberTest.java 
b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriberTest.java index d922b52c01063..3b76384bebba1 100644 --- a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriberTest.java +++ b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ConfigurationChangedSubscriberTest.java @@ -25,8 +25,8 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationPropertyKey; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.instance.metadata.InstanceMetaData; import org.apache.shardingsphere.infra.instance.metadata.proxy.ProxyInstanceMetaData; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; @@ -114,8 +114,6 @@ private Map createDatabases() { when(database.getName()).thenReturn("db"); ResourceMetaData resourceMetaData = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS); when(database.getResourceMetaData()).thenReturn(resourceMetaData); - when(resourceMetaData.getStorageNodeMetaData().getDataSources()).thenReturn(Collections.emptyMap()); - when(resourceMetaData.getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.emptyMap()); when(database.getSchemas()).thenReturn(Collections.singletonMap("foo_schema", new ShardingSphereSchema())); when(database.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); 
when(database.getSchema("foo_schema")).thenReturn(mock(ShardingSphereSchema.class)); @@ -134,16 +132,16 @@ void assertRenewForRuleConfigurationsChanged() { @Test void assertRenewForDataSourceChanged() { - subscriber.renew(new DataSourceUnitsChangedEvent("db", "0", createChangedDataSourcePropertiesMap())); - assertTrue(contextManager.getMetaDataContexts().getMetaData().getDatabase("db").getResourceMetaData().getDataSources().containsKey("ds_2")); + subscriber.renew(new DataSourceUnitsChangedEvent("db", "0", createChangedDataSourcePoolPropertiesMap())); + assertTrue(contextManager.getMetaDataContexts().getMetaData().getDatabase("db").getResourceMetaData().getStorageUnitMetaData().getStorageUnits().containsKey("ds_2")); } - private Map createChangedDataSourcePropertiesMap() { + private Map createChangedDataSourcePoolPropertiesMap() { MockedDataSource dataSource = new MockedDataSource(); - Map result = new LinkedHashMap<>(3, 1F); - result.put("primary_ds", DataSourcePropertiesCreator.create(dataSource)); - result.put("ds_1", DataSourcePropertiesCreator.create(dataSource)); - result.put("ds_2", DataSourcePropertiesCreator.create(dataSource)); + Map result = new LinkedHashMap<>(3, 1F); + result.put("primary_ds", DataSourcePoolPropertiesCreator.create(dataSource)); + result.put("ds_1", DataSourcePoolPropertiesCreator.create(dataSource)); + result.put("ds_2", DataSourcePoolPropertiesCreator.create(dataSource)); return result; } diff --git a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ResourceMetaDataChangedSubscriberTest.java b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ResourceMetaDataChangedSubscriberTest.java index c0b832b58b7bd..69f62bcf9e535 100644 --- a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ResourceMetaDataChangedSubscriberTest.java +++ 
b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/ResourceMetaDataChangedSubscriberTest.java @@ -20,8 +20,8 @@ import org.apache.shardingsphere.infra.config.mode.ModeConfiguration; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.instance.metadata.InstanceMetaData; import org.apache.shardingsphere.infra.instance.metadata.proxy.ProxyInstanceMetaData; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; @@ -100,8 +100,6 @@ private ContextManagerBuilderParameter createContextManagerBuilderParameter() { private Map createDatabases() { when(database.getName()).thenReturn("db"); - when(database.getResourceMetaData().getDataSources()).thenReturn(new LinkedHashMap<>()); - when(database.getResourceMetaData().getStorageTypes()).thenReturn(Collections.singletonMap("ds_0", TypedSPILoader.getService(DatabaseType.class, "FIXTURE"))); when(database.getSchemas()).thenReturn(Collections.singletonMap("foo_schema", new ShardingSphereSchema())); when(database.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); when(database.getSchema("foo_schema")).thenReturn(mock(ShardingSphereSchema.class)); @@ -113,18 +111,18 @@ private Map createDatabases() { @Test void assertRenewForDatabaseAdded() { - when(persistService.getDataSourceUnitService().load("db_added")).thenReturn(createDataSourcePropertiesMap()); + 
when(persistService.getDataSourceUnitService().load("db_added")).thenReturn(createDataSourcePoolPropertiesMap()); when(persistService.getDatabaseRulePersistService().load("db_added")).thenReturn(Collections.emptyList()); subscriber.renew(new DatabaseAddedEvent("db_added")); - assertNotNull(contextManager.getMetaDataContexts().getMetaData().getDatabase("db_added").getResourceMetaData().getDataSources()); + assertNotNull(contextManager.getMetaDataContexts().getMetaData().getDatabase("db_added").getResourceMetaData().getStorageUnitMetaData().getStorageUnits()); } - private Map createDataSourcePropertiesMap() { + private Map createDataSourcePoolPropertiesMap() { MockedDataSource dataSource = new MockedDataSource(); - Map result = new LinkedHashMap<>(3, 1F); - result.put("primary_ds", DataSourcePropertiesCreator.create(dataSource)); - result.put("replica_ds_0", DataSourcePropertiesCreator.create(dataSource)); - result.put("replica_ds_1", DataSourcePropertiesCreator.create(dataSource)); + Map result = new LinkedHashMap<>(3, 1F); + result.put("primary_ds", DataSourcePoolPropertiesCreator.create(dataSource)); + result.put("replica_ds_0", DataSourcePoolPropertiesCreator.create(dataSource)); + result.put("replica_ds_1", DataSourcePoolPropertiesCreator.create(dataSource)); return result; } diff --git a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/StateChangedSubscriberTest.java b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/StateChangedSubscriberTest.java index e46b6839056ef..30191e958b511 100644 --- a/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/StateChangedSubscriberTest.java +++ b/mode/type/cluster/core/src/test/java/org/apache/shardingsphere/mode/manager/cluster/coordinator/subscriber/StateChangedSubscriberTest.java @@ -20,7 +20,6 @@ import 
org.apache.shardingsphere.infra.config.mode.ModeConfiguration; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.state.datasource.DataSourceState; import org.apache.shardingsphere.infra.instance.ComputeNodeInstance; import org.apache.shardingsphere.infra.instance.metadata.InstanceMetaData; import org.apache.shardingsphere.infra.instance.metadata.proxy.ProxyInstanceMetaData; @@ -30,10 +29,11 @@ import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.rule.identifier.type.ResourceHeldRule; import org.apache.shardingsphere.infra.rule.identifier.type.StaticDataSourceContainedRule; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.infra.state.cluster.ClusterState; +import org.apache.shardingsphere.infra.state.datasource.DataSourceState; import org.apache.shardingsphere.infra.state.instance.InstanceState; import org.apache.shardingsphere.infra.util.eventbus.EventBusContext; -import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.mode.event.storage.StorageNodeDataSource; import org.apache.shardingsphere.mode.event.storage.StorageNodeDataSourceChangedEvent; import org.apache.shardingsphere.mode.event.storage.StorageNodeRole; @@ -65,7 +65,6 @@ import java.sql.SQLException; import java.util.Collection; import java.util.Collections; -import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.Map; import java.util.Objects; @@ -107,8 +106,6 @@ private ContextManagerBuilderParameter createContextManagerBuilderParameter() { } private Map createDatabases() { - when(database.getResourceMetaData().getDataSources()).thenReturn(new LinkedHashMap<>()); - when(database.getResourceMetaData().getStorageTypes()).thenReturn(Collections.singletonMap("ds_0", 
TypedSPILoader.getService(DatabaseType.class, "FIXTURE"))); when(database.getSchemas()).thenReturn(Collections.singletonMap("foo_schema", new ShardingSphereSchema())); when(database.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); when(database.getSchema("foo_schema")).thenReturn(mock(ShardingSphereSchema.class)); diff --git a/mode/type/cluster/repository/provider/consul/src/main/java/org/apache/shardingsphere/mode/repository/cluster/consul/lock/ConsulDistributedLock.java b/mode/type/cluster/repository/provider/consul/src/main/java/org/apache/shardingsphere/mode/repository/cluster/consul/lock/ConsulDistributedLock.java index 0c7c76c505bfa..18c43de83aacc 100644 --- a/mode/type/cluster/repository/provider/consul/src/main/java/org/apache/shardingsphere/mode/repository/cluster/consul/lock/ConsulDistributedLock.java +++ b/mode/type/cluster/repository/provider/consul/src/main/java/org/apache/shardingsphere/mode/repository/cluster/consul/lock/ConsulDistributedLock.java @@ -18,7 +18,6 @@ package org.apache.shardingsphere.mode.repository.cluster.consul.lock; import com.ecwid.consul.ConsulException; -import com.ecwid.consul.json.GsonFactory; import com.ecwid.consul.transport.RawResponse; import com.ecwid.consul.v1.ConsulClient; import com.ecwid.consul.v1.OperationException; @@ -28,8 +27,9 @@ import com.ecwid.consul.v1.kv.model.PutParams; import com.ecwid.consul.v1.session.model.NewSession; import com.ecwid.consul.v1.session.model.Session.Behavior; +import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.base.Strings; -import com.google.common.reflect.TypeToken; +import org.apache.shardingsphere.infra.util.json.JsonUtils; import org.apache.shardingsphere.mode.repository.cluster.consul.ShardingSphereConsulClient; import org.apache.shardingsphere.mode.repository.cluster.consul.ShardingSphereQueryParams; import org.apache.shardingsphere.mode.repository.cluster.consul.props.ConsulProperties; @@ -135,11 +135,8 @@ 
private long waitUntilRelease(final long valueIndex, final long timeoutMillis) { private Response getResponse(final RawResponse rawResponse) { if (200 == rawResponse.getStatusCode()) { - List value = GsonFactory.getGson().fromJson(rawResponse.getContent(), new TypeToken>() { - - private static final long serialVersionUID = -5065504617907914417L; - - }.getType()); + List value = JsonUtils.fromJsonString(rawResponse.getContent(), new TypeReference>() { + }); if (value.isEmpty()) { return new Response<>(null, rawResponse); } diff --git a/mode/type/cluster/repository/provider/etcd/pom.xml b/mode/type/cluster/repository/provider/etcd/pom.xml index 50b9ac893295b..ff846395d5ccd 100644 --- a/mode/type/cluster/repository/provider/etcd/pom.xml +++ b/mode/type/cluster/repository/provider/etcd/pom.xml @@ -44,5 +44,9 @@ io.etcd jetcd-core + + io.grpc + grpc-all + diff --git a/mode/type/cluster/repository/provider/nacos/src/main/java/org/apache/shardingsphere/mode/repository/cluster/nacos/NacosRepository.java b/mode/type/cluster/repository/provider/nacos/src/main/java/org/apache/shardingsphere/mode/repository/cluster/nacos/NacosRepository.java index 0426110681b56..0a076804be0f7 100644 --- a/mode/type/cluster/repository/provider/nacos/src/main/java/org/apache/shardingsphere/mode/repository/cluster/nacos/NacosRepository.java +++ b/mode/type/cluster/repository/provider/nacos/src/main/java/org/apache/shardingsphere/mode/repository/cluster/nacos/NacosRepository.java @@ -22,8 +22,6 @@ import com.alibaba.nacos.api.naming.NamingService; import com.alibaba.nacos.api.naming.PreservedMetadataKeys; import com.alibaba.nacos.api.naming.pojo.Instance; -import com.alibaba.nacos.common.utils.CollectionUtils; -import com.alibaba.nacos.common.utils.StringUtils; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import lombok.SneakyThrows; @@ -97,7 +95,7 @@ private void initServiceMetaData() { serviceController = new ServiceController(); for (ServiceMetaData each : 
serviceController.getAllServices()) { Integer port = client.getAllInstances(each.getServiceName(), false).stream() - .filter(instance -> StringUtils.equals(instance.getIp(), ip)).map(Instance::getPort).max(Comparator.naturalOrder()).orElse(Integer.MIN_VALUE); + .filter(instance -> ip.equals(instance.getIp())).map(Instance::getPort).max(Comparator.naturalOrder()).orElse(Integer.MIN_VALUE); each.setIp(ip); each.setPort(new AtomicInteger(port)); } @@ -292,7 +290,7 @@ public void delete(final String key) { Collection instances = findExistedInstance(each.isEphemeral()).stream() .filter(instance -> { String fullPath = NacosMetaDataUtils.getKey(instance); - return fullPath.startsWith(key + PATH_SEPARATOR) || StringUtils.equals(fullPath, key); + return fullPath.startsWith(key + PATH_SEPARATOR) || key.equals(fullPath); }) .sorted(Comparator.comparing(NacosMetaDataUtils::getKey).reversed()).collect(Collectors.toList()); Collection keyValues = new LinkedList<>(); @@ -337,9 +335,9 @@ private boolean isAvailable(final Collection keyValues) throws NacosEx ServiceMetaData service = serviceController.getService(entry.getKey()); Map> instanceMap = client.getAllInstances(service.getServiceName(), false).stream().collect(Collectors.groupingBy(NacosMetaDataUtils::getKey)); keyValues.removeIf(keyValue -> { - Collection instances = instanceMap.get(keyValue.getKey()); + String key = keyValue.getKey(); String value = keyValue.getValue(); - return CollectionUtils.isNotEmpty(instances) ? instances.stream().anyMatch(instance -> StringUtils.equals(NacosMetaDataUtils.getValue(instance), value)) : null == value; + return instanceMap.containsKey(key) ? 
instanceMap.get(key).stream().anyMatch(each -> Objects.equals(NacosMetaDataUtils.getValue(each), value)) : null == value; }); } return keyValues.isEmpty(); diff --git a/mode/type/cluster/repository/provider/nacos/src/main/java/org/apache/shardingsphere/mode/repository/cluster/nacos/listener/NamingEventListener.java b/mode/type/cluster/repository/provider/nacos/src/main/java/org/apache/shardingsphere/mode/repository/cluster/nacos/listener/NamingEventListener.java index bff18134f091d..09516f5bce07c 100644 --- a/mode/type/cluster/repository/provider/nacos/src/main/java/org/apache/shardingsphere/mode/repository/cluster/nacos/listener/NamingEventListener.java +++ b/mode/type/cluster/repository/provider/nacos/src/main/java/org/apache/shardingsphere/mode/repository/cluster/nacos/listener/NamingEventListener.java @@ -116,7 +116,8 @@ public void setPreInstances(final Collection instances) { } } return false; - }).collect(Collectors.toMap(NacosMetaDataUtils::getKey, Function.identity(), (a, b) -> NacosMetaDataUtils.getTimestamp(a) > NacosMetaDataUtils.getTimestamp(b) ? a : b)); + }).collect(Collectors.toMap(NacosMetaDataUtils::getKey, Function.identity(), + (oldValue, currentValue) -> NacosMetaDataUtils.getTimestamp(oldValue) > NacosMetaDataUtils.getTimestamp(currentValue) ? 
oldValue : currentValue)); } /** diff --git a/mode/type/cluster/repository/provider/nacos/src/test/java/org/apache/shardingsphere/mode/repository/cluster/nacos/NacosRepositoryTest.java b/mode/type/cluster/repository/provider/nacos/src/test/java/org/apache/shardingsphere/mode/repository/cluster/nacos/NacosRepositoryTest.java index 02e5f43abbde5..efccc41fb4e2f 100644 --- a/mode/type/cluster/repository/provider/nacos/src/test/java/org/apache/shardingsphere/mode/repository/cluster/nacos/NacosRepositoryTest.java +++ b/mode/type/cluster/repository/provider/nacos/src/test/java/org/apache/shardingsphere/mode/repository/cluster/nacos/NacosRepositoryTest.java @@ -24,10 +24,9 @@ import com.alibaba.nacos.api.naming.listener.EventListener; import com.alibaba.nacos.api.naming.listener.NamingEvent; import com.alibaba.nacos.api.naming.pojo.Instance; -import com.alibaba.nacos.common.utils.StringUtils; import com.google.common.util.concurrent.SettableFuture; -import org.apache.shardingsphere.mode.repository.cluster.exception.ClusterPersistRepositoryException; import org.apache.shardingsphere.mode.event.DataChangedEvent; +import org.apache.shardingsphere.mode.repository.cluster.exception.ClusterPersistRepositoryException; import org.apache.shardingsphere.mode.repository.cluster.nacos.entity.ServiceController; import org.apache.shardingsphere.mode.repository.cluster.nacos.entity.ServiceMetaData; import org.apache.shardingsphere.mode.repository.cluster.nacos.props.NacosProperties; @@ -50,6 +49,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Properties; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicInteger; @@ -361,7 +361,7 @@ private VoidAnswer2 getListenerAnswer(final Instance preI private VoidAnswer2 getRegisterInstanceAnswer() { return (serviceName, instance) -> { List instances = client.getAllInstances(serviceName, false); - instances.removeIf(each -> 
StringUtils.equals(each.getIp(), instance.getIp()) && each.getPort() == instance.getPort()); + instances.removeIf(each -> Objects.equals(each.getIp(), instance.getIp()) && each.getPort() == instance.getPort()); instances.add(instance); when(client.getAllInstances(serviceName, false)).thenReturn(instances); }; diff --git a/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/NewStandaloneContextManagerBuilder.java b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/NewStandaloneContextManagerBuilder.java new file mode 100644 index 0000000000000..eeb80576e0667 --- /dev/null +++ b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/NewStandaloneContextManagerBuilder.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.mode.manager.standalone; + +import org.apache.shardingsphere.infra.config.mode.PersistRepositoryConfiguration; +import org.apache.shardingsphere.infra.instance.ComputeNodeInstance; +import org.apache.shardingsphere.infra.instance.InstanceContext; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.apache.shardingsphere.infra.util.eventbus.EventBusContext; +import org.apache.shardingsphere.metadata.persist.NewMetaDataPersistService; +import org.apache.shardingsphere.mode.lock.GlobalLockContext; +import org.apache.shardingsphere.mode.manager.ContextManager; +import org.apache.shardingsphere.mode.manager.ContextManagerBuilder; +import org.apache.shardingsphere.mode.manager.ContextManagerBuilderParameter; +import org.apache.shardingsphere.mode.manager.standalone.subscriber.StandaloneProcessSubscriber; +import org.apache.shardingsphere.mode.manager.standalone.workerid.generator.StandaloneWorkerIdGenerator; +import org.apache.shardingsphere.mode.metadata.MetaDataContexts; +import org.apache.shardingsphere.mode.metadata.NewMetaDataContextsFactory; +import org.apache.shardingsphere.mode.repository.standalone.StandalonePersistRepository; + +import java.sql.SQLException; +import java.util.Properties; + +/** + * TODO Rename StandaloneContextManagerBuilder when metadata structure adjustment completed. #25485 + * New Standalone context manager builder. + */ +public final class NewStandaloneContextManagerBuilder implements ContextManagerBuilder { + + @Override + public ContextManager build(final ContextManagerBuilderParameter param) throws SQLException { + PersistRepositoryConfiguration repositoryConfig = param.getModeConfiguration().getRepository(); + StandalonePersistRepository repository = TypedSPILoader.getService( + StandalonePersistRepository.class, null == repositoryConfig ? null : repositoryConfig.getType(), null == repositoryConfig ? 
new Properties() : repositoryConfig.getProps()); + NewMetaDataPersistService persistService = new NewMetaDataPersistService(repository); + InstanceContext instanceContext = buildInstanceContext(param); + new StandaloneProcessSubscriber(instanceContext.getEventBusContext()); + MetaDataContexts metaDataContexts = NewMetaDataContextsFactory.create(persistService, param, instanceContext); + ContextManager result = new ContextManager(metaDataContexts, instanceContext); + setContextManagerAware(result); + return result; + } + + private InstanceContext buildInstanceContext(final ContextManagerBuilderParameter param) { + return new InstanceContext(new ComputeNodeInstance(param.getInstanceMetaData()), + new StandaloneWorkerIdGenerator(), param.getModeConfiguration(), new NewStandaloneModeContextManager(), new GlobalLockContext(null), new EventBusContext()); + } + + private void setContextManagerAware(final ContextManager contextManager) { + ((NewStandaloneModeContextManager) contextManager.getInstanceContext().getModeContextManager()).setContextManagerAware(contextManager); + } + + @Override + public String getType() { + return "Standalone"; + } + + @Override + public boolean isDefault() { + return true; + } +} diff --git a/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/NewStandaloneModeContextManager.java b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/NewStandaloneModeContextManager.java new file mode 100644 index 0000000000000..945fcfe7af3a2 --- /dev/null +++ b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/NewStandaloneModeContextManager.java @@ -0,0 +1,333 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.mode.manager.standalone; + +import com.google.common.base.Strings; +import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; +import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereView; +import org.apache.shardingsphere.infra.metadata.database.schema.pojo.AlterSchemaMetaDataPOJO; +import org.apache.shardingsphere.infra.metadata.database.schema.pojo.AlterSchemaPOJO; +import org.apache.shardingsphere.infra.metadata.version.MetaDataVersion; +import org.apache.shardingsphere.infra.rule.identifier.type.MetaDataHeldRule; +import org.apache.shardingsphere.infra.rule.identifier.type.MutableDataNodeRule; +import org.apache.shardingsphere.infra.rule.identifier.type.ResourceHeldRule; +import org.apache.shardingsphere.infra.rule.identifier.type.TableContainedRule; +import org.apache.shardingsphere.infra.spi.type.ordered.cache.OrderedServicesCache; +import 
org.apache.shardingsphere.metadata.persist.service.config.global.GlobalPersistService; +import org.apache.shardingsphere.metadata.persist.service.database.DatabaseMetaDataBasedPersistService; +import org.apache.shardingsphere.mode.manager.ContextManager; +import org.apache.shardingsphere.mode.manager.ContextManagerAware; +import org.apache.shardingsphere.mode.manager.switcher.ResourceSwitchManager; +import org.apache.shardingsphere.mode.manager.switcher.SwitchingResource; +import org.apache.shardingsphere.mode.metadata.MetaDataContexts; + +import java.sql.SQLException; +import java.util.Collection; +import java.util.Collections; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Properties; +import java.util.stream.Collectors; + +/** + * TODO Rename StandaloneModeContextManager when metadata structure adjustment completed. #25485 + * New Standalone mode context manager. + */ +public final class NewStandaloneModeContextManager implements ModeContextManager, ContextManagerAware { + + private ContextManager contextManager; + + @Override + public void createDatabase(final String databaseName) { + contextManager.getResourceMetaDataContextManager().addDatabase(databaseName); + contextManager.getMetaDataContexts().getPersistService().getDatabaseMetaDataService().addDatabase(databaseName); + clearServiceCache(); + } + + @Override + public void dropDatabase(final String databaseName) { + contextManager.getResourceMetaDataContextManager().dropDatabase(databaseName); + contextManager.getMetaDataContexts().getPersistService().getDatabaseMetaDataService().dropDatabase(databaseName); + clearServiceCache(); + } + + @Override + public void createSchema(final String databaseName, final String schemaName) { + ShardingSphereSchema schema = new ShardingSphereSchema(); + ShardingSphereDatabase database = contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName); + 
database.putSchema(schemaName, schema); + refreshMetaDataHeldRule(database); + contextManager.getMetaDataContexts().getPersistService().getDatabaseMetaDataService().persist(databaseName, schemaName, schema); + } + + @Override + public void alterSchema(final AlterSchemaPOJO alterSchemaPOJO) { + ShardingSphereDatabase database = contextManager.getMetaDataContexts().getMetaData().getDatabase(alterSchemaPOJO.getDatabaseName()); + putSchemaMetaData(database, alterSchemaPOJO.getSchemaName(), alterSchemaPOJO.getRenameSchemaName(), alterSchemaPOJO.getLogicDataSourceName()); + removeSchemaMetaData(database, alterSchemaPOJO.getSchemaName()); + refreshMetaDataHeldRule(database); + DatabaseMetaDataBasedPersistService databaseMetaDataService = contextManager.getMetaDataContexts().getPersistService().getDatabaseMetaDataService(); + databaseMetaDataService.persist(alterSchemaPOJO.getDatabaseName(), alterSchemaPOJO.getRenameSchemaName(), database.getSchema(alterSchemaPOJO.getRenameSchemaName())); + databaseMetaDataService.getViewMetaDataPersistService().persist(alterSchemaPOJO.getDatabaseName(), alterSchemaPOJO.getRenameSchemaName(), + database.getSchema(alterSchemaPOJO.getRenameSchemaName()).getViews()); + databaseMetaDataService.dropSchema(alterSchemaPOJO.getDatabaseName(), alterSchemaPOJO.getSchemaName()); + } + + private void putSchemaMetaData(final ShardingSphereDatabase database, final String schemaName, final String renameSchemaName, final String logicDataSourceName) { + ShardingSphereSchema schema = database.getSchema(schemaName); + database.putSchema(renameSchemaName, schema); + addDataNode(database, logicDataSourceName, schemaName, schema.getAllTableNames()); + } + + private void addDataNode(final ShardingSphereDatabase database, final String logicDataSourceName, final String schemaName, final Collection tobeAddedTableNames) { + tobeAddedTableNames.forEach(each -> { + if (!Strings.isNullOrEmpty(logicDataSourceName) && isSingleTable(each, database)) { + 
database.getRuleMetaData().findRules(MutableDataNodeRule.class).forEach(rule -> rule.put(logicDataSourceName, schemaName, each)); + } + }); + } + + private void addDataNode(final ShardingSphereDatabase database, final String logicDataSourceName, final String schemaName, final Map toBeAddedTables, + final Map toBeAddedViews) { + addTablesToDataNode(database, schemaName, logicDataSourceName, toBeAddedTables); + addViewsToDataNode(database, schemaName, logicDataSourceName, toBeAddedTables, toBeAddedViews); + } + + private void addTablesToDataNode(final ShardingSphereDatabase database, final String schemaName, final String logicDataSourceName, final Map toBeAddedTables) { + for (Entry entry : toBeAddedTables.entrySet()) { + if (!Strings.isNullOrEmpty(logicDataSourceName) && isSingleTable(entry.getKey(), database)) { + database.getRuleMetaData().findRules(MutableDataNodeRule.class).forEach(rule -> rule.put(logicDataSourceName, schemaName, entry.getKey())); + } + database.getSchema(schemaName).putTable(entry.getKey(), entry.getValue()); + } + } + + private void addViewsToDataNode(final ShardingSphereDatabase database, final String schemaName, final String logicDataSourceName, + final Map toBeAddedTables, final Map toBeAddedViews) { + for (Entry entry : toBeAddedViews.entrySet()) { + if (!Strings.isNullOrEmpty(logicDataSourceName) && isSingleTable(entry.getKey(), database)) { + database.getRuleMetaData().findRules(MutableDataNodeRule.class).forEach(rule -> rule.put(logicDataSourceName, schemaName, entry.getKey())); + } + database.getSchema(schemaName).putTable(entry.getKey(), toBeAddedTables.get(entry.getKey().toLowerCase())); + database.getSchema(schemaName).putView(entry.getKey(), entry.getValue()); + } + } + + private boolean isSingleTable(final String tableName, final ShardingSphereDatabase database) { + return database.getRuleMetaData().findRules(TableContainedRule.class).stream().noneMatch(each -> each.getDistributedTableMapper().contains(tableName)); + } + + 
private void removeSchemaMetaData(final ShardingSphereDatabase database, final String schemaName) { + ShardingSphereSchema schema = new ShardingSphereSchema(database.getSchema(schemaName).getTables(), database.getSchema(schemaName).getViews()); + database.removeSchema(schemaName); + removeDataNode(database.getRuleMetaData().findRules(MutableDataNodeRule.class), Collections.singletonList(schemaName), schema.getAllTableNames()); + } + + private void removeDataNode(final Collection rules, final Collection schemaNames, final Collection tobeRemovedTables) { + tobeRemovedTables.forEach(each -> rules.forEach(rule -> rule.remove(schemaNames, each))); + } + + private void removeDataNode(final ShardingSphereDatabase database, final String schemaName, final Collection tobeRemovedTables, final Collection tobeRemovedViews) { + removeTablesToDataNode(database, schemaName, tobeRemovedTables); + removeViewsToDataNode(database, schemaName, tobeRemovedTables, tobeRemovedViews); + } + + private void removeDataNode(final Collection rules, final String schemaName, final Collection tobeRemovedTables) { + tobeRemovedTables.forEach(each -> rules.forEach(rule -> rule.remove(schemaName, each))); + } + + private void removeTablesToDataNode(final ShardingSphereDatabase database, final String schemaName, final Collection toBeDroppedTables) { + removeDataNode(database.getRuleMetaData().findRules(MutableDataNodeRule.class), schemaName, toBeDroppedTables); + toBeDroppedTables.forEach(each -> database.getSchema(schemaName).removeTable(each)); + } + + private void removeViewsToDataNode(final ShardingSphereDatabase database, final String schemaName, final Collection toBeDroppedTables, final Collection toBeDroppedViews) { + removeDataNode(database.getRuleMetaData().findRules(MutableDataNodeRule.class), schemaName, toBeDroppedViews); + ShardingSphereSchema schema = database.getSchema(schemaName); + toBeDroppedTables.forEach(schema::removeTable); + toBeDroppedViews.forEach(schema::removeView); + } + + 
@Override + public void dropSchema(final String databaseName, final Collection schemaNames) { + Collection tobeRemovedTables = new LinkedHashSet<>(); + Collection tobeRemovedSchemas = new LinkedHashSet<>(); + ShardingSphereDatabase database = contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName); + for (String each : schemaNames) { + ShardingSphereSchema schema = new ShardingSphereSchema(database.getSchema(each).getTables(), database.getSchema(each).getViews()); + database.removeSchema(each); + Optional.of(schema).ifPresent(optional -> tobeRemovedTables.addAll(optional.getAllTableNames())); + tobeRemovedSchemas.add(each.toLowerCase()); + } + removeDataNode(database.getRuleMetaData().findRules(MutableDataNodeRule.class), tobeRemovedSchemas, tobeRemovedTables); + refreshMetaDataHeldRule(database); + } + + @Override + public void alterSchemaMetaData(final AlterSchemaMetaDataPOJO alterSchemaMetaDataPOJO) { + String databaseName = alterSchemaMetaDataPOJO.getDatabaseName(); + String schemaName = alterSchemaMetaDataPOJO.getSchemaName(); + ShardingSphereDatabase database = contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName); + Map tables = alterSchemaMetaDataPOJO.getAlteredTables().stream().collect(Collectors.toMap(ShardingSphereTable::getName, table -> table)); + Map views = alterSchemaMetaDataPOJO.getAlteredViews().stream().collect(Collectors.toMap(ShardingSphereView::getName, view -> view)); + addDataNode(database, alterSchemaMetaDataPOJO.getLogicDataSourceName(), schemaName, tables, views); + removeDataNode(database, schemaName, alterSchemaMetaDataPOJO.getDroppedTables(), alterSchemaMetaDataPOJO.getDroppedViews()); + refreshMetaDataHeldRule(database); + DatabaseMetaDataBasedPersistService databaseMetaDataService = contextManager.getMetaDataContexts().getPersistService().getDatabaseMetaDataService(); + databaseMetaDataService.getTableMetaDataPersistService().persist(databaseName, schemaName, tables); + 
databaseMetaDataService.getViewMetaDataPersistService().persist(databaseName, schemaName, views); + alterSchemaMetaDataPOJO.getDroppedTables().forEach(each -> databaseMetaDataService.getTableMetaDataPersistService().delete(databaseName, schemaName, each)); + alterSchemaMetaDataPOJO.getDroppedViews().forEach(each -> databaseMetaDataService.getViewMetaDataPersistService().delete(databaseName, schemaName, each)); + } + + private void refreshMetaDataHeldRule(final ShardingSphereDatabase database) { + contextManager.getMetaDataContexts().getMetaData().getGlobalRuleMetaData().findRules(MetaDataHeldRule.class).forEach(each -> each.alterDatabase(database)); + } + + @Override + public void registerStorageUnits(final String databaseName, final Map toBeRegisteredProps) throws SQLException { + SwitchingResource switchingResource = + new ResourceSwitchManager().create(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeRegisteredProps); + contextManager.getMetaDataContexts().getMetaData().getDatabases().putAll(contextManager.getConfigurationContextManager().createChangedDatabases(databaseName, false, switchingResource, null)); + contextManager.getMetaDataContexts().getMetaData().getGlobalRuleMetaData().findRules(ResourceHeldRule.class) + .forEach(each -> each.addResource(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName))); + contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getSchemas() + .forEach((schemaName, schema) -> contextManager.getMetaDataContexts().getPersistService().getDatabaseMetaDataService() + .persist(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), schemaName, schema)); + contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append( + contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), toBeRegisteredProps); + clearServiceCache(); + } + + @Override + 
public void alterStorageUnits(final String databaseName, final Map toBeUpdatedProps) throws SQLException { + SwitchingResource switchingResource = + new ResourceSwitchManager().create(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeUpdatedProps); + contextManager.getMetaDataContexts().getMetaData().getDatabases().putAll(contextManager.getConfigurationContextManager().createChangedDatabases(databaseName, true, switchingResource, null)); + contextManager.getMetaDataContexts().getMetaData().getGlobalRuleMetaData().findRules(ResourceHeldRule.class) + .forEach(each -> each.addResource(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName))); + contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append( + contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), toBeUpdatedProps); + switchingResource.closeStaleDataSources(); + clearServiceCache(); + } + + @Override + public void unregisterStorageUnits(final String databaseName, final Collection toBeDroppedStorageUnitNames) throws SQLException { + Map propsMap = contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService() + .load(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName()); + Map toBeDeletedPropsMap = getToBeDeletedPropertiesMap(propsMap, toBeDroppedStorageUnitNames); + SwitchingResource switchingResource = + new ResourceSwitchManager().createByDropResource(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeDeletedPropsMap); + contextManager.getMetaDataContexts().getMetaData().getDatabases() + .putAll(contextManager.getConfigurationContextManager().renewDatabase(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName), switchingResource)); + MetaDataContexts reloadMetaDataContexts = 
contextManager.getConfigurationContextManager().createMetaDataContexts(databaseName, false, switchingResource, null); + contextManager.getConfigurationContextManager().alterSchemaMetaData(databaseName, reloadMetaDataContexts.getMetaData().getDatabase(databaseName), + contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName)); + contextManager.deletedSchemaNames(databaseName, reloadMetaDataContexts.getMetaData().getDatabase(databaseName), contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName)); + contextManager.renewMetaDataContexts(reloadMetaDataContexts); + Map toBeReversedPropsMap = getToBeReversedDataSourcePoolPropertiesMap(propsMap, toBeDroppedStorageUnitNames); + contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().persist( + contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), toBeReversedPropsMap); + switchingResource.closeStaleDataSources(); + clearServiceCache(); + } + + private Map getToBeDeletedPropertiesMap(final Map propsMap, final Collection toBeDroppedResourceNames) { + return propsMap.entrySet().stream().filter(entry -> toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); + } + + private Map getToBeReversedDataSourcePoolPropertiesMap(final Map propsMap, final Collection toBeDroppedResourceNames) { + return propsMap.entrySet().stream().filter(entry -> !toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); + } + + @Override + public void alterRuleConfiguration(final String databaseName, final Collection ruleConfigs) { + contextManager.getConfigurationContextManager().alterRuleConfiguration(databaseName, ruleConfigs); + contextManager.getMetaDataContexts().getPersistService() + .getDatabaseRulePersistService().persist(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), ruleConfigs); + 
clearServiceCache(); + } + + @Override + public Collection alterRuleConfiguration(final String databaseName, final RuleConfiguration toBeAlteredRuleConfig) { + if (null != toBeAlteredRuleConfig) { + contextManager.getConfigurationContextManager().alterRuleConfiguration(databaseName, Collections.singletonList(toBeAlteredRuleConfig)); + contextManager.getMetaDataContexts().getPersistService() + .getDatabaseRulePersistService().persist(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), Collections.singletonList(toBeAlteredRuleConfig)); + clearServiceCache(); + } + return Collections.emptyList(); + } + + @Override + public void removeRuleConfigurationItem(final String databaseName, final RuleConfiguration toBeRemovedRuleConfig) { + if (null != toBeRemovedRuleConfig) { + contextManager.getConfigurationContextManager().dropRuleConfiguration(databaseName, toBeRemovedRuleConfig); + contextManager.getMetaDataContexts().getPersistService().getDatabaseRulePersistService().delete(databaseName, Collections.singleton(toBeRemovedRuleConfig)); + clearServiceCache(); + } + } + + @Override + public void removeRuleConfiguration(final String databaseName, final String ruleName) { + contextManager.getMetaDataContexts().getPersistService().getDatabaseRulePersistService().delete(databaseName, ruleName); + clearServiceCache(); + } + + @Override + public void alterGlobalRuleConfiguration(final Collection globalRuleConfigs) { + contextManager.getConfigurationContextManager().alterGlobalRuleConfiguration(globalRuleConfigs); + contextManager.getMetaDataContexts().getPersistService().getGlobalRuleService().persist(contextManager.getMetaDataContexts().getMetaData().getGlobalRuleMetaData().getConfigurations()); + clearServiceCache(); + } + + @Override + public void alterGlobalRuleConfiguration(final RuleConfiguration toBeAlteredRuleConfig) { + contextManager.getConfigurationContextManager().alterGlobalRuleConfiguration(toBeAlteredRuleConfig); + 
GlobalPersistService> globalRuleService = contextManager.getMetaDataContexts().getPersistService().getGlobalRuleService(); + contextManager.getMetaDataContexts().getPersistService().getMetaDataVersionPersistService().switchActiveVersion(globalRuleService.persistConfig(Collections.singleton(toBeAlteredRuleConfig))); + clearServiceCache(); + } + + @Override + public void alterProperties(final Properties props) { + contextManager.getConfigurationContextManager().alterProperties(props); + if (null != contextManager.getMetaDataContexts().getPersistService().getPropsService()) { + Collection versions = contextManager.getMetaDataContexts().getPersistService().getPropsService().persistConfig(props); + contextManager.getMetaDataContexts().getPersistService().getMetaDataVersionPersistService().switchActiveVersion(versions); + } + clearServiceCache(); + } + + private void clearServiceCache() { + OrderedServicesCache.clearCache(); + } + + @Override + public void setContextManagerAware(final ContextManager contextManager) { + this.contextManager = contextManager; + } +} diff --git a/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/StandaloneContextManagerBuilder.java b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/StandaloneContextManagerBuilder.java index 3495bd889c04e..10d41830418ed 100644 --- a/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/StandaloneContextManagerBuilder.java +++ b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/StandaloneContextManagerBuilder.java @@ -66,11 +66,6 @@ private void setContextManagerAware(final ContextManager contextManager) { @Override public String getType() { - return "Standalone"; - } - - @Override - public boolean isDefault() { - return true; + return "Compatible_Standalone"; } } diff --git 
a/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/StandaloneModeContextManager.java b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/StandaloneModeContextManager.java index e7e3666d54d7f..586314b999e01 100644 --- a/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/StandaloneModeContextManager.java +++ b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/StandaloneModeContextManager.java @@ -19,7 +19,7 @@ import com.google.common.base.Strings; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; @@ -139,7 +139,7 @@ private boolean isSingleTable(final String tableName, final ShardingSphereDataba private void removeSchemaMetaData(final ShardingSphereDatabase database, final String schemaName) { ShardingSphereSchema schema = new ShardingSphereSchema(database.getSchema(schemaName).getTables(), database.getSchema(schemaName).getViews()); database.removeSchema(schemaName); - removeDataNode(database.getRuleMetaData().findRules(MutableDataNodeRule.class), Collections.singletonList(schemaName), schema.getAllTableNames()); + removeDataNode(database.getRuleMetaData().findRules(MutableDataNodeRule.class), Collections.singleton(schemaName), schema.getAllTableNames()); } private void removeDataNode(final Collection rules, final Collection schemaNames, final Collection tobeRemovedTables) { @@ -204,40 +204,40 @@ private void refreshMetaDataHeldRule(final ShardingSphereDatabase database) { } 
@Override - public void registerStorageUnits(final String databaseName, final Map toBeRegisterStorageUnitProps) throws SQLException { + public void registerStorageUnits(final String databaseName, final Map toBeRegisteredProps) throws SQLException { SwitchingResource switchingResource = - new ResourceSwitchManager().create(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeRegisterStorageUnitProps); + new ResourceSwitchManager().create(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeRegisteredProps); contextManager.getMetaDataContexts().getMetaData().getDatabases().putAll(contextManager.getConfigurationContextManager().createChangedDatabases(databaseName, false, switchingResource, null)); contextManager.getMetaDataContexts().getMetaData().getGlobalRuleMetaData().findRules(ResourceHeldRule.class) .forEach(each -> each.addResource(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName))); contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getSchemas() .forEach((schemaName, schema) -> contextManager.getMetaDataContexts().getPersistService().getDatabaseMetaDataService() .persist(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), schemaName, schema)); - contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), - toBeRegisterStorageUnitProps); + contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append( + contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), toBeRegisteredProps); clearServiceCache(); } @Override - public void alterStorageUnits(final String databaseName, final Map toBeUpdatedStorageUnitProps) throws SQLException { + public void alterStorageUnits(final String databaseName, final Map 
toBeUpdatedProps) throws SQLException { SwitchingResource switchingResource = - new ResourceSwitchManager().create(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeUpdatedStorageUnitProps); + new ResourceSwitchManager().create(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeUpdatedProps); contextManager.getMetaDataContexts().getMetaData().getDatabases().putAll(contextManager.getConfigurationContextManager().createChangedDatabases(databaseName, true, switchingResource, null)); contextManager.getMetaDataContexts().getMetaData().getGlobalRuleMetaData().findRules(ResourceHeldRule.class) .forEach(each -> each.addResource(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName))); - contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), - toBeUpdatedStorageUnitProps); + contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().append( + contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), toBeUpdatedProps); switchingResource.closeStaleDataSources(); clearServiceCache(); } @Override public void unregisterStorageUnits(final String databaseName, final Collection toBeDroppedStorageUnitNames) throws SQLException { - Map dataSourcePropsMap = contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService() + Map propsMap = contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService() .load(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName()); - Map toBeDeletedDataSourcePropsMap = getToBeDeletedDataSourcePropsMap(dataSourcePropsMap, toBeDroppedStorageUnitNames); + Map toBeDeletedPropsMap = getToBeDeletedDataSourcePoolPropertiesMap(propsMap, toBeDroppedStorageUnitNames); SwitchingResource 
switchingResource = - new ResourceSwitchManager().createByDropResource(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeDeletedDataSourcePropsMap); + new ResourceSwitchManager().createByDropResource(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(), toBeDeletedPropsMap); contextManager.getMetaDataContexts().getMetaData().getDatabases() .putAll(contextManager.getConfigurationContextManager().renewDatabase(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName), switchingResource)); MetaDataContexts reloadMetaDataContexts = contextManager.getConfigurationContextManager().createMetaDataContexts(databaseName, false, switchingResource, null); @@ -245,19 +245,19 @@ public void unregisterStorageUnits(final String databaseName, final Collection toBeReversedDataSourcePropsMap = getToBeReversedDataSourcePropsMap(dataSourcePropsMap, toBeDroppedStorageUnitNames); + Map toBeReversedPropsMap = getToBeReversedDataSourcePoolPropertiesMap(propsMap, toBeDroppedStorageUnitNames); contextManager.getMetaDataContexts().getPersistService().getDataSourceUnitService().persist(contextManager.getMetaDataContexts().getMetaData().getDatabase(databaseName).getName(), - toBeReversedDataSourcePropsMap); + toBeReversedPropsMap); switchingResource.closeStaleDataSources(); clearServiceCache(); } - private Map getToBeDeletedDataSourcePropsMap(final Map dataSourcePropsMap, final Collection toBeDroppedResourceNames) { - return dataSourcePropsMap.entrySet().stream().filter(entry -> toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); + private Map getToBeDeletedDataSourcePoolPropertiesMap(final Map propsMap, final Collection toBeDroppedResourceNames) { + return propsMap.entrySet().stream().filter(entry -> toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); } - private 
Map getToBeReversedDataSourcePropsMap(final Map dataSourcePropsMap, final Collection toBeDroppedResourceNames) { - return dataSourcePropsMap.entrySet().stream().filter(entry -> !toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); + private Map getToBeReversedDataSourcePoolPropertiesMap(final Map propsMap, final Collection toBeDroppedResourceNames) { + return propsMap.entrySet().stream().filter(entry -> !toBeDroppedResourceNames.contains(entry.getKey())).collect(Collectors.toMap(Entry::getKey, Entry::getValue)); } @Override diff --git a/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/yaml/NewStandaloneYamlPersistRepositoryConfigurationSwapper.java b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/yaml/NewStandaloneYamlPersistRepositoryConfigurationSwapper.java new file mode 100644 index 0000000000000..7d0fd7ce34db7 --- /dev/null +++ b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/yaml/NewStandaloneYamlPersistRepositoryConfigurationSwapper.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.mode.manager.standalone.yaml; + +import org.apache.shardingsphere.infra.yaml.config.pojo.mode.YamlPersistRepositoryConfiguration; +import org.apache.shardingsphere.infra.yaml.config.swapper.mode.YamlPersistRepositoryConfigurationSwapper; +import org.apache.shardingsphere.mode.repository.standalone.StandalonePersistRepositoryConfiguration; + +/** + * TODO Rename StandaloneYamlPersistRepositoryConfigurationSwapper when metadata structure adjustment completed. #25485 + * New Standalone YAML persist repository configuration swapper. + */ +public final class NewStandaloneYamlPersistRepositoryConfigurationSwapper implements YamlPersistRepositoryConfigurationSwapper { + + @Override + public YamlPersistRepositoryConfiguration swapToYamlConfiguration(final StandalonePersistRepositoryConfiguration data) { + YamlPersistRepositoryConfiguration result = new YamlPersistRepositoryConfiguration(); + result.setType(data.getType()); + result.setProps(data.getProps()); + return result; + } + + @Override + public StandalonePersistRepositoryConfiguration swapToObject(final YamlPersistRepositoryConfiguration yamlConfig) { + return new StandalonePersistRepositoryConfiguration(yamlConfig.getType(), yamlConfig.getProps()); + } + + @Override + public String getType() { + return "Standalone"; + } +} diff --git a/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/yaml/StandaloneYamlPersistRepositoryConfigurationSwapper.java b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/yaml/StandaloneYamlPersistRepositoryConfigurationSwapper.java index 333044900c793..847cdf5bdeeaf 100644 --- a/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/yaml/StandaloneYamlPersistRepositoryConfigurationSwapper.java +++ 
b/mode/type/standalone/core/src/main/java/org/apache/shardingsphere/mode/manager/standalone/yaml/StandaloneYamlPersistRepositoryConfigurationSwapper.java @@ -41,6 +41,6 @@ public StandalonePersistRepositoryConfiguration swapToObject(final YamlPersistRe @Override public String getType() { - return "Standalone"; + return "Compatible_Standalone"; } } diff --git a/mode/type/standalone/core/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.yaml.config.swapper.mode.YamlPersistRepositoryConfigurationSwapper b/mode/type/standalone/core/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.yaml.config.swapper.mode.YamlPersistRepositoryConfigurationSwapper index a88aa27daa70d..846ad4f18d6e7 100644 --- a/mode/type/standalone/core/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.yaml.config.swapper.mode.YamlPersistRepositoryConfigurationSwapper +++ b/mode/type/standalone/core/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.yaml.config.swapper.mode.YamlPersistRepositoryConfigurationSwapper @@ -16,3 +16,4 @@ # org.apache.shardingsphere.mode.manager.standalone.yaml.StandaloneYamlPersistRepositoryConfigurationSwapper +org.apache.shardingsphere.mode.manager.standalone.yaml.NewStandaloneYamlPersistRepositoryConfigurationSwapper diff --git a/mode/type/standalone/core/src/main/resources/META-INF/services/org.apache.shardingsphere.mode.manager.ContextManagerBuilder b/mode/type/standalone/core/src/main/resources/META-INF/services/org.apache.shardingsphere.mode.manager.ContextManagerBuilder index ee1dd93b9fa03..71671fd356b60 100644 --- a/mode/type/standalone/core/src/main/resources/META-INF/services/org.apache.shardingsphere.mode.manager.ContextManagerBuilder +++ b/mode/type/standalone/core/src/main/resources/META-INF/services/org.apache.shardingsphere.mode.manager.ContextManagerBuilder @@ -16,3 +16,4 @@ # org.apache.shardingsphere.mode.manager.standalone.StandaloneContextManagerBuilder 
+org.apache.shardingsphere.mode.manager.standalone.NewStandaloneContextManagerBuilder diff --git a/mode/type/standalone/core/src/test/java/org/apache/shardingsphere/mode/manager/standalone/fixture/StandalonePersistRepositoryFixture.java b/mode/type/standalone/core/src/test/java/org/apache/shardingsphere/mode/manager/standalone/fixture/StandalonePersistRepositoryFixture.java index f5b5a0471dd32..37587c0751dde 100644 --- a/mode/type/standalone/core/src/test/java/org/apache/shardingsphere/mode/manager/standalone/fixture/StandalonePersistRepositoryFixture.java +++ b/mode/type/standalone/core/src/test/java/org/apache/shardingsphere/mode/manager/standalone/fixture/StandalonePersistRepositoryFixture.java @@ -59,7 +59,6 @@ public void persist(final String key, final String value) { @Override public void update(final String key, final String value) { - } @Override diff --git a/mode/type/standalone/repository/provider/jdbc/src/main/java/org/apache/shardingsphere/mode/repository/standalone/jdbc/sql/JDBCRepositorySQLLoader.java b/mode/type/standalone/repository/provider/jdbc/src/main/java/org/apache/shardingsphere/mode/repository/standalone/jdbc/sql/JDBCRepositorySQLLoader.java index 6794a0d7ad50f..94b72e46f7933 100644 --- a/mode/type/standalone/repository/provider/jdbc/src/main/java/org/apache/shardingsphere/mode/repository/standalone/jdbc/sql/JDBCRepositorySQLLoader.java +++ b/mode/type/standalone/repository/provider/jdbc/src/main/java/org/apache/shardingsphere/mode/repository/standalone/jdbc/sql/JDBCRepositorySQLLoader.java @@ -70,7 +70,7 @@ public static JDBCRepositorySQL load(final String type) { while (resources.hasMoreElements()) { URL resource = resources.nextElement(); result = JAR_URL_PROTOCOLS.contains(resource.getProtocol()) ? 
loadFromJar(resource, type) : loadFromDirectory(resource, type); - if (null != result && Objects.equals(result.isDefault(), false)) { + if (null != result && !result.isDefault()) { break; } } @@ -85,8 +85,7 @@ private static JDBCRepositorySQL loadFromDirectory(final URL url, final String t @Override public FileVisitResult visitFile(final Path file, final BasicFileAttributes attributes) throws IOException { if (file.toString().endsWith(FILE_EXTENSION)) { - JDBCRepositorySQL provider = (JDBCRepositorySQL) JAXBContext.newInstance(JDBCRepositorySQL.class).createUnmarshaller() - .unmarshal(Files.newInputStream(file.toFile().toPath())); + JDBCRepositorySQL provider = (JDBCRepositorySQL) JAXBContext.newInstance(JDBCRepositorySQL.class).createUnmarshaller().unmarshal(Files.newInputStream(file.toFile().toPath())); if (provider.isDefault()) { result[0] = provider; } diff --git a/mode/type/standalone/repository/provider/jdbc/src/main/resources/sql/H2.xml b/mode/type/standalone/repository/provider/jdbc/src/main/resources/sql/H2.xml index ea5f5a8ee2857..752ed87ee6035 100644 --- a/mode/type/standalone/repository/provider/jdbc/src/main/resources/sql/H2.xml +++ b/mode/type/standalone/repository/provider/jdbc/src/main/resources/sql/H2.xml @@ -19,7 +19,7 @@ CREATE TABLE IF NOT EXISTS `repository`(id varchar(36) PRIMARY KEY, `key` TEXT, `value` TEXT, parent TEXT) SELECT `value` FROM `repository` WHERE `key` = ? - SELECT `key` FROM `repository` WHERE parent = ? + SELECT DISTINCT(`key`) FROM `repository` WHERE parent = ? INSERT INTO `repository` VALUES(?, ?, ?, ?) UPDATE `repository` SET `value` = ? WHERE `key` = ? DELETE FROM `repository` WHERE `key` = ? 
diff --git a/mode/type/standalone/repository/provider/jdbc/src/main/resources/sql/MySQL.xml b/mode/type/standalone/repository/provider/jdbc/src/main/resources/sql/MySQL.xml index 82151e2077497..6a34707dc71cc 100644 --- a/mode/type/standalone/repository/provider/jdbc/src/main/resources/sql/MySQL.xml +++ b/mode/type/standalone/repository/provider/jdbc/src/main/resources/sql/MySQL.xml @@ -19,7 +19,7 @@ CREATE TABLE IF NOT EXISTS `repository`(id varchar(36) PRIMARY KEY, `key` TEXT, `value` TEXT, parent TEXT) SELECT `value` FROM `repository` WHERE `key` = ? - SELECT `key` FROM `repository` WHERE parent = ? ORDER BY `key` ASC + SELECT DISTINCT(`key`) FROM `repository` WHERE parent = ? ORDER BY `key` ASC INSERT INTO `repository` VALUES(?, ?, ?, ?) UPDATE `repository` SET `value` = ? WHERE `key` = ? DELETE FROM `repository` WHERE `key` = ? diff --git a/parser/distsql/engine/src/main/java/org/apache/shardingsphere/distsql/parser/core/kernel/KernelDistSQLStatementVisitor.java b/parser/distsql/engine/src/main/java/org/apache/shardingsphere/distsql/parser/core/kernel/KernelDistSQLStatementVisitor.java index b1391d1c32bad..881dc58c42558 100644 --- a/parser/distsql/engine/src/main/java/org/apache/shardingsphere/distsql/parser/core/kernel/KernelDistSQLStatementVisitor.java +++ b/parser/distsql/engine/src/main/java/org/apache/shardingsphere/distsql/parser/core/kernel/KernelDistSQLStatementVisitor.java @@ -137,9 +137,10 @@ public ASTNode visitStorageUnitDefinition(final StorageUnitDefinitionContext ctx String user = getIdentifierValue(ctx.user()); String password = null == ctx.password() ? "" : getPassword(ctx.password()); Properties props = getProperties(ctx.propertiesDefinition()); - return null != ctx.urlSource() ? 
new URLBasedDataSourceSegment(getIdentifierValue(ctx.storageUnitName()), getIdentifierValue(ctx.urlSource().url()), user, password, props) - : new HostnameAndPortBasedDataSourceSegment(getIdentifierValue(ctx.storageUnitName()), getIdentifierValue(ctx.simpleSource().hostname()), - ctx.simpleSource().port().getText(), getIdentifierValue(ctx.simpleSource().dbName()), user, password, props); + return null == ctx.urlSource() + ? new HostnameAndPortBasedDataSourceSegment(getIdentifierValue(ctx.storageUnitName()), + getIdentifierValue(ctx.simpleSource().hostname()), ctx.simpleSource().port().getText(), getIdentifierValue(ctx.simpleSource().dbName()), user, password, props) + : new URLBasedDataSourceSegment(getIdentifierValue(ctx.storageUnitName()), getIdentifierValue(ctx.urlSource().url()), user, password, props); } private String getPassword(final PasswordContext ctx) { @@ -174,7 +175,7 @@ public ASTNode visitDisableComputeNode(final DisableComputeNodeContext ctx) { @Override public ASTNode visitLabelComputeNode(final LabelComputeNodeContext ctx) { Collection labels = ctx.label().stream().map(this::getIdentifierValue).collect(Collectors.toList()); - return new LabelComputeNodeStatement(ctx.RELABEL() != null, getIdentifierValue(ctx.instanceId()), labels); + return new LabelComputeNodeStatement(null != ctx.RELABEL(), getIdentifierValue(ctx.instanceId()), labels); } @Override @@ -206,7 +207,7 @@ private Properties getProperties(final PropertiesDefinitionContext ctx) { @Override public ASTNode visitUnregisterStorageUnit(final UnregisterStorageUnitContext ctx) { boolean ignoreSingleTables = null != ctx.ignoreSingleTables(); - return new UnregisterStorageUnitStatement(ctx.ifExists() != null, + return new UnregisterStorageUnitStatement(null != ctx.ifExists(), ctx.storageUnitName().stream().map(ParseTree::getText).map(each -> new IdentifierValue(each).getValue()).collect(Collectors.toList()), ignoreSingleTables); } diff --git 
a/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverter.java b/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverter.java index 78b9d0cc9117b..d9632ce5515a2 100644 --- a/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverter.java +++ b/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverter.java @@ -23,7 +23,7 @@ import org.apache.shardingsphere.distsql.parser.segment.HostnameAndPortBasedDataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.URLBasedDataSourceSegment; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import java.util.Collection; import java.util.LinkedHashMap; @@ -42,10 +42,10 @@ public final class DataSourceSegmentsConverter { * @param dataSourceSegments data source segments * @return data source properties map */ - public static Map convert(final DatabaseType databaseType, final Collection dataSourceSegments) { - Map result = new LinkedHashMap<>(dataSourceSegments.size(), 1F); + public static Map convert(final DatabaseType databaseType, final Collection dataSourceSegments) { + Map result = new LinkedHashMap<>(dataSourceSegments.size(), 1F); for (DataSourceSegment each : dataSourceSegments) { - result.put(each.getName(), new DataSourceProperties("com.zaxxer.hikari.HikariDataSource", createProperties(databaseType, each))); + result.put(each.getName(), new DataSourcePoolProperties("com.zaxxer.hikari.HikariDataSource", createProperties(databaseType, each))); } return result; } diff --git 
a/parser/distsql/statement/src/test/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverterTest.java b/parser/distsql/statement/src/test/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverterTest.java index e269c9c885cf8..9c365cc88c1f0 100644 --- a/parser/distsql/statement/src/test/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverterTest.java +++ b/parser/distsql/statement/src/test/java/org/apache/shardingsphere/distsql/parser/segment/converter/DataSourceSegmentsConverterTest.java @@ -21,7 +21,7 @@ import org.apache.shardingsphere.distsql.parser.segment.HostnameAndPortBasedDataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.URLBasedDataSourceSegment; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.util.PropertiesBuilder; import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; @@ -41,7 +41,7 @@ class DataSourceSegmentsConverterTest { @Test void assertConvert() { - Map actual = DataSourceSegmentsConverter.convert(TypedSPILoader.getService(DatabaseType.class, "MySQL"), createDataSourceSegments()); + Map actual = DataSourceSegmentsConverter.convert(TypedSPILoader.getService(DatabaseType.class, "MySQL"), createDataSourceSegments()); assertThat(actual.size(), is(2)); assertTrue(actual.keySet().containsAll(Arrays.asList("ds0", "ds1"))); assertThat(actual.values().iterator().next().getAllLocalProperties().get("username"), is("root0")); diff --git a/parser/sql/dialect/mysql/src/main/antlr4/imports/mysql/BaseRule.g4 b/parser/sql/dialect/mysql/src/main/antlr4/imports/mysql/BaseRule.g4 index a75bac4296f25..113ca78dc6094 100644 
--- a/parser/sql/dialect/mysql/src/main/antlr4/imports/mysql/BaseRule.g4 +++ b/parser/sql/dialect/mysql/src/main/antlr4/imports/mysql/BaseRule.g4 @@ -947,7 +947,7 @@ columnRefList ; functionCall - : aggregationFunction | specialFunction | regularFunction | jsonFunction | udfFunction + : aggregationFunction | specialFunction | jsonFunction | regularFunction | udfFunction ; udfFunction diff --git a/parser/sql/dialect/mysql/src/main/java/org/apache/shardingsphere/sql/parser/mysql/visitor/statement/MySQLStatementVisitor.java b/parser/sql/dialect/mysql/src/main/java/org/apache/shardingsphere/sql/parser/mysql/visitor/statement/MySQLStatementVisitor.java index 960a876105c38..ae5ce5b7a28d8 100644 --- a/parser/sql/dialect/mysql/src/main/java/org/apache/shardingsphere/sql/parser/mysql/visitor/statement/MySQLStatementVisitor.java +++ b/parser/sql/dialect/mysql/src/main/java/org/apache/shardingsphere/sql/parser/mysql/visitor/statement/MySQLStatementVisitor.java @@ -152,6 +152,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.enums.JoinType; import org.apache.shardingsphere.sql.parser.sql.common.enums.OrderDirection; import org.apache.shardingsphere.sql.parser.sql.common.enums.ParameterMarkerType; +import org.apache.shardingsphere.sql.parser.sql.common.enums.SubqueryType; import org.apache.shardingsphere.sql.parser.sql.common.segment.dal.VariableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.constraint.ConstraintSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.engine.EngineSegment; @@ -176,6 +177,8 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ListExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.MatchAgainstExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.NotExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.RowExpression; +import 
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.UnaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ValuesExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.complex.CommonExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment; @@ -485,17 +488,12 @@ private ASTNode createAssignmentSegment(final BooleanPrimaryContext ctx) { private ASTNode createCompareSegment(final BooleanPrimaryContext ctx) { ExpressionSegment left = (ExpressionSegment) visit(ctx.booleanPrimary()); ExpressionSegment right; - String operator; - if (null != ctx.ALL()) { - operator = null != ctx.SAFE_EQ_() ? ctx.SAFE_EQ_().getText() : ctx.comparisonOperator().getText() + " ALL"; - } else { - operator = null != ctx.SAFE_EQ_() ? ctx.SAFE_EQ_().getText() : ctx.comparisonOperator().getText(); - } if (null != ctx.predicate()) { right = (ExpressionSegment) visit(ctx.predicate()); } else { right = new SubqueryExpressionSegment(new SubquerySegment(ctx.subquery().start.getStartIndex(), ctx.subquery().stop.getStopIndex(), (MySQLSelectStatement) visit(ctx.subquery()))); } + String operator = null == ctx.SAFE_EQ_() ? ctx.comparisonOperator().getText() : ctx.SAFE_EQ_().getText(); String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); } @@ -548,7 +546,7 @@ private BinaryOperationExpression createBinaryOperationExpressionFromLike(final listExpression.getItems().add((ExpressionSegment) visit(each)); } right = listExpression; - operator = null != ctx.NOT() ? "NOT LIKE" : "LIKE"; + operator = null == ctx.NOT() ? 
"LIKE" : "NOT LIKE"; } String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); @@ -557,7 +555,7 @@ private BinaryOperationExpression createBinaryOperationExpressionFromLike(final private BinaryOperationExpression createBinaryOperationExpressionFromRegexp(final PredicateContext ctx) { ExpressionSegment left = (ExpressionSegment) visit(ctx.bitExpr(0)); ExpressionSegment right = (ExpressionSegment) visit(ctx.bitExpr(1)); - String operator = null != ctx.NOT() ? "NOT REGEXP" : "REGEXP"; + String operator = null == ctx.NOT() ? "REGEXP" : "NOT REGEXP"; String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); } @@ -565,7 +563,7 @@ private BinaryOperationExpression createBinaryOperationExpressionFromRegexp(fina private BinaryOperationExpression createBinaryOperationExpressionFromRlike(final PredicateContext ctx) { ExpressionSegment left = (ExpressionSegment) visit(ctx.bitExpr(0)); ExpressionSegment right = (ExpressionSegment) visit(ctx.bitExpr(1)); - String operator = null != ctx.NOT() ? "NOT RLIKE" : "RLIKE"; + String operator = null == ctx.NOT() ? 
"RLIKE" : "NOT RLIKE"; String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); } @@ -596,7 +594,11 @@ public final ASTNode visitSimpleExpr(final SimpleExprContext ctx) { int stopIndex = ctx.stop.getStopIndex(); if (null != ctx.subquery()) { SubquerySegment subquerySegment = new SubquerySegment(ctx.subquery().getStart().getStartIndex(), ctx.subquery().getStop().getStopIndex(), (MySQLSelectStatement) visit(ctx.subquery())); - return null == ctx.EXISTS() ? new SubqueryExpressionSegment(subquerySegment) : new ExistsSubqueryExpression(startIndex, stopIndex, subquerySegment); + if (null != ctx.EXISTS()) { + subquerySegment.setSubqueryType(SubqueryType.EXISTS_SUBQUERY); + return new ExistsSubqueryExpression(startIndex, stopIndex, subquerySegment); + } + return new SubqueryExpressionSegment(subquerySegment); } if (null != ctx.parameterMarker()) { ParameterMarkerValue parameterMarker = (ParameterMarkerValue) visit(ctx.parameterMarker()); @@ -614,7 +616,11 @@ public final ASTNode visitSimpleExpr(final SimpleExprContext ctx) { return visit(ctx.functionCall()); } if (null != ctx.collateClause()) { - return new CollateExpression(startIndex, stopIndex, (SimpleExpressionSegment) visit(ctx.collateClause())); + if (null != ctx.simpleExpr()) { + ExpressionSegment expr = (ExpressionSegment) visit(ctx.simpleExpr(0)); + return new CollateExpression(startIndex, stopIndex, (SimpleExpressionSegment) visit(ctx.collateClause()), expr); + } + return new CollateExpression(startIndex, stopIndex, (SimpleExpressionSegment) visit(ctx.collateClause()), null); } if (null != ctx.columnRef()) { return visit(ctx.columnRef()); @@ -749,9 +755,9 @@ private CombineSegment createCombineSegment(final CombineClauseContext ctx, fina if (null != ctx.EXCEPT()) { combineType = CombineType.EXCEPT; } else { - combineType = null != 
ctx.combineOption() && null != ctx.combineOption().ALL() ? CombineType.UNION_ALL : CombineType.UNION; + combineType = null == ctx.combineOption() || null == ctx.combineOption().ALL() ? CombineType.UNION : CombineType.UNION_ALL; } - MySQLSelectStatement right = null != ctx.queryPrimary() ? (MySQLSelectStatement) visit(ctx.queryPrimary()) : (MySQLSelectStatement) visit(ctx.queryExpressionParens()); + MySQLSelectStatement right = null == ctx.queryPrimary() ? (MySQLSelectStatement) visit(ctx.queryExpressionParens()) : (MySQLSelectStatement) visit(ctx.queryPrimary()); return new CombineSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), left, combineType, right); } @@ -896,17 +902,21 @@ public final ASTNode visitAggregationFunction(final AggregationFunctionContext c public final ASTNode visitJsonFunction(final JsonFunctionContext ctx) { JsonFunctionNameContext functionNameContext = ctx.jsonFunctionName(); String functionName; + FunctionSegment result; if (null != functionNameContext) { functionName = functionNameContext.getText(); + result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), functionName, getOriginalText(ctx)); for (ExprContext each : ctx.expr()) { - visit(each); + result.getParameters().add((ExpressionSegment) visit(each)); } } else if (null != ctx.JSON_SEPARATOR()) { functionName = ctx.JSON_SEPARATOR().getText(); + result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), functionName, getOriginalText(ctx)); } else { functionName = ctx.JSON_UNQUOTED_SEPARATOR().getText(); + result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), functionName, getOriginalText(ctx)); } - return new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), functionName, getOriginalText(ctx)); + return result; } private ASTNode createAggregationSegment(final AggregationFunctionContext ctx, final String aggregationType) { @@ -995,7 
+1005,10 @@ public final ASTNode visitGroupConcatFunction(final GroupConcatFunctionContext c @Override public final ASTNode visitWindowFunction(final WindowFunctionContext ctx) { super.visitWindowFunction(ctx); - return new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.funcName.getText(), getOriginalText(ctx)); + FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.windowingClause().OVER().getText(), getOriginalText(ctx)); + result.getParameters().add(new FunctionSegment(ctx.funcName.getStartIndex(), ctx.funcName.getStopIndex(), ctx.funcName.getText(), ctx.funcName.getText() + "()")); + result.getParameters().addAll(getWindowSpecification(ctx.windowingClause().windowSpecification())); + return result; } @Override @@ -1135,7 +1148,7 @@ public final ASTNode visitCurrentUserFunction(final CurrentUserFunctionContext c @Override public final ASTNode visitRegularFunction(final RegularFunctionContext ctx) { - return null != ctx.completeRegularFunction() ? visit(ctx.completeRegularFunction()) : visit(ctx.shorthandRegularFunction()); + return null == ctx.completeRegularFunction() ? 
visit(ctx.shorthandRegularFunction()) : visit(ctx.completeRegularFunction()); } @Override @@ -1169,9 +1182,29 @@ private ASTNode visitRemainSimpleExpr(final SimpleExprContext ctx) { if (null != ctx.BINARY()) { return visit(ctx.simpleExpr(0)); } + if (null != ctx.TILDE_()) { + return new UnaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), (ExpressionSegment) visit(ctx.simpleExpr(0)), "~", ctx.getText()); + } if (null != ctx.variable()) { return visit(ctx.variable()); } + if (null != ctx.LP_()) { + RowExpression result = new RowExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); + for (ExprContext each : ctx.expr()) { + result.getItems().add((ExpressionSegment) visit(each)); + } + return result; + } + if (null != ctx.RETURNING()) { + ListExpression result = new ListExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); + result.getItems().add(new LiteralExpressionSegment(ctx.path().start.getStartIndex(), ctx.path().stop.getStopIndex(), ctx.path().getText())); + result.getItems().add(new LiteralExpressionSegment(ctx.RETURNING().getSymbol().getStartIndex(), ctx.RETURNING().getSymbol().getStopIndex(), ctx.RETURNING().getSymbol().getText())); + result.getItems().add((ExpressionSegment) visit(ctx.dataType())); + return result; + } + if (null != ctx.LBE_()) { + return visit(ctx.expr(0)); + } for (ExprContext each : ctx.expr()) { visit(each); } @@ -1197,7 +1230,7 @@ public ASTNode visitCaseExpression(final CaseExpressionContext ctx) { @Override public ASTNode visitVariable(final VariableContext ctx) { - return null != ctx.systemVariable() ? visit(ctx.systemVariable()) : visit(ctx.userVariable()); + return null == ctx.systemVariable() ? 
visit(ctx.userVariable()) : visit(ctx.systemVariable()); } @Override @@ -1287,7 +1320,7 @@ public final ASTNode visitOrderByClause(final OrderByClauseContext ctx) { public final ASTNode visitOrderByItem(final OrderByItemContext ctx) { OrderDirection orderDirection; if (null != ctx.direction()) { - orderDirection = null != ctx.direction().DESC() ? OrderDirection.DESC : OrderDirection.ASC; + orderDirection = null == ctx.direction().DESC() ? OrderDirection.ASC : OrderDirection.DESC; } else { orderDirection = OrderDirection.ASC; } @@ -1328,6 +1361,7 @@ public ASTNode visitInsert(final InsertContext ctx) { @Override public ASTNode visitInsertSelectClause(final InsertSelectClauseContext ctx) { MySQLInsertStatement result = new MySQLInsertStatement(); + result.setInsertSelect(createInsertSelectSegment(ctx)); if (null != ctx.LP_()) { if (null != ctx.fields()) { result.setInsertColumns(new InsertColumnsSegment(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex(), createInsertColumns(ctx.fields()))); @@ -1337,12 +1371,12 @@ public ASTNode visitInsertSelectClause(final InsertSelectClauseContext ctx) { } else { result.setInsertColumns(new InsertColumnsSegment(ctx.start.getStartIndex() - 1, ctx.start.getStartIndex() - 1, Collections.emptyList())); } - result.setInsertSelect(createInsertSelectSegment(ctx)); return result; } private SubquerySegment createInsertSelectSegment(final InsertSelectClauseContext ctx) { MySQLSelectStatement selectStatement = (MySQLSelectStatement) visit(ctx.select()); + selectStatement.getParameterMarkerSegments().addAll(getParameterMarkerSegments()); return new SubquerySegment(ctx.select().start.getStartIndex(), ctx.select().stop.getStopIndex(), selectStatement); } @@ -1694,7 +1728,7 @@ private ASTNode createProjection(final ProjectionContext ctx, final AliasSegment } if (projection instanceof BinaryOperationExpression) { int startIndex = projection.getStartIndex(); - int stopIndex = null != alias ? 
alias.getStopIndex() : projection.getStopIndex(); + int stopIndex = null == alias ? projection.getStopIndex() : alias.getStopIndex(); ExpressionProjectionSegment result = new ExpressionProjectionSegment(startIndex, stopIndex, projection.getText(), projection); result.setAlias(alias); return result; @@ -1764,7 +1798,7 @@ public ASTNode visitEscapedTableReference(final EscapedTableReferenceContext ctx public ASTNode visitTableReference(final TableReferenceContext ctx) { TableSegment result; TableSegment left; - left = null != ctx.tableFactor() ? (TableSegment) visit(ctx.tableFactor()) : (TableSegment) visit(ctx.escapedTableReference()); + left = null == ctx.tableFactor() ? (TableSegment) visit(ctx.escapedTableReference()) : (TableSegment) visit(ctx.tableFactor()); for (JoinedTableContext each : ctx.joinedTable()) { left = visitJoinedTable(each, left); } @@ -1800,9 +1834,9 @@ private JoinTableSegment visitJoinedTable(final JoinedTableContext ctx, final Ta result.setStopIndex(ctx.stop.getStopIndex()); result.setJoinType(getJoinType(ctx)); result.setNatural(null != ctx.naturalJoinType()); - TableSegment right = null != ctx.tableFactor() ? (TableSegment) visit(ctx.tableFactor()) : (TableSegment) visit(ctx.tableReference()); + TableSegment right = null == ctx.tableFactor() ? (TableSegment) visit(ctx.tableReference()) : (TableSegment) visit(ctx.tableFactor()); result.setRight(right); - return null != ctx.joinSpecification() ? visitJoinSpecification(ctx.joinSpecification(), result) : result; + return null == ctx.joinSpecification() ? result : visitJoinSpecification(ctx.joinSpecification(), result); } private String getJoinType(final JoinedTableContext ctx) { @@ -1810,7 +1844,7 @@ private String getJoinType(final JoinedTableContext ctx) { return JoinType.INNER.name(); } if (null != ctx.outerJoinType()) { - return ctx.outerJoinType().LEFT() != null ? JoinType.LEFT.name() : JoinType.RIGHT.name(); + return null == ctx.outerJoinType().LEFT() ? 
JoinType.RIGHT.name() : JoinType.LEFT.name(); } if (null != ctx.naturalJoinType()) { return getNaturalJoinType(ctx.naturalJoinType()); @@ -1821,11 +1855,11 @@ private String getJoinType(final JoinedTableContext ctx) { private String getNaturalJoinType(final NaturalJoinTypeContext ctx) { if (null != ctx.LEFT()) { return JoinType.LEFT.name(); - } else if (null != ctx.RIGHT()) { + } + if (null != ctx.RIGHT()) { return JoinType.RIGHT.name(); - } else { - return JoinType.INNER.name(); } + return JoinType.INNER.name(); } private JoinTableSegment visitJoinSpecification(final JoinSpecificationContext ctx, final JoinTableSegment result) { diff --git a/parser/sql/dialect/mysql/src/main/java/org/apache/shardingsphere/sql/parser/mysql/visitor/statement/type/MySQLDALStatementVisitor.java b/parser/sql/dialect/mysql/src/main/java/org/apache/shardingsphere/sql/parser/mysql/visitor/statement/type/MySQLDALStatementVisitor.java index a0e9e7410e3d9..d06c4985350a6 100644 --- a/parser/sql/dialect/mysql/src/main/java/org/apache/shardingsphere/sql/parser/mysql/visitor/statement/type/MySQLDALStatementVisitor.java +++ b/parser/sql/dialect/mysql/src/main/java/org/apache/shardingsphere/sql/parser/mysql/visitor/statement/type/MySQLDALStatementVisitor.java @@ -973,8 +973,8 @@ private VariableAssignSegment getVariableAssign(final OptionValueContext ctx) { @Override public ASTNode visitSetCharacter(final SetCharacterContext ctx) { VariableAssignSegment characterSet = new VariableAssignSegment(); - int startIndex = null != ctx.CHARSET() ? ctx.CHARSET().getSymbol().getStartIndex() : ctx.CHARACTER().getSymbol().getStartIndex(); - int stopIndex = null != ctx.CHARSET() ? ctx.CHARSET().getSymbol().getStopIndex() : ctx.SET(1).getSymbol().getStopIndex(); + int startIndex = null == ctx.CHARSET() ? ctx.CHARACTER().getSymbol().getStartIndex() : ctx.CHARSET().getSymbol().getStartIndex(); + int stopIndex = null == ctx.CHARSET() ? 
ctx.SET(1).getSymbol().getStopIndex() : ctx.CHARSET().getSymbol().getStopIndex(); // TODO Consider setting all three system variables: character_set_client, character_set_results, character_set_connection String variableName = (null != ctx.CHARSET()) ? ctx.CHARSET().getText() : "character_set_client"; VariableSegment variable = new VariableSegment(startIndex, stopIndex, variableName); diff --git a/parser/sql/dialect/mysql/src/main/java/org/apache/shardingsphere/sql/parser/mysql/visitor/statement/type/MySQLDMLStatementVisitor.java b/parser/sql/dialect/mysql/src/main/java/org/apache/shardingsphere/sql/parser/mysql/visitor/statement/type/MySQLDMLStatementVisitor.java index bc4cb9a55a813..010d3228a4835 100644 --- a/parser/sql/dialect/mysql/src/main/java/org/apache/shardingsphere/sql/parser/mysql/visitor/statement/type/MySQLDMLStatementVisitor.java +++ b/parser/sql/dialect/mysql/src/main/java/org/apache/shardingsphere/sql/parser/mysql/visitor/statement/type/MySQLDMLStatementVisitor.java @@ -70,7 +70,7 @@ public ASTNode visitImportStatement(final ImportStatementContext ctx) { @Override public ASTNode visitLoadStatement(final LoadStatementContext ctx) { - return null != ctx.loadDataStatement() ? visit(ctx.loadDataStatement()) : visit(ctx.loadXmlStatement()); + return null == ctx.loadDataStatement() ? 
visit(ctx.loadXmlStatement()) : visit(ctx.loadDataStatement()); } @Override diff --git a/parser/sql/dialect/mysql/src/test/java/org/apache/shardingsphere/test/it/sql/parser/it/mysql/external/ExternalMySQLParserIT.java b/parser/sql/dialect/mysql/src/test/java/org/apache/shardingsphere/test/it/sql/parser/it/mysql/external/ExternalMySQLParserIT.java index 6bab3319a34ef..31733bafceb46 100644 --- a/parser/sql/dialect/mysql/src/test/java/org/apache/shardingsphere/test/it/sql/parser/it/mysql/external/ExternalMySQLParserIT.java +++ b/parser/sql/dialect/mysql/src/test/java/org/apache/shardingsphere/test/it/sql/parser/it/mysql/external/ExternalMySQLParserIT.java @@ -18,10 +18,10 @@ package org.apache.shardingsphere.test.it.sql.parser.it.mysql.external; import org.apache.shardingsphere.test.it.sql.parser.external.ExternalSQLParserIT; -import org.apache.shardingsphere.test.it.sql.parser.external.loader.ExternalMySQLTestParameterLoader; -import org.apache.shardingsphere.test.loader.ExternalCaseSettings; +import org.apache.shardingsphere.test.it.sql.parser.external.loader.MySQLExternalTestParameterLoadTemplate; +import org.apache.shardingsphere.test.it.sql.parser.loader.ExternalCaseSettings; -@ExternalCaseSettings(value = "MySQL", caseURL = ExternalMySQLParserIT.CASE_URL, resultURL = ExternalMySQLParserIT.RESULT_URL, caseLoader = ExternalMySQLTestParameterLoader.class) +@ExternalCaseSettings(value = "MySQL", caseURL = ExternalMySQLParserIT.CASE_URL, resultURL = ExternalMySQLParserIT.RESULT_URL, template = MySQLExternalTestParameterLoadTemplate.class) class ExternalMySQLParserIT extends ExternalSQLParserIT { static final String CASE_URL = "https://github.com/mysql/mysql-server/tree/8.0/mysql-test/t"; diff --git a/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/BaseRule.g4 b/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/BaseRule.g4 index af7d0efdae363..e852aa49f8ff2 100644 --- 
a/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/BaseRule.g4 +++ b/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/BaseRule.g4 @@ -438,6 +438,19 @@ unreservedWord | YES | ZONE | JSON + | POSITION + | INET + | INT1 + | INT2 + | INT4 + | INT16 + | FLOAT4 + | ELEM_CONTAINED_BY_RANGE + | INT8RANGE + | INT4RANGE + | NUMRANGE + | DATERANGE + | TSQUERY ; typeFuncNameKeyword @@ -494,6 +507,10 @@ name : identifier ; +modelName + : identifier + ; + tableNames : LP_? tableName (COMMA_ tableName)* RP_? ; @@ -534,6 +551,10 @@ comparisonOperator : EQ_ | GTE_ | GT_ | LTE_ | LT_ | NEQ_ ; +inetOperator + : SIGNED_LEFT_SHIFT_ | SIGNED_LEFT_SHIFT_E_ | SIGNED_RIGHT_SHIFT_ | SIGNED_RIGHT_SHIFT_E_ + ; + patternMatchingOperator : LIKE | TILDE_TILDE_ @@ -569,11 +590,17 @@ aExpr | aExpr MOD_ aExpr | aExpr CARET_ aExpr | aExpr AMPERSAND_ aExpr + | DN_ aExpr + | aExpr NOT_ + | aExpr POUND_ aExpr + | TILDE_ aExpr + | CUBE_ROOT_ aExpr | aExpr VERTICAL_BAR_ aExpr | aExpr qualOp aExpr | qualOp aExpr | aExpr qualOp | aExpr comparisonOperator aExpr + | aExpr inetOperator aExpr | NOT aExpr | aExpr patternMatchingOperator aExpr ESCAPE aExpr | aExpr patternMatchingOperator aExpr @@ -1073,6 +1100,14 @@ functionExprCommonSubexpr | XMLPI LP_ NAME identifier COMMA_ aExpr RP_ | XMLROOT LP_ aExpr COMMA_ xmlRootVersion xmlRootStandalone? RP_ | XMLSERIALIZE LP_ documentOrContent aExpr AS simpleTypeName RP_ + | PREDICT BY modelName LP_ FEATURES name (COMMA_ name)* RP_ + | TS_REWRITE LP_ aExpr (TYPE_CAST_ TSQUERY)? (COMMA_ aExpr (TYPE_CAST_ TSQUERY)?)* RP_ + | ELEM_CONTAINED_BY_RANGE LP_ aExpr COMMA_ dataType RP_ + | (LOWER_INF | UPPER_INF) LP_ aExpr TYPE_CAST_ identifier RP_ + | ABBREV LP_ (INET | CIDR) STRING_ RP_ + | SET_MASKLEN LP_ STRING_ (TYPE_CAST_ CIDR)? 
COMMA_ numberLiterals RP_ + | TEXT LP_ INET STRING_ RP_ + | TRUNC LP_ MACADDR STRING_ RP_ ; typeName diff --git a/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/DCLStatement.g4 b/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/DCLStatement.g4 index af386c9e18612..d3757d5277feb 100644 --- a/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/DCLStatement.g4 +++ b/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/DCLStatement.g4 @@ -56,8 +56,12 @@ alterOptRoleElem | identifier ; +dropBehavior + : CASCADE | RESTRICT + ; + dropUser - : DROP USER ifExists? roleList + : DROP USER ifExists? nameList dropBehavior? ; alterUser diff --git a/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/DDLStatement.g4 b/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/DDLStatement.g4 index 8aef4046a7061..6bc716b689a72 100644 --- a/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/DDLStatement.g4 +++ b/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/DDLStatement.g4 @@ -1933,6 +1933,8 @@ onObjectClause | ALL FUNCTIONS IN SCHEMA nameList | ALL PROCEDURES IN SCHEMA nameList | ALL ROUTINES IN SCHEMA nameList + | CLIENT_MASTER_KEY nameList + | COLUMN_ENCRYPTION_KEY nameList ; numericOnlyList diff --git a/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/DMLStatement.g4 b/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/DMLStatement.g4 index 9440b79816c31..faa94011fac67 100644 --- a/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/DMLStatement.g4 +++ b/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/DMLStatement.g4 @@ -108,10 +108,7 @@ selectWithParens ; selectNoParens - : selectClauseN - | selectClauseN sortClause - | selectClauseN sortClause? forLockingClause selectLimit? - | selectClauseN sortClause? selectLimit forLockingClause? + : selectClauseN sortClause? (forLockingClause selectLimit? | selectLimit forLockingClause?)? 
| withClause selectClauseN | withClause selectClauseN sortClause | withClause selectClauseN sortClause? forLockingClause selectLimit? @@ -263,9 +260,7 @@ targetList targetEl : colId DOT_ASTERISK_ - | aExpr AS identifier - | aExpr identifier - | aExpr + | aExpr AS? identifier? | ASTERISK_ ; diff --git a/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/OpenGaussKeyword.g4 b/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/OpenGaussKeyword.g4 index 69ee2f8f2346d..bd9e06022eb64 100644 --- a/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/OpenGaussKeyword.g4 +++ b/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/OpenGaussKeyword.g4 @@ -1396,3 +1396,55 @@ INOUT DUPLICATE : D U P L I C A T E ; + +PREDICT + : P R E D I C T + ; + +FEATURES + : F E A T U R E S + ; + +TS_REWRITE + : T S UL_ R E W R I T E + ; + +INT16 + : I N T [16] + ; + +INT1 + : I N T [1] + ; + +ELEM_CONTAINED_BY_RANGE + : E L E M UL_ C O N T A I N E D UL_ B Y UL_ R A N G E + ; + +LOWER_INF + : L O W E R UL_ I N F + ; + +UPPER_INF + : U P P E R UL_ I N F + ; + +ABBREV + : A B B R E V + ; + +SET_MASKLEN + : S E T UL_ M A S K L E N + ; + +TRUNC + : T R U N C + ; + +CLIENT_MASTER_KEY + : C L I E N T UL_ M A S T E R UL_ K E Y + ; + +COLUMN_ENCRYPTION_KEY + : C O L U M N UL_ E N C R Y P T I O N UL_ K E Y + ; diff --git a/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/Symbol.g4 b/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/Symbol.g4 index a509bbbaef349..c4c5ec707f0e2 100644 --- a/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/Symbol.g4 +++ b/parser/sql/dialect/opengauss/src/main/antlr4/imports/opengauss/Symbol.g4 @@ -88,3 +88,7 @@ GEOMETRIC_ABOVE_: '>^'; GEOMETRIC_INTERSECT_: '?#'; GEOMETRIC_PERPENDICULAR_: '?-|'; GEOMETRIC_SAME_AS_: '~='; +SIGNED_LEFT_SHIFT_E_: '<<='; +SIGNED_RIGHT_SHIFT_E_: '>>='; +DN_: '!!'; +CUBE_ROOT_: '||/'; diff --git 
a/parser/sql/dialect/opengauss/src/main/java/org/apache/shardingsphere/sql/parser/opengauss/visitor/statement/OpenGaussStatementVisitor.java b/parser/sql/dialect/opengauss/src/main/java/org/apache/shardingsphere/sql/parser/opengauss/visitor/statement/OpenGaussStatementVisitor.java index 182667c261941..b185b4c3a9856 100644 --- a/parser/sql/dialect/opengauss/src/main/java/org/apache/shardingsphere/sql/parser/opengauss/visitor/statement/OpenGaussStatementVisitor.java +++ b/parser/sql/dialect/opengauss/src/main/java/org/apache/shardingsphere/sql/parser/opengauss/visitor/statement/OpenGaussStatementVisitor.java @@ -116,6 +116,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.enums.JoinType; import org.apache.shardingsphere.sql.parser.sql.common.enums.OrderDirection; import org.apache.shardingsphere.sql.parser.sql.common.enums.ParameterMarkerType; +import org.apache.shardingsphere.sql.parser.sql.common.enums.SubqueryType; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.constraint.ConstraintSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.index.IndexNameSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.index.IndexSegment; @@ -225,7 +226,7 @@ public final ASTNode visitNumberLiterals(final NumberLiteralsContext ctx) { @Override public final ASTNode visitIdentifier(final IdentifierContext ctx) { UnreservedWordContext unreservedWord = ctx.unreservedWord(); - return null != unreservedWord ? visit(unreservedWord) : new IdentifierValue(ctx.getText()); + return null == unreservedWord ? 
new IdentifierValue(ctx.getText()) : visit(unreservedWord); } @Override @@ -414,6 +415,7 @@ private ExpressionSegment createSubqueryExpressionSegment(final CExprContext ctx SubquerySegment subquerySegment = new SubquerySegment(ctx.selectWithParens().getStart().getStartIndex(), ctx.selectWithParens().getStop().getStopIndex(), (OpenGaussSelectStatement) visit(ctx.selectWithParens())); if (null != ctx.EXISTS()) { + subquerySegment.setSubqueryType(SubqueryType.EXISTS_SUBQUERY); return new ExistsSubqueryExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), subquerySegment); } return new SubqueryExpressionSegment(subquerySegment); @@ -506,7 +508,7 @@ public ASTNode visitAexprConst(final AexprConstContext ctx) { } if (null != ctx.constTypeName() || null != ctx.funcName() && null == ctx.LP_()) { LiteralExpressionSegment expression = new LiteralExpressionSegment(ctx.STRING_().getSymbol().getStartIndex(), ctx.STRING_().getSymbol().getStopIndex(), value.getValue().toString()); - String dataType = null != ctx.constTypeName() ? ctx.constTypeName().getText() : ctx.funcName().getText(); + String dataType = null == ctx.constTypeName() ? ctx.funcName().getText() : ctx.constTypeName().getText(); return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), expression, dataType); } return SQLUtils.createLiteralExpression(value, ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); @@ -629,7 +631,7 @@ public final ASTNode visitSortClause(final SortClauseContext ctx) { @Override public final ASTNode visitSortby(final SortbyContext ctx) { - OrderDirection orderDirection = null != ctx.ascDesc() ? generateOrderDirection(ctx.ascDesc()) : OrderDirection.ASC; + OrderDirection orderDirection = null == ctx.ascDesc() ? 
OrderDirection.ASC : generateOrderDirection(ctx.ascDesc()); NullsOrderType nullsOrderType = generateNullsOrderType(ctx.nullsOrder()); ASTNode expr = visit(ctx.aExpr()); if (expr instanceof ColumnSegment) { @@ -749,6 +751,13 @@ public ASTNode visitQualifiedName(final QualifiedNameContext ctx) { @Override public ASTNode visitInsertRest(final InsertRestContext ctx) { OpenGaussInsertStatement result = new OpenGaussInsertStatement(); + ValuesClauseContext valuesClause = ctx.select().selectNoParens().selectClauseN().simpleSelect().valuesClause(); + if (null == valuesClause) { + OpenGaussSelectStatement selectStatement = (OpenGaussSelectStatement) visit(ctx.select()); + result.setInsertSelect(new SubquerySegment(ctx.select().start.getStartIndex(), ctx.select().stop.getStopIndex(), selectStatement)); + } else { + result.getValues().addAll(createInsertValuesSegments(valuesClause)); + } if (null == ctx.insertColumnList()) { result.setInsertColumns(new InsertColumnsSegment(ctx.start.getStartIndex() - 1, ctx.start.getStartIndex() - 1, Collections.emptyList())); } else { @@ -757,13 +766,6 @@ public ASTNode visitInsertRest(final InsertRestContext ctx) { InsertColumnsSegment insertColumnsSegment = new InsertColumnsSegment(insertColumns.start.getStartIndex() - 1, insertColumns.stop.getStopIndex() + 1, columns.getValue()); result.setInsertColumns(insertColumnsSegment); } - ValuesClauseContext valuesClause = ctx.select().selectNoParens().selectClauseN().simpleSelect().valuesClause(); - if (null == valuesClause) { - OpenGaussSelectStatement selectStatement = (OpenGaussSelectStatement) visit(ctx.select()); - result.setInsertSelect(new SubquerySegment(ctx.select().start.getStartIndex(), ctx.select().stop.getStopIndex(), selectStatement)); - } else { - result.getValues().addAll(createInsertValuesSegments(valuesClause)); - } return result; } @@ -1183,14 +1185,14 @@ private SubqueryTableSegment getSubqueryTableSegment(final TableReferenceContext private JoinTableSegment 
getJoinTableSegment(final TableReferenceContext ctx) { JoinTableSegment result = new JoinTableSegment(); result.setLeft((TableSegment) visit(ctx.tableReference())); - int startIndex = null != ctx.LP_() ? ctx.LP_().getSymbol().getStartIndex() : ctx.tableReference().start.getStartIndex(); + int startIndex = null == ctx.LP_() ? ctx.tableReference().start.getStartIndex() : ctx.LP_().getSymbol().getStartIndex(); int stopIndex = 0; AliasSegment alias = null; - if (null != ctx.aliasClause()) { + if (null == ctx.aliasClause()) { + stopIndex = null == ctx.RP_() ? ctx.tableReference().start.getStopIndex() : ctx.RP_().getSymbol().getStopIndex(); + } else { alias = (AliasSegment) visit(ctx.aliasClause()); startIndex = null == ctx.RP_() ? ctx.joinedTable().stop.getStopIndex() : ctx.RP_().getSymbol().getStopIndex(); - } else { - stopIndex = null == ctx.RP_() ? ctx.tableReference().start.getStopIndex() : ctx.RP_().getSymbol().getStopIndex(); } result.setStartIndex(startIndex); result.setStopIndex(stopIndex); diff --git a/parser/sql/dialect/opengauss/src/main/java/org/apache/shardingsphere/sql/parser/opengauss/visitor/statement/type/OpenGaussDDLStatementVisitor.java b/parser/sql/dialect/opengauss/src/main/java/org/apache/shardingsphere/sql/parser/opengauss/visitor/statement/type/OpenGaussDDLStatementVisitor.java index 4a63ec40777d0..ff57857de21c0 100644 --- a/parser/sql/dialect/opengauss/src/main/java/org/apache/shardingsphere/sql/parser/opengauss/visitor/statement/type/OpenGaussDDLStatementVisitor.java +++ b/parser/sql/dialect/opengauss/src/main/java/org/apache/shardingsphere/sql/parser/opengauss/visitor/statement/type/OpenGaussDDLStatementVisitor.java @@ -428,7 +428,7 @@ public ASTNode visitAddColumnSpecification(final AddColumnSpecificationContext c ColumnDefinitionContext columnDefinition = ctx.columnDefinition(); if (null != columnDefinition) { AddColumnDefinitionSegment addColumnDefinition = new AddColumnDefinitionSegment( - ctx.columnDefinition().getStart().getStartIndex(), 
columnDefinition.getStop().getStopIndex(), Collections.singletonList((ColumnDefinitionSegment) visit(columnDefinition))); + ctx.columnDefinition().getStart().getStartIndex(), columnDefinition.getStop().getStopIndex(), Collections.singleton((ColumnDefinitionSegment) visit(columnDefinition))); result.getValue().add(addColumnDefinition); } return result; @@ -488,7 +488,7 @@ public ASTNode visitModifyColumnSpecification(final ModifyColumnSpecificationCon @Override public ASTNode visitDropColumnSpecification(final DropColumnSpecificationContext ctx) { - return new DropColumnDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), Collections.singletonList((ColumnSegment) visit(ctx.columnName()))); + return new DropColumnDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), Collections.singleton((ColumnSegment) visit(ctx.columnName()))); } @Override diff --git a/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/BaseRule.g4 b/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/BaseRule.g4 index c662f1173122b..327a850c566ef 100644 --- a/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/BaseRule.g4 +++ b/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/BaseRule.g4 @@ -76,18 +76,18 @@ nullValueLiterals ; identifier - : IDENTIFIER_ | unreservedWord | STRING_ + : IDENTIFIER_ | unreservedWord | DOUBLE_QUOTED_TEXT ; unreservedWord - : unreservedWord1 | unreservedWord2 | unreservedWord3 + : unreservedWord1 | unreservedWord2 | unreservedWord3 | capacityUnit ; unreservedWord1 : TRUNCATE | FUNCTION | PROCEDURE | CASE | WHEN | CAST | TRIM | SUBSTRING | NATURAL | JOIN | FULL | INNER | OUTER | LEFT | RIGHT | CROSS | USING | IF | TRUE | FALSE | LIMIT | OFFSET - | BEGIN | COMMIT | ROLLBACK | SAVEPOINT | BOOLEAN | DOUBLE | CHARACTER + | COMMIT | ROLLBACK | SAVEPOINT | ARRAY | INTERVAL | TIME | TIMESTAMP | LOCALTIME | LOCALTIMESTAMP | YEAR | QUARTER | MONTH | WEEK | DAY | HOUR | MINUTE | SECOND | MICROSECOND | MAX 
| MIN | SUM | COUNT | AVG | ENABLE @@ -97,7 +97,7 @@ unreservedWord1 | FLASHBACK | ARCHIVE | REFRESH | QUERY | REWRITE | KEEP | SEQUENCE | INHERIT | TRANSLATE | SQL | MERGE | AT | BITMAP | CACHE | CHECKPOINT | CONSTRAINTS | CYCLE | DBTIMEZONE | ENCRYPT | DECRYPT | DEFERRABLE - | DEFERRED | EDITION | ELEMENT | END | EXCEPTIONS | FORCE | GLOBAL + | DEFERRED | EDITION | ELEMENT | EXCEPTIONS | FORCE | GLOBAL | IDENTITY | INITIALLY | INVALIDATE | JAVA | LEVELS | LOCAL | MAXVALUE | MINVALUE | NOMAXVALUE | NOMINVALUE | MINING | MODEL | NATIONAL | NEW | NOCACHE | NOCYCLE | NOORDER | NORELY | NOVALIDATE | ONLY | PRESERVE @@ -114,11 +114,11 @@ unreservedWord1 | SYSGUID | SYSBACKUP | SYSDBA | SYSDG | SYSKM | SYSOPER | DBA_RECYCLEBIN |SCHEMA | DO | DEFINER | CURRENT_USER | CASCADED | CLOSE | OPEN | NEXT | NAME | NAMES | COLLATION | REAL | TYPE | FIRST | RANK | SAMPLE | SYSTIMESTAMP | MINUTE | ANY - | LENGTH | SINGLE_C | capacityUnit | TIME_UNIT | TARGET | PUBLIC | ID | STATE | PRIORITY + | LENGTH | SINGLE_C | TIME_UNIT | TARGET | PUBLIC | ID | STATE | PRIORITY | CONSTRAINT | PRIMARY | FOREIGN | KEY | POSITION | PRECISION | FUNCTION | PROCEDURE | SPECIFICATION | CASE | WHEN | CAST | TRIM | SUBSTRING | FULL | INNER | OUTER | LEFT | RIGHT | CROSS | USING | FALSE | SAVEPOINT | BODY | CHARACTER | ARRAY | TIME | TIMEOUT | TIMESTAMP | LOCALTIME - | DAY | ENABLE | DISABLE | CALL | INSTANCE | CLOSE | NEXT | NAME | INT | NUMERIC + | DAY | ENABLE | DISABLE | CALL | INSTANCE | CLOSE | NEXT | NAME | NUMERIC | TRIGGERS | GLOBAL_NAME | BINARY | MOD | XOR | UNKNOWN | ALWAYS | CASCADE | GENERATED | PRIVILEGES | READ | WRITE | ROLE | VISIBLE | INVISIBLE | EXECUTE | USE | DEBUG | UNDER | FLASHBACK | ARCHIVE | REFRESH | QUERY | REWRITE | CHECKPOINT | ENCRYPT | DIRECTORY | CREDENTIALS | EXCEPT | NOFORCE @@ -328,7 +328,7 @@ unreservedWord3 | WIDTH_BUCKET | WRAPPED | XID | XMLAGG | XMLATTRIBUTES | XMLCAST | XMLCDATA | XMLCOLATTVAL | XMLCOMMENT | XMLCONCAT | XMLDIFF | XMLEXISTS | XMLEXISTS2 | 
XMLFOREST | XMLINDEX_REWRITE | XMLINDEX_REWRITE_IN_SELECT | XMLINDEX_SEL_IDX_TBL | XMLISNODE | XMLISVALID | XMLNAMESPACES | XMLPARSE | XMLPATCH | XMLPI | XMLQUERY | XMLROOT | XMLSERIALIZE | XMLTABLE | XMLTOOBJECT - | XMLTRANSFORM | XMLTRANSFORMBLOB | XML_DML_RWT_STMT | XPATHTABLE | XS_SYS_CONTEXT | X_DYN_PRUNE | RESULT | TABLE + | XMLTRANSFORM | XMLTRANSFORMBLOB | XML_DML_RWT_STMT | XPATHTABLE | XS_SYS_CONTEXT | X_DYN_PRUNE | RESULT | TABLE | NUMBER | CHAR ; schemaName @@ -560,7 +560,7 @@ partitionSetName ; partitionKeyValue - : INTEGER_ | dateTimeLiterals + : INTEGER_ | dateTimeLiterals | toDateFunction ; subpartitionKeyValue @@ -571,6 +571,10 @@ encryptAlgorithmName : STRING_ ; +integrityAlgorithm + : STRING_ + ; + zonemapName : identifier ; @@ -584,7 +588,7 @@ roleName ; username - : identifier + : identifier | STRING_ ; password @@ -624,7 +628,7 @@ dataTypeLength ; primaryKey - : PRIMARY? KEY + : PRIMARY KEY ; exprs @@ -660,11 +664,11 @@ notOperator booleanPrimary : booleanPrimary IS NOT? (TRUE | FALSE | UNKNOWN | NULL) - | PRIOR predicate + | (PRIOR | DISTINCT) predicate | CONNECT_BY_ROOT predicate | booleanPrimary SAFE_EQ_ predicate - | booleanPrimary comparisonOperator predicate | booleanPrimary comparisonOperator (ALL | ANY) subquery + | booleanPrimary comparisonOperator predicate | predicate ; @@ -771,7 +775,15 @@ leadLagInfo specialFunction : castFunction | charFunction | extractFunction | formatFunction | firstOrLastValueFunction | trimFunction | featureFunction - | setFunction | translateFunction + | setFunction | translateFunction | cursorFunction | toDateFunction + ; + +toDateFunction + : TO_DATE LP_ char=STRING_ (DEFAULT returnValue=STRING_ ON CONVERSION ERROR)? (COMMA_ fmt=STRING_ (COMMA_ STRING_)?)? RP_ + ; + +cursorFunction + : CURSOR subquery ; translateFunction @@ -784,15 +796,20 @@ setFunction featureFunction : featureFunctionName LP_ (schemaName DOT_)? modelName (COMMA_ featureId)? (COMMA_ numberLiterals (COMMA_ numberLiterals)?)? 
- (DESC | ASC | ABS)? miningAttributeClause (AND miningAttributeClause)? RP_ + (DESC | ASC | ABS)? cost_matrix_clause? miningAttributeClause (AND miningAttributeClause)? RP_ ; featureFunctionName - : FEATURE_COMPARE | FEATURE_DETAILS | FEATURE_SET | FEATURE_ID | FEATURE_VALUE | CLUSTER_DETAILS | CLUSTER_DISTANCE | CLUSTER_ID | CLUSTER_PROBABILITY | CLUSTER_SET | PREDICTION_PROBABILITY + : FEATURE_COMPARE | FEATURE_DETAILS | FEATURE_SET | FEATURE_ID | FEATURE_VALUE | CLUSTER_DETAILS | CLUSTER_DISTANCE | CLUSTER_ID | CLUSTER_PROBABILITY | CLUSTER_SET + | PREDICTION_PROBABILITY | PREDICTION_SET | PREDICTION_BOUNDS | PREDICTION | PREDICTION_DETAILS + ; + +cost_matrix_clause + : COST (MODEL (AUTO)?)? | LP_ literals RP_ (COMMA_ LP_ literals RP_)* VALUES LP_ LP_ literals (COMMA_ literals)* RP_ (COMMA_ LP_ literals (COMMA_ literals)* RP_) RP_ ; miningAttributeClause - : USING (ASTERISK_ | (schemaName DOT_)? tableName DOT_ ASTERISK_ | expr (AS? alias)?) + : USING (ASTERISK_ | ((schemaName DOT_)? tableName DOT_ ASTERISK_ | expr (AS? alias)?) (COMMA_ ((schemaName DOT_)? tableName DOT_ ASTERISK_ | expr (AS? alias)?))*) ; trimFunction @@ -992,7 +1009,7 @@ hashSubpartitionQuantity ; odciParameters - : identifier + : STRING_ ; databaseName @@ -1004,7 +1021,7 @@ locationName ; fileName - : STRING_ + : identifier | STRING_ ; asmFileName @@ -1116,7 +1133,7 @@ logminerSessionName ; tablespaceGroupName - : identifier + : identifier | STRING_ ; copyName @@ -1775,7 +1792,7 @@ searchString ; attributeValue - : identifier + : STRING_ ; joinGroupName @@ -1795,7 +1812,7 @@ matchString ; parameterType - : identifier + : (owner DOT_)? 
identifier ; returnType @@ -1924,7 +1941,9 @@ datetimeExpr ; xmlFunction - : xmlAggFunction + : xmlElementFunction + | xmlCdataFunction + | xmlAggFunction | xmlColattvalFunction | xmlExistsFunction | xmlForestFunction @@ -1942,6 +1961,22 @@ xmlFunction | specifiedFunctionName = XMLCOMMENT LP_ stringLiterals RP_ ; +xmlElementFunction + : XMLELEMENT LP_ identifier (COMMA_ xmlAttributes)? (COMMA_ exprWithAlias)* RP_ + ; + +exprWithAlias + : expr (AS alias)? + ; + +xmlAttributes + : XMLATTRIBUTES LP_ exprWithAlias (COMMA_ exprWithAlias)* RP_ + ; + +xmlCdataFunction + : XMLCDATA LP_ stringLiterals RP_ + ; + xmlAggFunction : XMLAGG LP_ expr orderByClause? RP_ ; @@ -2024,41 +2059,6 @@ multisetOperator | UNION ; -columnDefinition - : columnName dataType SORT? visibleClause (defaultNullClause expr | identityClause)? (ENCRYPT encryptionSpecification)? (inlineConstraint+ | inlineRefConstraint)? - ; - -visibleClause - : (VISIBLE | INVISIBLE)? - ; - -defaultNullClause - : DEFAULT (ON NULL)? - ; - -identityClause - : GENERATED (ALWAYS | BY DEFAULT (ON NULL)?) AS IDENTITY identifyOptions - ; - -identifyOptions - : LP_? (identityOption+)? RP_? - ; - -identityOption - : START WITH (INTEGER_ | LIMIT VALUE) - | INCREMENT BY INTEGER_ - | MAXVALUE INTEGER_ - | NOMAXVALUE - | MINVALUE INTEGER_ - | NOMINVALUE - | CYCLE - | NOCYCLE - | CACHE INTEGER_ - | NOCACHE - | ORDER - | NOORDER - ; - -encryptionSpecification - : (USING STRING_)? (IDENTIFIED BY STRING_)? STRING_? (NO? SALT)? 
+superview + : identifier ; diff --git a/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DCLStatement.g4 b/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DCLStatement.g4 index 4651eaa5e6433..6fae13fe00793 100644 --- a/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DCLStatement.g4 +++ b/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DCLStatement.g4 @@ -118,6 +118,7 @@ systemPrivilege | usersSystemPrivilege | viewsSystemPrivilege | miscellaneousSystemPrivilege + | ruleSystemPrivilege ; systemPrivilegeOperation @@ -288,6 +289,14 @@ usersSystemPrivilege : systemPrivilegeOperation USER ; +ruleSystemPrivilege + : createOperation* TO username + ; + +createOperation + : systemPrivilegeOperation (RULE SET? | EVALUATION CONTEXT) COMMA_? + ; + viewsSystemPrivilege : (systemPrivilegeOperation | (UNDER | MERGE) ANY) VIEW ; diff --git a/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DDLStatement.g4 b/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DDLStatement.g4 index 3dfedb612322e..568a20d1a74e1 100644 --- a/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DDLStatement.g4 +++ b/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DDLStatement.g4 @@ -19,6 +19,15 @@ grammar DDLStatement; import BaseRule, DCLStatement, DMLStatement; +createView + : CREATE (OR REPLACE)? (NO? FORCE)? (EDITIONING | EDITIONABLE EDITIONING? | NONEDITIONABLE)? VIEW viewName + ( SHARING EQ_ (METADATA | DATA | EXTENDED DATA | NONE))? + ( LP_ (alias (VISIBLE | INVISIBLE)? inlineConstraint* (COMMA_ alias (VISIBLE | INVISIBLE)? inlineConstraint*)* + | outOfLineConstraint) RP_ | objectViewClause | xmlTypeViewClause)? + ( DEFAULT COLLATION collationName)? (BEQUEATH (CURRENT_USER | DEFINER))? AS select subqueryRestrictionClause? + ( CONTAINER_MAP | CONTAINERS_DEFAULT)? 
+ ; + createTable : CREATE createTableSpecification TABLE tableName createSharingClause createDefinitionClause createMemOptimizeClause createParentClause ; @@ -47,6 +56,11 @@ objectTypeDef : OBJECT LP_ dataTypeDefinition (COMMA_ dataTypeDefinition)* RP_ finalClause? instantiableClause? persistableClause? ; +objectViewClause + : OF typeName (WITH OBJECT (IDENTIFIER | ID) (DEFAULT | LP_ attribute (COMMA_ attribute)* RP_) | UNDER (schemaName DOT_)? superview) + ( LP_ outOfLineConstraint | attribute inlineConstraint* (COMMA_ outOfLineConstraint | attribute inlineConstraint*)* RP_)? + ; + finalClause : NOT? FINAL ; @@ -80,7 +94,7 @@ objectSubTypeDef ; alterTable - : ALTER TABLE tableName memOptimizeClause alterDefinitionClause enableDisableClauses + : ALTER TABLE tableName memOptimizeClause alterDefinitionClause enableDisableClauses? ; alterIndex @@ -178,7 +192,11 @@ createSharingClause ; createDefinitionClause - : createRelationalTableClause | createObjectTableClause | createXMLTypeTableClause + : createRelationalTableClause | createObjectTableClause | createTableAsSelectClause | createXMLTypeTableClause + ; + +createTableAsSelectClause + : AS selectSubquery ; createXMLTypeTableClause @@ -211,6 +229,15 @@ xmlTypeVirtualColumnsClause : VIRTUAL COLUMNS LP_ (columnName AS LP_ expr RP_ (COMMA_ columnName AS LP_ expr RP_)+) RP_ ; +xmlTypeViewClause + : OF XMLTYPE xmlSchemaSpec? WITH OBJECT (IDENTIFIER | ID) (DEFAULT | LP_ expr (COMMA_ expr)* RL_) + ; + +xmlSchemaSpec + : (XMLSCHEMA xmlSchemaURLName)? ELEMENT (elementName | xmlSchemaURLName POUND_ elementName) + ( STORE ALL VARRAYS AS (LOBS | TABLES))? ((ALLOW | DISALLOW) NONSCHEMA)? ((ALLOW | DISALLOW) ANYSCHEMA)? + ; + oidClause : OBJECT IDENTIFIER IS (SYSTEM GENERATED | PRIMARY KEY) ; @@ -220,7 +247,21 @@ oidIndexClause ; createRelationalTableClause - : (LP_ relationalProperties RP_) collationClause? commitClause? physicalProperties? tableProperties? 
+ : (LP_ relationalProperties RP_) + | (LP_ relationalProperties RP_) collationClause + | (LP_ relationalProperties RP_) commitClause + | (LP_ relationalProperties RP_) physicalProperties + | (LP_ relationalProperties RP_) tableProperties + | (LP_ relationalProperties RP_) collationClause commitClause + | (LP_ relationalProperties RP_) collationClause physicalProperties + | (LP_ relationalProperties RP_) collationClause tableProperties + | (LP_ relationalProperties RP_) collationClause commitClause physicalProperties + | (LP_ relationalProperties RP_) collationClause commitClause tableProperties + | (LP_ relationalProperties RP_) collationClause commitClause physicalProperties tableProperties + | (LP_ relationalProperties RP_) commitClause physicalProperties + | (LP_ relationalProperties RP_) commitClause tableProperties + | (LP_ relationalProperties RP_) commitClause physicalProperties tableProperties + | (LP_ relationalProperties RP_) physicalProperties tableProperties ; createMemOptimizeClause @@ -245,6 +286,47 @@ relationalProperty : columnDefinition | virtualColumnDefinition | outOfLineConstraint | outOfLineRefConstraint ; +columnDefinition + : columnName REF? dataType SORT? visibleClause (defaultNullClause expr | identityClause)? (ENCRYPT encryptionSpecification)? (inlineConstraint+ | inlineRefConstraint)? + | REF LP_ columnName RP_ WITH ROWID + | SCOPE FOR LP_ columnName RP_ IS identifier + ; + +visibleClause + : (VISIBLE | INVISIBLE)? + ; + +defaultNullClause + : DEFAULT (ON NULL)? + ; + +identityClause + : GENERATED (ALWAYS | BY DEFAULT (ON NULL)?) AS IDENTITY identifyOptions + ; + +identifyOptions + : LP_? (identityOption+)? RP_? + ; + +identityOption + : START WITH (INTEGER_ | LIMIT VALUE) + | INCREMENT BY INTEGER_ + | MAXVALUE INTEGER_ + | NOMAXVALUE + | MINVALUE INTEGER_ + | NOMINVALUE + | CYCLE + | NOCYCLE + | CACHE INTEGER_ + | NOCACHE + | ORDER + | NOORDER + ; + +encryptionSpecification + : (USING STRING_)? (IDENTIFIED BY STRING_)? 
(integrityAlgorithm? (NO? SALT)? | (NO? SALT)? integrityAlgorithm?) + ; + inlineConstraint : (CONSTRAINT ignoredIdentifier)? (NOT? NULL | UNIQUE | primaryKey | referencesClause | CHECK LP_ expr RP_) constraintState? ; @@ -254,21 +336,7 @@ referencesClause ; constraintState - : notDeferrable - | initiallyClause - | RELY | NORELY - | usingIndexClause - | ENABLE | DISABLE - | VALIDATE | NOVALIDATE - | exceptionsClause - ; - -notDeferrable - : NOT? DEFERRABLE - ; - -initiallyClause - : INITIALLY (IMMEDIATE | DEFERRED) + : (NOT? DEFERRABLE (INITIALLY (DEFERRED | IMMEDIATE))? | INITIALLY (DEFERRED | IMMEDIATE) (NOT? DEFERRABLE)?)? (RELY | NORELY)? usingIndexClause? (ENABLE | DISABLE)? (VALIDATE | NOVALIDATE)? exceptionsClause? ; exceptionsClause @@ -353,13 +421,37 @@ tableAlias alterDefinitionClause : (alterTableProperties | columnClauses + | moveTableClause | constraintClauses | alterTablePartitioning ((DEFERRED| IMMEDIATE) INVALIDATION)? | alterExternalTable)? ; alterTableProperties - : renameTableSpecification | REKEY encryptionSpecification | supplementalTableLogging + : ((physicalAttributesClause + | loggingClause + | tableCompression + | inmemoryTableClause + | ilmClause + | supplementalTableLogging + | allocateExtentClause + | deallocateUnusedClause + | (CACHE | NOCACHE) + | upgradeTableClause + | recordsPerBlockClause + | parallelClause + | rowMovementClause + | logicalReplicationClause + | flashbackArchiveClause)+ | renameTableSpecification)? alterIotClauses? alterXMLSchemaClause? + | shrinkClause + | READ ONLY + | READ WRITE + | REKEY encryptionSpecification + | DEFAULT COLLATION collationName + | NO? ROW ARCHIVAL + | ADD attributeClusteringClause + | MODIFY CLUSTERING clusteringWhen? zonemapClause? 
+ | DROP CLUSTERING ; renameTableSpecification @@ -376,7 +468,7 @@ dropSynonym ; columnClauses - : operateColumnClause+ | renameColumnClause + : operateColumnClause+ | renameColumnClause | modifyCollectionRetrieval ; operateColumnClause @@ -447,16 +539,25 @@ renameColumnClause : RENAME COLUMN columnName TO columnName ; +modifyCollectionRetrieval + : MODIFY NESTED TABLE tableName RETURN AS (LOCATOR | VALUE) + ; + +moveTableClause + : MOVE filterCondition? ONLINE? segmentAttributesClause? tableCompression? indexOrgTableClause? ((lobStorageClause | varrayColProperties)+)? parallelClause? allowDisallowClustering? + ( UPDATE INDEXES (LP_ indexName (segmentAttributesClause | updateIndexPartition) RP_ (COMMA_ indexName (segmentAttributesClause | updateIndexPartition))*)?)? + ; + constraintClauses : addConstraintSpecification | modifyConstraintClause | renameConstraintClause | dropConstraintClause+ ; addConstraintSpecification - : ADD (outOfLineConstraint+ | outOfLineRefConstraint) + : ADD (LP_? outOfLineConstraint (COMMA_ outOfLineConstraint)* RP_? | outOfLineRefConstraint) ; modifyConstraintClause - : MODIFY constraintOption constraintState+ CASCADE? + : MODIFY constraintOption constraintState CASCADE? ; constraintWithName @@ -591,11 +692,11 @@ memOptimizeWriteClause ; enableDisableClauses - : (enableDisableClause | enableDisableOthers)? + : (enableDisableClause | enableDisableOthers)+ ; enableDisableClause - : (ENABLE | DISABLE) (VALIDATE |NO VALIDATE)? ((UNIQUE columnName (COMMA_ columnName)*) | PRIMARY KEY | constraintWithName) usingIndexClause? exceptionsClause? CASCADE? ((KEEP | DROP) INDEX)? + : (ENABLE | DISABLE) (VALIDATE | NOVALIDATE)? ((UNIQUE columnName (COMMA_ columnName)*) | PRIMARY KEY | constraintWithName) usingIndexClause? exceptionsClause? CASCADE? ((KEEP | DROP) INDEX)? ; enableDisableOthers @@ -615,7 +716,7 @@ rebuildClause ; parallelClause - : NOPARALLEL | PARALLEL NUMBER_? + : NOPARALLEL | PARALLEL (INTEGER_ | LP_ DEGREE INTEGER_ RP_)? 
; usableSpecification @@ -647,9 +748,7 @@ commitClause ; physicalProperties - : deferredSegmentCreation? segmentAttributesClause? tableCompression? inmemoryTableClause? ilmClause? - | deferredSegmentCreation? (organizationClause?|externalPartitionClause?) - | clusterClause + : (deferredSegmentCreation? segmentAttributesClause tableCompression? inmemoryTableClause? ilmClause? | deferredSegmentCreation? (organizationClause | externalPartitionClause) | clusterClause) ; deferredSegmentCreation @@ -854,7 +953,7 @@ rangePartitions ; rangeValuesClause - : VALUES LESS THAN LP_? (numberLiterals | MAXVALUE) (COMMA_ (numberLiterals | MAXVALUE))* RP_? + : VALUES LESS THAN LP_? (literals | MAXVALUE | toDateFunction) (COMMA_ (literals | MAXVALUE | toDateFunction))* RP_? ; tablePartitionDescription @@ -881,10 +980,8 @@ varrayColProperties ; nestedTableColProperties - : NESTED TABLE - (nestedItem | COLUMN_VALUE) substitutableColumnClause? (LOCAL | GLOBAL)? STORE AS storageTable - LP_ (LP_ objectProperties RP_ | physicalProperties | columnProperties) RP_ - (RETURN AS? (LOCATOR | VALUE))? + : NESTED TABLE (nestedItem | COLUMN_VALUE) substitutableColumnClause? (LOCAL | GLOBAL)? STORE AS storageTable + ( LP_ (LP_ objectProperties RP_ | physicalProperties | columnProperties)+ RP_)? (RETURN AS? (LOCATOR | VALUE))? ; lobStorageClause @@ -1134,6 +1231,10 @@ rowMovementClause : (ENABLE | DISABLE) ROW MOVEMENT ; +logicalReplicationClause + : (ENABLE | DISABLE) LOGICAL REPLICATION + ; + flashbackArchiveClause : FLASHBACK ARCHIVE flashbackArchiveName? 
| NO FLASHBACK ARCHIVE ; @@ -1188,11 +1289,26 @@ alterSynonym ; alterTablePartitioning - : modifyTablePartition + : modifyTableDefaultAttrs + | setSubpartitionTemplate + | modifyTablePartition + | modifyTableSubpartition | moveTablePartition + | moveTableSubPartition | addTablePartition | coalesceTablePartition | dropTablePartition + | renamePartitionSubpart + | alterIntervalPartitioning + ; + +modifyTableDefaultAttrs + : MODIFY DEFAULT ATTRIBUTES (FOR partitionExtendedName)? (DEFAULT DIRECTORY directoryName)? deferredSegmentCreation? readOnlyClause? indexingClause? segmentAttributesClause? alterOverflowClause? + ( ((LOB LP_ lobItem RP_ | VARRAY varrayType) LP_ lobParameters RP_)+)? + ; + +setSubpartitionTemplate + : SET SUBPARTITION TEMPLATE (LP_ (rangeSubpartitionDesc (COMMA_ rangeSubpartitionDesc)* | listSubpartitionDesc (COMMA_ listSubpartitionDesc)* | individualHashSubparts (COMMA_ individualHashSubparts)*)? RP_ | hashSubpartitionQuantity) ; modifyTablePartition @@ -1220,6 +1336,16 @@ modifyListPartition | coalesceTableSubpartition | REBUILD? UNUSABLE LOCAL INDEXES | readOnlyClause | indexingClause) ; +modifyTableSubpartition + : MODIFY subpartitionExtendedName (allocateExtentClause + | deallocateUnusedClause | shrinkClause | ((LOB lobItem | VARRAY varrayType) LP_ modifylobParameters RP_)+ | REBUILD? UNUSABLE LOCAL INDEXES + | (ADD | DROP) VALUES LP_ listValues RP_ | readOnlyClause | indexingClause) + ; + +subpartitionExtendedName + : SUBPARTITION (subpartitionName | FOR LP_ subpartitionKeyValue (COMMA_ subpartitionKeyValue)* RP_) + ; + partitionExtendedName : PARTITION partitionName | PARTITION FOR LP_ partitionKeyValue (COMMA_ partitionKeyValue)* RP_ @@ -1243,7 +1369,7 @@ addListSubpartition ; coalesceTableSubpartition - : COALESCE SUBPARTITION subpartitionName updateIndexClauses? parallelClause? allowDisallowClustering? + : COALESCE SUBPARTITION subpartitionName? updateIndexClauses? parallelClause? allowDisallowClustering? 
; allowDisallowClustering @@ -1256,7 +1382,7 @@ alterMappingTableClauses alterView : ALTER VIEW viewName ( - | ADD outOfLineConstraint + | ADD LP_? outOfLineConstraint RP_? | MODIFY CONSTRAINT constraintName (RELY | NORELY) | DROP (CONSTRAINT constraintName | PRIMARY KEY | UNIQUE columnNames) | COMPILE @@ -1277,6 +1403,14 @@ partitionSpec : PARTITION partitionName? tablePartitionDescription? ; +upgradeTableClause + : UPGRADE (NOT? INCLUDING DATA)? columnProperties? + ; + +recordsPerBlockClause + : (MINIMIZE | NOMINIMIZE) RECORDS_PER_BLOCK + ; + partitionAttributes : (physicalAttributesClause | loggingClause | allocateExtentClause | deallocateUnusedClause | shrinkClause)* (OVERFLOW (physicalAttributesClause | loggingClause | allocateExtentClause | deallocateUnusedClause)*)? @@ -1291,6 +1425,10 @@ moveTablePartition : MOVE partitionExtendedName (MAPPING TABLE)? tablePartitionDescription? filterCondition? updateAllIndexesClause? parallelClause? allowDisallowClustering? ONLINE? ; +moveTableSubPartition + : MOVE subpartitionExtendedName indexingClause? partitioningStorageClause? updateIndexClauses? filterCondition? parallelClause? allowDisallowClustering? ONLINE? + ; + filterCondition : INCLUDING ROWS whereClause ; @@ -1304,12 +1442,10 @@ coalesceTablePartition ; addTablePartition - : ADD ((PARTITION partitionName? addRangePartitionClause (COMMA_ PARTITION partitionName? addRangePartitionClause)*) - | (PARTITION partitionName? addListPartitionClause (COMMA_ PARTITION partitionName? addListPartitionClause)*) - | (PARTITION partitionName? addSystemPartitionClause (COMMA_ PARTITION partitionName? addSystemPartitionClause)*) - (BEFORE (partitionName | NUMBER_))? - | (PARTITION partitionName? addHashPartitionClause) - ) dependentTablesClause? + : ADD (PARTITION partitionName? addRangePartitionClause (COMMA_ PARTITION partitionName? addRangePartitionClause)* + | PARTITION partitionName? addListPartitionClause (COMMA_ PARTITION partitionName? 
addListPartitionClause)* + | PARTITION partitionName? addSystemPartitionClause (COMMA_ PARTITION partitionName? addSystemPartitionClause)* (BEFORE? (partitionName | NUMBER_)?) + | PARTITION partitionName? addHashPartitionClause) dependentTablesClause? ; addRangePartitionClause @@ -1340,6 +1476,14 @@ dropTablePartition : DROP partitionExtendedNames (updateIndexClauses parallelClause?)? ; +renamePartitionSubpart + : RENAME (partitionExtendedName | subpartitionExtendedName) TO newName + ; + +alterIntervalPartitioning + : SET INTERVAL LP_ expr? RP_ | SET STORE IN LP_ tablespaceName (COMMA_ tablespaceName)* RP_ + ; + partitionExtendedNames : (PARTITION | PARTITIONS) (partitionName | partitionForClauses) (COMMA_ (partitionName | partitionForClauses))* ; @@ -2049,7 +2193,7 @@ scopeClause analyze : (ANALYZE ((TABLE tableName| INDEX indexName) partitionExtensionClause? | CLUSTER clusterName)) - (validationClauses | LIST CHAINED ROWS intoClause? | DELETE SYSTEM? STATISTICS) + (validationClauses | LIST CHAINED ROWS intoTableClause? | DELETE SYSTEM? STATISTICS) ; partitionExtensionClause @@ -2059,10 +2203,10 @@ partitionExtensionClause validationClauses : VALIDATE REF UPDATE (SET DANGLING TO NULL)? - | VALIDATE STRUCTURE (CASCADE (FAST | COMPLETE (OFFLINE | ONLINE) intoClause?))? + | VALIDATE STRUCTURE (CASCADE (FAST | COMPLETE? (OFFLINE | ONLINE) intoTableClause?)?)? ; -intoClause +intoTableClause : INTO tableName ; @@ -2110,6 +2254,53 @@ disassociateStatistics ; audit + : auditTraditional | auditUnified + ; + +auditTraditional + : AUDIT (auditOperationClause (auditingByClause | IN SESSION CURRENT)? | auditSchemaObjectClause | NETWORK | DIRECT_PATH LOAD auditingByClause?) + ( BY (SESSION | ACCESS))? (WHENEVER NOT? SUCCESSFUL)? (CONTAINER EQ_ (CURRENT | ALL))? 
+ ; + +auditingByClause + : BY username (COMMA_ username)* + ; + +auditOperationClause + : (sqlStatementShortcut | ALL | ALL STATEMENTS) (COMMA_ sqlStatementShortcut | ALL | ALL STATEMENTS)* + | (systemPrivilege | ALL PRIVILEGES) (COMMA_ systemPrivilege | ALL PRIVILEGES) + ; + +sqlStatementShortcut + : ALTER SYSTEM | CLUSTER | CREATE CLUSTER | ALTER CLUSTER | DROP CLUSTER | TRUNCATE CLUSTER | CONTEXT | CREATE CONTEXT | DROP CONTEXT + | DATABASE LINK | CREATE DATABASE LINK | ALTER DATABASE LINK | DROP DATABASE LINK | DIMENSION | CREATE DIMENSION | ALTER DIMENSION | DROP DIMENSION + | DIRECTORY | CREATE DIRECTORY | DROP DIRECTORY | INDEX | CREATE INDEX | ALTER INDEX | ANALYZE INDEX | DROP INDEX + | MATERIALIZED VIEW | CREATE MATERIALIZED VIEW | ALTER MATERIALIZED VIEW | DROP MATERIALIZED VIEW | NOT EXISTS | OUTLINE | CREATE OUTLINE | ALTER OUTLINE | DROP OUTLINE + | PLUGGABLE DATABASE | CREATE PLUGGABLE DATABASE | ALTER PLUGGABLE DATABASE | DROP PLUGGABLE DATABASE + | PROCEDURE | CREATE FUNCTION | CREATE LIBRARY | CREATE PACKAGE | CREATE PACKAGE BODY | CREATE PROCEDURE | DROP FUNCTION | DROP LIBRARY | DROP PACKAGE | DROP PROCEDURE + | PROFILE | CREATE PROFILE | ALTER PROFILE | DROP PROFILE | PUBLIC DATABASE LINK | CREATE PUBLIC DATABASE LINK | ALTER PUBLIC DATABASE LINK | DROP PUBLIC DATABASE LINK + | PUBLIC SYNONYM | CREATE PUBLIC SYNONYM | DROP PUBLIC SYNONYM | ROLE | CREATE ROLE | ALTER ROLE | DROP ROLE | SET ROLE + | ROLLBACK SEGMENT | CREATE ROLLBACK SEGMENT | ALTER ROLLBACK SEGMENT | DROP ROLLBACK SEGMENT | SEQUENCE | CREATE SEQUENCE | DROP SEQUENCE | SESSION | SYNONYM | CREATE SYNONYM | DROP SYNONYM + | SYSTEM AUDIT | SYSTEM GRANT | TABLE | CREATE TABLE | DROP TABLE | TRUNCATE TABLE | TABLESPACE | CREATE TABLESPACE | ALTER TABLESPACE | DROP TABLESPACE + | TRIGGER | CREATE TRIGGER | ALTER TRIGGER | DROP TRIGGER | ALTER TABLE | TYPE | CREATE TYPE | CREATE TYPE BODY | ALTER TYPE | DROP TYPE | DROP TYPE BODY + | USER | CREATE USER | ALTER USER | DROP USER | VIEW | 
CREATE VIEW | DROP VIEW + | ALTER SEQUENCE | COMMENT TABLE | DELETE TABLE | EXECUTE DIRECTORY | EXECUTE PROCEDURE | GRANT DIRECTORY | GRANT PROCEDURE | GRANT SEQUENCE | GRANT TABLE | GRANT TYPE + | INSERT TABLE | LOCK TABLE | READ DIRECTORY | SELECT SEQUENCE | SELECT TABLE | UPDATE TABLE | WRITE DIRECTORY + ; + +auditSchemaObjectClause + : (sqlOperation (COMMA_ sqlOperation)* | ALL) auditingOnClause + ; + +auditingOnClause + : ON (DEFAULT | objectName | DIRECTORY directoryName | MINING MODEL modelName | SQL TRANSLATION PROFILE profileName) + ; + +sqlOperation + : ALTER | AUDIT | COMMENT | DELETE | FLASHBACK | GRANT | INDEX | INSERT | LOCK | RENAME | SELECT | UPDATE | EXECUTE | READ + ; + +auditUnified : AUDIT (auditPolicyClause | contextClause) ; @@ -2438,29 +2629,20 @@ alterAttributeDimension ; createSequence - : CREATE SEQUENCE (schemaName DOT_)? sequenceName (SHARING EQ_ (METADATA | DATA | NONE))? createSequenceClause+ + : CREATE SEQUENCE (schemaName DOT_)? sequenceName (SHARING EQ_ (METADATA | DATA | NONE))? 
createSequenceClause* ; createSequenceClause : (INCREMENT BY | START WITH) INTEGER_ - | MAXVALUE INTEGER_ - | NOMAXVALUE - | MINVALUE INTEGER_ - | NOMINVALUE - | CYCLE - | NOCYCLE - | CACHE INTEGER_ - | NOCACHE - | ORDER - | NOORDER - | KEEP - | NOKEEP - | SCALE (EXTEND | NOEXTEND) - | NOSCALE - | SHARD (EXTEND | NOEXTEND) - | NOSHARD - | SESSION - | GLOBAL + | (MAXVALUE INTEGER_ | NOMAXVALUE) + | (MINVALUE INTEGER_ | NOMINVALUE) + | (CYCLE | NOCYCLE) + | (CACHE INTEGER_ | NOCACHE) + | (ORDER | NOORDER) + | (KEEP | NOKEEP) + | (SCALE (EXTEND | NOEXTEND) | NOSCALE) + | (SHARD (EXTEND | NOEXTEND) | NOSHARD) + | (SESSION | GLOBAL) ; alterSequence @@ -2905,10 +3087,7 @@ alterOperator ; addBindingClause - : ADD BINDING LP_ parameterType (COMMA_ parameterType)* RP_ - RETURN LP_ returnType RP_ implementationClause usingFunctionClause - | ADD BINDING LP_ parameterType (COMMA_ parameterType)* RP_ - RETURN LP_ returnType RP_ usingFunctionClause + : ADD BINDING LP_ parameterType (COMMA_ parameterType)* RP_ RETURN (LP_ returnType RP_ | NUMBER) implementationClause? usingFunctionClause ; implementationClause @@ -3316,12 +3495,16 @@ modifylobParameters | deallocateUnusedClause ; - alterIotClauses +alterIotClauses : indexOrgTableClause | alterOverflowClause | COALESCE ; +alterXMLSchemaClause + : ALLOW (ANYSCHEMA | NONSCHEMA) | DISALLOW NONSCHEMA + ; + alterOverflowClause : addOverflowClause | overflowClause ; @@ -3331,7 +3514,7 @@ overflowClause ; addOverflowClause - : ADD OVERFLOW segmentAttributesClause? LP_ PARTITION segmentAttributesClause? (COMMA_ PARTITION segmentAttributesClause?)* RP_ + : ADD OVERFLOW segmentAttributesClause? (LP_ PARTITION segmentAttributesClause? (COMMA_ PARTITION segmentAttributesClause?)* RP_)? ; scopedTableRefConstraint @@ -3532,7 +3715,7 @@ pdbUnplugEncrypt pdbSettingsClauses : pdbName? 
pdbSettingClause - | CONTAINERS containersClause + | CONTAINERS (DEFAULT TARGET EQ_ ((LP_ containerName RP_) | NONE) | HOST EQ_ hostName | PORT EQ_ NUMBER_) ; pdbSettingClause @@ -3551,12 +3734,6 @@ pdbSettingClause | SET CONTAINER_MAP EQ_ mapObject ; -containersClause - : DEFAULT TARGET EQ_ ((LP_ containerName RP_) | NONE) - | HOST EQ_ hostName - | PORT EQ_ NUMBER_ - ; - pdbStorageClause : STORAGE ((LP_ storageMaxSizeClauses+ RP_) | UNLIMITED) ; diff --git a/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DMLStatement.g4 b/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DMLStatement.g4 index 1bf16972e2461..c511e5458f6d1 100644 --- a/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DMLStatement.g4 +++ b/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/DMLStatement.g4 @@ -136,7 +136,7 @@ select ; selectSubquery - : (queryBlock | selectCombineClause | parenthesisSelectSubquery) orderByClause? rowLimitingClause + : (queryBlock | selectCombineClause | parenthesisSelectSubquery) pivotClause? orderByClause? rowLimitingClause ; selectCombineClause @@ -443,15 +443,7 @@ fromClauseOption | LP_ joinClause RP_ | selectTableReference | inlineAnalyticView - | xmlTable - ; - -xmlTable - : xmlTableFunction xmlTableFunctionAlias? - ; - -xmlTableFunctionAlias - : alias + | (regularFunction | xmlTableFunction) alias? ; selectTableReference @@ -486,8 +478,7 @@ queryTableExprSampleClause : (queryTableExprTableClause | queryTableExprViewClause | hierarchyName - | queryTableExprAnalyticClause - | (owner DOT_)? inlineExternalTable) sampleClause? + | queryTableExprAnalyticClause) sampleClause? ; queryTableExprTableClause @@ -502,10 +493,6 @@ queryTableExprAnalyticClause : analyticViewName (HIERARCHIES LP_ ((attrDim DOT_)? hierarchyName (COMMA_ (attrDim DOT_)? hierarchyName)*)? RP_)? 
; -inlineExternalTable - : EXTERNAL LP_ LP_ columnDefinition (COMMA_ columnDefinition)* RP_ inlineExternalTableProperties RP_ - ; - inlineExternalTableProperties : (TYPE accessDriverType)? externalTableDataProperties (REJECT LIMIT (INTEGER_ | UNLIMITED))? ; @@ -526,7 +513,7 @@ modifyExternalTableProperties pivotClause : PIVOT XML? - LP_ aggregationFunctionName LP_ expr RP_ (AS? alias)? (COMMA_ aggregationFunctionName LP_ expr RP_ (AS? alias)?)* pivotForClause pivotInClause RP_ + LP_ aggregationFunction (AS? alias)? (COMMA_ aggregationFunction (AS? alias)?)* pivotForClause pivotInClause RP_ ; pivotForClause @@ -534,17 +521,25 @@ pivotForClause ; pivotInClause - : IN LP_ ((expr | exprList) (AS? alias)? (COMMA_ (expr | exprList) (AS? alias)?)* + : IN LP_ (pivotInClauseExpr (COMMA_ pivotInClauseExpr)* | selectSubquery | ANY (COMMA_ ANY)*) RP_ ; +pivotInClauseExpr + : (expr | exprList) (AS? alias)? + ; + unpivotClause : UNPIVOT ((INCLUDE | EXCLUDE) NULLS)? LP_ (columnName | columnNames) pivotForClause unpivotInClause RP_ ; unpivotInClause - : IN LP_ (columnName | columnNames) (AS (literals | LP_ literals (COMMA_ literals)* RP_))? (COMMA_ (columnName | columnNames) (AS (literals | LP_ literals (COMMA_ literals)* RP_))?)* RP_ + : IN LP_ unpivotInClauseExpr (COMMA_ unpivotInClauseExpr)* RP_ + ; + +unpivotInClauseExpr + : (columnName | columnNames) (AS (literals | LP_ literals (COMMA_ literals)* RP_))? ; sampleClause @@ -719,7 +714,7 @@ hint ; intoClause - : INTO (tableName | viewName) alias? + : INTO (tableName | viewName | subquery) alias? 
; usingClause diff --git a/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/Literals.g4 b/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/Literals.g4 index ea52447e7c79f..9027672dcd381 100644 --- a/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/Literals.g4 +++ b/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/Literals.g4 @@ -28,8 +28,7 @@ IDENTIFIER_ ; STRING_ - : SINGLE_QUOTED_TEXT - | DOUBLE_QUOTED_TEXT + : (N | U)? SINGLE_QUOTED_TEXT ; SINGLE_QUOTED_TEXT diff --git a/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/OracleKeyword.g4 b/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/OracleKeyword.g4 index 22f1943fb9265..f9d6f4ca486f8 100644 --- a/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/OracleKeyword.g4 +++ b/parser/sql/dialect/oracle/src/main/antlr4/imports/oracle/OracleKeyword.g4 @@ -19,6 +19,10 @@ lexer grammar OracleKeyword; import Alphabet; +BEQUEATH + : B E Q U E A T H + ; + BINARY : B I N A R Y ; @@ -59,6 +63,10 @@ CHECK : C H E C K ; +CONVERSION + : C O N V E R S I O N + ; + GENERATED : G E N E R A T E D ; @@ -219,6 +227,10 @@ DIRECTORY : D I R E C T O R Y ; +DIRECT_PATH + : D I R E C T UL_ P A T H + ; + CREDENTIALS : C R E D E N T I A L S ; diff --git a/parser/sql/dialect/oracle/src/main/antlr4/org/apache/shardingsphere/sql/parser/autogen/OracleStatement.g4 b/parser/sql/dialect/oracle/src/main/antlr4/org/apache/shardingsphere/sql/parser/autogen/OracleStatement.g4 index 1367144ca15a9..92800a99bca57 100644 --- a/parser/sql/dialect/oracle/src/main/antlr4/org/apache/shardingsphere/sql/parser/autogen/OracleStatement.g4 +++ b/parser/sql/dialect/oracle/src/main/antlr4/org/apache/shardingsphere/sql/parser/autogen/OracleStatement.g4 @@ -17,13 +17,14 @@ grammar OracleStatement; -import DMLStatement, TCLStatement, DCLStatement, DALStatement, PLSQL; +import DMLStatement, DDLStatement, TCLStatement, DCLStatement, DALStatement, PLSQL; execute : (select | insert | update | delete + | createView | 
createTable | alterTable | dropTable diff --git a/parser/sql/dialect/oracle/src/main/java/org/apache/shardingsphere/sql/parser/oracle/visitor/statement/OracleStatementVisitor.java b/parser/sql/dialect/oracle/src/main/java/org/apache/shardingsphere/sql/parser/oracle/visitor/statement/OracleStatementVisitor.java index 4859ba3c92cba..4854c9107f0c5 100644 --- a/parser/sql/dialect/oracle/src/main/java/org/apache/shardingsphere/sql/parser/oracle/visitor/statement/OracleStatementVisitor.java +++ b/parser/sql/dialect/oracle/src/main/java/org/apache/shardingsphere/sql/parser/oracle/visitor/statement/OracleStatementVisitor.java @@ -25,7 +25,6 @@ import org.apache.shardingsphere.infra.database.core.metadata.database.enums.NullsOrderType; import org.apache.shardingsphere.sql.parser.api.ASTNode; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementBaseVisitor; -import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.AggregationFunctionContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.AnalyticFunctionContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.BitExprContext; @@ -33,16 +32,21 @@ import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.BooleanLiteralsContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.BooleanPrimaryContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.CastFunctionContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.CaseExpressionContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.CaseWhenContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.CharFunctionContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ColumnNameContext; import 
org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ColumnNamesContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ConstraintNameContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.CursorFunctionContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.DataTypeContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.DataTypeLengthContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.DataTypeNameContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.DateTimeLiteralsContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.DatetimeExprContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ExprContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ExtractFunctionContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ExprListContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.FeatureFunctionContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.FirstOrLastValueFunctionContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.FormatFunctionContext; @@ -67,18 +71,23 @@ import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.PrivateExprOfDbContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.RegularFunctionContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.SchemaNameContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.SetFunctionContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.SimpleExprContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.SpecialFunctionContext; import 
org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.StringLiteralsContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.SynonymNameContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.TableNameContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.TableNamesContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ToDateFunctionContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.TrimFunctionContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.TranslateFunctionContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.TypeNameContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.UnreservedWordContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ViewNameContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.XmlAggFunctionContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.XmlCdataFunctionContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.XmlColattvalFunctionContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.XmlElementFunctionContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.XmlExistsFunctionContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.XmlForestFunctionContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.XmlFunctionContext; @@ -105,6 +114,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BetweenExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; +import 
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.CaseWhenExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.DatetimeExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.FunctionSegment; @@ -114,6 +124,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ListExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.MultisetExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.NotExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.XmlElementFunctionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.XmlNameSpaceStringAsIdentifierSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.XmlNameSpacesClauseSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.XmlPiFunctionSegment; @@ -148,6 +159,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import org.apache.shardingsphere.sql.parser.sql.common.value.keyword.KeywordValue; import org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl.BooleanLiteralValue; +import org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl.DateTimeLiteralValue; import org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl.NullLiteralValue; import org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl.NumberLiteralValue; import org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl.OtherLiteralValue; @@ -204,6 +216,22 @@ public final ASTNode visitLiterals(final LiteralsContext ctx) { throw new IllegalStateException("Literals must have string, number, dateTime, hex, bit, interval, boolean or null."); } + @Override + public ASTNode visitDateTimeLiterals(final DateTimeLiteralsContext 
ctx) { + if (null != ctx.LBE_()) { + return new DateTimeLiteralValue(ctx.identifier().getText(), ((StringLiteralValue) visit(ctx.stringLiterals())).getValue(), true); + } + String dateTimeType; + if (null != ctx.DATE()) { + dateTimeType = ctx.DATE().getText(); + } else if (null != ctx.TIME()) { + dateTimeType = ctx.TIME().getText(); + } else { + dateTimeType = ctx.TIMESTAMP().getText(); + } + return new DateTimeLiteralValue(dateTimeType, ((StringLiteralValue) visit(ctx.stringLiterals())).getValue(), false); + } + @Override public final ASTNode visitStringLiterals(final StringLiteralsContext ctx) { return new StringLiteralValue(ctx.getText()); @@ -239,7 +267,7 @@ public final ASTNode visitNullValueLiterals(final NullValueLiteralsContext ctx) @Override public final ASTNode visitIdentifier(final IdentifierContext ctx) { UnreservedWordContext unreservedWord = ctx.unreservedWord(); - return null != unreservedWord ? visit(unreservedWord) : new IdentifierValue(ctx.getText()); + return null == unreservedWord ? new IdentifierValue(ctx.getText()) : visit(unreservedWord); } @Override @@ -368,19 +396,19 @@ private ASTNode createMultisetExpression(final ExprContext ctx) { ExpressionSegment left = (ColumnSegment) visitColumnName(ctx.multisetExpr().columnName(0)); ExpressionSegment right = (ColumnSegment) visitColumnName(ctx.multisetExpr().columnName(1)); String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); - String keyWord = ctx.multisetExpr().DISTINCT() != null ? "DISTINCT" : "ALL"; + String keyWord = null == ctx.multisetExpr().DISTINCT() ? 
"ALL" : "DISTINCT"; return new MultisetExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, ctx.multisetExpr().multisetOperator().getText(), keyWord, text); } private ASTNode createDatetimeExpression(final ExprContext ctx, final DatetimeExprContext datetimeExpr) { ExpressionSegment left = (ExpressionSegment) visit(ctx.expr(0)); String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); - if (null != datetimeExpr.expr()) { - ExpressionSegment right = new ExpressionProjectionSegment(datetimeExpr.getStart().getStartIndex(), - datetimeExpr.getStop().getStopIndex(), datetimeExpr.getText(), (ExpressionSegment) visit(datetimeExpr.expr())); - return new DatetimeExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, text); + if (null == datetimeExpr.expr()) { + return new DatetimeExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, text); } - return new DatetimeExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, text); + ExpressionSegment right = new ExpressionProjectionSegment(datetimeExpr.getStart().getStartIndex(), + datetimeExpr.getStop().getStopIndex(), datetimeExpr.getText(), (ExpressionSegment) visit(datetimeExpr.expr())); + return new DatetimeExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, text); } private ASTNode createBinaryOperationExpression(final ExprContext ctx, final String operator) { @@ -392,46 +420,44 @@ private ASTNode createBinaryOperationExpression(final ExprContext ctx, final Str @Override public final ASTNode visitBooleanPrimary(final BooleanPrimaryContext ctx) { - if (null == ctx.IS()) { - return null == ctx.comparisonOperator() && null == ctx.SAFE_EQ_() ? 
visit(ctx.predicate()) : createCompareSegment(ctx); - } - String rightText = ""; - if (null != ctx.NOT()) { - rightText = rightText.concat(ctx.start.getInputStream().getText(new Interval(ctx.NOT().getSymbol().getStartIndex(), ctx.NOT().getSymbol().getStopIndex()))).concat(" "); - } - Token operatorToken = null; - if (null != ctx.NULL()) { - operatorToken = ctx.NULL().getSymbol(); - } - if (null != ctx.TRUE()) { - operatorToken = ctx.TRUE().getSymbol(); + if (null != ctx.IS()) { + String rightText = ""; + if (null != ctx.NOT()) { + rightText = rightText.concat(ctx.start.getInputStream().getText(new Interval(ctx.NOT().getSymbol().getStartIndex(), ctx.NOT().getSymbol().getStopIndex()))).concat(" "); + } + Token operatorToken = null; + if (null != ctx.NULL()) { + operatorToken = ctx.NULL().getSymbol(); + } + if (null != ctx.TRUE()) { + operatorToken = ctx.TRUE().getSymbol(); + } + if (null != ctx.FALSE()) { + operatorToken = ctx.FALSE().getSymbol(); + } + int startIndex = null == operatorToken ? ctx.IS().getSymbol().getStopIndex() + 2 : operatorToken.getStartIndex(); + rightText = rightText.concat(ctx.start.getInputStream().getText(new Interval(startIndex, ctx.stop.getStopIndex()))); + ExpressionSegment right = new LiteralExpressionSegment(ctx.IS().getSymbol().getStopIndex() + 2, ctx.stop.getStopIndex(), rightText); + String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); + ExpressionSegment left = (ExpressionSegment) visit(ctx.booleanPrimary()); + String operator = "IS"; + return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); } - if (null != ctx.FALSE()) { - operatorToken = ctx.FALSE().getSymbol(); + if (null != ctx.comparisonOperator() || null != ctx.SAFE_EQ_()) { + return createCompareSegment(ctx); } - int startIndex = null == operatorToken ? 
ctx.IS().getSymbol().getStopIndex() + 2 : operatorToken.getStartIndex(); - rightText = rightText.concat(ctx.start.getInputStream().getText(new Interval(startIndex, ctx.stop.getStopIndex()))); - ExpressionSegment right = new LiteralExpressionSegment(ctx.IS().getSymbol().getStopIndex() + 2, ctx.stop.getStopIndex(), rightText); - String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); - ExpressionSegment left = (ExpressionSegment) visit(ctx.booleanPrimary()); - String operator = "IS"; - return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); + return visit(ctx.predicate()); } private ASTNode createCompareSegment(final BooleanPrimaryContext ctx) { ExpressionSegment left = (ExpressionSegment) visit(ctx.booleanPrimary()); ExpressionSegment right; - String operator; - if (null != ctx.ALL()) { - operator = null != ctx.SAFE_EQ_() ? ctx.SAFE_EQ_().getText() : ctx.comparisonOperator().getText() + " ALL"; - } else { - operator = null != ctx.SAFE_EQ_() ? ctx.SAFE_EQ_().getText() : ctx.comparisonOperator().getText(); - } if (null != ctx.predicate()) { right = (ExpressionSegment) visit(ctx.predicate()); } else { right = new SubqueryExpressionSegment(new SubquerySegment(ctx.subquery().start.getStartIndex(), ctx.subquery().stop.getStopIndex(), (OracleSelectStatement) visit(ctx.subquery()))); } + String operator = null == ctx.SAFE_EQ_() ? 
ctx.comparisonOperator().getText() : ctx.SAFE_EQ_().getText(); String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); } @@ -472,7 +498,7 @@ private BinaryOperationExpression createBinaryOperationExpressionFromLike(final for (SimpleExprContext each : ctx.simpleExpr()) { right.getItems().add((ExpressionSegment) visit(each)); } - String operator = null != ctx.NOT() ? "NOT LIKE" : "LIKE"; + String operator = null == ctx.NOT() ? "LIKE" : "NOT LIKE"; String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); } @@ -549,9 +575,52 @@ public final ASTNode visitSimpleExpr(final SimpleExprContext ctx) { if (null != ctx.privateExprOfDb()) { return visit(ctx.privateExprOfDb()); } + if (null != ctx.LP_()) { + if (1 == ctx.expr().size()) { + return visit(ctx.expr(0)); + } else { + ListExpression result = new ListExpression(ctx.LP_().getSymbol().getStartIndex(), ctx.RP_().getSymbol().getStopIndex()); + for (ExprContext each : ctx.expr()) { + result.getItems().add((ExpressionSegment) visit(each)); + } + return result; + } + } + return visitRemainSimpleExpr(ctx, startIndex, stopIndex); + } + + private ASTNode visitRemainSimpleExpr(final SimpleExprContext ctx, final int startIndex, final int stopIndex) { + if (null != ctx.OR_()) { + ExpressionSegment left = (ExpressionSegment) visit(ctx.simpleExpr(0)); + ExpressionSegment right = (ExpressionSegment) visit(ctx.simpleExpr(1)); + String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); + return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, ctx.OR_().getText(), text); + } + if (null 
!= ctx.caseExpression()) { + return visit(ctx.caseExpression()); + } + if (null != ctx.BINARY()) { + return visit(ctx.simpleExpr(0)); + } + for (SimpleExprContext each : ctx.simpleExpr()) { + visit(each); + } return new CommonExpressionSegment(startIndex, stopIndex, ctx.getText()); } + @Override + public ASTNode visitCaseExpression(final CaseExpressionContext ctx) { + ExpressionSegment caseExpr = null == ctx.simpleExpr() ? null : (ExpressionSegment) visit(ctx.simpleExpr()); + Collection whenExprs = new ArrayList<>(ctx.caseWhen().size()); + Collection thenExprs = new ArrayList<>(ctx.caseWhen().size()); + for (CaseWhenContext each : ctx.caseWhen()) { + whenExprs.add((ExpressionSegment) visit(each.expr(0))); + thenExprs.add((ExpressionSegment) visit(each.expr(1))); + } + ExpressionSegment elseExpr = null == ctx.caseElse() ? null : (ExpressionSegment) visit(ctx.caseElse().expr()); + return new CaseWhenExpression(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), caseExpr, whenExprs, thenExprs, elseExpr); + } + @Override public ASTNode visitPrivateExprOfDb(final PrivateExprOfDbContext ctx) { if (null != ctx.intervalExpression()) { @@ -688,11 +757,34 @@ public ASTNode visitXmlFunction(final XmlFunctionContext ctx) { if (null != ctx.xmlTableFunction()) { return visit(ctx.xmlTableFunction()); } + if (null != ctx.xmlElementFunction()) { + return visit(ctx.xmlElementFunction()); + } FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.specifiedFunctionName.getText(), getOriginalText(ctx)); result.getParameters().addAll(getExpressions(ctx.exprList())); return result; } + @Override + public ASTNode visitXmlElementFunction(final XmlElementFunctionContext ctx) { + XmlElementFunctionSegment result = + new XmlElementFunctionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.XMLELEMENT().getText(), (IdentifierValue) visit(ctx.identifier()), getOriginalText(ctx)); + Collection expressionSegments = 
ctx.exprWithAlias().stream().map(each -> (ExpressionSegment) visit(each.expr())).collect(Collectors.toList()); + result.getParameters().addAll(expressionSegments); + if (null != ctx.xmlAttributes()) { + Collection xmlAttributes = ctx.xmlAttributes().exprWithAlias().stream().map(each -> (ExpressionSegment) visit(each.expr())).collect(Collectors.toList()); + result.getXmlAttributes().addAll(xmlAttributes); + } + return result; + } + + @Override + public ASTNode visitXmlCdataFunction(final XmlCdataFunctionContext ctx) { + FunctionSegment result = new FunctionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.XMLCDATA().getText(), getOriginalText(ctx)); + result.getParameters().add((ExpressionSegment) visit(ctx.stringLiterals())); + return result; + } + @Override public ASTNode visitXmlAggFunction(final XmlAggFunctionContext ctx) { return new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.XMLAGG().getText(), getOriginalText(ctx)); @@ -819,7 +911,7 @@ public ASTNode visitXmlIsSchemaValidFunction(final XmlIsSchemaValidFunctionConte return result; } - private Collection getExpressions(final OracleStatementParser.ExprListContext exprList) { + private Collection getExpressions(final ExprListContext exprList) { if (null == exprList) { return Collections.emptyList(); } @@ -874,20 +966,40 @@ public final ASTNode visitSpecialFunction(final SpecialFunctionContext ctx) { if (null != ctx.translateFunction()) { return visit(ctx.translateFunction()); } - throw new IllegalStateException("SpecialFunctionContext must have castFunction, charFunction, extractFunction, formatFunction, firstOrLastValueFunction, trimFunction or featureFunction."); + if (null != ctx.cursorFunction()) { + return visit(ctx.cursorFunction()); + } + if (null != ctx.toDateFunction()) { + return visit(ctx.toDateFunction()); + } + throw new IllegalStateException( + "SpecialFunctionContext must have castFunction, charFunction, extractFunction, formatFunction, 
firstOrLastValueFunction, trimFunction, toDateFunction or featureFunction."); } @Override - public final ASTNode visitTranslateFunction(final OracleStatementParser.TranslateFunctionContext ctx) { + public ASTNode visitCursorFunction(final CursorFunctionContext ctx) { + FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.CURSOR().toString(), ctx.getText()); + result.getParameters() + .add(new SubqueryExpressionSegment(new SubquerySegment(ctx.subquery().start.getStartIndex(), ctx.subquery().stop.getStopIndex(), (OracleSelectStatement) visit(ctx.subquery())))); + return result; + } + + @Override + public ASTNode visitToDateFunction(final ToDateFunctionContext ctx) { + return new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.TO_DATE().getText(), getOriginalText(ctx)); + } + + @Override + public final ASTNode visitTranslateFunction(final TranslateFunctionContext ctx) { FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.TRANSLATE().getText(), getOriginalText(ctx)); result.getParameters().add((ExpressionSegment) visit(ctx.expr())); - TerminalNode charSet = null != ctx.NCHAR_CS() ? ctx.NCHAR_CS() : ctx.CHAR_CS(); + TerminalNode charSet = null == ctx.NCHAR_CS() ? 
ctx.CHAR_CS() : ctx.NCHAR_CS(); result.getParameters().add(new LiteralExpressionSegment(charSet.getSymbol().getStartIndex(), charSet.getSymbol().getStopIndex(), charSet.getText())); return result; } @Override - public final ASTNode visitSetFunction(final OracleStatementParser.SetFunctionContext ctx) { + public final ASTNode visitSetFunction(final SetFunctionContext ctx) { FunctionSegment result = new FunctionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.SET().getText(), getOriginalText(ctx)); result.getParameters().add((ExpressionSegment) visit(ctx.expr())); return result; @@ -905,12 +1017,7 @@ public final ASTNode visitCastFunction(final CastFunctionContext ctx) { result.getParameters() .add(new SubqueryExpressionSegment(new SubquerySegment(ctx.subquery().start.getStartIndex(), ctx.subquery().stop.getStopIndex(), (OracleSelectStatement) visit(ctx.subquery())))); } else { - ASTNode exprSegment = visit(ctx.expr()); - if (exprSegment instanceof ColumnSegment) { - result.getParameters().add((ColumnSegment) exprSegment); - } else if (exprSegment instanceof LiteralExpressionSegment) { - result.getParameters().add((LiteralExpressionSegment) exprSegment); - } + result.getParameters().add((ExpressionSegment) visit(ctx.expr())); } result.getParameters().add((DataTypeSegment) visit(ctx.dataType())); return result; @@ -1001,7 +1108,7 @@ public final ASTNode visitOrderByClause(final OrderByClauseContext ctx) { @Override public final ASTNode visitOrderByItem(final OrderByItemContext ctx) { - OrderDirection orderDirection = null != ctx.DESC() ? OrderDirection.DESC : OrderDirection.ASC; + OrderDirection orderDirection = null == ctx.DESC() ? 
OrderDirection.ASC : OrderDirection.DESC; NullsOrderType nullsOrderType = generateNullsOrderType(ctx); if (null != ctx.columnName()) { ColumnSegment column = (ColumnSegment) visit(ctx.columnName()); diff --git a/parser/sql/dialect/oracle/src/main/java/org/apache/shardingsphere/sql/parser/oracle/visitor/statement/type/OracleDDLStatementVisitor.java b/parser/sql/dialect/oracle/src/main/java/org/apache/shardingsphere/sql/parser/oracle/visitor/statement/type/OracleDDLStatementVisitor.java index bc140104aeecd..20c623fc534ab 100644 --- a/parser/sql/dialect/oracle/src/main/java/org/apache/shardingsphere/sql/parser/oracle/visitor/statement/type/OracleDDLStatementVisitor.java +++ b/parser/sql/dialect/oracle/src/main/java/org/apache/shardingsphere/sql/parser/oracle/visitor/statement/type/OracleDDLStatementVisitor.java @@ -19,7 +19,6 @@ import org.apache.shardingsphere.sql.parser.api.ASTNode; import org.apache.shardingsphere.sql.parser.api.visitor.statement.type.DDLStatementVisitor; -import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.AddColumnSpecificationContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.AddConstraintSpecificationContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.AlterAnalyticViewContext; @@ -63,9 +62,12 @@ import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.AnalyzeContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.AssociateStatisticsContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.AuditContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.AuditTraditionalContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.AuditUnifiedContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ColumnDefinitionContext; import 
org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ColumnNameContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ColumnOrVirtualDefinitionContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ColumnClausesContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.CommentContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ConstraintClausesContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.CreateClusterContext; @@ -94,8 +96,12 @@ import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.CreateTablespaceContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.CreateMaterializedViewContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.CreateMaterializedViewLogContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.CreateTypeContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.CreateViewContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.DataTypeDefinitionContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.DisassociateStatisticsContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.DropClusterContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.DropColumnClauseContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.DropColumnSpecificationContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.DropConstraintClauseContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.DropContextContext; @@ -138,10 +144,15 @@ import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.IndexNameContext; import 
org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.IndexTypeNameContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.InlineConstraintContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ModifyCollectionRetrievalContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ModifyColPropertiesContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ModifyColumnSpecificationContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ModifyConstraintClauseContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.NestedTableTypeSpecContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.NoAuditContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ObjectBaseTypeDefContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ObjectSubTypeDefContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ObjectTypeDefContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.OperateColumnClauseContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.OutOfLineConstraintContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.OutOfLineRefConstraintContext; @@ -153,12 +164,14 @@ import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.TableNameContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.TruncateTableContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.TypeNameContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.VarrayTypeSpecContext; import org.apache.shardingsphere.sql.parser.oracle.visitor.statement.OracleStatementVisitor; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.AlterDefinitionSegment; 
import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.CreateDefinitionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.ColumnDefinitionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.alter.AddColumnDefinitionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.alter.DropColumnDefinitionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.alter.ModifyCollectionRetrievalSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.alter.ModifyColumnDefinitionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.constraint.ConstraintDefinitionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.constraint.ConstraintSegment; @@ -175,6 +188,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.FunctionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.DataTypeSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement; import org.apache.shardingsphere.sql.parser.sql.common.value.collection.CollectionValue; import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.ddl.OracleAlterAnalyticViewStatement; @@ -246,6 +260,7 @@ import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.ddl.OracleCreateTablespaceStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.ddl.OracleCreateMaterializedViewStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.ddl.OracleCreateMaterializedViewLogStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.ddl.OracleCreateViewStatement; import 
org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.ddl.OracleCreateVarrayTypeStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.ddl.OracleDisassociateStatisticsStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.ddl.OracleDropClusterStatement; @@ -299,6 +314,18 @@ */ public final class OracleDDLStatementVisitor extends OracleStatementVisitor implements DDLStatementVisitor { + @Override + public ASTNode visitCreateView(final CreateViewContext ctx) { + OracleCreateViewStatement result = new OracleCreateViewStatement(); + OracleDMLStatementVisitor visitor = new OracleDMLStatementVisitor(); + visitor.getParameterMarkerSegments().addAll(getParameterMarkerSegments()); + result.setView((SimpleTableSegment) visit(ctx.viewName())); + result.setSelect((SelectStatement) visitor.visit(ctx.select())); + result.setViewDefinition(getOriginalText(ctx.select())); + result.addParameterMarkerSegments(getParameterMarkerSegments()); + return result; + } + @SuppressWarnings("unchecked") @Override public ASTNode visitCreateTable(final CreateTableContext ctx) { @@ -318,12 +345,12 @@ public ASTNode visitCreateTable(final CreateTableContext ctx) { } @Override - public ASTNode visitCreateType(final OracleStatementParser.CreateTypeContext ctx) { + public ASTNode visitCreateType(final CreateTypeContext ctx) { boolean isReplace = null != ctx.REPLACE(); boolean isEditionable = null == ctx.NONEDITIONABLE(); TypeSegment typeSegment = (TypeSegment) visit(ctx.plsqlTypeSource().typeName()); if (null != ctx.plsqlTypeSource().objectSubTypeDef()) { - OracleStatementParser.ObjectSubTypeDefContext objectSubTypeDefContext = ctx.plsqlTypeSource().objectSubTypeDef(); + ObjectSubTypeDefContext objectSubTypeDefContext = ctx.plsqlTypeSource().objectSubTypeDef(); return new OracleCreateSubTypeStatement(isReplace, isEditionable, null == objectSubTypeDefContext.finalClause() || null == objectSubTypeDefContext.finalClause().NOT(), null 
== objectSubTypeDefContext.instantiableClause() || null == objectSubTypeDefContext.instantiableClause().NOT(), @@ -334,15 +361,15 @@ public ASTNode visitCreateType(final OracleStatementParser.CreateTypeContext ctx } } - private ASTNode visitCreateTypeObjectBaseTypeDef(final OracleStatementParser.ObjectBaseTypeDefContext ctx, final boolean isReplace, final boolean isEditionable, final TypeSegment typeSegment) { + private ASTNode visitCreateTypeObjectBaseTypeDef(final ObjectBaseTypeDefContext ctx, final boolean isReplace, final boolean isEditionable, final TypeSegment typeSegment) { if (null != ctx.objectTypeDef()) { - OracleStatementParser.ObjectTypeDefContext objectTypeDefContext = ctx.objectTypeDef(); + ObjectTypeDefContext objectTypeDefContext = ctx.objectTypeDef(); return new OracleCreateObjectTypeStatement(isReplace, isEditionable, null == objectTypeDefContext.finalClause() || null == objectTypeDefContext.finalClause().NOT(), null == objectTypeDefContext.instantiableClause() || null == objectTypeDefContext.instantiableClause().NOT(), null == objectTypeDefContext.persistableClause() || null == objectTypeDefContext.persistableClause().NOT(), typeSegment, objectTypeDefContext.dataTypeDefinition().stream().map(definition -> (TypeDefinitionSegment) visit(definition)).collect(Collectors.toList())); } else if (null != ctx.varrayTypeSpec()) { - OracleStatementParser.VarrayTypeSpecContext varrayTypeSpecContext = ctx.varrayTypeSpec(); + VarrayTypeSpecContext varrayTypeSpecContext = ctx.varrayTypeSpec(); return new OracleCreateVarrayTypeStatement(isReplace, isEditionable, null == varrayTypeSpecContext.INTEGER_() ? 
-1 : Integer.parseInt(varrayTypeSpecContext.INTEGER_().getText()), null != varrayTypeSpecContext.typeSpec().NULL(), @@ -350,7 +377,7 @@ private ASTNode visitCreateTypeObjectBaseTypeDef(final OracleStatementParser.Obj typeSegment, (DataTypeSegment) visit(varrayTypeSpecContext.typeSpec().dataType())); } else { - OracleStatementParser.NestedTableTypeSpecContext nestedTableTypeSpecContext = ctx.nestedTableTypeSpec(); + NestedTableTypeSpecContext nestedTableTypeSpecContext = ctx.nestedTableTypeSpec(); return new OracleCreateNestedTableTypeStatement(isReplace, isEditionable, null != nestedTableTypeSpecContext.typeSpec().NULL(), null == nestedTableTypeSpecContext.typeSpec().persistableClause() || null == nestedTableTypeSpecContext.typeSpec().persistableClause().NOT(), @@ -360,7 +387,7 @@ private ASTNode visitCreateTypeObjectBaseTypeDef(final OracleStatementParser.Obj } @Override - public ASTNode visitDataTypeDefinition(final OracleStatementParser.DataTypeDefinitionContext ctx) { + public ASTNode visitDataTypeDefinition(final DataTypeDefinitionContext ctx) { return new TypeDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.name().getText(), (DataTypeSegment) visit(ctx.dataType())); } @@ -387,10 +414,13 @@ public ASTNode visitCreateDefinitionClause(final CreateDefinitionClauseContext c @Override public ASTNode visitColumnDefinition(final ColumnDefinitionContext ctx) { ColumnSegment column = (ColumnSegment) visit(ctx.columnName()); - DataTypeSegment dataType = (DataTypeSegment) visit(ctx.dataType()); + DataTypeSegment dataType = null == ctx.dataType() ? 
null : (DataTypeSegment) visit(ctx.dataType()); boolean isPrimaryKey = ctx.inlineConstraint().stream().anyMatch(each -> null != each.primaryKey()); boolean isNotNull = ctx.inlineConstraint().stream().anyMatch(each -> null != each.NOT() && null != each.NULL()); ColumnDefinitionSegment result = new ColumnDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), column, dataType, isPrimaryKey, isNotNull); + if (null != ctx.REF() && null != ctx.dataType()) { + result.setRef(true); + } for (InlineConstraintContext each : ctx.inlineConstraint()) { if (null != each.referencesClause()) { result.getReferencedTables().add((SimpleTableSegment) visit(each.referencesClause().tableName())); @@ -452,6 +482,8 @@ public ASTNode visitAlterTable(final AlterTableContext ctx) { result.getModifyConstraintDefinitions().add((ModifyConstraintDefinitionSegment) each); } else if (each instanceof DropConstraintDefinitionSegment) { result.getDropConstraintDefinitions().add((DropConstraintDefinitionSegment) each); + } else if (each instanceof ModifyCollectionRetrievalSegment) { + result.setModifyCollectionRetrieval((ModifyCollectionRetrievalSegment) each); } } } @@ -477,36 +509,60 @@ public ASTNode visitAlterTablespace(final AlterTablespaceContext ctx) { public ASTNode visitAlterDefinitionClause(final AlterDefinitionClauseContext ctx) { CollectionValue result = new CollectionValue<>(); if (null != ctx.columnClauses()) { - for (OperateColumnClauseContext each : ctx.columnClauses().operateColumnClause()) { - if (null != each.addColumnSpecification()) { - result.getValue().addAll(((CollectionValue) visit(each.addColumnSpecification())).getValue()); - } - if (null != each.modifyColumnSpecification()) { - result.getValue().add((ModifyColumnDefinitionSegment) visit(each.modifyColumnSpecification())); - } - if (null != each.dropColumnClause()) { - result.getValue().add((DropColumnDefinitionSegment) visit(each.dropColumnClause())); - } - } + 
result.getValue().addAll(((CollectionValue) visit(ctx.columnClauses())).getValue()); } if (null != ctx.constraintClauses()) { - // TODO Support rename constraint - ConstraintClausesContext constraintClausesContext = ctx.constraintClauses(); - if (null != constraintClausesContext.addConstraintSpecification()) { - result.combine((CollectionValue) visit(constraintClausesContext.addConstraintSpecification())); + result.getValue().addAll(((CollectionValue) visit(ctx.constraintClauses())).getValue()); + } + // TODO More alter definition parse + return result; + } + + @SuppressWarnings("unchecked") + @Override + public ASTNode visitColumnClauses(final ColumnClausesContext ctx) { + CollectionValue result = new CollectionValue<>(); + for (OperateColumnClauseContext each : ctx.operateColumnClause()) { + if (null != each.addColumnSpecification()) { + result.getValue().addAll(((CollectionValue) visit(each.addColumnSpecification())).getValue()); } - if (null != constraintClausesContext.modifyConstraintClause()) { - result.getValue().add((AlterDefinitionSegment) visit(constraintClausesContext.modifyConstraintClause())); + if (null != each.modifyColumnSpecification()) { + result.getValue().add((ModifyColumnDefinitionSegment) visit(each.modifyColumnSpecification())); } - for (DropConstraintClauseContext each : constraintClausesContext.dropConstraintClause()) { - if (null != each.constraintName()) { - result.getValue().add((AlterDefinitionSegment) visit(each)); - } + if (null != each.dropColumnClause()) { + result.getValue().add((DropColumnDefinitionSegment) visit(each.dropColumnClause())); } } + if (null != ctx.modifyCollectionRetrieval()) { + result.getValue().add((ModifyCollectionRetrievalSegment) visit(ctx.modifyCollectionRetrieval())); + } return result; } + @SuppressWarnings("unchecked") + @Override + public ASTNode visitConstraintClauses(final ConstraintClausesContext ctx) { + // TODO Support rename constraint + CollectionValue result = new CollectionValue<>(); + if (null != 
ctx.addConstraintSpecification()) { + result.combine((CollectionValue) visit(ctx.addConstraintSpecification())); + } + if (null != ctx.modifyConstraintClause()) { + result.getValue().add((AlterDefinitionSegment) visit(ctx.modifyConstraintClause())); + } + for (DropConstraintClauseContext each : ctx.dropConstraintClause()) { + if (null != each.constraintName()) { + result.getValue().add((AlterDefinitionSegment) visit(each)); + } + } + return result; + } + + @Override + public ASTNode visitModifyCollectionRetrieval(final ModifyCollectionRetrievalContext ctx) { + return new ModifyCollectionRetrievalSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), (SimpleTableSegment) visit(ctx.tableName())); + } + @Override public ASTNode visitAddColumnSpecification(final AddColumnSpecificationContext ctx) { CollectionValue result = new CollectionValue<>(); @@ -531,10 +587,26 @@ public ASTNode visitModifyColumnSpecification(final ModifyColumnSpecificationCon return new ModifyColumnDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), columnDefinition); } + @Override + public ASTNode visitDropColumnClause(final DropColumnClauseContext ctx) { + if (null != ctx.dropColumnSpecification()) { + return visit(ctx.dropColumnSpecification()); + } + Collection columns = new LinkedList<>(); + if (null != ctx.columnOrColumnList().columnName()) { + columns.add((ColumnSegment) visit(ctx.columnOrColumnList().columnName())); + } else { + for (ColumnNameContext each : ctx.columnOrColumnList().columnNames().columnName()) { + columns.add((ColumnSegment) visit(each)); + } + } + return new DropColumnDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), columns); + } + @Override public ASTNode visitModifyColProperties(final ModifyColPropertiesContext ctx) { ColumnSegment column = (ColumnSegment) visit(ctx.columnName()); - DataTypeSegment dataType = (DataTypeSegment) visit(ctx.dataType()); + DataTypeSegment dataType = null == 
ctx.dataType() ? null : (DataTypeSegment) visit(ctx.dataType()); // TODO visit pk and reference table return new ColumnDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), column, dataType, false, false); } @@ -567,8 +639,12 @@ public ASTNode visitAddConstraintSpecification(final AddConstraintSpecificationC @Override public ASTNode visitModifyConstraintClause(final ModifyConstraintClauseContext ctx) { - return new ModifyConstraintDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), - (ConstraintSegment) visit(ctx.constraintOption().constraintWithName().constraintName())); + if (null != ctx.constraintOption().constraintWithName()) { + return new ModifyConstraintDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), + (ConstraintSegment) visit(ctx.constraintOption().constraintWithName().constraintName())); + } else { + return new ModifyConstraintDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), null); + } } @Override @@ -607,6 +683,18 @@ public ASTNode visitAlterDatabaseDictionary(final AlterDatabaseDictionaryContext public ASTNode visitAlterView(final AlterViewContext ctx) { OracleAlterViewStatement result = new OracleAlterViewStatement(); result.setView((SimpleTableSegment) visit(ctx.viewName())); + result.setConstraintDefinitionSegment((ConstraintDefinitionSegment) getAlterViewConstraintDefinition(ctx)); + return result; + } + + private ASTNode getAlterViewConstraintDefinition(final AlterViewContext ctx) { + ConstraintDefinitionSegment result = null; + if (null != ctx.outOfLineConstraint()) { + result = (ConstraintDefinitionSegment) visit(ctx.outOfLineConstraint()); + } else if (null != ctx.constraintName()) { + result = new ConstraintDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex()); + result.setConstraintName((ConstraintSegment) visit(ctx.constraintName())); + } return result; } @@ -696,7 +784,7 @@ public ASTNode 
visitIndexExpressions(final IndexExpressionsContext ctx) { @Override public ASTNode visitIndexExpression(final IndexExpressionContext ctx) { - return null != ctx.expr() ? visit(ctx.expr()) : visit(ctx.columnName()); + return null == ctx.expr() ? visit(ctx.columnName()) : visit(ctx.expr()); } @Override @@ -817,6 +905,16 @@ public ASTNode visitDisassociateStatistics(final DisassociateStatisticsContext c @Override public ASTNode visitAudit(final AuditContext ctx) { + return null == ctx.auditTraditional() ? visit(ctx.auditUnified()) : visit(ctx.auditTraditional()); + } + + @Override + public ASTNode visitAuditTraditional(final AuditTraditionalContext ctx) { + return new OracleAuditStatement(); + } + + @Override + public ASTNode visitAuditUnified(final AuditUnifiedContext ctx) { return new OracleAuditStatement(); } @@ -935,7 +1033,9 @@ public ASTNode visitAlterAttributeDimension(final AlterAttributeDimensionContext @Override public ASTNode visitCreateSequence(final CreateSequenceContext ctx) { - return new OracleCreateSequenceStatement(); + OracleCreateSequenceStatement result = new OracleCreateSequenceStatement(); + result.setSequenceName(ctx.sequenceName().getText()); + return result; } @Override diff --git a/parser/sql/dialect/oracle/src/main/java/org/apache/shardingsphere/sql/parser/oracle/visitor/statement/type/OracleDMLStatementVisitor.java b/parser/sql/dialect/oracle/src/main/java/org/apache/shardingsphere/sql/parser/oracle/visitor/statement/type/OracleDMLStatementVisitor.java index 5b946bc4754e5..39c6223ae87b0 100644 --- a/parser/sql/dialect/oracle/src/main/java/org/apache/shardingsphere/sql/parser/oracle/visitor/statement/type/OracleDMLStatementVisitor.java +++ b/parser/sql/dialect/oracle/src/main/java/org/apache/shardingsphere/sql/parser/oracle/visitor/statement/type/OracleDMLStatementVisitor.java @@ -63,6 +63,9 @@ import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.MergeAssignmentContext; import 
org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.MergeAssignmentValueContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.MergeContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.MergeColumnValueContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.MergeInsertClauseContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.MergeInsertColumnContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.MergeSetAssignmentsClauseContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.MergeUpdateClauseContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ModelClauseContext; @@ -71,6 +74,7 @@ import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.OrderByClauseContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.OuterJoinClauseContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.ParenthesisSelectSubqueryContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.PivotClauseContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.QueryBlockContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.QueryNameContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.QueryTableExprClauseContext; @@ -93,6 +97,7 @@ import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.SubqueryFactoringClauseContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.TableCollectionExprContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.TableNameContext; +import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.UnpivotClauseContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.UpdateContext; 
import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.UpdateSetClauseContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.UpdateSetColumnClauseContext; @@ -101,21 +106,26 @@ import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.UsingClauseContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.WhereClauseContext; import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.WithClauseContext; -import org.apache.shardingsphere.sql.parser.autogen.OracleStatementParser.XmlTableContext; import org.apache.shardingsphere.sql.parser.oracle.visitor.statement.OracleStatementVisitor; import org.apache.shardingsphere.sql.parser.sql.common.enums.JoinType; import org.apache.shardingsphere.sql.parser.sql.common.enums.OrderDirection; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dal.VariableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.AssignmentSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.ColumnAssignmentSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.InsertValuesSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.SetAssignmentSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.InsertColumnsSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BetweenExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.CaseWhenExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.CollateExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.DatetimeExpression; import 
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.FunctionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.InExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.MultisetExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.XmlElementFunctionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.XmlPiFunctionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.XmlQueryAndExistsFunctionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.XmlSerializeFunctionSegment; @@ -146,20 +156,28 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.AliasAvailable; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.AliasSegment; -import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.InsertMultiTableElementSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.ModelSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.PivotSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.WithSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.CollectionTableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.FunctionTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.JoinTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SubqueryTableSegment; import 
org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableSegment; -import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.XmlTableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.UpdateStatement; import org.apache.shardingsphere.sql.parser.sql.common.util.SQLUtils; import org.apache.shardingsphere.sql.parser.sql.common.value.collection.CollectionValue; import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl.BooleanLiteralValue; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableConditionalIntoElseSegment; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableConditionalIntoSegment; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableConditionalIntoThenSegment; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableConditionalIntoWhenThenSegment; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableInsertIntoSegment; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableInsertType; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml.OracleDeleteStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml.OracleInsertStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml.OracleLockTableStatement; @@ -167,6 +185,7 @@ import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml.OracleSelectStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml.OracleUpdateStatement; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import 
java.util.LinkedList; @@ -307,23 +326,26 @@ private Collection createInsertValuesSegments(final Assignm @Override public ASTNode visitInsertMultiTable(final InsertMultiTableContext ctx) { OracleInsertStatement result = new OracleInsertStatement(); - result.setInsertMultiTableElementSegment(null == ctx.conditionalInsertClause() - ? createInsertMultiTableElementSegment(ctx.multiTableElement()) - : (InsertMultiTableElementSegment) visit(ctx.conditionalInsertClause())); - OracleSelectStatement subquery = (OracleSelectStatement) visit(ctx.selectSubquery()); - SubquerySegment subquerySegment = new SubquerySegment(ctx.selectSubquery().start.getStartIndex(), ctx.selectSubquery().stop.getStopIndex(), subquery); - result.setInsertSelect(subquerySegment); + result.setInsertSelect(new SubquerySegment(ctx.selectSubquery().start.getStartIndex(), ctx.selectSubquery().stop.getStopIndex(), (OracleSelectStatement) visit(ctx.selectSubquery()))); + result.setMultiTableInsertType(null != ctx.conditionalInsertClause() && null != ctx.conditionalInsertClause().FIRST() ? 
MultiTableInsertType.FIRST : MultiTableInsertType.ALL); + List multiTableElementContexts = ctx.multiTableElement(); + if (null != multiTableElementContexts && !multiTableElementContexts.isEmpty()) { + MultiTableInsertIntoSegment multiTableInsertIntoSegment = new MultiTableInsertIntoSegment( + multiTableElementContexts.get(0).getStart().getStartIndex(), multiTableElementContexts.get(multiTableElementContexts.size() - 1).getStop().getStopIndex()); + multiTableInsertIntoSegment.getInsertStatements().addAll(createInsertIntoSegments(multiTableElementContexts)); + result.setMultiTableInsertIntoSegment(multiTableInsertIntoSegment); + } else { + result.setMultiTableConditionalIntoSegment((MultiTableConditionalIntoSegment) visit(ctx.conditionalInsertClause())); + } result.addParameterMarkerSegments(getParameterMarkerSegments()); return result; } - private InsertMultiTableElementSegment createInsertMultiTableElementSegment(final List ctx) { - Collection insertStatements = new LinkedList<>(); + private Collection createInsertIntoSegments(final List ctx) { + Collection result = new LinkedList<>(); for (MultiTableElementContext each : ctx) { - insertStatements.add((OracleInsertStatement) visit(each)); + result.add((OracleInsertStatement) visit(each)); } - InsertMultiTableElementSegment result = new InsertMultiTableElementSegment(ctx.get(0).getStart().getStartIndex(), ctx.get(ctx.size() - 1).getStop().getStopIndex()); - result.getInsertStatements().addAll(insertStatements); return result; } @@ -401,6 +423,34 @@ public ASTNode visitSelect(final SelectContext ctx) { return result; } + @Override + public ASTNode visitPivotClause(final PivotClauseContext ctx) { + ColumnSegment pivotForColumn = (ColumnSegment) visitColumnName(ctx.pivotForClause().columnName()); + Collection pivotInColumns = new LinkedList<>(); + if (null != ctx.pivotInClause()) { + ctx.pivotInClause().pivotInClauseExpr().forEach(each -> { + ExpressionSegment expr = (ExpressionSegment) visit(each.expr()); + String 
columnName = null != each.alias() && null != each.alias().identifier() ? each.alias().identifier().IDENTIFIER_().getText() : expr.getText(); + ColumnSegment columnSegment = new ColumnSegment(each.getStart().getStartIndex(), each.getStop().getStopIndex(), new IdentifierValue(columnName)); + pivotInColumns.add(columnSegment); + }); + } + return new PivotSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), pivotForColumn, pivotInColumns); + } + + @Override + public ASTNode visitUnpivotClause(final UnpivotClauseContext ctx) { + ColumnSegment unpivotColumn = (ColumnSegment) visitColumnName(ctx.columnName()); + ColumnSegment unpivotForColumn = (ColumnSegment) visitColumnName(ctx.pivotForClause().columnName()); + Collection unpivotInColumns = new LinkedList<>(); + if (null != ctx.unpivotInClause()) { + ctx.unpivotInClause().unpivotInClauseExpr().forEach(each -> unpivotInColumns.add((ColumnSegment) visit(each.columnName()))); + } + PivotSegment result = new PivotSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), unpivotForColumn, unpivotInColumns, true); + result.setUnpivotColumn(unpivotColumn); + return result; + } + @Override public ASTNode visitDmlTableClause(final DmlTableClauseContext ctx) { return visit(ctx.tableName()); @@ -432,32 +482,29 @@ public ASTNode visitTableCollectionExpr(final TableCollectionExprContext ctx) { @Override public ASTNode visitConditionalInsertClause(final ConditionalInsertClauseContext ctx) { - Collection insertStatements = new LinkedList<>(); + Collection whenThenSegments = new LinkedList<>(); for (ConditionalInsertWhenPartContext each : ctx.conditionalInsertWhenPart()) { - insertStatements.addAll(createInsertStatementsFromConditionalInsertWhen(each)); + whenThenSegments.add((MultiTableConditionalIntoWhenThenSegment) visit(each)); } + MultiTableConditionalIntoSegment result = new MultiTableConditionalIntoSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); + 
result.getWhenThenSegments().addAll(whenThenSegments); if (null != ctx.conditionalInsertElsePart()) { - insertStatements.addAll(createInsertStatementsFromConditionalInsertElse(ctx.conditionalInsertElsePart())); + result.setElseSegment((MultiTableConditionalIntoElseSegment) visit(ctx.conditionalInsertElsePart())); } - InsertMultiTableElementSegment result = new InsertMultiTableElementSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex()); - result.getInsertStatements().addAll(insertStatements); return result; } - private Collection createInsertStatementsFromConditionalInsertWhen(final ConditionalInsertWhenPartContext ctx) { - Collection result = new LinkedList<>(); - for (MultiTableElementContext each : ctx.multiTableElement()) { - result.add((OracleInsertStatement) visit(each)); - } - return result; + @Override + public ASTNode visitConditionalInsertWhenPart(final ConditionalInsertWhenPartContext ctx) { + List multiTableElementContexts = ctx.multiTableElement(); + MultiTableConditionalIntoThenSegment thenSegment = new MultiTableConditionalIntoThenSegment(multiTableElementContexts.get(0).start.getStartIndex(), + multiTableElementContexts.get(multiTableElementContexts.size() - 1).stop.getStopIndex(), createInsertIntoSegments(multiTableElementContexts)); + return new MultiTableConditionalIntoWhenThenSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), (ExpressionSegment) visit(ctx.expr()), thenSegment); } - private Collection createInsertStatementsFromConditionalInsertElse(final ConditionalInsertElsePartContext ctx) { - Collection result = new LinkedList<>(); - for (MultiTableElementContext each : ctx.multiTableElement()) { - result.add((OracleInsertStatement) visit(each)); - } - return result; + @Override + public ASTNode visitConditionalInsertElsePart(final ConditionalInsertElsePartContext ctx) { + return new MultiTableConditionalIntoElseSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), createInsertIntoSegments(ctx.multiTableElement())); } 
@Override @@ -733,7 +780,8 @@ private ASTNode createProjectionForComplexExpressionSegment(final ASTNode projec result.setAlias(alias); return result; } - if (projection instanceof XmlQueryAndExistsFunctionSegment || projection instanceof XmlPiFunctionSegment || projection instanceof XmlSerializeFunctionSegment) { + if (projection instanceof XmlQueryAndExistsFunctionSegment || projection instanceof XmlPiFunctionSegment || projection instanceof XmlSerializeFunctionSegment + || projection instanceof XmlElementFunctionSegment) { return projection; } throw new UnsupportedOperationException("Unsupported Complex Expression"); @@ -799,9 +847,19 @@ private ASTNode createProjectionForExpressionSegment(final ASTNode projection, f } return result; } + if (projection instanceof CaseWhenExpression || projection instanceof VariableSegment || projection instanceof BetweenExpression || projection instanceof InExpression + || projection instanceof CollateExpression) { + return createExpressionProjectionSegment(alias, (ExpressionSegment) projection); + } throw new UnsupportedOperationException("Unsupported Expression"); } + private ExpressionProjectionSegment createExpressionProjectionSegment(final AliasSegment alias, final ExpressionSegment projection) { + ExpressionProjectionSegment result = new ExpressionProjectionSegment(projection.getStartIndex(), projection.getStopIndex(), projection.getText(), projection); + result.setAlias(alias); + return result; + } + @Override public ASTNode visitSelectFromClause(final SelectFromClauseContext ctx) { return visit(ctx.fromClauseList()); @@ -830,23 +888,26 @@ private JoinTableSegment generateJoinTableSourceFromFromClauseOption(final FromC @Override public ASTNode visitFromClauseOption(final FromClauseOptionContext ctx) { - if (null != ctx.xmlTable()) { - return visit(ctx.xmlTable()); - } if (null != ctx.joinClause()) { return visit(ctx.joinClause()); } - return visit(ctx.selectTableReference()); - } - - @Override - public ASTNode 
visitXmlTable(final XmlTableContext ctx) { - XmlTableSegment result = new XmlTableSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), - (XmlTableFunctionSegment) visit(ctx.xmlTableFunction())); - if (null != ctx.xmlTableFunctionAlias()) { - result.setXmlTableFunctionAlias(ctx.xmlTableFunctionAlias().alias().getText()); + if (null != ctx.regularFunction()) { + FunctionSegment functionSegment = (FunctionSegment) visit(ctx.regularFunction()); + FunctionTableSegment result = new FunctionTableSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), functionSegment); + if (null != ctx.alias()) { + result.setAlias((AliasSegment) visit(ctx.alias())); + } + return result; } - return result; + if (null != ctx.xmlTableFunction()) { + XmlTableFunctionSegment functionSegment = (XmlTableFunctionSegment) visit(ctx.xmlTableFunction()); + FunctionTableSegment result = new FunctionTableSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), functionSegment); + if (null != ctx.alias()) { + result.setAlias((AliasSegment) visit(ctx.alias())); + } + return result; + } + return visit(ctx.selectTableReference()); } @Override @@ -986,7 +1047,26 @@ public ASTNode visitShardsClause(final ShardsClauseContext ctx) { @Override public ASTNode visitQueryTableExprClause(final QueryTableExprClauseContext ctx) { - return visit(ctx.queryTableExpr()); + ASTNode result = visit(ctx.queryTableExpr()); + if (null != ctx.pivotClause()) { + PivotSegment pivotClause = (PivotSegment) visit(ctx.pivotClause()); + if (result instanceof SubqueryTableSegment) { + ((SubqueryTableSegment) result).setPivot(pivotClause); + } + if (result instanceof SimpleTableSegment) { + ((SimpleTableSegment) result).setPivot(pivotClause); + } + } + if (null != ctx.unpivotClause()) { + PivotSegment pivotClause = (PivotSegment) visit(ctx.unpivotClause()); + if (result instanceof SubqueryTableSegment) { + ((SubqueryTableSegment) result).setPivot(pivotClause); + } + if (result instanceof SimpleTableSegment) { + 
((SimpleTableSegment) result).setPivot(pivotClause); + } + } + return result; } @Override @@ -1131,18 +1211,53 @@ private List generateColumnsFromforUpdateClauseOption(final ForUp @Override public ASTNode visitMerge(final MergeContext ctx) { OracleMergeStatement result = new OracleMergeStatement(); - result.setTarget((SimpleTableSegment) visit(ctx.intoClause())); + result.setTarget((TableSegment) visit(ctx.intoClause())); result.setSource((TableSegment) visit(ctx.usingClause())); result.setExpr((ExpressionSegment) visit(ctx.usingClause().expr())); if (null != ctx.mergeUpdateClause()) { - result.getUpdate().setSetAssignment((SetAssignmentSegment) visit(ctx.mergeUpdateClause().mergeSetAssignmentsClause())); - if (null != ctx.mergeUpdateClause().whereClause()) { - result.getUpdate().setWhere((WhereSegment) visit(ctx.mergeUpdateClause().whereClause())); - } - if (null != ctx.mergeUpdateClause().deleteWhereClause()) { - result.getDelete().setWhere((WhereSegment) visit(ctx.mergeUpdateClause().deleteWhereClause())); + result.setUpdate((UpdateStatement) visitMergeUpdateClause(ctx.mergeUpdateClause())); + } + if (null != ctx.mergeInsertClause()) { + result.setInsert((InsertStatement) visitMergeInsertClause(ctx.mergeInsertClause())); + } + return result; + } + + @SuppressWarnings("unchecked") + @Override + public ASTNode visitMergeInsertClause(final MergeInsertClauseContext ctx) { + OracleInsertStatement result = new OracleInsertStatement(); + if (null != ctx.mergeInsertColumn()) { + result.setInsertColumns((InsertColumnsSegment) visit(ctx.mergeInsertColumn())); + } + if (null != ctx.mergeColumnValue()) { + result.getValues().addAll(((CollectionValue) visit(ctx.mergeColumnValue())).getValue()); + } + if (null != ctx.whereClause()) { + result.setWhere((WhereSegment) visit(ctx.whereClause())); + } + return result; + } + + @Override + public ASTNode visitMergeInsertColumn(final MergeInsertColumnContext ctx) { + Collection columnSegments = new 
ArrayList<>(ctx.columnName().size()); + for (ColumnNameContext each : ctx.columnName()) { + if (null != each.name()) { + columnSegments.add((ColumnSegment) visit(each)); } } + return new InsertColumnsSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), columnSegments); + } + + @Override + public ASTNode visitMergeColumnValue(final MergeColumnValueContext ctx) { + CollectionValue result = new CollectionValue<>(); + List segments = new LinkedList<>(); + for (ExprContext each : ctx.expr()) { + segments.add(null == each ? new CommonExpressionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), ctx.getText()) : (ExpressionSegment) visit(each)); + } + result.getValue().add(new InsertValuesSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), segments)); return result; } @@ -1155,7 +1270,16 @@ public ASTNode visitIntoClause(final IntoClauseContext ctx) { } return result; } - SimpleTableSegment result = (SimpleTableSegment) visit(ctx.viewName()); + if (null != ctx.viewName()) { + SimpleTableSegment result = (SimpleTableSegment) visit(ctx.viewName()); + if (null != ctx.alias()) { + result.setAlias((AliasSegment) visit(ctx.alias())); + } + return result; + } + OracleSelectStatement subquery = (OracleSelectStatement) visit(ctx.subquery()); + SubquerySegment subquerySegment = new SubquerySegment(ctx.subquery().start.getStartIndex(), ctx.subquery().stop.getStopIndex(), subquery); + SubqueryTableSegment result = new SubqueryTableSegment(subquerySegment); if (null != ctx.alias()) { result.setAlias((AliasSegment) visit(ctx.alias())); } @@ -1189,13 +1313,13 @@ public ASTNode visitUsingClause(final UsingClauseContext ctx) { @Override public ASTNode visitMergeUpdateClause(final MergeUpdateClauseContext ctx) { - OracleMergeStatement result = new OracleMergeStatement(); - result.getUpdate().setSetAssignment((SetAssignmentSegment) visit(ctx.mergeSetAssignmentsClause())); + OracleUpdateStatement result = new OracleUpdateStatement(); + 
result.setSetAssignment((SetAssignmentSegment) visit(ctx.mergeSetAssignmentsClause())); if (null != ctx.whereClause()) { - result.getUpdate().setWhere((WhereSegment) visit(ctx.whereClause())); + result.setWhere((WhereSegment) visit(ctx.whereClause())); } if (null != ctx.deleteWhereClause()) { - result.getDelete().setWhere((WhereSegment) visit(ctx.deleteWhereClause())); + result.setDeleteWhere((WhereSegment) visit(ctx.deleteWhereClause())); } return result; } diff --git a/parser/sql/dialect/postgresql/src/main/antlr4/imports/postgresql/DDLStatement.g4 b/parser/sql/dialect/postgresql/src/main/antlr4/imports/postgresql/DDLStatement.g4 index 9cfdf58e72170..de2ad41bd01d0 100644 --- a/parser/sql/dialect/postgresql/src/main/antlr4/imports/postgresql/DDLStatement.g4 +++ b/parser/sql/dialect/postgresql/src/main/antlr4/imports/postgresql/DDLStatement.g4 @@ -975,7 +975,7 @@ alterMaterializedViewClauses ; executeStmt - : EXECUTE name executeParamClause + : EXECUTE name executeParamClause? ; createMaterializedView @@ -1152,7 +1152,7 @@ alterTypeClauses | RENAME TO name | RENAME ATTRIBUTE name TO name dropBehavior? | SET SCHEMA name - | SET LP_ operatorDefList RP_ + | SET LP_ typeDefList RP_ | OWNER TO roleSpec ; @@ -1166,6 +1166,22 @@ alterTypeCmd | ALTER ATTRIBUTE colId setData? TYPE typeName collateClause? dropBehavior? 
; +typeDefList + : typeDefElem (COMMA_ typeDefElem)* + ; + +typeDefElem + : (RECEIVE | SEND | TYPMOD_IN | TYPMOD_OUT | ANALYZE | SUBSCRIPT | STORAGE) EQ_ (NONE | typeDefArg) + ; + +typeDefArg + : funcType + | reservedKeyword + | qualAllOp + | numericOnly + | STRING_ + ; + alterUserMapping : ALTER USER MAPPING FOR authIdent SERVER name alterGenericOptions ; diff --git a/parser/sql/dialect/postgresql/src/main/java/org/apache/shardingsphere/sql/parser/postgresql/visitor/statement/PostgreSQLStatementVisitor.java b/parser/sql/dialect/postgresql/src/main/java/org/apache/shardingsphere/sql/parser/postgresql/visitor/statement/PostgreSQLStatementVisitor.java index dfac62fcb7d0c..57e5005b2f56b 100644 --- a/parser/sql/dialect/postgresql/src/main/java/org/apache/shardingsphere/sql/parser/postgresql/visitor/statement/PostgreSQLStatementVisitor.java +++ b/parser/sql/dialect/postgresql/src/main/java/org/apache/shardingsphere/sql/parser/postgresql/visitor/statement/PostgreSQLStatementVisitor.java @@ -116,6 +116,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.enums.JoinType; import org.apache.shardingsphere.sql.parser.sql.common.enums.OrderDirection; import org.apache.shardingsphere.sql.parser.sql.common.enums.ParameterMarkerType; +import org.apache.shardingsphere.sql.parser.sql.common.enums.SubqueryType; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.constraint.ConstraintSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.index.IndexNameSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.index.IndexSegment; @@ -225,7 +226,7 @@ public final ASTNode visitNumberLiterals(final NumberLiteralsContext ctx) { @Override public final ASTNode visitIdentifier(final IdentifierContext ctx) { UnreservedWordContext unreservedWord = ctx.unreservedWord(); - return null != unreservedWord ? visit(unreservedWord) : new IdentifierValue(ctx.getText()); + return null == unreservedWord ? 
new IdentifierValue(ctx.getText()) : visit(unreservedWord); } @Override @@ -413,7 +414,11 @@ public ASTNode visitCExpr(final CExprContext ctx) { private ExpressionSegment createSubqueryExpressionSegment(final CExprContext ctx) { SubquerySegment subquerySegment = new SubquerySegment(ctx.selectWithParens().getStart().getStartIndex(), ctx.selectWithParens().getStop().getStopIndex(), (PostgreSQLSelectStatement) visit(ctx.selectWithParens())); - return null == ctx.EXISTS() ? new SubqueryExpressionSegment(subquerySegment) : new ExistsSubqueryExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), subquerySegment); + if (null != ctx.EXISTS()) { + subquerySegment.setSubqueryType(SubqueryType.EXISTS_SUBQUERY); + return new ExistsSubqueryExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), subquerySegment); + } + return new SubqueryExpressionSegment(subquerySegment); } @Override @@ -503,7 +508,7 @@ public ASTNode visitAexprConst(final AexprConstContext ctx) { } if (null != ctx.constTypeName() || null != ctx.funcName() && null == ctx.LP_()) { LiteralExpressionSegment expression = new LiteralExpressionSegment(ctx.STRING_().getSymbol().getStartIndex(), ctx.STRING_().getSymbol().getStopIndex(), value.getValue().toString()); - String dataType = null != ctx.constTypeName() ? ctx.constTypeName().getText() : ctx.funcName().getText(); + String dataType = null == ctx.constTypeName() ? ctx.funcName().getText() : ctx.constTypeName().getText(); return new TypeCastExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText(), expression, dataType); } return SQLUtils.createLiteralExpression(value, ctx.start.getStartIndex(), ctx.stop.getStopIndex(), ctx.getText()); @@ -626,7 +631,7 @@ public final ASTNode visitSortClause(final SortClauseContext ctx) { @Override public final ASTNode visitSortby(final SortbyContext ctx) { - OrderDirection orderDirection = null != ctx.ascDesc() ? 
generateOrderDirection(ctx.ascDesc()) : OrderDirection.ASC; + OrderDirection orderDirection = null == ctx.ascDesc() ? OrderDirection.ASC : generateOrderDirection(ctx.ascDesc()); NullsOrderType nullsOrderType = generateNullsOrderType(ctx.nullsOrder()); ASTNode expr = visit(ctx.aExpr()); if (expr instanceof ColumnSegment) { @@ -751,6 +756,13 @@ public ASTNode visitQualifiedName(final QualifiedNameContext ctx) { @Override public ASTNode visitInsertRest(final InsertRestContext ctx) { PostgreSQLInsertStatement result = new PostgreSQLInsertStatement(); + ValuesClauseContext valuesClause = ctx.select().selectNoParens().selectClauseN().simpleSelect().valuesClause(); + if (null == valuesClause) { + PostgreSQLSelectStatement selectStatement = (PostgreSQLSelectStatement) visit(ctx.select()); + result.setInsertSelect(new SubquerySegment(ctx.select().start.getStartIndex(), ctx.select().stop.getStopIndex(), selectStatement)); + } else { + result.getValues().addAll(createInsertValuesSegments(valuesClause)); + } if (null == ctx.insertColumnList()) { result.setInsertColumns(new InsertColumnsSegment(ctx.start.getStartIndex() - 1, ctx.start.getStartIndex() - 1, Collections.emptyList())); } else { @@ -759,13 +771,6 @@ public ASTNode visitInsertRest(final InsertRestContext ctx) { InsertColumnsSegment insertColumnsSegment = new InsertColumnsSegment(insertColumns.start.getStartIndex() - 1, insertColumns.stop.getStopIndex() + 1, columns.getValue()); result.setInsertColumns(insertColumnsSegment); } - ValuesClauseContext valuesClause = ctx.select().selectNoParens().selectClauseN().simpleSelect().valuesClause(); - if (null == valuesClause) { - PostgreSQLSelectStatement selectStatement = (PostgreSQLSelectStatement) visit(ctx.select()); - result.setInsertSelect(new SubquerySegment(ctx.select().start.getStartIndex(), ctx.select().stop.getStopIndex(), selectStatement)); - } else { - result.getValues().addAll(createInsertValuesSegments(valuesClause)); - } return result; } @@ -1123,7 +1128,7 @@ 
public ASTNode visitTableReference(final TableReferenceContext ctx) { if (null != ctx.selectWithParens()) { PostgreSQLSelectStatement select = (PostgreSQLSelectStatement) visit(ctx.selectWithParens()); SubquerySegment subquery = new SubquerySegment(ctx.selectWithParens().start.getStartIndex(), ctx.selectWithParens().stop.getStopIndex(), select); - AliasSegment alias = null != ctx.aliasClause() ? (AliasSegment) visit(ctx.aliasClause()) : null; + AliasSegment alias = null == ctx.aliasClause() ? null : (AliasSegment) visit(ctx.aliasClause()); SubqueryTableSegment result = new SubqueryTableSegment(subquery); result.setAlias(alias); return result; @@ -1135,14 +1140,14 @@ public ASTNode visitTableReference(final TableReferenceContext ctx) { } JoinTableSegment result = new JoinTableSegment(); result.setLeft((TableSegment) visit(ctx.tableReference())); - int startIndex = null != ctx.LP_() ? ctx.LP_().getSymbol().getStartIndex() : ctx.tableReference().start.getStartIndex(); + int startIndex = null == ctx.LP_() ? ctx.tableReference().start.getStartIndex() : ctx.LP_().getSymbol().getStartIndex(); int stopIndex = 0; AliasSegment alias = null; if (null == ctx.aliasClause()) { - stopIndex = null != ctx.RP_() ? ctx.RP_().getSymbol().getStopIndex() : ctx.tableReference().start.getStopIndex(); + stopIndex = null == ctx.RP_() ? ctx.tableReference().start.getStopIndex() : ctx.RP_().getSymbol().getStopIndex(); } else { alias = (AliasSegment) visit(ctx.aliasClause()); - startIndex = null != ctx.RP_() ? ctx.RP_().getSymbol().getStopIndex() : ctx.joinedTable().stop.getStopIndex(); + startIndex = null == ctx.RP_() ? ctx.joinedTable().stop.getStopIndex() : ctx.RP_().getSymbol().getStopIndex(); } result.setStartIndex(startIndex); result.setStopIndex(stopIndex); @@ -1193,7 +1198,7 @@ private static String getNaturalJoinType(final NaturalJoinTypeContext ctx) { private static String getOutJoinType(final OuterJoinTypeContext ctx) { if (null == ctx.FULL()) { - return null != ctx.LEFT() ? 
JoinType.LEFT.name() : JoinType.RIGHT.name(); + return null == ctx.LEFT() ? JoinType.RIGHT.name() : JoinType.LEFT.name(); } return JoinType.FULL.name(); } diff --git a/parser/sql/dialect/postgresql/src/main/java/org/apache/shardingsphere/sql/parser/postgresql/visitor/statement/type/PostgreSQLDDLStatementVisitor.java b/parser/sql/dialect/postgresql/src/main/java/org/apache/shardingsphere/sql/parser/postgresql/visitor/statement/type/PostgreSQLDDLStatementVisitor.java index b7077430c171e..f965f31cec692 100644 --- a/parser/sql/dialect/postgresql/src/main/java/org/apache/shardingsphere/sql/parser/postgresql/visitor/statement/type/PostgreSQLDDLStatementVisitor.java +++ b/parser/sql/dialect/postgresql/src/main/java/org/apache/shardingsphere/sql/parser/postgresql/visitor/statement/type/PostgreSQLDDLStatementVisitor.java @@ -541,7 +541,7 @@ public ASTNode visitAddColumnSpecification(final AddColumnSpecificationContext c ColumnDefinitionContext columnDefinition = ctx.columnDefinition(); if (null != columnDefinition) { AddColumnDefinitionSegment addColumnDefinition = new AddColumnDefinitionSegment( - ctx.columnDefinition().getStart().getStartIndex(), columnDefinition.getStop().getStopIndex(), Collections.singletonList((ColumnDefinitionSegment) visit(columnDefinition))); + ctx.columnDefinition().getStart().getStartIndex(), columnDefinition.getStop().getStopIndex(), Collections.singleton((ColumnDefinitionSegment) visit(columnDefinition))); result.getValue().add(addColumnDefinition); } return result; @@ -601,7 +601,7 @@ public ASTNode visitModifyColumnSpecification(final ModifyColumnSpecificationCon @Override public ASTNode visitDropColumnSpecification(final DropColumnSpecificationContext ctx) { - return new DropColumnDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), Collections.singletonList((ColumnSegment) visit(ctx.columnName()))); + return new DropColumnDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), 
Collections.singleton((ColumnSegment) visit(ctx.columnName()))); } @Override @@ -612,7 +612,7 @@ public ASTNode visitRenameColumnSpecification(final RenameColumnSpecificationCon @SuppressWarnings("unchecked") @Override public ASTNode visitDropTable(final DropTableContext ctx) { - boolean containsCascade = null != ctx.dropTableOpt() && null != ctx.dropTableOpt().CASCADE(); + boolean containsCascade = ctx.dropTableOpt() != null && null != ctx.dropTableOpt().CASCADE(); PostgreSQLDropTableStatement result = new PostgreSQLDropTableStatement(null != ctx.ifExists(), containsCascade); result.getTables().addAll(((CollectionValue) visit(ctx.tableNames())).getValue()); return result; diff --git a/parser/sql/dialect/postgresql/src/test/java/org/apache/shardingsphere/test/it/sql/parser/it/postgresql/external/ExternalPostgreSQLParserIT.java b/parser/sql/dialect/postgresql/src/test/java/org/apache/shardingsphere/test/it/sql/parser/it/postgresql/external/ExternalPostgreSQLParserIT.java index 892e124c2aced..a3e0dbd070c7e 100644 --- a/parser/sql/dialect/postgresql/src/test/java/org/apache/shardingsphere/test/it/sql/parser/it/postgresql/external/ExternalPostgreSQLParserIT.java +++ b/parser/sql/dialect/postgresql/src/test/java/org/apache/shardingsphere/test/it/sql/parser/it/postgresql/external/ExternalPostgreSQLParserIT.java @@ -18,10 +18,11 @@ package org.apache.shardingsphere.test.it.sql.parser.it.postgresql.external; import org.apache.shardingsphere.test.it.sql.parser.external.ExternalSQLParserIT; -import org.apache.shardingsphere.test.it.sql.parser.external.loader.DefaultExternalTestParameterLoader; -import org.apache.shardingsphere.test.loader.ExternalCaseSettings; +import org.apache.shardingsphere.test.it.sql.parser.external.loader.StandardExternalTestParameterLoadTemplate; +import org.apache.shardingsphere.test.it.sql.parser.loader.ExternalCaseSettings; -@ExternalCaseSettings(value = "PostgreSQL", caseURL = ExternalPostgreSQLParserIT.CASE_URL, resultURL = 
ExternalPostgreSQLParserIT.RESULT_URL, caseLoader = DefaultExternalTestParameterLoader.class) +@ExternalCaseSettings(value = "PostgreSQL", caseURL = ExternalPostgreSQLParserIT.CASE_URL, resultURL = ExternalPostgreSQLParserIT.RESULT_URL, + template = StandardExternalTestParameterLoadTemplate.class) class ExternalPostgreSQLParserIT extends ExternalSQLParserIT { static final String CASE_URL = "https://github.com/postgres/postgres/tree/master/src/test/regress/sql"; diff --git a/parser/sql/dialect/sql92/src/main/java/org/apache/shardingsphere/sql/parser/sql92/visitor/statement/SQL92StatementVisitor.java b/parser/sql/dialect/sql92/src/main/java/org/apache/shardingsphere/sql/parser/sql92/visitor/statement/SQL92StatementVisitor.java index 41b07f4d8b417..265e97faef033 100644 --- a/parser/sql/dialect/sql92/src/main/java/org/apache/shardingsphere/sql/parser/sql92/visitor/statement/SQL92StatementVisitor.java +++ b/parser/sql/dialect/sql92/src/main/java/org/apache/shardingsphere/sql/parser/sql92/visitor/statement/SQL92StatementVisitor.java @@ -175,7 +175,7 @@ public final ASTNode visitNullValueLiterals(final NullValueLiteralsContext ctx) @Override public final ASTNode visitIdentifier(final IdentifierContext ctx) { UnreservedWordContext unreservedWord = ctx.unreservedWord(); - return null != unreservedWord ? visit(unreservedWord) : new IdentifierValue(ctx.getText()); + return null == unreservedWord ? new IdentifierValue(ctx.getText()) : visit(unreservedWord); } @Override @@ -290,7 +290,7 @@ private ASTNode createCompareSegment(final BooleanPrimaryContext ctx) { } else { right = (ExpressionSegment) visit(ctx.subquery()); } - String operator = null != ctx.SAFE_EQ_() ? ctx.SAFE_EQ_().getText() : ctx.comparisonOperator().getText(); + String operator = null == ctx.SAFE_EQ_() ? 
ctx.comparisonOperator().getText() : ctx.SAFE_EQ_().getText(); String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); } @@ -315,7 +315,7 @@ private BinaryOperationExpression createBinaryOperationExpressionFromLike(final for (SimpleExprContext each : ctx.simpleExpr()) { right.getItems().add((ExpressionSegment) visit(each)); } - String operator = null != ctx.NOT() ? "NOT LIKE" : "LIKE"; + String operator = null == ctx.NOT() ? "LIKE" : "NOT LIKE"; String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); } @@ -524,7 +524,7 @@ public final ASTNode visitOrderByClause(final OrderByClauseContext ctx) { @Override public final ASTNode visitOrderByItem(final OrderByItemContext ctx) { - OrderDirection orderDirection = null != ctx.DESC() ? OrderDirection.DESC : OrderDirection.ASC; + OrderDirection orderDirection = null == ctx.DESC() ? 
OrderDirection.ASC : OrderDirection.DESC; if (null != ctx.columnName()) { ColumnSegment column = (ColumnSegment) visit(ctx.columnName()); return new ColumnOrderByItemSegment(column, orderDirection, null); diff --git a/parser/sql/dialect/sql92/src/main/java/org/apache/shardingsphere/sql/parser/sql92/visitor/statement/type/SQL92DDLStatementVisitor.java b/parser/sql/dialect/sql92/src/main/java/org/apache/shardingsphere/sql/parser/sql92/visitor/statement/type/SQL92DDLStatementVisitor.java index 56be358b2b2ff..d686dc21308db 100644 --- a/parser/sql/dialect/sql92/src/main/java/org/apache/shardingsphere/sql/parser/sql92/visitor/statement/type/SQL92DDLStatementVisitor.java +++ b/parser/sql/dialect/sql92/src/main/java/org/apache/shardingsphere/sql/parser/sql92/visitor/statement/type/SQL92DDLStatementVisitor.java @@ -205,8 +205,7 @@ public ASTNode visitModifyColumnSpecification(final ModifyColumnSpecificationCon @Override public ASTNode visitDropColumnSpecification(final DropColumnSpecificationContext ctx) { - return new DropColumnDefinitionSegment( - ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), Collections.singletonList((ColumnSegment) visit(ctx.columnName()))); + return new DropColumnDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), Collections.singleton((ColumnSegment) visit(ctx.columnName()))); } @SuppressWarnings("unchecked") diff --git a/parser/sql/dialect/sql92/src/main/java/org/apache/shardingsphere/sql/parser/sql92/visitor/statement/type/SQL92DMLStatementVisitor.java b/parser/sql/dialect/sql92/src/main/java/org/apache/shardingsphere/sql/parser/sql92/visitor/statement/type/SQL92DMLStatementVisitor.java index be82a3f17077f..bda004bdfe171 100644 --- a/parser/sql/dialect/sql92/src/main/java/org/apache/shardingsphere/sql/parser/sql92/visitor/statement/type/SQL92DMLStatementVisitor.java +++ b/parser/sql/dialect/sql92/src/main/java/org/apache/shardingsphere/sql/parser/sql92/visitor/statement/type/SQL92DMLStatementVisitor.java 
@@ -340,7 +340,7 @@ private ASTNode createProjection(final ProjectionContext ctx, final AliasSegment if (projection instanceof BinaryOperationExpression) { BinaryOperationExpression binaryExpression = (BinaryOperationExpression) projection; int startIndex = binaryExpression.getStartIndex(); - int stopIndex = null != alias ? alias.getStopIndex() : binaryExpression.getStopIndex(); + int stopIndex = null == alias ? binaryExpression.getStopIndex() : alias.getStopIndex(); ExpressionProjectionSegment result = new ExpressionProjectionSegment(startIndex, stopIndex, binaryExpression.getText(), binaryExpression); result.setAlias(alias); return result; diff --git a/parser/sql/dialect/sqlserver/src/main/java/org/apache/shardingsphere/sql/parser/sqlserver/visitor/statement/SQLServerStatementVisitor.java b/parser/sql/dialect/sqlserver/src/main/java/org/apache/shardingsphere/sql/parser/sqlserver/visitor/statement/SQLServerStatementVisitor.java index 942d8ef3eabf3..62e9d18a1b9a7 100644 --- a/parser/sql/dialect/sqlserver/src/main/java/org/apache/shardingsphere/sql/parser/sqlserver/visitor/statement/SQLServerStatementVisitor.java +++ b/parser/sql/dialect/sqlserver/src/main/java/org/apache/shardingsphere/sql/parser/sqlserver/visitor/statement/SQLServerStatementVisitor.java @@ -259,7 +259,7 @@ public final ASTNode visitNullValueLiterals(final NullValueLiteralsContext ctx) @Override public final ASTNode visitIdentifier(final IdentifierContext ctx) { - return null != ctx.regularIdentifier() ? visit(ctx.regularIdentifier()) : visit(ctx.delimitedIdentifier()); + return null == ctx.regularIdentifier() ? visit(ctx.delimitedIdentifier()) : visit(ctx.regularIdentifier()); } @Override @@ -405,7 +405,7 @@ private ASTNode createCompareSegment(final BooleanPrimaryContext ctx) { } else { right = (ExpressionSegment) visit(ctx.subquery()); } - String operator = null != ctx.SAFE_EQ_() ? ctx.SAFE_EQ_().getText() : ctx.comparisonOperator().getText(); + String operator = null == ctx.SAFE_EQ_() ? 
ctx.comparisonOperator().getText() : ctx.SAFE_EQ_().getText(); String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); } @@ -430,7 +430,7 @@ private BinaryOperationExpression createBinaryOperationExpressionFromLike(final for (SimpleExprContext each : ctx.simpleExpr()) { right.getItems().add((ExpressionSegment) visit(each)); } - String operator = null != ctx.NOT() ? "NOT LIKE" : "LIKE"; + String operator = null == ctx.NOT() ? "LIKE" : "NOT LIKE"; String text = ctx.start.getInputStream().getText(new Interval(ctx.start.getStartIndex(), ctx.stop.getStopIndex())); return new BinaryOperationExpression(ctx.start.getStartIndex(), ctx.stop.getStopIndex(), left, right, operator, text); } @@ -645,7 +645,7 @@ private void calculateParameterCount(final Collection exprContexts) @Override public final ASTNode visitOrderByItem(final OrderByItemContext ctx) { - OrderDirection orderDirection = null != ctx.DESC() ? OrderDirection.DESC : OrderDirection.ASC; + OrderDirection orderDirection = null == ctx.DESC() ? OrderDirection.ASC : OrderDirection.DESC; if (null != ctx.columnName()) { ColumnSegment column = (ColumnSegment) visit(ctx.columnName()); return new ColumnOrderByItemSegment(column, orderDirection, null); @@ -1164,7 +1164,7 @@ private ASTNode createProjection(final ProjectionContext ctx, final AliasSegment if (projection instanceof BinaryOperationExpression) { BinaryOperationExpression binaryExpression = (BinaryOperationExpression) projection; int startIndex = binaryExpression.getStartIndex(); - int stopIndex = null != alias ? alias.getStopIndex() : binaryExpression.getStopIndex(); + int stopIndex = null == alias ? 
binaryExpression.getStopIndex() : alias.getStopIndex(); ExpressionProjectionSegment result = new ExpressionProjectionSegment(startIndex, stopIndex, binaryExpression.getText(), binaryExpression); result.setAlias(alias); return result; diff --git a/parser/sql/dialect/sqlserver/src/main/java/org/apache/shardingsphere/sql/parser/sqlserver/visitor/statement/type/SQLServerDCLStatementVisitor.java b/parser/sql/dialect/sqlserver/src/main/java/org/apache/shardingsphere/sql/parser/sqlserver/visitor/statement/type/SQLServerDCLStatementVisitor.java index 4f8d035e94bd9..7a108e8aa15e3 100644 --- a/parser/sql/dialect/sqlserver/src/main/java/org/apache/shardingsphere/sql/parser/sqlserver/visitor/statement/type/SQLServerDCLStatementVisitor.java +++ b/parser/sql/dialect/sqlserver/src/main/java/org/apache/shardingsphere/sql/parser/sqlserver/visitor/statement/type/SQLServerDCLStatementVisitor.java @@ -208,7 +208,7 @@ private Optional findTableSegment(final OnClassClauseContext } private Optional findTableSegment(final OnClassTypeClauseContext ctx) { - return null != ctx && null != ctx.classType() && null != ctx.classType().OBJECT() ? Optional.of((SimpleTableSegment) visit(ctx.securable())) : Optional.empty(); + return null == ctx || null == ctx.classType() || null == ctx.classType().OBJECT() ? 
Optional.empty() : Optional.of((SimpleTableSegment) visit(ctx.securable())); } @Override diff --git a/parser/sql/dialect/sqlserver/src/main/java/org/apache/shardingsphere/sql/parser/sqlserver/visitor/statement/type/SQLServerDDLStatementVisitor.java b/parser/sql/dialect/sqlserver/src/main/java/org/apache/shardingsphere/sql/parser/sqlserver/visitor/statement/type/SQLServerDDLStatementVisitor.java index d32374d9e5649..e434e0263fba1 100644 --- a/parser/sql/dialect/sqlserver/src/main/java/org/apache/shardingsphere/sql/parser/sqlserver/visitor/statement/type/SQLServerDDLStatementVisitor.java +++ b/parser/sql/dialect/sqlserver/src/main/java/org/apache/shardingsphere/sql/parser/sqlserver/visitor/statement/type/SQLServerDDLStatementVisitor.java @@ -394,7 +394,9 @@ public ASTNode visitCreateTrigger(final CreateTriggerContext ctx) { @Override public ASTNode visitCreateSequence(final CreateSequenceContext ctx) { - return new SQLServerCreateSequenceStatement(); + SQLServerCreateSequenceStatement result = new SQLServerCreateSequenceStatement(); + result.setSequenceName(ctx.sequenceName().name().getText()); + return result; } @Override diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/enums/AggregationType.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/enums/AggregationType.java index a1af72e9938c9..5a0c967ea15c0 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/enums/AggregationType.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/enums/AggregationType.java @@ -24,7 +24,7 @@ */ public enum AggregationType { - MAX, MIN, SUM, COUNT, AVG, BIT_XOR, GROUP_CONCAT, COLLECT; + MAX, MIN, SUM, COUNT, AVG, BIT_XOR, GROUP_CONCAT; /** * Is aggregation type. 
diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/enums/SubqueryType.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/enums/SubqueryType.java index 00a07321f49bf..383c9ede9d4e4 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/enums/SubqueryType.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/enums/SubqueryType.java @@ -22,5 +22,5 @@ */ public enum SubqueryType { - PROJECTION_SUBQUERY, TABLE_SUBQUERY, PREDICATE_SUBQUERY, INSERT_SELECT_SUBQUERY, EXISTS_SUBQUERY + PROJECTION_SUBQUERY, TABLE_SUBQUERY, JOIN_SUBQUERY, PREDICATE_SUBQUERY, INSERT_SELECT_SUBQUERY, EXISTS_SUBQUERY } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/ddl/column/ColumnDefinitionSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/ddl/column/ColumnDefinitionSegment.java index 4376d58774eb6..6372e4eb6c50b 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/ddl/column/ColumnDefinitionSegment.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/ddl/column/ColumnDefinitionSegment.java @@ -19,6 +19,7 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; +import lombok.Setter; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.CreateDefinitionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.DataTypeSegment; @@ -47,4 +48,7 @@ public final class ColumnDefinitionSegment implements CreateDefinitionSegment { private final boolean notNull; private final Collection referencedTables = new LinkedList<>(); + + @Setter + private boolean isRef; } diff --git 
a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/ddl/column/alter/ModifyCollectionRetrievalSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/ddl/column/alter/ModifyCollectionRetrievalSegment.java new file mode 100644 index 0000000000000..1095e964701e0 --- /dev/null +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/ddl/column/alter/ModifyCollectionRetrievalSegment.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.alter; + +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.AlterDefinitionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; + +/** + * Modify collection retrieval segment. 
+ */ +@RequiredArgsConstructor +@Getter +public final class ModifyCollectionRetrievalSegment implements AlterDefinitionSegment { + + private final int startIndex; + + private final int stopIndex; + + private final SimpleTableSegment nestedTable; +} diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/column/ColumnSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/column/ColumnSegment.java index 478f1f4d6ca80..f7269bb1f9dd7 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/column/ColumnSegment.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/column/ColumnSegment.java @@ -18,11 +18,11 @@ package org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column; import lombok.Getter; -import lombok.RequiredArgsConstructor; import lombok.Setter; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerAvailable; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.bounded.ColumnSegmentBoundedInfo; import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import java.util.List; @@ -32,7 +32,6 @@ /** * Column segment. 
*/ -@RequiredArgsConstructor @Getter @Setter public final class ColumnSegment implements ExpressionSegment, OwnerAvailable { @@ -47,13 +46,18 @@ public final class ColumnSegment implements ExpressionSegment, OwnerAvailable { private OwnerSegment owner; - private IdentifierValue originalDatabase; + private ColumnSegmentBoundedInfo columnBoundedInfo; - private IdentifierValue originalSchema; + private ColumnSegmentBoundedInfo otherUsingColumnBoundedInfo; - private IdentifierValue originalTable; + private boolean isVariable; - private IdentifierValue originalColumn; + public ColumnSegment(final int startIndex, final int stopIndex, final IdentifierValue identifier) { + this.startIndex = startIndex; + this.stopIndex = stopIndex; + this.identifier = identifier; + columnBoundedInfo = new ColumnSegmentBoundedInfo(identifier); + } /** * Get qualified name with quote characters. diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/CaseWhenExpression.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/CaseWhenExpression.java index 23a2e832786a8..21b18d0f8d117 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/CaseWhenExpression.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/CaseWhenExpression.java @@ -43,6 +43,6 @@ public final class CaseWhenExpression implements ExpressionSegment { @Override public String getText() { - return caseExpr.getText(); + return null == caseExpr ? 
"" : caseExpr.getText(); } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/CollateExpression.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/CollateExpression.java index 0e85b4f4dde9e..d2bf7fd19862a 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/CollateExpression.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/CollateExpression.java @@ -21,6 +21,8 @@ import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.SimpleExpressionSegment; +import java.util.Optional; + /** * Collate expression. */ @@ -34,8 +36,14 @@ public final class CollateExpression implements ExpressionSegment { private final SimpleExpressionSegment collateName; + private final ExpressionSegment expr; + @Override public String getText() { return collateName.getText(); } + + public Optional getExpr() { + return Optional.ofNullable(expr); + } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/RowExpression.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/RowExpression.java new file mode 100644 index 0000000000000..6b83a2eee75f6 --- /dev/null +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/RowExpression.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr; + +import lombok.Getter; +import lombok.RequiredArgsConstructor; + +import java.util.Collection; +import java.util.LinkedList; + +/** + * Row expression. + */ +@RequiredArgsConstructor +@Getter +public final class RowExpression implements ExpressionSegment { + + private final int startIndex; + + private final int stopIndex; + + private final Collection items = new LinkedList<>(); + + private final String text; +} diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/UnaryOperationExpression.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/UnaryOperationExpression.java new file mode 100644 index 0000000000000..6deb669176e7a --- /dev/null +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/UnaryOperationExpression.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr; + +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import lombok.Setter; + +/** + * Unary operation expression. + */ +@RequiredArgsConstructor +@Getter +@Setter +public final class UnaryOperationExpression implements ExpressionSegment { + + private final int startIndex; + + private final int stopIndex; + + private final ExpressionSegment expression; + + private final String operator; + + private final String text; +} diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/XmlElementFunctionSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/XmlElementFunctionSegment.java new file mode 100644 index 0000000000000..ae5b214af894f --- /dev/null +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/XmlElementFunctionSegment.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr; + +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.complex.ComplexExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; + +import java.util.Collection; +import java.util.LinkedList; + +/** + * Xml element function segment. + */ +@RequiredArgsConstructor +@Getter +public class XmlElementFunctionSegment implements ComplexExpressionSegment, ProjectionSegment { + + private final int startIndex; + + private final int stopIndex; + + private final String functionName; + + private final IdentifierValue identifier; + + private final Collection xmlAttributes = new LinkedList<>(); + + private final Collection parameters = new LinkedList<>(); + + private final String text; + + @Override + public String getColumnLabel() { + return text; + } +} diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/simple/ColumnWithJoinOperatorSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/simple/ColumnWithJoinOperatorSegment.java index 2959c6ded319f..66e8be1f1b155 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/simple/ColumnWithJoinOperatorSegment.java +++ 
b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/simple/ColumnWithJoinOperatorSegment.java @@ -20,13 +20,14 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; /** * Column with join operator segment. */ @RequiredArgsConstructor @Getter -public class ColumnWithJoinOperatorSegment implements SimpleExpressionSegment { +public class ColumnWithJoinOperatorSegment implements ExpressionSegment { private final int startIndex; diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/simple/ParameterMarkerExpressionSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/simple/ParameterMarkerExpressionSegment.java index b76780a648d49..3c26c4ac82e69 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/simple/ParameterMarkerExpressionSegment.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/expr/simple/ParameterMarkerExpressionSegment.java @@ -78,7 +78,7 @@ public int getParameterIndex() { @Override public int getStopIndex() { - return null != alias ? alias.getStopIndex() : stopIndex; + return null == alias ? 
stopIndex : alias.getStopIndex(); } @Override diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/item/ExpressionProjectionSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/item/ExpressionProjectionSegment.java index 8a41e1646bdfd..f031e60d12ba0 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/item/ExpressionProjectionSegment.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/dml/item/ExpressionProjectionSegment.java @@ -76,6 +76,6 @@ public Optional getAlias() { @Override public int getStopIndex() { - return null != alias ? alias.getStopIndex() : stopIndex; + return null == alias ? stopIndex : alias.getStopIndex(); } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/GrantLevelSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/GrantLevelSegment.java index 1f5f7019232e2..4ec9101db0958 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/GrantLevelSegment.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/GrantLevelSegment.java @@ -32,7 +32,7 @@ public final class GrantLevelSegment implements SQLSegment { private final int stopIndex; - private final String dbName; + private final String databaseName; private final String tableName; } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/PivotSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/PivotSegment.java new file mode 100644 index 0000000000000..8dcea1768a2bd --- /dev/null +++ 
b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/PivotSegment.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sql.parser.sql.common.segment.generic; + +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import lombok.Setter; +import org.apache.shardingsphere.sql.parser.sql.common.segment.SQLSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; + +import java.util.Collection; +import java.util.HashSet; + +/** + * Pivot segment. 
+ */ +@RequiredArgsConstructor +@Getter +public final class PivotSegment implements SQLSegment { + + private final int startIndex; + + private final int stopIndex; + + private final ColumnSegment pivotForColumn; + + private final Collection pivotInColumns; + + private final boolean isUnPivot; + + @Setter + private ColumnSegment unpivotColumn; + + public PivotSegment(final int startIndex, final int stopIndex, final ColumnSegment pivotForColumn, final Collection pivotInColumns) { + this.startIndex = startIndex; + this.stopIndex = stopIndex; + this.pivotForColumn = pivotForColumn; + this.pivotInColumns = pivotInColumns; + this.isUnPivot = false; + } + + /** + * Get pivot columns. + * + * @return pivot columns + */ + public Collection getPivotColumns() { + Collection result = new HashSet<>(pivotInColumns); + result.add(pivotForColumn); + if (null != unpivotColumn) { + result.add(unpivotColumn); + } + return result; + } +} diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/bounded/ColumnSegmentBoundedInfo.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/bounded/ColumnSegmentBoundedInfo.java new file mode 100644 index 0000000000000..b3232f8a2ec00 --- /dev/null +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/bounded/ColumnSegmentBoundedInfo.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sql.parser.sql.common.segment.generic.bounded; + +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; + +/** + * Column segment bounded info. + */ +@RequiredArgsConstructor +@Getter +public final class ColumnSegmentBoundedInfo { + + private final IdentifierValue originalDatabase; + + private final IdentifierValue originalSchema; + + private final IdentifierValue originalTable; + + private final IdentifierValue originalColumn; + + public ColumnSegmentBoundedInfo(final IdentifierValue originalColumn) { + this.originalDatabase = new IdentifierValue(""); + this.originalSchema = new IdentifierValue(""); + this.originalTable = new IdentifierValue(""); + this.originalColumn = originalColumn; + } +} diff --git a/infra/parser/src/main/java/org/apache/shardingsphere/infra/parser/ParserConfiguration.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/bounded/TableSegmentBoundedInfo.java similarity index 72% rename from infra/parser/src/main/java/org/apache/shardingsphere/infra/parser/ParserConfiguration.java rename to parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/bounded/TableSegmentBoundedInfo.java index 92ffd750955d2..0523a0c8e693b 100644 --- a/infra/parser/src/main/java/org/apache/shardingsphere/infra/parser/ParserConfiguration.java +++ 
b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/bounded/TableSegmentBoundedInfo.java @@ -15,22 +15,20 @@ * limitations under the License. */ -package org.apache.shardingsphere.infra.parser; +package org.apache.shardingsphere.sql.parser.sql.common.segment.generic.bounded; import lombok.Getter; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.sql.parser.api.CacheOption; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; /** - * Parser configuration. + * Table name bounded info. */ @RequiredArgsConstructor @Getter -public final class ParserConfiguration { +public final class TableSegmentBoundedInfo { - private final CacheOption sqlStatementCacheOption; + private final IdentifierValue originalDatabase; - private final CacheOption parseTreeCacheOption; - - private final boolean isParseComment; + private final IdentifierValue originalSchema; } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/XmlTableSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/FunctionTableSegment.java similarity index 73% rename from parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/XmlTableSegment.java rename to parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/FunctionTableSegment.java index 82a78abe28043..4611decb5ab3f 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/XmlTableSegment.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/FunctionTableSegment.java @@ -20,40 +20,44 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.Setter; -import 
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.XmlTableFunctionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.AliasSegment; import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import java.util.Optional; /** - * XML table segment. + * Function table segment. */ @RequiredArgsConstructor @Getter @Setter -public final class XmlTableSegment implements TableSegment { +public final class FunctionTableSegment implements TableSegment { private final int startIndex; private final int stopIndex; - private final XmlTableFunctionSegment xmlTableFunction; + private final ExpressionSegment tableFunction; @Setter - private String xmlTableFunctionAlias; + private AliasSegment alias; - @Override public Optional getAliasName() { - return Optional.empty(); + return null == alias ? Optional.empty() : Optional.ofNullable(alias.getIdentifier().getValue()); } @Override public Optional getAlias() { - return Optional.empty(); + return Optional.ofNullable(alias).map(AliasSegment::getIdentifier); } - @Override - public void setAlias(final AliasSegment alias) { + /** + * Get alias segment. 
+ * + * @return alias segment + */ + public Optional getAliasSegment() { + return Optional.ofNullable(alias); } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/JoinTableSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/JoinTableSegment.java index 2e5aa535e756b..2c381a3e96884 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/JoinTableSegment.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/JoinTableSegment.java @@ -53,7 +53,9 @@ public final class JoinTableSegment implements TableSegment { private List using = Collections.emptyList(); - private Collection joinTableProjectionSegments = new LinkedList<>(); + private List derivedUsing = Collections.emptyList(); + + private Collection derivedJoinTableProjectionSegments = new LinkedList<>(); @Override public Optional getAliasName() { diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/SimpleTableSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/SimpleTableSegment.java index ff8fc744d19a7..88358eaeae038 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/SimpleTableSegment.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/SimpleTableSegment.java @@ -23,6 +23,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.AliasSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerAvailable; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OwnerSegment; +import 
org.apache.shardingsphere.sql.parser.sql.common.segment.generic.PivotSegment; import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import java.util.Optional; @@ -42,6 +43,9 @@ public final class SimpleTableSegment implements TableSegment, OwnerAvailable { @Setter private AliasSegment alias; + @Setter + private PivotSegment pivot; + @Override public int getStartIndex() { if (null == owner) { @@ -78,4 +82,13 @@ public Optional getAlias() { public Optional getAliasSegment() { return Optional.ofNullable(alias); } + + /** + * Get pivot segment. + * + * @return pivot segment + */ + public Optional getPivot() { + return Optional.ofNullable(pivot); + } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/SubqueryTableSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/SubqueryTableSegment.java index 310bdfd3497c5..f562992bf1bf2 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/SubqueryTableSegment.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/SubqueryTableSegment.java @@ -22,6 +22,7 @@ import lombok.Setter; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.AliasSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.PivotSegment; import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import java.util.Optional; @@ -38,6 +39,9 @@ public final class SubqueryTableSegment implements TableSegment { @Setter private AliasSegment alias; + @Setter + private PivotSegment pivot; + @Override public Optional getAliasName() { return null == alias ? 
Optional.empty() : Optional.ofNullable(alias.getIdentifier().getValue()); @@ -48,6 +52,15 @@ public Optional getAlias() { return Optional.ofNullable(alias).map(AliasSegment::getIdentifier); } + /** + * Get pivot segment. + * + * @return pivot segment + */ + public Optional getPivot() { + return Optional.ofNullable(pivot); + } + @Override public int getStartIndex() { return subquery.getStartIndex(); diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/TableNameSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/TableNameSegment.java index f0bf6db777fdf..4f73e26de6750 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/TableNameSegment.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/table/TableNameSegment.java @@ -21,6 +21,7 @@ import lombok.RequiredArgsConstructor; import lombok.Setter; import org.apache.shardingsphere.sql.parser.sql.common.segment.SQLSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.bounded.TableSegmentBoundedInfo; import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; /** @@ -37,7 +38,5 @@ public final class TableNameSegment implements SQLSegment { private final IdentifierValue identifier; - private IdentifierValue originalDatabase; - - private IdentifierValue originalSchema; + private TableSegmentBoundedInfo tableBoundedInfo; } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/AbstractSQLStatement.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/AbstractSQLStatement.java index e92ce219d4dad..b65e567039760 100644 --- 
a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/AbstractSQLStatement.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/AbstractSQLStatement.java @@ -37,6 +37,8 @@ public abstract class AbstractSQLStatement implements SQLStatement { private final Collection commentSegments = new LinkedList<>(); + private final Collection variableNames = new HashSet<>(); + @Override public int getParameterCount() { return uniqueParameterIndexes.size(); diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/SQLStatement.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/SQLStatement.java index 2590c433ede72..edced0de37d6b 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/SQLStatement.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/SQLStatement.java @@ -21,6 +21,9 @@ import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.sql.parser.api.ASTNode; +import java.util.Collection; +import java.util.Collections; + /** * SQL statement. */ @@ -41,4 +44,13 @@ public interface SQLStatement extends ASTNode { default DatabaseType getDatabaseType() { return TypedSPILoader.getService(DatabaseType.class, "SQL92"); } + + /** + * Get variable names. 
+ * + * @return variable names + */ + default Collection getVariableNames() { + return Collections.emptyList(); + } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/dml/InsertStatement.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/dml/InsertStatement.java index 4db2c6e26cbd3..98b3fa54ed88b 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/dml/InsertStatement.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/dml/InsertStatement.java @@ -46,6 +46,8 @@ public abstract class InsertStatement extends AbstractSQLStatement implements DM private final Collection values = new LinkedList<>(); + private final Collection derivedInsertColumns = new LinkedList<>(); + /** * Get insert columns segment. * diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/dml/MergeStatement.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/dml/MergeStatement.java index 5d576a7b88633..c9dd114ae7bd0 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/dml/MergeStatement.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/dml/MergeStatement.java @@ -22,8 +22,6 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.AbstractSQLStatement; -import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml.OracleDeleteStatement; -import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml.OracleUpdateStatement; /** * Merge statement. 
@@ -38,7 +36,7 @@ public abstract class MergeStatement extends AbstractSQLStatement implements DML private ExpressionSegment expr; - private UpdateStatement update = new OracleUpdateStatement(); + private UpdateStatement update; - private DeleteStatement delete = new OracleDeleteStatement(); + private InsertStatement insert; } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/dml/UpdateStatement.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/dml/UpdateStatement.java index 424fa9141e37d..633e405ec27e3 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/dml/UpdateStatement.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/statement/dml/UpdateStatement.java @@ -47,4 +47,13 @@ public abstract class UpdateStatement extends AbstractSQLStatement implements DM public Optional getWhere() { return Optional.ofNullable(where); } + + /** + * Get assignment segment. 
+ * + * @return assignment segment + */ + public Optional getAssignmentSegment() { + return Optional.ofNullable(setAssignment); + } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/util/ColumnExtractor.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/util/ColumnExtractor.java index f0ddac8045c68..b4be86f36c670 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/util/ColumnExtractor.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/util/ColumnExtractor.java @@ -24,6 +24,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.InExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.ColumnWithJoinOperatorSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.AndPredicate; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; @@ -51,6 +52,12 @@ public static Collection extract(final ExpressionSegment expressi if (((BinaryOperationExpression) expression).getRight() instanceof ColumnSegment) { result.add((ColumnSegment) ((BinaryOperationExpression) expression).getRight()); } + if (((BinaryOperationExpression) expression).getLeft() instanceof ColumnWithJoinOperatorSegment) { + result.add(((ColumnWithJoinOperatorSegment) ((BinaryOperationExpression) expression).getLeft()).getColumnName()); + } + if (((BinaryOperationExpression) expression).getRight() instanceof ColumnWithJoinOperatorSegment) { + result.add(((ColumnWithJoinOperatorSegment) ((BinaryOperationExpression) expression).getRight()).getColumnName()); + } } if (expression instanceof InExpression && ((InExpression) 
expression).getLeft() instanceof ColumnSegment) { result.add((ColumnSegment) ((InExpression) expression).getLeft()); diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/util/ExpressionExtractUtils.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/util/ExpressionExtractUtils.java index 1b0bb4fd69617..6d69c31ae75dc 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/util/ExpressionExtractUtils.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/util/ExpressionExtractUtils.java @@ -20,6 +20,7 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; import org.apache.shardingsphere.sql.parser.sql.common.enums.LogicalOperator; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.FunctionSegment; @@ -27,6 +28,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.TypeCastExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.ParameterMarkerExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.AndPredicate; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; import java.util.ArrayList; import java.util.Collection; @@ -107,18 +109,33 @@ private static void extractParameterMarkerExpressions(final List joinConditions, final Collection whereSegments) { + for (WhereSegment each : whereSegments) { + if (each.getExpr() instanceof BinaryOperationExpression && ((BinaryOperationExpression) each.getExpr()).getLeft() instanceof ColumnSegment + && ((BinaryOperationExpression) 
each.getExpr()).getRight() instanceof ColumnSegment) { + joinConditions.add((BinaryOperationExpression) each.getExpr()); + } + } + } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/util/SubqueryExtractUtils.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/util/SubqueryExtractUtils.java index 522e5c556763f..bee7d3d0537ad 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/util/SubqueryExtractUtils.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/util/SubqueryExtractUtils.java @@ -27,6 +27,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.InExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ListExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.NotExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubqueryExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ProjectionSegment; @@ -63,7 +64,7 @@ private static void extractSubquerySegments(final List result, extractSubquerySegmentsFromProjections(result, selectStatement.getProjections()); extractSubquerySegmentsFromTableSegment(result, selectStatement.getFrom()); if (selectStatement.getWhere().isPresent()) { - extractSubquerySegmentsFromExpression(result, selectStatement.getWhere().get().getExpr()); + extractSubquerySegmentsFromWhere(result, selectStatement.getWhere().get().getExpr()); } if (selectStatement.getCombine().isPresent()) { CombineSegment combineSegment = selectStatement.getCombine().get(); @@ -88,50 +89,66 @@ private static void extractSubquerySegmentsFromProjections(final 
List result, final TableSegment tableSegment) { - if (null == tableSegment) { - return; + if (tableSegment instanceof SubqueryTableSegment) { + extractSubquerySegmentsFromSubqueryTableSegment(result, (SubqueryTableSegment) tableSegment); } + if (tableSegment instanceof JoinTableSegment) { + extractSubquerySegmentsFromJoinTableSegment(result, ((JoinTableSegment) tableSegment).getLeft()); + extractSubquerySegmentsFromJoinTableSegment(result, ((JoinTableSegment) tableSegment).getRight()); + } + } + + private static void extractSubquerySegmentsFromJoinTableSegment(final List result, final TableSegment tableSegment) { if (tableSegment instanceof SubqueryTableSegment) { SubquerySegment subquery = ((SubqueryTableSegment) tableSegment).getSubquery(); - subquery.setSubqueryType(SubqueryType.TABLE_SUBQUERY); + subquery.setSubqueryType(SubqueryType.JOIN_SUBQUERY); result.add(subquery); extractSubquerySegments(result, subquery.getSelect()); + } else if (tableSegment instanceof JoinTableSegment) { + extractSubquerySegmentsFromJoinTableSegment(result, ((JoinTableSegment) tableSegment).getLeft()); + extractSubquerySegmentsFromJoinTableSegment(result, ((JoinTableSegment) tableSegment).getRight()); } - if (tableSegment instanceof JoinTableSegment) { - extractSubquerySegmentsFromTableSegment(result, ((JoinTableSegment) tableSegment).getLeft()); - extractSubquerySegmentsFromTableSegment(result, ((JoinTableSegment) tableSegment).getRight()); - } } - private static void extractSubquerySegmentsFromExpression(final List result, final ExpressionSegment expressionSegment) { + private static void extractSubquerySegmentsFromSubqueryTableSegment(final List result, final SubqueryTableSegment subqueryTableSegment) { + SubquerySegment subquery = subqueryTableSegment.getSubquery(); + subquery.setSubqueryType(SubqueryType.TABLE_SUBQUERY); + result.add(subquery); + extractSubquerySegments(result, subquery.getSelect()); + } + + private static void extractSubquerySegmentsFromWhere(final List result, 
final ExpressionSegment expressionSegment) { if (expressionSegment instanceof SubqueryExpressionSegment) { SubquerySegment subquery = ((SubqueryExpressionSegment) expressionSegment).getSubquery(); subquery.setSubqueryType(SubqueryType.PREDICATE_SUBQUERY); result.add(subquery); extractSubquerySegments(result, subquery.getSelect()); } + if (expressionSegment instanceof ExistsSubqueryExpression) { + SubquerySegment subquery = ((ExistsSubqueryExpression) expressionSegment).getSubquery(); + subquery.setSubqueryType(SubqueryType.PREDICATE_SUBQUERY); + result.add(subquery); + extractSubquerySegments(result, subquery.getSelect()); + } if (expressionSegment instanceof ListExpression) { for (ExpressionSegment each : ((ListExpression) expressionSegment).getItems()) { - extractSubquerySegmentsFromExpression(result, each); + extractSubquerySegmentsFromWhere(result, each); } } if (expressionSegment instanceof BinaryOperationExpression) { - extractSubquerySegmentsFromExpression(result, ((BinaryOperationExpression) expressionSegment).getLeft()); - extractSubquerySegmentsFromExpression(result, ((BinaryOperationExpression) expressionSegment).getRight()); + extractSubquerySegmentsFromWhere(result, ((BinaryOperationExpression) expressionSegment).getLeft()); + extractSubquerySegmentsFromWhere(result, ((BinaryOperationExpression) expressionSegment).getRight()); } if (expressionSegment instanceof InExpression) { - extractSubquerySegmentsFromExpression(result, ((InExpression) expressionSegment).getLeft()); - extractSubquerySegmentsFromExpression(result, ((InExpression) expressionSegment).getRight()); + extractSubquerySegmentsFromWhere(result, ((InExpression) expressionSegment).getLeft()); + extractSubquerySegmentsFromWhere(result, ((InExpression) expressionSegment).getRight()); } if (expressionSegment instanceof BetweenExpression) { - extractSubquerySegmentsFromExpression(result, ((BetweenExpression) expressionSegment).getBetweenExpr()); - extractSubquerySegmentsFromExpression(result, 
((BetweenExpression) expressionSegment).getAndExpr()); + extractSubquerySegmentsFromWhere(result, ((BetweenExpression) expressionSegment).getBetweenExpr()); + extractSubquerySegmentsFromWhere(result, ((BetweenExpression) expressionSegment).getAndExpr()); } - if (expressionSegment instanceof ExistsSubqueryExpression) { - SubquerySegment subquery = ((ExistsSubqueryExpression) expressionSegment).getSubquery(); - subquery.setSubqueryType(SubqueryType.EXISTS_SUBQUERY); - result.add(subquery); - extractSubquerySegments(result, subquery.getSelect()); + if (expressionSegment instanceof NotExpression) { + extractSubquerySegmentsFromWhere(result, ((NotExpression) expressionSegment).getExpression()); } } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/value/literal/impl/DateTimeLiteralValue.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/value/literal/impl/DateTimeLiteralValue.java new file mode 100644 index 0000000000000..ee82f37a4e9b5 --- /dev/null +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/value/literal/impl/DateTimeLiteralValue.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sql.parser.sql.common.value.literal.impl; + +import org.apache.shardingsphere.sql.parser.sql.common.value.literal.LiteralValue; + +/** + * Date time literal value. + */ +public final class DateTimeLiteralValue implements LiteralValue { + + private final String dateTimeType; + + private final String dateTimeValue; + + private final boolean containsBrace; + + public DateTimeLiteralValue(final String dateTimeType, final String dateTimeValue, final boolean containsBrace) { + this.dateTimeType = dateTimeType; + this.dateTimeValue = containsBrace ? dateTimeValue.substring(1, dateTimeValue.length() - 1) : dateTimeValue; + this.containsBrace = containsBrace; + } + + @Override + public String getValue() { + if (containsBrace) { + return "{" + dateTimeType + " " + dateTimeValue + "}"; + } + return dateTimeType + " " + dateTimeValue; + } +} diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/ddl/AlterTableStatementHandler.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/ddl/AlterTableStatementHandler.java new file mode 100644 index 0000000000000..94b08a3c2919f --- /dev/null +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/ddl/AlterTableStatementHandler.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sql.parser.sql.dialect.handler.ddl; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.alter.ModifyCollectionRetrievalSegment; +import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.AlterTableStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.handler.SQLStatementHandler; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.ddl.OracleAlterTableStatement; + +import java.util.Optional; + +/** + * Alter table statement handler for different dialect SQL statements. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class AlterTableStatementHandler implements SQLStatementHandler { + + /** + * Get modify collection retrieval segment. 
+ * + * @param alterTableStatement alter table statement + * @return modify collection retrieval segment + */ + public static Optional getModifyCollectionRetrievalSegment(final AlterTableStatement alterTableStatement) { + if (alterTableStatement instanceof OracleAlterTableStatement) { + return ((OracleAlterTableStatement) alterTableStatement).getModifyCollectionRetrieval(); + } + return Optional.empty(); + } +} diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/ddl/AlterViewStatementHandler.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/ddl/AlterViewStatementHandler.java index 9fa9ffdcccbef..7dc0268011978 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/ddl/AlterViewStatementHandler.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/ddl/AlterViewStatementHandler.java @@ -19,6 +19,7 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.constraint.ConstraintDefinitionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.AlterViewStatement; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.SelectStatement; @@ -27,6 +28,8 @@ import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.ddl.MySQLAlterViewStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.opengauss.OpenGaussStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.opengauss.ddl.OpenGaussAlterViewStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.OracleStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.ddl.OracleAlterViewStatement; import 
org.apache.shardingsphere.sql.parser.sql.dialect.statement.postgresql.PostgreSQLStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.postgresql.ddl.PostgreSQLAlterViewStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.sqlserver.SQLServerStatement; @@ -87,4 +90,17 @@ public static Optional getRenameView(final AlterViewStatemen } return Optional.empty(); } + + /** + * Get constraint definition segment. + * + * @param alterViewStatement AlterViewStatement + * @return constraint definition + */ + public static Optional getConstraintDefinition(final AlterViewStatement alterViewStatement) { + if (alterViewStatement instanceof OracleStatement) { + return ((OracleAlterViewStatement) alterViewStatement).getConstraintDefinitionSegment(); + } + return Optional.empty(); + } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/DeleteStatementHandler.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/DeleteStatementHandler.java index c590602a7e53e..2ff9562b31567 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/DeleteStatementHandler.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/DeleteStatementHandler.java @@ -89,4 +89,52 @@ public static Optional getWithSegment(final DeleteStatement deleteS } return Optional.empty(); } + + /** + * Set order by segment. + * + * @param deleteStatement delete statement + * @param orderBySegment order by segment + */ + public static void setOrderBySegment(final DeleteStatement deleteStatement, final OrderBySegment orderBySegment) { + if (deleteStatement instanceof MySQLStatement) { + ((MySQLDeleteStatement) deleteStatement).setOrderBy(orderBySegment); + } + } + + /** + * Set limit segment. 
+ * + * @param deleteStatement delete statement + * @param limitSegment limit segment + */ + public static void setLimitSegment(final DeleteStatement deleteStatement, final LimitSegment limitSegment) { + if (deleteStatement instanceof MySQLStatement) { + ((MySQLDeleteStatement) deleteStatement).setLimit(limitSegment); + } + } + + /** + * Set output segment. + * + * @param deleteStatement delete statement + * @param outputSegment output segment + */ + public static void setOutputSegment(final DeleteStatement deleteStatement, final OutputSegment outputSegment) { + if (deleteStatement instanceof SQLServerStatement) { + ((SQLServerDeleteStatement) deleteStatement).setOutputSegment(outputSegment); + } + } + + /** + * Set with segment. + * + * @param deleteStatement delete statement + * @param withSegment with segment + */ + public static void setWithSegment(final DeleteStatement deleteStatement, final WithSegment withSegment) { + if (deleteStatement instanceof SQLServerStatement) { + ((SQLServerDeleteStatement) deleteStatement).setWithSegment(withSegment); + } + } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/InsertStatementHandler.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/InsertStatementHandler.java index 4773924c474ac..38f054b30b9ca 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/InsertStatementHandler.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/InsertStatementHandler.java @@ -22,11 +22,14 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.ReturningSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.SetAssignmentSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.OnDuplicateKeyColumnsSegment; -import 
org.apache.shardingsphere.sql.parser.sql.common.segment.generic.InsertMultiTableElementSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OutputSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.WithSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.handler.SQLStatementHandler; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableConditionalIntoSegment; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableInsertIntoSegment; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableInsertType; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLInsertStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.opengauss.dml.OpenGaussInsertStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml.OracleInsertStatement; @@ -163,27 +166,77 @@ public static void setOutputSegment(final InsertStatement insertStatement, final } /** - * Get insert multi table element segment. + * Get multi table insert type. * * @param insertStatement insert statement - * @return insert multi table element segment + * @return multi table insert type */ - public static Optional getInsertMultiTableElementSegment(final InsertStatement insertStatement) { + public static Optional getMultiTableInsertType(final InsertStatement insertStatement) { if (insertStatement instanceof OracleInsertStatement) { - return ((OracleInsertStatement) insertStatement).getInsertMultiTableElementSegment(); + return ((OracleInsertStatement) insertStatement).getMultiTableInsertType(); } return Optional.empty(); } /** - * Set insert multi table element segment. + * Set multi table insert type. 
+ * + * @param insertStatement insert into statement + * @param multiTableInsertType multi table insert type + */ + public static void setMultiTableInsertType(final InsertStatement insertStatement, final MultiTableInsertType multiTableInsertType) { + if (insertStatement instanceof OracleInsertStatement) { + ((OracleInsertStatement) insertStatement).setMultiTableInsertType(multiTableInsertType); + } + } + + /** + * Get multi table insert into segment. + * + * @param insertStatement insert statement + * @return multi table insert into segment + */ + public static Optional getMultiTableInsertIntoSegment(final InsertStatement insertStatement) { + if (insertStatement instanceof OracleInsertStatement) { + return ((OracleInsertStatement) insertStatement).getMultiTableInsertIntoSegment(); + } + return Optional.empty(); + } + + /** + * Set multi table insert into segment. * + * @param insertStatement insert into statement + * @param multiTableInsertIntoSegment multi table insert into segment + */ + public static void setMultiTableInsertIntoSegment(final InsertStatement insertStatement, final MultiTableInsertIntoSegment multiTableInsertIntoSegment) { + if (insertStatement instanceof OracleInsertStatement) { + ((OracleInsertStatement) insertStatement).setMultiTableInsertIntoSegment(multiTableInsertIntoSegment); + } + } + + /** + * Get multi table conditional into segment. + * * @param insertStatement insert statement - * @param insertMultiTableElementSegment insert multi table element segment + * @return multi table conditional into segment + */ + public static Optional getMultiTableConditionalIntoSegment(final InsertStatement insertStatement) { + if (insertStatement instanceof OracleInsertStatement) { + return ((OracleInsertStatement) insertStatement).getMultiTableConditionalIntoSegment(); + } + return Optional.empty(); + } + + /** + * Set multi table conditional into segment. 
+ * + * @param insertStatement insert into statement + * @param multiTableConditionalIntoSegment multi table conditional into segment */ - public static void setInsertMultiTableElementSegment(final InsertStatement insertStatement, final InsertMultiTableElementSegment insertMultiTableElementSegment) { + public static void setMultiTableConditionalIntoSegment(final InsertStatement insertStatement, final MultiTableConditionalIntoSegment multiTableConditionalIntoSegment) { if (insertStatement instanceof OracleInsertStatement) { - ((OracleInsertStatement) insertStatement).setInsertMultiTableElementSegment(insertMultiTableElementSegment); + ((OracleInsertStatement) insertStatement).setMultiTableConditionalIntoSegment(multiTableConditionalIntoSegment); } } @@ -217,4 +270,29 @@ public static void setReturningSegment(final InsertStatement insertStatement, fi ((OpenGaussInsertStatement) insertStatement).setReturningSegment(returningSegment); } } + + /** + * Get where segment. + * + * @param insertStatement insert statement + * @return where segment + */ + public static Optional getWhereSegment(final InsertStatement insertStatement) { + if (insertStatement instanceof OracleInsertStatement) { + return ((OracleInsertStatement) insertStatement).getWhere(); + } + return Optional.empty(); + } + + /** + * Set where segment. 
+ * + * @param insertStatement insert statement + * @param whereSegment where segment + */ + public static void setWhereSegment(final InsertStatement insertStatement, final WhereSegment whereSegment) { + if (insertStatement instanceof OracleInsertStatement) { + ((OracleInsertStatement) insertStatement).setWhere(whereSegment); + } + } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/UpdateStatementHandler.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/UpdateStatementHandler.java index 9918b671b0db8..3eeea17682a86 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/UpdateStatementHandler.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/UpdateStatementHandler.java @@ -21,10 +21,13 @@ import lombok.NoArgsConstructor; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.order.OrderBySegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.pagination.limit.LimitSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.WithSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.UpdateStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.handler.SQLStatementHandler; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.MySQLStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLUpdateStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.sqlserver.SQLServerStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.sqlserver.dml.SQLServerUpdateStatement; import java.util.Optional; @@ -59,4 +62,53 @@ public static Optional getLimitSegment(final UpdateStatement updat } return Optional.empty(); } + + /** + * Get with segment. 
+ * + * @param updateStatement update statement + * @return with segment + */ + public static Optional getWithSegment(final UpdateStatement updateStatement) { + if (updateStatement instanceof SQLServerStatement) { + return ((SQLServerUpdateStatement) updateStatement).getWithSegment(); + } + return Optional.empty(); + } + + /** + * Set order by segment. + * + * @param updateStatement update statement + * @param orderBySegment order by segment + */ + public static void setOrderBySegment(final UpdateStatement updateStatement, final OrderBySegment orderBySegment) { + if (updateStatement instanceof MySQLStatement) { + ((MySQLUpdateStatement) updateStatement).setOrderBy(orderBySegment); + } + } + + /** + * Set limit segment. + * + * @param updateStatement update statement + * @param limitSegment limit segment + */ + public static void setLimitSegment(final UpdateStatement updateStatement, final LimitSegment limitSegment) { + if (updateStatement instanceof MySQLStatement) { + ((MySQLUpdateStatement) updateStatement).setLimit(limitSegment); + } + } + + /** + * Set with segment. 
+ * + * @param updateStatement update statement + * @param withSegment with segment + */ + public static void setWithSegment(final UpdateStatement updateStatement, final WithSegment withSegment) { + if (updateStatement instanceof SQLServerStatement) { + ((SQLServerUpdateStatement) updateStatement).setWithSegment(withSegment); + } + } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableConditionalIntoElseSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableConditionalIntoElseSegment.java new file mode 100644 index 0000000000000..b915e47705976 --- /dev/null +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableConditionalIntoElseSegment.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert; + +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.sql.parser.sql.common.segment.SQLSegment; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; + +import java.util.Collection; + +/** + * Multi table conditional into else segment. + */ +@RequiredArgsConstructor +@Getter +public final class MultiTableConditionalIntoElseSegment implements SQLSegment { + + private final int startIndex; + + private final int stopIndex; + + private final Collection insertStatements; +} diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableConditionalIntoSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableConditionalIntoSegment.java new file mode 100644 index 0000000000000..23b7ef19c8a1c --- /dev/null +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableConditionalIntoSegment.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert; + +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import lombok.Setter; +import org.apache.shardingsphere.sql.parser.sql.common.segment.SQLSegment; + +import java.util.Collection; +import java.util.LinkedList; +import java.util.Optional; + +/** + * Multi table conditional into segment. + */ +@RequiredArgsConstructor +@Getter +public final class MultiTableConditionalIntoSegment implements SQLSegment { + + private final int startIndex; + + private final int stopIndex; + + private final Collection whenThenSegments = new LinkedList<>(); + + @Setter + private MultiTableConditionalIntoElseSegment elseSegment; + + /** + * Get else segment. + * + * @return else segment + */ + public Optional getElseSegment() { + return Optional.ofNullable(elseSegment); + } +} diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableConditionalIntoThenSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableConditionalIntoThenSegment.java new file mode 100644 index 0000000000000..f324f66e0a5ce --- /dev/null +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableConditionalIntoThenSegment.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert; + +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.sql.parser.sql.common.segment.SQLSegment; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; + +import java.util.Collection; + +/** + * Multi table conditional into then segment. + */ +@RequiredArgsConstructor +@Getter +public final class MultiTableConditionalIntoThenSegment implements SQLSegment { + + private final int startIndex; + + private final int stopIndex; + + private final Collection insertStatements; +} diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableConditionalIntoWhenThenSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableConditionalIntoWhenThenSegment.java new file mode 100644 index 0000000000000..58c573aa1896e --- /dev/null +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableConditionalIntoWhenThenSegment.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert; + +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import org.apache.shardingsphere.sql.parser.sql.common.segment.SQLSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; + +/** + * Multi table conditional into when then segment. + */ +@RequiredArgsConstructor +@Getter +public final class MultiTableConditionalIntoWhenThenSegment implements SQLSegment { + + private final int startIndex; + + private final int stopIndex; + + private final ExpressionSegment whenSegment; + + private final MultiTableConditionalIntoThenSegment thenSegment; +} diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/InsertMultiTableElementSegment.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableInsertIntoSegment.java similarity index 87% rename from parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/InsertMultiTableElementSegment.java rename to parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableInsertIntoSegment.java index 741444807a959..5cd63f71a05d2 100644 --- 
a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/common/segment/generic/InsertMultiTableElementSegment.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableInsertIntoSegment.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.shardingsphere.sql.parser.sql.common.segment.generic; +package org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert; import lombok.Getter; import lombok.RequiredArgsConstructor; @@ -26,11 +26,11 @@ import java.util.LinkedList; /** - * Insert multi table element segment. + * Multi table insert into segment. */ @RequiredArgsConstructor @Getter -public final class InsertMultiTableElementSegment implements SQLSegment { +public final class MultiTableInsertIntoSegment implements SQLSegment { private final int startIndex; diff --git a/examples/example-core/config-utility/src/main/java/org/apache/shardingsphere/example/type/RegistryCenterType.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableInsertType.java similarity index 83% rename from examples/example-core/config-utility/src/main/java/org/apache/shardingsphere/example/type/RegistryCenterType.java rename to parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableInsertType.java index e6f1ee260538c..edc20f4947f9a 100644 --- a/examples/example-core/config-utility/src/main/java/org/apache/shardingsphere/example/type/RegistryCenterType.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/segment/oracle/insert/MultiTableInsertType.java @@ -15,9 +15,12 @@ * limitations under the License. */ -package org.apache.shardingsphere.example.type; +package org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert; -public enum RegistryCenterType { +/** + * Multi table insert type. 
+ */ +public enum MultiTableInsertType { - ZOOKEEPER, NACOS + ALL, FIRST } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/ddl/OracleAlterTableStatement.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/ddl/OracleAlterTableStatement.java index 478cceb09f645..341a8465934d0 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/ddl/OracleAlterTableStatement.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/ddl/OracleAlterTableStatement.java @@ -17,11 +17,29 @@ package org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.ddl; +import lombok.Getter; +import lombok.Setter; +import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.alter.ModifyCollectionRetrievalSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.AlterTableStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.OracleStatement; +import java.util.Optional; + /** * Oracle alter table statement. */ +@Getter +@Setter public final class OracleAlterTableStatement extends AlterTableStatement implements OracleStatement { + + private ModifyCollectionRetrievalSegment modifyCollectionRetrieval; + + /** + * Get modify collection retrieval segment. 
+ * + * @return modify collection retrieval + */ + public Optional getModifyCollectionRetrieval() { + return Optional.ofNullable(modifyCollectionRetrieval); + } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/ddl/OracleAlterViewStatement.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/ddl/OracleAlterViewStatement.java index f7e547de99b12..ca4f3677f7ee4 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/ddl/OracleAlterViewStatement.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/ddl/OracleAlterViewStatement.java @@ -17,11 +17,29 @@ package org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.ddl; +import lombok.Getter; +import lombok.Setter; +import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.constraint.ConstraintDefinitionSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.AlterViewStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.OracleStatement; +import java.util.Optional; + /** * Oracle alter view statement. */ +@Getter +@Setter public final class OracleAlterViewStatement extends AlterViewStatement implements OracleStatement { + + private ConstraintDefinitionSegment constraintDefinitionSegment; + + /** + * Get constraint definition segment. 
+ * + * @return constraint definition + */ + public Optional getConstraintDefinitionSegment() { + return Optional.ofNullable(constraintDefinitionSegment); + } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/ddl/OracleCreateViewStatement.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/ddl/OracleCreateViewStatement.java new file mode 100644 index 0000000000000..603aaa554ba45 --- /dev/null +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/ddl/OracleCreateViewStatement.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.ddl; + +import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.CreateViewStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.OracleStatement; + +/** + * Oracle create view statement. 
+ */ +public final class OracleCreateViewStatement extends CreateViewStatement implements OracleStatement { +} diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/dml/OracleInsertStatement.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/dml/OracleInsertStatement.java index 8098debf1d22a..8c21e4b5f009c 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/dml/OracleInsertStatement.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/dml/OracleInsertStatement.java @@ -17,9 +17,13 @@ package org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml; +import lombok.Getter; import lombok.Setter; -import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.InsertMultiTableElementSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableConditionalIntoSegment; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableInsertIntoSegment; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableInsertType; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.OracleStatement; import java.util.Optional; @@ -27,17 +31,51 @@ /** * Oracle insert statement. 
*/ +@Getter @Setter public final class OracleInsertStatement extends InsertStatement implements OracleStatement { - private InsertMultiTableElementSegment insertMultiTableElementSegment; + private MultiTableInsertType multiTableInsertType; + + private MultiTableInsertIntoSegment multiTableInsertIntoSegment; + + private MultiTableConditionalIntoSegment multiTableConditionalIntoSegment; + + private WhereSegment where; + + /** + * Get multi table insert type. + * + * @return multi table insert type + */ + public Optional getMultiTableInsertType() { + return Optional.ofNullable(multiTableInsertType); + } + + /** + * Get multi table insert into segment. + * + * @return multi table insert into segment + */ + public Optional getMultiTableInsertIntoSegment() { + return Optional.ofNullable(multiTableInsertIntoSegment); + } + + /** + * Get multi table conditional into segment. + * + * @return multi table conditional into segment + */ + public Optional getMultiTableConditionalIntoSegment() { + return Optional.ofNullable(multiTableConditionalIntoSegment); + } /** - * Get insert multi table element segment. + * Get where segment. 
* - * @return insert select segment + * @return where segment */ - public Optional getInsertMultiTableElementSegment() { - return Optional.ofNullable(insertMultiTableElementSegment); + public Optional getWhere() { + return Optional.ofNullable(where); } } diff --git a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/dml/OracleUpdateStatement.java b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/dml/OracleUpdateStatement.java index 8375d7d1282f2..b489f6829bb49 100644 --- a/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/dml/OracleUpdateStatement.java +++ b/parser/sql/statement/src/main/java/org/apache/shardingsphere/sql/parser/sql/dialect/statement/oracle/dml/OracleUpdateStatement.java @@ -17,11 +17,18 @@ package org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml; +import lombok.Getter; +import lombok.Setter; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.UpdateStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.OracleStatement; /** * Oracle update statement. 
*/ +@Getter +@Setter public final class OracleUpdateStatement extends UpdateStatement implements OracleStatement { + + private WhereSegment deleteWhere; } diff --git a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/extractor/TableExtractorTest.java b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/extractor/TableExtractorTest.java index 63180cb7c7c4e..97888a11ee6e0 100644 --- a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/extractor/TableExtractorTest.java +++ b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/extractor/TableExtractorTest.java @@ -92,14 +92,14 @@ void assertExtractTablesFromSelectLockWithValue() { @Test void assertExtractTablesFromInsert() { - MySQLInsertStatement mySQLInsertStatement = new MySQLInsertStatement(); - mySQLInsertStatement.setTable(new SimpleTableSegment(new TableNameSegment(122, 128, new IdentifierValue("t_order")))); + MySQLInsertStatement insertStatement = new MySQLInsertStatement(); + insertStatement.setTable(new SimpleTableSegment(new TableNameSegment(122, 128, new IdentifierValue("t_order")))); Collection assignmentSegments = new LinkedList<>(); ColumnSegment columnSegment = new ColumnSegment(133, 136, new IdentifierValue("id")); columnSegment.setOwner(new OwnerSegment(130, 132, new IdentifierValue("t_order"))); assignmentSegments.add(new ColumnAssignmentSegment(130, 140, Collections.singletonList(columnSegment), new LiteralExpressionSegment(141, 142, 1))); - mySQLInsertStatement.setOnDuplicateKeyColumns(new OnDuplicateKeyColumnsSegment(130, 140, assignmentSegments)); - tableExtractor.extractTablesFromInsert(mySQLInsertStatement); + insertStatement.setOnDuplicateKeyColumns(new OnDuplicateKeyColumnsSegment(130, 140, assignmentSegments)); + tableExtractor.extractTablesFromInsert(insertStatement); assertThat(tableExtractor.getRewriteTables().size(), is(2)); Iterator tableSegmentIterator = 
tableExtractor.getRewriteTables().iterator(); assertTableSegment(tableSegmentIterator.next(), 122, 128, "t_order"); diff --git a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/ColumnExtractorTest.java b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/ColumnExtractorTest.java index 164a05ffa1001..0ca35e57dfa9c 100644 --- a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/ColumnExtractorTest.java +++ b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/ColumnExtractorTest.java @@ -51,6 +51,6 @@ private Collection createWhereSegments() { new ColumnSegment(10, 13, new IdentifierValue("name")), new LiteralExpressionSegment(18, 22, "LiLei"), "=", "name = 'LiLei'"); BinaryOperationExpression rightExpression = new BinaryOperationExpression(30, 44, new ColumnSegment(30, 32, new IdentifierValue("pwd")), new LiteralExpressionSegment(40, 45, "123456"), "=", "pwd = '123456'"); - return Collections.singletonList(new WhereSegment(0, 0, new BinaryOperationExpression(0, 0, leftExpression, rightExpression, "AND", "name = 'LiLei' AND pwd = '123456'"))); + return Collections.singleton(new WhereSegment(0, 0, new BinaryOperationExpression(0, 0, leftExpression, rightExpression, "AND", "name = 'LiLei' AND pwd = '123456'"))); } } diff --git a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/ExpressionExtractUtilsTest.java b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/ExpressionExtractUtilsTest.java index d89cca0e5bebb..173db7f122035 100644 --- a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/ExpressionExtractUtilsTest.java +++ b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/ExpressionExtractUtilsTest.java @@ -29,12 +29,14 @@ import 
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.ParameterMarkerExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.AndPredicate; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.predicate.WhereSegment; import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import org.junit.jupiter.api.Test; import java.util.Collection; import java.util.Collections; import java.util.Iterator; +import java.util.LinkedList; import java.util.List; import static org.hamcrest.CoreMatchers.is; @@ -119,14 +121,14 @@ void assertExtractGetParameterMarkerExpressions() { functionSegment.getParameters().add(param1); functionSegment.getParameters().add(param2); functionSegment.getParameters().add(param3); - List result = ExpressionExtractUtils.getParameterMarkerExpressions(Collections.singletonList(functionSegment)); + List result = ExpressionExtractUtils.getParameterMarkerExpressions(Collections.singleton(functionSegment)); assertThat(result.size(), is(1)); } @Test void assertGetParameterMarkerExpressionsFromTypeCastExpression() { ParameterMarkerExpressionSegment expected = new ParameterMarkerExpressionSegment(0, 0, 1, ParameterMarkerType.DOLLAR); - List input = Collections.singletonList(new TypeCastExpression(0, 0, "$2::varchar", expected, "varchar")); + Collection input = Collections.singleton(new TypeCastExpression(0, 0, "$2::varchar", expected, "varchar")); List actual = ExpressionExtractUtils.getParameterMarkerExpressions(input); assertThat(actual.size(), is(1)); assertThat(actual.get(0), is(expected)); @@ -137,8 +139,18 @@ void assertGetParameterMarkerExpressionsFromInExpression() { ListExpression listExpression = new ListExpression(0, 0); listExpression.getItems().add(new ParameterMarkerExpressionSegment(0, 0, 1, ParameterMarkerType.QUESTION)); 
listExpression.getItems().add(new ParameterMarkerExpressionSegment(0, 0, 2, ParameterMarkerType.QUESTION)); - List inExpressions = Collections.singletonList(new InExpression(0, 0, new ColumnSegment(0, 0, new IdentifierValue("order_id")), listExpression, false)); + Collection inExpressions = Collections.singleton(new InExpression(0, 0, new ColumnSegment(0, 0, new IdentifierValue("order_id")), listExpression, false)); List actual = ExpressionExtractUtils.getParameterMarkerExpressions(inExpressions); assertThat(actual.size(), is(2)); } + + @Test + void assertExtractJoinConditions() { + Collection actual = new LinkedList<>(); + BinaryOperationExpression binaryExpression = + new BinaryOperationExpression(0, 0, new ColumnSegment(0, 0, new IdentifierValue("order_id")), new ColumnSegment(0, 0, new IdentifierValue("order_id")), "=", ""); + ExpressionExtractUtils.extractJoinConditions(actual, Collections.singleton(new WhereSegment(0, 0, binaryExpression))); + assertThat(actual.size(), is(1)); + assertThat(actual.iterator().next(), is(binaryExpression)); + } } diff --git a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/WhereExtractUtilsTest.java b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/WhereExtractUtilsTest.java index c0dbc3f25fda4..1b33759e24d1c 100644 --- a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/WhereExtractUtilsTest.java +++ b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/WhereExtractUtilsTest.java @@ -17,6 +17,7 @@ package org.apache.shardingsphere.sql.parser.sql.common.util; +import com.google.common.base.Preconditions; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; import 
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; @@ -72,6 +73,7 @@ void assertGetSubqueryWhereSegmentsFromSubqueryTableSegment() { selectStatement.setProjections(projections); Collection subqueryWhereSegments = WhereExtractUtils.getSubqueryWhereSegments(selectStatement); WhereSegment actual = subqueryWhereSegments.iterator().next(); + Preconditions.checkState(subQuerySelectStatement.getWhere().isPresent()); assertThat(actual.getExpr(), is(subQuerySelectStatement.getWhere().get().getExpr())); } @@ -85,9 +87,9 @@ void assertGetWhereSegmentsFromSubQueryJoin() { new ColumnSegment(75, 83, new IdentifierValue("order_id")), "=", "oi.order_id = o.order_id")); MySQLSelectStatement subQuerySelectStatement = new MySQLSelectStatement(); subQuerySelectStatement.setFrom(joinTableSegment); - MySQLSelectStatement mySQLSelectStatement = new MySQLSelectStatement(); - mySQLSelectStatement.setFrom(new SubqueryTableSegment(new SubquerySegment(20, 84, subQuerySelectStatement))); - Collection subqueryWhereSegments = WhereExtractUtils.getSubqueryWhereSegments(mySQLSelectStatement); + MySQLSelectStatement selectStatement = new MySQLSelectStatement(); + selectStatement.setFrom(new SubqueryTableSegment(new SubquerySegment(20, 84, subQuerySelectStatement))); + Collection subqueryWhereSegments = WhereExtractUtils.getSubqueryWhereSegments(selectStatement); WhereSegment actual = subqueryWhereSegments.iterator().next(); assertThat(actual.getExpr(), is(((JoinTableSegment) subQuerySelectStatement.getFrom()).getCondition())); } diff --git a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dal/FlushStatementHandlerTest.java b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dal/FlushStatementHandlerTest.java index ee46391a4865a..fff672c40e6d2 100644 --- 
a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dal/FlushStatementHandlerTest.java +++ b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dal/FlushStatementHandlerTest.java @@ -31,10 +31,10 @@ class FlushStatementHandlerTest { @Test void assertGetSimpleTableSegmentForMySQL() { - MySQLFlushStatement mySQLFlushStatement = new MySQLFlushStatement(); - assertTrue(FlushStatementHandler.getSimpleTableSegment(mySQLFlushStatement).isEmpty()); - mySQLFlushStatement.getTables().add(new SimpleTableSegment(new TableNameSegment(0, 2, new IdentifierValue("foo_table")))); - assertThat(FlushStatementHandler.getSimpleTableSegment(mySQLFlushStatement), is(mySQLFlushStatement.getTables())); + MySQLFlushStatement flushStatement = new MySQLFlushStatement(); + assertTrue(FlushStatementHandler.getSimpleTableSegment(flushStatement).isEmpty()); + flushStatement.getTables().add(new SimpleTableSegment(new TableNameSegment(0, 2, new IdentifierValue("foo_table")))); + assertThat(FlushStatementHandler.getSimpleTableSegment(flushStatement), is(flushStatement.getTables())); } @Test diff --git a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/ddl/AlterTableStatementHandlerTest.java b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/ddl/AlterTableStatementHandlerTest.java new file mode 100644 index 0000000000000..ea2bc6b31ca4d --- /dev/null +++ b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/ddl/AlterTableStatementHandlerTest.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.sql.parser.sql.dialect.handler.ddl; + +import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.alter.ModifyCollectionRetrievalSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableNameSegment; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.ddl.OracleAlterTableStatement; +import org.junit.jupiter.api.Test; + +import java.util.Optional; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +class AlterTableStatementHandlerTest { + + @Test + void assertGetModifyCollectionRetrievalSegmentForOracle() { + OracleAlterTableStatement alterTableStatement = new OracleAlterTableStatement(); + alterTableStatement.setModifyCollectionRetrieval(new ModifyCollectionRetrievalSegment(0, 0, new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("test"))))); + Optional modifyCollectionRetrievalSegment = AlterTableStatementHandler.getModifyCollectionRetrievalSegment(alterTableStatement); + assertTrue(modifyCollectionRetrievalSegment.isPresent()); + } +} diff --git 
a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/ddl/CreateIndexStatementHandlerTest.java b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/ddl/CreateIndexStatementHandlerTest.java index 6a32e8c6b9221..e3deb18ca623a 100644 --- a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/ddl/CreateIndexStatementHandlerTest.java +++ b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/ddl/CreateIndexStatementHandlerTest.java @@ -17,12 +17,6 @@ package org.apache.shardingsphere.sql.parser.sql.dialect.handler.ddl; -import static org.hamcrest.Matchers.is; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import java.util.Optional; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.ddl.MySQLCreateIndexStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.opengauss.ddl.OpenGaussCreateIndexStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.ddl.OracleCreateIndexStatement; @@ -30,6 +24,13 @@ import org.apache.shardingsphere.sql.parser.sql.dialect.statement.sqlserver.ddl.SQLServerCreateIndexStatement; import org.junit.jupiter.api.Test; +import java.util.Optional; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + class CreateIndexStatementHandlerTest { @Test @@ -43,18 +44,18 @@ void assertIfNotExists() { @Test void assertGeneratedIndexStartIndexForPostgreSQL() { - PostgreSQLCreateIndexStatement postgreSQLCreateIndexStatement = new PostgreSQLCreateIndexStatement(true); - postgreSQLCreateIndexStatement.setGeneratedIndexStartIndex(2); - Optional actual 
= CreateIndexStatementHandler.getGeneratedIndexStartIndex(postgreSQLCreateIndexStatement); + PostgreSQLCreateIndexStatement createIndexStatement = new PostgreSQLCreateIndexStatement(true); + createIndexStatement.setGeneratedIndexStartIndex(2); + Optional actual = CreateIndexStatementHandler.getGeneratedIndexStartIndex(createIndexStatement); assertTrue(actual.isPresent()); assertThat(actual.get(), is(2)); } @Test void assertGeneratedIndexStartIndexForOpenGauss() { - OpenGaussCreateIndexStatement openGaussCreateIndexStatement = new OpenGaussCreateIndexStatement(true); - openGaussCreateIndexStatement.setGeneratedIndexStartIndex(2); - Optional actual = CreateIndexStatementHandler.getGeneratedIndexStartIndex(openGaussCreateIndexStatement); + OpenGaussCreateIndexStatement createIndexStatement = new OpenGaussCreateIndexStatement(true); + createIndexStatement.setGeneratedIndexStartIndex(2); + Optional actual = CreateIndexStatementHandler.getGeneratedIndexStartIndex(createIndexStatement); assertTrue(actual.isPresent()); assertThat(actual.get(), is(2)); } diff --git a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/CopyStatementHandlerTest.java b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/CopyStatementHandlerTest.java index 0e1437770de03..0679aae06c47b 100644 --- a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/CopyStatementHandlerTest.java +++ b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/CopyStatementHandlerTest.java @@ -17,6 +17,7 @@ package org.apache.shardingsphere.sql.parser.sql.dialect.handler.dml; +import com.google.common.base.Preconditions; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.prepare.PrepareStatementQuerySegment; import 
org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; @@ -36,33 +37,34 @@ class CopyStatementHandlerTest { @Test void assertGetPrepareStatementQuerySegmentWithSegmentForPostgreSQL() { - PostgreSQLCopyStatement postgreSQLCopyStatement = new PostgreSQLCopyStatement(); - postgreSQLCopyStatement.setPrepareStatementQuerySegment(new PrepareStatementQuerySegment(0, 2)); - Optional actual = CopyStatementHandler.getPrepareStatementQuerySegment(postgreSQLCopyStatement); + PostgreSQLCopyStatement copyStatement = new PostgreSQLCopyStatement(); + copyStatement.setPrepareStatementQuerySegment(new PrepareStatementQuerySegment(0, 2)); + Optional actual = CopyStatementHandler.getPrepareStatementQuerySegment(copyStatement); assertTrue(actual.isPresent()); - assertThat(actual.get(), is(postgreSQLCopyStatement.getPrepareStatementQuerySegment().get())); + Preconditions.checkState(copyStatement.getPrepareStatementQuerySegment().isPresent()); + assertThat(actual.get(), is(copyStatement.getPrepareStatementQuerySegment().get())); } @Test void assertGetPrepareStatementQuerySegmentWithoutSegmentForPostgreSQL() { - PostgreSQLCopyStatement postgreSQLCopyStatement = new PostgreSQLCopyStatement(); - Optional actual = CopyStatementHandler.getPrepareStatementQuerySegment(postgreSQLCopyStatement); + PostgreSQLCopyStatement copyStatement = new PostgreSQLCopyStatement(); + Optional actual = CopyStatementHandler.getPrepareStatementQuerySegment(copyStatement); assertFalse(actual.isPresent()); } @Test void assertGetColumnsWithSegmentForPostgreSQL() { - PostgreSQLCopyStatement postgreSQLCopyStatement = new PostgreSQLCopyStatement(); - postgreSQLCopyStatement.getColumns().add(new ColumnSegment(0, 2, new IdentifierValue("identifier"))); - Collection actual = CopyStatementHandler.getColumns(postgreSQLCopyStatement); + PostgreSQLCopyStatement copyStatement = new PostgreSQLCopyStatement(); + copyStatement.getColumns().add(new ColumnSegment(0, 2, new IdentifierValue("identifier"))); 
+ Collection actual = CopyStatementHandler.getColumns(copyStatement); assertFalse(actual.isEmpty()); - assertThat(actual, is(postgreSQLCopyStatement.getColumns())); + assertThat(actual, is(copyStatement.getColumns())); } @Test void assertGetColumnsWithoutSegmentForPostgreSQL() { - PostgreSQLCopyStatement postgreSQLCopyStatement = new PostgreSQLCopyStatement(); - Collection actual = CopyStatementHandler.getColumns(postgreSQLCopyStatement); + PostgreSQLCopyStatement copyStatement = new PostgreSQLCopyStatement(); + Collection actual = CopyStatementHandler.getColumns(copyStatement); assertTrue(actual.isEmpty()); } diff --git a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/InsertStatementHandlerTest.java b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/InsertStatementHandlerTest.java index c105d339bd6b5..83b9afc49ca51 100644 --- a/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/InsertStatementHandlerTest.java +++ b/parser/sql/statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/dialect/handler/dml/InsertStatementHandlerTest.java @@ -19,7 +19,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.SetAssignmentSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.OnDuplicateKeyColumnsSegment; -import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.InsertMultiTableElementSegment; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableInsertIntoSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OutputSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.WithSegment; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.mysql.dml.MySQLInsertStatement; @@ -155,19 +155,19 @@ void assertGetOutputSegmentForOtherDatabases() { @Test void 
assertGetInsertMultiTableElementSegmentForOracle() { OracleInsertStatement insertStatement = new OracleInsertStatement(); - insertStatement.setInsertMultiTableElementSegment(new InsertMultiTableElementSegment(0, 0)); - Optional insertMultiTableElementSegment = InsertStatementHandler.getInsertMultiTableElementSegment(insertStatement); + insertStatement.setMultiTableInsertIntoSegment(new MultiTableInsertIntoSegment(0, 0)); + Optional insertMultiTableElementSegment = InsertStatementHandler.getMultiTableInsertIntoSegment(insertStatement); assertTrue(insertMultiTableElementSegment.isPresent()); - assertThat(insertMultiTableElementSegment.get(), is(insertStatement.getInsertMultiTableElementSegment().get())); - assertFalse(InsertStatementHandler.getInsertMultiTableElementSegment(new SQLServerInsertStatement()).isPresent()); + assertThat(insertMultiTableElementSegment.get(), is(insertStatement.getMultiTableInsertIntoSegment().get())); + assertFalse(InsertStatementHandler.getMultiTableInsertIntoSegment(new SQLServerInsertStatement()).isPresent()); } @Test void assertGetInsertMultiTableElementSegmentForOtherDatabases() { - assertFalse(InsertStatementHandler.getInsertMultiTableElementSegment(new MySQLInsertStatement()).isPresent()); - assertFalse(InsertStatementHandler.getInsertMultiTableElementSegment(new OpenGaussInsertStatement()).isPresent()); - assertFalse(InsertStatementHandler.getInsertMultiTableElementSegment(new PostgreSQLInsertStatement()).isPresent()); - assertFalse(InsertStatementHandler.getInsertMultiTableElementSegment(new SQL92InsertStatement()).isPresent()); - assertFalse(InsertStatementHandler.getInsertMultiTableElementSegment(new SQLServerInsertStatement()).isPresent()); + assertFalse(InsertStatementHandler.getMultiTableInsertIntoSegment(new MySQLInsertStatement()).isPresent()); + assertFalse(InsertStatementHandler.getMultiTableInsertIntoSegment(new OpenGaussInsertStatement()).isPresent()); + 
assertFalse(InsertStatementHandler.getMultiTableInsertIntoSegment(new PostgreSQLInsertStatement()).isPresent()); + assertFalse(InsertStatementHandler.getMultiTableInsertIntoSegment(new SQL92InsertStatement()).isPresent()); + assertFalse(InsertStatementHandler.getMultiTableInsertIntoSegment(new SQLServerInsertStatement()).isPresent()); } } diff --git a/pom.xml b/pom.xml index 88edbae2a85dc..aa743a7b073de 100644 --- a/pom.xml +++ b/pom.xml @@ -84,7 +84,7 @@ 1.2.0 21.2.0 - 1.32.0 + 1.35.0 2.9.3 4.1.90.Final 1.70 @@ -94,6 +94,7 @@ 3.8.1 5.4.0 0.7.5 + 1.51.0 1.9.0 3.0.3 @@ -113,7 +114,6 @@ 4.0.3 2.9.0 - 1.2.18 0.9.5.5 5.9.2 @@ -128,7 +128,7 @@ 1.4.13 4.0.0 1.6.2 - 0.9.22 + 0.9.24 3.2.1 @@ -404,8 +404,17 @@ io.netty netty-transport-native-unix-common + + io.grpc + * + + + io.grpc + grpc-all + ${grpc.version} + com.ctrip.framework.apollo apollo-client @@ -530,12 +539,6 @@ ${commons-dbcp2.version} test - - com.alibaba - druid - ${druid.version} - test - com.mchange c3p0 @@ -1206,7 +1209,7 @@ generateStandardMetadata - 22.3.1 + 23.0.1 true true true diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/ProxyConfigurationLoader.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/ProxyConfigurationLoader.java index 8af3fbe6843ec..8c58778092852 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/ProxyConfigurationLoader.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/ProxyConfigurationLoader.java @@ -82,33 +82,33 @@ private static YamlProxyServerConfiguration loadServerConfiguration(final File y return null == result ? 
new YamlProxyServerConfiguration() : rebuildGlobalRuleConfiguration(result); } - private static YamlProxyServerConfiguration rebuildGlobalRuleConfiguration(final YamlProxyServerConfiguration serverConfiguration) { - serverConfiguration.getRules().removeIf(YamlGlobalRuleConfiguration.class::isInstance); - if (null != serverConfiguration.getAuthority()) { - serverConfiguration.getRules().add(serverConfiguration.getAuthority()); + private static YamlProxyServerConfiguration rebuildGlobalRuleConfiguration(final YamlProxyServerConfiguration serverConfig) { + serverConfig.getRules().removeIf(YamlGlobalRuleConfiguration.class::isInstance); + if (null != serverConfig.getAuthority()) { + serverConfig.getRules().add(serverConfig.getAuthority()); } - if (null != serverConfiguration.getTransaction()) { - serverConfiguration.getRules().add(serverConfiguration.getTransaction()); + if (null != serverConfig.getTransaction()) { + serverConfig.getRules().add(serverConfig.getTransaction()); } - if (null != serverConfiguration.getGlobalClock()) { - serverConfiguration.getRules().add(serverConfiguration.getGlobalClock()); + if (null != serverConfig.getGlobalClock()) { + serverConfig.getRules().add(serverConfig.getGlobalClock()); } - if (null != serverConfiguration.getSqlParser()) { - serverConfiguration.getRules().add(serverConfiguration.getSqlParser()); + if (null != serverConfig.getSqlParser()) { + serverConfig.getRules().add(serverConfig.getSqlParser()); } - if (null != serverConfiguration.getSqlTranslator()) { - serverConfiguration.getRules().add(serverConfiguration.getSqlTranslator()); + if (null != serverConfig.getSqlTranslator()) { + serverConfig.getRules().add(serverConfig.getSqlTranslator()); } - if (null != serverConfiguration.getTraffic()) { - serverConfiguration.getRules().add(serverConfiguration.getTraffic()); + if (null != serverConfig.getTraffic()) { + serverConfig.getRules().add(serverConfig.getTraffic()); } - if (null != serverConfiguration.getLogging()) { - 
serverConfiguration.getRules().add(serverConfiguration.getLogging()); + if (null != serverConfig.getLogging()) { + serverConfig.getRules().add(serverConfig.getLogging()); } - if (null != serverConfiguration.getSqlFederation()) { - serverConfiguration.getRules().add(serverConfiguration.getSqlFederation()); + if (null != serverConfig.getSqlFederation()) { + serverConfig.getRules().add(serverConfig.getSqlFederation()); } - return serverConfiguration; + return serverConfig; } private static Collection loadDatabaseConfigurations(final File configPath) throws IOException { diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyConfigurationSwapper.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyConfigurationSwapper.java index 6bdbf634ed690..6db4eab90dc7e 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyConfigurationSwapper.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyConfigurationSwapper.java @@ -20,9 +20,10 @@ import org.apache.shardingsphere.infra.config.database.DatabaseConfiguration; import org.apache.shardingsphere.infra.config.database.impl.DataSourceGeneratedDatabaseConfiguration; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.config.DataSourceConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; import 
org.apache.shardingsphere.infra.yaml.config.swapper.rule.YamlRuleConfigurationSwapperEngine; import org.apache.shardingsphere.proxy.backend.config.ProxyConfiguration; import org.apache.shardingsphere.proxy.backend.config.ProxyGlobalConfiguration; @@ -36,6 +37,7 @@ import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; +import java.util.stream.Collectors; /** * YAML proxy configuration swapper. @@ -66,7 +68,9 @@ private ProxyGlobalConfiguration swapGlobalConfiguration(final YamlProxyServerCo private Map swapDataSources(final Map yamlDataSourceConfigs) { Map dataSourceConfigs = swapDataSourceConfigurations(yamlDataSourceConfigs); - return DataSourcePoolCreator.create(DataSourcePropertiesCreator.createFromConfiguration(dataSourceConfigs)); + Map propsMap = dataSourceConfigs.entrySet().stream() + .collect(Collectors.toMap(Entry::getKey, entry -> DataSourcePoolPropertiesCreator.create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); + return DataSourcePoolCreator.create(propsMap, true); } private Map swapDatabaseConfigurations(final Map databaseConfigurations) { diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyDataSourceConfigurationSwapper.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyDataSourceConfigurationSwapper.java index ecca599349a07..fbe1c4f8f56b4 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyDataSourceConfigurationSwapper.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyDataSourceConfigurationSwapper.java @@ -17,9 +17,9 @@ package org.apache.shardingsphere.proxy.backend.config.yaml.swapper; -import org.apache.shardingsphere.infra.datasource.config.ConnectionConfiguration; -import 
org.apache.shardingsphere.infra.datasource.config.DataSourceConfiguration; -import org.apache.shardingsphere.infra.datasource.config.PoolConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.ConnectionConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.PoolConfiguration; import org.apache.shardingsphere.proxy.backend.config.yaml.YamlProxyDataSourceConfiguration; /** diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/connector/DatabaseConnector.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/connector/DatabaseConnector.java index 422580ea9ef7f..079fe4bea4669 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/connector/DatabaseConnector.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/connector/DatabaseConnector.java @@ -203,7 +203,7 @@ private ResponseHeader doExecuteWithImplicitCommitTransaction(final Collection createDriver JDBCBackendStatement statementManager = (JDBCBackendStatement) databaseConnectionManager.getConnectionSession().getStatementManager(); return new DriverExecutionPrepareEngine<>(driverType, maxConnectionsSizePerQuery, databaseConnectionManager, statementManager, new StatementOption(isReturnGeneratedKeys), metaData.getMetaData().getDatabase(databaseConnectionManager.getConnectionSession().getDatabaseName()).getRuleMetaData().getRules(), - metaData.getMetaData().getDatabase(databaseConnectionManager.getConnectionSession().getDatabaseName()).getResourceMetaData().getStorageTypes()); + metaData.getMetaData().getDatabase(databaseConnectionManager.getConnectionSession().getDatabaseName()).getResourceMetaData().getStorageUnitMetaData()); } private ResponseHeader processExecuteFederation(final ResultSet resultSet, final MetaDataContexts metaDataContexts) throws SQLException { diff 
--git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/connector/ProxySQLExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/connector/ProxySQLExecutor.java index 5ff74fb7a025e..f97904426a36a 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/connector/ProxySQLExecutor.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/connector/ProxySQLExecutor.java @@ -204,7 +204,7 @@ private List useDriverToExecute(final ExecutionContext executionC JDBCBackendStatement statementManager = (JDBCBackendStatement) databaseConnectionManager.getConnectionSession().getStatementManager(); DriverExecutionPrepareEngine prepareEngine = new DriverExecutionPrepareEngine<>( type, maxConnectionsSizePerQuery, databaseConnectionManager, statementManager, new StatementOption(isReturnGeneratedKeys), rules, - ProxyContext.getInstance().getDatabase(databaseConnectionManager.getConnectionSession().getDatabaseName()).getResourceMetaData().getStorageTypes()); + ProxyContext.getInstance().getDatabase(databaseConnectionManager.getConnectionSession().getDatabaseName()).getResourceMetaData().getStorageUnitMetaData()); ExecutionGroupContext executionGroupContext; try { executionGroupContext = prepareEngine.prepare(executionContext.getRouteContext(), executionContext.getExecutionUnits(), diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/connector/jdbc/datasource/JDBCBackendDataSource.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/connector/jdbc/datasource/JDBCBackendDataSource.java index 4c3a45a118789..eaa76e016a8d6 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/connector/jdbc/datasource/JDBCBackendDataSource.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/connector/jdbc/datasource/JDBCBackendDataSource.java @@ -68,7 +68,8 @@ public List 
getConnections(final String databaseName, final String d @SuppressWarnings("SynchronizationOnLocalVariableOrMethodParameter") public List getConnections(final String databaseName, final String dataSourceName, final int connectionSize, final ConnectionMode connectionMode, final TransactionType transactionType) throws SQLException { - DataSource dataSource = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData().getDataSources().get(dataSourceName); + DataSource dataSource = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData() + .getDatabase(databaseName).getResourceMetaData().getStorageUnitMetaData().getStorageUnits().get(dataSourceName).getDataSource(); if (dataSourceName.contains(".")) { String dataSourceStr = dataSourceName.split("\\.")[0]; if (GlobalDataSourceRegistry.getInstance().getCachedDataSources().containsKey(dataSourceStr)) { diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/ProxyBackendHandlerFactory.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/ProxyBackendHandlerFactory.java index 022b6c31b448e..43d3d62112046 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/ProxyBackendHandlerFactory.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/ProxyBackendHandlerFactory.java @@ -26,19 +26,19 @@ import org.apache.shardingsphere.distsql.parser.statement.ral.QueryableRALStatement; import org.apache.shardingsphere.distsql.parser.statement.rql.RQLStatement; import org.apache.shardingsphere.distsql.parser.statement.rul.RULStatement; -import org.apache.shardingsphere.infra.binder.engine.SQLBindEngine; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; +import org.apache.shardingsphere.infra.binder.engine.SQLBindEngine; import 
org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; +import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnsupportedSQLOperationException; import org.apache.shardingsphere.infra.executor.audit.SQLAuditEngine; import org.apache.shardingsphere.infra.hint.HintValueContext; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.session.query.QueryContext; -import org.apache.shardingsphere.infra.state.cluster.ClusterState; -import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; -import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnsupportedSQLOperationException; import org.apache.shardingsphere.infra.spi.ShardingSphereServiceLoader; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.apache.shardingsphere.infra.state.cluster.ClusterState; import org.apache.shardingsphere.parser.rule.SQLParserRule; import org.apache.shardingsphere.proxy.backend.context.ProxyContext; import org.apache.shardingsphere.proxy.backend.distsql.DistSQLStatementContext; @@ -110,7 +110,7 @@ public static ProxyBackendHandler newInstance(final DatabaseType databaseType, f return new SkipBackendHandler(sqlStatement); } SQLStatementContext sqlStatementContext = sqlStatement instanceof DistSQLStatement ? 
new DistSQLStatementContext((DistSQLStatement) sqlStatement) - : new SQLBindEngine(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData(), connectionSession.getDefaultDatabaseName()).bind(sqlStatement, + : new SQLBindEngine(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData(), connectionSession.getDefaultDatabaseName(), hintValueContext).bind(sqlStatement, Collections.emptyList()); QueryContext queryContext = new QueryContext(sqlStatementContext, sql, Collections.emptyList(), hintValueContext); connectionSession.setQueryContext(queryContext); diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/admin/executor/AbstractDatabaseMetaDataExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/admin/executor/AbstractDatabaseMetaDataExecutor.java index f65543f9aa4f0..7975e0280c7cb 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/admin/executor/AbstractDatabaseMetaDataExecutor.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/admin/executor/AbstractDatabaseMetaDataExecutor.java @@ -31,11 +31,11 @@ import org.apache.shardingsphere.infra.merge.result.MergedResult; import org.apache.shardingsphere.infra.merge.result.impl.transparent.TransparentMergedResult; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.user.Grantee; import org.apache.shardingsphere.proxy.backend.context.ProxyContext; import org.apache.shardingsphere.proxy.backend.session.ConnectionSession; -import javax.sql.DataSource; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; @@ -49,7 +49,6 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; -import 
java.util.Map.Entry; import java.util.Optional; import java.util.function.Consumer; import java.util.stream.Collectors; @@ -161,12 +160,12 @@ protected Collection getDatabaseNames(final ConnectionSession connection @Override protected void processMetaData(final String databaseName, final Consumer callback) throws SQLException { ResourceMetaData resourceMetaData = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(); - Optional> dataSourceEntry = resourceMetaData.getDataSources().entrySet().stream().findFirst(); - if (!dataSourceEntry.isPresent()) { + Optional storageUnit = resourceMetaData.getStorageUnitMetaData().getStorageUnits().values().stream().findFirst(); + if (!storageUnit.isPresent()) { return; } try ( - Connection connection = dataSourceEntry.get().getValue().getConnection(); + Connection connection = storageUnit.get().getDataSource().getConnection(); PreparedStatement preparedStatement = connection.prepareStatement(sql)) { for (int i = 0; i < parameters.size(); i++) { preparedStatement.setObject(i + 1, parameters.get(i)); diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/NewUpdatableGlobalRuleRALBackendHandler.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/NewUpdatableGlobalRuleRALBackendHandler.java index 4157f0bf9228c..1f11b4cfe73d9 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/NewUpdatableGlobalRuleRALBackendHandler.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/NewUpdatableGlobalRuleRALBackendHandler.java @@ -46,10 +46,10 @@ public ResponseHeader execute() { GlobalRuleRALUpdater globalRuleUpdater = TypedSPILoader.getService(GlobalRuleRALUpdater.class, sqlStatement.getClass()); Class ruleConfigClass = globalRuleUpdater.getRuleConfigurationClass(); 
ContextManager contextManager = ProxyContext.getInstance().getContextManager(); - Collection ruleConfigurations = contextManager.getMetaDataContexts().getMetaData().getGlobalRuleMetaData().getConfigurations(); - RuleConfiguration currentRuleConfig = findCurrentRuleConfiguration(ruleConfigurations, ruleConfigClass); + Collection ruleConfigs = contextManager.getMetaDataContexts().getMetaData().getGlobalRuleMetaData().getConfigurations(); + RuleConfiguration currentRuleConfig = findCurrentRuleConfiguration(ruleConfigs, ruleConfigClass); globalRuleUpdater.checkSQLStatement(currentRuleConfig, sqlStatement); - contextManager.getInstanceContext().getModeContextManager().alterGlobalRuleConfiguration(processUpdate(ruleConfigurations, sqlStatement, globalRuleUpdater, currentRuleConfig)); + contextManager.getInstanceContext().getModeContextManager().alterGlobalRuleConfiguration(processUpdate(ruleConfigs, sqlStatement, globalRuleUpdater, currentRuleConfig)); return new UpdateResponseHeader(sqlStatement); } diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/RALBackendHandlerFactory.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/RALBackendHandlerFactory.java index 5ee976e81bb8c..9f67920d9323c 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/RALBackendHandlerFactory.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/RALBackendHandlerFactory.java @@ -45,7 +45,8 @@ public static ProxyBackendHandler newInstance(final RALStatement sqlStatement, f return new QueryableRALBackendHandler<>((QueryableRALStatement) sqlStatement, connectionSession); } if (sqlStatement instanceof UpdatableGlobalRuleRALStatement) { - if ("Cluster".equals(ProxyContext.getInstance().getContextManager().getInstanceContext().getModeConfiguration().getType())) { + String modeType = 
ProxyContext.getInstance().getContextManager().getInstanceContext().getModeConfiguration().getType(); + if ("Cluster".equals(modeType) || "Standalone".equals(modeType)) { return new NewUpdatableGlobalRuleRALBackendHandler((UpdatableGlobalRuleRALStatement) sqlStatement); } return new UpdatableGlobalRuleRALBackendHandler((UpdatableGlobalRuleRALStatement) sqlStatement); diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/UpdatableGlobalRuleRALBackendHandler.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/UpdatableGlobalRuleRALBackendHandler.java index d483e614ea261..410aa3e92527a 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/UpdatableGlobalRuleRALBackendHandler.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/UpdatableGlobalRuleRALBackendHandler.java @@ -47,10 +47,10 @@ public ResponseHeader execute() { GlobalRuleRALUpdater globalRuleUpdater = TypedSPILoader.getService(GlobalRuleRALUpdater.class, sqlStatement.getClass()); Class ruleConfigClass = globalRuleUpdater.getRuleConfigurationClass(); ContextManager contextManager = ProxyContext.getInstance().getContextManager(); - Collection ruleConfigurations = contextManager.getMetaDataContexts().getMetaData().getGlobalRuleMetaData().getConfigurations(); - RuleConfiguration currentRuleConfig = findCurrentRuleConfiguration(ruleConfigurations, ruleConfigClass); + Collection ruleConfigs = contextManager.getMetaDataContexts().getMetaData().getGlobalRuleMetaData().getConfigurations(); + RuleConfiguration currentRuleConfig = findCurrentRuleConfiguration(ruleConfigs, ruleConfigClass); globalRuleUpdater.checkSQLStatement(currentRuleConfig, sqlStatement); - contextManager.getInstanceContext().getModeContextManager().alterGlobalRuleConfiguration(processUpdate(ruleConfigurations, sqlStatement, globalRuleUpdater, 
currentRuleConfig)); + contextManager.getInstanceContext().getModeContextManager().alterGlobalRuleConfiguration(processUpdate(ruleConfigs, sqlStatement, globalRuleUpdater, currentRuleConfig)); return new UpdateResponseHeader(sqlStatement); } diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/common/checker/EncryptRuleConfigurationImportChecker.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/common/checker/EncryptRuleConfigurationImportChecker.java index c71c13989bd6c..14276498b0563 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/common/checker/EncryptRuleConfigurationImportChecker.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/common/checker/EncryptRuleConfigurationImportChecker.java @@ -65,15 +65,15 @@ private void checkEncryptors(final EncryptRuleConfiguration currentRuleConfig) { currentRuleConfig.getEncryptors().values().forEach(each -> TypedSPILoader.checkService(EncryptAlgorithm.class, each.getType(), each.getProps())); } - private void checkTableEncryptorsExisted(final EncryptRuleConfiguration configuration, final String databaseName) { + private void checkTableEncryptorsExisted(final EncryptRuleConfiguration config, final String databaseName) { Collection columns = new LinkedList<>(); - configuration.getTables().forEach(each -> columns.addAll(each.getColumns())); + config.getTables().forEach(each -> columns.addAll(each.getColumns())); Collection notExistedEncryptors = columns.stream().map(optional -> optional.getCipher().getEncryptorName()).collect(Collectors.toList()); notExistedEncryptors.addAll( columns.stream().map(optional -> optional.getLikeQuery().map(EncryptColumnItemRuleConfiguration::getEncryptorName).orElse(null)).filter(Objects::nonNull).collect(Collectors.toList())); notExistedEncryptors.addAll(columns.stream().map(optional -> 
optional.getAssistedQuery().map(EncryptColumnItemRuleConfiguration::getEncryptorName).orElse(null)).filter(Objects::nonNull) .collect(Collectors.toList())); - Collection encryptors = configuration.getEncryptors().keySet(); + Collection encryptors = config.getEncryptors().keySet(); notExistedEncryptors.removeIf(encryptors::contains); ShardingSpherePreconditions.checkState(notExistedEncryptors.isEmpty(), () -> new MissingRequiredAlgorithmException(databaseName, notExistedEncryptors)); } diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ConvertYamlConfigurationExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ConvertYamlConfigurationExecutor.java index 88bcca67d59f2..2f8ad386508f4 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ConvertYamlConfigurationExecutor.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ConvertYamlConfigurationExecutor.java @@ -25,11 +25,11 @@ import org.apache.shardingsphere.distsql.parser.statement.ral.queryable.ConvertYamlConfigurationStatement; import org.apache.shardingsphere.encrypt.api.config.CompatibleEncryptRuleConfiguration; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; -import org.apache.shardingsphere.infra.datasource.config.DataSourceConfiguration; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; -import org.apache.shardingsphere.infra.datasource.props.custom.CustomDataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.synonym.PoolPropertySynonyms; +import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; +import 
org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.custom.CustomDataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.synonym.PoolPropertySynonyms; import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow; import org.apache.shardingsphere.infra.spi.type.ordered.OrderedSPILoader; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; @@ -121,8 +121,8 @@ private void appendResources(final Map while (iterator.hasNext()) { Entry entry = iterator.next(); DataSourceConfiguration dataSourceConfig = dataSourceConfigSwapper.swap(entry.getValue()); - DataSourceProperties dataSourceProps = DataSourcePropertiesCreator.create(dataSourceConfig); - appendResource(entry.getKey(), dataSourceProps, stringBuilder); + DataSourcePoolProperties props = DataSourcePoolPropertiesCreator.create(dataSourceConfig); + appendResource(entry.getKey(), props, stringBuilder); if (iterator.hasNext()) { stringBuilder.append(DistSQLScriptConstants.COMMA); } @@ -130,12 +130,12 @@ private void appendResources(final Map stringBuilder.append(DistSQLScriptConstants.SEMI).append(System.lineSeparator()).append(System.lineSeparator()); } - private void appendResource(final String resourceName, final DataSourceProperties dataSourceProps, final StringBuilder stringBuilder) { - Map connectionProps = dataSourceProps.getConnectionPropertySynonyms().getStandardProperties(); + private void appendResource(final String resourceName, final DataSourcePoolProperties dataSourcePoolProps, final StringBuilder stringBuilder) { + Map connectionProps = dataSourcePoolProps.getConnectionPropertySynonyms().getStandardProperties(); String url = (String) connectionProps.get(DistSQLScriptConstants.KEY_URL); String username = (String) 
connectionProps.get(DistSQLScriptConstants.KEY_USERNAME); String password = (String) connectionProps.get(DistSQLScriptConstants.KEY_PASSWORD); - String props = getResourceProperties(dataSourceProps.getPoolPropertySynonyms(), dataSourceProps.getCustomDataSourceProperties()); + String props = getResourceProperties(dataSourcePoolProps.getPoolPropertySynonyms(), dataSourcePoolProps.getCustomProperties()); if (Strings.isNullOrEmpty(password)) { stringBuilder.append(String.format(DistSQLScriptConstants.RESOURCE_DEFINITION_WITHOUT_PASSWORD, resourceName, url, username, props)); } else { @@ -143,12 +143,12 @@ private void appendResource(final String resourceName, final DataSourcePropertie } } - private String getResourceProperties(final PoolPropertySynonyms poolPropertySynonyms, final CustomDataSourceProperties customDataSourceProps) { + private String getResourceProperties(final PoolPropertySynonyms poolPropertySynonyms, final CustomDataSourcePoolProperties customDataSourcePoolProps) { StringBuilder result = new StringBuilder(); appendProperties(poolPropertySynonyms.getStandardProperties(), result); - if (!customDataSourceProps.getProperties().isEmpty()) { + if (!customDataSourcePoolProps.getProperties().isEmpty()) { result.append(DistSQLScriptConstants.COMMA); - appendProperties(customDataSourceProps.getProperties(), result); + appendProperties(customDataSourcePoolProps.getProperties(), result); } return result.toString(); } diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutor.java index ddfc2d3ee931c..feb75008599d0 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutor.java +++ 
b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutor.java @@ -20,18 +20,18 @@ import org.apache.shardingsphere.distsql.handler.ral.query.MetaDataRequiredQueryableRALExecutor; import org.apache.shardingsphere.distsql.parser.statement.ral.queryable.ExportStorageNodesStatement; import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.util.json.JsonUtils; import org.apache.shardingsphere.proxy.backend.context.ProxyContext; import org.apache.shardingsphere.proxy.backend.distsql.export.ExportedStorageNode; import org.apache.shardingsphere.proxy.backend.distsql.export.ExportedStorageNodes; import org.apache.shardingsphere.proxy.backend.util.ExportUtils; -import javax.sql.DataSource; import java.time.LocalDateTime; import java.util.Arrays; import java.util.Collection; @@ -87,15 +87,15 @@ private Map> getAllStorageNodes(final Sh private Map> generateDatabaseExportStorageNodesData(final ShardingSphereDatabase database) { Map storageNodes = new LinkedHashMap<>(); - for (Entry entry : database.getResourceMetaData().getDataSources().entrySet()) { + for (Entry entry : database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().entrySet()) { ConnectionProperties connectionProps = 
database.getResourceMetaData().getConnectionProperties(entry.getKey()); String databaseInstanceIp = getDatabaseInstanceIp(connectionProps); if (storageNodes.containsKey(databaseInstanceIp)) { continue; } - Map standardProperties = DataSourcePropertiesCreator.create(entry.getValue()).getConnectionPropertySynonyms().getStandardProperties(); + Map standardProps = DataSourcePoolPropertiesCreator.create(entry.getValue().getDataSource()).getConnectionPropertySynonyms().getStandardProperties(); ExportedStorageNode exportedStorageNode = new ExportedStorageNode(connectionProps.getHostname(), String.valueOf(connectionProps.getPort()), - String.valueOf(standardProperties.get("username")), String.valueOf(standardProperties.get("password")), connectionProps.getCatalog()); + String.valueOf(standardProps.get("username")), String.valueOf(standardProps.get("password")), connectionProps.getCatalog()); storageNodes.put(databaseInstanceIp, exportedStorageNode); } return Collections.singletonMap(database.getName(), storageNodes.values()); diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ShowComputeNodeModeExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ShowComputeNodeModeExecutor.java index dca08d1afdaaa..9d1cae30ad044 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ShowComputeNodeModeExecutor.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ShowComputeNodeModeExecutor.java @@ -17,12 +17,12 @@ package org.apache.shardingsphere.proxy.backend.handler.distsql.ral.queryable; -import com.google.gson.Gson; import org.apache.shardingsphere.distsql.handler.ral.query.InstanceContextRequiredQueryableRALExecutor; import org.apache.shardingsphere.distsql.parser.statement.ral.queryable.ShowComputeNodeModeStatement; import 
org.apache.shardingsphere.infra.config.mode.PersistRepositoryConfiguration; import org.apache.shardingsphere.infra.instance.InstanceContext; import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow; +import org.apache.shardingsphere.infra.util.json.JsonUtils; import java.util.Arrays; import java.util.Collection; @@ -43,7 +43,7 @@ public Collection getRows(final InstanceContext instanc PersistRepositoryConfiguration repositoryConfig = instanceContext.getModeConfiguration().getRepository(); String modeType = instanceContext.getModeConfiguration().getType(); String repositoryType = null == repositoryConfig ? "" : repositoryConfig.getType(); - String props = null == repositoryConfig || null == repositoryConfig.getProps() ? "" : new Gson().toJson(repositoryConfig.getProps()); + String props = null == repositoryConfig || null == repositoryConfig.getProps() || repositoryConfig.getProps().isEmpty() ? "" : JsonUtils.toJsonString(repositoryConfig.getProps()); return Collections.singleton(new LocalDataQueryResultRow(modeType, repositoryType, props)); } diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/AlterReadwriteSplittingStorageUnitStatusStatementUpdater.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/AlterReadwriteSplittingStorageUnitStatusStatementUpdater.java index ef03ba6ca1f73..c799f916239a9 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/AlterReadwriteSplittingStorageUnitStatusStatementUpdater.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/AlterReadwriteSplittingStorageUnitStatusStatementUpdater.java @@ -148,7 +148,6 @@ private void checkDisable(final ContextManager contextManager, final String data checkResourceExists(contextManager, databaseName, toBeDisabledStorageUnit); 
checkIsDisabled(replicaResources, disabledStorageUnits, toBeDisabledStorageUnit); checkIsReplicaResource(replicaResources, toBeDisabledStorageUnit); - checkIsLastResource(replicaResources, toBeDisabledStorageUnit); } private void checkIsDisabled(final Map replicaResources, final Collection disabledStorageUnits, final String toBeDisabledStorageUnit) { @@ -162,15 +161,6 @@ private void checkIsReplicaResource(final Map replicaStorageUnit () -> new UnsupportedSQLOperationException(String.format("`%s` is not used as a read storage unit by any read-write separation rules,cannot be disabled", toBeDisabledStorageUnit))); } - private void checkIsLastResource(final Map replicaStorageUnits, final String toBeDisabledStorageUnit) { - Collection onlyOneResourceRules = getOnlyOneResourceRules(replicaStorageUnits); - Collection toBeDisabledResourceRuleNames = Splitter.on(",").trimResults().splitToList(replicaStorageUnits.get(toBeDisabledStorageUnit)); - onlyOneResourceRules = onlyOneResourceRules.stream().filter(toBeDisabledResourceRuleNames::contains).collect(Collectors.toSet()); - Collection finalOnlyOneResourceRules = onlyOneResourceRules; - ShardingSpherePreconditions.checkState(onlyOneResourceRules.isEmpty(), - () -> new UnsupportedSQLOperationException(String.format("`%s` is the last read storage unit in `%s`, cannot be disabled", toBeDisabledStorageUnit, finalOnlyOneResourceRules))); - } - private Collection getGroupNames(final String toBeDisableStorageUnit, final Map replicaStorageUnits, final Map disabledStorageUnits, final Map autoAwareResources) { String groupNames = autoAwareResources.getOrDefault(toBeDisableStorageUnit, replicaStorageUnits.getOrDefault(toBeDisableStorageUnit, disabledStorageUnits.get(toBeDisableStorageUnit))); @@ -204,12 +194,6 @@ private Map> getExportedReadwriteSplittingRules(fina return result; } - private Collection getOnlyOneResourceRules(final Map replicaStorageUnits) { - return replicaStorageUnits.values().stream().map(databaseName -> 
Arrays.stream(databaseName.split(",")).collect(Collectors.toMap(each -> each, each -> 1)).entrySet()) - .flatMap(Collection::stream).collect(Collectors.toMap(Entry::getKey, Entry::getValue, Integer::sum)).entrySet().stream() - .filter(entry -> entry.getValue() <= 1).map(Entry::getKey).collect(Collectors.toSet()); - } - private void addReplicaResource(final Map replicaStorageUnits, final Entry> readwriteSplittingRule) { readwriteSplittingRule.getValue().entrySet().stream().filter(entry -> ExportableItemConstants.REPLICA_DATA_SOURCE_NAMES.equals(entry.getKey())) .map(entry -> Arrays.asList(entry.getValue().split(","))).flatMap(Collection::stream).forEach(each -> put(replicaStorageUnits, each, readwriteSplittingRule.getKey())); diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportMetaDataUpdater.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportMetaDataUpdater.java index 372605e1b7ee1..3dc8526d89fc4 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportMetaDataUpdater.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportMetaDataUpdater.java @@ -61,7 +61,7 @@ public void executeUpdate(final String databaseName, final ImportMetaDataStateme } else { jsonMetaDataConfig = new String(Base64.decodeBase64(sqlStatement.getMetaDataValue())); } - ExportedClusterInfo exportedClusterInfo = JsonUtils.readValue(jsonMetaDataConfig, ExportedClusterInfo.class); + ExportedClusterInfo exportedClusterInfo = JsonUtils.fromJsonString(jsonMetaDataConfig, ExportedClusterInfo.class); ExportedMetaData exportedMetaData = exportedClusterInfo.getMetaData(); importServerConfig(exportedMetaData); importDatabase(exportedMetaData); @@ -78,7 +78,7 @@ private void importServerConfig(final ExportedMetaData exportedMetaData) { } private 
void importDatabase(final ExportedMetaData exportedMetaData) { - for (final String each : exportedMetaData.getDatabases().values()) { + for (String each : exportedMetaData.getDatabases().values()) { YamlProxyDatabaseConfiguration yamlDatabaseConfig = YamlEngine.unmarshal(each, YamlProxyDatabaseConfiguration.class); databaseConfigImportExecutor.importDatabaseConfiguration(yamlDatabaseConfig); } diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/RefreshTableMetaDataUpdater.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/RefreshTableMetaDataUpdater.java index 3815015988e95..4cc07bb979443 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/RefreshTableMetaDataUpdater.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/RefreshTableMetaDataUpdater.java @@ -25,12 +25,12 @@ import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.exception.dialect.exception.syntax.database.NoDatabaseSelectedException; import org.apache.shardingsphere.infra.exception.dialect.exception.syntax.database.UnknownDatabaseException; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.proxy.backend.context.ProxyContext; import org.apache.shardingsphere.proxy.backend.handler.distsql.ral.updatable.updater.ConnectionSessionRequiredRALUpdater; import org.apache.shardingsphere.proxy.backend.session.ConnectionSession; -import javax.sql.DataSource; import java.util.Collections; import java.util.Map; @@ -43,7 +43,7 @@ public final class RefreshTableMetaDataUpdater implements ConnectionSessionRequi public void executeUpdate(final ConnectionSession connectionSession, 
final RefreshTableMetaDataStatement sqlStatement) { String databaseName = getDatabaseName(connectionSession); ContextManager contextManager = ProxyContext.getInstance().getContextManager(); - checkDataSources(databaseName, contextManager.getDataSourceMap(databaseName), sqlStatement); + checkStorageUnits(databaseName, contextManager.getStorageUnits(databaseName), sqlStatement); String schemaName = getSchemaName(databaseName, sqlStatement, connectionSession); if (sqlStatement.getStorageUnitName().isPresent()) { if (sqlStatement.getTableName().isPresent()) { @@ -60,11 +60,12 @@ public void executeUpdate(final ConnectionSession connectionSession, final Refre } } - private void checkDataSources(final String databaseName, final Map dataSources, final RefreshTableMetaDataStatement sqlStatement) { - ShardingSpherePreconditions.checkState(!dataSources.isEmpty(), () -> new EmptyStorageUnitException(databaseName)); + private void checkStorageUnits(final String databaseName, final Map storageUnits, final RefreshTableMetaDataStatement sqlStatement) { + ShardingSpherePreconditions.checkState(!storageUnits.isEmpty(), () -> new EmptyStorageUnitException(databaseName)); if (sqlStatement.getStorageUnitName().isPresent()) { String storageUnitName = sqlStatement.getStorageUnitName().get(); - ShardingSpherePreconditions.checkState(dataSources.containsKey(storageUnitName), () -> new MissingRequiredStorageUnitsException(databaseName, Collections.singletonList(storageUnitName))); + ShardingSpherePreconditions.checkState( + storageUnits.containsKey(storageUnitName), () -> new MissingRequiredStorageUnitsException(databaseName, Collections.singleton(storageUnitName))); } } diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/RDLBackendHandlerFactory.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/RDLBackendHandlerFactory.java index 07be027b28205..a5a96aaaf0225 100644 --- 
a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/RDLBackendHandlerFactory.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/RDLBackendHandlerFactory.java @@ -52,7 +52,8 @@ public static ProxyBackendHandler newInstance(final RDLStatement sqlStatement, f return getStorageUnitBackendHandler((StorageUnitDefinitionStatement) sqlStatement, connectionSession); } // TODO Remove when metadata structure adjustment completed. #25485 - if ("Cluster".equals(ProxyContext.getInstance().getContextManager().getInstanceContext().getModeConfiguration().getType())) { + String modeType = ProxyContext.getInstance().getContextManager().getInstanceContext().getModeConfiguration().getType(); + if ("Cluster".equals(modeType) || "Standalone".equals(modeType)) { return new NewRuleDefinitionBackendHandler<>((RuleDefinitionStatement) sqlStatement, connectionSession); } return new RuleDefinitionBackendHandler<>((RuleDefinitionStatement) sqlStatement, connectionSession); diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/rule/NewRuleDefinitionBackendHandler.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/rule/NewRuleDefinitionBackendHandler.java index 60b4a1ec05acb..24ad12ffa15c5 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/rule/NewRuleDefinitionBackendHandler.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/rule/NewRuleDefinitionBackendHandler.java @@ -129,7 +129,8 @@ private Collection processAlter(final ShardingSphereDatabase da @SuppressWarnings({"unchecked", "rawtypes"}) private RuleConfiguration decorateRuleConfiguration(final ShardingSphereDatabase database, final RuleConfiguration ruleConfig) { Optional decorator = 
TypedSPILoader.findService(RuleConfigurationDecorator.class, ruleConfig.getClass()); - return decorator.map(optional -> optional.decorate(database.getName(), database.getResourceMetaData().getDataSources(), database.getRuleMetaData().getRules(), ruleConfig)).orElse(ruleConfig); + return decorator.map(optional -> optional.decorate(database.getName(), + database.getResourceMetaData().getStorageUnitMetaData().getDataSources(), database.getRuleMetaData().getRules(), ruleConfig)).orElse(ruleConfig); } @SuppressWarnings({"rawtypes", "unchecked"}) @@ -138,7 +139,7 @@ private Collection processDrop(final ShardingSphereDatabase dat return Collections.emptyList(); } ModeContextManager modeContextManager = ProxyContext.getInstance().getContextManager().getInstanceContext().getModeContextManager(); - final RuleConfiguration toBeDroppedRuleConfig = updater.buildToBeDroppedRuleConfiguration(currentRuleConfig, sqlStatement); + RuleConfiguration toBeDroppedRuleConfig = updater.buildToBeDroppedRuleConfiguration(currentRuleConfig, sqlStatement); // TODO remove updateCurrentRuleConfiguration after update refactor completed. 
if (updater.updateCurrentRuleConfiguration(sqlStatement, currentRuleConfig) && ((DatabaseRuleConfiguration) currentRuleConfig).isEmpty()) { modeContextManager.removeRuleConfigurationItem(database.getName(), toBeDroppedRuleConfig); diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/rule/RuleDefinitionBackendHandler.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/rule/RuleDefinitionBackendHandler.java index 0eb13c5eaa55b..8181ed052dfa2 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/rule/RuleDefinitionBackendHandler.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/rule/RuleDefinitionBackendHandler.java @@ -101,7 +101,8 @@ private Collection processSQLStatement(final ShardingSphereDa @SuppressWarnings({"unchecked", "rawtypes"}) private RuleConfiguration decorateRuleConfiguration(final ShardingSphereDatabase database, final RuleConfiguration ruleConfig) { Optional decorator = TypedSPILoader.findService(RuleConfigurationDecorator.class, ruleConfig.getClass()); - return decorator.map(optional -> optional.decorate(database.getName(), database.getResourceMetaData().getDataSources(), database.getRuleMetaData().getRules(), ruleConfig)).orElse(ruleConfig); + return decorator.map(optional -> optional.decorate(database.getName(), + database.getResourceMetaData().getStorageUnitMetaData().getDataSources(), database.getRuleMetaData().getRules(), ruleConfig)).orElse(ruleConfig); } @SuppressWarnings({"rawtypes", "unchecked"}) diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandler.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandler.java index 9b39e13cab09e..9fca6f85c2153 100644 --- 
a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandler.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandler.java @@ -21,7 +21,7 @@ import org.apache.shardingsphere.distsql.handler.exception.storageunit.DuplicateStorageUnitException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.InvalidStorageUnitsException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.MissingRequiredStorageUnitsException; -import org.apache.shardingsphere.distsql.handler.validate.DataSourcePropertiesValidateHandler; +import org.apache.shardingsphere.distsql.handler.validate.DataSourcePoolPropertiesValidateHandler; import org.apache.shardingsphere.distsql.parser.segment.DataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.HostnameAndPortBasedDataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.URLBasedDataSourceSegment; @@ -30,10 +30,11 @@ import org.apache.shardingsphere.infra.database.core.connector.url.JdbcUrl; import org.apache.shardingsphere.infra.database.core.connector.url.StandardJdbcUrlParser; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.exception.core.external.ShardingSphereExternalException; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import 
org.apache.shardingsphere.proxy.backend.context.ProxyContext; import org.apache.shardingsphere.proxy.backend.response.header.ResponseHeader; import org.apache.shardingsphere.proxy.backend.response.header.update.UpdateResponseHeader; @@ -56,21 +57,21 @@ public final class AlterStorageUnitBackendHandler extends StorageUnitDefinitionB private final DatabaseType databaseType; - private final DataSourcePropertiesValidateHandler validateHandler; + private final DataSourcePoolPropertiesValidateHandler validateHandler; public AlterStorageUnitBackendHandler(final AlterStorageUnitStatement sqlStatement, final ConnectionSession connectionSession) { super(sqlStatement, connectionSession); databaseType = connectionSession.getProtocolType(); - validateHandler = new DataSourcePropertiesValidateHandler(); + validateHandler = new DataSourcePoolPropertiesValidateHandler(); } @Override public ResponseHeader execute(final String databaseName, final AlterStorageUnitStatement sqlStatement) { checkSQLStatement(databaseName, sqlStatement); - Map dataSourcePropsMap = DataSourceSegmentsConverter.convert(databaseType, sqlStatement.getStorageUnits()); - validateHandler.validate(dataSourcePropsMap); + Map propsMap = DataSourceSegmentsConverter.convert(databaseType, sqlStatement.getStorageUnits()); + validateHandler.validate(propsMap); try { - ProxyContext.getInstance().getContextManager().getInstanceContext().getModeContextManager().alterStorageUnits(databaseName, dataSourcePropsMap); + ProxyContext.getInstance().getContextManager().getInstanceContext().getModeContextManager().alterStorageUnits(databaseName, propsMap); } catch (final SQLException | ShardingSphereExternalException ex) { log.error("Alter storage unit failed", ex); throw new InvalidStorageUnitsException(Collections.singleton(ex.getMessage())); @@ -100,15 +101,15 @@ private Collection getDuplicatedStorageUnitNames(final Collection storageUnitNames) { - Map storageUnits = 
ProxyContext.getInstance().getDatabase(databaseName).getResourceMetaData().getDataSources(); + Map storageUnits = ProxyContext.getInstance().getDatabase(databaseName).getResourceMetaData().getStorageUnitMetaData().getStorageUnits(); Collection notExistedStorageUnitNames = storageUnitNames.stream().filter(each -> !storageUnits.containsKey(each)).collect(Collectors.toList()); ShardingSpherePreconditions.checkState(notExistedStorageUnitNames.isEmpty(), () -> new MissingRequiredStorageUnitsException(databaseName, notExistedStorageUnitNames)); } private void checkDatabase(final String databaseName, final AlterStorageUnitStatement sqlStatement) { - Map storageUnits = ProxyContext.getInstance().getDatabase(databaseName).getResourceMetaData().getDataSources(); + Map storageUnits = ProxyContext.getInstance().getDatabase(databaseName).getResourceMetaData().getStorageUnitMetaData().getStorageUnits(); Collection invalidStorageUnitNames = sqlStatement.getStorageUnits().stream().collect(Collectors.toMap(DataSourceSegment::getName, each -> each)).entrySet().stream() - .filter(each -> !isIdenticalDatabase(each.getValue(), storageUnits.get(each.getKey()))).map(Entry::getKey).collect(Collectors.toSet()); + .filter(each -> !isIdenticalDatabase(each.getValue(), storageUnits.get(each.getKey()).getDataSource())).map(Entry::getKey).collect(Collectors.toSet()); ShardingSpherePreconditions.checkState(invalidStorageUnitNames.isEmpty(), () -> new InvalidStorageUnitsException(Collections.singleton(String.format("Cannot alter the database of %s", invalidStorageUnitNames)))); } @@ -128,7 +129,7 @@ private boolean isIdenticalDatabase(final DataSourceSegment segment, final DataS port = String.valueOf(segmentJdbcUrl.getPort()); database = segmentJdbcUrl.getDatabase(); } - String url = String.valueOf(DataSourcePropertiesCreator.create(dataSource).getConnectionPropertySynonyms().getStandardProperties().get("url")); + String url = 
String.valueOf(DataSourcePoolPropertiesCreator.create(dataSource).getConnectionPropertySynonyms().getStandardProperties().get("url")); JdbcUrl dataSourceJdbcUrl = new StandardJdbcUrlParser().parse(url); return Objects.equals(hostName, dataSourceJdbcUrl.getHostname()) && Objects.equals(port, String.valueOf(dataSourceJdbcUrl.getPort())) && Objects.equals(database, dataSourceJdbcUrl.getDatabase()); diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandler.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandler.java index 06415bc1cf933..6aade316f1eb8 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandler.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandler.java @@ -20,12 +20,12 @@ import lombok.extern.slf4j.Slf4j; import org.apache.shardingsphere.distsql.handler.exception.storageunit.DuplicateStorageUnitException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.InvalidStorageUnitsException; -import org.apache.shardingsphere.distsql.handler.validate.DataSourcePropertiesValidateHandler; +import org.apache.shardingsphere.distsql.handler.validate.DataSourcePoolPropertiesValidateHandler; import org.apache.shardingsphere.distsql.parser.segment.DataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.converter.DataSourceSegmentsConverter; import org.apache.shardingsphere.distsql.parser.statement.rdl.create.RegisterStorageUnitStatement; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import 
org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.rule.identifier.type.DataSourceContainedRule; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.exception.core.external.ShardingSphereExternalException; @@ -50,30 +50,30 @@ public final class RegisterStorageUnitBackendHandler extends StorageUnitDefiniti private final DatabaseType databaseType; - private final DataSourcePropertiesValidateHandler validateHandler; + private final DataSourcePoolPropertiesValidateHandler validateHandler; public RegisterStorageUnitBackendHandler(final RegisterStorageUnitStatement sqlStatement, final ConnectionSession connectionSession) { super(sqlStatement, connectionSession); databaseType = connectionSession.getProtocolType(); - validateHandler = new DataSourcePropertiesValidateHandler(); + validateHandler = new DataSourcePoolPropertiesValidateHandler(); } @Override public ResponseHeader execute(final String databaseName, final RegisterStorageUnitStatement sqlStatement) { checkSQLStatement(databaseName, sqlStatement); - Map dataSourcePropsMap = DataSourceSegmentsConverter.convert(databaseType, sqlStatement.getStorageUnits()); + Map propsMap = DataSourceSegmentsConverter.convert(databaseType, sqlStatement.getStorageUnits()); if (sqlStatement.isIfNotExists()) { Collection currentStorageUnits = getCurrentStorageUnitNames(databaseName); Collection logicalDataSourceNames = getLogicalDataSourceNames(databaseName); - dataSourcePropsMap.keySet().removeIf(currentStorageUnits::contains); - dataSourcePropsMap.keySet().removeIf(logicalDataSourceNames::contains); + propsMap.keySet().removeIf(currentStorageUnits::contains); + propsMap.keySet().removeIf(logicalDataSourceNames::contains); } - if (dataSourcePropsMap.isEmpty()) { + if (propsMap.isEmpty()) { return new UpdateResponseHeader(sqlStatement); } - validateHandler.validate(dataSourcePropsMap); + 
validateHandler.validate(propsMap); try { - ProxyContext.getInstance().getContextManager().getInstanceContext().getModeContextManager().registerStorageUnits(databaseName, dataSourcePropsMap); + ProxyContext.getInstance().getContextManager().getInstanceContext().getModeContextManager().registerStorageUnits(databaseName, propsMap); } catch (final SQLException | ShardingSphereExternalException ex) { log.error("Register storage unit failed", ex); throw new InvalidStorageUnitsException(Collections.singleton(ex.getMessage())); @@ -112,7 +112,7 @@ private void checkDuplicatedLogicalDataSourceNames(final String databaseName, fi } private Collection getCurrentStorageUnitNames(final String databaseName) { - return ProxyContext.getInstance().getContextManager().getDataSourceMap(databaseName).keySet(); + return ProxyContext.getInstance().getContextManager().getStorageUnits(databaseName).keySet(); } private Collection getLogicalDataSourceNames(final String databaseName) { diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/UnregisterStorageUnitBackendHandler.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/UnregisterStorageUnitBackendHandler.java index 0c822b780cb8c..91505767c3406 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/UnregisterStorageUnitBackendHandler.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/UnregisterStorageUnitBackendHandler.java @@ -22,9 +22,10 @@ import org.apache.shardingsphere.distsql.handler.exception.storageunit.MissingRequiredStorageUnitsException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.StorageUnitInUsedException; import org.apache.shardingsphere.distsql.parser.statement.rdl.drop.UnregisterStorageUnitStatement; -import 
org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions; import org.apache.shardingsphere.infra.exception.core.external.server.ShardingSphereServerException; +import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.proxy.backend.context.ProxyContext; import org.apache.shardingsphere.proxy.backend.response.header.ResponseHeader; import org.apache.shardingsphere.proxy.backend.response.header.update.UpdateResponseHeader; @@ -32,7 +33,6 @@ import org.apache.shardingsphere.proxy.backend.util.StorageUnitUtils; import org.apache.shardingsphere.single.rule.SingleRule; -import javax.sql.DataSource; import java.sql.SQLException; import java.util.Collection; import java.util.Collections; @@ -71,14 +71,15 @@ public void checkSQLStatement(final String databaseName, final UnregisterStorage } private void checkExisted(final String databaseName, final Collection storageUnitNames) { - Map dataSources = ProxyContext.getInstance().getDatabase(databaseName).getResourceMetaData().getDataSources(); - Collection notExistedStorageUnits = storageUnitNames.stream().filter(each -> !dataSources.containsKey(each)).collect(Collectors.toList()); + Map storageUnits = ProxyContext.getInstance().getDatabase(databaseName).getResourceMetaData().getStorageUnitMetaData().getStorageUnits(); + Collection notExistedStorageUnits = storageUnitNames.stream().filter(each -> !storageUnits.containsKey(each)).collect(Collectors.toList()); ShardingSpherePreconditions.checkState(notExistedStorageUnits.isEmpty(), () -> new MissingRequiredStorageUnitsException(databaseName, notExistedStorageUnits)); } private void checkInUsed(final String databaseName, final UnregisterStorageUnitStatement sqlStatement) { ShardingSphereDatabase database = 
ProxyContext.getInstance().getDatabase(databaseName); - Map> inUsedStorageUnits = StorageUnitUtils.getInUsedStorageUnits(database.getRuleMetaData(), database.getResourceMetaData().getDataSources().size()); + Map> inUsedStorageUnits = StorageUnitUtils.getInUsedStorageUnits( + database.getRuleMetaData(), database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().size()); Collection inUsedStorageUnitNames = inUsedStorageUnits.keySet(); inUsedStorageUnitNames.retainAll(sqlStatement.getStorageUnitNames()); if (!inUsedStorageUnitNames.isEmpty()) { diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/rule/ShowRulesUsedStorageUnitExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/rule/ShowRulesUsedStorageUnitExecutor.java index be340fb063010..143d26aaf52d3 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/rule/ShowRulesUsedStorageUnitExecutor.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/rule/ShowRulesUsedStorageUnitExecutor.java @@ -62,7 +62,7 @@ public final class ShowRulesUsedStorageUnitExecutor implements RQLExecutor getRows(final ShardingSphereDatabase database, final ShowRulesUsedStorageUnitStatement sqlStatement) { Collection result = new LinkedList<>(); String resourceName = sqlStatement.getStorageUnitName().orElse(null); - if (database.getResourceMetaData().getDataSources().containsKey(resourceName)) { + if (database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().containsKey(resourceName)) { result.addAll(getShardingData(database)); result.addAll(getReadwriteSplittingData(database, resourceName)); result.addAll(getEncryptData(database)); diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java 
b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java index f632d955cd296..ae85c48001e0b 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutor.java @@ -17,19 +17,20 @@ package org.apache.shardingsphere.proxy.backend.handler.distsql.rql.storage.unit; -import com.google.gson.Gson; import org.apache.shardingsphere.distsql.handler.query.RQLExecutor; import org.apache.shardingsphere.distsql.parser.statement.rql.show.ShowStorageUnitsStatement; import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties; import org.apache.shardingsphere.infra.database.core.metadata.database.DialectDatabaseMetaData; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; -import org.apache.shardingsphere.infra.datasource.ShardingSphereStorageDataSourceWrapper; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; +import 
org.apache.shardingsphere.infra.util.json.JsonUtils; import org.apache.shardingsphere.proxy.backend.util.StorageUnitUtils; import javax.sql.DataSource; @@ -40,24 +41,14 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Optional; +import java.util.Properties; +import java.util.stream.Collectors; /** * Show storage unit executor. */ public final class ShowStorageUnitExecutor implements RQLExecutor { - private static final String CONNECTION_TIMEOUT_MILLISECONDS = "connectionTimeoutMilliseconds"; - - private static final String IDLE_TIMEOUT_MILLISECONDS = "idleTimeoutMilliseconds"; - - private static final String MAX_LIFETIME_MILLISECONDS = "maxLifetimeMilliseconds"; - - private static final String MAX_POOL_SIZE = "maxPoolSize"; - - private static final String MIN_POOL_SIZE = "minPoolSize"; - - private static final String READ_ONLY = "readOnly"; - @Override public Collection getColumnNames() { return Arrays.asList("name", "type", "host", "port", "db", "connection_timeout_milliseconds", "idle_timeout_milliseconds", @@ -67,58 +58,67 @@ public Collection getColumnNames() { @Override public Collection getRows(final ShardingSphereDatabase database, final ShowStorageUnitsStatement sqlStatement) { ResourceMetaData resourceMetaData = database.getResourceMetaData(); - Map dataSourcePropsMap = getDataSourcePropsMap(database, sqlStatement); Collection result = new LinkedList<>(); - for (Entry entry : dataSourcePropsMap.entrySet()) { + for (Entry entry : getDataSourcePoolPropertiesMap(database, sqlStatement).entrySet()) { String key = entry.getKey(); - DataSourceProperties dataSourceProps = entry.getValue(); ConnectionProperties connectionProps = resourceMetaData.getConnectionProperties(key); - Map standardProps = dataSourceProps.getPoolPropertySynonyms().getStandardProperties(); - Map otherProps = dataSourceProps.getCustomDataSourceProperties().getProperties(); + Map poolProps = entry.getValue().getPoolPropertySynonyms().getStandardProperties(); + Map 
customProps = getCustomProps(entry.getValue().getCustomProperties().getProperties(), connectionProps.getQueryProperties()); result.add(new LocalDataQueryResultRow(key, resourceMetaData.getStorageType(key).getType(), connectionProps.getHostname(), connectionProps.getPort(), connectionProps.getCatalog(), - getStandardProperty(standardProps, CONNECTION_TIMEOUT_MILLISECONDS), - getStandardProperty(standardProps, IDLE_TIMEOUT_MILLISECONDS), - getStandardProperty(standardProps, MAX_LIFETIME_MILLISECONDS), - getStandardProperty(standardProps, MAX_POOL_SIZE), - getStandardProperty(standardProps, MIN_POOL_SIZE), - getStandardProperty(standardProps, READ_ONLY), - otherProps.isEmpty() ? "" : new Gson().toJson(otherProps))); + getStandardProperty(poolProps, "connectionTimeoutMilliseconds"), + getStandardProperty(poolProps, "idleTimeoutMilliseconds"), + getStandardProperty(poolProps, "maxLifetimeMilliseconds"), + getStandardProperty(poolProps, "maxPoolSize"), + getStandardProperty(poolProps, "minPoolSize"), + getStandardProperty(poolProps, "readOnly"), + customProps.isEmpty() ? 
"" : JsonUtils.toJsonString(customProps))); + } + return result; + } + + private Map getCustomProps(final Map customProps, final Properties queryProps) { + Map result = new LinkedHashMap<>(customProps.size() + 1, 1F); + result.putAll(customProps); + if (!queryProps.isEmpty()) { + result.put("queryProperties", queryProps); } return result; } - private Map getDataSourcePropsMap(final ShardingSphereDatabase database, final ShowStorageUnitsStatement sqlStatement) { - Map result = new LinkedHashMap<>(database.getResourceMetaData().getDataSources().size(), 1F); - Map dataSourcePropsMap = database.getResourceMetaData().getDataSourcePropsMap(); - Map storageTypes = database.getResourceMetaData().getStorageTypes(); - Optional usageCountOptional = sqlStatement.getUsageCount(); - if (usageCountOptional.isPresent()) { - Map> inUsedStorageUnits = StorageUnitUtils.getInUsedStorageUnits(database.getRuleMetaData(), database.getResourceMetaData().getDataSources().size()); - for (Entry entry : database.getResourceMetaData().getDataSources().entrySet()) { + private Map getDataSourcePoolPropertiesMap(final ShardingSphereDatabase database, final ShowStorageUnitsStatement sqlStatement) { + Map result = new LinkedHashMap<>(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().size(), 1F); + Map propsMap = database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().entrySet().stream() + .collect(Collectors.toMap(Entry::getKey, entry -> entry.getValue().getDataSourcePoolProperties(), (oldValue, currentValue) -> currentValue, LinkedHashMap::new)); + Map storageUnits = database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits(); + Optional usageCount = sqlStatement.getUsageCount(); + if (usageCount.isPresent()) { + Map> inUsedStorageUnits = StorageUnitUtils.getInUsedStorageUnits( + database.getRuleMetaData(), database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().size()); + for (Entry entry : 
database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().entrySet()) { Integer currentUsageCount = inUsedStorageUnits.containsKey(entry.getKey()) ? inUsedStorageUnits.get(entry.getKey()).size() : 0; - if (usageCountOptional.get().equals(currentUsageCount)) { - result.put(entry.getKey(), getDataSourceProperties(dataSourcePropsMap, entry.getKey(), storageTypes.get(entry.getKey()), entry.getValue())); + if (usageCount.get().equals(currentUsageCount)) { + result.put(entry.getKey(), getDataSourcePoolProperties(propsMap, entry.getKey(), storageUnits.get(entry.getKey()).getStorageType(), entry.getValue().getDataSource())); } } } else { - for (Entry entry : database.getResourceMetaData().getDataSources().entrySet()) { - result.put(entry.getKey(), getDataSourceProperties(dataSourcePropsMap, entry.getKey(), storageTypes.get(entry.getKey()), entry.getValue())); + for (Entry entry : storageUnits.entrySet()) { + result.put(entry.getKey(), getDataSourcePoolProperties(propsMap, entry.getKey(), storageUnits.get(entry.getKey()).getStorageType(), entry.getValue().getDataSource())); } } return result; } - private DataSourceProperties getDataSourceProperties(final Map dataSourcePropsMap, final String storageUnitName, - final DatabaseType databaseType, final DataSource dataSource) { - DataSourceProperties result = getDataSourceProperties(dataSource); + private DataSourcePoolProperties getDataSourcePoolProperties(final Map propsMap, final String storageUnitName, + final DatabaseType databaseType, final DataSource dataSource) { + DataSourcePoolProperties result = getDataSourcePoolProperties(dataSource); DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData(); - if (dialectDatabaseMetaData.isInstanceConnectionAvailable() && dataSourcePropsMap.containsKey(storageUnitName)) { - DataSourceProperties unitDataSourceProperties = dataSourcePropsMap.get(storageUnitName); - for (Entry entry : 
unitDataSourceProperties.getPoolPropertySynonyms().getStandardProperties().entrySet()) { + if (dialectDatabaseMetaData.isInstanceConnectionAvailable() && propsMap.containsKey(storageUnitName)) { + DataSourcePoolProperties unitDataSourcePoolProps = propsMap.get(storageUnitName); + for (Entry entry : unitDataSourcePoolProps.getPoolPropertySynonyms().getStandardProperties().entrySet()) { if (null != entry.getValue()) { result.getPoolPropertySynonyms().getStandardProperties().put(entry.getKey(), entry.getValue()); } @@ -127,10 +127,10 @@ private DataSourceProperties getDataSourceProperties(final Map standardProps, final String key) { diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rul/sql/ParseDistSQLExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rul/sql/ParseDistSQLExecutor.java index cb43bb893f836..bf8b86b200df3 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rul/sql/ParseDistSQLExecutor.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rul/sql/ParseDistSQLExecutor.java @@ -17,10 +17,10 @@ package org.apache.shardingsphere.proxy.backend.handler.distsql.rul.sql; -import com.google.gson.Gson; import org.apache.shardingsphere.distsql.parser.statement.rul.sql.ParseStatement; import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.util.json.JsonUtils; import org.apache.shardingsphere.parser.rule.SQLParserRule; import org.apache.shardingsphere.proxy.backend.handler.distsql.rul.executor.ConnectionSessionRequiredRULExecutor; import org.apache.shardingsphere.proxy.backend.session.ConnectionSession; @@ -43,7 +43,7 @@ public Collection getColumnNames() { @Override public Collection getRows(final ShardingSphereMetaData 
metaData, final ConnectionSession connectionSession, final ParseStatement sqlStatement) { SQLStatement parsedSqlStatement = parseSQL(metaData, connectionSession, sqlStatement); - return Collections.singleton(new LocalDataQueryResultRow(parsedSqlStatement.getClass().getSimpleName(), new Gson().toJson(parsedSqlStatement))); + return Collections.singleton(new LocalDataQueryResultRow(parsedSqlStatement.getClass().getSimpleName(), JsonUtils.toJsonString(parsedSqlStatement))); } private SQLStatement parseSQL(final ShardingSphereMetaData metaData, final ConnectionSession connectionSession, final ParseStatement sqlStatement) { diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rul/sql/PreviewExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rul/sql/PreviewExecutor.java index 77445b4563094..9d41a7af701b5 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rul/sql/PreviewExecutor.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rul/sql/PreviewExecutor.java @@ -95,8 +95,8 @@ public Collection getRows(final ShardingSphereMetaData String sql = sqlParserRule.isSqlCommentParseEnabled() ? sqlStatement.getSql() : SQLHintUtils.removeHint(sqlStatement.getSql()); DatabaseType protocolType = metaDataContexts.getMetaData().getDatabase(databaseName).getProtocolType(); SQLStatement previewedStatement = sqlParserRule.getSQLParserEngine(protocolType.getTrunkDatabaseType().orElse(protocolType)).parse(sql, false); - SQLStatementContext sqlStatementContext = new SQLBindEngine(metaDataContexts.getMetaData(), databaseName).bind(previewedStatement, Collections.emptyList()); HintValueContext hintValueContext = sqlParserRule.isSqlCommentParseEnabled() ? 
new HintValueContext() : SQLHintUtils.extractHint(sqlStatement.getSql()).orElseGet(HintValueContext::new); + SQLStatementContext sqlStatementContext = new SQLBindEngine(metaDataContexts.getMetaData(), databaseName, hintValueContext).bind(previewedStatement, Collections.emptyList()); QueryContext queryContext = new QueryContext(sqlStatementContext, sql, Collections.emptyList(), hintValueContext); connectionSession.setQueryContext(queryContext); if (sqlStatementContext instanceof CursorAvailable && sqlStatementContext instanceof CursorDefinitionAware) { @@ -168,7 +168,7 @@ private DriverExecutionPrepareEngine createDriver return new DriverExecutionPrepareEngine<>(JDBCDriverType.STATEMENT, maxConnectionsSizePerQuery, connectionSession.getDatabaseConnectionManager(), (JDBCBackendStatement) connectionSession.getStatementManager(), new StatementOption(isReturnGeneratedKeys), metaDataContexts.getMetaData().getDatabase(getDatabaseName(connectionSession)).getRuleMetaData().getRules(), - metaDataContexts.getMetaData().getDatabase(getDatabaseName(connectionSession)).getResourceMetaData().getStorageTypes()); + metaDataContexts.getMetaData().getDatabase(getDatabaseName(connectionSession)).getResourceMetaData().getStorageUnitMetaData()); } private String getDatabaseName(final ConnectionSession connectionSession) { diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/ExportUtils.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/ExportUtils.java index 88d46568ebd7d..8af92c7fecf55 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/ExportUtils.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/ExportUtils.java @@ -21,8 +21,9 @@ import lombok.NoArgsConstructor; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.config.rule.scope.DatabaseRuleConfiguration; -import 
org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.spi.type.ordered.OrderedSPILoader; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.infra.yaml.config.swapper.rule.YamlRuleConfigurationSwapper; @@ -83,20 +84,20 @@ private static void appendDatabaseName(final String databaseName, final StringBu } private static void appendDataSourceConfigurations(final ShardingSphereDatabase database, final StringBuilder stringBuilder) { - if (database.getResourceMetaData().getDataSourcePropsMap().isEmpty()) { + if (database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().isEmpty()) { return; } stringBuilder.append("dataSources:").append(System.lineSeparator()); - for (Entry entry : database.getResourceMetaData().getDataSourcePropsMap().entrySet()) { - appendDataSourceConfiguration(entry.getKey(), entry.getValue(), stringBuilder); + for (Entry entry : database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().entrySet()) { + appendDataSourceConfiguration(entry.getKey(), entry.getValue().getDataSourcePoolProperties(), stringBuilder); } } - private static void appendDataSourceConfiguration(final String name, final DataSourceProperties dataSourceProps, final StringBuilder stringBuilder) { + private static void appendDataSourceConfiguration(final String name, final DataSourcePoolProperties props, final StringBuilder stringBuilder) { stringBuilder.append(" ").append(name).append(':').append(System.lineSeparator()); - dataSourceProps.getConnectionPropertySynonyms().getStandardProperties() + props.getConnectionPropertySynonyms().getStandardProperties() .forEach((key, value) -> 
stringBuilder.append(" ").append(key).append(": ").append(value).append(System.lineSeparator())); - for (Entry entry : dataSourceProps.getPoolPropertySynonyms().getStandardProperties().entrySet()) { + for (Entry entry : props.getPoolPropertySynonyms().getStandardProperties().entrySet()) { if (null != entry.getValue()) { stringBuilder.append(" ").append(entry.getKey()).append(": ").append(entry.getValue()).append(System.lineSeparator()); } diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/StorageUnitUtils.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/StorageUnitUtils.java index ef4b5b13fe771..f915ad6f7644f 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/StorageUnitUtils.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/StorageUnitUtils.java @@ -47,12 +47,13 @@ public final class StorageUnitUtils { */ public static Map> getInUsedStorageUnits(final RuleMetaData ruleMetaData, final int initialCapacity) { Map> result = new LinkedHashMap<>(initialCapacity, 1F); - getFromDataSourceContainedRules(result, ruleMetaData.findRules(DataSourceContainedRule.class)); - getFromDataNodeContainedRules(result, ruleMetaData.findRules(DataNodeContainedRule.class)); + result.putAll(getFromDataSourceContainedRules(ruleMetaData.findRules(DataSourceContainedRule.class))); + result.putAll(getFromDataNodeContainedRules(ruleMetaData.findRules(DataNodeContainedRule.class))); return result; } - private static void getFromDataSourceContainedRules(final Map> result, final Collection dataSourceContainedRules) { + private static Map> getFromDataSourceContainedRules(final Collection dataSourceContainedRules) { + Map> result = new LinkedHashMap<>(); for (DataSourceContainedRule each : dataSourceContainedRules) { Collection inUsedStorageUnits = getInUsedStorageUnitNames(each); if (inUsedStorageUnits.isEmpty()) { @@ -60,13 +61,15 @@ private 
static void getFromDataSourceContainedRules(final Map { Collection rules = result.getOrDefault(storageUnit, new LinkedHashSet<>()); - rules.add(each.getType()); + rules.add(each.getClass().getSimpleName()); result.put(storageUnit, rules); }); } + return result; } - private static void getFromDataNodeContainedRules(final Map> result, final Collection dataNodeContainedRules) { + private static Map> getFromDataNodeContainedRules(final Collection dataNodeContainedRules) { + Map> result = new LinkedHashMap<>(); for (DataNodeContainedRule each : dataNodeContainedRules) { Collection inUsedStorageUnits = getInUsedStorageUnitNames(each); if (inUsedStorageUnits.isEmpty()) { @@ -74,10 +77,11 @@ private static void getFromDataNodeContainedRules(final Map { Collection rules = result.getOrDefault(storageUnit, new LinkedHashSet<>()); - rules.add(each.getType()); + rules.add(each.getClass().getSimpleName()); result.put(storageUnit, rules); }); } + return result; } private static Collection getInUsedStorageUnitNames(final DataSourceContainedRule rule) { diff --git a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/YamlDatabaseConfigurationImportExecutor.java b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/YamlDatabaseConfigurationImportExecutor.java index 8db5512bd2ec9..2c1748977a1b7 100644 --- a/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/YamlDatabaseConfigurationImportExecutor.java +++ b/proxy/backend/core/src/main/java/org/apache/shardingsphere/proxy/backend/util/YamlDatabaseConfigurationImportExecutor.java @@ -24,7 +24,7 @@ import org.apache.shardingsphere.infra.exception.core.external.sql.type.kernel.category.DistSQLException; import org.apache.shardingsphere.distsql.handler.exception.datasource.MissingRequiredDataSourcesException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.InvalidStorageUnitsException; -import 
org.apache.shardingsphere.distsql.handler.validate.DataSourcePropertiesValidateHandler; +import org.apache.shardingsphere.distsql.handler.validate.DataSourcePoolPropertiesValidateHandler; import org.apache.shardingsphere.encrypt.api.config.CompatibleEncryptRuleConfiguration; import org.apache.shardingsphere.encrypt.api.config.EncryptRuleConfiguration; import org.apache.shardingsphere.encrypt.rule.EncryptRule; @@ -35,10 +35,10 @@ import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; import org.apache.shardingsphere.infra.database.DatabaseTypeEngine; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.config.DataSourceConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.instance.InstanceContext; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.rule.ShardingSphereRule; @@ -106,7 +106,7 @@ public final class YamlDatabaseConfigurationImportExecutor { private final YamlProxyDataSourceConfigurationSwapper dataSourceConfigSwapper = new YamlProxyDataSourceConfigurationSwapper(); - private final DataSourcePropertiesValidateHandler validateHandler = new DataSourcePropertiesValidateHandler(); + private final DataSourcePoolPropertiesValidateHandler validateHandler = new DataSourcePoolPropertiesValidateHandler(); /** * Import proxy database from yaml configuration. 
@@ -130,7 +130,7 @@ public void importDatabaseConfiguration(final YamlProxyDatabaseConfiguration yam private void checkDatabase(final String databaseName) { ShardingSpherePreconditions.checkNotNull(databaseName, () -> new UnsupportedSQLOperationException("Property `databaseName` in imported config is required")); if (ProxyContext.getInstance().databaseExists(databaseName)) { - ShardingSpherePreconditions.checkState(ProxyContext.getInstance().getDatabase(databaseName).getResourceMetaData().getDataSources().isEmpty(), + ShardingSpherePreconditions.checkState(ProxyContext.getInstance().getDatabase(databaseName).getResourceMetaData().getStorageUnitMetaData().getStorageUnits().isEmpty(), () -> new UnsupportedSQLOperationException(String.format("Database `%s` exists and is not empty,overwrite is not supported", databaseName))); } } @@ -147,19 +147,20 @@ private void addDatabase(final String databaseName) { } private void addResources(final String databaseName, final Map yamlDataSourceMap) { - Map dataSourcePropsMap = new LinkedHashMap<>(yamlDataSourceMap.size(), 1F); + Map propsMap = new LinkedHashMap<>(yamlDataSourceMap.size(), 1F); for (Entry entry : yamlDataSourceMap.entrySet()) { DataSourceConfiguration dataSourceConfig = dataSourceConfigSwapper.swap(entry.getValue()); - dataSourcePropsMap.put(entry.getKey(), DataSourcePropertiesCreator.create(dataSourceConfig)); + propsMap.put(entry.getKey(), DataSourcePoolPropertiesCreator.create(dataSourceConfig)); } - validateHandler.validate(dataSourcePropsMap); + validateHandler.validate(propsMap); try { - ProxyContext.getInstance().getContextManager().getInstanceContext().getModeContextManager().registerStorageUnits(databaseName, dataSourcePropsMap); + ProxyContext.getInstance().getContextManager().getInstanceContext().getModeContextManager().registerStorageUnits(databaseName, propsMap); } catch (final SQLException ex) { throw new InvalidStorageUnitsException(Collections.singleton(ex.getMessage())); } - Map dataSource = 
ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData().getDataSources(); - dataSourcePropsMap.forEach((key, value) -> dataSource.put(key, DataSourcePoolCreator.create(value))); + Map dataSource = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData() + .getDatabase(databaseName).getResourceMetaData().getStorageUnitMetaData().getDataSources(); + propsMap.forEach((key, value) -> dataSource.put(key, DataSourcePoolCreator.create(value))); } private void addRules(final String databaseName, final Collection yamlRuleConfigs) { @@ -246,7 +247,7 @@ private void addShardingRuleConfiguration(final ShardingRuleConfiguration shardi InstanceContext instanceContext = ProxyContext.getInstance().getContextManager().getInstanceContext(); shardingRuleConfigImportChecker.check(database, shardingRuleConfig); allRuleConfigs.add(shardingRuleConfig); - database.getRuleMetaData().getRules().add(new ShardingRule(shardingRuleConfig, database.getResourceMetaData().getDataSources().keySet(), instanceContext)); + database.getRuleMetaData().getRules().add(new ShardingRule(shardingRuleConfig, database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().keySet(), instanceContext)); } private void addReadwriteSplittingRuleConfiguration(final ReadwriteSplittingRuleConfiguration readwriteSplittingRuleConfig, @@ -278,12 +279,13 @@ private void addMaskRuleConfiguration(final MaskRuleConfiguration maskRuleConfig private void addBroadcastRuleConfiguration(final BroadcastRuleConfiguration broadcastRuleConfig, final Collection allRuleConfigs, final ShardingSphereDatabase database) { allRuleConfigs.add(broadcastRuleConfig); - database.getRuleMetaData().getRules().add(new BroadcastRule(broadcastRuleConfig, database.getName(), database.getResourceMetaData().getDataSources())); + database.getRuleMetaData().getRules().add(new BroadcastRule(broadcastRuleConfig, database.getName(), 
database.getResourceMetaData().getStorageUnitMetaData().getDataSources())); } private void addSingleRuleConfiguration(final SingleRuleConfiguration broadcastRuleConfig, final Collection allRuleConfigs, final ShardingSphereDatabase database) { allRuleConfigs.add(broadcastRuleConfig); - database.getRuleMetaData().getRules().add(new SingleRule(broadcastRuleConfig, database.getName(), database.getResourceMetaData().getDataSources(), database.getRuleMetaData().getRules())); + database.getRuleMetaData().getRules().add( + new SingleRule(broadcastRuleConfig, database.getName(), database.getResourceMetaData().getStorageUnitMetaData().getDataSources(), database.getRuleMetaData().getRules())); } private void dropDatabase(final String databaseName) { diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyConfigurationSwapperTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyConfigurationSwapperTest.java index 7dc6539e7d2ae..d8592cbfe01ce 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyConfigurationSwapperTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyConfigurationSwapperTest.java @@ -22,6 +22,7 @@ import org.apache.shardingsphere.infra.config.algorithm.AlgorithmConfiguration; import org.apache.shardingsphere.infra.config.database.DatabaseConfiguration; import org.apache.shardingsphere.infra.config.rule.RuleConfiguration; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode; import org.apache.shardingsphere.proxy.backend.config.ProxyConfiguration; import org.apache.shardingsphere.proxy.backend.config.ProxyConfigurationLoader; import org.apache.shardingsphere.proxy.backend.config.YamlProxyConfiguration; @@ -55,7 +56,7 @@ void assertSwap() throws IOException { private void 
assertDataSources(final ProxyConfiguration proxyConfig) { Map actual = proxyConfig.getDatabaseConfigurations(); assertThat(actual.size(), is(1)); - HikariDataSource dataSource = (HikariDataSource) actual.get("swapper_test").getStorageResource().getStorageNodes().get("foo_db"); + HikariDataSource dataSource = (HikariDataSource) actual.get("swapper_test").getStorageResource().getStorageNodeDataSources().get(new StorageNode("foo_db")); assertThat(dataSource.getJdbcUrl(), is("jdbc:h2:mem:foo_db;DB_CLOSE_DELAY=-1")); assertThat(dataSource.getUsername(), is("sa")); assertThat(dataSource.getPassword(), is("")); diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyDataSourceConfigurationSwapperTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyDataSourceConfigurationSwapperTest.java index b74357b1c5f53..7548cfb89fd24 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyDataSourceConfigurationSwapperTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/config/yaml/swapper/YamlProxyDataSourceConfigurationSwapperTest.java @@ -17,9 +17,9 @@ package org.apache.shardingsphere.proxy.backend.config.yaml.swapper; -import org.apache.shardingsphere.infra.datasource.config.ConnectionConfiguration; -import org.apache.shardingsphere.infra.datasource.config.DataSourceConfiguration; -import org.apache.shardingsphere.infra.datasource.config.PoolConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.ConnectionConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.DataSourceConfiguration; +import org.apache.shardingsphere.infra.datasource.pool.config.PoolConfiguration; import org.apache.shardingsphere.proxy.backend.config.ProxyConfigurationLoader; import org.apache.shardingsphere.proxy.backend.config.YamlProxyConfiguration; 
import org.apache.shardingsphere.proxy.backend.config.yaml.YamlProxyDataSourceConfiguration; diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/connector/DatabaseConnectorTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/connector/DatabaseConnectorTest.java index 21bfae7098e5a..33c71fefda578 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/connector/DatabaseConnectorTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/connector/DatabaseConnectorTest.java @@ -40,6 +40,7 @@ import org.apache.shardingsphere.infra.session.query.QueryContext; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.metadata.persist.MetaDataPersistService; +import org.apache.shardingsphere.metadata.persist.data.ShardingSphereDataPersistService; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.mode.metadata.MetaDataContexts; import org.apache.shardingsphere.parser.config.SQLParserRuleConfiguration; @@ -72,6 +73,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Properties; import static org.hamcrest.CoreMatchers.instanceOf; @@ -80,6 +82,7 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; @@ -115,7 +118,11 @@ void setUp() { private ContextManager mockContextManager() { RuleMetaData globalRuleMetaData = new RuleMetaData(Arrays.asList(new SQLParserRule(new SQLParserRuleConfiguration(false, mock(CacheOption.class), mock(CacheOption.class))), sqlFederationRule)); - 
MetaDataContexts metaDataContexts = new MetaDataContexts(mock(MetaDataPersistService.class), + MetaDataPersistService metaDataPersistService = mock(MetaDataPersistService.class); + ShardingSphereDataPersistService shardingSphereDataPersistService = mock(ShardingSphereDataPersistService.class); + when(shardingSphereDataPersistService.load(any())).thenReturn(Optional.empty()); + when(metaDataPersistService.getShardingSphereDataPersistService()).thenReturn(shardingSphereDataPersistService); + MetaDataContexts metaDataContexts = new MetaDataContexts(metaDataPersistService, new ShardingSphereMetaData(mockDatabases(), mock(ResourceMetaData.class), globalRuleMetaData, new ConfigurationProperties(new Properties()))); ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); when(result.getMetaDataContexts()).thenReturn(metaDataContexts); @@ -126,7 +133,6 @@ private Map mockDatabases() { ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); when(database.containsDataSource()).thenReturn(true); when(database.isComplete()).thenReturn(true); - when(database.getResourceMetaData().getStorageTypes()).thenReturn(Collections.singletonMap("ds_0", TypedSPILoader.getService(DatabaseType.class, "H2"))); when(database.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "H2")); when(database.getRuleMetaData().getRules()).thenReturn(Collections.emptyList()); return Collections.singletonMap("foo_db", database); diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/connector/jdbc/datasource/JDBCBackendDataSourceTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/connector/jdbc/datasource/JDBCBackendDataSourceTest.java index f63c406b81034..03ca65319fefd 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/connector/jdbc/datasource/JDBCBackendDataSourceTest.java +++ 
b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/connector/jdbc/datasource/JDBCBackendDataSourceTest.java @@ -49,10 +49,10 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Properties; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; @@ -95,11 +95,9 @@ private Map createDatabases() { DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, "FIXTURE"); ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); when(database.getProtocolType()).thenReturn(databaseType); - Map storageTypes = new LinkedHashMap<>(2, 1F); - storageTypes.put("ds_0", databaseType); - storageTypes.put("ds_1", databaseType); - when(database.getResourceMetaData().getStorageTypes()).thenReturn(storageTypes); - when(database.getResourceMetaData().getDataSources()).thenReturn(mockDataSources(2)); + for (Entry entry : mockDataSources(2).entrySet()) { + when(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits().get(entry.getKey()).getDataSource()).thenReturn(entry.getValue()); + } return Collections.singletonMap("schema", database); } diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/ProxyBackendHandlerFactoryTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/ProxyBackendHandlerFactoryTest.java index 1c74cabe0a580..8e7cc3a984b91 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/ProxyBackendHandlerFactoryTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/ProxyBackendHandlerFactoryTest.java @@ -109,7 +109,7 @@ private ContextManager mockContextManager() { 
when(result.getMetaDataContexts()).thenReturn(metaDataContexts); when(metaDataContexts.getMetaData().getProps()).thenReturn(new ConfigurationProperties(new Properties())); RuleMetaData globalRuleMetaData = new RuleMetaData(Arrays.asList( - new AuthorityRule(new DefaultAuthorityRuleConfigurationBuilder().build(), Collections.emptyMap()), + new AuthorityRule(new DefaultAuthorityRuleConfigurationBuilder().build()), new SQLParserRule(new DefaultSQLParserRuleConfigurationBuilder().build()), new TransactionRule(new DefaultTransactionRuleConfigurationBuilder().build(), Collections.emptyMap()))); when(metaDataContexts.getMetaData().getGlobalRuleMetaData()).thenReturn(globalRuleMetaData); diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/admin/DatabaseAdminQueryBackendHandlerTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/admin/DatabaseAdminQueryBackendHandlerTest.java index 0fe8a64323fbc..00b8a63611da5 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/admin/DatabaseAdminQueryBackendHandlerTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/admin/DatabaseAdminQueryBackendHandlerTest.java @@ -112,9 +112,8 @@ private ContextManager mockContextManager() { ShardingSphereDatabase database = mock(ShardingSphereDatabase.class); when(database.getProtocolType()).thenReturn(databaseType); when(ProxyContext.getInstance().getDatabase("foo_db")).thenReturn(database); - MetaDataContexts metaDataContexts = new MetaDataContexts(mock(MetaDataPersistService.class), new ShardingSphereMetaData( - Collections.singletonMap("foo_db", database), mock(ResourceMetaData.class), - mock(RuleMetaData.class), new ConfigurationProperties(new Properties()))); - return new ContextManager(metaDataContexts, mock(InstanceContext.class)); + ShardingSphereMetaData metaData = new ShardingSphereMetaData(Collections.singletonMap("foo_db", 
database), mock(ResourceMetaData.class), + mock(RuleMetaData.class), new ConfigurationProperties(new Properties())); + return new ContextManager(new MetaDataContexts(mock(MetaDataPersistService.class), metaData), mock(InstanceContext.class)); } } diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/database/DatabaseOperateBackendHandlerFactoryTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/database/DatabaseOperateBackendHandlerFactoryTest.java index 257c6e4782d77..9cfc3c99b0dbb 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/database/DatabaseOperateBackendHandlerFactoryTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/database/DatabaseOperateBackendHandlerFactoryTest.java @@ -158,7 +158,6 @@ private void setGovernanceMetaDataContexts(final boolean isGovernance) { private MetaDataContexts mockMetaDataContexts() { MetaDataContexts result = ProxyContext.getInstance().getContextManager().getMetaDataContexts(); - when(result.getMetaData().getDatabase("foo_db").getResourceMetaData().getDataSources()).thenReturn(Collections.emptyMap()); when(result.getMetaData().getDatabase("foo_db").getResourceMetaData().getNotExistedDataSources(any())).thenReturn(Collections.emptyList()); return result; } diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/DistSQLBackendHandlerFactoryTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/DistSQLBackendHandlerFactoryTest.java index af78f4b0324db..b167e61aae83f 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/DistSQLBackendHandlerFactoryTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/DistSQLBackendHandlerFactoryTest.java @@ -92,7 +92,6 @@ void setUp() { private 
ShardingSphereDatabase mockDatabase() { ShardingSphereDatabase result = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); - when(result.getResourceMetaData().getDataSources()).thenReturn(Collections.emptyMap()); when(result.getResourceMetaData().getNotExistedDataSources(any())).thenReturn(Collections.emptyList()); when(result.getRuleMetaData()).thenReturn(new RuleMetaData(Collections.emptyList())); return result; @@ -135,25 +134,25 @@ void assertExecuteAlterResourceContext() throws SQLException { @Test void assertExecuteAlterShadowRuleContext() throws SQLException { - mockShardingSphereRuleMetaData(); + mockRuleMetaData(); assertThat(RDLBackendHandlerFactory.newInstance(mock(AlterShadowRuleStatement.class), connectionSession).execute(), instanceOf(UpdateResponseHeader.class)); } @Test void assertExecuteCreateShadowRuleContext() throws SQLException { - mockShardingSphereRuleMetaData(); + mockRuleMetaData(); assertThat(RDLBackendHandlerFactory.newInstance(mock(CreateShadowRuleStatement.class), connectionSession).execute(), instanceOf(UpdateResponseHeader.class)); } @Test void assertExecuteDropShadowRuleContext() throws SQLException { - mockShardingSphereRuleMetaData(); + mockRuleMetaData(); assertThat(RDLBackendHandlerFactory.newInstance(mock(DropShadowRuleStatement.class), connectionSession).execute(), instanceOf(UpdateResponseHeader.class)); } @Test void assertExecuteAlterDefaultShadowAlgorithm() throws SQLException { - mockShardingSphereRuleMetaData(); + mockRuleMetaData(); AlterDefaultShadowAlgorithmStatement statement = new AlterDefaultShadowAlgorithmStatement( new ShadowAlgorithmSegment("foo", new AlgorithmSegment("SQL_HINT", PropertiesBuilder.build(new Property("type", "value"))))); assertThat(RDLBackendHandlerFactory.newInstance(statement, connectionSession).execute(), instanceOf(UpdateResponseHeader.class)); @@ -161,25 +160,25 @@ void assertExecuteAlterDefaultShadowAlgorithm() throws SQLException { @Test void assertExecuteShowShadowRulesContext() 
throws SQLException { - mockShardingSphereRuleMetaData(); + mockRuleMetaData(); assertThat(RQLBackendHandlerFactory.newInstance(mock(ShowShadowRulesStatement.class), connectionSession).execute(), instanceOf(QueryResponseHeader.class)); } @Test void assertExecuteShowShadowTableRulesContext() throws SQLException { - mockShardingSphereRuleMetaData(); + mockRuleMetaData(); assertThat(RQLBackendHandlerFactory.newInstance(mock(ShowShadowTableRulesStatement.class), connectionSession).execute(), instanceOf(QueryResponseHeader.class)); } @Test void assertExecuteShowShadowAlgorithmsContext() throws SQLException { - mockShardingSphereRuleMetaData(); + mockRuleMetaData(); assertThat(RQLBackendHandlerFactory.newInstance(mock(ShowShadowAlgorithmsStatement.class), connectionSession).execute(), instanceOf(QueryResponseHeader.class)); } @Test void assertExecuteDropShadowAlgorithmContext() throws SQLException { - mockShardingSphereRuleMetaData(); + mockRuleMetaData(); assertThat(RDLBackendHandlerFactory.newInstance(mock(DropShadowAlgorithmStatement.class), connectionSession).execute(), instanceOf(UpdateResponseHeader.class)); } @@ -208,7 +207,7 @@ void assertExecuteShowResourceContext() throws SQLException { assertThat(RQLBackendHandlerFactory.newInstance(mock(ShowStorageUnitsStatement.class), connectionSession).execute(), instanceOf(QueryResponseHeader.class)); } - private void mockShardingSphereRuleMetaData() { + private void mockRuleMetaData() { ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); when(database.getName()).thenReturn("foo_db"); when(database.getResourceMetaData()).thenReturn(mock(ResourceMetaData.class)); diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportDatabaseConfigurationExecutorTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportDatabaseConfigurationExecutorTest.java index 
4e8f4591fcd79..0cff6d51f1e9a 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportDatabaseConfigurationExecutorTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportDatabaseConfigurationExecutorTest.java @@ -20,9 +20,11 @@ import lombok.SneakyThrows; import org.apache.shardingsphere.distsql.parser.statement.ral.queryable.ExportDatabaseConfigurationStatement; import org.apache.shardingsphere.infra.config.algorithm.AlgorithmConfiguration; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.merge.result.impl.local.LocalDataQueryResultRow; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.sharding.api.config.ShardingRuleConfiguration; import org.apache.shardingsphere.sharding.api.config.rule.ShardingTableRuleConfiguration; import org.apache.shardingsphere.sharding.api.config.strategy.keygen.KeyGenerateStrategyConfiguration; @@ -43,8 +45,10 @@ import java.util.Collections; import java.util.LinkedHashMap; import java.util.Map; +import java.util.Map.Entry; import java.util.Objects; import java.util.Properties; +import java.util.stream.Collectors; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; @@ -66,7 +70,8 @@ void assertGetColumns() { @Test void assertExecute() { when(database.getName()).thenReturn("normal_db"); - when(database.getResourceMetaData().getDataSourcePropsMap()).thenReturn(DataSourcePropertiesCreator.create(createDataSourceMap())); + Map storageUnits = 
createStorageUnits(); + when(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(storageUnits); when(database.getRuleMetaData().getConfigurations()).thenReturn(Collections.singleton(createShardingRuleConfiguration())); Collection actual = new ExportDatabaseConfigurationExecutor().getRows(database, new ExportDatabaseConfigurationStatement(mock(DatabaseSegment.class), null)); assertThat(actual.size(), is(1)); @@ -74,10 +79,22 @@ void assertExecute() { assertThat(row.getCell(1), is(loadExpectedRow())); } + private Map createStorageUnits() { + Map propsMap = createDataSourceMap().entrySet().stream() + .collect(Collectors.toMap(Entry::getKey, entry -> DataSourcePoolPropertiesCreator.create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); + Map result = new LinkedHashMap<>(); + for (Entry entry : propsMap.entrySet()) { + StorageUnit storageUnit = mock(StorageUnit.class); + when(storageUnit.getDataSourcePoolProperties()).thenReturn(entry.getValue()); + result.put(entry.getKey(), storageUnit); + } + return result; + } + @Test void assertExecuteWithEmptyDatabase() { when(database.getName()).thenReturn("empty_db"); - when(database.getResourceMetaData().getDataSourcePropsMap()).thenReturn(Collections.emptyMap()); + when(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.emptyMap()); when(database.getRuleMetaData().getConfigurations()).thenReturn(Collections.emptyList()); ExportDatabaseConfigurationStatement sqlStatement = new ExportDatabaseConfigurationStatement(new DatabaseSegment(0, 0, new IdentifierValue("empty_db")), null); Collection actual = new ExportDatabaseConfigurationExecutor().getRows(database, sqlStatement); diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportMetaDataExecutorTest.java 
b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportMetaDataExecutorTest.java index 11eab7ac6ee1c..9843649e50ef3 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportMetaDataExecutorTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportMetaDataExecutorTest.java @@ -27,7 +27,8 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.config.props.ConfigurationPropertyKey; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.datasource.props.DataSourcePropertiesCreator; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator; import org.apache.shardingsphere.infra.instance.ComputeNodeInstance; import org.apache.shardingsphere.infra.instance.InstanceContext; import org.apache.shardingsphere.infra.instance.metadata.InstanceMetaData; @@ -37,6 +38,7 @@ import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.util.eventbus.EventBusContext; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; @@ -67,8 +69,10 @@ import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; +import java.util.Map.Entry; import java.util.Objects; import java.util.Properties; +import java.util.stream.Collectors; import static 
org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; @@ -107,7 +111,7 @@ void assertExecuteWithEmptyMetaData() { when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); when(ProxyContext.getInstance().getAllDatabaseNames()).thenReturn(Collections.singleton("empty_metadata")); when(database.getResourceMetaData().getAllInstanceDataSourceNames()).thenReturn(Collections.singleton("empty_metadata")); - when(database.getResourceMetaData().getDataSourcePropsMap()).thenReturn(Collections.emptyMap()); + when(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.emptyMap()); when(database.getRuleMetaData().getConfigurations()).thenReturn(Collections.emptyList()); ExportMetaDataStatement sqlStatement = new ExportMetaDataStatement(null); Collection actual = new ExportMetaDataExecutor().getRows(contextManager.getMetaDataContexts().getMetaData(), sqlStatement); @@ -130,8 +134,8 @@ private ContextManager mockEmptyContextManager() { void assertExecute() { when(database.getName()).thenReturn("normal_db"); when(database.getResourceMetaData().getAllInstanceDataSourceNames()).thenReturn(Collections.singleton("empty_metadata")); - Map dataSourceMap = createDataSourceMap(); - when(database.getResourceMetaData().getDataSourcePropsMap()).thenReturn(DataSourcePropertiesCreator.create(dataSourceMap)); + Map storageUnits = createStorageUnits(); + when(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(storageUnits); when(database.getRuleMetaData().getConfigurations()).thenReturn(Collections.emptyList()); ContextManager contextManager = mockContextManager(); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); @@ -143,10 +147,22 @@ void assertExecute() { assertThat(row.getCell(3).toString(), is(loadExpectedRow())); } + private Map createStorageUnits() { + Map propsMap = createDataSourceMap().entrySet().stream() + 
.collect(Collectors.toMap(Entry::getKey, entry -> DataSourcePoolPropertiesCreator.create(entry.getValue()), (oldValue, currentValue) -> oldValue, LinkedHashMap::new)); + Map result = new LinkedHashMap<>(); + for (Entry entry : propsMap.entrySet()) { + StorageUnit storageUnit = mock(StorageUnit.class); + when(storageUnit.getDataSourcePoolProperties()).thenReturn(entry.getValue()); + result.put(entry.getKey(), storageUnit); + } + return result; + } + private ContextManager mockContextManager() { MetaDataContexts metaDataContexts = new MetaDataContexts(mock(MetaDataPersistService.class), new ShardingSphereMetaData(Collections.singletonMap(database.getName(), database), new ResourceMetaData(Collections.emptyMap()), - new RuleMetaData(Arrays.asList(new AuthorityRule(new DefaultAuthorityRuleConfigurationBuilder().build(), Collections.emptyMap()), + new RuleMetaData(Arrays.asList(new AuthorityRule(new DefaultAuthorityRuleConfigurationBuilder().build()), new GlobalClockRule(new DefaultGlobalClockRuleConfigurationBuilder().build(), Collections.singletonMap(database.getName(), database)))), new ConfigurationProperties(PropertiesBuilder.build(new Property(ConfigurationPropertyKey.SQL_SHOW.getKey(), "true"))))); InstanceContext instanceContext = new InstanceContext( diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutorTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutorTest.java index 0d57fd3d2df05..2c06f0d1b832b 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutorTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/queryable/ExportStorageNodesExecutorTest.java @@ -29,6 +29,7 @@ import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import 
org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.metadata.persist.MetaDataPersistService; @@ -50,8 +51,9 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Answers; import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; -import javax.sql.DataSource; import java.io.BufferedReader; import java.io.FileReader; import java.io.IOException; @@ -72,6 +74,7 @@ import static org.mockito.Mockito.when; @ExtendWith(AutoMockExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) @StaticMockSettings(ProxyContext.class) class ExportStorageNodesExecutorTest { @@ -125,7 +128,8 @@ private ContextManager mockEmptyContextManager() { @Test void assertExecute() { when(database.getName()).thenReturn("normal_db"); - when(database.getResourceMetaData().getDataSources()).thenReturn(createDataSourceMap()); + Map storageUnits = createStorageUnits(); + when(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(storageUnits); when(database.getRuleMetaData().getConfigurations()).thenReturn(Collections.singleton(createShardingRuleConfiguration())); ContextManager contextManager = mockContextManager(); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); @@ -138,7 +142,8 @@ void assertExecute() { @Test void assertExecuteWithDatabaseName() { when(database.getName()).thenReturn("normal_db"); - when(database.getResourceMetaData().getDataSources()).thenReturn(createDataSourceMap()); + Map storageUnits = createStorageUnits(); + 
when(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(storageUnits); when(database.getRuleMetaData().getConfigurations()).thenReturn(Collections.singleton(createShardingRuleConfiguration())); ContextManager contextManager = mockContextManager(); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); @@ -152,26 +157,28 @@ private ContextManager mockContextManager() { ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); MetaDataContexts metaDataContexts = new MetaDataContexts(mock(MetaDataPersistService.class), new ShardingSphereMetaData(Collections.singletonMap(database.getName(), database), new ResourceMetaData(Collections.emptyMap()), - new RuleMetaData(Collections.singleton(new AuthorityRule(new DefaultAuthorityRuleConfigurationBuilder().build(), Collections.emptyMap()))), + new RuleMetaData(Collections.singleton(new AuthorityRule(new DefaultAuthorityRuleConfigurationBuilder().build()))), new ConfigurationProperties(PropertiesBuilder.build(new Property(ConfigurationPropertyKey.SQL_SHOW.getKey(), "true"))))); when(result.getMetaDataContexts()).thenReturn(metaDataContexts); return result; } - private Map createDataSourceMap() { - Map result = new LinkedHashMap<>(2, 1F); - result.put("ds_0", createDataSource("demo_ds_0")); - result.put("ds_1", createDataSource("demo_ds_1")); + private Map createStorageUnits() { + Map result = new LinkedHashMap<>(2, 1F); + result.put("ds_0", createStorageUnit("demo_ds_0")); + result.put("ds_1", createStorageUnit("demo_ds_1")); return result; } - private DataSource createDataSource(final String name) { - MockedDataSource result = new MockedDataSource(); - result.setUrl(String.format("jdbc:mock://127.0.0.1/%s", name)); - result.setUsername("root"); - result.setPassword("test"); - result.setMaxPoolSize(50); - result.setMinPoolSize(1); + private StorageUnit createStorageUnit(final String name) { + MockedDataSource dataSource = new MockedDataSource(); + 
dataSource.setUrl(String.format("jdbc:mock://127.0.0.1/%s", name)); + dataSource.setUsername("root"); + dataSource.setPassword("test"); + dataSource.setMaxPoolSize(50); + dataSource.setMinPoolSize(1); + StorageUnit result = mock(StorageUnit.class); + when(result.getDataSource()).thenReturn(dataSource); return result; } diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportDatabaseConfigurationUpdaterTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportDatabaseConfigurationUpdaterTest.java index 97b42e17c20df..5e1ddde3db7ab 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportDatabaseConfigurationUpdaterTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportDatabaseConfigurationUpdaterTest.java @@ -20,13 +20,14 @@ import lombok.SneakyThrows; import org.apache.shardingsphere.distsql.handler.exception.datasource.MissingRequiredDataSourcesException; import org.apache.shardingsphere.distsql.handler.exception.rule.DuplicateRuleException; -import org.apache.shardingsphere.distsql.handler.validate.DataSourcePropertiesValidateHandler; +import org.apache.shardingsphere.distsql.handler.validate.DataSourcePoolPropertiesValidateHandler; import org.apache.shardingsphere.distsql.parser.statement.ral.updatable.ImportDatabaseConfigurationStatement; import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.config.props.ConfigurationPropertyKey; import org.apache.shardingsphere.infra.database.core.DefaultDatabase; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import 
org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnitMetaData; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.rule.identifier.type.DataSourceContainedRule; import org.apache.shardingsphere.infra.exception.core.external.sql.type.generic.UnsupportedSQLOperationException; @@ -39,6 +40,8 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.internal.configuration.plugins.Plugins; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; import java.sql.SQLException; import java.util.Collections; @@ -51,6 +54,7 @@ @ExtendWith(AutoMockExtension.class) @StaticMockSettings(ProxyContext.class) +@MockitoSettings(strictness = Strictness.LENIENT) class ImportDatabaseConfigurationUpdaterTest { private ImportDatabaseConfigurationUpdater importDatabaseConfigUpdater; @@ -119,13 +123,14 @@ private void init(final String databaseName) { importDatabaseConfigUpdater = new ImportDatabaseConfigurationUpdater(); YamlDatabaseConfigurationImportExecutor databaseConfigImportExecutor = new YamlDatabaseConfigurationImportExecutor(); Plugins.getMemberAccessor().set(importDatabaseConfigUpdater.getClass().getDeclaredField("databaseConfigImportExecutor"), importDatabaseConfigUpdater, databaseConfigImportExecutor); - Plugins.getMemberAccessor().set(databaseConfigImportExecutor.getClass().getDeclaredField("validateHandler"), databaseConfigImportExecutor, mock(DataSourcePropertiesValidateHandler.class)); + Plugins.getMemberAccessor().set(databaseConfigImportExecutor.getClass().getDeclaredField("validateHandler"), databaseConfigImportExecutor, mock(DataSourcePoolPropertiesValidateHandler.class)); } private ContextManager mockContextManager(final String databaseName) { ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, 
RETURNS_DEEP_STUBS); ResourceMetaData resourceMetaData = mock(ResourceMetaData.class); + when(resourceMetaData.getStorageUnitMetaData()).thenReturn(mock(StorageUnitMetaData.class)); when(database.getResourceMetaData()).thenReturn(resourceMetaData); ShardingSphereSchema schema = mock(ShardingSphereSchema.class); when(database.getSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(schema); diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportMetaDataUpdaterTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportMetaDataUpdaterTest.java index 033973f8aab89..7ef71d693daac 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportMetaDataUpdaterTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/ImportMetaDataUpdaterTest.java @@ -105,10 +105,10 @@ private ContextManager mockContextManager(final String feature) { ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); when(result.getMetaDataContexts().getMetaData().getProps()) .thenReturn(new ConfigurationProperties(PropertiesBuilder.build(new Property(ConfigurationPropertyKey.PROXY_FRONTEND_DATABASE_PROTOCOL_TYPE.getKey(), "MySQL")))); - if (feature != null) { + if (null != feature) { ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); when(database.getSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(new ShardingSphereSchema(createTableMap(), Collections.emptyMap())); - when(database.getResourceMetaData().getDataSources()).thenReturn(createDataSourceMap()); + when(database.getResourceMetaData().getStorageUnitMetaData().getDataSources()).thenReturn(createDataSourceMap()); when(result.getMetaDataContexts().getMetaData().getDatabases()).thenReturn(Collections.singletonMap(feature, database)); 
when(result.getMetaDataContexts().getMetaData().getDatabase(feature)).thenReturn(database); } diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/RefreshTableMetaDataUpdaterTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/RefreshTableMetaDataUpdaterTest.java index 9e0c5f01d4a73..0e0866a33f340 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/RefreshTableMetaDataUpdaterTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/ral/updatable/RefreshTableMetaDataUpdaterTest.java @@ -17,12 +17,12 @@ package org.apache.shardingsphere.proxy.backend.handler.distsql.ral.updatable; -import org.apache.shardingsphere.infra.exception.dialect.exception.syntax.database.NoDatabaseSelectedException; -import org.apache.shardingsphere.infra.exception.dialect.exception.syntax.database.UnknownDatabaseException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.EmptyStorageUnitException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.MissingRequiredStorageUnitsException; import org.apache.shardingsphere.distsql.parser.statement.ral.updatable.RefreshTableMetaDataStatement; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.exception.dialect.exception.syntax.database.NoDatabaseSelectedException; +import org.apache.shardingsphere.infra.exception.dialect.exception.syntax.database.UnknownDatabaseException; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.proxy.backend.context.ProxyContext; @@ -30,7 +30,6 @@ import org.apache.shardingsphere.proxy.backend.response.header.ResponseHeader; import 
org.apache.shardingsphere.proxy.backend.response.header.update.UpdateResponseHeader; import org.apache.shardingsphere.proxy.backend.session.ConnectionSession; -import org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource; import org.apache.shardingsphere.test.mock.AutoMockExtension; import org.apache.shardingsphere.test.mock.StaticMockSettings; import org.junit.jupiter.api.Test; @@ -70,7 +69,7 @@ void assertUnknownDatabaseException() { @Test void assertEmptyResource() { ContextManager contextManager = mock(ContextManager.class, RETURNS_DEEP_STUBS); - when(contextManager.getDataSourceMap("foo_db")).thenReturn(Collections.emptyMap()); + when(contextManager.getStorageUnits("foo_db")).thenReturn(Collections.emptyMap()); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); when(ProxyContext.getInstance().databaseExists("foo_db")).thenReturn(true); UpdatableRALBackendHandler backendHandler = new UpdatableRALBackendHandler<>(new RefreshTableMetaDataStatement(), mockConnectionSession("foo_db")); @@ -80,7 +79,6 @@ void assertEmptyResource() { @Test void assertMissingRequiredResources() { ContextManager contextManager = mock(ContextManager.class, RETURNS_DEEP_STUBS); - when(contextManager.getDataSourceMap("foo_db")).thenReturn(Collections.singletonMap("ds_0", new MockedDataSource())); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); when(ProxyContext.getInstance().databaseExists("foo_db")).thenReturn(true); UpdatableRALBackendHandler backendHandler = new UpdatableRALBackendHandler<>(new RefreshTableMetaDataStatement("t_order", "ds_1", null), mockConnectionSession("foo_db")); @@ -90,7 +88,6 @@ void assertMissingRequiredResources() { @Test void assertUpdate() throws SQLException { ContextManager contextManager = mock(ContextManager.class, RETURNS_DEEP_STUBS); - when(contextManager.getDataSourceMap("foo_db")).thenReturn(Collections.singletonMap("ds_0", new MockedDataSource())); 
when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); when(ProxyContext.getInstance().databaseExists("foo_db")).thenReturn(true); UpdatableRALBackendHandler backendHandler = new UpdatableRALBackendHandler<>(new RefreshTableMetaDataStatement(), mockConnectionSession("foo_db")); diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandlerTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandlerTest.java index 15ea177183afd..b3365c2be4adc 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandlerTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/AlterStorageUnitBackendHandlerTest.java @@ -21,7 +21,7 @@ import org.apache.shardingsphere.distsql.handler.exception.storageunit.DuplicateStorageUnitException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.InvalidStorageUnitsException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.MissingRequiredStorageUnitsException; -import org.apache.shardingsphere.distsql.handler.validate.DataSourcePropertiesValidateHandler; +import org.apache.shardingsphere.distsql.handler.validate.DataSourcePoolPropertiesValidateHandler; import org.apache.shardingsphere.distsql.parser.segment.DataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.HostnameAndPortBasedDataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.URLBasedDataSourceSegment; @@ -29,6 +29,7 @@ import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import 
org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.mode.metadata.MetaDataContexts; @@ -70,7 +71,7 @@ void setUp() throws ReflectiveOperationException { when(connectionSession.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); handler = new AlterStorageUnitBackendHandler(mock(AlterStorageUnitStatement.class), connectionSession); Plugins.getMemberAccessor().set( - handler.getClass().getDeclaredField("validateHandler"), handler, mock(DataSourcePropertiesValidateHandler.class)); + handler.getClass().getDeclaredField("validateHandler"), handler, mock(DataSourcePoolPropertiesValidateHandler.class)); } @Test @@ -78,8 +79,10 @@ void assertExecute() { ContextManager contextManager = mockContextManager(mock(MetaDataContexts.class, RETURNS_DEEP_STUBS)); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); when(ProxyContext.getInstance().getDatabase("foo_db")).thenReturn(database); - ResourceMetaData resourceMetaData = mock(ResourceMetaData.class); - when(resourceMetaData.getDataSources()).thenReturn(Collections.singletonMap("ds_0", mockHikariDataSource("ds_0"))); + ResourceMetaData resourceMetaData = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS); + StorageUnit storageUnit = mock(StorageUnit.class); + when(storageUnit.getDataSource()).thenReturn(mockHikariDataSource("ds_0")); + when(resourceMetaData.getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("ds_0", storageUnit)); when(database.getResourceMetaData()).thenReturn(resourceMetaData); assertThat(handler.execute("foo_db", createAlterStorageUnitStatement("ds_0")), instanceOf(UpdateResponseHeader.class)); } @@ -103,8 +106,10 @@ void assertExecuteWithAlterDatabase() { ContextManager contextManager = 
mockContextManager(mock(MetaDataContexts.class, RETURNS_DEEP_STUBS)); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); when(ProxyContext.getInstance().getDatabase("foo_db")).thenReturn(database); - ResourceMetaData resourceMetaData = mock(ResourceMetaData.class); - when(resourceMetaData.getDataSources()).thenReturn(Collections.singletonMap("ds_0", mockHikariDataSource("ds_1"))); + ResourceMetaData resourceMetaData = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS); + StorageUnit storageUnit = mock(StorageUnit.class); + when(storageUnit.getDataSource()).thenReturn(mockHikariDataSource("ds_1")); + when(resourceMetaData.getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("ds_0", storageUnit)); when(database.getResourceMetaData()).thenReturn(resourceMetaData); assertThrows(InvalidStorageUnitsException.class, () -> handler.execute("foo_db", createAlterStorageUnitStatement("ds_0"))); } diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandlerTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandlerTest.java index 1f13fc4474c15..1fb0d6d283787 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandlerTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/RegisterStorageUnitBackendHandlerTest.java @@ -19,7 +19,7 @@ import org.apache.shardingsphere.distsql.handler.exception.storageunit.DuplicateStorageUnitException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.InvalidStorageUnitsException; -import org.apache.shardingsphere.distsql.handler.validate.DataSourcePropertiesValidateHandler; +import 
org.apache.shardingsphere.distsql.handler.validate.DataSourcePoolPropertiesValidateHandler; import org.apache.shardingsphere.distsql.parser.segment.DataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.HostnameAndPortBasedDataSourceSegment; import org.apache.shardingsphere.distsql.parser.segment.URLBasedDataSourceSegment; @@ -73,7 +73,7 @@ void setUp() throws ReflectiveOperationException { when(connectionSession.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); when(database.getRuleMetaData()).thenReturn(mock(RuleMetaData.class)); handler = new RegisterStorageUnitBackendHandler(mock(RegisterStorageUnitStatement.class), connectionSession); - Plugins.getMemberAccessor().set(handler.getClass().getDeclaredField("validateHandler"), handler, mock(DataSourcePropertiesValidateHandler.class)); + Plugins.getMemberAccessor().set(handler.getClass().getDeclaredField("validateHandler"), handler, mock(DataSourcePoolPropertiesValidateHandler.class)); } @Test @@ -97,7 +97,7 @@ void assertExecuteWithDuplicateStorageUnitNamesInStatement() { @Test void assertExecuteWithDuplicateStorageUnitNamesWithResourceMetaData() { ContextManager contextManager = mock(ContextManager.class, RETURNS_DEEP_STUBS); - when(contextManager.getDataSourceMap("foo_db").keySet()).thenReturn(Collections.singleton("ds_0")); + when(contextManager.getStorageUnits("foo_db").keySet()).thenReturn(Collections.singleton("ds_0")); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); assertThrows(DuplicateStorageUnitException.class, () -> handler.execute("foo_db", createRegisterStorageUnitStatement())); } diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/UnregisterStorageUnitBackendHandlerTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/UnregisterStorageUnitBackendHandlerTest.java index 
4bc36b4711f0c..6392967e4c17d 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/UnregisterStorageUnitBackendHandlerTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rdl/storage/unit/UnregisterStorageUnitBackendHandlerTest.java @@ -17,17 +17,16 @@ package org.apache.shardingsphere.proxy.backend.handler.distsql.rdl.storage.unit; -import org.apache.shardingsphere.infra.exception.core.external.sql.type.kernel.category.DistSQLException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.MissingRequiredStorageUnitsException; import org.apache.shardingsphere.distsql.handler.exception.storageunit.StorageUnitInUsedException; import org.apache.shardingsphere.distsql.parser.statement.rdl.drop.UnregisterStorageUnitStatement; import org.apache.shardingsphere.infra.datanode.DataNode; +import org.apache.shardingsphere.infra.exception.core.external.sql.type.kernel.category.DistSQLException; import org.apache.shardingsphere.infra.instance.mode.ModeContextManager; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; -import org.apache.shardingsphere.infra.rule.identifier.type.DataNodeContainedRule; -import org.apache.shardingsphere.infra.rule.identifier.type.DataSourceContainedRule; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.mode.metadata.MetaDataContexts; import org.apache.shardingsphere.proxy.backend.context.ProxyContext; @@ -40,6 +39,7 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Answers; import org.mockito.Mock; 
import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; @@ -61,7 +61,7 @@ @MockitoSettings(strictness = Strictness.LENIENT) class UnregisterStorageUnitBackendHandlerTest { - @Mock + @Mock(answer = Answers.RETURNS_DEEP_STUBS) private ShardingSphereDatabase database; @Mock @@ -70,9 +70,6 @@ class UnregisterStorageUnitBackendHandlerTest { @Mock private DataSource dataSource; - @Mock - private RuleMetaData ruleMetaData; - @Mock private ShadowRule shadowRule; @@ -89,8 +86,7 @@ class UnregisterStorageUnitBackendHandlerTest { @BeforeEach void setUp() { resourceMetaData = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS); - when(resourceMetaData.getDataSources()).thenReturn(Collections.singletonMap("foo_ds", dataSource)); - when(database.getRuleMetaData()).thenReturn(ruleMetaData); + when(resourceMetaData.getStorageUnitMetaData().getDataSources()).thenReturn(Collections.singletonMap("foo_ds", dataSource)); when(database.getResourceMetaData()).thenReturn(resourceMetaData); contextManager = mockContextManager(); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); @@ -109,7 +105,9 @@ private ContextManager mockContextManager() { @Test void assertExecute() throws SQLException { - when(resourceMetaData.getDataSources()).thenReturn(Collections.singletonMap("foo_ds", dataSource)); + StorageUnit storageUnit = mock(StorageUnit.class); + when(storageUnit.getDataSource()).thenReturn(dataSource); + when(resourceMetaData.getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("foo_ds", storageUnit)); when(database.getResourceMetaData()).thenReturn(resourceMetaData); when(contextManager.getMetaDataContexts().getMetaData().getDatabase("foo_db")).thenReturn(database); UnregisterStorageUnitStatement unregisterStorageUnitStatement = new UnregisterStorageUnitStatement(Collections.singleton("foo_ds"), false); @@ -119,17 +117,18 @@ void assertExecute() throws SQLException { @Test void 
assertStorageUnitNameNotExistedExecute() { - when(ProxyContext.getInstance().getDatabase("foo_db").getResourceMetaData().getDataSources()).thenReturn(Collections.emptyMap()); + when(ProxyContext.getInstance().getDatabase("foo_db").getResourceMetaData().getStorageUnitMetaData().getDataSources()).thenReturn(Collections.emptyMap()); assertThrows(MissingRequiredStorageUnitsException.class, () -> handler.execute("foo_db", new UnregisterStorageUnitStatement(Collections.singleton("foo_ds"), false))); } @Test void assertStorageUnitNameInUseExecute() { - when(ruleMetaData.findRules(DataSourceContainedRule.class)).thenReturn(Collections.singleton(shadowRule)); - when(shadowRule.getType()).thenReturn("ShadowRule"); + when(database.getRuleMetaData()).thenReturn(new RuleMetaData(Collections.singleton(shadowRule))); when(shadowRule.getDataSourceMapper()).thenReturn(Collections.singletonMap("", Collections.singleton("foo_ds"))); - when(resourceMetaData.getDataSources()).thenReturn(Collections.singletonMap("foo_ds", dataSource)); + StorageUnit storageUnit = mock(StorageUnit.class); + when(storageUnit.getDataSource()).thenReturn(dataSource); + when(resourceMetaData.getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("foo_ds", storageUnit)); when(database.getResourceMetaData()).thenReturn(resourceMetaData); when(contextManager.getMetaDataContexts().getMetaData().getDatabase("foo_db")).thenReturn(database); assertThrows(StorageUnitInUsedException.class, @@ -138,26 +137,27 @@ void assertStorageUnitNameInUseExecute() { @Test void assertStorageUnitNameInUseWithoutIgnoreSingleTables() { - when(ruleMetaData.findRules(DataNodeContainedRule.class)).thenReturn(Collections.singleton(singleRule)); - when(singleRule.getType()).thenReturn("SingleRule"); + when(database.getRuleMetaData()).thenReturn(new RuleMetaData(Collections.singleton(singleRule))); DataNode dataNode = mock(DataNode.class); when(dataNode.getDataSourceName()).thenReturn("foo_ds"); 
when(singleRule.getAllDataNodes()).thenReturn(Collections.singletonMap("", Collections.singleton(dataNode))); - when(resourceMetaData.getDataSources()).thenReturn(Collections.singletonMap("foo_ds", dataSource)); + StorageUnit storageUnit = mock(StorageUnit.class); + when(storageUnit.getDataSource()).thenReturn(dataSource); + when(resourceMetaData.getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("foo_ds", storageUnit)); when(database.getResourceMetaData()).thenReturn(resourceMetaData); when(contextManager.getMetaDataContexts().getMetaData().getDatabase("foo_db")).thenReturn(database); - assertThrows(StorageUnitInUsedException.class, - () -> handler.execute("foo_db", new UnregisterStorageUnitStatement(Collections.singleton("foo_ds"), false))); + assertThrows(StorageUnitInUsedException.class, () -> handler.execute("foo_db", new UnregisterStorageUnitStatement(Collections.singleton("foo_ds"), false))); } @Test void assertStorageUnitNameInUseIgnoreSingleTables() throws SQLException { - when(ruleMetaData.findRules(DataNodeContainedRule.class)).thenReturn(Collections.singleton(singleRule)); - when(singleRule.getType()).thenReturn("SingleRule"); + when(database.getRuleMetaData()).thenReturn(new RuleMetaData(Collections.singleton(singleRule))); DataNode dataNode = mock(DataNode.class); when(dataNode.getDataSourceName()).thenReturn("foo_ds"); when(singleRule.getAllDataNodes()).thenReturn(Collections.singletonMap("", Collections.singleton(dataNode))); - when(resourceMetaData.getDataSources()).thenReturn(Collections.singletonMap("foo_ds", dataSource)); + StorageUnit storageUnit = mock(StorageUnit.class); + when(storageUnit.getDataSource()).thenReturn(dataSource); + when(resourceMetaData.getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("foo_ds", storageUnit)); when(database.getResourceMetaData()).thenReturn(resourceMetaData); 
when(contextManager.getMetaDataContexts().getMetaData().getDatabase("foo_db")).thenReturn(database); UnregisterStorageUnitStatement unregisterStorageUnitStatement = new UnregisterStorageUnitStatement(Collections.singleton("foo_ds"), true); @@ -174,8 +174,7 @@ void assertExecuteWithIfExists() throws SQLException { @Test void assertStorageUnitNameInUseWithIfExists() { - when(ruleMetaData.findRules(DataSourceContainedRule.class)).thenReturn(Collections.singleton(shadowRule)); - when(shadowRule.getType()).thenReturn("ShadowRule"); + when(database.getRuleMetaData()).thenReturn(new RuleMetaData(Collections.singleton(shadowRule))); when(shadowRule.getDataSourceMapper()).thenReturn(Collections.singletonMap("", Collections.singleton("foo_ds"))); when(contextManager.getMetaDataContexts().getMetaData().getDatabase("foo_db")).thenReturn(database); UnregisterStorageUnitStatement unregisterStorageUnitStatement = new UnregisterStorageUnitStatement(true, Collections.singleton("foo_ds"), true); diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutorTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutorTest.java index 4bcd6b4cf179c..97fef2bfe9b85 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutorTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rql/storage/unit/ShowStorageUnitExecutorTest.java @@ -129,7 +129,7 @@ void assertAllStorageUnit() { assertThat(data.getCell(9), is("100")); assertThat(data.getCell(10), is("10")); assertThat(data.getCell(11), is("")); - assertThat(data.getCell(12), is("{\"openedConnections\":[]}")); + assertThat(data.getCell(12), is("{\"openedConnections\":[],\"closed\":false}")); index++; } } @@ -153,7 +153,7 @@ void assertUnusedStorageUnit() 
{ assertThat(data.getCell(9), is("100")); assertThat(data.getCell(10), is("10")); assertThat(data.getCell(11), is("")); - assertThat(data.getCell(12), is("{\"openedConnections\":[]}")); + assertThat(data.getCell(12), is("{\"openedConnections\":[],\"closed\":false}")); } @Test diff --git a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rul/sql/ParseDistSQLExecutorTest.java b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rul/sql/ParseDistSQLExecutorTest.java index 78d072e593442..a0b52f7342b29 100644 --- a/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rul/sql/ParseDistSQLExecutorTest.java +++ b/proxy/backend/core/src/test/java/org/apache/shardingsphere/proxy/backend/handler/distsql/rul/sql/ParseDistSQLExecutorTest.java @@ -17,13 +17,12 @@ package org.apache.shardingsphere.proxy.backend.handler.distsql.rul.sql; -import com.google.gson.Gson; -import com.google.gson.JsonParser; import org.apache.shardingsphere.distsql.parser.statement.rul.sql.FormatStatement; import org.apache.shardingsphere.distsql.parser.statement.rul.sql.ParseStatement; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import org.apache.shardingsphere.infra.util.json.JsonUtils; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.parser.rule.SQLParserRule; import org.apache.shardingsphere.parser.rule.builder.DefaultSQLParserRuleConfigurationBuilder; @@ -78,7 +77,7 @@ void assertGetRowDataForMySQL() throws SQLException { handler.next(); SQLStatement statement = sqlParserRule.getSQLParserEngine(TypedSPILoader.getService(DatabaseType.class, "MySQL")).parse(sql, false); assertThat(new LinkedList<>(handler.getRowData().getData()).getFirst(), is("MySQLSelectStatement")); - 
assertThat(JsonParser.parseString(new LinkedList<>(handler.getRowData().getData()).getLast().toString()), is(JsonParser.parseString(new Gson().toJson(statement)))); + assertThat(new LinkedList<>(handler.getRowData().getData()).getLast().toString(), is(JsonUtils.toJsonString(statement))); } @Test @@ -90,7 +89,7 @@ void assertGetRowDataForPostgreSQL() throws SQLException { handler.execute(); handler.next(); SQLStatement statement = sqlParserRule.getSQLParserEngine(TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")).parse(sql, false); - assertThat(JsonParser.parseString(new LinkedList<>(handler.getRowData().getData()).getLast().toString()), is(JsonParser.parseString(new Gson().toJson(statement)))); + assertThat(new LinkedList<>(handler.getRowData().getData()).getLast().toString(), is(JsonUtils.toJsonString(statement))); } @Test diff --git a/proxy/backend/core/src/test/resources/conf/import/config-database-discovery.yaml b/proxy/backend/core/src/test/resources/conf/import/config-database-discovery.yaml deleted file mode 100644 index 1c0857a645d38..0000000000000 --- a/proxy/backend/core/src/test/resources/conf/import/config-database-discovery.yaml +++ /dev/null @@ -1,67 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -databaseName: database_discovery_db - -dataSources: - ds_0: - url: jdbc:mysql://127.0.0.1:3306/demo_primary_ds?serverTimezone=UTC&useSSL=false - username: root - password: - connectionTimeoutMilliseconds: 3000 - idleTimeoutMilliseconds: 60000 - maxLifetimeMilliseconds: 1800000 - maxPoolSize: 50 - minPoolSize: 1 - ds_1: - url: jdbc:mysql://127.0.0.1:3306/demo_replica_ds_0?serverTimezone=UTC&useSSL=false - username: root - password: - connectionTimeoutMilliseconds: 3000 - idleTimeoutMilliseconds: 60000 - maxLifetimeMilliseconds: 1800000 - maxPoolSize: 50 - minPoolSize: 1 - ds_2: - url: jdbc:mysql://127.0.0.1:3306/demo_replica_ds_1?serverTimezone=UTC&useSSL=false - username: root - password: - connectionTimeoutMilliseconds: 3000 - idleTimeoutMilliseconds: 60000 - maxLifetimeMilliseconds: 1800000 - maxPoolSize: 50 - minPoolSize: 1 - -rules: -- !DB_DISCOVERY - dataSources: - readwrite_ds: - dataSourceNames: - - ds_0 - - ds_1 - - ds_2 - discoveryHeartbeatName: mgr-heartbeat - discoveryTypeName: mgr - discoveryHeartbeats: - mgr-heartbeat: - props: - keep-alive-cron: '0/5 * * * * ?' 
- discoveryTypes: - mgr: - type: MySQL.MGR - props: - group-name: 92504d5b-6dec-11e8-91ea-246e9612aaf1 diff --git a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/config/YamlHBaseConfiguration.java b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/config/YamlHBaseConfiguration.java index 7e5c43146a3e4..fce5a0912e65c 100644 --- a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/config/YamlHBaseConfiguration.java +++ b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/config/YamlHBaseConfiguration.java @@ -33,7 +33,7 @@ public final class YamlHBaseConfiguration implements YamlConfiguration { private String databaseName; - private Map commonDataSourceProps; + private Map commonDataSourcePoolProps; private Map dataSources = new HashMap<>(); diff --git a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseConnectionFactory.java b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseConnectionFactory.java index 5060e3e217e1c..f747a84417216 100644 --- a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseConnectionFactory.java +++ b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/connector/HBaseConnectionFactory.java @@ -44,12 +44,12 @@ public final class HBaseConnectionFactory { /** * Create HBase connection. 
* - * @param yamlProxyHBaseConfiguration HBase configuration + * @param yamlProxyHBaseConfig YAML HBase configuration * @return A connection for per HBase cluster */ - public static Map createHBaseConnections(final YamlHBaseConfiguration yamlProxyHBaseConfiguration) { - Map result = new LinkedHashMap<>(yamlProxyHBaseConfiguration.getDataSources().size(), 1F); - for (Entry entry : yamlProxyHBaseConfiguration.getDataSources().entrySet()) { + public static Map createHBaseConnections(final YamlHBaseConfiguration yamlProxyHBaseConfig) { + Map result = new LinkedHashMap<>(yamlProxyHBaseConfig.getDataSources().size(), 1F); + for (Entry entry : yamlProxyHBaseConfig.getDataSources().entrySet()) { result.put(entry.getKey(), createConnection(entry.getValue())); } return result; diff --git a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseRegionWarmUpContext.java b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseRegionWarmUpContext.java index bc5afda592b82..39e4a70674baf 100644 --- a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseRegionWarmUpContext.java +++ b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/context/HBaseRegionWarmUpContext.java @@ -84,13 +84,13 @@ public void submitWarmUpTask(final String tableName, final HBaseCluster hbaseClu public void loadRegionInfo(final String tableName, final Connection connection) { HBaseRegionWarmUpContext.getInstance().addExecuteCount(); try { - if (connection == null) { + if (null == connection) { return; } RegionLocator regionLocator = connection.getRegionLocator(TableName.valueOf(tableName)); regionLocator.getAllRegionLocations(); - } catch (IOException e) { - throw new HBaseOperationException(String.format("table: %s warm up error, getRegionLocator execute error reason is %s", tableName, e)); + } catch (final IOException ex) { + throw new 
HBaseOperationException(String.format("table: %s warm up error, getRegionLocator execute error reason is %s", tableName, ex)); } } diff --git a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/handler/HBaseBackendUpdateHandler.java b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/handler/HBaseBackendUpdateHandler.java index e8f7378d4f62c..00c9ade5bddd0 100644 --- a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/handler/HBaseBackendUpdateHandler.java +++ b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/handler/HBaseBackendUpdateHandler.java @@ -18,8 +18,8 @@ package org.apache.shardingsphere.proxy.backend.hbase.handler; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.infra.binder.engine.SQLBindEngine; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; +import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContextFactory; import org.apache.shardingsphere.infra.executor.sql.execute.result.update.UpdateResult; import org.apache.shardingsphere.proxy.backend.handler.data.DatabaseBackendHandler; import org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseOperationConverter; @@ -48,7 +48,7 @@ public final class HBaseBackendUpdateHandler implements DatabaseBackendHandler { */ @Override public UpdateResponseHeader execute() { - SQLStatementContext sqlStatementContext = new SQLBindEngine(null, "").bind(sqlStatement, Collections.emptyList()); + SQLStatementContext sqlStatementContext = SQLStatementContextFactory.newInstance(null, Collections.emptyList(), sqlStatement, ""); HBaseOperationConverter converter = HBaseOperationConverterFactory.newInstance(sqlStatementContext); Collection updateResults = updater.executeUpdate(converter.convert()); return new UpdateResponseHeader(sqlStatement, updateResults); diff --git 
a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseGetResultSet.java b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseGetResultSet.java index f518dd1f2d481..278777c929b08 100644 --- a/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseGetResultSet.java +++ b/proxy/backend/type/hbase/src/main/java/org/apache/shardingsphere/proxy/backend/hbase/result/query/HBaseGetResultSet.java @@ -181,14 +181,14 @@ private String getWhereClause() { if (expressionSegment instanceof BetweenExpression) { result.append(((BetweenExpression) expressionSegment).getBetweenExpr()); } else if (expressionSegment instanceof BinaryOperationExpression) { - result.append(((BinaryOperationExpression) expressionSegment).getText()); + result.append(expressionSegment.getText()); } return result.toString(); } @Override public boolean next() { - return resultNum < maxLimitResultSize && (rows.hasNext() || compensateResult != null); + return resultNum < maxLimitResultSize && (rows.hasNext() || null != compensateResult); } @Override diff --git a/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/type/HBaseDeleteOperationConverterTest.java b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/type/HBaseDeleteOperationConverterTest.java index db1b3556dfde4..9f5382b155053 100644 --- a/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/type/HBaseDeleteOperationConverterTest.java +++ b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/type/HBaseDeleteOperationConverterTest.java @@ -18,8 +18,16 @@ package org.apache.shardingsphere.proxy.backend.hbase.converter.type; import org.apache.hadoop.hbase.client.Delete; -import org.apache.shardingsphere.infra.binder.engine.SQLBindEngine; 
import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; +import org.apache.shardingsphere.infra.binder.engine.SQLBindEngine; +import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; +import org.apache.shardingsphere.infra.database.core.DefaultDatabase; +import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation; import org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseOperationConverter; import org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseOperationConverterFactory; @@ -28,18 +36,23 @@ import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement; import org.junit.jupiter.api.Test; +import java.sql.Types; import java.util.Collections; +import java.util.Map; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; class HBaseDeleteOperationConverterTest { @Test void assertConvert() { SQLStatement sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement(HBaseSupportedSQLStatement.getDeleteStatement()); - SQLStatementContext sqlStatementContext = new SQLBindEngine(null, "").bind(sqlStatement, Collections.emptyList()); + SQLStatementContext sqlStatementContext = new SQLBindEngine(mockMetaData(), DefaultDatabase.LOGIC_NAME).bind(sqlStatement, 
Collections.emptyList()); HBaseOperationConverter converter = HBaseOperationConverterFactory.newInstance(sqlStatementContext); HBaseOperation hbaseOperation = converter.convert(); assertThat(hbaseOperation.getTableName(), is(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME)); @@ -50,11 +63,23 @@ void assertConvert() { void assertConvertWithIn() { String sql = " delete /*+ hbase */ from t_test_order where rowKey in ('2', '1')"; SQLStatement sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement(sql); - SQLStatementContext sqlStatementContext = new SQLBindEngine(null, "").bind(sqlStatement, Collections.emptyList()); + SQLStatementContext sqlStatementContext = new SQLBindEngine(mockMetaData(), DefaultDatabase.LOGIC_NAME).bind(sqlStatement, Collections.emptyList()); HBaseOperationConverter converter = HBaseOperationConverterFactory.newInstance(sqlStatementContext); HBaseOperation hBaseOperation = converter.convert(); assertThat(hBaseOperation.getTableName(), is(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME)); assertThat(hBaseOperation.getOperation(), instanceOf(HBaseDeleteOperation.class)); assertThat(((HBaseDeleteOperation) hBaseOperation.getOperation()).getDeletes().size(), is(2)); } + + private ShardingSphereMetaData mockMetaData() { + ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); + ShardingSphereTable table = new ShardingSphereTable("t_test_order", Collections.singletonList(new ShardingSphereColumn("rowKey", Types.VARCHAR, true, false, false, false, true, false)), + Collections.emptyList(), Collections.emptyList()); + when(database.getSchema(DefaultDatabase.LOGIC_NAME).getTable("t_test_order")).thenReturn(table); + when(database.containsSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(database.getSchema(DefaultDatabase.LOGIC_NAME).containsTable("t_test_order")).thenReturn(true); + Map databases = Collections.singletonMap(DefaultDatabase.LOGIC_NAME, database); + return new 
ShardingSphereMetaData(databases, mock(ResourceMetaData.class), mock(RuleMetaData.class), mock(ConfigurationProperties.class)); + + } } diff --git a/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/type/HBaseUpdateOperationConverterTest.java b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/type/HBaseUpdateOperationConverterTest.java index da9492c02272f..4aaefbf457fa4 100644 --- a/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/type/HBaseUpdateOperationConverterTest.java +++ b/proxy/backend/type/hbase/src/test/java/org/apache/shardingsphere/proxy/backend/hbase/converter/type/HBaseUpdateOperationConverterTest.java @@ -18,8 +18,16 @@ package org.apache.shardingsphere.proxy.backend.hbase.converter.type; import org.apache.hadoop.hbase.client.Put; -import org.apache.shardingsphere.infra.binder.engine.SQLBindEngine; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; +import org.apache.shardingsphere.infra.binder.engine.SQLBindEngine; +import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; +import org.apache.shardingsphere.infra.database.core.DefaultDatabase; +import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; +import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; import org.apache.shardingsphere.proxy.backend.hbase.bean.HBaseOperation; import org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseOperationConverter; import 
org.apache.shardingsphere.proxy.backend.hbase.converter.HBaseOperationConverterFactory; @@ -28,18 +36,24 @@ import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement; import org.junit.jupiter.api.Test; +import java.sql.Types; +import java.util.Arrays; import java.util.Collections; +import java.util.Map; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; class HBaseUpdateOperationConverterTest { @Test void assertConvert() { SQLStatement sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement(HBaseSupportedSQLStatement.getUpdateStatement()); - SQLStatementContext sqlStatementContext = new SQLBindEngine(null, "").bind(sqlStatement, Collections.emptyList()); + SQLStatementContext sqlStatementContext = new SQLBindEngine(mockMetaData(), DefaultDatabase.LOGIC_NAME).bind(sqlStatement, Collections.emptyList()); HBaseOperationConverter converter = HBaseOperationConverterFactory.newInstance(sqlStatementContext); HBaseOperation operation = converter.convert(); assertThat(operation.getTableName(), is(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME)); @@ -50,11 +64,23 @@ void assertConvert() { void assertConvertWithIn() { String sql = " update /*+ hbase */ t_test_order set age = 10 where rowKey in (1, '2')"; SQLStatement sqlStatement = HBaseSupportedSQLStatement.parseSQLStatement(sql); - SQLStatementContext sqlStatementContext = new SQLBindEngine(null, "").bind(sqlStatement, Collections.emptyList()); + SQLStatementContext sqlStatementContext = new SQLBindEngine(mockMetaData(), DefaultDatabase.LOGIC_NAME).bind(sqlStatement, Collections.emptyList()); HBaseOperationConverter converter = HBaseOperationConverterFactory.newInstance(sqlStatementContext); HBaseOperation operation = converter.convert(); 
assertThat(operation.getTableName(), is(HBaseSupportedSQLStatement.HBASE_DATABASE_TABLE_NAME)); assertThat(operation.getOperation(), instanceOf(HBaseUpdateOperation.class)); assertThat(((HBaseUpdateOperation) operation.getOperation()).getPuts().size(), is(2)); } + + private ShardingSphereMetaData mockMetaData() { + ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); + ShardingSphereTable table = new ShardingSphereTable("t_test_order", Arrays.asList(new ShardingSphereColumn("rowKey", Types.VARCHAR, true, false, false, false, true, false), + new ShardingSphereColumn("age", Types.INTEGER, false, false, false, false, true, false)), Collections.emptyList(), Collections.emptyList()); + when(database.getSchema(DefaultDatabase.LOGIC_NAME).getTable("t_test_order")).thenReturn(table); + when(database.containsSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(true); + when(database.getSchema(DefaultDatabase.LOGIC_NAME).containsTable("t_test_order")).thenReturn(true); + Map databases = Collections.singletonMap(DefaultDatabase.LOGIC_NAME, database); + return new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), mock(RuleMetaData.class), mock(ConfigurationProperties.class)); + + } } diff --git a/proxy/backend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/information/SelectInformationSchemataExecutor.java b/proxy/backend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/information/SelectInformationSchemataExecutor.java index effd4e5e0ddd6..ad3aa0e4c2a39 100644 --- a/proxy/backend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/information/SelectInformationSchemataExecutor.java +++ b/proxy/backend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/information/SelectInformationSchemataExecutor.java @@ -95,7 +95,8 @@ protected Collection 
getDatabaseNames(final ConnectionSession connection @Override protected void preProcess(final String databaseName, final Map rows, final Map alias) { ResourceMetaData resourceMetaData = ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getDatabase(databaseName).getResourceMetaData(); - Collection catalogs = resourceMetaData.getDataSources().keySet().stream().map(each -> resourceMetaData.getConnectionProperties(each).getCatalog()).collect(Collectors.toSet()); + Collection catalogs = resourceMetaData.getStorageUnitMetaData().getStorageUnits().keySet() + .stream().map(each -> resourceMetaData.getConnectionProperties(each).getCatalog()).collect(Collectors.toSet()); schemaNameAlias = alias.getOrDefault(SCHEMA_NAME, ""); String rowValue = rows.getOrDefault(schemaNameAlias, "").toString(); queryDatabase = !rowValue.isEmpty(); diff --git a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/MySQLAdminExecutorCreatorTest.java b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/MySQLAdminExecutorCreatorTest.java index bbbc8fe19b758..a482f350689ce 100644 --- a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/MySQLAdminExecutorCreatorTest.java +++ b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/MySQLAdminExecutorCreatorTest.java @@ -165,12 +165,12 @@ void assertCreateWithSetStatement() { @Test void assertCreateWithSelectStatementForShowConnectionId() { - MySQLSelectStatement mySQLSelectStatement = mock(MySQLSelectStatement.class); - when(mySQLSelectStatement.getFrom()).thenReturn(null); + MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class); + when(selectStatement.getFrom()).thenReturn(null); ProjectionsSegment projectionsSegment = mock(ProjectionsSegment.class); 
when(projectionsSegment.getProjections()).thenReturn(Collections.singletonList(new ExpressionProjectionSegment(0, 10, "CONNECTION_ID()"))); - when(mySQLSelectStatement.getProjections()).thenReturn(projectionsSegment); - when(sqlStatementContext.getSqlStatement()).thenReturn(mySQLSelectStatement); + when(selectStatement.getProjections()).thenReturn(projectionsSegment); + when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement); Optional actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select CONNECTION_ID()", "", Collections.emptyList()); assertTrue(actual.isPresent()); assertThat(actual.get(), instanceOf(ShowConnectionIdExecutor.class)); @@ -178,12 +178,12 @@ void assertCreateWithSelectStatementForShowConnectionId() { @Test void assertCreateWithSelectStatementForShowVersion() { - MySQLSelectStatement mySQLSelectStatement = mock(MySQLSelectStatement.class); - when(mySQLSelectStatement.getFrom()).thenReturn(null); + MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class); + when(selectStatement.getFrom()).thenReturn(null); ProjectionsSegment projectionsSegment = mock(ProjectionsSegment.class); when(projectionsSegment.getProjections()).thenReturn(Collections.singletonList(new ExpressionProjectionSegment(0, 10, "version()"))); - when(mySQLSelectStatement.getProjections()).thenReturn(projectionsSegment); - when(sqlStatementContext.getSqlStatement()).thenReturn(mySQLSelectStatement); + when(selectStatement.getProjections()).thenReturn(projectionsSegment); + when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement); Optional actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select version()", "", Collections.emptyList()); assertTrue(actual.isPresent()); assertThat(actual.get(), instanceOf(ShowVersionExecutor.class)); @@ -191,12 +191,12 @@ void assertCreateWithSelectStatementForShowVersion() { @Test void assertCreateWithSelectStatementForCurrentUser() { - MySQLSelectStatement 
mySQLSelectStatement = mock(MySQLSelectStatement.class); - when(mySQLSelectStatement.getFrom()).thenReturn(null); + MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class); + when(selectStatement.getFrom()).thenReturn(null); ProjectionsSegment projectionsSegment = mock(ProjectionsSegment.class); when(projectionsSegment.getProjections()).thenReturn(Collections.singletonList(new ExpressionProjectionSegment(0, 10, "CURRENT_USER()"))); - when(mySQLSelectStatement.getProjections()).thenReturn(projectionsSegment); - when(sqlStatementContext.getSqlStatement()).thenReturn(mySQLSelectStatement); + when(selectStatement.getProjections()).thenReturn(projectionsSegment); + when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement); Optional actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select CURRENT_USER()", "", Collections.emptyList()); assertTrue(actual.isPresent()); assertThat(actual.get(), instanceOf(ShowCurrentUserExecutor.class)); @@ -205,14 +205,14 @@ void assertCreateWithSelectStatementForCurrentUser() { @Test void assertCreateWithSelectStatementForTransactionReadOnly() { initProxyContext(Collections.emptyMap()); - MySQLSelectStatement mySQLSelectStatement = mock(MySQLSelectStatement.class); - when(mySQLSelectStatement.getFrom()).thenReturn(null); + MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class); + when(selectStatement.getFrom()).thenReturn(null); ProjectionsSegment projectionsSegment = mock(ProjectionsSegment.class); VariableSegment variableSegment = new VariableSegment(0, 0, "transaction_read_only"); variableSegment.setScope("SESSION"); when(projectionsSegment.getProjections()).thenReturn(Collections.singletonList(new ExpressionProjectionSegment(0, 10, "@@session.transaction_read_only", variableSegment))); - when(mySQLSelectStatement.getProjections()).thenReturn(projectionsSegment); - when(sqlStatementContext.getSqlStatement()).thenReturn(mySQLSelectStatement); + 
when(selectStatement.getProjections()).thenReturn(projectionsSegment); + when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement); Optional actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select @@session.transaction_read_only", "", Collections.emptyList()); assertTrue(actual.isPresent()); assertThat(actual.get(), instanceOf(MySQLSystemVariableQueryExecutor.class)); @@ -221,14 +221,14 @@ void assertCreateWithSelectStatementForTransactionReadOnly() { @Test void assertCreateWithSelectStatementForTransactionIsolation() { initProxyContext(Collections.emptyMap()); - MySQLSelectStatement mySQLSelectStatement = mock(MySQLSelectStatement.class); - when(mySQLSelectStatement.getFrom()).thenReturn(null); + MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class); + when(selectStatement.getFrom()).thenReturn(null); ProjectionsSegment projectionsSegment = mock(ProjectionsSegment.class); VariableSegment variableSegment = new VariableSegment(0, 0, "transaction_isolation"); variableSegment.setScope("SESSION"); when(projectionsSegment.getProjections()).thenReturn(Collections.singletonList(new ExpressionProjectionSegment(0, 10, "@@session.transaction_isolation", variableSegment))); - when(mySQLSelectStatement.getProjections()).thenReturn(projectionsSegment); - when(sqlStatementContext.getSqlStatement()).thenReturn(mySQLSelectStatement); + when(selectStatement.getProjections()).thenReturn(projectionsSegment); + when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement); Optional actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select @@session.transaction_isolation", "", Collections.emptyList()); assertTrue(actual.isPresent()); assertThat(actual.get(), instanceOf(MySQLSystemVariableQueryExecutor.class)); @@ -237,12 +237,12 @@ void assertCreateWithSelectStatementForTransactionIsolation() { @Test void assertCreateWithSelectStatementForShowDatabase() { initProxyContext(Collections.emptyMap()); - 
MySQLSelectStatement mySQLSelectStatement = mock(MySQLSelectStatement.class); - when(mySQLSelectStatement.getFrom()).thenReturn(null); + MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class); + when(selectStatement.getFrom()).thenReturn(null); ProjectionsSegment projectionsSegment = mock(ProjectionsSegment.class); when(projectionsSegment.getProjections()).thenReturn(Collections.singletonList(new ExpressionProjectionSegment(0, 10, "DATABASE()"))); - when(mySQLSelectStatement.getProjections()).thenReturn(projectionsSegment); - when(sqlStatementContext.getSqlStatement()).thenReturn(mySQLSelectStatement); + when(selectStatement.getProjections()).thenReturn(projectionsSegment); + when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement); Optional actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select DATABASE()", "", Collections.emptyList()); assertTrue(actual.isPresent()); assertThat(actual.get(), instanceOf(ShowCurrentDatabaseExecutor.class)); @@ -251,12 +251,12 @@ void assertCreateWithSelectStatementForShowDatabase() { @Test void assertCreateWithOtherSelectStatementForNoResource() { initProxyContext(Collections.emptyMap()); - MySQLSelectStatement mySQLSelectStatement = mock(MySQLSelectStatement.class); - when(mySQLSelectStatement.getFrom()).thenReturn(null); + MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class); + when(selectStatement.getFrom()).thenReturn(null); ProjectionsSegment projectionsSegment = mock(ProjectionsSegment.class); when(projectionsSegment.getProjections()).thenReturn(Collections.singletonList(new ExpressionProjectionSegment(0, 10, "CURRENT_DATE()"))); - when(mySQLSelectStatement.getProjections()).thenReturn(projectionsSegment); - when(sqlStatementContext.getSqlStatement()).thenReturn(mySQLSelectStatement); + when(selectStatement.getProjections()).thenReturn(projectionsSegment); + when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement); Optional actual = new 
MySQLAdminExecutorCreator().create(sqlStatementContext, "select CURRENT_DATE()", null, Collections.emptyList()); assertTrue(actual.isPresent()); assertThat(actual.get(), instanceOf(NoResourceShowExecutor.class)); @@ -270,12 +270,12 @@ void assertCreateWithOtherSelectStatementForDatabaseName() { initProxyContext(result); when(ProxyContext.getInstance().getAllDatabaseNames()).thenReturn(Collections.singleton("db_0")); when(ProxyContext.getInstance().getDatabase("db_0")).thenReturn(database); - MySQLSelectStatement mySQLSelectStatement = mock(MySQLSelectStatement.class); - when(mySQLSelectStatement.getFrom()).thenReturn(null); + MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class); + when(selectStatement.getFrom()).thenReturn(null); ProjectionsSegment projectionsSegment = mock(ProjectionsSegment.class); when(projectionsSegment.getProjections()).thenReturn(Collections.singletonList(new ExpressionProjectionSegment(0, 10, "CURRENT_DATE()"))); - when(mySQLSelectStatement.getProjections()).thenReturn(projectionsSegment); - when(sqlStatementContext.getSqlStatement()).thenReturn(mySQLSelectStatement); + when(selectStatement.getProjections()).thenReturn(projectionsSegment); + when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement); Optional actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select CURRENT_DATE()", "test_db", Collections.emptyList()); assertThat(actual, is(Optional.empty())); } @@ -288,12 +288,12 @@ void assertCreateWithOtherSelectStatementForNullDatabaseName() { initProxyContext(result); when(ProxyContext.getInstance().getAllDatabaseNames()).thenReturn(Collections.singleton("db_0")); when(ProxyContext.getInstance().getDatabase("db_0")).thenReturn(database); - MySQLSelectStatement mySQLSelectStatement = mock(MySQLSelectStatement.class); - when(mySQLSelectStatement.getFrom()).thenReturn(null); + MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class); + 
when(selectStatement.getFrom()).thenReturn(null); ProjectionsSegment projectionsSegment = mock(ProjectionsSegment.class); when(projectionsSegment.getProjections()).thenReturn(Collections.singletonList(new ExpressionProjectionSegment(0, 10, "CURRENT_DATE()"))); - when(mySQLSelectStatement.getProjections()).thenReturn(projectionsSegment); - when(sqlStatementContext.getSqlStatement()).thenReturn(mySQLSelectStatement); + when(selectStatement.getProjections()).thenReturn(projectionsSegment); + when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement); Optional actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select CURRENT_DATE()", null, Collections.emptyList()); assertTrue(actual.isPresent()); assertThat(actual.get(), instanceOf(UnicastResourceShowExecutor.class)); @@ -304,9 +304,9 @@ void assertCreateWithSelectStatementFromInformationSchemaOfDefaultExecutorTables initProxyContext(Collections.emptyMap()); SimpleTableSegment tableSegment = new SimpleTableSegment(new TableNameSegment(10, 13, new IdentifierValue("ENGINES"))); tableSegment.setOwner(new OwnerSegment(7, 8, new IdentifierValue("information_schema"))); - MySQLSelectStatement mySQLSelectStatement = mock(MySQLSelectStatement.class); - when(mySQLSelectStatement.getFrom()).thenReturn(tableSegment); - when(sqlStatementContext.getSqlStatement()).thenReturn(mySQLSelectStatement); + MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class); + when(selectStatement.getFrom()).thenReturn(tableSegment); + when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement); Optional actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select ENGINE from ENGINES", "information_schema", Collections.emptyList()); assertTrue(actual.isPresent()); assertThat(actual.get(), instanceOf(DefaultDatabaseMetaDataExecutor.class)); @@ -317,9 +317,9 @@ void assertCreateWithSelectStatementFromInformationSchemaOfSchemaTable() { initProxyContext(Collections.emptyMap()); 
SimpleTableSegment tableSegment = new SimpleTableSegment(new TableNameSegment(10, 13, new IdentifierValue("SCHEMATA"))); tableSegment.setOwner(new OwnerSegment(7, 8, new IdentifierValue("information_schema"))); - MySQLSelectStatement mySQLSelectStatement = mock(MySQLSelectStatement.class); - when(mySQLSelectStatement.getFrom()).thenReturn(tableSegment); - when(sqlStatementContext.getSqlStatement()).thenReturn(mySQLSelectStatement); + MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class); + when(selectStatement.getFrom()).thenReturn(tableSegment); + when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement); Optional actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select SCHEMA_NAME from SCHEMATA", "information_schema", Collections.emptyList()); assertTrue(actual.isPresent()); assertThat(actual.get(), instanceOf(SelectInformationSchemataExecutor.class)); @@ -333,9 +333,9 @@ void assertCreateWithSelectStatementFromInformationSchemaOfOtherTable() { initProxyContext(Collections.emptyMap()); SimpleTableSegment tableSegment = new SimpleTableSegment(new TableNameSegment(10, 13, new IdentifierValue("CHARACTER_SETS"))); tableSegment.setOwner(new OwnerSegment(7, 8, new IdentifierValue("information_schema"))); - MySQLSelectStatement mySQLSelectStatement = mock(MySQLSelectStatement.class); - when(mySQLSelectStatement.getFrom()).thenReturn(tableSegment); - when(sqlStatementContext.getSqlStatement()).thenReturn(mySQLSelectStatement); + MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class); + when(selectStatement.getFrom()).thenReturn(tableSegment); + when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement); Optional actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select CHARACTER_SET_NAME from CHARACTER_SETS", "", Collections.emptyList()); assertFalse(actual.isPresent()); } @@ -345,9 +345,9 @@ void assertCreateWithSelectStatementFromPerformanceSchema() { 
initProxyContext(Collections.emptyMap()); SimpleTableSegment tableSegment = new SimpleTableSegment(new TableNameSegment(10, 13, new IdentifierValue("accounts"))); tableSegment.setOwner(new OwnerSegment(7, 8, new IdentifierValue("performance_schema"))); - MySQLSelectStatement mySQLSelectStatement = mock(MySQLSelectStatement.class); - when(mySQLSelectStatement.getFrom()).thenReturn(tableSegment); - when(sqlStatementContext.getSqlStatement()).thenReturn(mySQLSelectStatement); + MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class); + when(selectStatement.getFrom()).thenReturn(tableSegment); + when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement); Optional actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select * from accounts", "", Collections.emptyList()); assertFalse(actual.isPresent()); } diff --git a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowCreateDatabaseExecutorTest.java b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowCreateDatabaseExecutorTest.java index c5c611a6d7fb6..2c8b650fe4004 100644 --- a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowCreateDatabaseExecutorTest.java +++ b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowCreateDatabaseExecutorTest.java @@ -25,6 +25,7 @@ import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.metadata.persist.MetaDataPersistService; +import org.apache.shardingsphere.metadata.persist.data.ShardingSphereDataPersistService; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.mode.metadata.MetaDataContexts; import 
org.apache.shardingsphere.proxy.backend.context.ProxyContext; @@ -38,10 +39,12 @@ import java.sql.SQLException; import java.util.HashMap; import java.util.Map; +import java.util.Optional; import java.util.Properties; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -71,7 +74,11 @@ void assertExecute() throws SQLException { private ContextManager mockContextManager() { Map databases = getDatabases(); - MetaDataContexts metaDataContexts = new MetaDataContexts(mock(MetaDataPersistService.class), + MetaDataPersistService metaDataPersistService = mock(MetaDataPersistService.class); + ShardingSphereDataPersistService shardingSphereDataPersistService = mock(ShardingSphereDataPersistService.class); + when(shardingSphereDataPersistService.load(any())).thenReturn(Optional.empty()); + when(metaDataPersistService.getShardingSphereDataPersistService()).thenReturn(shardingSphereDataPersistService); + MetaDataContexts metaDataContexts = new MetaDataContexts(metaDataPersistService, new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), mock(RuleMetaData.class), new ConfigurationProperties(new Properties()))); ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); when(result.getMetaDataContexts()).thenReturn(metaDataContexts); diff --git a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowCurrentUserExecutorTest.java b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowCurrentUserExecutorTest.java index 083a113b98e1e..b313a9b566d61 100644 --- a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowCurrentUserExecutorTest.java +++ 
b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowCurrentUserExecutorTest.java @@ -67,12 +67,12 @@ void assertExecute() throws SQLException { private ContextManager mockContextManager() { ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); MetaDataContexts metaDataContexts = new MetaDataContexts(mock(MetaDataPersistService.class), - new ShardingSphereMetaData(new HashMap<>(), mock(ResourceMetaData.class), mockShardingSphereRuleMetaData(), new ConfigurationProperties(new Properties()))); + new ShardingSphereMetaData(new HashMap<>(), mock(ResourceMetaData.class), mockRuleMetaData(), new ConfigurationProperties(new Properties()))); when(result.getMetaDataContexts()).thenReturn(metaDataContexts); return result; } - private RuleMetaData mockShardingSphereRuleMetaData() { + private RuleMetaData mockRuleMetaData() { AuthorityRule authorityRule = mock(AuthorityRule.class); ShardingSphereUser shardingSphereUser = mock(ShardingSphereUser.class); when(shardingSphereUser.getGrantee()).thenReturn(new Grantee("root", "%")); diff --git a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowDatabasesExecutorTest.java b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowDatabasesExecutorTest.java index de7c9875c8826..6568054767881 100644 --- a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowDatabasesExecutorTest.java +++ b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowDatabasesExecutorTest.java @@ -28,6 +28,7 @@ import org.apache.shardingsphere.infra.metadata.user.Grantee; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.metadata.persist.MetaDataPersistService; +import 
org.apache.shardingsphere.metadata.persist.data.ShardingSphereDataPersistService; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.mode.metadata.MetaDataContexts; import org.apache.shardingsphere.proxy.backend.context.ProxyContext; @@ -55,6 +56,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -176,7 +178,11 @@ void assertExecuteWithLikeMatchNone() throws SQLException { private ContextManager mockContextManager() { ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); RuleMetaData globalRuleMetaData = new RuleMetaData(Collections.singleton(mockAuthorityRule())); - MetaDataContexts metaDataContexts = new MetaDataContexts(mock(MetaDataPersistService.class), new ShardingSphereMetaData(getDatabases(), + MetaDataPersistService metaDataPersistService = mock(MetaDataPersistService.class); + ShardingSphereDataPersistService shardingSphereDataPersistService = mock(ShardingSphereDataPersistService.class); + when(shardingSphereDataPersistService.load(any())).thenReturn(Optional.empty()); + when(metaDataPersistService.getShardingSphereDataPersistService()).thenReturn(shardingSphereDataPersistService); + MetaDataContexts metaDataContexts = new MetaDataContexts(metaDataPersistService, new ShardingSphereMetaData(getDatabases(), mock(ResourceMetaData.class), globalRuleMetaData, new ConfigurationProperties(new Properties()))); when(result.getMetaDataContexts()).thenReturn(metaDataContexts); return result; diff --git a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowFunctionStatusExecutorTest.java 
b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowFunctionStatusExecutorTest.java index 0e0d0ba430950..b894c4720acd5 100644 --- a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowFunctionStatusExecutorTest.java +++ b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowFunctionStatusExecutorTest.java @@ -25,6 +25,7 @@ import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.metadata.persist.MetaDataPersistService; +import org.apache.shardingsphere.metadata.persist.data.ShardingSphereDataPersistService; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.mode.metadata.MetaDataContexts; import org.apache.shardingsphere.proxy.backend.context.ProxyContext; @@ -38,10 +39,12 @@ import java.sql.SQLException; import java.util.HashMap; import java.util.Map; +import java.util.Optional; import java.util.Properties; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -63,7 +66,11 @@ void assertExecute() throws SQLException { private ContextManager mockContextManager() { Map databases = getDatabases(); - MetaDataContexts metaDataContexts = new MetaDataContexts(mock(MetaDataPersistService.class), + MetaDataPersistService metaDataPersistService = mock(MetaDataPersistService.class); + ShardingSphereDataPersistService shardingSphereDataPersistService = mock(ShardingSphereDataPersistService.class); + when(shardingSphereDataPersistService.load(any())).thenReturn(Optional.empty()); + 
when(metaDataPersistService.getShardingSphereDataPersistService()).thenReturn(shardingSphereDataPersistService); + MetaDataContexts metaDataContexts = new MetaDataContexts(metaDataPersistService, new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), mock(RuleMetaData.class), new ConfigurationProperties(new Properties()))); ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); when(result.getMetaDataContexts()).thenReturn(metaDataContexts); diff --git a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowProcedureStatusExecutorTest.java b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowProcedureStatusExecutorTest.java index ec895570eeb27..926e2feb32efe 100644 --- a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowProcedureStatusExecutorTest.java +++ b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowProcedureStatusExecutorTest.java @@ -25,6 +25,7 @@ import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.metadata.persist.MetaDataPersistService; +import org.apache.shardingsphere.metadata.persist.data.ShardingSphereDataPersistService; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.mode.metadata.MetaDataContexts; import org.apache.shardingsphere.proxy.backend.context.ProxyContext; @@ -38,10 +39,12 @@ import java.sql.SQLException; import java.util.HashMap; import java.util.Map; +import java.util.Optional; import java.util.Properties; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.ArgumentMatchers.any; import static 
org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -63,7 +66,11 @@ void assertExecute() throws SQLException { private ContextManager mockContextManager() { Map databases = getDatabases(); - MetaDataContexts metaDataContexts = new MetaDataContexts(mock(MetaDataPersistService.class), + MetaDataPersistService metaDataPersistService = mock(MetaDataPersistService.class); + ShardingSphereDataPersistService shardingSphereDataPersistService = mock(ShardingSphereDataPersistService.class); + when(shardingSphereDataPersistService.load(any())).thenReturn(Optional.empty()); + when(metaDataPersistService.getShardingSphereDataPersistService()).thenReturn(shardingSphereDataPersistService); + MetaDataContexts metaDataContexts = new MetaDataContexts(metaDataPersistService, new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), mock(RuleMetaData.class), new ConfigurationProperties(new Properties()))); ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); when(result.getMetaDataContexts()).thenReturn(metaDataContexts); diff --git a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowProcessListExecutorTest.java b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowProcessListExecutorTest.java index f1d2b4c5540e2..158777ad094ce 100644 --- a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowProcessListExecutorTest.java +++ b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowProcessListExecutorTest.java @@ -66,7 +66,7 @@ void assertExecute() throws SQLException, ReflectiveOperationException { private void setupProcesses(final ShowProcessListExecutor showProcessListExecutor) throws ReflectiveOperationException { Process process = new 
Process("f6c2336a-63ba-41bf-941e-2e3504eb2c80", 1617939785160L, - "ALTER TABLE t_order ADD COLUMN a varchar(64) AFTER order_id", "foo_db", "root", "127.0.0.1", 2, Collections.emptyList(), new AtomicInteger(1), false); + "ALTER TABLE t_order ADD COLUMN a varchar(64) AFTER order_id", "foo_db", "root", "127.0.0.1", 2, Collections.emptyList(), new AtomicInteger(1), false, false); Plugins.getMemberAccessor().set( showProcessListExecutor.getClass().getDeclaredField("processes"), showProcessListExecutor, Collections.singleton(process)); } diff --git a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowTablesExecutorTest.java b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowTablesExecutorTest.java index e1be5b9d6d71e..76c0fb280ca11 100644 --- a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowTablesExecutorTest.java +++ b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/ShowTablesExecutorTest.java @@ -29,6 +29,7 @@ import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.metadata.persist.MetaDataPersistService; +import org.apache.shardingsphere.metadata.persist.data.ShardingSphereDataPersistService; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.mode.metadata.MetaDataContexts; import org.apache.shardingsphere.proxy.backend.context.ProxyContext; @@ -56,6 +57,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static 
org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -192,7 +194,11 @@ void assertShowTableFromUncompletedDatabase() throws SQLException { } private ContextManager mockContextManager(final Map databases) { - MetaDataContexts metaDataContexts = new MetaDataContexts(mock(MetaDataPersistService.class), + MetaDataPersistService metaDataPersistService = mock(MetaDataPersistService.class); + ShardingSphereDataPersistService shardingSphereDataPersistService = mock(ShardingSphereDataPersistService.class); + when(shardingSphereDataPersistService.load(any())).thenReturn(Optional.empty()); + when(metaDataPersistService.getShardingSphereDataPersistService()).thenReturn(shardingSphereDataPersistService); + MetaDataContexts metaDataContexts = new MetaDataContexts(metaDataPersistService, new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), mock(RuleMetaData.class), new ConfigurationProperties(new Properties()))); ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); when(result.getMetaDataContexts()).thenReturn(metaDataContexts); diff --git a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/information/SelectInformationSchemataExecutorTest.java b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/information/SelectInformationSchemataExecutorTest.java index a07132a50b8c5..008d1dfd9c177 100644 --- a/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/information/SelectInformationSchemataExecutorTest.java +++ b/proxy/backend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/backend/mysql/handler/admin/executor/information/SelectInformationSchemataExecutorTest.java @@ -28,6 +28,7 @@ import org.apache.shardingsphere.infra.metadata.user.Grantee; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import 
org.apache.shardingsphere.metadata.persist.MetaDataPersistService; +import org.apache.shardingsphere.metadata.persist.data.ShardingSphereDataPersistService; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.mode.metadata.MetaDataContexts; import org.apache.shardingsphere.parser.rule.SQLParserRule; @@ -146,8 +147,12 @@ private ContextManager mockContextManager(final ShardingSphereDatabase... databa AuthorityRule authorityRule = mock(AuthorityRule.class); when(authorityRule.findPrivileges(grantee)).thenReturn(Optional.of(new DatabasePermittedPrivileges(Collections.singleton("auth_db")))); ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); - MetaDataContexts metaDataContexts = new MetaDataContexts(mock(MetaDataPersistService.class), new ShardingSphereMetaData( - Arrays.stream(databases).collect(Collectors.toMap(ShardingSphereDatabase::getName, each -> each, (key, value) -> value)), + MetaDataPersistService metaDataPersistService = mock(MetaDataPersistService.class); + ShardingSphereDataPersistService shardingSphereDataPersistService = mock(ShardingSphereDataPersistService.class); + when(shardingSphereDataPersistService.load(any())).thenReturn(Optional.empty()); + when(metaDataPersistService.getShardingSphereDataPersistService()).thenReturn(shardingSphereDataPersistService); + MetaDataContexts metaDataContexts = new MetaDataContexts(metaDataPersistService, new ShardingSphereMetaData( + Arrays.stream(databases).collect(Collectors.toMap(ShardingSphereDatabase::getName, each -> each)), mock(ResourceMetaData.class), new RuleMetaData(Collections.singleton(authorityRule)), new ConfigurationProperties(new Properties()))); when(result.getMetaDataContexts()).thenReturn(metaDataContexts); return result; diff --git a/proxy/backend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/backend/opengauss/handler/admin/OpenGaussSystemCatalogAdminQueryExecutor.java 
b/proxy/backend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/backend/opengauss/handler/admin/OpenGaussSystemCatalogAdminQueryExecutor.java index ed167da52033a..7ef0dac7a86db 100644 --- a/proxy/backend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/backend/opengauss/handler/admin/OpenGaussSystemCatalogAdminQueryExecutor.java +++ b/proxy/backend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/backend/opengauss/handler/admin/OpenGaussSystemCatalogAdminQueryExecutor.java @@ -100,7 +100,7 @@ private DriverExecutionPrepareEngine createDriver return new DriverExecutionPrepareEngine<>(JDBCDriverType.STATEMENT, maxConnectionsSizePerQuery, connectionSession.getDatabaseConnectionManager(), connectionSession.getStatementManager(), new StatementOption(false), metaDataContexts.getMetaData().getDatabase(databaseName).getRuleMetaData().getRules(), - metaDataContexts.getMetaData().getDatabase(databaseName).getResourceMetaData().getStorageTypes()); + metaDataContexts.getMetaData().getDatabase(databaseName).getResourceMetaData().getStorageUnitMetaData()); } private JDBCExecutorCallback createOpenGaussSystemCatalogAdminQueryCallback(final DatabaseType protocolType, final ResourceMetaData resourceMetaData, diff --git a/proxy/backend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/backend/opengauss/handler/admin/OpenGaussAdminExecutorFactoryTest.java b/proxy/backend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/backend/opengauss/handler/admin/OpenGaussAdminExecutorFactoryTest.java index d91be4b745d3d..c7e300d9e58f1 100644 --- a/proxy/backend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/backend/opengauss/handler/admin/OpenGaussAdminExecutorFactoryTest.java +++ b/proxy/backend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/backend/opengauss/handler/admin/OpenGaussAdminExecutorFactoryTest.java @@ -43,21 +43,21 @@ class OpenGaussAdminExecutorFactoryTest { @Mock - private 
PostgreSQLAdminExecutorCreator postgreSQLAdminExecutorFactory; + private PostgreSQLAdminExecutorCreator postgresqlAdminExecutorFactory; private OpenGaussAdminExecutorCreator openGaussAdminExecutorFactory; @BeforeEach void setup() throws ReflectiveOperationException { openGaussAdminExecutorFactory = new OpenGaussAdminExecutorCreator(); - Plugins.getMemberAccessor().set(OpenGaussAdminExecutorCreator.class.getDeclaredField("delegated"), openGaussAdminExecutorFactory, postgreSQLAdminExecutorFactory); + Plugins.getMemberAccessor().set(OpenGaussAdminExecutorCreator.class.getDeclaredField("delegated"), openGaussAdminExecutorFactory, postgresqlAdminExecutorFactory); } @Test void assertNewInstanceWithSQLStatementContextOnly() { SQLStatementContext sqlStatementContext = mock(SQLStatementContext.class); DatabaseAdminExecutor expected = mock(DatabaseAdminExecutor.class); - when(postgreSQLAdminExecutorFactory.create(sqlStatementContext)).thenReturn(Optional.of(expected)); + when(postgresqlAdminExecutorFactory.create(sqlStatementContext)).thenReturn(Optional.of(expected)); Optional actual = openGaussAdminExecutorFactory.create(sqlStatementContext); assertTrue(actual.isPresent()); assertThat(actual.get(), is(expected)); @@ -80,7 +80,7 @@ void assertNewInstanceWithOtherSQL() { SQLStatementContext sqlStatementContext = mock(SQLStatementContext.class, RETURNS_DEEP_STUBS); when(sqlStatementContext.getTablesContext().getTableNames()).thenReturn(Collections.emptyList()); DatabaseAdminExecutor expected = mock(DatabaseAdminExecutor.class); - when(postgreSQLAdminExecutorFactory.create(sqlStatementContext, "", "", Collections.emptyList())).thenReturn(Optional.of(expected)); + when(postgresqlAdminExecutorFactory.create(sqlStatementContext, "", "", Collections.emptyList())).thenReturn(Optional.of(expected)); Optional actual = openGaussAdminExecutorFactory.create(sqlStatementContext, "", "", Collections.emptyList()); assertTrue(actual.isPresent()); assertThat(actual.get(), is(expected)); diff 
--git a/proxy/backend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/backend/opengauss/handler/admin/OpenGaussSystemCatalogAdminQueryExecutorTest.java b/proxy/backend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/backend/opengauss/handler/admin/OpenGaussSystemCatalogAdminQueryExecutorTest.java index d51f0183099e7..4a5749b17344f 100644 --- a/proxy/backend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/backend/opengauss/handler/admin/OpenGaussSystemCatalogAdminQueryExecutorTest.java +++ b/proxy/backend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/backend/opengauss/handler/admin/OpenGaussSystemCatalogAdminQueryExecutorTest.java @@ -77,16 +77,16 @@ class OpenGaussSystemCatalogAdminQueryExecutorTest { void assertExecuteSelectFromPgDatabase() throws SQLException { when(ProxyContext.getInstance()).thenReturn(mock(ProxyContext.class, RETURNS_DEEP_STUBS)); when(ProxyContext.getInstance().getAllDatabaseNames()).thenReturn(Arrays.asList("foo", "bar", "sharding_db", "other_db")); - ConfigurationProperties properties = new ConfigurationProperties(new Properties()); - when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getProps()).thenReturn(properties); + ConfigurationProperties props = new ConfigurationProperties(new Properties()); + when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getProps()).thenReturn(props); ConnectionSession connectionSession = mock(ConnectionSession.class); when(connectionSession.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "openGauss")); Map databases = createShardingSphereDatabaseMap(); - SQLFederationRule sqlFederationRule = new SQLFederationRule(new SQLFederationRuleConfiguration(false, new CacheOption(1, 1)), databases, properties); + SQLFederationRule sqlFederationRule = new SQLFederationRule(new SQLFederationRuleConfiguration(false, new CacheOption(1, 1)), databases, props); 
when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getGlobalRuleMetaData()).thenReturn(mock(RuleMetaData.class)); OpenGaussSelectStatement sqlStatement = createSelectStatementForPgDatabase(); ShardingSphereMetaData metaData = - new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), new RuleMetaData(Collections.singletonList(sqlFederationRule)), properties); + new ShardingSphereMetaData(databases, mock(ResourceMetaData.class, RETURNS_DEEP_STUBS), new RuleMetaData(Collections.singletonList(sqlFederationRule)), props); when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData()).thenReturn(metaData); SelectStatementContext sqlStatementContext = new SelectStatementContext(metaData, Collections.emptyList(), sqlStatement, "sharding_db"); OpenGaussSystemCatalogAdminQueryExecutor executor = new OpenGaussSystemCatalogAdminQueryExecutor(sqlStatementContext, @@ -134,7 +134,7 @@ private Map createShardingSphereDatabaseMap() { ShardingSphereSchema schema = new ShardingSphereSchema( Collections.singletonMap("pg_database", new ShardingSphereTable("pg_database", columns, Collections.emptyList(), Collections.emptyList())), Collections.emptyMap()); result.put("sharding_db", new ShardingSphereDatabase("sharding_db", TypedSPILoader.getService(DatabaseType.class, "openGauss"), - mock(ResourceMetaData.class), mock(RuleMetaData.class), Collections.singletonMap("pg_catalog", schema))); + mock(ResourceMetaData.class, RETURNS_DEEP_STUBS), mock(RuleMetaData.class), Collections.singletonMap("pg_catalog", schema))); return result; } @@ -143,13 +143,13 @@ void assertExecuteSelectVersion() throws SQLException { when(ProxyContext.getInstance()).thenReturn(mock(ProxyContext.class, RETURNS_DEEP_STUBS)); RuleMetaData ruleMetaData = mock(RuleMetaData.class); when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getGlobalRuleMetaData()).thenReturn(ruleMetaData); - ConfigurationProperties 
properties = new ConfigurationProperties(new Properties()); - when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getProps()).thenReturn(properties); + ConfigurationProperties props = new ConfigurationProperties(new Properties()); + when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getProps()).thenReturn(props); Map databases = createShardingSphereDatabaseMap(); - SQLFederationRule sqlFederationRule = new SQLFederationRule(new SQLFederationRuleConfiguration(false, new CacheOption(1, 1)), databases, properties); + SQLFederationRule sqlFederationRule = new SQLFederationRule(new SQLFederationRuleConfiguration(false, new CacheOption(1, 1)), databases, props); OpenGaussSelectStatement sqlStatement = createSelectStatementForVersion(); ShardingSphereMetaData metaData = - new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), new RuleMetaData(Collections.singletonList(sqlFederationRule)), properties); + new ShardingSphereMetaData(databases, mock(ResourceMetaData.class, RETURNS_DEEP_STUBS), new RuleMetaData(Collections.singletonList(sqlFederationRule)), props); when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData()).thenReturn(metaData); SelectStatementContext sqlStatementContext = new SelectStatementContext(metaData, Collections.emptyList(), sqlStatement, "sharding_db"); OpenGaussSystemCatalogAdminQueryExecutor executor = @@ -177,13 +177,13 @@ void assertExecuteSelectGsPasswordDeadlineAndIntervalToNum() throws SQLException when(ProxyContext.getInstance()).thenReturn(mock(ProxyContext.class, RETURNS_DEEP_STUBS)); RuleMetaData ruleMetaData = mock(RuleMetaData.class); when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getGlobalRuleMetaData()).thenReturn(ruleMetaData); - ConfigurationProperties properties = new ConfigurationProperties(new Properties()); - 
when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getProps()).thenReturn(properties); + ConfigurationProperties props = new ConfigurationProperties(new Properties()); + when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getProps()).thenReturn(props); Map databases = createShardingSphereDatabaseMap(); - SQLFederationRule sqlFederationRule = new SQLFederationRule(new SQLFederationRuleConfiguration(false, new CacheOption(1, 1)), databases, properties); + SQLFederationRule sqlFederationRule = new SQLFederationRule(new SQLFederationRuleConfiguration(false, new CacheOption(1, 1)), databases, props); OpenGaussSelectStatement sqlStatement = createSelectStatementForGsPasswordDeadlineAndIntervalToNum(); ShardingSphereMetaData metaData = - new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), new RuleMetaData(Collections.singletonList(sqlFederationRule)), properties); + new ShardingSphereMetaData(databases, mock(ResourceMetaData.class, RETURNS_DEEP_STUBS), new RuleMetaData(Collections.singletonList(sqlFederationRule)), props); when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData()).thenReturn(metaData); SelectStatementContext sqlStatementContext = new SelectStatementContext(metaData, Collections.emptyList(), sqlStatement, "sharding_db"); OpenGaussSystemCatalogAdminQueryExecutor executor = @@ -213,13 +213,13 @@ void assertExecuteSelectGsPasswordNotifyTime() throws SQLException { when(ProxyContext.getInstance()).thenReturn(mock(ProxyContext.class, RETURNS_DEEP_STUBS)); RuleMetaData ruleMetaData = mock(RuleMetaData.class); when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getGlobalRuleMetaData()).thenReturn(ruleMetaData); - ConfigurationProperties properties = new ConfigurationProperties(new Properties()); - 
when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getProps()).thenReturn(properties); + ConfigurationProperties props = new ConfigurationProperties(new Properties()); + when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData().getProps()).thenReturn(props); Map databases = createShardingSphereDatabaseMap(); - SQLFederationRule sqlFederationRule = new SQLFederationRule(new SQLFederationRuleConfiguration(false, new CacheOption(1, 1)), databases, properties); + SQLFederationRule sqlFederationRule = new SQLFederationRule(new SQLFederationRuleConfiguration(false, new CacheOption(1, 1)), databases, props); OpenGaussSelectStatement sqlStatement = createSelectStatementForGsPasswordNotifyTime(); ShardingSphereMetaData metaData = - new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), new RuleMetaData(Collections.singletonList(sqlFederationRule)), properties); + new ShardingSphereMetaData(databases, mock(ResourceMetaData.class, RETURNS_DEEP_STUBS), new RuleMetaData(Collections.singletonList(sqlFederationRule)), props); when(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData()).thenReturn(metaData); SelectStatementContext sqlStatementContext = new SelectStatementContext(metaData, Collections.emptyList(), sqlStatement, "sharding_db"); OpenGaussSystemCatalogAdminQueryExecutor executor = diff --git a/proxy/backend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/backend/postgresql/handler/admin/executor/variable/charset/PostgreSQLCharacterSets.java b/proxy/backend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/backend/postgresql/handler/admin/executor/variable/charset/PostgreSQLCharacterSets.java index 54437471ec731..4399e2842eb1e 100644 --- a/proxy/backend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/backend/postgresql/handler/admin/executor/variable/charset/PostgreSQLCharacterSets.java +++ 
b/proxy/backend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/backend/postgresql/handler/admin/executor/variable/charset/PostgreSQLCharacterSets.java @@ -111,7 +111,7 @@ public enum PostgreSQLCharacterSets { public static Charset findCharacterSet(final String charsetName) { String formattedCharsetName = formatValue(charsetName); PostgreSQLCharacterSets result = CHARACTER_SETS_MAP.get(formattedCharsetName.toUpperCase()); - return null != result && null != result.charset ? result.charset : Charset.forName(formattedCharsetName); + return null == result || null == result.charset ? Charset.forName(formattedCharsetName) : result.charset; } private static String formatValue(final String value) { diff --git a/proxy/bootstrap/pom.xml b/proxy/bootstrap/pom.xml index 75f57b0911c83..7eb7e0d1c2584 100644 --- a/proxy/bootstrap/pom.xml +++ b/proxy/bootstrap/pom.xml @@ -142,11 +142,6 @@ HikariCP runtime - - com.alibaba - druid - runtime - org.apache.commons commons-dbcp2 diff --git a/proxy/bootstrap/src/main/java/org/apache/shardingsphere/proxy/arguments/BootstrapArguments.java b/proxy/bootstrap/src/main/java/org/apache/shardingsphere/proxy/arguments/BootstrapArguments.java index bf1d5c4396040..3a002fd1276df 100644 --- a/proxy/bootstrap/src/main/java/org/apache/shardingsphere/proxy/arguments/BootstrapArguments.java +++ b/proxy/bootstrap/src/main/java/org/apache/shardingsphere/proxy/arguments/BootstrapArguments.java @@ -125,7 +125,7 @@ private String paddingWithSlash(final String pathArg) { private boolean isValidPath(final String path) { try { Paths.get(path); - } catch (InvalidPathException ignored) { + } catch (final InvalidPathException ignored) { throw new IllegalArgumentException(String.format("Invalid path `%s`.", path)); } return true; diff --git a/proxy/bootstrap/src/main/java/org/apache/shardingsphere/proxy/database/DatabaseServerInfo.java b/proxy/bootstrap/src/main/java/org/apache/shardingsphere/proxy/database/DatabaseServerInfo.java index 
d4f0a91357719..5407054be2657 100644 --- a/proxy/bootstrap/src/main/java/org/apache/shardingsphere/proxy/database/DatabaseServerInfo.java +++ b/proxy/bootstrap/src/main/java/org/apache/shardingsphere/proxy/database/DatabaseServerInfo.java @@ -31,14 +31,14 @@ @Getter public final class DatabaseServerInfo { - private final String databaseName; + private final String databaseType; private final String databaseVersion; public DatabaseServerInfo(final DataSource dataSource) { try (Connection connection = dataSource.getConnection()) { DatabaseMetaData databaseMetaData = connection.getMetaData(); - databaseName = databaseMetaData.getDatabaseProductName(); + databaseType = databaseMetaData.getDatabaseProductName(); databaseVersion = databaseMetaData.getDatabaseProductVersion(); } catch (final SQLException ex) { throw new DatabaseServerLoadingServerException(ex); @@ -47,6 +47,6 @@ public DatabaseServerInfo(final DataSource dataSource) { @Override public String toString() { - return String.format("Database name is `%s`, version is `%s`", databaseName, databaseVersion); + return String.format("Database type is `%s`, version is `%s`", databaseType, databaseVersion); } } diff --git a/proxy/bootstrap/src/main/java/org/apache/shardingsphere/proxy/version/ShardingSphereProxyVersion.java b/proxy/bootstrap/src/main/java/org/apache/shardingsphere/proxy/version/ShardingSphereProxyVersion.java index 4886ed672e551..4781eb78d3326 100644 --- a/proxy/bootstrap/src/main/java/org/apache/shardingsphere/proxy/version/ShardingSphereProxyVersion.java +++ b/proxy/bootstrap/src/main/java/org/apache/shardingsphere/proxy/version/ShardingSphereProxyVersion.java @@ -73,7 +73,9 @@ private static void setDatabaseVersion(final ShardingSphereDatabase database) { } private static Optional findDataSourceByProtocolType(final String databaseName, final ResourceMetaData resourceMetaData, final DatabaseType protocolType) { - Optional dataSourceName = 
resourceMetaData.getStorageTypes().entrySet().stream().filter(entry -> entry.getValue().equals(protocolType)).map(Entry::getKey).findFirst(); - return dataSourceName.flatMap(optional -> Optional.ofNullable(DataSourceStateManager.getInstance().getEnabledDataSources(databaseName, resourceMetaData.getDataSources()).get(optional))); + Optional dataSourceName = resourceMetaData.getStorageUnitMetaData().getStorageUnits().entrySet() + .stream().filter(entry -> entry.getValue().getStorageType().equals(protocolType)).map(Entry::getKey).findFirst(); + return dataSourceName.flatMap(optional -> Optional.ofNullable( + DataSourceStateManager.getInstance().getEnabledDataSources(databaseName, resourceMetaData.getStorageUnitMetaData().getDataSources()).get(optional))); } } diff --git a/proxy/bootstrap/src/main/resources/conf/config-sharding.yaml b/proxy/bootstrap/src/main/resources/conf/config-sharding.yaml index d6f325af70e64..24285d21617f7 100644 --- a/proxy/bootstrap/src/main/resources/conf/config-sharding.yaml +++ b/proxy/bootstrap/src/main/resources/conf/config-sharding.yaml @@ -16,10 +16,10 @@ # ###################################################################################################### -# +# # Here you can configure the rules for the proxy. # This example is configuration of sharding rule. 
-# +# ###################################################################################################### # #databaseName: sharding_db diff --git a/proxy/bootstrap/src/test/java/org/apache/shardingsphere/proxy/database/DatabaseServerInfoTest.java b/proxy/bootstrap/src/test/java/org/apache/shardingsphere/proxy/database/DatabaseServerInfoTest.java index 2c6fe57b4632f..abb748e4ae990 100644 --- a/proxy/bootstrap/src/test/java/org/apache/shardingsphere/proxy/database/DatabaseServerInfoTest.java +++ b/proxy/bootstrap/src/test/java/org/apache/shardingsphere/proxy/database/DatabaseServerInfoTest.java @@ -52,6 +52,6 @@ void assertToString() throws SQLException { when(databaseMetaData.getDatabaseProductName()).thenReturn("fixtureDB"); when(databaseMetaData.getDatabaseProductVersion()).thenReturn("1.0.0"); when(dataSource.getConnection().getMetaData()).thenReturn(databaseMetaData); - assertThat(new DatabaseServerInfo(dataSource).toString(), is("Database name is `fixtureDB`, version is `1.0.0`")); + assertThat(new DatabaseServerInfo(dataSource).toString(), is("Database type is `fixtureDB`, version is `1.0.0`")); } } diff --git a/proxy/bootstrap/src/test/java/org/apache/shardingsphere/proxy/version/ShardingSphereProxyVersionTest.java b/proxy/bootstrap/src/test/java/org/apache/shardingsphere/proxy/version/ShardingSphereProxyVersionTest.java index 2fa4cd21ae48f..e368386eb4136 100644 --- a/proxy/bootstrap/src/test/java/org/apache/shardingsphere/proxy/version/ShardingSphereProxyVersionTest.java +++ b/proxy/bootstrap/src/test/java/org/apache/shardingsphere/proxy/version/ShardingSphereProxyVersionTest.java @@ -77,10 +77,9 @@ private ShardingSphereDatabase mockDatabase(final String databaseProductName, fi } private ResourceMetaData mockResourceMetaData(final String databaseProductName, final String databaseProductVersion) throws SQLException { - ResourceMetaData result = mock(ResourceMetaData.class); - when(result.getStorageTypes()).thenReturn(Collections.singletonMap("foo_ds", 
TypedSPILoader.getService(DatabaseType.class, databaseProductName))); + ResourceMetaData result = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS); DataSource dataSource = createDataSource(databaseProductName, databaseProductVersion); - when(result.getDataSources()).thenReturn(Collections.singletonMap("foo_ds", dataSource)); + when(result.getStorageUnitMetaData().getDataSources()).thenReturn(Collections.singletonMap("foo_ds", dataSource)); return result; } diff --git a/proxy/frontend/core/src/main/java/org/apache/shardingsphere/proxy/frontend/protocol/FrontDatabaseProtocolTypeFactory.java b/proxy/frontend/core/src/main/java/org/apache/shardingsphere/proxy/frontend/protocol/FrontDatabaseProtocolTypeFactory.java index 3becba054cc35..b6c1589fe84d8 100644 --- a/proxy/frontend/core/src/main/java/org/apache/shardingsphere/proxy/frontend/protocol/FrontDatabaseProtocolTypeFactory.java +++ b/proxy/frontend/core/src/main/java/org/apache/shardingsphere/proxy/frontend/protocol/FrontDatabaseProtocolTypeFactory.java @@ -51,7 +51,7 @@ public static DatabaseType getDatabaseType() { return TypedSPILoader.getService(DatabaseType.class, DEFAULT_FRONTEND_DATABASE_PROTOCOL_TYPE); } Optional database = metaDataContexts.getMetaData().getDatabases().values().stream().filter(ShardingSphereDatabase::containsDataSource).findFirst(); - return database.isPresent() ? database.get().getResourceMetaData().getStorageTypes().values().iterator().next() + return database.isPresent() ? 
database.get().getResourceMetaData().getStorageUnitMetaData().getStorageUnits().values().iterator().next().getStorageType() : TypedSPILoader.getService(DatabaseType.class, DEFAULT_FRONTEND_DATABASE_PROTOCOL_TYPE); } diff --git a/proxy/frontend/core/src/test/java/org/apache/shardingsphere/proxy/frontend/protocol/FrontDatabaseProtocolTypeFactoryTest.java b/proxy/frontend/core/src/test/java/org/apache/shardingsphere/proxy/frontend/protocol/FrontDatabaseProtocolTypeFactoryTest.java index af1c9eec03e92..02afea18e486c 100644 --- a/proxy/frontend/core/src/test/java/org/apache/shardingsphere/proxy/frontend/protocol/FrontDatabaseProtocolTypeFactoryTest.java +++ b/proxy/frontend/core/src/test/java/org/apache/shardingsphere/proxy/frontend/protocol/FrontDatabaseProtocolTypeFactoryTest.java @@ -27,6 +27,7 @@ import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.metadata.persist.MetaDataPersistService; +import org.apache.shardingsphere.metadata.persist.data.ShardingSphereDataPersistService; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.mode.metadata.MetaDataContexts; import org.apache.shardingsphere.proxy.backend.context.ProxyContext; @@ -36,19 +37,24 @@ import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; import java.util.Collections; import java.util.Map; +import java.util.Optional; import java.util.Properties; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; 
@ExtendWith(AutoMockExtension.class) @StaticMockSettings(ProxyContext.class) +@MockitoSettings(strictness = Strictness.LENIENT) class FrontDatabaseProtocolTypeFactoryTest { @Test @@ -84,9 +90,12 @@ private Map mockDatabases() { } private ContextManager mockContextManager(final Map databases, final Properties props) { - MetaDataContexts metaDataContexts = new MetaDataContexts( - mock(MetaDataPersistService.class), new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), - mock(RuleMetaData.class), new ConfigurationProperties(props))); + MetaDataPersistService metaDataPersistService = mock(MetaDataPersistService.class); + ShardingSphereDataPersistService shardingSphereDataPersistService = mock(ShardingSphereDataPersistService.class); + when(shardingSphereDataPersistService.load(any())).thenReturn(Optional.empty()); + when(metaDataPersistService.getShardingSphereDataPersistService()).thenReturn(shardingSphereDataPersistService); + MetaDataContexts metaDataContexts = new MetaDataContexts(metaDataPersistService, + new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), mock(RuleMetaData.class), new ConfigurationProperties(props))); return new ContextManager(metaDataContexts, mock(InstanceContext.class)); } } diff --git a/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/authentication/MySQLAuthenticationEngine.java b/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/authentication/MySQLAuthenticationEngine.java index 36ac10763c7b6..c62e37daaa2f8 100644 --- a/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/authentication/MySQLAuthenticationEngine.java +++ b/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/authentication/MySQLAuthenticationEngine.java @@ -113,7 +113,7 @@ private AuthenticationResult authenticatePhaseFastPath(final ChannelHandlerConte MySQLHandshakeResponse41Packet 
handshakeResponsePacket; try { handshakeResponsePacket = new MySQLHandshakeResponse41Packet((MySQLPacketPayload) payload); - } catch (IndexOutOfBoundsException ex) { + } catch (final IndexOutOfBoundsException ex) { if (log.isWarnEnabled()) { log.warn("Received bad handshake from client {}: \n{}", context.channel(), ByteBufUtil.prettyHexDump(payload.getByteBuf().resetReaderIndex())); } diff --git a/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/command/MySQLCommandExecuteEngine.java b/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/command/MySQLCommandExecuteEngine.java index 481b30c7b5587..7dc98f16ce720 100644 --- a/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/command/MySQLCommandExecuteEngine.java +++ b/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/command/MySQLCommandExecuteEngine.java @@ -37,7 +37,7 @@ import org.apache.shardingsphere.proxy.frontend.command.executor.CommandExecutor; import org.apache.shardingsphere.proxy.frontend.command.executor.QueryCommandExecutor; import org.apache.shardingsphere.proxy.frontend.command.executor.ResponseType; -import org.apache.shardingsphere.proxy.frontend.mysql.err.MySQLErrPacketFactory; +import org.apache.shardingsphere.proxy.frontend.mysql.err.MySQLErrorPacketFactory; import java.sql.SQLException; @@ -65,7 +65,7 @@ public CommandExecutor getCommandExecutor(final CommandPacketType type, final Co @Override public MySQLPacket getErrorPacket(final Exception cause) { - return MySQLErrPacketFactory.newInstance(cause); + return MySQLErrorPacketFactory.newInstance(cause); } @Override diff --git a/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/binary/prepare/MySQLComStmtPrepareExecutor.java 
b/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/binary/prepare/MySQLComStmtPrepareExecutor.java index 4184e3ad20493..70ff9f74e08f7 100644 --- a/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/binary/prepare/MySQLComStmtPrepareExecutor.java +++ b/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/binary/prepare/MySQLComStmtPrepareExecutor.java @@ -81,7 +81,7 @@ public Collection execute() { throw new UnsupportedPreparedStatementException(); } SQLStatementContext sqlStatementContext = new SQLBindEngine(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData(), - connectionSession.getDefaultDatabaseName()).bind(sqlStatement, Collections.emptyList()); + connectionSession.getDefaultDatabaseName(), packet.getHintValueContext()).bind(sqlStatement, Collections.emptyList()); int statementId = MySQLStatementIdGenerator.getInstance().nextStatementId(connectionSession.getConnectionId()); MySQLServerPreparedStatement serverPreparedStatement = new MySQLServerPreparedStatement(packet.getSQL(), sqlStatementContext, packet.getHintValueContext(), new CopyOnWriteArrayList<>()); connectionSession.getServerPreparedStatementRegistry().addPreparedStatement(statementId, serverPreparedStatement); diff --git a/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/text/query/MySQLMultiStatementsHandler.java b/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/text/query/MySQLMultiStatementsHandler.java index 2f7d3653911ad..7cf088c7b45eb 100644 --- a/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/text/query/MySQLMultiStatementsHandler.java +++ 
b/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/text/query/MySQLMultiStatementsHandler.java @@ -140,7 +140,7 @@ public ResponseHeader execute() throws SQLException { DriverExecutionPrepareEngine prepareEngine = new DriverExecutionPrepareEngine<>(JDBCDriverType.STATEMENT, metaDataContexts.getMetaData().getProps() .getValue(ConfigurationPropertyKey.MAX_CONNECTIONS_SIZE_PER_QUERY), connectionSession.getDatabaseConnectionManager(), (JDBCBackendStatement) connectionSession.getStatementManager(), new StatementOption(false), rules, - metaDataContexts.getMetaData().getDatabase(connectionSession.getDatabaseName()).getResourceMetaData().getStorageTypes()); + metaDataContexts.getMetaData().getDatabase(connectionSession.getDatabaseName()).getResourceMetaData().getStorageUnitMetaData()); ExecutionGroupContext executionGroupContext = prepareEngine.prepare(anyExecutionContext.getRouteContext(), samplingExecutionUnit(), new ExecutionGroupReportContext(connectionSession.getProcessId(), connectionSession.getDatabaseName(), connectionSession.getGrantee())); for (ExecutionGroup eachGroup : executionGroupContext.getInputGroups()) { diff --git a/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/err/MySQLErrPacketFactory.java b/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/err/MySQLErrorPacketFactory.java similarity index 82% rename from proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/err/MySQLErrPacketFactory.java rename to proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/err/MySQLErrorPacketFactory.java index 097224bda3fad..e7b76b9971454 100644 --- a/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/err/MySQLErrPacketFactory.java +++ 
b/proxy/frontend/type/mysql/src/main/java/org/apache/shardingsphere/proxy/frontend/mysql/err/MySQLErrorPacketFactory.java @@ -21,35 +21,33 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; import org.apache.shardingsphere.db.protocol.mysql.packet.generic.MySQLErrPacket; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.exception.dialect.SQLExceptionTransformEngine; import org.apache.shardingsphere.infra.exception.mysql.vendor.MySQLVendorError; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import java.sql.SQLException; /** - * ERR packet factory for MySQL. + * Error packet factory for MySQL. */ @NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class MySQLErrPacketFactory { +public final class MySQLErrorPacketFactory { + + private static final DatabaseType DATABASE_TYPE = TypedSPILoader.getService(DatabaseType.class, "MySQL"); /** - * Create new instance of MySQL ERR packet. + * Create new instance of MySQL error packet. * * @param cause cause * @return created instance */ public static MySQLErrPacket newInstance(final Exception cause) { - SQLException sqlException = SQLExceptionTransformEngine.toSQLException(cause, TypedSPILoader.getService(DatabaseType.class, "MySQL")); - return null == sqlException.getSQLState() ? new MySQLErrPacket(MySQLVendorError.ER_INTERNAL_ERROR, getErrorMessage(sqlException)) : createErrPacket(sqlException); + SQLException sqlException = SQLExceptionTransformEngine.toSQLException(cause, DATABASE_TYPE); + return null == sqlException.getSQLState() ? new MySQLErrPacket(MySQLVendorError.ER_INTERNAL_ERROR, getErrorMessage(sqlException)) : new MySQLErrPacket(sqlException); } private static String getErrorMessage(final SQLException cause) { return null == cause.getNextException() || !Strings.isNullOrEmpty(cause.getMessage()) ? 
cause.getMessage() : cause.getNextException().getMessage(); } - - private static MySQLErrPacket createErrPacket(final SQLException cause) { - return new MySQLErrPacket(cause.getErrorCode(), cause.getSQLState(), cause.getMessage()); - } } diff --git a/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/authentication/MySQLAuthenticationEngineTest.java b/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/authentication/MySQLAuthenticationEngineTest.java index 8698108839261..563918e64e68d 100644 --- a/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/authentication/MySQLAuthenticationEngineTest.java +++ b/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/authentication/MySQLAuthenticationEngineTest.java @@ -33,19 +33,22 @@ import org.apache.shardingsphere.db.protocol.mysql.packet.generic.MySQLOKPacket; import org.apache.shardingsphere.db.protocol.mysql.packet.handshake.MySQLHandshakePacket; import org.apache.shardingsphere.db.protocol.mysql.payload.MySQLPacketPayload; +import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.exception.dialect.exception.syntax.database.UnknownDatabaseException; import org.apache.shardingsphere.infra.exception.mysql.exception.AccessDeniedException; import org.apache.shardingsphere.infra.exception.mysql.exception.DatabaseAccessDeniedException; import org.apache.shardingsphere.infra.exception.mysql.exception.HandshakeException; import org.apache.shardingsphere.infra.exception.mysql.vendor.MySQLVendorError; -import org.apache.shardingsphere.infra.config.props.ConfigurationProperties; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import 
org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.metadata.user.Grantee; import org.apache.shardingsphere.infra.metadata.user.ShardingSphereUser; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.metadata.persist.MetaDataPersistService; +import org.apache.shardingsphere.metadata.persist.data.ShardingSphereDataPersistService; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.mode.metadata.MetaDataContexts; import org.apache.shardingsphere.proxy.backend.context.ProxyContext; @@ -275,8 +278,14 @@ void assertAuthenticateSuccess() { private ContextManager mockContextManager(final AuthorityRule rule) { ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); - Map databases = Collections.singletonMap("foo_db", mock(ShardingSphereDatabase.class)); - MetaDataContexts metaDataContexts = new MetaDataContexts(mock(MetaDataPersistService.class), new ShardingSphereMetaData(databases, + ShardingSphereDatabase database = mock(ShardingSphereDatabase.class); + when(database.getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "MySQL")); + Map databases = Collections.singletonMap("foo_db", database); + MetaDataPersistService metaDataPersistService = mock(MetaDataPersistService.class); + ShardingSphereDataPersistService shardingSphereDataPersistService = mock(ShardingSphereDataPersistService.class); + when(shardingSphereDataPersistService.load(any())).thenReturn(Optional.empty()); + when(metaDataPersistService.getShardingSphereDataPersistService()).thenReturn(shardingSphereDataPersistService); + MetaDataContexts metaDataContexts = new MetaDataContexts(metaDataPersistService, new ShardingSphereMetaData(databases, mock(ResourceMetaData.class), new RuleMetaData(Collections.singleton(rule)), new 
ConfigurationProperties(new Properties()))); when(result.getMetaDataContexts()).thenReturn(metaDataContexts); return result; diff --git a/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/binary/prepare/MySQLComStmtPrepareExecutorTest.java b/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/binary/prepare/MySQLComStmtPrepareExecutorTest.java index a645a4f711353..72d6ca2f4c661 100644 --- a/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/binary/prepare/MySQLComStmtPrepareExecutorTest.java +++ b/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/binary/prepare/MySQLComStmtPrepareExecutorTest.java @@ -33,6 +33,7 @@ import org.apache.shardingsphere.infra.binder.context.statement.dml.UpdateStatementContext; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.exception.mysql.exception.UnsupportedPreparedStatementException; +import org.apache.shardingsphere.infra.hint.HintValueContext; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; @@ -106,6 +107,7 @@ void assertPrepareMultiStatements() { void assertPrepareSelectStatement() { String sql = "select name from foo_db.user where id = ?"; when(packet.getSQL()).thenReturn(sql); + when(packet.getHintValueContext()).thenReturn(new HintValueContext()); when(connectionSession.getConnectionId()).thenReturn(1); MySQLStatementIdGenerator.getInstance().registerConnection(1); ContextManager contextManager = mockContextManager(); @@ -128,6 +130,7 @@ void assertPrepareSelectStatement() { void assertPrepareInsertStatement() { String sql = "insert into user (id, name, age) values (1, ?, ?), 
(?, 'bar', ?)"; when(packet.getSQL()).thenReturn(sql); + when(packet.getHintValueContext()).thenReturn(new HintValueContext()); int connectionId = 2; when(connectionSession.getConnectionId()).thenReturn(connectionId); when(connectionSession.getDefaultDatabaseName()).thenReturn("foo_db"); @@ -166,6 +169,7 @@ private int getColumnDefinitionFlag(final MySQLColumnDefinition41Packet packet) void assertPrepareUpdateStatement() { String sql = "update user set name = ?, age = ? where id = ?"; when(packet.getSQL()).thenReturn(sql); + when(packet.getHintValueContext()).thenReturn(new HintValueContext()); when(connectionSession.getConnectionId()).thenReturn(1); when(connectionSession.getDefaultDatabaseName()).thenReturn("foo_db"); MySQLStatementIdGenerator.getInstance().registerConnection(1); @@ -208,6 +212,7 @@ private ContextManager mockContextManager() { ShardingSphereDatabase database = new ShardingSphereDatabase("foo_db", TypedSPILoader.getService(DatabaseType.class, "MySQL"), new ResourceMetaData("foo_db", Collections.emptyMap()), new RuleMetaData(Collections.emptyList()), Collections.singletonMap("foo_db", schema)); when(result.getMetaDataContexts().getMetaData().getDatabase("foo_db")).thenReturn(database); + when(result.getMetaDataContexts().getMetaData().containsDatabase("foo_db")).thenReturn(true); return result; } } diff --git a/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/text/query/MySQLComQueryPacketExecutorTest.java b/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/text/query/MySQLComQueryPacketExecutorTest.java index e85158d87ff68..eea216efe80ab 100644 --- a/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/text/query/MySQLComQueryPacketExecutorTest.java +++ 
b/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/text/query/MySQLComQueryPacketExecutorTest.java @@ -26,7 +26,11 @@ import org.apache.shardingsphere.infra.config.props.ConfigurationPropertyKey; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.logging.rule.LoggingRule; import org.apache.shardingsphere.logging.rule.builder.DefaultLoggingRuleConfigurationBuilder; @@ -57,6 +61,7 @@ import org.mockito.quality.Strictness; import java.sql.SQLException; +import java.sql.Types; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -135,7 +140,6 @@ void assertExecuteMultiUpdateStatements() throws SQLException, NoSuchFieldExcept private MetaDataContexts mockMetaDataContexts() { DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, "MySQL"); MetaDataContexts result = mock(MetaDataContexts.class, RETURNS_DEEP_STUBS); - when(result.getMetaData().getDatabase("foo_db").getResourceMetaData().getStorageTypes()).thenReturn(Collections.singletonMap("foo_ds", databaseType)); when(result.getMetaData().getDatabase("foo_db").getProtocolType()).thenReturn(databaseType); RuleMetaData globalRuleMetaData = new RuleMetaData( Arrays.asList(new SQLParserRule(new DefaultSQLParserRuleConfigurationBuilder().build()), new SQLTranslatorRule(new 
DefaultSQLTranslatorRuleConfigurationBuilder().build()), @@ -143,9 +147,14 @@ private MetaDataContexts mockMetaDataContexts() { when(result.getMetaData().getGlobalRuleMetaData()).thenReturn(globalRuleMetaData); when(result.getMetaData().getProps().getValue(ConfigurationPropertyKey.KERNEL_EXECUTOR_SIZE)).thenReturn(1); when(result.getMetaData().getProps().getValue(ConfigurationPropertyKey.SQL_SHOW)).thenReturn(false); - ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); - when(database.getRuleMetaData()).thenReturn(new RuleMetaData(Collections.emptyList())); + ShardingSphereTable table = new ShardingSphereTable("t", Arrays.asList(new ShardingSphereColumn("id", Types.BIGINT, true, false, false, false, true, false), + new ShardingSphereColumn("v", Types.INTEGER, false, false, false, false, true, false)), Collections.emptyList(), Collections.emptyList()); + ShardingSphereSchema schema = new ShardingSphereSchema(); + schema.getTables().put("t", table); + ShardingSphereDatabase database = new ShardingSphereDatabase("foo_db", TypedSPILoader.getService(DatabaseType.class, "MySQL"), + new ResourceMetaData("foo_db", Collections.emptyMap()), new RuleMetaData(Collections.emptyList()), Collections.singletonMap("foo_db", schema)); when(result.getMetaData().getDatabase("foo_db")).thenReturn(database); + when(result.getMetaData().containsDatabase("foo_db")).thenReturn(true); return result; } diff --git a/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/text/query/MySQLMultiStatementsHandlerTest.java b/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/text/query/MySQLMultiStatementsHandlerTest.java index 739acc8a9cc41..e84d35be991b9 100644 --- a/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/text/query/MySQLMultiStatementsHandlerTest.java +++ 
b/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/command/query/text/query/MySQLMultiStatementsHandlerTest.java @@ -21,7 +21,10 @@ import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.executor.sql.execute.engine.ConnectionMode; import org.apache.shardingsphere.infra.executor.sql.prepare.driver.jdbc.StatementOption; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.logging.rule.LoggingRule; import org.apache.shardingsphere.logging.rule.builder.DefaultLoggingRuleConfigurationBuilder; @@ -47,6 +50,7 @@ import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; +import java.sql.Types; import java.util.Arrays; import java.util.Collections; @@ -102,8 +106,10 @@ private ContextManager mockContextManager() { ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); when(result.getMetaDataContexts().getMetaData().getDatabase("foo_db").getResourceMetaData().getAllInstanceDataSourceNames()) .thenReturn(Collections.singletonList("foo_ds")); - when(result.getMetaDataContexts().getMetaData().getDatabase("foo_db").getResourceMetaData().getStorageTypes()) - .thenReturn(Collections.singletonMap("foo_ds", TypedSPILoader.getService(DatabaseType.class, "FIXTURE"))); + StorageUnit storageUnit = mock(StorageUnit.class); + when(storageUnit.getStorageType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); + 
when(result.getMetaDataContexts().getMetaData().getDatabase("foo_db").getResourceMetaData().getStorageUnitMetaData().getStorageUnits()) + .thenReturn(Collections.singletonMap("foo_ds", storageUnit)); when(result.getMetaDataContexts().getMetaData().getDatabase("foo_db").getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "MySQL")); when(result.getMetaDataContexts().getMetaData().getDatabase("foo_db").getRuleMetaData()) .thenReturn(new RuleMetaData(Collections.emptyList())); @@ -114,6 +120,12 @@ private ContextManager mockContextManager() { when(result.getMetaDataContexts().getMetaData().getProps().getValue(ConfigurationPropertyKey.KERNEL_EXECUTOR_SIZE)).thenReturn(1); when(result.getMetaDataContexts().getMetaData().getProps().getValue(ConfigurationPropertyKey.SQL_SHOW)).thenReturn(false); when(result.getMetaDataContexts().getMetaData().getProps().getValue(ConfigurationPropertyKey.MAX_CONNECTIONS_SIZE_PER_QUERY)).thenReturn(1); + ShardingSphereTable table = new ShardingSphereTable("t", Arrays.asList(new ShardingSphereColumn("id", Types.BIGINT, true, false, false, false, true, false), + new ShardingSphereColumn("v", Types.INTEGER, false, false, false, false, true, false)), Collections.emptyList(), Collections.emptyList()); + when(result.getMetaDataContexts().getMetaData().getDatabase("foo_db").getSchema("foo_db").getTable("t")).thenReturn(table); + when(result.getMetaDataContexts().getMetaData().containsDatabase("foo_db")).thenReturn(true); + when(result.getMetaDataContexts().getMetaData().getDatabase("foo_db").containsSchema("foo_db")).thenReturn(true); + when(result.getMetaDataContexts().getMetaData().getDatabase("foo_db").getSchema("foo_db").containsTable("t")).thenReturn(true); return result; } } diff --git a/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/err/MySQLErrPacketFactoryTest.java 
b/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/err/MySQLErrPacketFactoryTest.java deleted file mode 100644 index f6db588fdf04a..0000000000000 --- a/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/err/MySQLErrPacketFactoryTest.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.proxy.frontend.mysql.err; - -import org.apache.shardingsphere.db.protocol.mysql.packet.generic.MySQLErrPacket; -import org.apache.shardingsphere.infra.exception.dialect.exception.syntax.database.UnknownDatabaseException; -import org.apache.shardingsphere.infra.exception.core.external.sql.sqlstate.XOpenSQLState; -import org.apache.shardingsphere.proxy.frontend.exception.CircuitBreakException; -import org.junit.jupiter.api.Test; - -import java.sql.SQLException; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.startsWith; -import static org.hamcrest.MatcherAssert.assertThat; - -class MySQLErrPacketFactoryTest { - - @Test - void assertNewInstanceWithSQLExceptionForNullSQLState() { - MySQLErrPacket actual = MySQLErrPacketFactory.newInstance(new SQLException("")); - assertThat(actual.getErrorCode(), is(1815)); - assertThat(actual.getSqlState(), is(XOpenSQLState.GENERAL_ERROR.getValue())); - assertThat(actual.getErrorMessage(), startsWith("Internal error")); - } - - @Test - void assertNewInstanceWithSQLException() { - MySQLErrPacket actual = MySQLErrPacketFactory.newInstance(new SQLException("No reason", "XXX", 30000, new RuntimeException(""))); - assertThat(actual.getErrorCode(), is(30000)); - assertThat(actual.getSqlState(), is("XXX")); - assertThat(actual.getErrorMessage(), is("No reason")); - } - - @Test - void assertNewInstanceWithShardingSphereSQLException() { - MySQLErrPacket actual = MySQLErrPacketFactory.newInstance(new CircuitBreakException()); - assertThat(actual.getErrorCode(), is(13010)); - assertThat(actual.getSqlState(), is(XOpenSQLState.GENERAL_WARNING.getValue())); - assertThat(actual.getErrorMessage(), is("Circuit break open, the request has been ignored.")); - } - - @Test - void assertNewInstanceWithSQLDialectException() { - MySQLErrPacket actual = MySQLErrPacketFactory.newInstance(new UnknownDatabaseException("foo_db")); - assertThat(actual.getErrorCode(), is(1049)); - 
assertThat(actual.getSqlState(), is(XOpenSQLState.SYNTAX_ERROR.getValue())); - assertThat(actual.getErrorMessage(), is("Unknown database 'foo_db'")); - } - - @Test - void assertNewInstanceWithUnknownException() { - MySQLErrPacket actual = MySQLErrPacketFactory.newInstance(new RuntimeException("No reason")); - assertThat(actual.getErrorCode(), is(30000)); - assertThat(actual.getSqlState(), is(XOpenSQLState.GENERAL_ERROR.getValue())); - assertThat(actual.getErrorMessage(), is("Unknown exception: No reason")); - } -} diff --git a/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/err/MySQLErrorPacketFactoryTest.java b/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/err/MySQLErrorPacketFactoryTest.java new file mode 100644 index 0000000000000..847522da441a2 --- /dev/null +++ b/proxy/frontend/type/mysql/src/test/java/org/apache/shardingsphere/proxy/frontend/mysql/err/MySQLErrorPacketFactoryTest.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.proxy.frontend.mysql.err; + +import org.apache.shardingsphere.db.protocol.mysql.packet.generic.MySQLErrPacket; +import org.apache.shardingsphere.infra.exception.core.external.sql.sqlstate.XOpenSQLState; +import org.junit.jupiter.api.Test; + +import java.sql.SQLException; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; + +class MySQLErrorPacketFactoryTest { + + @Test + void assertNewInstanceWithoutSQLState() { + MySQLErrPacket actual = MySQLErrorPacketFactory.newInstance(new SQLException("No reason")); + assertThat(actual.getErrorCode(), is(1815)); + assertThat(actual.getSqlState(), is(XOpenSQLState.GENERAL_ERROR.getValue())); + assertThat(actual.getErrorMessage(), is("Internal error: No reason")); + } + + @Test + void assertNewInstanceWithSQLState() { + MySQLErrPacket actual = MySQLErrorPacketFactory.newInstance(new RuntimeException("No reason")); + assertThat(actual.getErrorCode(), is(30000)); + assertThat(actual.getSqlState(), is(XOpenSQLState.GENERAL_ERROR.getValue())); + assertThat(actual.getErrorMessage(), is("Unknown exception: No reason")); + } +} diff --git a/proxy/frontend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/frontend/opengauss/OpenGaussFrontendEngine.java b/proxy/frontend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/frontend/opengauss/OpenGaussFrontendEngine.java index e95d339d428b9..cfe4e6067b0d8 100644 --- a/proxy/frontend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/frontend/opengauss/OpenGaussFrontendEngine.java +++ b/proxy/frontend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/frontend/opengauss/OpenGaussFrontendEngine.java @@ -34,7 +34,7 @@ public final class OpenGaussFrontendEngine implements DatabaseProtocolFrontendEngine { @Getter(AccessLevel.NONE) - private final PostgreSQLFrontendEngine postgreSQLFrontendEngine = new PostgreSQLFrontendEngine(); + private final 
PostgreSQLFrontendEngine postgresqlFrontendEngine = new PostgreSQLFrontendEngine(); private final OpenGaussAuthenticationEngine authenticationEngine = new OpenGaussAuthenticationEngine(); @@ -44,7 +44,7 @@ public final class OpenGaussFrontendEngine implements DatabaseProtocolFrontendEn @Override public void release(final ConnectionSession connectionSession) { - postgreSQLFrontendEngine.release(connectionSession); + postgresqlFrontendEngine.release(connectionSession); } @Override diff --git a/proxy/frontend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/frontend/opengauss/command/OpenGaussCommandExecuteEngine.java b/proxy/frontend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/frontend/opengauss/command/OpenGaussCommandExecuteEngine.java index 8191043b46453..5f4b84e03036e 100644 --- a/proxy/frontend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/frontend/opengauss/command/OpenGaussCommandExecuteEngine.java +++ b/proxy/frontend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/frontend/opengauss/command/OpenGaussCommandExecuteEngine.java @@ -48,7 +48,7 @@ */ public final class OpenGaussCommandExecuteEngine implements CommandExecuteEngine { - private final PostgreSQLCommandExecuteEngine postgreSQLCommandExecuteEngine = new PostgreSQLCommandExecuteEngine(); + private final PostgreSQLCommandExecuteEngine postgresqlCommandExecuteEngine = new PostgreSQLCommandExecuteEngine(); @Override public CommandPacketType getCommandPacketType(final PacketPayload payload) { @@ -75,12 +75,12 @@ public PostgreSQLPacket getErrorPacket(final Exception cause) { @Override public Optional getOtherPacket(final ConnectionSession connectionSession) { - return postgreSQLCommandExecuteEngine.getOtherPacket(connectionSession); + return postgresqlCommandExecuteEngine.getOtherPacket(connectionSession); } @Override public void writeQueryData(final ChannelHandlerContext context, final ProxyDatabaseConnectionManager databaseConnectionManager, final 
QueryCommandExecutor queryCommandExecutor, final int headerPackagesCount) throws SQLException { - postgreSQLCommandExecuteEngine.writeQueryData(context, databaseConnectionManager, queryCommandExecutor, headerPackagesCount); + postgresqlCommandExecuteEngine.writeQueryData(context, databaseConnectionManager, queryCommandExecutor, headerPackagesCount); } } diff --git a/proxy/frontend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/frontend/opengauss/err/OpenGaussErrorPacketFactory.java b/proxy/frontend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/frontend/opengauss/err/OpenGaussErrorPacketFactory.java index e4a792acdac17..cce64c7e17244 100644 --- a/proxy/frontend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/frontend/opengauss/err/OpenGaussErrorPacketFactory.java +++ b/proxy/frontend/type/opengauss/src/main/java/org/apache/shardingsphere/proxy/frontend/opengauss/err/OpenGaussErrorPacketFactory.java @@ -22,15 +22,16 @@ import lombok.NoArgsConstructor; import org.apache.shardingsphere.db.protocol.opengauss.packet.command.generic.OpenGaussErrorResponsePacket; import org.apache.shardingsphere.db.protocol.postgresql.constant.PostgreSQLMessageSeverityLevel; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.exception.core.external.sql.sqlstate.XOpenSQLState; import org.apache.shardingsphere.infra.exception.dialect.SQLExceptionTransformEngine; -import org.apache.shardingsphere.infra.exception.dialect.exception.SQLDialectException; import org.apache.shardingsphere.infra.exception.postgresql.vendor.PostgreSQLVendorError; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.exception.core.external.sql.ShardingSphereSQLException; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.opengauss.util.PSQLException; +import org.opengauss.util.ServerErrorMessage; import java.sql.SQLException; 
+import java.util.Optional; /** * Error packet factory for openGauss. @@ -38,6 +39,8 @@ @NoArgsConstructor(access = AccessLevel.PRIVATE) public final class OpenGaussErrorPacketFactory { + private static final DatabaseType DATABASE_TYPE = TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"); + /** * Create new instance of openGauss error packet. * @@ -45,30 +48,19 @@ public final class OpenGaussErrorPacketFactory { * @return created instance */ public static OpenGaussErrorResponsePacket newInstance(final Exception cause) { - if (existsServerErrorMessage(cause)) { - return new OpenGaussErrorResponsePacket(((PSQLException) cause).getServerErrorMessage()); - } - if (cause instanceof SQLException || cause instanceof ShardingSphereSQLException || cause instanceof SQLDialectException) { - return createErrorResponsePacket(SQLExceptionTransformEngine.toSQLException(cause, TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"))); - } - // TODO OpenGauss need consider FrontendConnectionLimitException - return createErrorResponsePacketForUnknownException(cause); + Optional serverErrorMessage = findServerErrorMessage(cause); + return serverErrorMessage.map(OpenGaussErrorResponsePacket::new).orElseGet(() -> createErrorResponsePacket(SQLExceptionTransformEngine.toSQLException(cause, DATABASE_TYPE))); } - private static boolean existsServerErrorMessage(final Exception cause) { - return cause instanceof PSQLException && null != ((PSQLException) cause).getServerErrorMessage(); + private static Optional findServerErrorMessage(final Exception cause) { + return cause instanceof PSQLException ? Optional.ofNullable(((PSQLException) cause).getServerErrorMessage()) : Optional.empty(); } private static OpenGaussErrorResponsePacket createErrorResponsePacket(final SQLException cause) { - // TODO consider what severity to use - String sqlState = Strings.isNullOrEmpty(cause.getSQLState()) ? 
PostgreSQLVendorError.SYSTEM_ERROR.getSqlState().getValue() : cause.getSQLState(); + String sqlState = Strings.isNullOrEmpty(cause.getSQLState()) || XOpenSQLState.GENERAL_ERROR.getValue().equals(cause.getSQLState()) + ? PostgreSQLVendorError.SYSTEM_ERROR.getSqlState().getValue() + : cause.getSQLState(); String message = Strings.isNullOrEmpty(cause.getMessage()) ? cause.toString() : cause.getMessage(); return new OpenGaussErrorResponsePacket(PostgreSQLMessageSeverityLevel.ERROR, sqlState, message); } - - private static OpenGaussErrorResponsePacket createErrorResponsePacketForUnknownException(final Exception cause) { - // TODO add FIELD_TYPE_CODE for common error and consider what severity to use - String message = Strings.isNullOrEmpty(cause.getLocalizedMessage()) ? cause.toString() : cause.getLocalizedMessage(); - return new OpenGaussErrorResponsePacket(PostgreSQLMessageSeverityLevel.ERROR, PostgreSQLVendorError.SYSTEM_ERROR.getSqlState().getValue(), message); - } } diff --git a/proxy/frontend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/OpenGaussFrontendEngineTest.java b/proxy/frontend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/OpenGaussFrontendEngineTest.java index 51ebaff8c064a..ee6411f43ec6f 100644 --- a/proxy/frontend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/OpenGaussFrontendEngineTest.java +++ b/proxy/frontend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/OpenGaussFrontendEngineTest.java @@ -44,7 +44,7 @@ class OpenGaussFrontendEngineTest { @BeforeEach void setup() throws ReflectiveOperationException { - Plugins.getMemberAccessor().set(OpenGaussFrontendEngine.class.getDeclaredField("postgreSQLFrontendEngine"), openGaussFrontendEngine, mockPostgreSQLFrontendEngine); + Plugins.getMemberAccessor().set(OpenGaussFrontendEngine.class.getDeclaredField("postgresqlFrontendEngine"), openGaussFrontendEngine, 
mockPostgreSQLFrontendEngine); } @Test diff --git a/proxy/frontend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/command/query/extended/bind/OpenGaussComBatchBindExecutorTest.java b/proxy/frontend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/command/query/extended/bind/OpenGaussComBatchBindExecutorTest.java index 918cbc3ee915a..8ad1f6165aed0 100644 --- a/proxy/frontend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/command/query/extended/bind/OpenGaussComBatchBindExecutorTest.java +++ b/proxy/frontend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/command/query/extended/bind/OpenGaussComBatchBindExecutorTest.java @@ -30,7 +30,9 @@ import org.apache.shardingsphere.infra.executor.sql.prepare.driver.jdbc.StatementOption; import org.apache.shardingsphere.infra.hint.HintValueContext; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; import org.apache.shardingsphere.infra.parser.ShardingSphereSQLParserEngine; import org.apache.shardingsphere.infra.session.connection.ConnectionContext; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; @@ -55,6 +57,7 @@ import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; +import java.sql.Types; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; @@ -130,14 +133,20 @@ private ContextManager mockContextManager() { new SQLTranslatorRule(new DefaultSQLTranslatorRuleConfigurationBuilder().build()), new LoggingRule(new DefaultLoggingRuleConfigurationBuilder().build())))); ShardingSphereDatabase database = mockDatabase(); 
when(result.getMetaDataContexts().getMetaData().getDatabase("foo_db")).thenReturn(database); + when(result.getMetaDataContexts().getMetaData().containsDatabase("foo_db")).thenReturn(true); return result; } private ShardingSphereDatabase mockDatabase() { ShardingSphereDatabase result = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); when(result.getResourceMetaData().getAllInstanceDataSourceNames()).thenReturn(Collections.singleton("foo_ds")); - when(result.getResourceMetaData().getStorageTypes()).thenReturn(Collections.singletonMap("foo_ds", TypedSPILoader.getService(DatabaseType.class, "openGauss"))); + StorageUnit storageUnit = mock(StorageUnit.class); + when(storageUnit.getStorageType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "openGauss")); + when(result.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("foo_ds", storageUnit)); when(result.getRuleMetaData()).thenReturn(new RuleMetaData(Collections.emptyList())); + when(result.containsSchema("public")).thenReturn(true); + when(result.getSchema("public").containsTable("bmsql")).thenReturn(true); + when(result.getSchema("public").getTable("bmsql").getColumnValues()).thenReturn(Collections.singleton(new ShardingSphereColumn("id", Types.VARCHAR, false, false, false, true, false, false))); return result; } } diff --git a/proxy/frontend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/err/OpenGaussErrorPacketFactoryTest.java b/proxy/frontend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/err/OpenGaussErrorPacketFactoryTest.java index f38c50e9d8a25..689be82bc5e44 100644 --- a/proxy/frontend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/err/OpenGaussErrorPacketFactoryTest.java +++ b/proxy/frontend/type/opengauss/src/test/java/org/apache/shardingsphere/proxy/frontend/opengauss/err/OpenGaussErrorPacketFactoryTest.java @@ -29,8 +29,6 @@ import static 
org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; class OpenGaussErrorPacketFactoryTest { @@ -70,14 +68,13 @@ void assertNewInstanceWithSQLException() { @Test void assertNewInstanceWithUnknownException() { - Exception cause = mock(Exception.class); - when(cause.getLocalizedMessage()).thenReturn("LocalizedMessage"); + Exception cause = new RuntimeException("No reason"); OpenGaussErrorResponsePacket actual = OpenGaussErrorPacketFactory.newInstance(cause); Map actualFields = getFieldsInPacket(actual); assertThat(actualFields.size(), is(4)); assertThat(actualFields.get(OpenGaussErrorResponsePacket.FIELD_TYPE_SEVERITY), is("ERROR")); assertThat(actualFields.get(OpenGaussErrorResponsePacket.FIELD_TYPE_CODE), is("58000")); - assertThat(actualFields.get(OpenGaussErrorResponsePacket.FIELD_TYPE_MESSAGE), is("LocalizedMessage")); + assertThat(actualFields.get(OpenGaussErrorResponsePacket.FIELD_TYPE_MESSAGE), is("Unknown exception: No reason")); assertThat(actualFields.get(OpenGaussErrorResponsePacket.FIELD_TYPE_ERRORCODE), is("0")); } diff --git a/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/PostgreSQLCommandExecuteEngine.java b/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/PostgreSQLCommandExecuteEngine.java index 2d74490a1e06f..ccf31a6fe60ed 100644 --- a/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/PostgreSQLCommandExecuteEngine.java +++ b/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/PostgreSQLCommandExecuteEngine.java @@ -41,7 +41,7 @@ import org.apache.shardingsphere.proxy.frontend.command.executor.QueryCommandExecutor; import org.apache.shardingsphere.proxy.frontend.command.executor.ResponseType; import 
org.apache.shardingsphere.proxy.frontend.postgresql.command.query.PostgreSQLCommand; -import org.apache.shardingsphere.proxy.frontend.postgresql.err.PostgreSQLErrPacketFactory; +import org.apache.shardingsphere.proxy.frontend.postgresql.err.PostgreSQLErrorPacketFactory; import java.sql.SQLException; import java.util.Optional; @@ -71,7 +71,7 @@ public CommandExecutor getCommandExecutor(final CommandPacketType type, final Co @Override public PostgreSQLPacket getErrorPacket(final Exception cause) { - return PostgreSQLErrPacketFactory.newInstance(cause); + return PostgreSQLErrorPacketFactory.newInstance(cause); } @Override diff --git a/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/PostgreSQLCommand.java b/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/PostgreSQLCommand.java index 288fa6f5aee1f..edac18be14e55 100644 --- a/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/PostgreSQLCommand.java +++ b/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/PostgreSQLCommand.java @@ -159,7 +159,7 @@ public static Optional valueOf(final Class getPostgreSQLCommand(final Class sqlStatementClass) { CachedResult result = COMPUTED_STATEMENTS.get(sqlStatementClass); - return null != result ? result.get() : COMPUTED_STATEMENTS.computeIfAbsent(sqlStatementClass, PostgreSQLCommand::compute).get(); + return null == result ? 
COMPUTED_STATEMENTS.computeIfAbsent(sqlStatementClass, PostgreSQLCommand::compute).get() : result.get(); } private static CachedResult compute(final Class target) { @@ -167,8 +167,8 @@ private static CachedResult compute(final Class target) return result.map(CachedResult::new).orElse(CachedResult.EMPTY); } - private static boolean matches(final Class sqlStatementClass, final PostgreSQLCommand postgreSQLCommand) { - return postgreSQLCommand.sqlStatementClasses.stream().anyMatch(each -> each.isAssignableFrom(sqlStatementClass)); + private static boolean matches(final Class sqlStatementClass, final PostgreSQLCommand command) { + return command.sqlStatementClasses.stream().anyMatch(each -> each.isAssignableFrom(sqlStatementClass)); } @RequiredArgsConstructor diff --git a/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/PostgreSQLBatchedStatementsExecutor.java b/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/PostgreSQLBatchedStatementsExecutor.java index 36f9c194cc0ec..bff136ac995c7 100644 --- a/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/PostgreSQLBatchedStatementsExecutor.java +++ b/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/PostgreSQLBatchedStatementsExecutor.java @@ -19,8 +19,8 @@ import org.apache.shardingsphere.db.protocol.postgresql.packet.command.query.extended.bind.PostgreSQLTypeUnspecifiedSQLParameter; import org.apache.shardingsphere.infra.binder.context.aware.ParameterAware; -import org.apache.shardingsphere.infra.binder.engine.SQLBindEngine; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; +import org.apache.shardingsphere.infra.binder.engine.SQLBindEngine; import 
org.apache.shardingsphere.infra.config.props.ConfigurationPropertyKey; import org.apache.shardingsphere.infra.connection.kernel.KernelProcessor; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; @@ -96,7 +96,7 @@ public PostgreSQLBatchedStatementsExecutor(final ConnectionSession connectionSes ExecutionContext executionContext = null; if (parameterSetsIterator.hasNext()) { List firstGroupOfParam = parameterSetsIterator.next(); - sqlStatementContext = createSQLStatementContext(firstGroupOfParam); + sqlStatementContext = createSQLStatementContext(firstGroupOfParam, preparedStatement.getHintValueContext()); executionContext = createExecutionContext(createQueryContext(sqlStatementContext, firstGroupOfParam, preparedStatement.getHintValueContext())); for (ExecutionUnit each : executionContext.getExecutionUnits()) { executionUnitParams.computeIfAbsent(each, unused -> new LinkedList<>()).add(each.getSqlUnit().getParameters()); @@ -106,8 +106,8 @@ public PostgreSQLBatchedStatementsExecutor(final ConnectionSession connectionSes prepareForRestOfParametersSet(parameterSetsIterator, sqlStatementContext, preparedStatement.getHintValueContext()); } - private SQLStatementContext createSQLStatementContext(final List params) { - return new SQLBindEngine(metaDataContexts.getMetaData(), connectionSession.getDatabaseName()).bind(preparedStatement.getSqlStatementContext().getSqlStatement(), params); + private SQLStatementContext createSQLStatementContext(final List params, final HintValueContext hintValueContext) { + return new SQLBindEngine(metaDataContexts.getMetaData(), connectionSession.getDatabaseName(), hintValueContext).bind(preparedStatement.getSqlStatementContext().getSqlStatement(), params); } private void prepareForRestOfParametersSet(final Iterator> paramSetsIterator, final SQLStatementContext sqlStatementContext, final HintValueContext hintValueContext) { @@ -151,7 +151,7 @@ private void addBatchedParametersToPreparedStatements() throws SQLException { 
DriverExecutionPrepareEngine prepareEngine = new DriverExecutionPrepareEngine<>(JDBCDriverType.PREPARED_STATEMENT, metaDataContexts.getMetaData().getProps().getValue(ConfigurationPropertyKey.MAX_CONNECTIONS_SIZE_PER_QUERY), connectionSession.getDatabaseConnectionManager(), (JDBCBackendStatement) connectionSession.getStatementManager(), - new StatementOption(false), rules, metaDataContexts.getMetaData().getDatabase(connectionSession.getDatabaseName()).getResourceMetaData().getStorageTypes()); + new StatementOption(false), rules, metaDataContexts.getMetaData().getDatabase(connectionSession.getDatabaseName()).getResourceMetaData().getStorageUnitMetaData()); executionGroupContext = prepareEngine.prepare(anyExecutionContext.getRouteContext(), executionUnitParams.keySet(), new ExecutionGroupReportContext(connectionSession.getProcessId(), connectionSession.getDatabaseName(), connectionSession.getGrantee())); for (ExecutionGroup eachGroup : executionGroupContext.getInputGroups()) { diff --git a/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/describe/PostgreSQLComDescribeExecutor.java b/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/describe/PostgreSQLComDescribeExecutor.java index 877dbb59b7b35..c847dfec69905 100644 --- a/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/describe/PostgreSQLComDescribeExecutor.java +++ b/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/describe/PostgreSQLComDescribeExecutor.java @@ -238,8 +238,8 @@ private int estimateColumnLength(final int jdbcType) { private void tryDescribePreparedStatementByJDBC(final PostgreSQLServerPreparedStatement logicPreparedStatement) throws SQLException { MetaDataContexts metaDataContexts = 
ProxyContext.getInstance().getContextManager().getMetaDataContexts(); String databaseName = connectionSession.getDatabaseName(); - SQLStatementContext sqlStatementContext = - new SQLBindEngine(metaDataContexts.getMetaData(), databaseName).bind(logicPreparedStatement.getSqlStatementContext().getSqlStatement(), Collections.emptyList()); + SQLStatementContext sqlStatementContext = new SQLBindEngine(metaDataContexts.getMetaData(), databaseName, logicPreparedStatement.getHintValueContext()) + .bind(logicPreparedStatement.getSqlStatementContext().getSqlStatement(), Collections.emptyList()); QueryContext queryContext = new QueryContext(sqlStatementContext, logicPreparedStatement.getSql(), Collections.emptyList(), logicPreparedStatement.getHintValueContext()); ShardingSphereDatabase database = ProxyContext.getInstance().getDatabase(databaseName); ExecutionContext executionContext = new KernelProcessor().generateExecutionContext( diff --git a/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/parse/PostgreSQLComParseExecutor.java b/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/parse/PostgreSQLComParseExecutor.java index 77aeee2c08de4..e580410f307ec 100644 --- a/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/parse/PostgreSQLComParseExecutor.java +++ b/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/parse/PostgreSQLComParseExecutor.java @@ -23,8 +23,8 @@ import org.apache.shardingsphere.db.protocol.postgresql.packet.command.query.extended.parse.PostgreSQLComParsePacket; import org.apache.shardingsphere.db.protocol.postgresql.packet.command.query.extended.parse.PostgreSQLParseCompletePacket; import org.apache.shardingsphere.distsql.parser.statement.DistSQLStatement; -import 
org.apache.shardingsphere.infra.binder.engine.SQLBindEngine; import org.apache.shardingsphere.infra.binder.context.statement.SQLStatementContext; +import org.apache.shardingsphere.infra.binder.engine.SQLBindEngine; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.parser.SQLParserEngine; import org.apache.shardingsphere.mode.metadata.MetaDataContexts; @@ -39,6 +39,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.ParameterMarkerSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.AbstractSQLStatement; import org.apache.shardingsphere.sql.parser.sql.common.statement.SQLStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.DMLStatement; import java.util.ArrayList; import java.util.Collection; @@ -61,6 +62,11 @@ public Collection execute() { SQLParserEngine sqlParserEngine = createShardingSphereSQLParserEngine(connectionSession.getDatabaseName()); String sql = packet.getSQL(); SQLStatement sqlStatement = sqlParserEngine.parse(sql, true); + String escapedSql = escape(sqlStatement, sql); + if (!escapedSql.equalsIgnoreCase(sql)) { + sqlStatement = sqlParserEngine.parse(escapedSql, true); + sql = escapedSql; + } List actualParameterMarkerIndexes = new ArrayList<>(); if (sqlStatement.getParameterCount() > 0) { List parameterMarkerSegments = new ArrayList<>(((AbstractSQLStatement) sqlStatement).getParameterMarkerSegments()); @@ -72,8 +78,8 @@ public Collection execute() { } List paddedColumnTypes = paddingColumnTypes(sqlStatement.getParameterCount(), packet.readParameterTypes()); SQLStatementContext sqlStatementContext = sqlStatement instanceof DistSQLStatement ? 
new DistSQLStatementContext((DistSQLStatement) sqlStatement) - : new SQLBindEngine(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData(), connectionSession.getDefaultDatabaseName()).bind(sqlStatement, - Collections.emptyList()); + : new SQLBindEngine(ProxyContext.getInstance().getContextManager().getMetaDataContexts().getMetaData(), connectionSession.getDefaultDatabaseName(), packet.getHintValueContext()) + .bind(sqlStatement, Collections.emptyList()); PostgreSQLServerPreparedStatement serverPreparedStatement = new PostgreSQLServerPreparedStatement(sql, sqlStatementContext, packet.getHintValueContext(), paddedColumnTypes, actualParameterMarkerIndexes); connectionSession.getServerPreparedStatementRegistry().addPreparedStatement(packet.getStatementId(), serverPreparedStatement); @@ -87,6 +93,13 @@ private SQLParserEngine createShardingSphereSQLParserEngine(final String databas return sqlParserRule.getSQLParserEngine(protocolType.getTrunkDatabaseType().orElse(protocolType)); } + private String escape(final SQLStatement sqlStatement, final String sql) { + if (sqlStatement instanceof DMLStatement) { + return sql.replace("?", "??"); + } + return sql; + } + private String convertSQLToJDBCStyle(final List parameterMarkerSegments, final String sql) { parameterMarkerSegments.sort(Comparator.comparingInt(SQLSegment::getStopIndex)); StringBuilder result = new StringBuilder(sql); diff --git a/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/err/PostgreSQLErrPacketFactory.java b/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/err/PostgreSQLErrorPacketFactory.java similarity index 71% rename from proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/err/PostgreSQLErrPacketFactory.java rename to 
proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/err/PostgreSQLErrorPacketFactory.java index 926f6fb2220b2..3815e41526efa 100644 --- a/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/err/PostgreSQLErrPacketFactory.java +++ b/proxy/frontend/type/postgresql/src/main/java/org/apache/shardingsphere/proxy/frontend/postgresql/err/PostgreSQLErrorPacketFactory.java @@ -22,45 +22,40 @@ import lombok.NoArgsConstructor; import org.apache.shardingsphere.db.protocol.postgresql.constant.PostgreSQLMessageSeverityLevel; import org.apache.shardingsphere.db.protocol.postgresql.packet.generic.PostgreSQLErrorResponsePacket; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.exception.core.external.sql.sqlstate.XOpenSQLState; import org.apache.shardingsphere.infra.exception.dialect.SQLExceptionTransformEngine; -import org.apache.shardingsphere.infra.exception.dialect.exception.SQLDialectException; import org.apache.shardingsphere.infra.exception.postgresql.exception.PostgreSQLException; import org.apache.shardingsphere.infra.exception.postgresql.vendor.PostgreSQLVendorError; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.exception.core.external.sql.ShardingSphereSQLException; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.postgresql.util.PSQLException; import org.postgresql.util.ServerErrorMessage; import java.sql.SQLException; +import java.util.Optional; /** - * ERR packet factory for PostgreSQL. + * Error packet factory for PostgreSQL. 
*/ @NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class PostgreSQLErrPacketFactory { +public final class PostgreSQLErrorPacketFactory { + + private static final DatabaseType DATABASE_TYPE = TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"); /** - * Create new instance of PostgreSQL ERR packet. + * Create new instance of PostgreSQL error packet. * * @param cause cause * @return created instance */ - @SuppressWarnings("ConstantConditions") public static PostgreSQLErrorResponsePacket newInstance(final Exception cause) { - if (cause instanceof PSQLException && null != ((PSQLException) cause).getServerErrorMessage()) { - return createErrorResponsePacket(((PSQLException) cause).getServerErrorMessage()); - } - if (cause instanceof SQLException || cause instanceof ShardingSphereSQLException || cause instanceof SQLDialectException) { - return createErrorResponsePacket(SQLExceptionTransformEngine.toSQLException(cause, TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"))); - } - // TODO PostgreSQL need consider FrontendConnectionLimitException - return createErrorResponsePacketForUnknownException(cause); + Optional serverErrorMessage = findServerErrorMessage(cause); + return serverErrorMessage.map(PostgreSQLErrorPacketFactory::createErrorResponsePacket) + .orElseGet(() -> createErrorResponsePacket(SQLExceptionTransformEngine.toSQLException(cause, DATABASE_TYPE))); } - private static PostgreSQLErrorResponsePacket createErrorResponsePacket(final PostgreSQLException.ServerErrorMessage serverErrorMessage) { - return PostgreSQLErrorResponsePacket.newBuilder(serverErrorMessage.getSeverity(), serverErrorMessage.getSqlState(), serverErrorMessage.getMessage()) - .build(); + private static Optional findServerErrorMessage(final Exception cause) { + return cause instanceof PSQLException ? 
Optional.ofNullable(((PSQLException) cause).getServerErrorMessage()) : Optional.empty(); } private static PostgreSQLErrorResponsePacket createErrorResponsePacket(final ServerErrorMessage serverErrorMessage) { @@ -71,7 +66,6 @@ private static PostgreSQLErrorResponsePacket createErrorResponsePacket(final Ser .constraintName(serverErrorMessage.getConstraint()).file(serverErrorMessage.getFile()).line(serverErrorMessage.getLine()).routine(serverErrorMessage.getRoutine()).build(); } - @SuppressWarnings("ConstantConditions") private static PostgreSQLErrorResponsePacket createErrorResponsePacket(final SQLException cause) { if (cause instanceof PostgreSQLException && null != ((PostgreSQLException) cause).getServerErrorMessage()) { return createErrorResponsePacket(((PostgreSQLException) cause).getServerErrorMessage()); @@ -79,14 +73,14 @@ private static PostgreSQLErrorResponsePacket createErrorResponsePacket(final SQL if (cause instanceof PSQLException && null != ((PSQLException) cause).getServerErrorMessage()) { return createErrorResponsePacket(((PSQLException) cause).getServerErrorMessage()); } - String sqlState = Strings.isNullOrEmpty(cause.getSQLState()) ? PostgreSQLVendorError.SYSTEM_ERROR.getSqlState().getValue() : cause.getSQLState(); + String sqlState = Strings.isNullOrEmpty(cause.getSQLState()) || XOpenSQLState.GENERAL_ERROR.getValue().equals(cause.getSQLState()) + ? PostgreSQLVendorError.SYSTEM_ERROR.getSqlState().getValue() + : cause.getSQLState(); String message = Strings.isNullOrEmpty(cause.getMessage()) ? cause.toString() : cause.getMessage(); return PostgreSQLErrorResponsePacket.newBuilder(PostgreSQLMessageSeverityLevel.ERROR, sqlState, message).build(); } - private static PostgreSQLErrorResponsePacket createErrorResponsePacketForUnknownException(final Exception cause) { - // TODO add FIELD_TYPE_CODE for common error and consider what severity to use - String message = Strings.isNullOrEmpty(cause.getLocalizedMessage()) ? 
cause.toString() : cause.getLocalizedMessage(); - return PostgreSQLErrorResponsePacket.newBuilder(PostgreSQLMessageSeverityLevel.ERROR, PostgreSQLVendorError.SYSTEM_ERROR, message).build(); + private static PostgreSQLErrorResponsePacket createErrorResponsePacket(final PostgreSQLException.ServerErrorMessage serverErrorMessage) { + return PostgreSQLErrorResponsePacket.newBuilder(serverErrorMessage.getSeverity(), serverErrorMessage.getSqlState(), serverErrorMessage.getMessage()).build(); } } diff --git a/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/PostgreSQLAggregatedBatchedStatementsCommandExecutorTest.java b/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/PostgreSQLAggregatedBatchedStatementsCommandExecutorTest.java index 9991a585f1461..979ab817ba8a0 100644 --- a/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/PostgreSQLAggregatedBatchedStatementsCommandExecutorTest.java +++ b/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/PostgreSQLAggregatedBatchedStatementsCommandExecutorTest.java @@ -35,7 +35,9 @@ import org.apache.shardingsphere.infra.executor.sql.prepare.driver.jdbc.StatementOption; import org.apache.shardingsphere.infra.hint.HintValueContext; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; import org.apache.shardingsphere.infra.parser.ShardingSphereSQLParserEngine; import org.apache.shardingsphere.infra.session.connection.ConnectionContext; import 
org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; @@ -60,6 +62,7 @@ import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; +import java.sql.Types; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -150,14 +153,21 @@ private ContextManager mockContextManager() { when(result.getMetaDataContexts().getMetaData().getProps().getValue(ConfigurationPropertyKey.KERNEL_EXECUTOR_SIZE)).thenReturn(0); when(result.getMetaDataContexts().getMetaData().getProps().getValue(ConfigurationPropertyKey.MAX_CONNECTIONS_SIZE_PER_QUERY)).thenReturn(1); when(result.getMetaDataContexts().getMetaData().getProps().getValue(ConfigurationPropertyKey.SQL_SHOW)).thenReturn(false); - RuleMetaData globalRuleMetaData = new RuleMetaData(Arrays.asList(new SQLTranslatorRule(new DefaultSQLTranslatorRuleConfigurationBuilder().build()), - new LoggingRule(new DefaultLoggingRuleConfigurationBuilder().build()))); + RuleMetaData globalRuleMetaData = new RuleMetaData(Arrays.asList( + new SQLTranslatorRule(new DefaultSQLTranslatorRuleConfigurationBuilder().build()), new LoggingRule(new DefaultLoggingRuleConfigurationBuilder().build()))); when(result.getMetaDataContexts().getMetaData().getGlobalRuleMetaData()).thenReturn(globalRuleMetaData); ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); when(database.getResourceMetaData().getAllInstanceDataSourceNames()).thenReturn(Collections.singletonList("foo_ds")); - when(database.getResourceMetaData().getStorageTypes()).thenReturn(Collections.singletonMap("foo_ds", TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"))); + StorageUnit storageUnit = mock(StorageUnit.class); + when(storageUnit.getStorageType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")); + when(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("foo_ds", storageUnit)); 
when(database.getRuleMetaData()).thenReturn(new RuleMetaData(Collections.emptyList())); + when(database.containsSchema("public")).thenReturn(true); + when(database.getSchema("public").containsTable("t_order")).thenReturn(true); when(result.getMetaDataContexts().getMetaData().getDatabase("foo_db")).thenReturn(database); + when(result.getMetaDataContexts().getMetaData().containsDatabase("foo_db")).thenReturn(true); + when(database.getSchema("public").getTable("t_order").getColumnValues()) + .thenReturn(Collections.singleton(new ShardingSphereColumn("id", Types.VARCHAR, false, false, false, true, false, false))); return result; } } diff --git a/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/PostgreSQLBatchedStatementsExecutorTest.java b/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/PostgreSQLBatchedStatementsExecutorTest.java index 06c850a117362..9a61edf1ef455 100644 --- a/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/PostgreSQLBatchedStatementsExecutorTest.java +++ b/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/PostgreSQLBatchedStatementsExecutorTest.java @@ -28,7 +28,9 @@ import org.apache.shardingsphere.infra.executor.sql.prepare.driver.jdbc.StatementOption; import org.apache.shardingsphere.infra.hint.HintValueContext; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; import org.apache.shardingsphere.infra.session.connection.ConnectionContext; import 
org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.logging.rule.LoggingRule; @@ -38,6 +40,9 @@ import org.apache.shardingsphere.proxy.backend.connector.jdbc.statement.JDBCBackendStatement; import org.apache.shardingsphere.proxy.backend.context.ProxyContext; import org.apache.shardingsphere.proxy.backend.session.ConnectionSession; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableNameSegment; +import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue; import org.apache.shardingsphere.sql.parser.sql.dialect.statement.postgresql.dml.PostgreSQLInsertStatement; import org.apache.shardingsphere.sqltranslator.rule.SQLTranslatorRule; import org.apache.shardingsphere.sqltranslator.rule.builder.DefaultSQLTranslatorRuleConfigurationBuilder; @@ -54,6 +59,7 @@ import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; +import java.sql.Types; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -93,12 +99,12 @@ void assertExecuteBatch() throws SQLException { .thenReturn(preparedStatement); ContextManager contextManager = mockContextManager(); ConnectionSession connectionSession = mockConnectionSession(); - PostgreSQLServerPreparedStatement postgreSQLPreparedStatement = new PostgreSQLServerPreparedStatement("insert into t (id, col) values (?, ?)", mockInsertStatementContext(), + PostgreSQLServerPreparedStatement postgresqlPreparedStatement = new PostgreSQLServerPreparedStatement("insert into t (id, col) values (?, ?)", mockInsertStatementContext(), new HintValueContext(), Arrays.asList(PostgreSQLColumnType.INT4, PostgreSQLColumnType.VARCHAR), Arrays.asList(0, 1)); List> parameterSets = Arrays.asList(Arrays.asList(1, new PostgreSQLTypeUnspecifiedSQLParameter("foo")), Arrays.asList(2, new 
PostgreSQLTypeUnspecifiedSQLParameter("bar")), Arrays.asList(3, new PostgreSQLTypeUnspecifiedSQLParameter("baz"))); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); - PostgreSQLBatchedStatementsExecutor actual = new PostgreSQLBatchedStatementsExecutor(connectionSession, postgreSQLPreparedStatement, parameterSets); + PostgreSQLBatchedStatementsExecutor actual = new PostgreSQLBatchedStatementsExecutor(connectionSession, postgresqlPreparedStatement, parameterSets); prepareExecutionUnitParameters(actual, parameterSets); int actualUpdated = actual.executeBatch(); assertThat(actualUpdated, is(3)); @@ -112,9 +118,10 @@ void assertExecuteBatch() throws SQLException { private InsertStatementContext mockInsertStatementContext() { PostgreSQLInsertStatement insertStatement = mock(PostgreSQLInsertStatement.class, RETURNS_DEEP_STUBS); - when(insertStatement.getTable().getTableName().getIdentifier().getValue()).thenReturn("t"); + when(insertStatement.getTable()).thenReturn(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t")))); when(insertStatement.getValues()).thenReturn(Collections.emptyList()); when(insertStatement.getCommentSegments()).thenReturn(Collections.emptyList()); + when(insertStatement.getDatabaseType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")); InsertStatementContext result = mock(InsertStatementContext.class); when(result.getSqlStatement()).thenReturn(insertStatement); return result; @@ -126,9 +133,16 @@ private ContextManager mockContextManager() { when(result.getMetaDataContexts().getMetaData().getProps().getValue(ConfigurationPropertyKey.MAX_CONNECTIONS_SIZE_PER_QUERY)).thenReturn(1); when(result.getMetaDataContexts().getMetaData().getProps().getValue(ConfigurationPropertyKey.SQL_SHOW)).thenReturn(false); ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS); - 
when(database.getResourceMetaData().getStorageTypes()).thenReturn(Collections.singletonMap("ds_0", TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"))); + StorageUnit storageUnit = mock(StorageUnit.class); + when(storageUnit.getStorageType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")); + when(database.getResourceMetaData().getStorageUnitMetaData().getStorageUnits()).thenReturn(Collections.singletonMap("ds_0", storageUnit)); when(database.getResourceMetaData().getAllInstanceDataSourceNames()).thenReturn(Collections.singletonList("ds_0")); when(database.getRuleMetaData()).thenReturn(new RuleMetaData(Collections.emptyList())); + when(database.containsSchema("public")).thenReturn(true); + when(database.getSchema("public").containsTable("t")).thenReturn(true); + when(database.getSchema("public").getTable("t").getColumnValues()).thenReturn(Arrays.asList(new ShardingSphereColumn("id", Types.VARCHAR, false, false, false, true, false, false), + new ShardingSphereColumn("col", Types.VARCHAR, false, false, false, true, false, false))); + when(result.getMetaDataContexts().getMetaData().containsDatabase("db")).thenReturn(true); when(result.getMetaDataContexts().getMetaData().getDatabase("db")).thenReturn(database); RuleMetaData globalRuleMetaData = new RuleMetaData(Arrays.asList(new SQLTranslatorRule(new DefaultSQLTranslatorRuleConfigurationBuilder().build()), new LoggingRule(new DefaultLoggingRuleConfigurationBuilder().build()))); diff --git a/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/describe/PostgreSQLComDescribeExecutorTest.java b/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/describe/PostgreSQLComDescribeExecutorTest.java index 9d307b884bb5b..b44e5a902d2e8 100644 --- 
a/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/describe/PostgreSQLComDescribeExecutorTest.java +++ b/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/describe/PostgreSQLComDescribeExecutorTest.java @@ -36,6 +36,7 @@ import org.apache.shardingsphere.infra.executor.sql.execute.engine.ConnectionMode; import org.apache.shardingsphere.infra.hint.HintValueContext; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; @@ -384,8 +385,8 @@ private ContextManager mockContextManager() { when(result.getMetaDataContexts().getMetaData().getProps().getValue(ConfigurationPropertyKey.SQL_SHOW)).thenReturn(false); when(connectionSession.getDatabaseName()).thenReturn(DATABASE_NAME); when(connectionSession.getServerPreparedStatementRegistry()).thenReturn(new ServerPreparedStatementRegistry()); - RuleMetaData globalRuleMetaData = new RuleMetaData(Arrays.asList(new SQLTranslatorRule(new DefaultSQLTranslatorRuleConfigurationBuilder().build()), - new LoggingRule(new DefaultLoggingRuleConfigurationBuilder().build()))); + RuleMetaData globalRuleMetaData = new RuleMetaData(Arrays.asList( + new SQLTranslatorRule(new DefaultSQLTranslatorRuleConfigurationBuilder().build()), new LoggingRule(new DefaultLoggingRuleConfigurationBuilder().build()))); when(result.getMetaDataContexts().getMetaData().getGlobalRuleMetaData()).thenReturn(globalRuleMetaData); when(result.getMetaDataContexts().getMetaData().getDatabases()).thenReturn(Collections.singletonMap(DATABASE_NAME, 
mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS))); Collection columnMetaData = Arrays.asList( @@ -399,8 +400,13 @@ private ContextManager mockContextManager() { when(schema.getTable(TABLE_NAME)).thenReturn(table); when(schema.getAllColumnNames(TABLE_NAME)).thenReturn(Arrays.asList("id", "k", "c", "pad")); when(result.getMetaDataContexts().getMetaData().getDatabase(DATABASE_NAME).getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")); - when(result.getMetaDataContexts().getMetaData().getDatabase(DATABASE_NAME).getResourceMetaData().getStorageTypes()) - .thenReturn(Collections.singletonMap("ds_0", TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"))); + StorageUnit storageUnit = mock(StorageUnit.class); + when(storageUnit.getStorageType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")); + when(result.getMetaDataContexts().getMetaData().getDatabase(DATABASE_NAME).getResourceMetaData().getStorageUnitMetaData().getStorageUnits()) + .thenReturn(Collections.singletonMap("ds_0", storageUnit)); + when(result.getMetaDataContexts().getMetaData().containsDatabase(DATABASE_NAME)).thenReturn(true); + when(result.getMetaDataContexts().getMetaData().getDatabase(DATABASE_NAME).containsSchema("public")).thenReturn(true); + when(result.getMetaDataContexts().getMetaData().getDatabase(DATABASE_NAME).getSchema("public").containsTable(TABLE_NAME)).thenReturn(true); return result; } diff --git a/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/parse/PostgreSQLComParseExecutorTest.java b/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/parse/PostgreSQLComParseExecutorTest.java index 8fa7257db0542..53d5084982f9f 100644 --- 
a/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/parse/PostgreSQLComParseExecutorTest.java +++ b/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/command/query/extended/parse/PostgreSQLComParseExecutorTest.java @@ -25,7 +25,13 @@ import org.apache.shardingsphere.infra.binder.context.statement.CommonSQLStatementContext; import org.apache.shardingsphere.infra.binder.context.statement.dml.InsertStatementContext; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.hint.HintValueContext; +import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; +import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereColumn; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; +import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereTable; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.mode.manager.ContextManager; import org.apache.shardingsphere.parser.rule.SQLParserRule; @@ -45,7 +51,10 @@ import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.internal.configuration.plugins.Plugins; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; +import java.sql.Types; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -59,6 +68,7 @@ @ExtendWith(AutoMockExtension.class) @StaticMockSettings(ProxyContext.class) +@MockitoSettings(strictness = Strictness.LENIENT) class PostgreSQLComParseExecutorTest { @Mock @@ -74,6 +84,7 @@ class PostgreSQLComParseExecutorTest { void setup() { 
when(connectionSession.getServerPreparedStatementRegistry()).thenReturn(new ServerPreparedStatementRegistry()); when(connectionSession.getDatabaseName()).thenReturn("foo_db"); + when(connectionSession.getDefaultDatabaseName()).thenReturn("foo_db"); } @Test @@ -82,6 +93,7 @@ void assertExecuteWithEmptySQL() { final String statementId = "S_1"; when(parsePacket.getSQL()).thenReturn(expectedSQL); when(parsePacket.getStatementId()).thenReturn(statementId); + when(parsePacket.getHintValueContext()).thenReturn(new HintValueContext()); ContextManager contextManager = mockContextManager(); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); Collection actualPackets = executor.execute(); @@ -102,6 +114,7 @@ void assertExecuteWithParameterizedSQL() throws ReflectiveOperationException { when(parsePacket.getSQL()).thenReturn(rawSQL); when(parsePacket.getStatementId()).thenReturn(statementId); when(parsePacket.readParameterTypes()).thenReturn(Collections.singletonList(PostgreSQLColumnType.INT4)); + when(parsePacket.getHintValueContext()).thenReturn(new HintValueContext()); when(connectionSession.getDefaultDatabaseName()).thenReturn("foo_db"); Plugins.getMemberAccessor().set(PostgreSQLComParseExecutor.class.getDeclaredField("connectionSession"), executor, connectionSession); ContextManager contextManager = mockContextManager(); @@ -122,6 +135,7 @@ void assertExecuteWithNonOrderedParameterizedSQL() throws ReflectiveOperationExc final String expectedSQL = "update t_test set name=? 
where id=?"; final String statementId = "S_2"; when(parsePacket.getSQL()).thenReturn(rawSQL); + when(parsePacket.getHintValueContext()).thenReturn(new HintValueContext()); when(parsePacket.getStatementId()).thenReturn(statementId); when(parsePacket.readParameterTypes()).thenReturn(Arrays.asList(PostgreSQLColumnType.JSON, PostgreSQLColumnType.INT4)); Plugins.getMemberAccessor().set(PostgreSQLComParseExecutor.class.getDeclaredField("connectionSession"), executor, connectionSession); @@ -134,6 +148,22 @@ void assertExecuteWithNonOrderedParameterizedSQL() throws ReflectiveOperationExc assertThat(actualPreparedStatement.getActualParameterMarkerIndexes(), is(Arrays.asList(1, 0))); } + @Test + void assertExecuteWithQuestionOperator() throws ReflectiveOperationException { + final String rawSQL = "update t_test set enabled = $1 where name ?& $2"; + final String expectedSQL = "update t_test set enabled = ? where name ??& ?"; + final String statementId = "S_2"; + when(parsePacket.getSQL()).thenReturn(rawSQL); + when(parsePacket.getStatementId()).thenReturn(statementId); + when(parsePacket.getHintValueContext()).thenReturn(new HintValueContext()); + Plugins.getMemberAccessor().set(PostgreSQLComParseExecutor.class.getDeclaredField("connectionSession"), executor, connectionSession); + ContextManager contextManager = mockContextManager(); + when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); + executor.execute(); + PostgreSQLServerPreparedStatement actualPreparedStatement = connectionSession.getServerPreparedStatementRegistry().getPreparedStatement(statementId); + assertThat(actualPreparedStatement.getSql(), is(expectedSQL)); + } + @Test void assertExecuteWithDistSQL() { String sql = "SHOW DIST VARIABLE WHERE NAME = sql_show"; @@ -154,11 +184,23 @@ void assertExecuteWithDistSQL() { private ContextManager mockContextManager() { ContextManager result = mock(ContextManager.class, RETURNS_DEEP_STUBS); - 
when(result.getMetaDataContexts().getMetaData().getDatabase("foo_db").getResourceMetaData().getStorageTypes()).thenReturn( - Collections.singletonMap("foo_ds", TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"))); when(result.getMetaDataContexts().getMetaData().getDatabase("foo_db").getProtocolType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")); when(result.getMetaDataContexts().getMetaData().getGlobalRuleMetaData()) .thenReturn(new RuleMetaData(Collections.singleton(new SQLParserRule(new DefaultSQLParserRuleConfigurationBuilder().build())))); + ShardingSphereTable testTable = new ShardingSphereTable("t_test", Arrays.asList(new ShardingSphereColumn("id", Types.BIGINT, true, false, false, false, true, false), + new ShardingSphereColumn("name", Types.VARCHAR, false, false, false, false, false, false), + new ShardingSphereColumn("age", Types.SMALLINT, false, false, false, false, true, false)), Collections.emptyList(), Collections.emptyList()); + ShardingSphereTable sbTestTable = new ShardingSphereTable("sbtest1", Arrays.asList(new ShardingSphereColumn("id", Types.BIGINT, true, false, false, false, true, false), + new ShardingSphereColumn("k", Types.VARCHAR, false, false, false, false, false, false), + new ShardingSphereColumn("c", Types.VARCHAR, false, false, false, false, true, false), + new ShardingSphereColumn("pad", Types.VARCHAR, false, false, false, false, true, false)), Collections.emptyList(), Collections.emptyList()); + ShardingSphereSchema schema = new ShardingSphereSchema(); + schema.getTables().put("t_test", testTable); + schema.getTables().put("sbtest1", sbTestTable); + ShardingSphereDatabase database = new ShardingSphereDatabase("foo_db", TypedSPILoader.getService(DatabaseType.class, "PostgreSQL"), + new ResourceMetaData("foo_db", Collections.emptyMap()), new RuleMetaData(Collections.emptyList()), Collections.singletonMap("public", schema)); + 
when(result.getMetaDataContexts().getMetaData().getDatabase("foo_db")).thenReturn(database); + when(result.getMetaDataContexts().getMetaData().containsDatabase("foo_db")).thenReturn(true); return result; } } diff --git a/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/err/PostgreSQLErrPacketFactoryTest.java b/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/err/PostgreSQLErrorPacketFactoryTest.java similarity index 86% rename from proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/err/PostgreSQLErrPacketFactoryTest.java rename to proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/err/PostgreSQLErrorPacketFactoryTest.java index ffec1bd102ca5..b6cc9aba9a1cb 100644 --- a/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/err/PostgreSQLErrPacketFactoryTest.java +++ b/proxy/frontend/type/postgresql/src/test/java/org/apache/shardingsphere/proxy/frontend/postgresql/err/PostgreSQLErrorPacketFactoryTest.java @@ -33,7 +33,7 @@ import static org.mockito.Mockito.when; @SuppressWarnings("unchecked") -class PostgreSQLErrPacketFactoryTest { +class PostgreSQLErrorPacketFactoryTest { @Test void assertPSQLExceptionWithServerErrorMessageNotNull() throws ReflectiveOperationException { @@ -42,7 +42,7 @@ void assertPSQLExceptionWithServerErrorMessageNotNull() throws ReflectiveOperati when(serverErrorMessage.getSQLState()).thenReturn("sqlState"); when(serverErrorMessage.getMessage()).thenReturn("message"); when(serverErrorMessage.getPosition()).thenReturn(1); - PostgreSQLErrorResponsePacket actual = PostgreSQLErrPacketFactory.newInstance(new PSQLException(serverErrorMessage)); + PostgreSQLErrorResponsePacket actual = PostgreSQLErrorPacketFactory.newInstance(new PSQLException(serverErrorMessage)); Map fields = (Map) 
Plugins.getMemberAccessor().get(PostgreSQLErrorResponsePacket.class.getDeclaredField("fields"), actual); assertThat(fields.get(PostgreSQLErrorResponsePacket.FIELD_TYPE_SEVERITY), is(PostgreSQLMessageSeverityLevel.FATAL)); assertThat(fields.get(PostgreSQLErrorResponsePacket.FIELD_TYPE_CODE), is("sqlState")); @@ -52,7 +52,7 @@ void assertPSQLExceptionWithServerErrorMessageNotNull() throws ReflectiveOperati @Test void assertPSQLExceptionWithServerErrorMessageIsNull() throws ReflectiveOperationException { - PostgreSQLErrorResponsePacket actual = PostgreSQLErrPacketFactory.newInstance(new PSQLException("psqlEx", PSQLState.UNEXPECTED_ERROR, new Exception("test"))); + PostgreSQLErrorResponsePacket actual = PostgreSQLErrorPacketFactory.newInstance(new PSQLException("psqlEx", PSQLState.UNEXPECTED_ERROR, new Exception("test"))); Map fields = (Map) Plugins.getMemberAccessor().get(PostgreSQLErrorResponsePacket.class.getDeclaredField("fields"), actual); assertThat(fields.get(PostgreSQLErrorResponsePacket.FIELD_TYPE_CODE), is(PSQLState.UNEXPECTED_ERROR.getState())); assertThat(fields.get(PostgreSQLErrorResponsePacket.FIELD_TYPE_MESSAGE), is("psqlEx")); @@ -60,8 +60,8 @@ void assertPSQLExceptionWithServerErrorMessageIsNull() throws ReflectiveOperatio @Test void assertRuntimeException() throws ReflectiveOperationException { - PostgreSQLErrorResponsePacket actual = PostgreSQLErrPacketFactory.newInstance(new RuntimeException("test")); + PostgreSQLErrorResponsePacket actual = PostgreSQLErrorPacketFactory.newInstance(new RuntimeException("No reason")); Map fields = (Map) Plugins.getMemberAccessor().get(PostgreSQLErrorResponsePacket.class.getDeclaredField("fields"), actual); - assertThat(fields.get(PostgreSQLErrorResponsePacket.FIELD_TYPE_MESSAGE), is("test")); + assertThat(fields.get(PostgreSQLErrorResponsePacket.FIELD_TYPE_MESSAGE), is("Unknown exception: No reason")); } } diff --git a/test/e2e/agent/plugins/common/pom.xml b/test/e2e/agent/plugins/common/pom.xml index 
82b024c85a7ff..1a8f74e9a0cc2 100644 --- a/test/e2e/agent/plugins/common/pom.xml +++ b/test/e2e/agent/plugins/common/pom.xml @@ -32,8 +32,9 @@ - com.google.code.gson - gson + org.apache.shardingsphere + shardingsphere-infra-util + ${project.version} diff --git a/test/e2e/agent/plugins/common/src/test/java/org/apache/shardingsphere/test/e2e/agent/common/env/E2ETestEnvironment.java b/test/e2e/agent/plugins/common/src/test/java/org/apache/shardingsphere/test/e2e/agent/common/env/E2ETestEnvironment.java index c34414d905778..864a5b566c0e8 100644 --- a/test/e2e/agent/plugins/common/src/test/java/org/apache/shardingsphere/test/e2e/agent/common/env/E2ETestEnvironment.java +++ b/test/e2e/agent/plugins/common/src/test/java/org/apache/shardingsphere/test/e2e/agent/common/env/E2ETestEnvironment.java @@ -135,7 +135,7 @@ private DataSource createHikariCP(final Properties props) { private boolean waitForJdbcEnvironmentReady() { log.info("Jdbc project with agent environment initializing ..."); try { - Awaitility.await().atMost(2L, TimeUnit.MINUTES).pollInterval(5L, TimeUnit.SECONDS).until(() -> isJdbcReady()); + Awaitility.await().atMost(2L, TimeUnit.MINUTES).pollInterval(5L, TimeUnit.SECONDS).until(this::isJdbcReady); } catch (final ConditionTimeoutException ignored) { log.info("Jdbc project with agent environment initialization failed ..."); return false; diff --git a/test/e2e/agent/plugins/common/src/test/java/org/apache/shardingsphere/test/e2e/agent/common/util/OkHttpUtils.java b/test/e2e/agent/plugins/common/src/test/java/org/apache/shardingsphere/test/e2e/agent/common/util/OkHttpUtils.java index bd0886f1e9792..335a3f3cab181 100644 --- a/test/e2e/agent/plugins/common/src/test/java/org/apache/shardingsphere/test/e2e/agent/common/util/OkHttpUtils.java +++ b/test/e2e/agent/plugins/common/src/test/java/org/apache/shardingsphere/test/e2e/agent/common/util/OkHttpUtils.java @@ -17,10 +17,10 @@ package org.apache.shardingsphere.test.e2e.agent.common.util; -import com.google.gson.Gson; 
import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.Response; +import org.apache.shardingsphere.infra.util.json.JsonUtils; import java.io.IOException; import java.util.concurrent.TimeUnit; @@ -34,8 +34,6 @@ public final class OkHttpUtils { private static final OkHttpUtils OK_HTTP_UTILS = new OkHttpUtils(); - private static final Gson GSON = new Gson(); - private final OkHttpClient client; private OkHttpUtils() { @@ -65,7 +63,7 @@ public static OkHttpUtils getInstance() { * @throws IOException IO exception */ public T get(final String url, final Class clazz) throws IOException { - return GSON.fromJson(get(url), clazz); + return JsonUtils.fromJsonString(get(url), clazz); } /** @@ -81,16 +79,4 @@ public String get(final String url) throws IOException { assertNotNull(response.body()); return response.body().string(); } - - /** - * Get response. - * - * @param url url - * @return response - * @throws IOException IO exception - */ - public Response getResponse(final String url) throws IOException { - Request request = new Request.Builder().url(url).build(); - return client.newCall(request).execute(); - } } diff --git a/test/e2e/agent/plugins/logging/file/pom.xml b/test/e2e/agent/plugins/logging/file/pom.xml index bbe89bc35842e..d90e8a9c17d51 100644 --- a/test/e2e/agent/plugins/logging/file/pom.xml +++ b/test/e2e/agent/plugins/logging/file/pom.xml @@ -45,8 +45,8 @@ - mysql - mysql-connector-java + com.mysql + mysql-connector-j ${mysql-connector-java.version} runtime diff --git a/test/e2e/agent/plugins/logging/file/src/test/resources/docker/agent/conf/agent.yaml b/test/e2e/agent/plugins/logging/file/src/test/resources/docker/agent/conf/agent.yaml index 02648ba5226c5..b2e6876c75327 100644 --- a/test/e2e/agent/plugins/logging/file/src/test/resources/docker/agent/conf/agent.yaml +++ b/test/e2e/agent/plugins/logging/file/src/test/resources/docker/agent/conf/agent.yaml @@ -18,5 +18,3 @@ plugins: logging: File: - props: - level: "INFO" diff --git 
a/test/e2e/agent/plugins/metrics/prometheus/pom.xml b/test/e2e/agent/plugins/metrics/prometheus/pom.xml index 7f5e4882c8ae2..240a1f7fe426b 100644 --- a/test/e2e/agent/plugins/metrics/prometheus/pom.xml +++ b/test/e2e/agent/plugins/metrics/prometheus/pom.xml @@ -45,8 +45,8 @@ - mysql - mysql-connector-java + com.mysql + mysql-connector-j ${mysql-connector-java.version} runtime diff --git a/test/e2e/agent/plugins/tracing/jaeger/pom.xml b/test/e2e/agent/plugins/tracing/jaeger/pom.xml index 42bbd68685045..1d246a268cfc4 100644 --- a/test/e2e/agent/plugins/tracing/jaeger/pom.xml +++ b/test/e2e/agent/plugins/tracing/jaeger/pom.xml @@ -45,8 +45,8 @@ - mysql - mysql-connector-java + com.mysql + mysql-connector-j ${mysql-connector-java.version} runtime diff --git a/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/asserts/SpanAssert.java b/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/asserts/SpanAssert.java index 6fb4891370eb4..669c26699c9df 100644 --- a/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/asserts/SpanAssert.java +++ b/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/asserts/SpanAssert.java @@ -18,10 +18,9 @@ package org.apache.shardingsphere.test.e2e.agent.jaeger.asserts; import com.google.common.collect.ImmutableMap; -import com.google.gson.Gson; -import com.google.gson.reflect.TypeToken; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; +import org.apache.shardingsphere.infra.util.json.JsonUtils; import org.apache.shardingsphere.test.e2e.agent.common.util.OkHttpUtils; import org.apache.shardingsphere.test.e2e.agent.jaeger.cases.SpanTestCase; import org.apache.shardingsphere.test.e2e.agent.jaeger.cases.TagAssertion; @@ -73,7 +72,7 @@ private static void assertTagKey(final String baseUrl, final SpanTestCase expect private static void 
assertTagValue(final String baseUrl, final SpanTestCase expected, final TagAssertion expectedTagCase) { String urlWithParameter = String.format("%s/api/traces?service=%s&operation=%s&tags=%s&limit=%s", baseUrl, getEncodeValue(expected.getServiceName()), - getEncodeValue(expected.getSpanName()), getEncodeValue(new Gson().toJson(ImmutableMap.of(expectedTagCase.getTagKey(), expectedTagCase.getTagValue()))), 1000); + getEncodeValue(expected.getSpanName()), getEncodeValue(JsonUtils.toJsonString(ImmutableMap.of(expectedTagCase.getTagKey(), expectedTagCase.getTagValue()))), 1000); Collection traceResults = getTraceResults(urlWithParameter); assertFalse(traceResults.isEmpty(), String.format("The tag `%s`=`%s` does not exist in `%s` span", expectedTagCase.getTagKey(), expectedTagCase.getTagValue(), expected.getSpanName())); } @@ -85,8 +84,7 @@ private static String getEncodeValue(final String value) { @SneakyThrows(IOException.class) private static Collection getTraceResults(final String url) { - TraceResults result = new Gson().fromJson(OkHttpUtils.getInstance().get(url), new TypeToken() { - }.getType()); + TraceResults result = JsonUtils.fromJsonString(OkHttpUtils.getInstance().get(url), TraceResults.class); assertNotNull(result); return result.getData(); } diff --git a/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/result/JaegerTraceResult.java b/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/result/JaegerTraceResult.java index f8d6f6323eab4..21c2625abf01e 100644 --- a/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/result/JaegerTraceResult.java +++ b/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/result/JaegerTraceResult.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.test.e2e.agent.jaeger.result; -import com.google.gson.annotations.SerializedName; 
+import com.fasterxml.jackson.annotation.JsonProperty; import lombok.Getter; import lombok.Setter; import org.apache.shardingsphere.test.e2e.agent.common.result.JsonConfiguration; @@ -45,7 +45,7 @@ public final class JaegerTraceResult implements JsonConfiguration { @Setter public static final class JaegerTraceResultData { - @SerializedName("traceID") + @JsonProperty("traceID") private String traceId; private List spans; @@ -59,10 +59,10 @@ public static final class JaegerTraceResultData { @Setter public static final class Span { - @SerializedName("traceID") + @JsonProperty("traceID") private String traceId; - @SerializedName("spanID") + @JsonProperty("spanID") private String spanId; private int flags; @@ -79,7 +79,7 @@ public static final class Span { private List logs; - @SerializedName("processID") + @JsonProperty("processID") private String processId; private String warnings; diff --git a/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/result/SpanResult.java b/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/result/SpanResult.java index c5dce6d9e00f7..099a8d84e5cc6 100644 --- a/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/result/SpanResult.java +++ b/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/result/SpanResult.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.test.e2e.agent.jaeger.result; -import com.google.gson.annotations.SerializedName; +import com.fasterxml.jackson.annotation.JsonProperty; import lombok.Getter; import lombok.Setter; @@ -27,10 +27,10 @@ @Setter public final class SpanResult { - @SerializedName("traceID") + @JsonProperty("traceID") private String traceId; - @SerializedName("spanID") + @JsonProperty("spanID") private String spanId; private String operationName; diff --git 
a/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/result/TraceResult.java b/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/result/TraceResult.java index 9108d6ee82fd4..ecf5658572461 100644 --- a/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/result/TraceResult.java +++ b/test/e2e/agent/plugins/tracing/jaeger/src/test/java/org/apache/shardingsphere/test/e2e/agent/jaeger/result/TraceResult.java @@ -17,7 +17,7 @@ package org.apache.shardingsphere.test.e2e.agent.jaeger.result; -import com.google.gson.annotations.SerializedName; +import com.fasterxml.jackson.annotation.JsonProperty; import lombok.Getter; import lombok.Setter; @@ -27,7 +27,7 @@ @Setter public final class TraceResult { - @SerializedName("traceID") + @JsonProperty("traceID") private String traceId; private Collection spans; diff --git a/test/e2e/agent/plugins/tracing/zipkin/pom.xml b/test/e2e/agent/plugins/tracing/zipkin/pom.xml index 1b1c6b8881f48..f3854939e66cc 100644 --- a/test/e2e/agent/plugins/tracing/zipkin/pom.xml +++ b/test/e2e/agent/plugins/tracing/zipkin/pom.xml @@ -45,8 +45,8 @@ - mysql - mysql-connector-java + com.mysql + mysql-connector-j ${mysql-connector-java.version} runtime diff --git a/test/e2e/agent/plugins/tracing/zipkin/src/test/java/org/apache/shardingsphere/test/e2e/agent/zipkin/asserts/SpanAssert.java b/test/e2e/agent/plugins/tracing/zipkin/src/test/java/org/apache/shardingsphere/test/e2e/agent/zipkin/asserts/SpanAssert.java index df835994561fb..cdade204e63fd 100644 --- a/test/e2e/agent/plugins/tracing/zipkin/src/test/java/org/apache/shardingsphere/test/e2e/agent/zipkin/asserts/SpanAssert.java +++ b/test/e2e/agent/plugins/tracing/zipkin/src/test/java/org/apache/shardingsphere/test/e2e/agent/zipkin/asserts/SpanAssert.java @@ -17,9 +17,9 @@ package org.apache.shardingsphere.test.e2e.agent.zipkin.asserts; -import 
com.google.gson.Gson; -import com.google.gson.reflect.TypeToken; +import com.fasterxml.jackson.core.type.TypeReference; import lombok.SneakyThrows; +import org.apache.shardingsphere.infra.util.json.JsonUtils; import org.apache.shardingsphere.test.e2e.agent.common.util.OkHttpUtils; import org.apache.shardingsphere.test.e2e.agent.zipkin.cases.SpanTestCase; import org.apache.shardingsphere.test.e2e.agent.zipkin.cases.TagAssertion; @@ -77,8 +77,8 @@ private static String getEncodeValue(final String value) { @SneakyThrows(IOException.class) private static Collection getSpanResults(final SpanTestCase expected, final String url) { - List> result = new Gson().fromJson(OkHttpUtils.getInstance().get(url), new TypeToken>>() { - }.getType()); + List> result = JsonUtils.fromJsonString(OkHttpUtils.getInstance().get(url), new TypeReference>>() { + }); assertNotNull(result); return result.stream().findFirst().orElse(Collections.emptyList()).stream() .filter(each -> expected.getSpanName().equalsIgnoreCase(each.getName())).collect(Collectors.toList()); diff --git a/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/fixture/encrypt/JDBCEncryptAlgorithmFixture.java b/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/fixture/encrypt/JDBCEncryptAlgorithmFixture.java index 7b88cb7af6700..294bd23ef9ec2 100644 --- a/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/fixture/encrypt/JDBCEncryptAlgorithmFixture.java +++ b/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/fixture/encrypt/JDBCEncryptAlgorithmFixture.java @@ -17,10 +17,10 @@ package org.apache.shardingsphere.test.e2e.driver.fixture.encrypt; -import org.apache.shardingsphere.encrypt.api.encrypt.standard.StandardEncryptAlgorithm; import org.apache.shardingsphere.encrypt.api.context.EncryptContext; +import org.apache.shardingsphere.encrypt.api.encrypt.standard.StandardEncryptAlgorithm; -public final class JDBCEncryptAlgorithmFixture implements 
StandardEncryptAlgorithm { +public final class JDBCEncryptAlgorithmFixture implements StandardEncryptAlgorithm { @Override public String encrypt(final Object plainValue, final EncryptContext encryptContext) { @@ -28,7 +28,7 @@ public String encrypt(final Object plainValue, final EncryptContext encryptConte } @Override - public Object decrypt(final String cipherValue, final EncryptContext encryptContext) { + public Object decrypt(final Object cipherValue, final EncryptContext encryptContext) { return "decryptValue"; } diff --git a/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/fixture/encrypt/JDBCQueryAssistedEncryptAlgorithmFixture.java b/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/fixture/encrypt/JDBCQueryAssistedEncryptAlgorithmFixture.java index bcfc186e8c542..516fcb9419d04 100644 --- a/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/fixture/encrypt/JDBCQueryAssistedEncryptAlgorithmFixture.java +++ b/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/fixture/encrypt/JDBCQueryAssistedEncryptAlgorithmFixture.java @@ -20,7 +20,7 @@ import org.apache.shardingsphere.encrypt.api.context.EncryptContext; import org.apache.shardingsphere.encrypt.api.encrypt.assisted.AssistedEncryptAlgorithm; -public final class JDBCQueryAssistedEncryptAlgorithmFixture implements AssistedEncryptAlgorithm { +public final class JDBCQueryAssistedEncryptAlgorithmFixture implements AssistedEncryptAlgorithm { @Override public String encrypt(final Object plainValue, final EncryptContext encryptContext) { diff --git a/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/fixture/sharding/JDBCStandardShardingAlgorithmFixture.java b/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/fixture/sharding/JDBCStandardShardingAlgorithmFixture.java index 18fa50db85294..2f093265a1177 100644 --- 
a/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/fixture/sharding/JDBCStandardShardingAlgorithmFixture.java +++ b/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/fixture/sharding/JDBCStandardShardingAlgorithmFixture.java @@ -17,25 +17,15 @@ package org.apache.shardingsphere.test.e2e.driver.fixture.sharding; -import lombok.Getter; import org.apache.shardingsphere.sharding.api.sharding.standard.PreciseShardingValue; import org.apache.shardingsphere.sharding.api.sharding.standard.RangeShardingValue; import org.apache.shardingsphere.sharding.api.sharding.standard.StandardShardingAlgorithm; import java.util.Collection; import java.util.HashSet; -import java.util.Properties; -@Getter public final class JDBCStandardShardingAlgorithmFixture implements StandardShardingAlgorithm { - private Properties props; - - @Override - public void init(final Properties props) { - this.props = props; - } - @Override public String doSharding(final Collection availableTargetNames, final PreciseShardingValue shardingValue) { for (String each : availableTargetNames) { diff --git a/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/statement/ReadwriteSplittingPreparedStatementTest.java b/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/statement/ReadwriteSplittingPreparedStatementTest.java index 980fc90929ce3..d1bff293b10d8 100644 --- a/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/statement/ReadwriteSplittingPreparedStatementTest.java +++ b/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/statement/ReadwriteSplittingPreparedStatementTest.java @@ -115,4 +115,65 @@ void assertGetGeneratedKeysWithPrimaryKeyIsNullInTransactional() throws SQLExcep connection.commit(); } } + + @Test + void assertGetAutoGeneratedKeysAfterExecuteBatch() throws SQLException { + try ( + Connection connection = getReadwriteSplittingDataSource().getConnection(); + 
PreparedStatement preparedStatement = connection.prepareStatement("INSERT INTO t_config(status) VALUES(?);", Statement.RETURN_GENERATED_KEYS)) { + int batchSize = 4; + for (int i = 0; i < batchSize; i++) { + preparedStatement.setString(1, "BATCH"); + preparedStatement.addBatch(); + } + int[] result = preparedStatement.executeBatch(); + for (int each : result) { + assertThat(each, is(1)); + } + ResultSet generateKeyResultSet = preparedStatement.getGeneratedKeys(); + Object lastGeneratedId = null; + Object generatedId; + for (int i = 0; i < batchSize; i++) { + assertTrue(generateKeyResultSet.next()); + generatedId = generateKeyResultSet.getObject(1); + assertThat(generatedId, not(lastGeneratedId)); + lastGeneratedId = generatedId; + } + assertFalse(generateKeyResultSet.next()); + } + } + + @Test + void assertGetAutoGeneratedKeysAfterExecuteBatchMultiValues() throws SQLException { + try ( + Connection connection = getReadwriteSplittingDataSource().getConnection(); + PreparedStatement preparedStatement = connection.prepareStatement("INSERT INTO t_config(status) VALUES(?),(?);", Statement.RETURN_GENERATED_KEYS)) { + preparedStatement.setString(1, "BATCH"); + preparedStatement.setString(2, "BATCH"); + preparedStatement.addBatch(); + preparedStatement.setString(1, "BATCH"); + preparedStatement.setString(2, "BATCH"); + preparedStatement.addBatch(); + preparedStatement.setString(1, "BATCH"); + preparedStatement.setString(2, "BATCH"); + preparedStatement.addBatch(); + preparedStatement.setString(1, "BATCH"); + preparedStatement.setString(2, "BATCH"); + preparedStatement.addBatch(); + int[] result = preparedStatement.executeBatch(); + for (int each : result) { + assertThat(each, is(2)); + } + ResultSet generateKeyResultSet = preparedStatement.getGeneratedKeys(); + Object lastGeneratedId = null; + Object generatedId; + for (int i = 0; i < 8; i++) { + assertTrue(generateKeyResultSet.next()); + generatedId = generateKeyResultSet.getObject(1); + assertThat(generatedId, 
not(lastGeneratedId)); + lastGeneratedId = generatedId; + } + assertFalse(generateKeyResultSet.next()); + } + } } diff --git a/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/statement/ShardingPreparedStatementTest.java b/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/statement/ShardingPreparedStatementTest.java index 4e23b0f408770..0cb5c99e55a65 100644 --- a/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/statement/ShardingPreparedStatementTest.java +++ b/test/e2e/driver/src/test/java/org/apache/shardingsphere/test/e2e/driver/statement/ShardingPreparedStatementTest.java @@ -40,7 +40,7 @@ class ShardingPreparedStatementTest extends AbstractShardingDriverTest { private static final String INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL = "INSERT INTO t_user (name) VALUES (?),(?),(?),(?)"; - private static final String SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL = "SELECT name FROM t_user WHERE id=%dL"; + private static final String SELECT_FOR_INSERT_MULTI_VALUES_WITH_GENERATE_SHARDING_KEY_SQL = "SELECT name FROM t_user WHERE id=%d"; private static final String INSERT_WITH_GENERATE_KEY_SQL = "INSERT INTO t_order_item (item_id, order_id, user_id, status) VALUES (?, ?, ?, ?)"; diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/config/AdaptorContainerConfiguration.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/config/AdaptorContainerConfiguration.java index fb2175d174661..f49ce3b3e6f5a 100644 --- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/config/AdaptorContainerConfiguration.java +++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/config/AdaptorContainerConfiguration.java @@ -34,4 +34,6 @@ public final class AdaptorContainerConfiguration { private final Map mountedResources; private 
final String adapterContainerImage; + + private final String containerCommand; } diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/config/ProxyClusterContainerConfigurationFactory.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/config/ProxyClusterContainerConfigurationFactory.java index 77cd0254c6e4f..5cf7cecc50b29 100644 --- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/config/ProxyClusterContainerConfigurationFactory.java +++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/config/ProxyClusterContainerConfigurationFactory.java @@ -37,7 +37,7 @@ public final class ProxyClusterContainerConfigurationFactory { * @return created instance */ public static AdaptorContainerConfiguration newInstance() { - return new AdaptorContainerConfiguration("", getMountedResources(), AdapterContainerUtils.getAdapterContainerImage()); + return new AdaptorContainerConfiguration("", getMountedResources(), AdapterContainerUtils.getAdapterContainerImage(), ""); } private static Map getMountedResources() { diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/impl/ShardingSphereJdbcContainer.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/impl/ShardingSphereJdbcContainer.java index 96e0888439377..a6bb7709abb1e 100644 --- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/impl/ShardingSphereJdbcContainer.java +++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/impl/ShardingSphereJdbcContainer.java @@ -35,6 +35,7 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.sql.SQLException; +import java.util.Collections; import java.util.concurrent.atomic.AtomicReference; /** @@ -82,7 +83,7 @@ 
public DataSource getTargetDataSource(final String serverLists) { private DataSource createGovernanceClientDataSource(final String serverLists) { YamlRootConfiguration rootConfig = YamlEngine.unmarshal(new File(scenarioCommonPath.getRuleConfigurationFile(databaseType)), YamlRootConfiguration.class); rootConfig.setMode(createYamlModeConfiguration(serverLists)); - return YamlShardingSphereDataSourceFactory.createDataSource(storageContainer.getActualDataSourceMap(), YamlEngine.marshal(rootConfig).getBytes(StandardCharsets.UTF_8)); + return YamlShardingSphereDataSourceFactory.createDataSource(Collections.emptyMap(), YamlEngine.marshal(rootConfig).getBytes(StandardCharsets.UTF_8)); } private YamlModeConfiguration createYamlModeConfiguration(final String serverLists) { diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/impl/ShardingSphereProxyClusterContainer.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/impl/ShardingSphereProxyClusterContainer.java index 9dff96e80a5d2..1b6de75eb34cf 100644 --- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/impl/ShardingSphereProxyClusterContainer.java +++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/adapter/impl/ShardingSphereProxyClusterContainer.java @@ -17,6 +17,7 @@ package org.apache.shardingsphere.test.e2e.env.container.atomic.adapter.impl; +import com.google.common.base.Strings; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.test.e2e.env.container.atomic.DockerITContainer; import org.apache.shardingsphere.test.e2e.env.container.atomic.adapter.AdapterContainer; @@ -64,7 +65,11 @@ public ShardingSphereProxyClusterContainer withAgent(final String agentHome) { @Override protected void configure() { + if (!Strings.isNullOrEmpty(config.getContainerCommand())) { + 
setCommand(config.getContainerCommand()); + } withExposedPorts(3307, 33071, 3308); + addEnv("TZ", "UTC"); mountConfigurationFiles(); setWaitStrategy(new JdbcConnectionWaitStrategy(() -> DriverManager.getConnection(DataSourceEnvironment.getURL(databaseType, getHost(), getMappedPort(3307), config.getProxyDataSourceName()), ProxyContainerConstants.USERNAME, ProxyContainerConstants.PASSWORD))); diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/StorageContainerFactory.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/StorageContainerFactory.java index c89ebed56ff57..b4019af1311c4 100644 --- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/StorageContainerFactory.java +++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/StorageContainerFactory.java @@ -37,21 +37,21 @@ public final class StorageContainerFactory { * * @param databaseType database type * @param storageContainerImage storage container image - * @param storageContainerConfiguration storage container configuration + * @param storageContainerConfig storage container configuration * @return created instance * @throws RuntimeException runtime exception */ public static StorageContainer newInstance(final DatabaseType databaseType, final String storageContainerImage, - final StorageContainerConfiguration storageContainerConfiguration) { + final StorageContainerConfiguration storageContainerConfig) { switch (databaseType.getType()) { case "MySQL": - return new MySQLContainer(storageContainerImage, storageContainerConfiguration); + return new MySQLContainer(storageContainerImage, storageContainerConfig); case "PostgreSQL": - return new PostgreSQLContainer(storageContainerImage, storageContainerConfiguration); + return new PostgreSQLContainer(storageContainerImage, storageContainerConfig); case "openGauss": - return new 
OpenGaussContainer(storageContainerImage, storageContainerConfiguration); + return new OpenGaussContainer(storageContainerImage, storageContainerConfig); case "H2": - return new H2Container(storageContainerConfiguration); + return new H2Container(storageContainerConfig); default: throw new RuntimeException(String.format("Database `%s` is unknown.", databaseType.getType())); } diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/h2/H2ContainerConfigurationFactory.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/h2/H2ContainerConfigurationFactory.java index b547317629a73..9f53876b40302 100644 --- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/h2/H2ContainerConfigurationFactory.java +++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/h2/H2ContainerConfigurationFactory.java @@ -19,7 +19,8 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; -import org.apache.shardingsphere.infra.database.h2.type.H2DatabaseType; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.e2e.env.container.atomic.storage.config.StorageContainerConfiguration; import org.apache.shardingsphere.test.e2e.env.runtime.scenario.database.DatabaseEnvironmentManager; import org.apache.shardingsphere.test.e2e.env.runtime.scenario.path.ScenarioDataPath; @@ -55,8 +56,9 @@ public static StorageContainerConfiguration newInstance() { */ public static StorageContainerConfiguration newInstance(final String scenario) { Map mountedResources = new HashMap<>(2, 1F); - mountedResources.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.ACTUAL, new H2DatabaseType()) + "/01-actual-init.sql", 
"/docker-entrypoint-initdb.d/01-actual-init.sql"); - mountedResources.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.EXPECTED, new H2DatabaseType()) + "/01-expected-init.sql", + mountedResources.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.ACTUAL, TypedSPILoader.getService(DatabaseType.class, "H2")) + "/01-actual-init.sql", + "/docker-entrypoint-initdb.d/01-actual-init.sql"); + mountedResources.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.EXPECTED, TypedSPILoader.getService(DatabaseType.class, "H2")) + "/01-expected-init.sql", "/docker-entrypoint-initdb.d/01-expected-init.sql"); return new StorageContainerConfiguration(scenario, "", Collections.emptyMap(), mountedResources, DatabaseEnvironmentManager.getDatabaseNames(scenario), DatabaseEnvironmentManager.getExpectedDatabaseNames(scenario)); diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/mysql/MySQLContainerConfigurationFactory.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/mysql/MySQLContainerConfigurationFactory.java index cde84baa83796..275426e939b32 100644 --- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/mysql/MySQLContainerConfigurationFactory.java +++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/mysql/MySQLContainerConfigurationFactory.java @@ -19,7 +19,8 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; -import org.apache.shardingsphere.infra.database.mysql.type.MySQLDatabaseType; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.e2e.env.container.atomic.storage.config.StorageContainerConfiguration; import 
org.apache.shardingsphere.test.e2e.env.container.atomic.storage.impl.MySQLContainer; import org.apache.shardingsphere.test.e2e.env.container.atomic.util.ContainerUtils; @@ -81,8 +82,9 @@ private static Map getMountedResources(final int majorVersion) { private static Map getMountedResources(final String scenario) { Map result = new HashMap<>(3, 1F); - result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.ACTUAL, new MySQLDatabaseType()) + "/01-actual-init.sql", "/docker-entrypoint-initdb.d/01-actual-init.sql"); - result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.EXPECTED, new MySQLDatabaseType()) + "/01-expected-init.sql", + result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.ACTUAL, TypedSPILoader.getService(DatabaseType.class, "MySQL")) + "/01-actual-init.sql", + "/docker-entrypoint-initdb.d/01-actual-init.sql"); + result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.EXPECTED, TypedSPILoader.getService(DatabaseType.class, "MySQL")) + "/01-expected-init.sql", "/docker-entrypoint-initdb.d/01-expected-init.sql"); result.put("/env/mysql/my.cnf", MySQLContainer.MYSQL_CONF_IN_CONTAINER); return result; diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/opengauss/OpenGaussContainerConfigurationFactory.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/opengauss/OpenGaussContainerConfigurationFactory.java index eec9284b10b2f..c742469668368 100644 --- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/opengauss/OpenGaussContainerConfigurationFactory.java +++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/opengauss/OpenGaussContainerConfigurationFactory.java @@ -19,7 +19,8 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; -import 
org.apache.shardingsphere.infra.database.opengauss.type.OpenGaussDatabaseType; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.e2e.env.container.atomic.constants.StorageContainerConstants; import org.apache.shardingsphere.test.e2e.env.container.atomic.storage.config.StorageContainerConfiguration; import org.apache.shardingsphere.test.e2e.env.container.atomic.storage.impl.OpenGaussContainer; @@ -76,8 +77,9 @@ private static Map getMountedResources() { private static Map getMountedResources(final String scenario) { Map result = new HashMap<>(4, 1F); - result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.ACTUAL, new OpenGaussDatabaseType()) + "/01-actual-init.sql", "/docker-entrypoint-initdb.d/01-actual-init.sql"); - result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.EXPECTED, new OpenGaussDatabaseType()) + "/01-expected-init.sql", + result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.ACTUAL, TypedSPILoader.getService(DatabaseType.class, "openGauss")) + "/01-actual-init.sql", + "/docker-entrypoint-initdb.d/01-actual-init.sql"); + result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.EXPECTED, TypedSPILoader.getService(DatabaseType.class, "openGauss")) + "/01-expected-init.sql", "/docker-entrypoint-initdb.d/01-expected-init.sql"); result.put("/env/postgresql/postgresql.conf", OpenGaussContainer.OPENGAUSS_CONF_IN_CONTAINER); result.put("/env/opengauss/pg_hba.conf", OpenGaussContainer.OPENGAUSS_HBA_IN_CONF_CONTAINER); diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/postgresql/PostgreSQLContainerConfigurationFactory.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/postgresql/PostgreSQLContainerConfigurationFactory.java index 
a7158913535c7..576ef875066a1 100644 --- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/postgresql/PostgreSQLContainerConfigurationFactory.java +++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/config/impl/postgresql/PostgreSQLContainerConfigurationFactory.java @@ -19,7 +19,8 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; -import org.apache.shardingsphere.infra.database.postgresql.type.PostgreSQLDatabaseType; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.e2e.env.container.atomic.constants.StorageContainerConstants; import org.apache.shardingsphere.test.e2e.env.container.atomic.storage.config.StorageContainerConfiguration; import org.apache.shardingsphere.test.e2e.env.container.atomic.storage.impl.PostgreSQLContainer; @@ -75,9 +76,9 @@ private static Map getMountedResources() { private static Map getMountedResources(final String scenario) { Map result = new HashMap<>(3, 1F); - result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.ACTUAL, new PostgreSQLDatabaseType()) + "/01-actual-init.sql", + result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.ACTUAL, TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")) + "/01-actual-init.sql", "/docker-entrypoint-initdb.d/01-actual-init.sql"); - result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.EXPECTED, new PostgreSQLDatabaseType()) + "/01-expected-init.sql", + result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.EXPECTED, TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")) + "/01-expected-init.sql", "/docker-entrypoint-initdb.d/01-expected-init.sql"); result.put("/env/postgresql/postgresql.conf", PostgreSQLContainer.POSTGRESQL_CONF_IN_CONTAINER); return result; diff --git 
a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/impl/H2Container.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/impl/H2Container.java index 7bdd5934f006d..9282f4fe141a7 100644 --- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/impl/H2Container.java +++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/storage/impl/H2Container.java @@ -41,9 +41,9 @@ public final class H2Container extends EmbeddedStorageContainer { private final ScenarioDataPath scenarioDataPath; - public H2Container(final StorageContainerConfiguration storageContainerConfiguration) { - super(TypedSPILoader.getService(DatabaseType.class, "H2"), storageContainerConfiguration.getScenario()); - scenarioDataPath = new ScenarioDataPath(storageContainerConfiguration.getScenario()); + public H2Container(final StorageContainerConfiguration storageContainerConfig) { + super(TypedSPILoader.getService(DatabaseType.class, "H2"), storageContainerConfig.getScenario()); + scenarioDataPath = new ScenarioDataPath(storageContainerConfig.getScenario()); } @Override diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/util/DatabaseVersionParser.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/util/DatabaseVersionParser.java index 98d088c47022b..6c1ce6dd9b039 100644 --- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/util/DatabaseVersionParser.java +++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/atomic/util/DatabaseVersionParser.java @@ -17,9 +17,9 @@ package org.apache.shardingsphere.test.e2e.env.container.atomic.util; +import com.google.common.base.Strings; import lombok.AccessLevel; import lombok.NoArgsConstructor; -import org.apache.commons.lang.StringUtils; /** * Database version parser. 
@@ -34,6 +34,6 @@ public final class DatabaseVersionParser { * @return major version */ public static String parseMajorVersion(final String storageContainerImage) { - return StringUtils.isBlank(storageContainerImage) ? "" : storageContainerImage.substring(storageContainerImage.indexOf(':') + 1, storageContainerImage.indexOf('.')); + return Strings.isNullOrEmpty(storageContainerImage) ? "" : storageContainerImage.substring(storageContainerImage.indexOf(':') + 1, storageContainerImage.indexOf('.')); } } diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/wait/JdbcConnectionWaitStrategy.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/wait/JdbcConnectionWaitStrategy.java index 1b5d4130c95bd..a989341858abc 100644 --- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/wait/JdbcConnectionWaitStrategy.java +++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/wait/JdbcConnectionWaitStrategy.java @@ -19,8 +19,8 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import org.rnorth.ducttape.unreliables.Unreliables; import org.testcontainers.containers.wait.strategy.AbstractWaitStrategy; +import org.testcontainers.shaded.org.awaitility.Awaitility; import java.sql.Connection; import java.util.concurrent.Callable; @@ -37,19 +37,13 @@ public final class JdbcConnectionWaitStrategy extends AbstractWaitStrategy { @Override protected void waitUntilReady() { - Unreliables.retryUntilSuccess((int) startupTimeout.getSeconds(), TimeUnit.SECONDS, this::mockRateLimiter); + Awaitility.await().ignoreExceptions().atMost(startupTimeout.getSeconds(), TimeUnit.SECONDS).pollInterval(1L, TimeUnit.SECONDS).until(this::checkConnection); } - private boolean mockRateLimiter() { - getRateLimiter().doWhenReady(() -> { - try (Connection ignored = connectionSupplier.call()) { - log.info("Container ready."); - // CHECKSTYLE:OFF - } catch (final Exception ex) 
{ - // CHECKSTYLE:ON - throw new RuntimeException("Not Ready yet.", ex); - } - }); - return true; + private boolean checkConnection() throws Exception { + try (Connection ignored = connectionSupplier.call()) { + log.info("Container ready."); + return true; + } } } diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/runtime/DataSourceEnvironment.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/runtime/DataSourceEnvironment.java index 865371fc292b0..6b02fb4a84b46 100644 --- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/runtime/DataSourceEnvironment.java +++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/runtime/DataSourceEnvironment.java @@ -39,10 +39,6 @@ public static String getDriverClassName(final DatabaseType databaseType) { return "com.mysql.jdbc.Driver"; case "PostgreSQL": return "org.postgresql.Driver"; - case "SQLServer": - return "com.microsoft.sqlserver.jdbc.SQLServerDriver"; - case "Oracle": - return "oracle.jdbc.driver.OracleDriver"; case "openGauss": return "org.opengauss.Driver"; default: @@ -68,10 +64,6 @@ public static String getURL(final DatabaseType databaseType, final String host, + "&useServerPrepStmts=true&serverTimezone=UTC&useLocalSessionState=true&characterEncoding=utf-8", host, port); case "PostgreSQL": return String.format("jdbc:postgresql://%s:%s/?ssl=on&sslmode=prefer", host, port); - case "SQLServer": - return String.format("jdbc:sqlserver://%s:%s", host, port); - case "Oracle": - return String.format("jdbc:oracle:thin:@%s:%s", host, port); case "openGauss": return String.format("jdbc:opengauss://%s:%s/", host, port); default: @@ -100,10 +92,6 @@ public static String getURL(final DatabaseType databaseType, final String host, host, port, dataSourceName); case "PostgreSQL": return String.format("jdbc:postgresql://%s:%s/%s?ssl=on&sslmode=prefer", host, port, dataSourceName); - case "SQLServer": - return 
String.format("jdbc:sqlserver://%s:%s;DatabaseName=%s", host, port, dataSourceName); - case "Oracle": - return String.format("jdbc:oracle:thin:@%s:%s/%s", host, port, dataSourceName); case "openGauss": return String.format("jdbc:opengauss://%s:%s/%s?batchMode=OFF", host, port, dataSourceName); default: diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/runtime/IntegrationTestEnvironment.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/runtime/IntegrationTestEnvironment.java index cfacdf51297b6..755a9d656f3fe 100644 --- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/runtime/IntegrationTestEnvironment.java +++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/runtime/IntegrationTestEnvironment.java @@ -26,6 +26,7 @@ import java.io.InputStream; import java.util.Collection; import java.util.Properties; +import java.util.TimeZone; /** * Integration test environment. @@ -47,6 +48,7 @@ private IntegrationTestEnvironment() { Properties props = loadProperties(); runModes = Splitter.on(",").trimResults().splitToList(props.getProperty("it.run.modes")); runAdditionalTestCases = Boolean.parseBoolean(props.getProperty("it.run.additional.cases")); + TimeZone.setDefault(TimeZone.getTimeZone(props.getProperty("it.timezone", "UTC"))); scenarios = getScenarios(props); clusterEnvironment = new ClusterEnvironment(props); } diff --git a/test/e2e/fixture/Dockerfile b/test/e2e/fixture/Dockerfile index c521b6c1ec1ca..1c534d337cf68 100644 --- a/test/e2e/fixture/Dockerfile +++ b/test/e2e/fixture/Dockerfile @@ -23,10 +23,6 @@ RUN mv /opt/${APP_NAME} /opt/shardingsphere-proxy WORKDIR /opt/shardingsphere-proxy -RUN cat bin/start.sh | tr -d '\r' > _start.sh && mv _start.sh bin/start.sh -RUN cat bin/stop.sh | tr -d '\r' > _stop.sh && mv _stop.sh bin/stop.sh -RUN chmod +x -R ./bin - FROM eclipse-temurin:8-jdk COPY --from=prepare /opt/shardingsphere-proxy /opt/shardingsphere-proxy @@ -34,4 +30,4 @@ COPY 
--from=prepare /opt/shardingsphere-proxy /opt/shardingsphere-proxy EXPOSE 3307 EXPOSE 3308 -CMD /opt/shardingsphere-proxy/bin/start.sh && tail -f /opt/shardingsphere-proxy/logs/stdout.log +ENTRYPOINT ["/opt/shardingsphere-proxy/bin/start.sh"] diff --git a/test/e2e/fixture/src/test/assembly/bin/start.sh b/test/e2e/fixture/src/test/assembly/bin/start.sh old mode 100644 new mode 100755 index e5d03c626e956..e817a354f17e8 --- a/test/e2e/fixture/src/test/assembly/bin/start.sh +++ b/test/e2e/fixture/src/test/assembly/bin/start.sh @@ -27,7 +27,6 @@ if [ ! -d ${LOGS_DIR} ]; then mkdir ${LOGS_DIR} fi -STDOUT_FILE=${LOGS_DIR}/stdout.log EXT_LIB=${DEPLOY_DIR}/ext-lib CLASS_PATH=.:${DEPLOY_DIR}/lib/*:${EXT_LIB}/* @@ -38,10 +37,17 @@ JAVA_MEM_OPTS=" -server -Xmx2g -Xms2g -Xmn1g -Xss1m -XX:+DisableExplicitGC -XX:+ MAIN_CLASS=org.apache.shardingsphere.proxy.Bootstrap +unset -v PORT +unset -v ADDRESSES +unset -v CONF_PATH +unset -v FORCE +unset -v SOCKET_FILE + print_usage() { echo "usage: start.sh [port] [config_dir]" echo " port: proxy listen port, default is 3307" echo " config_dir: proxy config directory, default is conf" + echo "-f Force start ShardingSphere-Proxy" exit 0 } @@ -55,21 +61,66 @@ if [ $# == 0 ]; then CLASS_PATH=${DEPLOY_DIR}/conf:${CLASS_PATH} fi -if [ $# == 1 ]; then - MAIN_CLASS=${MAIN_CLASS}" "$1 +if [[ $1 == -a ]] || [[ $1 == -p ]] || [[ $1 == -c ]] || [[ $1 == -f ]] || [[ $1 == -s ]]; then + while getopts ":a:p:c:fs:" opt + do + case $opt in + a) + echo "The address is $OPTARG" + ADDRESSES=$OPTARG;; + p) + echo "The port is $OPTARG" + PORT=$OPTARG;; + c) + echo "The configuration path is $OPTARG" + CONF_PATH=$OPTARG;; + f) + echo "The force param is true" + FORCE=true;; + s) + echo "The socket file is $OPTARG" + SOCKET_FILE=$OPTARG;; + ?) 
+ print_usage;; + esac + done + +elif [ $# == 1 ]; then + PORT=$1 echo "The port is $1" - CLASS_PATH=${DEPLOY_DIR}/conf:${CLASS_PATH} fi if [ $# == 2 ]; then - MAIN_CLASS=${MAIN_CLASS}" "$1" "$2 + PORT=$1 + CONF_PATH=$2 echo "The port is $1" echo "The configuration path is $DEPLOY_DIR/$2" - CLASS_PATH=${DEPLOY_DIR}/$2:${CLASS_PATH} fi +if [ -z "$CONF_PATH" ]; then + CONF_PATH=${DEPLOY_DIR}/conf +fi + +if [ -z "$PORT" ]; then + PORT=-1 +fi + +if [ -z "$ADDRESSES" ]; then + ADDRESSES="0.0.0.0" +fi + +if [ -z "$FORCE" ]; then + FORCE=false +fi + +if [ "$SOCKET_FILE" ]; then + ADDRESSES="${ADDRESSES},${SOCKET_FILE}" +fi + +CLASS_PATH=${CONF_PATH}:${CLASS_PATH} +MAIN_CLASS="${MAIN_CLASS} ${PORT} ${CONF_PATH} ${ADDRESSES} ${FORCE}" + echo "The classpath is ${CLASS_PATH}" +echo "main class ${MAIN_CLASS}" -nohup java ${JAVA_OPTS} ${JAVA_MEM_OPTS} -classpath ${CLASS_PATH} ${MAIN_CLASS} >> ${STDOUT_FILE} 2>&1 & -sleep 1 -echo "Please check the STDOUT file: $STDOUT_FILE" +exec java ${JAVA_OPTS} ${JAVA_MEM_OPTS} -classpath ${CLASS_PATH} ${MAIN_CLASS} diff --git a/test/e2e/fixture/src/test/assembly/bin/stop.sh b/test/e2e/fixture/src/test/assembly/bin/stop.sh old mode 100644 new mode 100755 diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/PipelineContainerComposer.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/PipelineContainerComposer.java index d730ccf823c96..7095b587f94e7 100644 --- a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/PipelineContainerComposer.java +++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/PipelineContainerComposer.java @@ -17,6 +17,7 @@ package org.apache.shardingsphere.test.e2e.data.pipeline.cases; +import com.google.common.base.Splitter; import com.google.common.base.Strings; import lombok.Getter; import lombok.SneakyThrows; @@ -198,7 
+199,8 @@ private void cleanUpDataSource() { return; } for (String each : Arrays.asList(DS_0, DS_1, DS_2, DS_3, DS_4)) { - containerComposer.cleanUpDatabase(each); + String databaseName = databaseType instanceof OracleDatabaseType ? each.toUpperCase() : each; + containerComposer.cleanUpDatabase(databaseName); } } @@ -226,11 +228,12 @@ public String appendExtraParameter(final String jdbcUrl) { * @throws SQLException SQL exception */ public void registerStorageUnit(final String storageUnitName) throws SQLException { + String username = getDatabaseType() instanceof OracleDatabaseType ? storageUnitName : getUsername(); String registerStorageUnitTemplate = "REGISTER STORAGE UNIT ${ds} ( URL='${url}', USER='${user}', PASSWORD='${password}')".replace("${ds}", storageUnitName) - .replace("${user}", getUsername()) + .replace("${user}", username) .replace("${password}", getPassword()) .replace("${url}", getActualJdbcUrlTemplate(storageUnitName, true)); - proxyExecuteWithLog(registerStorageUnitTemplate, 1); + proxyExecuteWithLog(registerStorageUnitTemplate, 0); Awaitility.await().ignoreExceptions().atMost(10, TimeUnit.SECONDS).pollInterval(1, TimeUnit.SECONDS).until(() -> showStorageUnitsName().contains(storageUnitName)); } @@ -369,9 +372,12 @@ public void sourceExecuteWithLog(final String sql) throws SQLException { * @throws SQLException SQL exception */ public void proxyExecuteWithLog(final String sql, final int sleepSeconds) throws SQLException { - log.info("proxy execute :{}", sql); + log.info("proxy execute: {}", sql); + List sqlList = Splitter.on(";").trimResults().omitEmptyStrings().splitToList(sql); try (Connection connection = proxyDataSource.getConnection()) { - connection.createStatement().execute(sql); + for (String each : sqlList) { + connection.createStatement().execute(each); + } } Awaitility.await().pollDelay(Math.max(sleepSeconds, 0L), TimeUnit.SECONDS).until(() -> true); } @@ -387,7 +393,9 @@ public void waitJobPrepareSuccess(final String distSQL) { Set 
statusSet = jobStatus.stream().map(each -> String.valueOf(each.get("status"))).collect(Collectors.toSet()); if (statusSet.contains(JobStatus.PREPARING.name()) || statusSet.contains(JobStatus.RUNNING.name())) { Awaitility.await().pollDelay(2L, TimeUnit.SECONDS).until(() -> true); + continue; } + break; } } diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/cdc/CDCE2EIT.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/cdc/CDCE2EIT.java index 1067b002e08fd..a54c866caea91 100644 --- a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/cdc/CDCE2EIT.java +++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/cdc/CDCE2EIT.java @@ -20,9 +20,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.tuple.Pair; import org.apache.shardingsphere.data.pipeline.api.datasource.config.impl.StandardPipelineDataSourceConfiguration; -import org.apache.shardingsphere.data.pipeline.api.metadata.SchemaName; import org.apache.shardingsphere.data.pipeline.api.metadata.SchemaTableName; -import org.apache.shardingsphere.data.pipeline.api.metadata.TableName; import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineColumnMetaData; import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineTableMetaData; import org.apache.shardingsphere.data.pipeline.cdc.api.job.type.CDCJobType; @@ -36,9 +34,9 @@ import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceWrapper; import org.apache.shardingsphere.data.pipeline.common.metadata.loader.StandardPipelineTableMetaDataLoader; import org.apache.shardingsphere.data.pipeline.core.consistencycheck.ConsistencyCheckJobItemProgressContext; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.SingleTableInventoryDataConsistencyChecker; -import 
org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataMatchDataConsistencyCalculateAlgorithm; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyChecker; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableInventoryCheckParameter; import org.apache.shardingsphere.infra.database.core.metadata.database.DialectDatabaseMetaData; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry; @@ -68,9 +66,11 @@ import java.sql.SQLException; import java.time.LocalDateTime; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Properties; import java.util.TimeZone; import java.util.concurrent.TimeUnit; @@ -138,13 +138,13 @@ void assertCDCDataImportSuccess(final PipelineTestParameter testParam) throws SQ Awaitility.await().atMost(20L, TimeUnit.SECONDS).pollInterval(2L, TimeUnit.SECONDS) .until(() -> listOrderRecords(containerComposer, getOrderTableNameWithSchema(dialectDatabaseMetaData)).size() == actualProxyList.size()); SchemaTableName orderSchemaTableName = dialectDatabaseMetaData.isSchemaAvailable() - ? new SchemaTableName(new SchemaName(PipelineContainerComposer.SCHEMA_NAME), new TableName(SOURCE_TABLE_NAME)) - : new SchemaTableName(new SchemaName(null), new TableName(SOURCE_TABLE_NAME)); + ? 
new SchemaTableName(PipelineContainerComposer.SCHEMA_NAME, SOURCE_TABLE_NAME) + : new SchemaTableName(null, SOURCE_TABLE_NAME); PipelineDataSourceWrapper sourceDataSource = new PipelineDataSourceWrapper(jdbcDataSource, containerComposer.getDatabaseType()); PipelineDataSourceWrapper targetDataSource = new PipelineDataSourceWrapper(createStandardDataSource(containerComposer, PipelineContainerComposer.DS_4), containerComposer.getDatabaseType()); assertDataMatched(sourceDataSource, targetDataSource, orderSchemaTableName); - assertDataMatched(sourceDataSource, targetDataSource, new SchemaTableName(new SchemaName(null), new TableName("t_address"))); + assertDataMatched(sourceDataSource, targetDataSource, new SchemaTableName(null, "t_address")); containerComposer.proxyExecuteWithLog(String.format("DROP STREAMING '%s'", jobId), 0); assertTrue(containerComposer.queryForListWithLog("SHOW STREAMING LIST").isEmpty()); } @@ -200,11 +200,12 @@ private String getOrderTableNameWithSchema(final DialectDatabaseMetaData dialect private void assertDataMatched(final PipelineDataSourceWrapper sourceDataSource, final PipelineDataSourceWrapper targetDataSource, final SchemaTableName schemaTableName) { StandardPipelineTableMetaDataLoader metaDataLoader = new StandardPipelineTableMetaDataLoader(targetDataSource); PipelineTableMetaData tableMetaData = metaDataLoader.getTableMetaData(schemaTableName.getSchemaName().getOriginal(), schemaTableName.getTableName().getOriginal()); - PipelineColumnMetaData primaryKeyMetaData = tableMetaData.getColumnMetaData(tableMetaData.getPrimaryKeyColumns().get(0)); - ConsistencyCheckJobItemProgressContext progressContext = new ConsistencyCheckJobItemProgressContext("", 0); - SingleTableInventoryDataConsistencyChecker checker = new SingleTableInventoryDataConsistencyChecker("", sourceDataSource, targetDataSource, schemaTableName, schemaTableName, - tableMetaData.getColumnNames(), primaryKeyMetaData, null, progressContext); - DataConsistencyCheckResult 
checkResult = checker.check(new DataMatchDataConsistencyCalculateAlgorithm()); + List uniqueKeys = Collections.singletonList(tableMetaData.getColumnMetaData(tableMetaData.getPrimaryKeyColumns().get(0))); + ConsistencyCheckJobItemProgressContext progressContext = new ConsistencyCheckJobItemProgressContext("", 0, sourceDataSource.getDatabaseType().getType()); + TableInventoryCheckParameter param = new TableInventoryCheckParameter("", sourceDataSource, targetDataSource, schemaTableName, schemaTableName, + tableMetaData.getColumnNames(), uniqueKeys, null, progressContext); + TableDataConsistencyChecker tableChecker = TypedSPILoader.getService(TableDataConsistencyChecker.class, "DATA_MATCH", new Properties()); + TableDataConsistencyCheckResult checkResult = tableChecker.buildTableInventoryChecker(param).checkSingleTableInventoryData(); assertTrue(checkResult.isMatched()); } diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/migration/AbstractMigrationE2EIT.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/migration/AbstractMigrationE2EIT.java index 0819ab249d242..470f46434a149 100644 --- a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/migration/AbstractMigrationE2EIT.java +++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/migration/AbstractMigrationE2EIT.java @@ -35,6 +35,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Properties; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; @@ -150,7 +151,11 @@ protected String getJobIdByTableName(final PipelineContainerComposer containerCo } protected void assertCheckMigrationSuccess(final PipelineContainerComposer containerComposer, final String jobId, final String algorithmType) throws SQLException { - 
containerComposer.proxyExecuteWithLog(String.format("CHECK MIGRATION '%s' BY TYPE (NAME='%s')", jobId, algorithmType), 0); + assertCheckMigrationSuccess(containerComposer, jobId, algorithmType, new Properties()); + } + + protected void assertCheckMigrationSuccess(final PipelineContainerComposer containerComposer, final String jobId, final String algorithmType, final Properties algorithmProps) throws SQLException { + containerComposer.proxyExecuteWithLog(buildConsistencyCheckDistSQL(jobId, algorithmType, algorithmProps), 0); // TODO Need to add after the stop then to start, can continue the consistency check from the previous progress List> resultList = Collections.emptyList(); for (int i = 0; i < 30; i++) { @@ -160,7 +165,7 @@ protected void assertCheckMigrationSuccess(final PipelineContainerComposer conta continue; } List checkEndTimeList = resultList.stream().map(map -> map.get("check_end_time").toString()).filter(each -> !Strings.isNullOrEmpty(each)).collect(Collectors.toList()); - Set finishedPercentages = resultList.stream().map(map -> map.get("finished_percentage").toString()).collect(Collectors.toSet()); + Set finishedPercentages = resultList.stream().map(map -> map.get("inventory_finished_percentage").toString()).collect(Collectors.toSet()); if (checkEndTimeList.size() == resultList.size() && 1 == finishedPercentages.size() && finishedPercentages.contains("100")) { break; } else { @@ -171,7 +176,17 @@ protected void assertCheckMigrationSuccess(final PipelineContainerComposer conta assertFalse(resultList.isEmpty()); for (Map each : resultList) { assertTrue(Boolean.parseBoolean(each.get("result").toString()), String.format("%s check result is false", each.get("tables"))); - assertThat("finished_percentage is not 100", each.get("finished_percentage").toString(), is("100")); + assertThat("inventory_finished_percentage is not 100", each.get("inventory_finished_percentage").toString(), is("100")); + } + } + + private String buildConsistencyCheckDistSQL(final 
String jobId, final String algorithmType, final Properties algorithmProps) { + if (null == algorithmProps || algorithmProps.isEmpty()) { + return String.format("CHECK MIGRATION '%s' BY TYPE (NAME='%s')", jobId, algorithmType); } + String sql = "CHECK MIGRATION '%s' BY TYPE (NAME='%s', PROPERTIES(" + + algorithmProps.entrySet().stream().map(entry -> String.format("'%s'='%s'", entry.getKey(), entry.getValue())).collect(Collectors.joining(",")) + + "))"; + return String.format(sql, jobId, algorithmType); } } diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/migration/general/MySQLMigrationGeneralE2EIT.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/migration/general/MySQLMigrationGeneralE2EIT.java index 37f9b4c62948b..e48d3d4c57287 100644 --- a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/migration/general/MySQLMigrationGeneralE2EIT.java +++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/migration/general/MySQLMigrationGeneralE2EIT.java @@ -43,6 +43,7 @@ import java.time.LocalDateTime; import java.util.List; import java.util.Map; +import java.util.Properties; import java.util.concurrent.TimeUnit; import static org.hamcrest.CoreMatchers.is; @@ -83,31 +84,34 @@ void assertMigrationSuccess(final PipelineTestParameter testParam) throws SQLExc new E2EIncrementalTask(containerComposer.getSourceDataSource(), SOURCE_TABLE_NAME, new SnowflakeKeyGenerateAlgorithm(), containerComposer.getDatabaseType(), 30)); TimeUnit.SECONDS.timedJoin(containerComposer.getIncreaseTaskThread(), 30); containerComposer.sourceExecuteWithLog(String.format("INSERT INTO %s (order_id, user_id, status) VALUES (10000, 1, 'OK')", SOURCE_TABLE_NAME)); + containerComposer.sourceExecuteWithLog("INSERT INTO t_order_item (item_id, order_id, user_id, status) VALUES (10000, 10000, 1, 'OK')"); 
stopMigrationByJobId(containerComposer, orderJobId); startMigrationByJobId(containerComposer, orderJobId); DataSource jdbcDataSource = containerComposer.generateShardingSphereDataSourceFromProxy(); containerComposer.assertOrderRecordExist(jdbcDataSource, "t_order", 10000); - assertMigrationSuccessById(containerComposer, orderJobId, "DATA_MATCH"); + containerComposer.assertOrderRecordExist(jdbcDataSource, "t_order_item", 10000); + Properties algorithmProps = new Properties(); + algorithmProps.setProperty("chunk-size", "300"); + assertMigrationSuccessById(containerComposer, orderJobId, "DATA_MATCH", algorithmProps); String orderItemJobId = getJobIdByTableName(containerComposer, "ds_0.t_order_item"); - assertMigrationSuccessById(containerComposer, orderItemJobId, "DATA_MATCH"); + assertMigrationSuccessById(containerComposer, orderItemJobId, "DATA_MATCH", algorithmProps); Awaitility.await().pollDelay(2L, TimeUnit.SECONDS).until(() -> true); - assertMigrationSuccessById(containerComposer, orderItemJobId, "CRC32_MATCH"); + assertMigrationSuccessById(containerComposer, orderItemJobId, "CRC32_MATCH", new Properties()); for (String each : listJobId(containerComposer)) { commitMigrationByJobId(containerComposer, each); } - List lastJobIds = listJobId(containerComposer); - assertTrue(lastJobIds.isEmpty()); + assertTrue(listJobId(containerComposer).isEmpty()); containerComposer.assertGreaterThanOrderTableInitRows(jdbcDataSource, PipelineContainerComposer.TABLE_INIT_ROW_COUNT, ""); } } - private void assertMigrationSuccessById(final PipelineContainerComposer containerComposer, final String jobId, final String algorithmType) throws SQLException { + private void assertMigrationSuccessById(final PipelineContainerComposer containerComposer, final String jobId, final String algorithmType, final Properties algorithmProps) throws SQLException { List> jobStatus = containerComposer.waitIncrementTaskFinished(String.format("SHOW MIGRATION STATUS '%s'", jobId)); for (Map each : jobStatus) 
{ assertTrue(Integer.parseInt(each.get("processed_records_count").toString()) > 0); assertThat(Integer.parseInt(each.get("inventory_finished_percentage").toString()), is(100)); } - assertCheckMigrationSuccess(containerComposer, jobId, algorithmType); + assertCheckMigrationSuccess(containerComposer, jobId, algorithmType, algorithmProps); } private static boolean isEnabled() { diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/migration/primarykey/IndexesMigrationE2EIT.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/migration/primarykey/IndexesMigrationE2EIT.java index 1fadda3e07359..d4b01eee70576 100644 --- a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/migration/primarykey/IndexesMigrationE2EIT.java +++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/migration/primarykey/IndexesMigrationE2EIT.java @@ -20,12 +20,11 @@ import lombok.SneakyThrows; import org.apache.commons.codec.binary.Hex; import org.apache.shardingsphere.data.pipeline.scenario.migration.MigrationJobType; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.database.mysql.type.MySQLDatabaseType; import org.apache.shardingsphere.infra.database.postgresql.type.PostgreSQLDatabaseType; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.exception.core.external.sql.type.wrapper.SQLWrapperException; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; -import org.apache.shardingsphere.sharding.algorithm.keygen.SnowflakeKeyGenerateAlgorithm; import org.apache.shardingsphere.sharding.algorithm.keygen.UUIDKeyGenerateAlgorithm; import org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm; import 
org.apache.shardingsphere.test.e2e.data.pipeline.cases.PipelineContainerComposer; @@ -36,6 +35,7 @@ import org.apache.shardingsphere.test.e2e.data.pipeline.framework.param.PipelineE2ESettings.PipelineE2EDatabaseSettings; import org.apache.shardingsphere.test.e2e.data.pipeline.framework.param.PipelineE2ETestCaseArgumentsProvider; import org.apache.shardingsphere.test.e2e.data.pipeline.framework.param.PipelineTestParameter; +import org.apache.shardingsphere.test.e2e.data.pipeline.util.AutoIncrementKeyGenerateAlgorithm; import org.junit.jupiter.api.condition.EnabledIf; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ArgumentsSource; @@ -193,7 +193,7 @@ void assertMultiUniqueKeyMigrationSuccess(final PipelineTestParameter testParam) } else { return; } - KeyGenerateAlgorithm keyGenerateAlgorithm = new SnowflakeKeyGenerateAlgorithm(); + KeyGenerateAlgorithm keyGenerateAlgorithm = new AutoIncrementKeyGenerateAlgorithm(); Object uniqueKey = keyGenerateAlgorithm.generateKey(); assertMigrationSuccess(containerComposer, sql, "user_id", keyGenerateAlgorithm, consistencyCheckAlgorithmType, dataSource -> { insertOneOrder(containerComposer, uniqueKey); diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/task/E2EIncrementalTask.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/task/E2EIncrementalTask.java index d03bb3b7a8652..a79edb85aef06 100644 --- a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/task/E2EIncrementalTask.java +++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/cases/task/E2EIncrementalTask.java @@ -18,6 +18,7 @@ package org.apache.shardingsphere.test.e2e.data.pipeline.cases.task; import lombok.RequiredArgsConstructor; +import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import 
org.apache.commons.codec.digest.DigestUtils; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; @@ -44,6 +45,7 @@ import java.util.List; import java.util.UUID; import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.TimeUnit; @RequiredArgsConstructor @Slf4j @@ -70,6 +72,7 @@ public final class E2EIncrementalTask extends BaseIncrementTask { private final int loopCount; + @SneakyThrows(InterruptedException.class) @Override public void run() { List orderInsertData = PipelineCaseHelper.generateOrderInsertData(databaseType, primaryKeyGenerateAlgorithm, loopCount); @@ -77,16 +80,19 @@ public void run() { for (Object[] each : orderInsertData) { primaryKeys.add(each[0]); insertOrder(each); + TimeUnit.MILLISECONDS.sleep(100L); } ThreadLocalRandom random = ThreadLocalRandom.current(); for (int i = 0; i < Math.max(1, loopCount / 3); i++) { // TODO 0000-00-00 00:00:00 now will cause consistency check failed of MySQL. // DataSourceUtil.execute(dataSource, String.format("UPDATE %s SET t_datetime='0000-00-00 00:00:00' WHERE order_id = ?", orderTableName) updateOrderById(primaryKeys.get(random.nextInt(0, primaryKeys.size()))); + TimeUnit.MILLISECONDS.sleep(500L); } for (int i = 0; i < Math.max(1, loopCount / 3); i++) { setNullToAllFields(primaryKeys.get(random.nextInt(0, primaryKeys.size()))); deleteOrderById(primaryKeys.remove(random.nextInt(0, primaryKeys.size()))); + TimeUnit.MILLISECONDS.sleep(500L); } log.info("increment task runnable execute successfully."); } diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/DockerContainerComposer.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/DockerContainerComposer.java index 9a18c3c099ee2..66e0e88f47afe 100644 --- 
a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/DockerContainerComposer.java +++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/DockerContainerComposer.java @@ -75,8 +75,9 @@ public DockerContainerComposer(final DatabaseType databaseType, final String sto storageContainers.add(storageContainer); } AdaptorContainerConfiguration containerConfig = PipelineProxyClusterContainerConfigurationFactory.newInstance(databaseType); + DatabaseType proxyDatabaseType = databaseType instanceof OracleDatabaseType ? TypedSPILoader.getService(DatabaseType.class, "MySQL") : databaseType; ShardingSphereProxyClusterContainer proxyClusterContainer = (ShardingSphereProxyClusterContainer) AdapterContainerFactory.newInstance( - AdapterMode.CLUSTER, AdapterType.PROXY, databaseType, null, "", containerConfig); + AdapterMode.CLUSTER, AdapterType.PROXY, proxyDatabaseType, null, "", containerConfig); for (DockerStorageContainer each : storageContainers) { proxyClusterContainer.dependsOn(governanceContainer, each); } diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/NativeContainerComposer.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/NativeContainerComposer.java index 445926387caf1..10a89e4cebf5d 100644 --- a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/NativeContainerComposer.java +++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/compose/NativeContainerComposer.java @@ -79,6 +79,12 @@ public void cleanUpDatabase(final String databaseName) { connection.createStatement().execute("DROP SCHEMA IF EXISTS test;"); } break; + case "Oracle": + jdbcUrl = 
DataSourceEnvironment.getURL(databaseType, "localhost", actualDatabasePort, ""); + try (Connection connection = DriverManager.getConnection(jdbcUrl, username, password)) { + dropTableWithOracle(connection, databaseName); + } + break; default: } } @@ -101,6 +107,16 @@ private void dropTableWithSchema(final Connection connection, final String schem } } + private void dropTableWithOracle(final Connection connection, final String schema) throws SQLException { + String queryAllTables = String.format("SELECT TABLE_NAME FROM ALL_TABLES WHERE OWNER = '%s'", schema); + try (ResultSet resultSet = connection.createStatement().executeQuery(String.format(queryAllTables, schema))) { + List actualTableNames = getFirstColumnValueFromResult(resultSet); + for (String each : actualTableNames) { + connection.createStatement().executeUpdate(String.format("DROP TABLE %s.%s", schema, each)); + } + } + } + @Override public String getProxyJdbcUrl(final String databaseName) { if (databaseType instanceof OracleDatabaseType) { diff --git a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/config/proxy/PipelineProxyClusterContainerConfigurationFactory.java b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/config/proxy/PipelineProxyClusterContainerConfigurationFactory.java index 9463ae506b2a7..45edd71b09648 100644 --- a/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/config/proxy/PipelineProxyClusterContainerConfigurationFactory.java +++ b/test/e2e/operation/pipeline/src/test/java/org/apache/shardingsphere/test/e2e/data/pipeline/framework/container/config/proxy/PipelineProxyClusterContainerConfigurationFactory.java @@ -43,7 +43,7 @@ public final class PipelineProxyClusterContainerConfigurationFactory { * @return created instance */ public static AdaptorContainerConfiguration newInstance(final DatabaseType 
databaseType) { - return new AdaptorContainerConfiguration(getProxyDatasourceName(databaseType), getMountedResource(databaseType), AdapterContainerUtils.getAdapterContainerImage()); + return new AdaptorContainerConfiguration(getProxyDatasourceName(databaseType), getMountedResource(databaseType), AdapterContainerUtils.getAdapterContainerImage(), ""); } private static String getProxyDatasourceName(final DatabaseType databaseType) { diff --git a/test/e2e/operation/pipeline/src/test/resources/env/common/migration-command.xml b/test/e2e/operation/pipeline/src/test/resources/env/common/migration-command.xml index d0d62eb7f7b03..7b455d37552bc 100644 --- a/test/e2e/operation/pipeline/src/test/resources/env/common/migration-command.xml +++ b/test/e2e/operation/pipeline/src/test/resources/env/common/migration-command.xml @@ -37,7 +37,8 @@ REGISTER MIGRATION SOURCE STORAGE UNIT ds_0 ( URL="${ds0}", USER="${user}", - PASSWORD="${password}" + PASSWORD="${password}", + PROPERTIES("maximumPoolSize"="50","idleTimeout"="30000") ); @@ -45,15 +46,18 @@ REGISTER STORAGE UNIT ds_2 ( URL="${ds2}", USER="${user}", - PASSWORD="${password}" + PASSWORD="${password}", + PROPERTIES("maximumPoolSize"="50","idleTimeout"="30000") ),ds_3 ( URL="${ds3}", USER="${user}", - PASSWORD="${password}" + PASSWORD="${password}", + PROPERTIES("maximumPoolSize"="50","idleTimeout"="30000") ),ds_4 ( URL="${ds4}", USER="${user}", - PASSWORD="${password}" + PASSWORD="${password}", + PROPERTIES("maximumPoolSize"="50","idleTimeout"="30000") ) diff --git a/test/e2e/operation/pipeline/src/test/resources/env/scenario/primary_key/text_primary_key/mysql.xml b/test/e2e/operation/pipeline/src/test/resources/env/scenario/primary_key/text_primary_key/mysql.xml index b1376db0482da..0ba33d184cf90 100644 --- a/test/e2e/operation/pipeline/src/test/resources/env/scenario/primary_key/text_primary_key/mysql.xml +++ b/test/e2e/operation/pipeline/src/test/resources/env/scenario/primary_key/text_primary_key/mysql.xml @@ -25,12 +25,4 
@@ INDEX ( `user_id` ) ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; - - - UPDATE t_order SET status= 'unlock' - - - - CREATE index idx_order_status ON t_order (status) - diff --git a/test/e2e/operation/pipeline/src/test/resources/env/scenario/primary_key/text_primary_key/postgresql.xml b/test/e2e/operation/pipeline/src/test/resources/env/scenario/primary_key/text_primary_key/postgresql.xml index 5136e364fc559..c00b3d80ec0a6 100644 --- a/test/e2e/operation/pipeline/src/test/resources/env/scenario/primary_key/text_primary_key/postgresql.xml +++ b/test/e2e/operation/pipeline/src/test/resources/env/scenario/primary_key/text_primary_key/postgresql.xml @@ -23,12 +23,4 @@ PRIMARY KEY (order_id) ) - - - UPDATE t_order SET status= 'unlock' - - - - CREATE INDEX "idx_user_status" ON t_order ( status ) - diff --git a/test/e2e/operation/showprocesslist/src/test/java/org/apache/shardingsphere/test/e2e/showprocesslist/container/composer/ClusterShowProcessListContainerComposer.java b/test/e2e/operation/showprocesslist/src/test/java/org/apache/shardingsphere/test/e2e/showprocesslist/container/composer/ClusterShowProcessListContainerComposer.java index 6183f56bb1a27..253eab64891a9 100644 --- a/test/e2e/operation/showprocesslist/src/test/java/org/apache/shardingsphere/test/e2e/showprocesslist/container/composer/ClusterShowProcessListContainerComposer.java +++ b/test/e2e/operation/showprocesslist/src/test/java/org/apache/shardingsphere/test/e2e/showprocesslist/container/composer/ClusterShowProcessListContainerComposer.java @@ -57,7 +57,8 @@ public ClusterShowProcessListContainerComposer(final ShowProcessListTestParamete StorageContainer storageContainer = containers.registerContainer(StorageContainerFactory.newInstance(testParam.getDatabaseType(), "", StorageContainerConfigurationFactory.newInstance(testParam.getDatabaseType(), testParam.getScenario()))); AdaptorContainerConfiguration containerConfig = new AdaptorContainerConfiguration(testParam.getScenario(), - 
getMountedResources(testParam.getScenario(), testParam.getDatabaseType(), testParam.getRunMode(), testParam.getGovernanceCenter()), AdapterContainerUtils.getAdapterContainerImage()); + getMountedResources(testParam.getScenario(), testParam.getDatabaseType(), testParam.getRunMode(), testParam.getGovernanceCenter()), AdapterContainerUtils.getAdapterContainerImage(), + ""); jdbcContainer = AdapterContainerFactory.newInstance( AdapterMode.valueOf(testParam.getRunMode().toUpperCase()), AdapterType.JDBC, testParam.getDatabaseType(), storageContainer, testParam.getScenario(), containerConfig); proxyContainer = AdapterContainerFactory.newInstance( diff --git a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/alterresource/AddResourceTestCase.java b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/alterresource/AddResourceTestCase.java index 930a1db275b9e..e464224e42a73 100644 --- a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/alterresource/AddResourceTestCase.java +++ b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/alterresource/AddResourceTestCase.java @@ -47,7 +47,7 @@ public void executeTest(final TransactionContainerComposer containerComposer) th private void assertAddResource(final TransactionContainerComposer containerComposer) throws SQLException { try (Connection connection = getDataSource().getConnection()) { - getBaseTransactionITCase().addResource(connection, "transaction_it_2", containerComposer); + getBaseTransactionITCase().addResource(connection, "ds_2", containerComposer); createThreeDataSourceAccountTableRule(connection); reCreateAccountTable(connection); assertRollback(); diff --git a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/commitrollback/BroadcastTableTransactionTestCase.java 
b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/commitrollback/BroadcastTableTransactionTestCase.java index 80cfa396496f3..4f75236166755 100644 --- a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/commitrollback/BroadcastTableTransactionTestCase.java +++ b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/commitrollback/BroadcastTableTransactionTestCase.java @@ -21,6 +21,7 @@ import org.apache.shardingsphere.test.e2e.transaction.cases.base.BaseTransactionTestCase; import org.apache.shardingsphere.test.e2e.transaction.engine.base.TransactionBaseE2EIT; import org.apache.shardingsphere.test.e2e.transaction.engine.base.TransactionContainerComposer; +import org.apache.shardingsphere.test.e2e.transaction.engine.base.TransactionTestCase; import javax.sql.DataSource; import java.sql.Connection; @@ -29,7 +30,7 @@ /** * Broadcast table transaction integration test. 
*/ -// TODO add @TransactionTestCase when migration of broadcast table data completed when adding storage nodes +@TransactionTestCase public final class BroadcastTableTransactionTestCase extends BaseTransactionTestCase { private static final String T_ADDRESS = "t_address"; diff --git a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/commitrollback/ExceptionInTransactionTestCase.java b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/commitrollback/ExceptionInTransactionTestCase.java index 19b8271bddd4b..937d0cf85c2d5 100644 --- a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/commitrollback/ExceptionInTransactionTestCase.java +++ b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/commitrollback/ExceptionInTransactionTestCase.java @@ -66,17 +66,9 @@ protected void executeTest(final TransactionContainerComposer containerComposer) connection.close(); } } - Thread queryThread = new Thread(() -> { - try (Connection connection2 = getDataSource().getConnection()) { - assertAccountRowCount(connection2, 0); - } catch (final SQLException ignored) { - } - }); - queryThread.start(); - try { - queryThread.join(); - } catch (final InterruptedException ignored) { - Thread.currentThread().interrupt(); + try (Connection connection2 = getDataSource().getConnection()) { + assertAccountRowCount(connection2, 0); + } catch (final SQLException ignored) { } } } diff --git a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/commitrollback/ImplicitCommitTransactionTestCase.java b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/commitrollback/ImplicitCommitTransactionTestCase.java new file mode 100644 index 0000000000000..e2b10f49037e9 --- /dev/null +++ 
b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/commitrollback/ImplicitCommitTransactionTestCase.java @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.test.e2e.transaction.cases.commitrollback; + +import org.apache.shardingsphere.test.e2e.transaction.cases.base.BaseTransactionTestCase; +import org.apache.shardingsphere.test.e2e.transaction.engine.base.TransactionBaseE2EIT; +import org.apache.shardingsphere.test.e2e.transaction.engine.base.TransactionContainerComposer; +import org.apache.shardingsphere.test.e2e.transaction.engine.base.TransactionTestCase; +import org.apache.shardingsphere.transaction.api.TransactionType; + +import javax.sql.DataSource; +import java.sql.Connection; +import java.sql.SQLException; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * Implicit commit transaction integration test. 
+ */ +@TransactionTestCase(transactionTypes = TransactionType.XA) +public final class ImplicitCommitTransactionTestCase extends BaseTransactionTestCase { + + private static final String T_ADDRESS = "t_address"; + + public ImplicitCommitTransactionTestCase(final TransactionBaseE2EIT baseTransactionITCase, final DataSource dataSource) { + super(baseTransactionITCase, dataSource); + } + + @Override + protected void beforeTest() throws SQLException { + super.beforeTest(); + init(); + } + + @Override + protected void afterTest() throws SQLException { + super.afterTest(); + init(); + } + + @Override + protected void executeTest(final TransactionContainerComposer containerComposer) throws SQLException { + assertBroadcastTableImplicitCommit(); + assertShardingTableImplicitCommit(); + } + + private void assertBroadcastTableImplicitCommit() throws SQLException { + try (Connection connection = getDataSource().getConnection()) { + executeWithLog(connection, "INSERT INTO t_address (id, code, address) VALUES (1, '1', 'Nanjing')"); + assertThrows(SQLException.class, () -> executeWithLog(connection, "INSERT INTO t_address (id, code, address) VALUES (1, '1', 'Nanjing')")); + } + try (Connection connection = getDataSource().getConnection()) { + assertTableRowCount(connection, T_ADDRESS, 1); + } + } + + private void assertShardingTableImplicitCommit() throws SQLException { + try (Connection connection = getDataSource().getConnection()) { + executeWithLog(connection, "INSERT INTO account(id, balance, transaction_id) VALUES (1, 1, 1), (2, 2, 2)"); + assertThrows(SQLException.class, () -> executeWithLog(connection, "INSERT INTO account(id, balance, transaction_id) VALUES (1, 1, 1), (2, 2, 2)")); + } + try (Connection connection = getDataSource().getConnection()) { + assertAccountRowCount(connection, 2); + } + } + + private void init() throws SQLException { + try (Connection connection = getDataSource().getConnection()) { + executeWithLog(connection, "DELETE FROM t_address"); + 
assertTableRowCount(connection, T_ADDRESS, 0); + } + } +} diff --git a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/readonly/MySQLSetReadOnlyTestCase.java b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/readonly/MySQLSetReadOnlyTestCase.java index 4bd16dea2b90d..8b09cdc68d328 100644 --- a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/readonly/MySQLSetReadOnlyTestCase.java +++ b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/readonly/MySQLSetReadOnlyTestCase.java @@ -27,6 +27,8 @@ import java.sql.Connection; import java.sql.SQLException; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.fail; /** @@ -47,16 +49,16 @@ public void executeTest(final TransactionContainerComposer containerComposer) th } private void assertSetReadOnly() throws SQLException { - try (Connection connection1 = getDataSource().getConnection()) { - executeUpdateWithLog(connection1, "insert into account(id, balance) values (1, 0), (2, 100);"); + try (Connection connection = getDataSource().getConnection()) { + executeUpdateWithLog(connection, "insert into account(id, balance) values (1, 0), (2, 100);"); } - try (Connection connection2 = getDataSource().getConnection()) { - connection2.setReadOnly(true); - assertQueryBalance(connection2); - executeWithLog(connection2, "update account set balance = 100 where id = 2;"); + try (Connection connection = getDataSource().getConnection()) { + connection.setReadOnly(true); + assertQueryBalance(connection); + executeWithLog(connection, "update account set balance = 100 where id = 2;"); fail("Update ran successfully, should failed."); } catch (final SQLException ex) { - log.info("Update failed for expect."); + assertThat(ex.getMessage(), is("Connection is 
read-only. Queries leading to data modification are not allowed.")); } } } diff --git a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/BaseSavePointTestCase.java b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/BaseSavePointTestCase.java index 084e6970a99ba..4d88a180b46ec 100644 --- a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/BaseSavePointTestCase.java +++ b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/BaseSavePointTestCase.java @@ -34,7 +34,7 @@ protected BaseSavePointTestCase(final TransactionBaseE2EIT baseTransactionITCase super(baseTransactionITCase, dataSource); } - void assertRollback2Savepoint() throws SQLException { + void assertRollbackToSavepoint() throws SQLException { try (Connection connection = getDataSource().getConnection()) { connection.setAutoCommit(false); assertAccountRowCount(connection, 0); diff --git a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/MySQLSavePointTestCase.java b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/MySQLSavePointTestCase.java index 5ad98bcdd5121..6e79e555b1c5b 100644 --- a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/MySQLSavePointTestCase.java +++ b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/MySQLSavePointTestCase.java @@ -41,7 +41,7 @@ public MySQLSavePointTestCase(final TransactionBaseE2EIT baseTransactionITCase, @Override public void executeTest(final TransactionContainerComposer containerComposer) throws SQLException { - assertRollback2Savepoint(); + assertRollbackToSavepoint(); assertReleaseSavepoint(); 
assertReleaseSavepointFailure(); } diff --git a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/OpenGaussSavePointTestCase.java b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/OpenGaussSavePointTestCase.java index 9300e66340761..e982904c28de0 100644 --- a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/OpenGaussSavePointTestCase.java +++ b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/OpenGaussSavePointTestCase.java @@ -44,7 +44,7 @@ public OpenGaussSavePointTestCase(final TransactionBaseE2EIT baseTransactionITCa @Override public void executeTest(final TransactionContainerComposer containerComposer) throws SQLException { - assertRollback2Savepoint(); + assertRollbackToSavepoint(); assertReleaseSavepoint(); assertErrors(); } diff --git a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/PostgreSQLSavePointTestCase.java b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/PostgreSQLSavePointTestCase.java index 542e81f87fd73..692badc7dca5e 100644 --- a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/PostgreSQLSavePointTestCase.java +++ b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/cases/savepoint/PostgreSQLSavePointTestCase.java @@ -45,7 +45,7 @@ public PostgreSQLSavePointTestCase(final TransactionBaseE2EIT baseTransactionITC @Override public void executeTest(final TransactionContainerComposer containerComposer) throws SQLException { - assertRollback2Savepoint(); + assertRollbackToSavepoint(); assertReleaseSavepoint(); assertErrors(); } diff --git 
a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/engine/base/TransactionContainerComposer.java b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/engine/base/TransactionContainerComposer.java index d173c195342ea..03c0b6388e868 100644 --- a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/engine/base/TransactionContainerComposer.java +++ b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/engine/base/TransactionContainerComposer.java @@ -31,7 +31,6 @@ import org.apache.shardingsphere.test.e2e.transaction.framework.param.TransactionTestParameter; import javax.sql.DataSource; -import java.util.Map; /** * Transaction container composer. @@ -69,10 +68,6 @@ private ProxyDataSource createProxyDataSource() { private JdbcDataSource createJdbcDataSource() { DockerContainerComposer dockerContainerComposer = (DockerContainerComposer) containerComposer; - DockerStorageContainer storageContainer = dockerContainerComposer.getStorageContainer(); - Map actualDataSourceMap = storageContainer.getActualDataSourceMap(); - actualDataSourceMap.put("ds_0", createDataSource(storageContainer, "transaction_it_0")); - actualDataSourceMap.put("ds_1", createDataSource(storageContainer, "transaction_it_1")); return new JdbcDataSource(dockerContainerComposer); } diff --git a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/engine/base/TransactionTestCase.java b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/engine/base/TransactionTestCase.java index 6949e477eef87..b213564f7ed91 100644 --- a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/engine/base/TransactionTestCase.java +++ 
b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/engine/base/TransactionTestCase.java @@ -36,7 +36,7 @@ * * @return database types */ - String[] dbTypes() default {"mysql", "PostgreSQL", "OpenGauss"}; + String[] dbTypes() default {"MySQL", "PostgreSQL", "OpenGauss"}; /** * Specifies which adapters do the case run on. diff --git a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/mysql/MySQLContainerConfigurationFactory.java b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/mysql/MySQLContainerConfigurationFactory.java index c61f0d2aa2c22..fe5aa4ce2a971 100644 --- a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/mysql/MySQLContainerConfigurationFactory.java +++ b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/mysql/MySQLContainerConfigurationFactory.java @@ -19,7 +19,8 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; -import org.apache.shardingsphere.infra.database.mysql.type.MySQLDatabaseType; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.e2e.env.container.atomic.storage.config.StorageContainerConfiguration; import org.apache.shardingsphere.test.e2e.env.container.atomic.storage.impl.MySQLContainer; import org.apache.shardingsphere.test.e2e.env.runtime.scenario.database.DatabaseEnvironmentManager; @@ -60,8 +61,9 @@ private static Map getContainerEnvironments() { private static Map getMountedResources(final String scenario) { Map result = new HashMap<>(); - result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.ACTUAL, new MySQLDatabaseType()) + "/01-actual-init.sql", 
"/docker-entrypoint-initdb.d/01-actual-init.sql"); - result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.EXPECTED, new MySQLDatabaseType()) + "/01-expected-init.sql", + result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.ACTUAL, TypedSPILoader.getService(DatabaseType.class, "MySQL")) + "/01-actual-init.sql", + "/docker-entrypoint-initdb.d/01-actual-init.sql"); + result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.EXPECTED, TypedSPILoader.getService(DatabaseType.class, "MySQL")) + "/01-expected-init.sql", "/docker-entrypoint-initdb.d/01-expected-init.sql"); result.put("/env/mysql/my.cnf", MySQLContainer.MYSQL_CONF_IN_CONTAINER); return result; diff --git a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/opengauss/OpenGaussContainerConfigurationFactory.java b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/opengauss/OpenGaussContainerConfigurationFactory.java index 7b76af4ca94c9..46d856ef16502 100644 --- a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/opengauss/OpenGaussContainerConfigurationFactory.java +++ b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/opengauss/OpenGaussContainerConfigurationFactory.java @@ -19,7 +19,8 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; -import org.apache.shardingsphere.infra.database.opengauss.type.OpenGaussDatabaseType; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.e2e.env.container.atomic.constants.StorageContainerConstants; import org.apache.shardingsphere.test.e2e.env.container.atomic.storage.config.StorageContainerConfiguration; import 
org.apache.shardingsphere.test.e2e.env.container.atomic.storage.impl.OpenGaussContainer; @@ -58,9 +59,9 @@ private static Map getContainerEnvironments() { private static Map getMountedResources(final String scenario) { Map result = new HashMap<>(3, 1F); - result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.ACTUAL, new OpenGaussDatabaseType()) + "/01-actual-init.sql", + result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.ACTUAL, TypedSPILoader.getService(DatabaseType.class, "openGauss")) + "/01-actual-init.sql", "/docker-entrypoint-initdb.d/01-actual-init.sql"); - result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.EXPECTED, new OpenGaussDatabaseType()) + "/01-expected-init.sql", + result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.EXPECTED, TypedSPILoader.getService(DatabaseType.class, "openGauss")) + "/01-expected-init.sql", "/docker-entrypoint-initdb.d/01-expected-init.sql"); result.put("/env/postgresql/postgresql.conf", OpenGaussContainer.OPENGAUSS_CONF_IN_CONTAINER); return result; diff --git a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/postgresql/PostgreSQLContainerConfigurationFactory.java b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/postgresql/PostgreSQLContainerConfigurationFactory.java index d1996da480394..ee9964f3e449f 100644 --- a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/postgresql/PostgreSQLContainerConfigurationFactory.java +++ b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/postgresql/PostgreSQLContainerConfigurationFactory.java @@ -19,7 +19,8 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; -import 
org.apache.shardingsphere.infra.database.postgresql.type.PostgreSQLDatabaseType; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.e2e.env.container.atomic.constants.StorageContainerConstants; import org.apache.shardingsphere.test.e2e.env.container.atomic.storage.config.StorageContainerConfiguration; import org.apache.shardingsphere.test.e2e.env.container.atomic.storage.impl.OpenGaussContainer; @@ -60,9 +61,9 @@ private static Map getContainerEnvironments() { private static Map getMountedResources(final String scenario) { Map result = new HashMap<>(3, 1F); - result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.ACTUAL, new PostgreSQLDatabaseType()) + "/01-actual-init.sql", + result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.ACTUAL, TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")) + "/01-actual-init.sql", "/docker-entrypoint-initdb.d/01-actual-init.sql"); - result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.EXPECTED, new PostgreSQLDatabaseType()) + "/01-expected-init.sql", + result.put(new ScenarioDataPath(scenario).getInitSQLResourcePath(Type.EXPECTED, TypedSPILoader.getService(DatabaseType.class, "PostgreSQL")) + "/01-expected-init.sql", "/docker-entrypoint-initdb.d/01-expected-init.sql"); result.put("/env/postgresql/postgresql.conf", OpenGaussContainer.OPENGAUSS_CONF_IN_CONTAINER); return result; diff --git a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/proxy/ProxyClusterContainerConfigurationFactory.java b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/proxy/ProxyClusterContainerConfigurationFactory.java index e30feb00a7ec3..c5aaabbc1ee52 100644 --- 
a/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/proxy/ProxyClusterContainerConfigurationFactory.java +++ b/test/e2e/operation/transaction/src/test/java/org/apache/shardingsphere/test/e2e/transaction/framework/container/config/proxy/ProxyClusterContainerConfigurationFactory.java @@ -43,7 +43,8 @@ public final class ProxyClusterContainerConfigurationFactory { * @return created instance */ public static AdaptorContainerConfiguration newInstance(final String scenario, final DatabaseType databaseType) { - return new AdaptorContainerConfiguration(getProxyDatasourceName(databaseType), getMountedResource(scenario, databaseType), AdapterContainerUtils.getAdapterContainerImage()); + String containerCommand = "readwrite-splitting".equals(scenario) ? "-f" : ""; + return new AdaptorContainerConfiguration(getProxyDatasourceName(databaseType), getMountedResource(scenario, databaseType), AdapterContainerUtils.getAdapterContainerImage(), containerCommand); } private static String getProxyDatasourceName(final DatabaseType databaseType) { diff --git a/test/e2e/operation/transaction/src/test/resources/env/it-env.properties b/test/e2e/operation/transaction/src/test/resources/env/it-env.properties index 56401d4fc3c61..71eb6b715c125 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/it-env.properties +++ b/test/e2e/operation/transaction/src/test/resources/env/it-env.properties @@ -17,12 +17,12 @@ # transaction.it.type=NONE,DOCKER,NATIVE transaction.it.env.type=NONE # transaction.it.env.cases=ClassicTransferTestCase, PostgreSQLSavePointTestCase -transaction.it.env.cases=TransactionDeadlockTestCase, MultiJDBCConnectionsTestCase, MultiTransactionInConnectionTestCase, MultiOperationsCommitAndRollbackTestCase, MySQLAutoCommitTestCase, PostgreSQLAutoCommitTestCase, BroadcastTableTransactionTestCase, ExceptionInTransactionTestCase, MultiTableCommitAndRollbackTestCase, SingleTableCommitAndRollbackTestCase, 
MySQLSetReadOnlyTestCase, MySQLSavePointTestCase, MySQLLocalTruncateTestCase, MySQLXATruncateTestCase, OpenGaussCursorTestCase, NestedTransactionTestCase, ReadwriteSplittingInTransactionTestCase +transaction.it.env.cases=TransactionDeadlockTestCase, MultiJDBCConnectionsTestCase, MultiTransactionInConnectionTestCase, MultiOperationsCommitAndRollbackTestCase, MySQLAutoCommitTestCase, PostgreSQLAutoCommitTestCase, BroadcastTableTransactionTestCase, ExceptionInTransactionTestCase, MultiTableCommitAndRollbackTestCase, SingleTableCommitAndRollbackTestCase, MySQLSetReadOnlyTestCase, MySQLSavePointTestCase, MySQLLocalTruncateTestCase, MySQLXATruncateTestCase, OpenGaussCursorTestCase, NestedTransactionTestCase, ReadwriteSplittingInTransactionTestCase, ImplicitCommitTransactionTestCase # transaction.it.env.transtypes=LOCAL, XA, BASE transaction.it.env.transtypes=LOCAL, XA # transaction.it.env.xa.providers=Atomikos, Narayana transaction.it.env.xa.providers=Atomikos, Narayana -# transaction.it.docker.mysql.version=5.7,8.0 +# transaction.it.docker.mysql.version=mysql:5.7,mysql:8.0 transaction.it.docker.mysql.version= # transaction.it.docker.postgresql.version=postgres:10-alpine,postgres:11-alpine,postgres:12-alpine,postgres:13-alpine,postgres:14-alpine transaction.it.docker.postgresql.version= diff --git a/test/e2e/operation/transaction/src/test/resources/env/jdbc/mysql/config-sharding-xa-narayana.yaml b/test/e2e/operation/transaction/src/test/resources/env/jdbc/mysql/config-sharding-xa-narayana.yaml index 7b8f2e0288b77..4aa7346308d08 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/jdbc/mysql/config-sharding-xa-narayana.yaml +++ b/test/e2e/operation/transaction/src/test/resources/env/jdbc/mysql/config-sharding-xa-narayana.yaml @@ -100,4 +100,4 @@ transaction: providerType: Narayana props: - sql-show: false + sql-show: true diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/databases.xml 
b/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/databases.xml index 7ba6877df397e..787fa1ce96ed3 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/databases.xml +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/databases.xml @@ -17,7 +17,7 @@ --> - transaction_it_0 - transaction_it_1 - transaction_it_2 + ds_0 + ds_1 + ds_2 diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/init-sql/mysql/01-actual-init.sql b/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/init-sql/mysql/01-actual-init.sql index 093677cc45c90..5546773f8c403 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/init-sql/mysql/01-actual-init.sql +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/init-sql/mysql/01-actual-init.sql @@ -26,11 +26,11 @@ -- limitations under the License. 
-- -DROP DATABASE IF EXISTS transaction_it_0; -CREATE DATABASE transaction_it_0; +DROP DATABASE IF EXISTS ds_0; +CREATE DATABASE ds_0; -DROP DATABASE IF EXISTS transaction_it_1; -CREATE DATABASE transaction_it_1; +DROP DATABASE IF EXISTS ds_1; +CREATE DATABASE ds_1; -DROP DATABASE IF EXISTS transaction_it_2; -CREATE DATABASE transaction_it_2; +DROP DATABASE IF EXISTS ds_2; +CREATE DATABASE ds_2; diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/init-sql/opengauss/01-actual-init.sql b/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/init-sql/opengauss/01-actual-init.sql index 4b7bc98114362..d1e4b5b8163b7 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/init-sql/opengauss/01-actual-init.sql +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/init-sql/opengauss/01-actual-init.sql @@ -26,19 +26,19 @@ -- limitations under the License. 
-- -DROP DATABASE IF EXISTS transaction_it_0; -DROP DATABASE IF EXISTS transaction_it_1; -DROP DATABASE IF EXISTS transaction_it_2; +DROP DATABASE IF EXISTS ds_0; +DROP DATABASE IF EXISTS ds_1; +DROP DATABASE IF EXISTS ds_2; -CREATE DATABASE transaction_it_0; -CREATE DATABASE transaction_it_1; -CREATE DATABASE transaction_it_2; +CREATE DATABASE ds_0; +CREATE DATABASE ds_1; +CREATE DATABASE ds_2; -GRANT ALL PRIVILEGES ON DATABASE transaction_it_0 TO test_user; -GRANT ALL PRIVILEGES ON DATABASE transaction_it_1 TO test_user; -GRANT ALL PRIVILEGES ON DATABASE transaction_it_2 TO test_user; +GRANT ALL PRIVILEGES ON DATABASE ds_0 TO test_user; +GRANT ALL PRIVILEGES ON DATABASE ds_1 TO test_user; +GRANT ALL PRIVILEGES ON DATABASE ds_2 TO test_user; -\c transaction_it_0; +\c ds_0; CREATE TABLE "t_product" ("id" INT PRIMARY KEY, "product_id" INT NOT NULL, "address_id" INT, "product_name" varchar, "category_id" INT NOT NULL, "price" numeric NOT NULL, "status" varchar, "creation_date" date); CREATE TABLE "t_order_0" ("id" INT PRIMARY KEY, "order_id" INT, "address_id" INT, "user_id" INT NOT NULL, "status" VARCHAR(45) NULL); @@ -78,7 +78,7 @@ INSERT INTO "t_product" ("id", "product_id", "address_id", "product_name", "cate INSERT INTO "t_product" ("id", "product_id", "address_id", "product_name", "category_id", "price", "status", "creation_date") VALUES (4, 4, 4, 'product4', 4, 4, 'OK', '2022-11-02'); \c -\c transaction_it_1; +\c ds_1; CREATE TABLE "t_order_0" ("id" INT PRIMARY KEY, "order_id" INT, "address_id" INT, "user_id" INT NOT NULL, "status" VARCHAR(45) NULL); CREATE TABLE "t_order_1" ("id" INT PRIMARY KEY, "order_id" INT, "address_id" INT, "user_id" INT NOT NULL, "status" VARCHAR(45) NULL); diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/init-sql/postgresql/01-actual-init.sql b/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/init-sql/postgresql/01-actual-init.sql index 
093677cc45c90..5546773f8c403 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/init-sql/postgresql/01-actual-init.sql +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/data/actual/init-sql/postgresql/01-actual-init.sql @@ -26,11 +26,11 @@ -- limitations under the License. -- -DROP DATABASE IF EXISTS transaction_it_0; -CREATE DATABASE transaction_it_0; +DROP DATABASE IF EXISTS ds_0; +CREATE DATABASE ds_0; -DROP DATABASE IF EXISTS transaction_it_1; -CREATE DATABASE transaction_it_1; +DROP DATABASE IF EXISTS ds_1; +CREATE DATABASE ds_1; -DROP DATABASE IF EXISTS transaction_it_2; -CREATE DATABASE transaction_it_2; +DROP DATABASE IF EXISTS ds_2; +CREATE DATABASE ds_2; diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/proxy/conf/opengauss/config-sharding.yaml b/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/proxy/conf/opengauss/config-sharding.yaml index 2d9bfdb85ff33..424a1cdda23a7 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/proxy/conf/opengauss/config-sharding.yaml +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/cursor/proxy/conf/opengauss/config-sharding.yaml @@ -24,7 +24,7 @@ databaseName: sharding_db dataSources: ds_0: - url: jdbc:opengauss://opengauss.cursor.host:5432/transaction_it_0 + url: jdbc:opengauss://opengauss.cursor.host:5432/ds_0 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -33,7 +33,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 ds_1: - url: jdbc:opengauss://opengauss.cursor.host:5432/transaction_it_1 + url: jdbc:opengauss://opengauss.cursor.host:5432/ds_1 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/databases.xml b/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/databases.xml 
index 7ba6877df397e..787fa1ce96ed3 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/databases.xml +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/databases.xml @@ -17,7 +17,7 @@ --> - transaction_it_0 - transaction_it_1 - transaction_it_2 + ds_0 + ds_1 + ds_2 diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/init-sql/mysql/01-actual-init.sql b/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/init-sql/mysql/01-actual-init.sql index 27efd5aa5a02e..5a65dfb657ffd 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/init-sql/mysql/01-actual-init.sql +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/init-sql/mysql/01-actual-init.sql @@ -18,26 +18,28 @@ SET character_set_database='utf8'; SET character_set_server='utf8'; -DROP DATABASE IF EXISTS transaction_it_0; -DROP DATABASE IF EXISTS transaction_it_1; -DROP DATABASE IF EXISTS transaction_it_2; +DROP DATABASE IF EXISTS ds_0; +DROP DATABASE IF EXISTS ds_1; +DROP DATABASE IF EXISTS ds_2; -CREATE DATABASE transaction_it_0; -CREATE DATABASE transaction_it_1; -CREATE DATABASE transaction_it_2; +CREATE DATABASE ds_0; +CREATE DATABASE ds_1; +CREATE DATABASE ds_2; -CREATE TABLE transaction_it_0.`t_order_0` (`order_id` INT PRIMARY KEY, `user_id` INT NOT NULL, `status` VARCHAR(45) NULL); -CREATE TABLE transaction_it_0.`t_order_1` (`order_id` INT PRIMARY KEY, `user_id` INT NOT NULL, `status` VARCHAR(45) NULL); -CREATE TABLE transaction_it_0.`t_order_item_0` (`item_id` INT PRIMARY KEY, `order_id` int NOT NULL, `user_id` int NOT NULL, `status` varchar(50) DEFAULT NULL); -CREATE TABLE transaction_it_0.`t_order_item_1` (`item_id` INT PRIMARY KEY, `order_id` int NOT NULL, `user_id` int NOT NULL, `status` varchar(50) DEFAULT NULL); -CREATE TABLE transaction_it_0.`account_0`(`id` INT PRIMARY 
KEY, `balance` FLOAT, `transaction_id` INT); -CREATE TABLE transaction_it_0.`account_1`(`id` INT PRIMARY KEY, `balance` FLOAT, `transaction_id` INT); -CREATE TABLE transaction_it_0.`t_address` (`id` INT PRIMARY KEY, `code` VARCHAR(36) DEFAULT NULL, `address` VARCHAR(36) DEFAULT NULL); +CREATE TABLE ds_0.`t_order_0` (`order_id` INT PRIMARY KEY, `user_id` INT NOT NULL, `status` VARCHAR(45) NULL); +CREATE TABLE ds_0.`t_order_1` (`order_id` INT PRIMARY KEY, `user_id` INT NOT NULL, `status` VARCHAR(45) NULL); +CREATE TABLE ds_0.`t_order_item_0` (`item_id` INT PRIMARY KEY, `order_id` int NOT NULL, `user_id` int NOT NULL, `status` varchar(50) DEFAULT NULL); +CREATE TABLE ds_0.`t_order_item_1` (`item_id` INT PRIMARY KEY, `order_id` int NOT NULL, `user_id` int NOT NULL, `status` varchar(50) DEFAULT NULL); +CREATE TABLE ds_0.`account_0`(`id` INT PRIMARY KEY, `balance` FLOAT, `transaction_id` INT); +CREATE TABLE ds_0.`account_1`(`id` INT PRIMARY KEY, `balance` FLOAT, `transaction_id` INT); +CREATE TABLE ds_0.`t_address` (`id` INT PRIMARY KEY, `code` VARCHAR(36) DEFAULT NULL, `address` VARCHAR(36) DEFAULT NULL); -CREATE TABLE transaction_it_1.`t_order_0` (`order_id` INT PRIMARY KEY, `user_id` INT NOT NULL, `status` VARCHAR(45) NULL); -CREATE TABLE transaction_it_1.`t_order_1` (`order_id` INT PRIMARY KEY, `user_id` INT NOT NULL, `status` VARCHAR(45) NULL); -CREATE TABLE transaction_it_1.`t_order_item_0` (`item_id` INT PRIMARY KEY, `order_id` int NOT NULL, `user_id` int NOT NULL, `status` varchar(50) DEFAULT NULL); -CREATE TABLE transaction_it_1.`t_order_item_1` (`item_id` INT PRIMARY KEY, `order_id` int NOT NULL, `user_id` int NOT NULL, `status` varchar(50) DEFAULT NULL); -CREATE TABLE transaction_it_1.`account_0`(`id` INT PRIMARY KEY, `balance` FLOAT, `transaction_id` INT); -CREATE TABLE transaction_it_1.`account_1`(`id` INT PRIMARY KEY, `balance` FLOAT, `transaction_id` INT); -CREATE TABLE transaction_it_1.`t_address` (`id` INT PRIMARY KEY, `code` VARCHAR(36) DEFAULT NULL, 
`address` VARCHAR(36) DEFAULT NULL); +CREATE TABLE ds_1.`t_order_0` (`order_id` INT PRIMARY KEY, `user_id` INT NOT NULL, `status` VARCHAR(45) NULL); +CREATE TABLE ds_1.`t_order_1` (`order_id` INT PRIMARY KEY, `user_id` INT NOT NULL, `status` VARCHAR(45) NULL); +CREATE TABLE ds_1.`t_order_item_0` (`item_id` INT PRIMARY KEY, `order_id` int NOT NULL, `user_id` int NOT NULL, `status` varchar(50) DEFAULT NULL); +CREATE TABLE ds_1.`t_order_item_1` (`item_id` INT PRIMARY KEY, `order_id` int NOT NULL, `user_id` int NOT NULL, `status` varchar(50) DEFAULT NULL); +CREATE TABLE ds_1.`account_0`(`id` INT PRIMARY KEY, `balance` FLOAT, `transaction_id` INT); +CREATE TABLE ds_1.`account_1`(`id` INT PRIMARY KEY, `balance` FLOAT, `transaction_id` INT); +CREATE TABLE ds_1.`t_address` (`id` INT PRIMARY KEY, `code` VARCHAR(36) DEFAULT NULL, `address` VARCHAR(36) DEFAULT NULL); + +CREATE TABLE ds_2.`t_address` (`id` INT PRIMARY KEY, `code` VARCHAR(36) DEFAULT NULL, `address` VARCHAR(36) DEFAULT NULL); diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/init-sql/opengauss/01-actual-init.sql b/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/init-sql/opengauss/01-actual-init.sql index ec3929070ce10..f48e73cf409fa 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/init-sql/opengauss/01-actual-init.sql +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/init-sql/opengauss/01-actual-init.sql @@ -15,19 +15,19 @@ -- limitations under the License. 
-- -DROP DATABASE IF EXISTS transaction_it_0; -DROP DATABASE IF EXISTS transaction_it_1; -DROP DATABASE IF EXISTS transaction_it_2; +DROP DATABASE IF EXISTS ds_0; +DROP DATABASE IF EXISTS ds_1; +DROP DATABASE IF EXISTS ds_2; -CREATE DATABASE transaction_it_0; -CREATE DATABASE transaction_it_1; -CREATE DATABASE transaction_it_2; +CREATE DATABASE ds_0; +CREATE DATABASE ds_1; +CREATE DATABASE ds_2; -GRANT ALL PRIVILEGES ON DATABASE transaction_it_0 TO test_user; -GRANT ALL PRIVILEGES ON DATABASE transaction_it_1 TO test_user; -GRANT ALL PRIVILEGES ON DATABASE transaction_it_2 TO test_user; +GRANT ALL PRIVILEGES ON DATABASE ds_0 TO test_user; +GRANT ALL PRIVILEGES ON DATABASE ds_1 TO test_user; +GRANT ALL PRIVILEGES ON DATABASE ds_2 TO test_user; -\c transaction_it_0; +\c ds_0; CREATE TABLE "t_order_0" ("order_id" INT PRIMARY KEY, "user_id" INT NOT NULL, "status" VARCHAR(45) NULL); CREATE TABLE "t_order_1" ("order_id" INT PRIMARY KEY, "user_id" INT NOT NULL, "status" VARCHAR(45) NULL); @@ -37,7 +37,7 @@ CREATE TABLE "account_0"("id" INT PRIMARY KEY, "balance" FLOAT, "transaction_id" CREATE TABLE "account_1"("id" INT PRIMARY KEY, "balance" FLOAT, "transaction_id" INT); CREATE TABLE "t_address" ("id" INT PRIMARY KEY, "code" VARCHAR(36) DEFAULT NULL, "address" VARCHAR(36) DEFAULT NULL); -\c transaction_it_1; +\c ds_1; CREATE TABLE "t_order_0" ("order_id" INT PRIMARY KEY, "user_id" INT NOT NULL, "status" VARCHAR(45) NULL); CREATE TABLE "t_order_1" ("order_id" INT PRIMARY KEY, "user_id" INT NOT NULL, "status" VARCHAR(45) NULL); @@ -46,3 +46,6 @@ CREATE TABLE "t_order_item_1" ("item_id" INT PRIMARY KEY, "order_id" int NOT NUL CREATE TABLE "account_0"("id" INT PRIMARY KEY, "balance" FLOAT, "transaction_id" INT); CREATE TABLE "account_1"("id" INT PRIMARY KEY, "balance" FLOAT, "transaction_id" INT); CREATE TABLE "t_address" ("id" INT PRIMARY KEY, "code" VARCHAR(36) DEFAULT NULL, "address" VARCHAR(36) DEFAULT NULL); + +\c ds_2; +CREATE TABLE "t_address" ("id" INT PRIMARY KEY, 
"code" VARCHAR(36) DEFAULT NULL, "address" VARCHAR(36) DEFAULT NULL); diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/init-sql/postgresql/01-actual-init.sql b/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/init-sql/postgresql/01-actual-init.sql index ec3929070ce10..f48e73cf409fa 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/init-sql/postgresql/01-actual-init.sql +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/default/data/actual/init-sql/postgresql/01-actual-init.sql @@ -15,19 +15,19 @@ -- limitations under the License. -- -DROP DATABASE IF EXISTS transaction_it_0; -DROP DATABASE IF EXISTS transaction_it_1; -DROP DATABASE IF EXISTS transaction_it_2; +DROP DATABASE IF EXISTS ds_0; +DROP DATABASE IF EXISTS ds_1; +DROP DATABASE IF EXISTS ds_2; -CREATE DATABASE transaction_it_0; -CREATE DATABASE transaction_it_1; -CREATE DATABASE transaction_it_2; +CREATE DATABASE ds_0; +CREATE DATABASE ds_1; +CREATE DATABASE ds_2; -GRANT ALL PRIVILEGES ON DATABASE transaction_it_0 TO test_user; -GRANT ALL PRIVILEGES ON DATABASE transaction_it_1 TO test_user; -GRANT ALL PRIVILEGES ON DATABASE transaction_it_2 TO test_user; +GRANT ALL PRIVILEGES ON DATABASE ds_0 TO test_user; +GRANT ALL PRIVILEGES ON DATABASE ds_1 TO test_user; +GRANT ALL PRIVILEGES ON DATABASE ds_2 TO test_user; -\c transaction_it_0; +\c ds_0; CREATE TABLE "t_order_0" ("order_id" INT PRIMARY KEY, "user_id" INT NOT NULL, "status" VARCHAR(45) NULL); CREATE TABLE "t_order_1" ("order_id" INT PRIMARY KEY, "user_id" INT NOT NULL, "status" VARCHAR(45) NULL); @@ -37,7 +37,7 @@ CREATE TABLE "account_0"("id" INT PRIMARY KEY, "balance" FLOAT, "transaction_id" CREATE TABLE "account_1"("id" INT PRIMARY KEY, "balance" FLOAT, "transaction_id" INT); CREATE TABLE "t_address" ("id" INT PRIMARY KEY, "code" VARCHAR(36) DEFAULT NULL, "address" VARCHAR(36) DEFAULT NULL); -\c 
transaction_it_1; +\c ds_1; CREATE TABLE "t_order_0" ("order_id" INT PRIMARY KEY, "user_id" INT NOT NULL, "status" VARCHAR(45) NULL); CREATE TABLE "t_order_1" ("order_id" INT PRIMARY KEY, "user_id" INT NOT NULL, "status" VARCHAR(45) NULL); @@ -46,3 +46,6 @@ CREATE TABLE "t_order_item_1" ("item_id" INT PRIMARY KEY, "order_id" int NOT NUL CREATE TABLE "account_0"("id" INT PRIMARY KEY, "balance" FLOAT, "transaction_id" INT); CREATE TABLE "account_1"("id" INT PRIMARY KEY, "balance" FLOAT, "transaction_id" INT); CREATE TABLE "t_address" ("id" INT PRIMARY KEY, "code" VARCHAR(36) DEFAULT NULL, "address" VARCHAR(36) DEFAULT NULL); + +\c ds_2; +CREATE TABLE "t_address" ("id" INT PRIMARY KEY, "code" VARCHAR(36) DEFAULT NULL, "address" VARCHAR(36) DEFAULT NULL); diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/default/proxy/conf/mysql/config-sharding.yaml b/test/e2e/operation/transaction/src/test/resources/env/scenario/default/proxy/conf/mysql/config-sharding.yaml index b3023bcbd46cf..04220b627177b 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/default/proxy/conf/mysql/config-sharding.yaml +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/default/proxy/conf/mysql/config-sharding.yaml @@ -24,7 +24,7 @@ databaseName: sharding_db dataSources: ds_0: - url: jdbc:mysql://mysql.default.host:3306/transaction_it_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.default.host:3306/ds_0?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -33,7 +33,7 @@ dataSources: maxPoolSize: 4 minPoolSize: 2 ds_1: - url: jdbc:mysql://mysql.default.host:3306/transaction_it_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + url: jdbc:mysql://mysql.default.host:3306/ds_1?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff 
--git a/test/e2e/operation/transaction/src/test/resources/env/scenario/default/proxy/conf/opengauss/config-sharding.yaml b/test/e2e/operation/transaction/src/test/resources/env/scenario/default/proxy/conf/opengauss/config-sharding.yaml index 8c3956bf75464..5ef7bd85b65be 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/default/proxy/conf/opengauss/config-sharding.yaml +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/default/proxy/conf/opengauss/config-sharding.yaml @@ -24,7 +24,7 @@ databaseName: sharding_db dataSources: ds_0: - url: jdbc:opengauss://opengauss.default.host:5432/transaction_it_0 + url: jdbc:opengauss://opengauss.default.host:5432/ds_0 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -33,7 +33,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 ds_1: - url: jdbc:opengauss://opengauss.default.host:5432/transaction_it_1 + url: jdbc:opengauss://opengauss.default.host:5432/ds_1 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/default/proxy/conf/postgresql/config-sharding.yaml b/test/e2e/operation/transaction/src/test/resources/env/scenario/default/proxy/conf/postgresql/config-sharding.yaml index 8ac53f700db00..f1189a1c136a6 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/default/proxy/conf/postgresql/config-sharding.yaml +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/default/proxy/conf/postgresql/config-sharding.yaml @@ -24,7 +24,7 @@ databaseName: sharding_db dataSources: ds_0: - url: jdbc:postgresql://postgresql.default.host:5432/transaction_it_0 + url: jdbc:postgresql://postgresql.default.host:5432/ds_0 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 @@ -33,7 +33,7 @@ dataSources: maxPoolSize: 2 minPoolSize: 2 ds_1: - url: jdbc:postgresql://postgresql.default.host:5432/transaction_it_1 + url: 
jdbc:postgresql://postgresql.default.host:5432/ds_1 username: test_user password: Test@123 connectionTimeoutMilliseconds: 30000 diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/data/actual/init-sql/mysql/01-actual-init.sql b/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/data/actual/init-sql/mysql/01-actual-init.sql index e70927b83fc20..989fd8ffceaaa 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/data/actual/init-sql/mysql/01-actual-init.sql +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/data/actual/init-sql/mysql/01-actual-init.sql @@ -21,10 +21,12 @@ SET character_set_server='utf8'; DROP DATABASE IF EXISTS write_ds; DROP DATABASE IF EXISTS read_ds_0; DROP DATABASE IF EXISTS read_ds_1; +DROP DATABASE IF EXISTS read_ds_error; CREATE DATABASE write_ds; CREATE DATABASE read_ds_0; CREATE DATABASE read_ds_1; +CREATE DATABASE read_ds_error; CREATE TABLE write_ds.`t_order` (`order_id` INT PRIMARY KEY, `user_id` INT NOT NULL, `status` VARCHAR(45) NULL); CREATE TABLE write_ds.`t_order_item` (`item_id` INT PRIMARY KEY, `order_id` int NOT NULL, `user_id` int NOT NULL, `status` varchar(50) DEFAULT NULL); diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/data/actual/init-sql/opengauss/01-actual-init.sql b/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/data/actual/init-sql/opengauss/01-actual-init.sql index 4d3b9cf448799..62a48f4f166c1 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/data/actual/init-sql/opengauss/01-actual-init.sql +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/data/actual/init-sql/opengauss/01-actual-init.sql @@ -18,10 +18,12 @@ DROP DATABASE IF EXISTS write_ds; DROP DATABASE IF EXISTS read_ds_0; DROP DATABASE IF EXISTS 
read_ds_1; +DROP DATABASE IF EXISTS read_ds_error; CREATE DATABASE write_ds; CREATE DATABASE read_ds_0; CREATE DATABASE read_ds_1; +CREATE DATABASE read_ds_error; GRANT ALL PRIVILEGES ON DATABASE write_ds TO test_user; GRANT ALL PRIVILEGES ON DATABASE read_ds_0 TO test_user; diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/data/actual/init-sql/postgresql/01-actual-init.sql b/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/data/actual/init-sql/postgresql/01-actual-init.sql index 4d3b9cf448799..62a48f4f166c1 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/data/actual/init-sql/postgresql/01-actual-init.sql +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/data/actual/init-sql/postgresql/01-actual-init.sql @@ -18,10 +18,12 @@ DROP DATABASE IF EXISTS write_ds; DROP DATABASE IF EXISTS read_ds_0; DROP DATABASE IF EXISTS read_ds_1; +DROP DATABASE IF EXISTS read_ds_error; CREATE DATABASE write_ds; CREATE DATABASE read_ds_0; CREATE DATABASE read_ds_1; +CREATE DATABASE read_ds_error; GRANT ALL PRIVILEGES ON DATABASE write_ds TO test_user; GRANT ALL PRIVILEGES ON DATABASE read_ds_0 TO test_user; diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/proxy/conf/mysql/config-readwrite-splitting.yaml b/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/proxy/conf/mysql/config-readwrite-splitting.yaml index 66b0026b5ce16..c4e830ceb6673 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/proxy/conf/mysql/config-readwrite-splitting.yaml +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/proxy/conf/mysql/config-readwrite-splitting.yaml @@ -50,6 +50,16 @@ dataSources: maxLifetimeMilliseconds: 1800000 maxPoolSize: 2 minPoolSize: 2 + read_ds_error: + url: 
jdbc:mysql://mysql.readwrite-splitting.host:3306/read_ds_error?serverTimezone=UTC&useSSL=false&characterEncoding=utf-8 + username: test_user + password: wrong_password + connectionTimeoutMilliseconds: 5000 + idleTimeoutMilliseconds: 60000 + maxLifetimeMilliseconds: 1800000 + maxPoolSize: 1 + minPoolSize: 1 + rules: - !SINGLE tables: diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/proxy/conf/opengauss/config-readwrite-splitting.yaml b/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/proxy/conf/opengauss/config-readwrite-splitting.yaml index 9d3151e0e5d84..585ad175f8fc7 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/proxy/conf/opengauss/config-readwrite-splitting.yaml +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/proxy/conf/opengauss/config-readwrite-splitting.yaml @@ -50,6 +50,16 @@ dataSources: maxLifetimeMilliseconds: 1800000 maxPoolSize: 2 minPoolSize: 2 + read_ds_error: + url: jdbc:opengauss://opengauss.default.host:5432/read_ds_error + username: test_user + password: wrong_password + connectionTimeoutMilliseconds: 5000 + idleTimeoutMilliseconds: 60000 + maxLifetimeMilliseconds: 1800000 + maxPoolSize: 1 + minPoolSize: 1 + rules: - !SINGLE tables: diff --git a/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/proxy/conf/postgresql/config-readwrite-splitting.yaml b/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/proxy/conf/postgresql/config-readwrite-splitting.yaml index 0b1319f303527..961509c274f11 100644 --- a/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/proxy/conf/postgresql/config-readwrite-splitting.yaml +++ b/test/e2e/operation/transaction/src/test/resources/env/scenario/readwrite-splitting/proxy/conf/postgresql/config-readwrite-splitting.yaml @@ -50,6 +50,16 @@ dataSources: 
maxLifetimeMilliseconds: 1800000 maxPoolSize: 2 minPoolSize: 2 + read_ds_error: + url: jdbc:postgresql://postgresql.default.host:5432/read_ds_error + username: test_user + password: wrong_password + connectionTimeoutMilliseconds: 5000 + idleTimeoutMilliseconds: 60000 + maxLifetimeMilliseconds: 1800000 + maxPoolSize: 1 + minPoolSize: 1 + rules: - !SINGLE tables: diff --git a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/cases/value/SQLValue.java b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/cases/value/SQLValue.java index 7089c608a6b7e..667f11e32ceb0 100644 --- a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/cases/value/SQLValue.java +++ b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/cases/value/SQLValue.java @@ -46,6 +46,8 @@ public final class SQLValue { private final DateTimeFormatter timeFormatter = DateTimeFormatter.ofPattern("HH:mm:ss"); + private final DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); + private final DateTimeFormatter timestampFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.S"); public SQLValue(final String value, final String type, final int index) { @@ -84,8 +86,12 @@ private Object getValue(final String value, final String type) { case "boolean": return Boolean.parseBoolean(value); case "Date": - case "datetime": return Date.valueOf(LocalDate.parse(value, dateFormatter)); + case "datetime": + if (10 == value.length()) { + return Date.valueOf(LocalDate.parse(value, dateFormatter)); + } + return Date.valueOf(LocalDate.parse(value, dateTimeFormatter)); case "time": return Time.valueOf(LocalTime.parse(value, timeFormatter)); case "timestamp": diff --git a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/container/config/ProxyClusterContainerConfigurationFactory.java b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/container/config/ProxyClusterContainerConfigurationFactory.java index 
fde49e59bf45d..3043d48353836 100644 --- a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/container/config/ProxyClusterContainerConfigurationFactory.java +++ b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/container/config/ProxyClusterContainerConfigurationFactory.java @@ -42,7 +42,7 @@ public final class ProxyClusterContainerConfigurationFactory { * @return created instance */ public static AdaptorContainerConfiguration newInstance(final String scenario, final DatabaseType databaseType, final String adapterContainerImage) { - return new AdaptorContainerConfiguration(scenario, getMountedResources(scenario, databaseType), adapterContainerImage); + return new AdaptorContainerConfiguration(scenario, getMountedResources(scenario, databaseType), adapterContainerImage, ""); } private static Map getMountedResources(final String scenario, final DatabaseType databaseType) { diff --git a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/container/config/ProxyStandaloneContainerConfigurationFactory.java b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/container/config/ProxyStandaloneContainerConfigurationFactory.java index f34859e4a13dd..ccd889e380869 100644 --- a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/container/config/ProxyStandaloneContainerConfigurationFactory.java +++ b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/container/config/ProxyStandaloneContainerConfigurationFactory.java @@ -42,7 +42,7 @@ public final class ProxyStandaloneContainerConfigurationFactory { * @return created instance */ public static AdaptorContainerConfiguration newInstance(final String scenario, final DatabaseType databaseType) { - return new AdaptorContainerConfiguration(scenario, getMountedResources(scenario, databaseType), AdapterContainerUtils.getAdapterContainerImage()); + return new AdaptorContainerConfiguration(scenario, getMountedResources(scenario, databaseType), 
AdapterContainerUtils.getAdapterContainerImage(), ""); } private static Map getMountedResources(final String scenario, final DatabaseType databaseType) { diff --git a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/engine/type/dml/BaseDMLE2EIT.java b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/engine/type/dml/BaseDMLE2EIT.java index 6473263866ca5..e1a71a5cf6db2 100644 --- a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/engine/type/dml/BaseDMLE2EIT.java +++ b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/engine/type/dml/BaseDMLE2EIT.java @@ -86,9 +86,13 @@ void tearDown() { } protected final void assertDataSet(final AssertionTestParameter testParam, final SingleE2EContainerComposer containerComposer, final int actualUpdateCount) throws SQLException { - assertThat("Only support single table for DML.", containerComposer.getDataSet().getMetaDataList().size(), is(1)); assertThat(actualUpdateCount, is(containerComposer.getDataSet().getUpdateCount())); - DataSetMetaData expectedDataSetMetaData = containerComposer.getDataSet().getMetaDataList().get(0); + for (DataSetMetaData each : containerComposer.getDataSet().getMetaDataList()) { + assertDataSet(testParam, containerComposer, each); + } + } + + private void assertDataSet(final AssertionTestParameter testParam, final SingleE2EContainerComposer containerComposer, final DataSetMetaData expectedDataSetMetaData) throws SQLException { for (String each : InlineExpressionParserFactory.newInstance().splitAndEvaluate(expectedDataSetMetaData.getDataNodes())) { DataNode dataNode = new DataNode(each); DataSource dataSource = containerComposer.getActualDataSourceMap().get(dataNode.getDataSourceName()); @@ -148,7 +152,11 @@ private void assertValue(final AssertionTestParameter testParam, final ResultSet } else if (Arrays.asList(Types.TIME, Types.TIME_WITH_TIMEZONE).contains(actual.getMetaData().getColumnType(columnIndex))) { 
assertThat(timeFormatter.format(actual.getTime(columnIndex).toLocalTime()), is(expected)); } else if (Arrays.asList(Types.TIMESTAMP, Types.TIMESTAMP_WITH_TIMEZONE).contains(actual.getMetaData().getColumnType(columnIndex))) { - assertThat(timestampFormatter.format(actual.getTimestamp(columnIndex).toLocalDateTime()), is(expected)); + if ("Oracle".equals(testParam.getDatabaseType().getType()) && "DATE".equalsIgnoreCase(actual.getMetaData().getColumnTypeName(columnIndex))) { + assertThat(dateFormatter.format(actual.getDate(columnIndex).toLocalDate()), is(expected)); + } else { + assertThat(timestampFormatter.format(actual.getTimestamp(columnIndex).toLocalDateTime()), is(expected)); + } } else if (Types.CHAR == actual.getMetaData().getColumnType(columnIndex) && ("PostgreSQL".equals(testParam.getDatabaseType().getType()) || "openGauss".equals(testParam.getDatabaseType().getType()) || "Oracle".equals(testParam.getDatabaseType().getType()))) { diff --git a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/engine/type/dql/BaseDQLE2EIT.java b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/engine/type/dql/BaseDQLE2EIT.java index 2879c0e21fe32..fd673015fb067 100644 --- a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/engine/type/dql/BaseDQLE2EIT.java +++ b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/engine/type/dql/BaseDQLE2EIT.java @@ -35,6 +35,8 @@ import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; +import java.sql.Timestamp; +import java.time.LocalDateTime; import java.util.Collection; import java.util.HashSet; import java.util.LinkedList; @@ -191,6 +193,9 @@ private void assertRow(final ResultSet actualResultSet, final ResultSetMetaData Object expectedValue = expectedResultSet.getObject(i + 1); if (actualValue instanceof Double || actualValue instanceof Float || actualValue instanceof BigDecimal) { assertThat(Math.floor(Double.parseDouble(actualValue.toString())), 
is(Math.floor(Double.parseDouble(expectedValue.toString())))); + } else if (actualValue instanceof Timestamp && expectedValue instanceof LocalDateTime) { + // TODO Since mysql 8.0.23, for the DATETIME type, the mysql driver returns the LocalDateTime type, but the proxy returns the Timestamp type. + assertThat(((Timestamp) actualValue).toLocalDateTime(), is(expectedValue)); } else { assertThat(String.valueOf(actualValue), is(String.valueOf(expectedValue))); } diff --git a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/env/DataSetEnvironmentManager.java b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/env/DataSetEnvironmentManager.java index 57b178311522e..a611f55a036cf 100644 --- a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/env/DataSetEnvironmentManager.java +++ b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/env/DataSetEnvironmentManager.java @@ -60,7 +60,7 @@ public final class DataSetEnvironmentManager { // TODO ExecutorEngine.execute and callback - private static final ExecutorServiceManager EXECUTOR_SERVICE_MANAGER = ExecutorEngine.createExecutorEngineWithCPU().getExecutorServiceManager(); + private static final ExecutorServiceManager EXECUTOR_SERVICE_MANAGER = ExecutorEngine.createExecutorEngineWithSize(Runtime.getRuntime().availableProcessors() * 2 - 1).getExecutorServiceManager(); private static final String DATA_COLUMN_DELIMITER = ", "; @@ -98,7 +98,7 @@ public void fillData() { fillDataTasks.add(new InsertTask(dataSourceMap.get(dataNode.getDataSourceName()), insertSQL, sqlValueGroups)); } final List> futures = EXECUTOR_SERVICE_MANAGER.getExecutorService().invokeAll(fillDataTasks); - for (final Future future : futures) { + for (Future future : futures) { future.get(); } } @@ -146,8 +146,8 @@ public void cleanData() { for (Entry> entry : getDataNodeMap().entrySet()) { deleteTasks.add(new DeleteTask(dataSourceMap.get(entry.getKey()), entry.getValue())); } - final List> futures = 
EXECUTOR_SERVICE_MANAGER.getExecutorService().invokeAll(deleteTasks); - for (final Future future : futures) { + List> futures = EXECUTOR_SERVICE_MANAGER.getExecutorService().invokeAll(deleteTasks); + for (Future future : futures) { future.get(); } } @@ -220,7 +220,7 @@ public Void call() throws SQLException { for (String each : tableNames) { DatabaseType databaseType = DatabaseTypeFactory.get(connection.getMetaData().getURL()); DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData(); - try (PreparedStatement preparedStatement = connection.prepareStatement(String.format("DELETE FROM %s", dialectDatabaseMetaData.getQuoteCharacter().wrap(each)))) { + try (PreparedStatement preparedStatement = connection.prepareStatement(String.format("TRUNCATE TABLE %s", dialectDatabaseMetaData.getQuoteCharacter().wrap(each)))) { preparedStatement.execute(); } } diff --git a/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-alter-table.xml b/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-alter-table.xml index f778fd73b2d6f..8b8d8c0c7f11e 100644 --- a/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-alter-table.xml +++ b/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-alter-table.xml @@ -17,28 +17,28 @@ --> - + - + - + - + diff --git a/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-create-index.xml b/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-create-index.xml index e734412d3972e..74e191da4f25f 100644 --- a/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-create-index.xml +++ b/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-create-index.xml @@ -17,7 +17,7 @@ --> - + @@ -45,7 +45,7 @@ - + diff --git a/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-create-table.xml b/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-create-table.xml index 4452634d99db5..e0b3134714058 100644 --- 
a/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-create-table.xml +++ b/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-create-table.xml @@ -17,7 +17,7 @@ --> - + diff --git a/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-drop-index.xml b/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-drop-index.xml index c643b62bc0360..feb267d9932a8 100644 --- a/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-drop-index.xml +++ b/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-drop-index.xml @@ -45,14 +45,14 @@ - + - + diff --git a/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-drop-table.xml b/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-drop-table.xml index c44adb0159557..390e75ee9707a 100644 --- a/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-drop-table.xml +++ b/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-drop-table.xml @@ -17,7 +17,7 @@ --> - + diff --git a/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-truncate.xml b/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-truncate.xml index 2778abbb70c0b..241c12f88adb6 100644 --- a/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-truncate.xml +++ b/test/e2e/sql/src/test/resources/cases/ddl/ddl-integration-truncate.xml @@ -17,14 +17,14 @@ --> - + - + diff --git a/test/e2e/sql/src/test/resources/cases/dml/dataset/db/mysql/batch_insert_with_auto_generate_key_column.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/db/mysql/batch_insert_with_auto_generate_key_column.xml new file mode 100644 index 0000000000000..f775e4e2e7a34 --- /dev/null +++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/db/mysql/batch_insert_with_auto_generate_key_column.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + diff --git a/test/e2e/sql/src/test/resources/cases/dml/dataset/db/mysql/batch_insert_without_auto_generate_key_column.xml 
b/test/e2e/sql/src/test/resources/cases/dml/dataset/db/mysql/batch_insert_without_auto_generate_key_column.xml new file mode 100644 index 0000000000000..f775e4e2e7a34 --- /dev/null +++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/db/mysql/batch_insert_without_auto_generate_key_column.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_broadcast_rules.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/db/mysql/batch_insert_without_auto_generate_key_column_generated_key_data.xml similarity index 89% rename from test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_broadcast_rules.xml rename to test/e2e/sql/src/test/resources/cases/dml/dataset/db/mysql/batch_insert_without_auto_generate_key_column_generated_key_data.xml index 2bd51c333760a..36b575dba9d02 100644 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_broadcast_rules.xml +++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/db/mysql/batch_insert_without_auto_generate_key_column_generated_key_data.xml @@ -17,6 +17,8 @@ - + + + diff --git a/test/e2e/sql/src/test/resources/cases/dml/dataset/dbtbl_with_readwrite_splitting/mysql/batch_insert_with_auto_generate_key_column.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/dbtbl_with_readwrite_splitting/mysql/batch_insert_with_auto_generate_key_column.xml new file mode 100644 index 0000000000000..ff37134dd090a --- /dev/null +++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/dbtbl_with_readwrite_splitting/mysql/batch_insert_with_auto_generate_key_column.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + diff --git a/test/e2e/sql/src/test/resources/cases/dml/dataset/dbtbl_with_readwrite_splitting/mysql/batch_insert_without_auto_generate_key_column.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/dbtbl_with_readwrite_splitting/mysql/batch_insert_without_auto_generate_key_column.xml 
new file mode 100644 index 0000000000000..ff37134dd090a --- /dev/null +++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/dbtbl_with_readwrite_splitting/mysql/batch_insert_without_auto_generate_key_column.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_discovery_heartbeats.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/dbtbl_with_readwrite_splitting/mysql/batch_insert_without_auto_generate_key_column_generated_key_data.xml similarity index 89% rename from test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_discovery_heartbeats.xml rename to test/e2e/sql/src/test/resources/cases/dml/dataset/dbtbl_with_readwrite_splitting/mysql/batch_insert_without_auto_generate_key_column_generated_key_data.xml index c6634b7b3b2c6..36b575dba9d02 100644 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_discovery_heartbeats.xml +++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/dbtbl_with_readwrite_splitting/mysql/batch_insert_without_auto_generate_key_column_generated_key_data.xml @@ -17,7 +17,8 @@ - - + + + diff --git a/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/batch_insert_into_user_table.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/batch_insert_into_user_table.xml index 0f7b3818e3ec1..c9d0c408b9b09 100644 --- a/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/batch_insert_into_user_table.xml +++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/batch_insert_into_user_table.xml @@ -22,8 +22,8 @@ - - + + diff --git a/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/delete_user_table.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/delete_user_table.xml index 51c6f01f8a44f..99cd87c70e00a 100644 --- a/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/delete_user_table.xml +++ 
b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/delete_user_table.xml @@ -22,8 +22,8 @@ - - + + diff --git a/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table.xml index cebea5bbaa57c..fa6008618e056 100644 --- a/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table.xml +++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table.xml @@ -22,8 +22,8 @@ - - + + diff --git a/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_by_pwd.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_by_pwd.xml index 23c8099caffcf..6c5117913ab49 100644 --- a/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_by_pwd.xml +++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_by_pwd.xml @@ -22,8 +22,8 @@ - - + + diff --git a/test/e2e/sql/src/test/resources/cases/dal/dataset/sharding_governance/mysql/show_databases.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/passthrough/insert_date_values.xml similarity index 66% rename from test/e2e/sql/src/test/resources/cases/dal/dataset/sharding_governance/mysql/show_databases.xml rename to test/e2e/sql/src/test/resources/cases/dml/dataset/passthrough/insert_date_values.xml index 1d98c0a464779..bdea0fbb7d3e8 100644 --- a/test/e2e/sql/src/test/resources/cases/dal/dataset/sharding_governance/mysql/show_databases.xml +++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/passthrough/insert_date_values.xml @@ -15,14 +15,12 @@ ~ limitations under the License. 
--> - - - + + + + + - - - - - - + + diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/count_single_table.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/passthrough/update_date_values.xml similarity index 71% rename from test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/count_single_table.xml rename to test/e2e/sql/src/test/resources/cases/dml/dataset/passthrough/update_date_values.xml index e1101e51250fe..e75a6aeab4130 100644 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/count_single_table.xml +++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/passthrough/update_date_values.xml @@ -15,10 +15,11 @@ ~ limitations under the License. --> - - - - + + + + + - + diff --git a/test/e2e/sql/src/test/resources/cases/dml/dataset/tbl/mysql/batch_insert_with_auto_generate_key_column.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/tbl/mysql/batch_insert_with_auto_generate_key_column.xml new file mode 100644 index 0000000000000..667906f34b154 --- /dev/null +++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/tbl/mysql/batch_insert_with_auto_generate_key_column.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + diff --git a/test/e2e/sql/src/test/resources/cases/dml/dataset/tbl/mysql/batch_insert_without_auto_generate_key_column.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/tbl/mysql/batch_insert_without_auto_generate_key_column.xml new file mode 100644 index 0000000000000..667906f34b154 --- /dev/null +++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/tbl/mysql/batch_insert_without_auto_generate_key_column.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_discovery_types.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/tbl/mysql/batch_insert_without_auto_generate_key_column_generated_key_data.xml similarity index 89% rename from 
test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_discovery_types.xml rename to test/e2e/sql/src/test/resources/cases/dml/dataset/tbl/mysql/batch_insert_without_auto_generate_key_column_generated_key_data.xml index 8af51b212cca3..36b575dba9d02 100644 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_discovery_types.xml +++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/tbl/mysql/batch_insert_without_auto_generate_key_column_generated_key_data.xml @@ -17,8 +17,8 @@ - - - + + + diff --git a/test/e2e/sql/src/test/resources/cases/dml/dml-integration-delete.xml b/test/e2e/sql/src/test/resources/cases/dml/dml-integration-delete.xml index a40382070ae89..937057bb0af44 100644 --- a/test/e2e/sql/src/test/resources/cases/dml/dml-integration-delete.xml +++ b/test/e2e/sql/src/test/resources/cases/dml/dml-integration-delete.xml @@ -17,11 +17,11 @@ --> - + - + @@ -38,16 +38,16 @@ - + - + - + diff --git a/test/e2e/sql/src/test/resources/cases/dml/dml-integration-insert.xml b/test/e2e/sql/src/test/resources/cases/dml/dml-integration-insert.xml index 6baa6f398e666..a922c24f81984 100644 --- a/test/e2e/sql/src/test/resources/cases/dml/dml-integration-insert.xml +++ b/test/e2e/sql/src/test/resources/cases/dml/dml-integration-insert.xml @@ -17,18 +17,18 @@ --> - + - + - + @@ -37,37 +37,37 @@ - + - + - + - + - + - + - + @@ -108,15 +108,15 @@ - + - + - + @@ -136,8 +136,16 @@ - - + + + + + + + + + + @@ -211,6 +219,10 @@ + + + + diff --git a/test/e2e/sql/src/test/resources/cases/dml/dml-integration-update.xml b/test/e2e/sql/src/test/resources/cases/dml/dml-integration-update.xml index dcef6d5e8e971..d1345c0073619 100644 --- a/test/e2e/sql/src/test/resources/cases/dml/dml-integration-update.xml +++ b/test/e2e/sql/src/test/resources/cases/dml/dml-integration-update.xml @@ -17,31 +17,31 @@ --> - + - + - + - + - + - + - + @@ -102,4 +102,8 @@ + + + + diff --git 
a/test/e2e/sql/src/test/resources/cases/dql/dql-integration-select-sub-query.xml b/test/e2e/sql/src/test/resources/cases/dql/dql-integration-select-sub-query.xml index 30f31e54e0cb1..9749b6297bbc1 100644 --- a/test/e2e/sql/src/test/resources/cases/dql/dql-integration-select-sub-query.xml +++ b/test/e2e/sql/src/test/resources/cases/dql/dql-integration-select-sub-query.xml @@ -72,6 +72,11 @@ + + + + diff --git a/test/e2e/sql/src/test/resources/cases/dql/dql-integration-select.xml b/test/e2e/sql/src/test/resources/cases/dql/dql-integration-select.xml index 2422e36649e09..b37cc0bacc654 100644 --- a/test/e2e/sql/src/test/resources/cases/dql/dql-integration-select.xml +++ b/test/e2e/sql/src/test/resources/cases/dql/dql-integration-select.xml @@ -241,6 +241,11 @@ + + + + diff --git a/test/e2e/sql/src/test/resources/cases/ral/dataset/empty_rules/mysql/show_storage_units.xml b/test/e2e/sql/src/test/resources/cases/ral/dataset/empty_rules/mysql/show_storage_units.xml index ad81784142282..1726532a2f06c 100644 --- a/test/e2e/sql/src/test/resources/cases/ral/dataset/empty_rules/mysql/show_storage_units.xml +++ b/test/e2e/sql/src/test/resources/cases/ral/dataset/empty_rules/mysql/show_storage_units.xml @@ -30,25 +30,25 @@ - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + diff --git a/test/e2e/sql/src/test/resources/cases/ral/ral-integration-show.xml b/test/e2e/sql/src/test/resources/cases/ral/ral-integration-show.xml index 586591b5b41db..8573270398270 100644 --- a/test/e2e/sql/src/test/resources/cases/ral/ral-integration-show.xml +++ b/test/e2e/sql/src/test/resources/cases/ral/ral-integration-show.xml @@ -40,11 +40,11 @@ - + - + diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/db/mysql/show_storage_units.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/db/mysql/show_storage_units.xml index d671a1f2e511c..a288cf4bf6dca 100644 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/db/mysql/show_storage_units.xml +++ 
b/test/e2e/sql/src/test/resources/cases/rql/dataset/db/mysql/show_storage_units.xml @@ -30,14 +30,14 @@ - - - - - - - - - - + + + + + + + + + + diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/dbtbl_with_readwrite_splitting/mysql/show_storage_units.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/dbtbl_with_readwrite_splitting/mysql/show_storage_units.xml index 007441d6e7aee..9dd4e5f876db8 100644 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/dbtbl_with_readwrite_splitting/mysql/show_storage_units.xml +++ b/test/e2e/sql/src/test/resources/cases/rql/dataset/dbtbl_with_readwrite_splitting/mysql/show_storage_units.xml @@ -30,24 +30,24 @@ - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/dbtbl_with_readwrite_splitting_and_encrypt/mysql/show_storage_units.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/dbtbl_with_readwrite_splitting_and_encrypt/mysql/show_storage_units.xml index 4da3fb386da16..aed4c1331a3d7 100644 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/dbtbl_with_readwrite_splitting_and_encrypt/mysql/show_storage_units.xml +++ b/test/e2e/sql/src/test/resources/cases/rql/dataset/dbtbl_with_readwrite_splitting_and_encrypt/mysql/show_storage_units.xml @@ -30,24 +30,24 @@ - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt/mysql/show_storage_units.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt/mysql/show_storage_units.xml index 5bd321913fe44..4d0a8d294d928 100644 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt/mysql/show_storage_units.xml +++ b/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt/mysql/show_storage_units.xml @@ -30,5 +30,5 @@ - + diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt/show_encrypt_rule.xml 
b/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt/show_encrypt_rule.xml index a142eba7481ac..b1b7460494b57 100644 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt/show_encrypt_rule.xml +++ b/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt/show_encrypt_rule.xml @@ -32,5 +32,5 @@ - + diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt/show_encrypt_rules.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt/show_encrypt_rules.xml index 2fc8373933730..488e8d0e859ef 100644 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt/show_encrypt_rules.xml +++ b/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt/show_encrypt_rules.xml @@ -32,9 +32,9 @@ - + - + diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt_and_readwrite_splitting/mysql/show_storage_units.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt_and_readwrite_splitting/mysql/show_storage_units.xml index 17a72dba45d6e..358d937e1b59b 100644 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt_and_readwrite_splitting/mysql/show_storage_units.xml +++ b/test/e2e/sql/src/test/resources/cases/rql/dataset/encrypt_and_readwrite_splitting/mysql/show_storage_units.xml @@ -30,6 +30,6 @@ - - + + diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/readwrite_splitting/mysql/show_storage_units.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/readwrite_splitting/mysql/show_storage_units.xml index 144bae9a3114e..7f3e41dbffc8e 100644 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/readwrite_splitting/mysql/show_storage_units.xml +++ b/test/e2e/sql/src/test/resources/cases/rql/dataset/readwrite_splitting/mysql/show_storage_units.xml @@ -30,7 +30,7 @@ - - - + + + diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_and_encrypt/mysql/show_storage_units.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_and_encrypt/mysql/show_storage_units.xml index 
311508a5540eb..44aa27da856e1 100644 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_and_encrypt/mysql/show_storage_units.xml +++ b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_and_encrypt/mysql/show_storage_units.xml @@ -30,14 +30,14 @@ - - - - - - - - - - + + + + + + + + + + diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_default_sharding_strategy.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_default_sharding_strategy.xml deleted file mode 100644 index 0bbc14403db9d..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_default_sharding_strategy.xml +++ /dev/null @@ -1,29 +0,0 @@ - - - - - - - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_discovery_rules.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_discovery_rules.xml deleted file mode 100644 index 7d98737ac413d..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_discovery_rules.xml +++ /dev/null @@ -1,26 +0,0 @@ - - - - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_resources.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_resources.xml deleted file mode 100644 index 08bb594f641b5..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_resources.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_algorithms.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_algorithms.xml deleted file mode 100644 index 8af51b212cca3..0000000000000 --- 
a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_algorithms.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_auditors.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_auditors.xml deleted file mode 100644 index 8af51b212cca3..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_auditors.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_binging_table_rules.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_binging_table_rules.xml deleted file mode 100644 index 856326ce87cb0..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_binging_table_rules.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_generators.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_generators.xml deleted file mode 100644 index 8af51b212cca3..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_generators.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_scaling_rules.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_scaling_rules.xml deleted file mode 100644 index f0ad119ee0ac4..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_scaling_rules.xml +++ /dev/null @@ -1,27 +0,0 @@ - - - - - - - - - - - - diff --git 
a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_algorithms.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_algorithms.xml deleted file mode 100644 index 280c500503999..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_algorithms.xml +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_auditor.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_auditor.xml deleted file mode 100644 index 280c500503999..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_auditor.xml +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_generator.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_generator.xml deleted file mode 100644 index 280c500503999..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_generator.xml +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_nodes.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_nodes.xml deleted file mode 100644 index 7bc74891b43dd..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_nodes.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_rule.xml 
b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_rule.xml deleted file mode 100644 index 73585dd4726fe..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_rule.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_rules.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_rules.xml deleted file mode 100644 index adc9031f54002..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_sharding_table_rules.xml +++ /dev/null @@ -1,39 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_single_table.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_single_table.xml deleted file mode 100644 index 5305511fac439..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_single_table.xml +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_single_table_rules.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_single_table_rules.xml deleted file mode 100644 index 03cf946714156..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_single_table_rules.xml +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_single_tables.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_single_tables.xml deleted file mode 100644 index 5305511fac439..0000000000000 --- 
a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_single_tables.xml +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_unused_sharding_algorithms.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_unused_sharding_algorithms.xml deleted file mode 100644 index dd6708874040a..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_unused_sharding_algorithms.xml +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_unused_sharding_auditors.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_unused_sharding_auditors.xml deleted file mode 100644 index 8af51b212cca3..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_unused_sharding_auditors.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_unused_sharding_generators.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_unused_sharding_generators.xml deleted file mode 100644 index 8af51b212cca3..0000000000000 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/show_unused_sharding_generators.xml +++ /dev/null @@ -1,24 +0,0 @@ - - - - - - - - - diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/tbl/mysql/show_storage_units.xml b/test/e2e/sql/src/test/resources/cases/rql/dataset/tbl/mysql/show_storage_units.xml index ae81c8e09368a..9d3a10e7baa81 100644 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/tbl/mysql/show_storage_units.xml +++ b/test/e2e/sql/src/test/resources/cases/rql/dataset/tbl/mysql/show_storage_units.xml @@ -30,5 +30,5 @@ - + diff --git 
a/test/e2e/sql/src/test/resources/env/scenario/db/data/actual/init-sql/mysql/01-actual-init.sql b/test/e2e/sql/src/test/resources/env/scenario/db/data/actual/init-sql/mysql/01-actual-init.sql index ba743d2f80dd7..34cd6cc32ba55 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/db/data/actual/init-sql/mysql/01-actual-init.sql +++ b/test/e2e/sql/src/test/resources/env/scenario/db/data/actual/init-sql/mysql/01-actual-init.sql @@ -46,7 +46,7 @@ CREATE TABLE db_0.t_user (user_id INT PRIMARY KEY, user_name VARCHAR(50) NOT NUL CREATE TABLE db_0.t_product (product_id INT PRIMARY KEY, product_name VARCHAR(50) NOT NULL, category_id INT NOT NULL, price DECIMAL NOT NULL, status VARCHAR(50) NOT NULL, creation_date DATE NOT NULL); CREATE TABLE db_0.t_product_category (category_id INT PRIMARY KEY, category_name VARCHAR(50) NOT NULL, parent_id INT NOT NULL, level TINYINT NOT NULL, creation_date DATE NOT NULL); CREATE TABLE db_0.t_country (country_id SMALLINT PRIMARY KEY, country_name VARCHAR(50), continent_name VARCHAR(50), creation_date DATE NOT NULL); -CREATE TABLE db_0.t_single_table (single_id INT NOT NULL, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id), UNIQUE KEY (id, status)); +CREATE TABLE db_0.t_single_table (single_id INT NOT NULL AUTO_INCREMENT, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id), UNIQUE KEY (id, status)); CREATE TABLE db_0.t_broadcast_table (id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (id)); CREATE INDEX order_index_t_order ON db_0.t_order (order_id); diff --git a/test/e2e/sql/src/test/resources/env/scenario/db/data/expected/init-sql/mysql/01-expected-init.sql b/test/e2e/sql/src/test/resources/env/scenario/db/data/expected/init-sql/mysql/01-expected-init.sql index f0b1435515d87..4781aa1f2c9c2 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/db/data/expected/init-sql/mysql/01-expected-init.sql +++ b/test/e2e/sql/src/test/resources/env/scenario/db/data/expected/init-sql/mysql/01-expected-init.sql 
@@ -34,7 +34,7 @@ CREATE VIEW expected_dataset.t_order_subquery_view AS SELECT * FROM expected_dat CREATE VIEW expected_dataset.t_order_aggregation_view AS SELECT MAX(p.price) AS max_price, MIN(p.price) AS min_price, SUM(p.price) AS sum_price, AVG(p.price) AS avg_price, COUNT(1) AS count FROM expected_dataset.t_order o INNER JOIN expected_dataset.t_order_item i ON o.order_id = i.order_id INNER JOIN expected_dataset.t_product p ON i.product_id = p.product_id GROUP BY o.order_id HAVING SUM(p.price) > 10000 ORDER BY max_price; CREATE VIEW expected_dataset.t_order_union_view AS SELECT * FROM expected_dataset.t_order WHERE order_id > 2000 UNION SELECT * FROM expected_dataset.t_order WHERE order_id > 1500; -- TODO replace these tables with standard tables -CREATE TABLE expected_dataset.t_single_table (single_id INT NOT NULL, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id), UNIQUE KEY (id, status)); +CREATE TABLE expected_dataset.t_single_table (single_id INT NOT NULL AUTO_INCREMENT, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id), UNIQUE KEY (id, status)); CREATE TABLE expected_dataset.t_broadcast_table (id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (id)); CREATE INDEX order_index_t_order ON expected_dataset.t_order (order_id); diff --git a/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting/data/actual/init-sql/mysql/01-actual-init.sql b/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting/data/actual/init-sql/mysql/01-actual-init.sql index 3b969e9725e36..d2bee271c09fd 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting/data/actual/init-sql/mysql/01-actual-init.sql +++ b/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting/data/actual/init-sql/mysql/01-actual-init.sql @@ -85,7 +85,7 @@ CREATE TABLE write_ds_0.t_product (product_id INT PRIMARY KEY, product_name VARC CREATE TABLE write_ds_0.t_product_category( category_id 
INT PRIMARY KEY, category_name VARCHAR(50) NOT NULL, parent_id INT NOT NULL, level INT NOT NULL, creation_date DATE NOT NULL); CREATE TABLE write_ds_0.t_country (country_id INT PRIMARY KEY, country_name VARCHAR(50), continent_name VARCHAR(50), creation_date DATE NOT NULL); CREATE TABLE write_ds_0.t_broadcast_table (id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (id)); -CREATE TABLE write_ds_0.t_single_table (single_id INT NOT NULL, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); +CREATE TABLE write_ds_0.t_single_table (single_id INT NOT NULL AUTO_INCREMENT, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); CREATE INDEX order_index_t_order_0 ON write_ds_0.t_order_0 (order_id); CREATE INDEX order_index_t_order_1 ON write_ds_0.t_order_1 (order_id); CREATE INDEX order_index_t_order_2 ON write_ds_0.t_order_2 (order_id); @@ -430,7 +430,7 @@ CREATE TABLE read_ds_0.t_product (product_id INT PRIMARY KEY, product_name VARCH CREATE TABLE read_ds_0.t_product_category( category_id INT PRIMARY KEY, category_name VARCHAR(50) NOT NULL, parent_id INT NOT NULL, level INT NOT NULL, creation_date DATE NOT NULL); CREATE TABLE read_ds_0.t_country (country_id INT PRIMARY KEY, country_name VARCHAR(50), continent_name VARCHAR(50), creation_date DATE NOT NULL); CREATE TABLE read_ds_0.t_broadcast_table (id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (id)); -CREATE TABLE read_ds_0.t_single_table (single_id INT NOT NULL, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); +CREATE TABLE read_ds_0.t_single_table (single_id INT NOT NULL AUTO_INCREMENT, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); CREATE INDEX order_index_t_order_0 ON read_ds_0.t_order_0 (order_id); CREATE INDEX order_index_t_order_1 ON read_ds_0.t_order_1 (order_id); CREATE INDEX order_index_t_order_2 ON read_ds_0.t_order_2 (order_id); diff --git 
a/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting/data/expected/init-sql/mysql/01-expected-init.sql b/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting/data/expected/init-sql/mysql/01-expected-init.sql index a027cba51a737..844d5eb282b07 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting/data/expected/init-sql/mysql/01-expected-init.sql +++ b/test/e2e/sql/src/test/resources/env/scenario/dbtbl_with_readwrite_splitting/data/expected/init-sql/mysql/01-expected-init.sql @@ -30,7 +30,7 @@ CREATE TABLE write_dataset.t_product_detail (detail_id INT PRIMARY KEY, product_ CREATE TABLE write_dataset.t_product_category( category_id INT PRIMARY KEY, category_name VARCHAR(50) NOT NULL, parent_id INT NOT NULL, level INT NOT NULL, creation_date DATE NOT NULL); CREATE TABLE write_dataset.t_country (country_id INT PRIMARY KEY, country_name VARCHAR(50), continent_name VARCHAR(50), creation_date DATE NOT NULL); -- TODO replace these tables with standard tables -CREATE TABLE write_dataset.t_single_table (single_id INT NOT NULL, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); +CREATE TABLE write_dataset.t_single_table (single_id INT NOT NULL AUTO_INCREMENT, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); CREATE TABLE write_dataset.t_broadcast_table (id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (id)); CREATE INDEX order_index_t_order ON write_dataset.t_order (order_id); @@ -48,7 +48,7 @@ CREATE TABLE read_dataset.t_product_detail (detail_id INT PRIMARY KEY, product_i CREATE TABLE read_dataset.t_product_category( category_id INT PRIMARY KEY, category_name VARCHAR(50) NOT NULL, parent_id INT NOT NULL, level INT NOT NULL, creation_date DATE NOT NULL); CREATE TABLE read_dataset.t_country (country_id INT PRIMARY KEY, country_name VARCHAR(50), continent_name VARCHAR(50), creation_date DATE NOT NULL); -- TODO replace these tables with standard tables 
-CREATE TABLE read_dataset.t_single_table (single_id INT NOT NULL, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); +CREATE TABLE read_dataset.t_single_table (single_id INT NOT NULL AUTO_INCREMENT, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); CREATE TABLE read_dataset.t_broadcast_table (id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (id)); CREATE INDEX order_index_t_order ON read_dataset.t_order (order_id); diff --git a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/dataset.xml b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/dataset.xml index 06f0c3e633916..20e20ee30d661 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/dataset.xml +++ b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/dataset.xml @@ -16,14 +16,30 @@ --> + + + + + + + + + + + + + + + + - - + + @@ -43,10 +59,130 @@ - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/h2/01-actual-init.sql b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/h2/01-actual-init.sql index 0225d6915fd5d..6cbccd24e9932 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/h2/01-actual-init.sql +++ b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/h2/01-actual-init.sql @@ -24,9 +24,9 @@ DROP TABLE IF EXISTS t_merchant; CREATE TABLE t_order (order_id INT PRIMARY KEY, user_id INT NOT NULL, status VARCHAR(50) NOT NULL, merchant_id INT, remark VARCHAR(50) NOT NULL, creation_date DATE NOT NULL); CREATE TABLE t_order_item (item_id INT PRIMARY KEY, order_id INT NOT NULL, user_id INT NOT NULL, product_id INT NOT NULL, quantity INT NOT NULL, 
creation_date DATE NOT NULL); -CREATE TABLE t_user (user_id INT PRIMARY KEY, user_name_cipher VARCHAR(50) NOT NULL, user_name_like VARCHAR(50) NOT NULL, password_cipher VARCHAR(50) NOT NULL, email_cipher VARCHAR(50) NOT NULL, telephone_cipher CHAR(50) NOT NULL, telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL); +CREATE TABLE t_user (user_id INT PRIMARY KEY, user_name_cipher VARCHAR(50) NOT NULL, user_name_like VARCHAR(50) NOT NULL, password_cipher VARCHAR(50) NOT NULL, email_cipher VARCHAR(50) NOT NULL, user_telephone_cipher CHAR(50) NOT NULL, user_telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL); CREATE TABLE t_user_item (item_id INT NOT NULL, user_id INT NOT NULL, status VARCHAR(45) NULL, creation_date DATE, PRIMARY KEY (item_id)); CREATE TABLE t_single_table (single_id INT NOT NULL, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); -CREATE TABLE t_merchant (merchant_id INT PRIMARY KEY, country_id SMALLINT NOT NULL, merchant_name VARCHAR(50) NOT NULL, business_code_cipher VARCHAR(50) NOT NULL, business_code_like VARCHAR(50) NOT NULL, telephone_cipher CHAR(50) NOT NULL, telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL); +CREATE TABLE t_merchant (merchant_id INT PRIMARY KEY, country_id SMALLINT NOT NULL, merchant_name VARCHAR(50) NOT NULL, business_code_cipher VARCHAR(50) NOT NULL, business_code_like VARCHAR(50) NOT NULL, merchant_telephone_cipher CHAR(50) NOT NULL, merchant_telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL); CREATE INDEX user_index_t_user ON t_user (user_id); diff --git a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/mysql/01-actual-init.sql b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/mysql/01-actual-init.sql index 95537a32f624c..149b1e0b1f36c 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/mysql/01-actual-init.sql +++ 
b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/mysql/01-actual-init.sql @@ -23,9 +23,9 @@ CREATE DATABASE encrypt; CREATE TABLE encrypt.t_order (order_id INT PRIMARY KEY, user_id INT NOT NULL, status VARCHAR(50) NOT NULL, merchant_id INT, remark VARCHAR(50) NOT NULL, creation_date DATE NOT NULL); CREATE TABLE encrypt.t_order_item (item_id INT PRIMARY KEY, order_id INT NOT NULL, user_id INT NOT NULL, product_id INT NOT NULL, quantity INT NOT NULL, creation_date DATE NOT NULL); -CREATE TABLE encrypt.t_user (user_id INT PRIMARY KEY, user_name_cipher VARCHAR(50) NOT NULL, user_name_like VARCHAR(50) NOT NULL, password_cipher VARCHAR(50) NOT NULL, email_cipher VARCHAR(50) NOT NULL, telephone_cipher CHAR(50) NOT NULL, telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL); +CREATE TABLE encrypt.t_user (user_id INT PRIMARY KEY, user_name_cipher VARCHAR(50) NOT NULL, user_name_like VARCHAR(50) NOT NULL, password_cipher VARCHAR(50) NOT NULL, email_cipher VARCHAR(50) NOT NULL, user_telephone_cipher CHAR(50) NOT NULL, user_telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL); CREATE TABLE encrypt.t_user_item (item_id INT NOT NULL, user_id INT NOT NULL, status VARCHAR(45) NULL, creation_date DATE, PRIMARY KEY (item_id)); CREATE TABLE encrypt.t_single_table (single_id INT NOT NULL, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); -CREATE TABLE encrypt.t_merchant (merchant_id INT PRIMARY KEY, country_id SMALLINT NOT NULL, merchant_name VARCHAR(50) NOT NULL, business_code_cipher VARCHAR(50) NOT NULL, business_code_like VARCHAR(50) NOT NULL, telephone_cipher CHAR(50) NOT NULL, telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL); +CREATE TABLE encrypt.t_merchant (merchant_id INT PRIMARY KEY, country_id SMALLINT NOT NULL, merchant_name VARCHAR(50) NOT NULL, business_code_cipher VARCHAR(50) NOT NULL, business_code_like VARCHAR(50) NOT NULL, merchant_telephone_cipher CHAR(50) NOT NULL, merchant_telephone_like 
CHAR(11) NOT NULL, creation_date DATE NOT NULL); CREATE INDEX user_index_t_user ON encrypt.t_user (user_id); diff --git a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/opengauss/01-actual-init.sql b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/opengauss/01-actual-init.sql index d4cfdcc338b7a..b2042b2df5243 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/opengauss/01-actual-init.sql +++ b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/opengauss/01-actual-init.sql @@ -30,9 +30,9 @@ DROP TABLE IF EXISTS t_merchant; CREATE TABLE t_order (order_id INT PRIMARY KEY, user_id INT NOT NULL, status VARCHAR(50) NOT NULL, merchant_id INT, remark VARCHAR(50) NOT NULL, creation_date DATE NOT NULL); CREATE TABLE t_order_item (item_id INT PRIMARY KEY, order_id INT NOT NULL, user_id INT NOT NULL, product_id INT NOT NULL, quantity INT NOT NULL, creation_date DATE NOT NULL); -CREATE TABLE t_user (user_id INT PRIMARY KEY, user_name_cipher VARCHAR(50) NOT NULL, user_name_like VARCHAR(50) NOT NULL, password_cipher VARCHAR(50) NOT NULL, email_cipher VARCHAR(50) NOT NULL, telephone_cipher CHAR(50) NOT NULL, telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL); +CREATE TABLE t_user (user_id INT PRIMARY KEY, user_name_cipher VARCHAR(50) NOT NULL, user_name_like VARCHAR(50) NOT NULL, password_cipher VARCHAR(50) NOT NULL, email_cipher VARCHAR(50) NOT NULL, user_telephone_cipher CHAR(50) NOT NULL, user_telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL); CREATE TABLE t_user_item (item_id INT NOT NULL, user_id INT NOT NULL, status VARCHAR(45) NULL, creation_date DATE, PRIMARY KEY (item_id)); CREATE TABLE t_single_table (single_id INT NOT NULL, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); -CREATE TABLE t_merchant (merchant_id INT PRIMARY KEY, country_id SMALLINT NOT NULL, merchant_name VARCHAR(50) NOT NULL, business_code_cipher 
VARCHAR(50) NOT NULL, business_code_like VARCHAR(50) NOT NULL, telephone_cipher CHAR(50) NOT NULL, telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL); +CREATE TABLE t_merchant (merchant_id INT PRIMARY KEY, country_id SMALLINT NOT NULL, merchant_name VARCHAR(50) NOT NULL, business_code_cipher VARCHAR(50) NOT NULL, business_code_like VARCHAR(50) NOT NULL, merchant_telephone_cipher CHAR(50) NOT NULL, merchant_telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL); CREATE INDEX user_index_t_user ON t_user (user_id); diff --git a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/postgresql/01-actual-init.sql b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/postgresql/01-actual-init.sql index d4cfdcc338b7a..b2042b2df5243 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/postgresql/01-actual-init.sql +++ b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/actual/init-sql/postgresql/01-actual-init.sql @@ -30,9 +30,9 @@ DROP TABLE IF EXISTS t_merchant; CREATE TABLE t_order (order_id INT PRIMARY KEY, user_id INT NOT NULL, status VARCHAR(50) NOT NULL, merchant_id INT, remark VARCHAR(50) NOT NULL, creation_date DATE NOT NULL); CREATE TABLE t_order_item (item_id INT PRIMARY KEY, order_id INT NOT NULL, user_id INT NOT NULL, product_id INT NOT NULL, quantity INT NOT NULL, creation_date DATE NOT NULL); -CREATE TABLE t_user (user_id INT PRIMARY KEY, user_name_cipher VARCHAR(50) NOT NULL, user_name_like VARCHAR(50) NOT NULL, password_cipher VARCHAR(50) NOT NULL, email_cipher VARCHAR(50) NOT NULL, telephone_cipher CHAR(50) NOT NULL, telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL); +CREATE TABLE t_user (user_id INT PRIMARY KEY, user_name_cipher VARCHAR(50) NOT NULL, user_name_like VARCHAR(50) NOT NULL, password_cipher VARCHAR(50) NOT NULL, email_cipher VARCHAR(50) NOT NULL, user_telephone_cipher CHAR(50) NOT NULL, user_telephone_like CHAR(11) NOT NULL, 
creation_date DATE NOT NULL); CREATE TABLE t_user_item (item_id INT NOT NULL, user_id INT NOT NULL, status VARCHAR(45) NULL, creation_date DATE, PRIMARY KEY (item_id)); CREATE TABLE t_single_table (single_id INT NOT NULL, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); -CREATE TABLE t_merchant (merchant_id INT PRIMARY KEY, country_id SMALLINT NOT NULL, merchant_name VARCHAR(50) NOT NULL, business_code_cipher VARCHAR(50) NOT NULL, business_code_like VARCHAR(50) NOT NULL, telephone_cipher CHAR(50) NOT NULL, telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL); +CREATE TABLE t_merchant (merchant_id INT PRIMARY KEY, country_id SMALLINT NOT NULL, merchant_name VARCHAR(50) NOT NULL, business_code_cipher VARCHAR(50) NOT NULL, business_code_like VARCHAR(50) NOT NULL, merchant_telephone_cipher CHAR(50) NOT NULL, merchant_telephone_like CHAR(11) NOT NULL, creation_date DATE NOT NULL); CREATE INDEX user_index_t_user ON t_user (user_id); diff --git a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/expected/dataset.xml b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/expected/dataset.xml index fa4fb7461cd32..38433ca61067c 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/expected/dataset.xml +++ b/test/e2e/sql/src/test/resources/env/scenario/encrypt/data/expected/dataset.xml @@ -16,6 +16,22 @@ --> + + + + + + + + + + + + + + + + @@ -43,6 +59,126 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/mysql/config-encrypt.yaml b/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/mysql/config-encrypt.yaml index d2f27c8fb8a53..1a6b944972f75 100644 --- 
a/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/mysql/config-encrypt.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/mysql/config-encrypt.yaml @@ -62,10 +62,10 @@ rules: encryptorName: aes_encryptor telephone: cipher: - name: telephone_cipher + name: user_telephone_cipher encryptorName: aes_encryptor likeQuery: - name: telephone_like + name: user_telephone_like encryptorName: like_encryptor t_user_details: columns: @@ -88,8 +88,8 @@ rules: encryptorName: like_encryptor telephone: cipher: - name: telephone_cipher + name: merchant_telephone_cipher encryptorName: aes_encryptor likeQuery: - name: telephone_like + name: merchant_telephone_like encryptorName: like_encryptor diff --git a/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/opengauss/config-encrypt.yaml b/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/opengauss/config-encrypt.yaml index d04fe06b1cf40..89a633b9d4e9b 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/opengauss/config-encrypt.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/opengauss/config-encrypt.yaml @@ -62,10 +62,10 @@ rules: encryptorName: aes_encryptor telephone: cipher: - name: telephone_cipher + name: user_telephone_cipher encryptorName: aes_encryptor likeQuery: - name: telephone_like + name: user_telephone_like encryptorName: like_encryptor t_user_details: columns: @@ -88,8 +88,8 @@ rules: encryptorName: like_encryptor telephone: cipher: - name: telephone_cipher + name: merchant_telephone_cipher encryptorName: aes_encryptor likeQuery: - name: telephone_like + name: merchant_telephone_like encryptorName: like_encryptor diff --git a/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/postgresql/config-encrypt.yaml b/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/postgresql/config-encrypt.yaml index 48aed704d13fe..9cc2b3d538277 100644 --- 
a/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/postgresql/config-encrypt.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/encrypt/proxy/conf/postgresql/config-encrypt.yaml @@ -62,10 +62,10 @@ rules: encryptorName: aes_encryptor telephone: cipher: - name: telephone_cipher + name: user_telephone_cipher encryptorName: aes_encryptor likeQuery: - name: telephone_like + name: user_telephone_like encryptorName: like_encryptor t_user_details: columns: @@ -88,8 +88,8 @@ rules: encryptorName: like_encryptor telephone: cipher: - name: telephone_cipher + name: merchant_telephone_cipher encryptorName: aes_encryptor likeQuery: - name: telephone_like + name: merchant_telephone_like encryptorName: like_encryptor diff --git a/test/e2e/sql/src/test/resources/env/scenario/encrypt/rules.yaml b/test/e2e/sql/src/test/resources/env/scenario/encrypt/rules.yaml index 07222031cce92..69a2ac6a6d94c 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/encrypt/rules.yaml +++ b/test/e2e/sql/src/test/resources/env/scenario/encrypt/rules.yaml @@ -49,10 +49,10 @@ rules: encryptorName: aes_encryptor telephone: cipher: - name: telephone_cipher + name: user_telephone_cipher encryptorName: aes_encryptor likeQuery: - name: telephone_like + name: user_telephone_like encryptorName: like_encryptor t_user_details: columns: @@ -75,10 +75,10 @@ rules: encryptorName: like_encryptor telephone: cipher: - name: telephone_cipher + name: merchant_telephone_cipher encryptorName: aes_encryptor likeQuery: - name: telephone_like + name: merchant_telephone_like encryptorName: like_encryptor props: diff --git a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/dataset.xml b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/dataset.xml index da704299ae720..9695514e4ae7f 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/dataset.xml +++ b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/dataset.xml @@ 
-49,8 +49,14 @@ + + + + + + diff --git a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/init-sql/mysql/01-actual-init.sql b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/init-sql/mysql/01-actual-init.sql index 80037d3fd8466..a2e2368d88047 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/init-sql/mysql/01-actual-init.sql +++ b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/init-sql/mysql/01-actual-init.sql @@ -27,3 +27,4 @@ CREATE TABLE passthrough.t_data_type_floating_point (id INT PRIMARY KEY, col_flo CREATE TABLE passthrough.t_with_generated_id (id INT AUTO_INCREMENT PRIMARY KEY, val VARCHAR(100) NOT NULL); CREATE TABLE passthrough.t_data_type_money (id INT PRIMARY KEY, val NUMERIC(16, 2)); CREATE TABLE passthrough.t_data_type_bytea (id INT PRIMARY KEY, val BLOB NOT NULL); +CREATE TABLE passthrough.t_data_type_date (id INT PRIMARY KEY, creation_date DATE NOT NULL, update_date DATETIME NOT NULL); diff --git a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/init-sql/opengauss/01-actual-init.sql b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/init-sql/opengauss/01-actual-init.sql index 3872828744032..5b8773bd75337 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/init-sql/opengauss/01-actual-init.sql +++ b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/init-sql/opengauss/01-actual-init.sql @@ -28,3 +28,4 @@ CREATE TABLE t_data_type_floating_point (id INT PRIMARY KEY, col_float REAL NOT CREATE TABLE t_with_generated_id (id SERIAL PRIMARY KEY, val VARCHAR NOT NULL); CREATE TABLE t_data_type_money (id INT PRIMARY KEY, val money); CREATE TABLE t_data_type_bytea (id INT PRIMARY KEY, val bytea NOT NULL); +CREATE TABLE t_data_type_date (id INT PRIMARY KEY, creation_date DATE NOT NULL, update_date TIMESTAMP NOT NULL); diff --git 
a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/init-sql/postgresql/01-actual-init.sql b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/init-sql/postgresql/01-actual-init.sql index 3872828744032..5b8773bd75337 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/init-sql/postgresql/01-actual-init.sql +++ b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/actual/init-sql/postgresql/01-actual-init.sql @@ -28,3 +28,4 @@ CREATE TABLE t_data_type_floating_point (id INT PRIMARY KEY, col_float REAL NOT CREATE TABLE t_with_generated_id (id SERIAL PRIMARY KEY, val VARCHAR NOT NULL); CREATE TABLE t_data_type_money (id INT PRIMARY KEY, val money); CREATE TABLE t_data_type_bytea (id INT PRIMARY KEY, val bytea NOT NULL); +CREATE TABLE t_data_type_date (id INT PRIMARY KEY, creation_date DATE NOT NULL, update_date TIMESTAMP NOT NULL); diff --git a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/dataset.xml b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/dataset.xml index c8c8a3b5d719d..5e865e65acc4c 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/dataset.xml +++ b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/dataset.xml @@ -49,8 +49,14 @@ + + + + + + diff --git a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/init-sql/mysql/01-expected-init.sql b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/init-sql/mysql/01-expected-init.sql index 12d63624cecb2..b3c3f764ffb02 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/init-sql/mysql/01-expected-init.sql +++ b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/init-sql/mysql/01-expected-init.sql @@ -27,3 +27,4 @@ CREATE TABLE expected_dataset.t_data_type_floating_point (id INT PRIMARY KEY, co CREATE TABLE expected_dataset.t_with_generated_id 
(id INT AUTO_INCREMENT PRIMARY KEY, val VARCHAR(100) NOT NULL); CREATE TABLE expected_dataset.t_data_type_money (id INT PRIMARY KEY, val NUMERIC(16, 2)); CREATE TABLE expected_dataset.t_data_type_bytea (id INT PRIMARY KEY, val BLOB NOT NULL); +CREATE TABLE expected_dataset.t_data_type_date (id INT PRIMARY KEY, creation_date DATE NOT NULL, update_date DATETIME NOT NULL); diff --git a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/init-sql/opengauss/01-expected-init.sql b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/init-sql/opengauss/01-expected-init.sql index 579ac4a61f997..fba69f833f20b 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/init-sql/opengauss/01-expected-init.sql +++ b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/init-sql/opengauss/01-expected-init.sql @@ -29,3 +29,4 @@ CREATE TABLE t_data_type_floating_point (id INT PRIMARY KEY, col_float REAL NOT CREATE TABLE t_with_generated_id (id SERIAL PRIMARY KEY, val VARCHAR NOT NULL); CREATE TABLE t_data_type_money (id INT PRIMARY KEY, val money); CREATE TABLE t_data_type_bytea (id INT PRIMARY KEY, val bytea NOT NULL); +CREATE TABLE t_data_type_date (id INT PRIMARY KEY, creation_date DATE NOT NULL, update_date TIMESTAMP NOT NULL); diff --git a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/init-sql/postgresql/01-expected-init.sql b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/init-sql/postgresql/01-expected-init.sql index 43118c9a4fd62..c5c871b28115d 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/init-sql/postgresql/01-expected-init.sql +++ b/test/e2e/sql/src/test/resources/env/scenario/passthrough/data/expected/init-sql/postgresql/01-expected-init.sql @@ -29,3 +29,4 @@ CREATE TABLE t_data_type_floating_point (id INT PRIMARY KEY, col_float REAL NOT CREATE TABLE t_with_generated_id (id SERIAL PRIMARY KEY, val VARCHAR 
NOT NULL); CREATE TABLE t_data_type_money (id INT PRIMARY KEY, val money); CREATE TABLE t_data_type_bytea (id INT PRIMARY KEY, val bytea NOT NULL); +CREATE TABLE t_data_type_date (id INT PRIMARY KEY, creation_date DATE NOT NULL, update_date TIMESTAMP NOT NULL); diff --git a/test/e2e/sql/src/test/resources/env/scenario/tbl/data/actual/init-sql/mysql/01-actual-init.sql b/test/e2e/sql/src/test/resources/env/scenario/tbl/data/actual/init-sql/mysql/01-actual-init.sql index caa783b97da33..ffc1d360ba568 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/tbl/data/actual/init-sql/mysql/01-actual-init.sql +++ b/test/e2e/sql/src/test/resources/env/scenario/tbl/data/actual/init-sql/mysql/01-actual-init.sql @@ -64,7 +64,7 @@ CREATE INDEX order_index_t_order_9 ON tbl.t_order_9 (order_id); CREATE TABLE tbl.t_broadcast_table (id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (id)); -CREATE TABLE tbl.t_single_table (single_id INT NOT NULL, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); +CREATE TABLE tbl.t_single_table (single_id INT NOT NULL AUTO_INCREMENT, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); CREATE TABLE tbl.t_product_0 (product_id INT PRIMARY KEY, product_name VARCHAR(50) NOT NULL, category_id INT NOT NULL, price DECIMAL NOT NULL, status VARCHAR(50) NOT NULL, creation_date DATE NOT NULL); CREATE TABLE tbl.t_product_1 (product_id INT PRIMARY KEY, product_name VARCHAR(50) NOT NULL, category_id INT NOT NULL, price DECIMAL NOT NULL, status VARCHAR(50) NOT NULL, creation_date DATE NOT NULL); diff --git a/test/e2e/sql/src/test/resources/env/scenario/tbl/data/expected/init-sql/mysql/01-expected-init.sql b/test/e2e/sql/src/test/resources/env/scenario/tbl/data/expected/init-sql/mysql/01-expected-init.sql index 9f52e6ff479a0..562a00092a99b 100644 --- a/test/e2e/sql/src/test/resources/env/scenario/tbl/data/expected/init-sql/mysql/01-expected-init.sql +++ 
b/test/e2e/sql/src/test/resources/env/scenario/tbl/data/expected/init-sql/mysql/01-expected-init.sql @@ -23,7 +23,7 @@ CREATE DATABASE expected_dataset; CREATE TABLE expected_dataset.t_order(order_id BIGINT PRIMARY KEY, user_id INT NOT NULL, status VARCHAR(50) NOT NULL, merchant_id INT NOT NULL, remark VARCHAR(50) NOT NULL, creation_date DATE NOT NULL); CREATE TABLE expected_dataset.t_order_item(item_id BIGINT PRIMARY KEY, order_id BIGINT NOT NULL, user_id INT NOT NULL, product_id INT NOT NULL, quantity INT NOT NULL, creation_date DATE NOT NULL); -CREATE TABLE expected_dataset.t_single_table (single_id INT NOT NULL, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); +CREATE TABLE expected_dataset.t_single_table (single_id INT NOT NULL AUTO_INCREMENT, id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (single_id)); CREATE TABLE expected_dataset.t_broadcast_table (id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (id)); CREATE TABLE expected_dataset.t_product (product_id INT PRIMARY KEY, product_name VARCHAR(50) NOT NULL, category_id INT NOT NULL, price DECIMAL NOT NULL, status VARCHAR(50) NOT NULL, creation_date DATE NOT NULL); diff --git a/test/fixture/infra/src/main/java/org/apache/shardingsphere/test/fixture/infra/datasource/pool/metadata/MockedDataSourcePoolMetaData.java b/test/fixture/infra/src/main/java/org/apache/shardingsphere/test/fixture/infra/datasource/pool/metadata/MockedDataSourcePoolMetaData.java deleted file mode 100644 index bd8f994d4814a..0000000000000 --- a/test/fixture/infra/src/main/java/org/apache/shardingsphere/test/fixture/infra/datasource/pool/metadata/MockedDataSourcePoolMetaData.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.test.fixture.infra.datasource.pool.metadata; - -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolFieldMetaData; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolPropertiesValidator; -import org.apache.shardingsphere.infra.datasource.pool.metadata.DefaultDataSourcePoolPropertiesValidator; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -/** - * Mocked data source pool meta data. 
- */ -public final class MockedDataSourcePoolMetaData implements DataSourcePoolMetaData { - - @Override - public Map getDefaultProperties() { - return Collections.singletonMap("maxPoolSize", 100); - } - - @Override - public Map getInvalidProperties() { - Map result = new HashMap<>(2, 1F); - result.put("maxPoolSize", -1); - result.put("minPoolSize", -1); - return result; - } - - @Override - public Map getPropertySynonyms() { - Map result = new HashMap<>(2, 1F); - result.put("maxPoolSize", "maxPoolSize"); - result.put("minPoolSize", "minPoolSize"); - return result; - } - - @Override - public Collection getTransientFieldNames() { - return Collections.singleton("closed"); - } - - @Override - public DataSourcePoolFieldMetaData getFieldMetaData() { - return new MockedDataSourcePoolFieldMetaData(); - } - - @Override - public DataSourcePoolPropertiesValidator getDataSourcePoolPropertiesValidator() { - return new DefaultDataSourcePoolPropertiesValidator(); - } - - @Override - public String getType() { - return "org.apache.shardingsphere.test.fixture.jdbc.MockedDataSource"; - } -} diff --git a/test/fixture/infra/src/main/java/org/apache/shardingsphere/test/fixture/infra/rule/MockedRule.java b/test/fixture/infra/src/main/java/org/apache/shardingsphere/test/fixture/infra/rule/MockedRule.java index b5db7812574ff..0425cec047cc6 100644 --- a/test/fixture/infra/src/main/java/org/apache/shardingsphere/test/fixture/infra/rule/MockedRule.java +++ b/test/fixture/infra/src/main/java/org/apache/shardingsphere/test/fixture/infra/rule/MockedRule.java @@ -31,9 +31,4 @@ public final class MockedRule implements ShardingSphereRule { public RuleConfiguration getConfiguration() { return mock(RuleConfiguration.class); } - - @Override - public String getType() { - return MockedRule.class.getSimpleName(); - } } diff --git a/test/fixture/infra/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData 
b/test/fixture/infra/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData deleted file mode 100644 index 52f43df55b2f5..0000000000000 --- a/test/fixture/infra/src/main/resources/META-INF/services/org.apache.shardingsphere.infra.datasource.pool.metadata.DataSourcePoolMetaData +++ /dev/null @@ -1,18 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -org.apache.shardingsphere.test.fixture.infra.datasource.pool.metadata.MockedDataSourcePoolMetaData diff --git a/test/it/optimizer/src/test/java/org/apache/shardingsphere/test/it/optimizer/converter/SQLNodeConverterEngineIT.java b/test/it/optimizer/src/test/java/org/apache/shardingsphere/test/it/optimizer/converter/SQLNodeConverterEngineIT.java index d78bd5437400b..fc1233355d9a9 100644 --- a/test/it/optimizer/src/test/java/org/apache/shardingsphere/test/it/optimizer/converter/SQLNodeConverterEngineIT.java +++ b/test/it/optimizer/src/test/java/org/apache/shardingsphere/test/it/optimizer/converter/SQLNodeConverterEngineIT.java @@ -60,13 +60,19 @@ class SQLNodeConverterEngineIT { private static final String DELETE_STATEMENT_PREFIX = "DELETE"; + private static final String EXPLAIN_STATEMENT_PREFIX = "EXPLAIN"; + + private static final String UPDATE_STATEMENT_PREFIX = "UPDATE"; + + private static final String INSERT_STATEMENT_PREFIX = "INSERT"; + @ParameterizedTest(name = "{0} ({1}) -> {2}") @ArgumentsSource(TestCaseArgumentsProvider.class) void assertConvert(final String sqlCaseId, final SQLCaseType sqlCaseType, final String databaseType) { String expected; try { expected = SQL_NODE_CONVERTER_TEST_CASES.get(sqlCaseId, sqlCaseType, databaseType).getExpectedSQL(); - } catch (IllegalStateException ex) { + } catch (final IllegalStateException ex) { log.warn(ex.getMessage()); return; } @@ -83,7 +89,7 @@ private static class TestCaseArgumentsProvider implements ArgumentsProvider { @Override public Stream provideArguments(final ExtensionContext extensionContext) { - return getTestParameters("MySQL", "PostgreSQL", "openGauss").stream(); + return getTestParameters("MySQL", "PostgreSQL", "openGauss", "Oracle").stream(); } private Collection getTestParameters(final String... 
databaseTypes) { @@ -101,7 +107,11 @@ private boolean isPlaceholderWithoutParameter(final InternalSQLParserTestParamet } private boolean isSupportedSQLCase(final InternalSQLParserTestParameter testParam) { - return testParam.getSqlCaseId().toUpperCase().startsWith(SELECT_STATEMENT_PREFIX) || testParam.getSqlCaseId().toUpperCase().startsWith(DELETE_STATEMENT_PREFIX); + return testParam.getSqlCaseId().toUpperCase().startsWith(SELECT_STATEMENT_PREFIX) + || testParam.getSqlCaseId().toUpperCase().startsWith(DELETE_STATEMENT_PREFIX) + || testParam.getSqlCaseId().toUpperCase().startsWith(EXPLAIN_STATEMENT_PREFIX) + || testParam.getSqlCaseId().toUpperCase().startsWith(UPDATE_STATEMENT_PREFIX) + || testParam.getSqlCaseId().toUpperCase().startsWith(INSERT_STATEMENT_PREFIX); } } } diff --git a/test/it/optimizer/src/test/resources/converter/delete.xml b/test/it/optimizer/src/test/resources/converter/delete.xml index 60671ebf223d0..7145066efdb6b 100644 --- a/test/it/optimizer/src/test/resources/converter/delete.xml +++ b/test/it/optimizer/src/test/resources/converter/delete.xml @@ -33,7 +33,9 @@ - + + + diff --git a/test/it/optimizer/src/test/resources/converter/explain.xml b/test/it/optimizer/src/test/resources/converter/explain.xml new file mode 100644 index 0000000000000..043298d031126 --- /dev/null +++ b/test/it/optimizer/src/test/resources/converter/explain.xml @@ -0,0 +1,62 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/test/it/optimizer/src/test/resources/converter/insert.xml b/test/it/optimizer/src/test/resources/converter/insert.xml new file mode 100644 index 0000000000000..65ec6621645c6 --- /dev/null +++ b/test/it/optimizer/src/test/resources/converter/insert.xml @@ -0,0 +1,76 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/test/it/optimizer/src/test/resources/converter/select-expression.xml 
b/test/it/optimizer/src/test/resources/converter/select-expression.xml index d61c3bfa03194..c414191201e46 100644 --- a/test/it/optimizer/src/test/resources/converter/select-expression.xml +++ b/test/it/optimizer/src/test/resources/converter/select-expression.xml @@ -48,4 +48,17 @@ + + + + + + + + + + + + + diff --git a/test/it/optimizer/src/test/resources/converter/select-special-function.xml b/test/it/optimizer/src/test/resources/converter/select-special-function.xml index 812bb212128bd..38f417e54f290 100644 --- a/test/it/optimizer/src/test/resources/converter/select-special-function.xml +++ b/test/it/optimizer/src/test/resources/converter/select-special-function.xml @@ -18,4 +18,7 @@ + + + diff --git a/test/it/optimizer/src/test/resources/converter/select-sub-query.xml b/test/it/optimizer/src/test/resources/converter/select-sub-query.xml new file mode 100644 index 0000000000000..7ee7d0489b17c --- /dev/null +++ b/test/it/optimizer/src/test/resources/converter/select-sub-query.xml @@ -0,0 +1,22 @@ + + + + + + + diff --git a/test/it/optimizer/src/test/resources/converter/select-table.xml b/test/it/optimizer/src/test/resources/converter/select-table.xml index 5c0dc9575a59e..0e9eacf4b2f4e 100644 --- a/test/it/optimizer/src/test/resources/converter/select-table.xml +++ b/test/it/optimizer/src/test/resources/converter/select-table.xml @@ -18,4 +18,8 @@ + + + + diff --git a/test/it/optimizer/src/test/resources/converter/select.xml b/test/it/optimizer/src/test/resources/converter/select.xml index 8bcd8cc3c1533..e16ff8d319e80 100644 --- a/test/it/optimizer/src/test/resources/converter/select.xml +++ b/test/it/optimizer/src/test/resources/converter/select.xml @@ -17,6 +17,20 @@ --> + + + + + + + + + + + + + + diff --git a/test/it/optimizer/src/test/resources/converter/update.xml b/test/it/optimizer/src/test/resources/converter/update.xml new file mode 100644 index 0000000000000..53569a080d66c --- /dev/null +++ b/test/it/optimizer/src/test/resources/converter/update.xml @@ 
-0,0 +1,65 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/ExternalSQLParserIT.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/ExternalSQLParserIT.java index e82943d458606..8c38558755378 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/ExternalSQLParserIT.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/ExternalSQLParserIT.java @@ -18,7 +18,6 @@ package org.apache.shardingsphere.test.it.sql.parser.external; import com.google.common.base.Preconditions; -import lombok.SneakyThrows; import org.apache.shardingsphere.infra.exception.core.external.ShardingSphereExternalException; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.sql.parser.api.CacheOption; @@ -28,9 +27,10 @@ import org.apache.shardingsphere.test.it.sql.parser.external.env.SQLParserExternalITEnvironment; import org.apache.shardingsphere.test.it.sql.parser.external.result.SQLParseResultReporter; import org.apache.shardingsphere.test.it.sql.parser.external.result.SQLParseResultReporterCreator; -import org.apache.shardingsphere.test.loader.AbstractTestParameterLoader; -import org.apache.shardingsphere.test.loader.ExternalCaseSettings; -import org.apache.shardingsphere.test.loader.ExternalSQLParserTestParameter; +import org.apache.shardingsphere.test.it.sql.parser.loader.ExternalCaseSettings; +import org.apache.shardingsphere.test.loader.TestParameterLoadTemplate; +import org.apache.shardingsphere.test.loader.ExternalSQLTestParameter; +import org.apache.shardingsphere.test.loader.TestParameterLoader; import org.apache.shardingsphere.test.loader.strategy.TestParameterLoadStrategy; import org.apache.shardingsphere.test.loader.strategy.impl.GitHubTestParameterLoadStrategy; import 
org.junit.jupiter.api.condition.EnabledIf; @@ -76,17 +76,19 @@ private static boolean isEnabled() { private static class TestCaseArgumentsProvider implements ArgumentsProvider { @Override - public Stream provideArguments(final ExtensionContext extensionContext) { + public Stream provideArguments(final ExtensionContext extensionContext) throws ReflectiveOperationException { ExternalCaseSettings settings = extensionContext.getRequiredTestClass().getAnnotation(ExternalCaseSettings.class); Preconditions.checkNotNull(settings, "Annotation ExternalSQLParserITSettings is required."); return getTestParameters(settings).stream().map(each -> Arguments.of(each.getSqlCaseId(), each.getDatabaseType(), each.getSql(), each.getReportType())); } - @SneakyThrows - private Collection getTestParameters(final ExternalCaseSettings settings) { - AbstractTestParameterLoader loader = settings.caseLoader().getConstructor(TestParameterLoadStrategy.class) - .newInstance(new GitHubTestParameterLoadStrategy()); - return loader.load(URI.create(settings.caseURL()), URI.create(settings.resultURL()), settings.value(), settings.reportType()); + private Collection getTestParameters(final ExternalCaseSettings settings) throws ReflectiveOperationException { + TestParameterLoadStrategy loadStrategy = new GitHubTestParameterLoadStrategy(); + URI sqlCaseURI = URI.create(settings.caseURL()); + URI resultURI = URI.create(settings.resultURL()); + TestParameterLoadTemplate loadTemplate = settings.template().getConstructor().newInstance(); + TestParameterLoader loader = new TestParameterLoader(loadStrategy, loadTemplate); + return loader.load(sqlCaseURI, resultURI, settings.value(), settings.reportType()); } } } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/loader/ExternalMySQLTestParameterLoader.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/loader/MySQLExternalTestParameterLoadTemplate.java similarity index 77% 
rename from test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/loader/ExternalMySQLTestParameterLoader.java rename to test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/loader/MySQLExternalTestParameterLoadTemplate.java index 22f46f1a21436..430b0c6c5248c 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/loader/ExternalMySQLTestParameterLoader.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/loader/MySQLExternalTestParameterLoadTemplate.java @@ -17,9 +17,8 @@ package org.apache.shardingsphere.test.it.sql.parser.external.loader; -import org.apache.shardingsphere.test.loader.ExternalSQLParserTestParameter; -import org.apache.shardingsphere.test.loader.AbstractTestParameterLoader; -import org.apache.shardingsphere.test.loader.strategy.TestParameterLoadStrategy; +import org.apache.shardingsphere.test.loader.TestParameterLoadTemplate; +import org.apache.shardingsphere.test.loader.ExternalSQLTestParameter; import java.util.ArrayList; import java.util.Collection; @@ -28,20 +27,16 @@ import java.util.Objects; /** - * External MySQL SQL parser test parameter loader. + * External test parameter load template for MySQL. 
*/ -public final class ExternalMySQLTestParameterLoader extends AbstractTestParameterLoader { +public final class MySQLExternalTestParameterLoadTemplate implements TestParameterLoadTemplate { private static final int DELIMITER_COMMAND_LENGTH = "DELIMITER".length(); - public ExternalMySQLTestParameterLoader(final TestParameterLoadStrategy loadStrategy) { - super(loadStrategy); - } - @Override - public Collection createTestParameters(final String sqlCaseFileName, final List sqlCaseFileContent, - final List resultFileContent, final String databaseType, final String reportType) { - Collection result = new LinkedList<>(); + public Collection load(final String sqlCaseFileName, final List sqlCaseFileContent, + final List resultFileContent, final String databaseType, final String reportType) { + Collection result = new LinkedList<>(); List lines = new ArrayList<>(); int sqlCaseIndex = 1; String delimiter = ";"; @@ -60,7 +55,7 @@ public Collection createTestParameters(final Str String sqlCaseId = sqlCaseFileName + sqlCaseIndex++; String sql = String.join("\n", lines); sql = sql.substring(0, sql.length() - delimiter.length()); - result.add(new ExternalSQLParserTestParameter(sqlCaseId, databaseType, sql, reportType)); + result.add(new ExternalSQLTestParameter(sqlCaseId, databaseType, sql, reportType)); } lines.clear(); } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/loader/DefaultExternalTestParameterLoader.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/loader/StandardExternalTestParameterLoadTemplate.java similarity index 74% rename from test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/loader/DefaultExternalTestParameterLoader.java rename to test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/loader/StandardExternalTestParameterLoadTemplate.java index d3ebcfb53b39e..6c3911dbbb85e 100644 --- 
a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/loader/DefaultExternalTestParameterLoader.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/external/loader/StandardExternalTestParameterLoadTemplate.java @@ -17,27 +17,22 @@ package org.apache.shardingsphere.test.it.sql.parser.external.loader; -import org.apache.shardingsphere.test.loader.ExternalSQLParserTestParameter; -import org.apache.shardingsphere.test.loader.AbstractTestParameterLoader; -import org.apache.shardingsphere.test.loader.strategy.TestParameterLoadStrategy; +import org.apache.shardingsphere.test.loader.TestParameterLoadTemplate; +import org.apache.shardingsphere.test.loader.ExternalSQLTestParameter; import java.util.Collection; import java.util.LinkedList; import java.util.List; /** - * Default external SQL parser test parameter loader. + * Standard external test parameter load template. */ -public final class DefaultExternalTestParameterLoader extends AbstractTestParameterLoader { - - public DefaultExternalTestParameterLoader(final TestParameterLoadStrategy loadStrategy) { - super(loadStrategy); - } +public final class StandardExternalTestParameterLoadTemplate implements TestParameterLoadTemplate { @Override - public Collection createTestParameters(final String sqlCaseFileName, final List sqlCaseFileContent, - final List resultFileContent, final String databaseType, final String reportType) { - Collection result = new LinkedList<>(); + public Collection load(final String sqlCaseFileName, final List sqlCaseFileContent, + final List resultFileContent, final String databaseType, final String reportType) { + Collection result = new LinkedList<>(); String completedSQL = ""; int sqlCaseEnum = 1; int statementLines = 0; @@ -51,7 +46,7 @@ public Collection createTestParameters(final Str resultIndex = searchInResultContent(resultIndex, resultFileContent, completedSQL, statementLines); if (resultIndex >= resultFileContent.size() || 
!resultFileContent.get(resultIndex).contains("ERROR")) { String sqlCaseId = sqlCaseFileName + sqlCaseEnum; - result.add(new ExternalSQLParserTestParameter(sqlCaseId, databaseType, completedSQL, reportType)); + result.add(new ExternalSQLTestParameter(sqlCaseId, databaseType, completedSQL, reportType)); sqlCaseEnum++; } completedSQL = ""; diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/assignment/AssignmentValueAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/assignment/AssignmentValueAssert.java index e3b9e5f1e5113..05c3dda05ccf3 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/assignment/AssignmentValueAssert.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/assignment/AssignmentValueAssert.java @@ -20,6 +20,8 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.BinaryOperationExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.CaseWhenExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.FunctionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.complex.CommonExpressionSegment; @@ -44,23 +46,44 @@ public final class AssignmentValueAssert { * @param assertContext assert context * @param actual actual expression segment * @param expected expected assignment value + * @throws UnsupportedOperationException unsupported assertion segment exception */ public static void assertIs(final SQLCaseAssertContext assertContext, final ExpressionSegment actual, final 
ExpectedAssignmentValue expected) { if (actual instanceof ParameterMarkerExpressionSegment) { ExpressionAssert.assertParameterMarkerExpression(assertContext, (ParameterMarkerExpressionSegment) actual, expected.getParameterMarkerExpression()); - } else if (actual instanceof LiteralExpressionSegment) { + return; + } + if (actual instanceof LiteralExpressionSegment) { ExpressionAssert.assertLiteralExpression(assertContext, (LiteralExpressionSegment) actual, expected.getLiteralExpression()); // FIXME should be CommonExpressionProjection, not ExpressionProjectionSegment - } else if (actual instanceof ExpressionProjectionSegment) { + return; + } + if (actual instanceof ExpressionProjectionSegment) { ExpressionAssert.assertCommonExpression(assertContext, (ExpressionProjectionSegment) actual, expected.getCommonExpression()); - } else if (actual instanceof ColumnSegment) { + return; + } + if (actual instanceof ColumnSegment) { ColumnAssert.assertIs(assertContext, (ColumnSegment) actual, expected.getColumn()); - } else if (actual instanceof SubqueryExpressionSegment) { + return; + } + if (actual instanceof SubqueryExpressionSegment) { ExpressionAssert.assertSubqueryExpression(assertContext, (SubqueryExpressionSegment) actual, expected.getSubquery()); - } else if (actual instanceof FunctionSegment) { + return; + } + if (actual instanceof FunctionSegment) { ExpressionAssert.assertFunction(assertContext, (FunctionSegment) actual, expected.getFunction()); - } else if (actual instanceof CommonExpressionSegment) { + return; + } + if (actual instanceof CommonExpressionSegment) { ExpressionAssert.assertCommonExpression(assertContext, (CommonExpressionSegment) actual, expected.getCommonExpression()); + return; + } + if (actual instanceof CaseWhenExpression) { + ExpressionAssert.assertCaseWhenExpression(assertContext, (CaseWhenExpression) actual, expected.getCaseWhenExpression()); + return; + } + if (actual instanceof BinaryOperationExpression) { + 
ExpressionAssert.assertBinaryOperationExpression(assertContext, (BinaryOperationExpression) actual, expected.getBinaryOperationExpression()); } } } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/definition/ColumnDefinitionAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/definition/ColumnDefinitionAssert.java index b09012e1b2a84..e856c6b9a3c13 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/definition/ColumnDefinitionAssert.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/definition/ColumnDefinitionAssert.java @@ -26,6 +26,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; /** * Column definition assert. 
@@ -42,7 +44,12 @@ public final class ColumnDefinitionAssert { */ public static void assertIs(final SQLCaseAssertContext assertContext, final ColumnDefinitionSegment actual, final ExpectedColumnDefinition expected) { assertThat(assertContext.getText("Column definition name assertion error: "), actual.getColumnName().getIdentifier().getValue(), is(expected.getColumn().getName())); - assertThat(assertContext.getText("Column definition data type assertion error: "), actual.getDataType().getDataTypeName(), is(expected.getType())); + if (null != expected.getType()) { + assertNotNull(actual.getDataType(), assertContext.getText("Column definition data type should exist.")); + assertThat(assertContext.getText("Column definition data type assertion error: "), actual.getDataType().getDataTypeName(), is(expected.getType())); + } else { + assertNull(actual.getDataType(), assertContext.getText("Column definition data type should not exist.")); + } assertThat(assertContext.getText("Column definition primary key assertion error: "), actual.isPrimaryKey(), is(expected.isPrimaryKey())); TableAssert.assertIs(assertContext, actual.getReferencedTables(), expected.getReferencedTables()); assertThat(assertContext.getText("Column definition start index assertion error: "), actual.getStartIndex(), is(expected.getStartIndex())); diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/expression/ExpressionAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/expression/ExpressionAssert.java index e65dd42cbe325..9c7e5d72deee2 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/expression/ExpressionAssert.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/expression/ExpressionAssert.java @@ -31,12 +31,13 @@ import 
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.FunctionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.InExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.IntervalDayToSecondExpression; -import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.IntervalExpressionProjection; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.IntervalYearToMonthExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ListExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.MatchAgainstExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.MultisetExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.NotExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.TypeCastExpression; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.UnaryOperationExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ValuesExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.complex.CommonExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.complex.ComplexExpressionSegment; @@ -45,8 +46,10 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.ParameterMarkerExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubqueryExpressionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.subquery.SubquerySegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.RowExpression; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.AggregationProjectionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.ExpressionProjectionSegment; +import 
org.apache.shardingsphere.sql.parser.sql.common.segment.dml.item.IntervalExpressionProjection; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.DataTypeSegment; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.SQLCaseAssertContext; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.SQLSegmentAssert; @@ -70,10 +73,13 @@ import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedIntervalYearToMonthExpression; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedListExpression; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedMatchExpression; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedMultisetExpression; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedNotExpression; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedTypeCastExpression; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedUnaryOperationExpression; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedValuesExpression; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedVariableSegment; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedRowExpression; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.complex.ExpectedCommonExpression; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.simple.ExpectedLiteralExpression; import 
org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.simple.ExpectedParameterMarkerExpression; @@ -209,6 +215,7 @@ public static void assertExistsSubqueryExpression(final SQLCaseAssertContext ass /** * Assert binary operation expression. + * * @param assertContext assert context * @param actual actual binary operation expression * @param expected expected binary operation expression @@ -229,6 +236,7 @@ public static void assertBinaryOperationExpression(final SQLCaseAssertContext as /** * Assert in operation expression. + * * @param assertContext assert context * @param actual actual in operation expression * @param expected expected in operation expression @@ -249,6 +257,7 @@ public static void assertInExpression(final SQLCaseAssertContext assertContext, /** * Assert not operation expression. + * * @param assertContext assert context * @param actual actual not operation expression * @param expected expected not operation expression @@ -266,6 +275,7 @@ public static void assertNotExpression(final SQLCaseAssertContext assertContext, /** * Assert list operation expression. + * * @param assertContext assert context * @param actual actual list operation expression * @param expected expected list operation expression @@ -289,6 +299,7 @@ public static void assertListExpression(final SQLCaseAssertContext assertContext /** * Assert between operation expression. 
+ * * @param assertContext assert context * @param actual actual between operation expression * @param expected expected between operation expression @@ -328,7 +339,7 @@ public static void assertFunction(final SQLCaseAssertContext assertContext, fina while (expectedIterator.hasNext()) { ExpressionAssert.assertExpression(assertContext, actualIterator.next(), expectedIterator.next()); } - if (expected.getOwner() != null) { + if (null != expected.getOwner()) { OwnerAssert.assertIs(assertContext, actual.getOwner(), expected.getOwner()); } } @@ -498,6 +509,60 @@ private static void assertIntervalYearToMonthExpression(final SQLCaseAssertConte } } + private static void assertMultisetExpression(final SQLCaseAssertContext assertContext, final MultisetExpression actual, final ExpectedMultisetExpression expected) { + if (null == expected) { + assertNull(actual, assertContext.getText("Multiset expression should not exist.")); + return; + } + assertNotNull(actual, assertContext.getText("Multiset expression should exist.")); + assertExpression(assertContext, actual.getLeft(), expected.getLeft()); + assertExpression(assertContext, actual.getRight(), expected.getRight()); + assertThat(assertContext.getText("Multiset operator assertion error: "), actual.getOperator(), is(expected.getOperator())); + assertThat(assertContext.getText("Multiset keyword assertion error: "), actual.getKeyWord(), is(expected.getKeyWord())); + } + + /** + * Assert row expression. 
+ * + * @param assertContext assert context + * @param actual actual row expression + * @param expected expected row expression + */ + private static void assertRowExpression(final SQLCaseAssertContext assertContext, final RowExpression actual, final ExpectedRowExpression expected) { + if (null == expected) { + assertNull(actual, assertContext.getText("Row expression should not exist.")); + } else { + assertNotNull(actual, assertContext.getText("Actual list expression should not exist.")); + assertThat(assertContext.getText("Row expression item size assert error."), + actual.getItems().size(), is(expected.getItems().size())); + Iterator actualItems = actual.getItems().iterator(); + Iterator expectedItems = expected.getItems().iterator(); + while (actualItems.hasNext()) { + assertExpression(assertContext, actualItems.next(), expectedItems.next()); + } + SQLSegmentAssert.assertIs(assertContext, actual, expected); + } + } + + /** + * Assert unary operation expression. + * + * @param assertContext assert context + * @param actual actual unary operation expression + * @param expected expected unary operation expression + */ + private static void assertUnaryOperationExpression(final SQLCaseAssertContext assertContext, final UnaryOperationExpression actual, final ExpectedUnaryOperationExpression expected) { + if (null == expected) { + assertNull(actual, assertContext.getText("Actual unary operation expression should not exist.")); + } else { + assertNotNull(actual, assertContext.getText("Actual unary operation expression should exist.")); + assertExpression(assertContext, actual.getExpression(), expected.getExpr()); + assertThat(assertContext.getText("Unary operation expression operator assert error."), + actual.getOperator(), is(expected.getOperator())); + SQLSegmentAssert.assertIs(assertContext, actual, expected); + } + } + /** * Assert expression by actual expression segment class type. 
* @@ -561,6 +626,12 @@ public static void assertExpression(final SQLCaseAssertContext assertContext, ColumnWithJoinOperatorAssert.assertIs(assertContext, (ColumnWithJoinOperatorSegment) actual, expected.getColumnWithJoinOperatorSegment()); } else if (actual instanceof IntervalExpressionProjection) { assertIntervalExpression(assertContext, (IntervalExpressionProjection) actual, expected.getIntervalExpression()); + } else if (actual instanceof MultisetExpression) { + assertMultisetExpression(assertContext, (MultisetExpression) actual, expected.getMultisetExpression()); + } else if (actual instanceof RowExpression) { + assertRowExpression(assertContext, (RowExpression) actual, expected.getRowExpression()); + } else if (actual instanceof UnaryOperationExpression) { + assertUnaryOperationExpression(assertContext, (UnaryOperationExpression) actual, expected.getUnaryOperationExpression()); } else { throw new UnsupportedOperationException(String.format("Unsupported expression: %s", actual.getClass().getName())); } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/insert/MultiTableConditionalIntoClauseAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/insert/MultiTableConditionalIntoClauseAssert.java new file mode 100644 index 0000000000000..712f9fe1083b5 --- /dev/null +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/insert/MultiTableConditionalIntoClauseAssert.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.insert; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableConditionalIntoSegment; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableConditionalIntoWhenThenSegment; +import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.SQLCaseAssertContext; +import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.SQLSegmentAssert; +import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.expression.ExpressionAssert; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert.ExpectedMultiTableConditionalIntoClause; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * Multi table conditional into assert. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class MultiTableConditionalIntoClauseAssert { + + /** + * Assert actual multi table conditional into segment is correct with expected multi table conditional into segment. 
+ * + * @param assertContext assert context + * @param actual actual multi table conditional into segment + * @param expected expected multi table conditional into segment + */ + public static void assertIs(final SQLCaseAssertContext assertContext, final MultiTableConditionalIntoSegment actual, final ExpectedMultiTableConditionalIntoClause expected) { + assertThat(assertContext.getText("Conditional into when then segment size assertion error: "), actual.getWhenThenSegments().size(), is(expected.getConditionalIntoWhenThenClauses().size())); + int index = 0; + for (MultiTableConditionalIntoWhenThenSegment each : actual.getWhenThenSegments()) { + ExpressionAssert.assertExpression(assertContext, each.getWhenSegment(), expected.getConditionalIntoWhenThenClauses().get(index).getWhenClause()); + MultiTableConditionalIntoThenSegmentAssert.assertIs(assertContext, each.getThenSegment(), expected.getConditionalIntoWhenThenClauses().get(index).getThenClause()); + SQLSegmentAssert.assertIs(assertContext, actual, expected); + index++; + } + if (null == expected.getElseClause()) { + assertFalse(actual.getElseSegment().isPresent(), assertContext.getText("Actual multi table conditional into else segment should not exist.")); + } else { + assertTrue(actual.getElseSegment().isPresent(), assertContext.getText("Actual multi table conditional into else segment should exist.")); + MultiTableConditionalIntoElseSegmentAssert.assertIs(assertContext, actual.getElseSegment().get(), expected.getElseClause()); + } + } +} diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/insert/MultiTableConditionalIntoElseSegmentAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/insert/MultiTableConditionalIntoElseSegmentAssert.java new file mode 100644 index 0000000000000..af4102ec054c1 --- /dev/null +++ 
b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/insert/MultiTableConditionalIntoElseSegmentAssert.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.insert; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableConditionalIntoElseSegment; +import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.SQLCaseAssertContext; +import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.SQLSegmentAssert; +import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.statement.dml.impl.InsertStatementAssert; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert.ExpectedMultiTableConditionalIntoElseClause; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; + +/** + * Multi table conditional into else segment assert. 
+ */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class MultiTableConditionalIntoElseSegmentAssert { + + /** + * Assert actual multi table conditional into else segment is correct with expected multi table conditional into else segment. + * + * @param assertContext assert context + * @param actual actual multi table conditional into else segment + * @param expected expected multi table conditional into else segment + */ + public static void assertIs(final SQLCaseAssertContext assertContext, final MultiTableConditionalIntoElseSegment actual, final ExpectedMultiTableConditionalIntoElseClause expected) { + assertThat(assertContext.getText("Multi table conditional into else segment' insert values size assertion error: "), actual.getInsertStatements().size(), + is(expected.getInsertTestCases().size())); + int count = 0; + for (InsertStatement each : actual.getInsertStatements()) { + InsertStatementAssert.assertIs(assertContext, each, expected.getInsertTestCases().get(count)); + SQLSegmentAssert.assertIs(assertContext, actual, expected); + count++; + } + } +} diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/insert/MultiTableConditionalIntoThenSegmentAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/insert/MultiTableConditionalIntoThenSegmentAssert.java new file mode 100644 index 0000000000000..9b02954dd7ed1 --- /dev/null +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/insert/MultiTableConditionalIntoThenSegmentAssert.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.insert; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableConditionalIntoThenSegment; +import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.SQLCaseAssertContext; +import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.SQLSegmentAssert; +import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.statement.dml.impl.InsertStatementAssert; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert.ExpectedMultiTableConditionalIntoThenClause; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; + +/** + * Multi table conditional into then segment assert. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class MultiTableConditionalIntoThenSegmentAssert { + + /** + * Assert actual multi table conditional into then segment is correct with expected multi table conditional into then segment. 
+ * + * @param assertContext assert context + * @param actual actual multi table conditional into then segment + * @param expected expected multi table conditional into then segment + */ + public static void assertIs(final SQLCaseAssertContext assertContext, final MultiTableConditionalIntoThenSegment actual, final ExpectedMultiTableConditionalIntoThenClause expected) { + assertThat(assertContext.getText("Multi table conditional into then segment' insert values size assertion error: "), actual.getInsertStatements().size(), + is(expected.getInsertTestCases().size())); + int count = 0; + for (InsertStatement each : actual.getInsertStatements()) { + InsertStatementAssert.assertIs(assertContext, each, expected.getInsertTestCases().get(count)); + SQLSegmentAssert.assertIs(assertContext, actual, expected); + count++; + } + } +} diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/insert/InsertMultiTableElementAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/insert/MultiTableInsertIntoClauseAssert.java similarity index 78% rename from test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/insert/InsertMultiTableElementAssert.java rename to test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/insert/MultiTableInsertIntoClauseAssert.java index d9d211da26f51..c39c3cc486097 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/insert/InsertMultiTableElementAssert.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/insert/MultiTableInsertIntoClauseAssert.java @@ -19,30 +19,30 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; -import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.InsertMultiTableElementSegment; import 
org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableInsertIntoSegment; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.SQLCaseAssertContext; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.SQLSegmentAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.statement.dml.impl.InsertStatementAssert; -import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert.ExpectedInsertMultiTableElement; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert.ExpectedMultiTableInsertIntoClause; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; /** - * Insert multi table element assert. + * Multi table insert into assert. */ @NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class InsertMultiTableElementAssert { +public final class MultiTableInsertIntoClauseAssert { /** - * Assert actual insert multi table element segment is correct with expected multi table element. + * Assert actual multi table insert into segment is correct with expected multi table insert into segment. 
* * @param assertContext assert context - * @param actual actual insert multi table element - * @param expected expected insert multi table element + * @param actual actual multi table insert into segment + * @param expected expected multi table insert into segment */ - public static void assertIs(final SQLCaseAssertContext assertContext, final InsertMultiTableElementSegment actual, final ExpectedInsertMultiTableElement expected) { + public static void assertIs(final SQLCaseAssertContext assertContext, final MultiTableInsertIntoSegment actual, final ExpectedMultiTableInsertIntoClause expected) { assertThat(assertContext.getText("Insert values size assertion error: "), actual.getInsertStatements().size(), is(expected.getInsertTestCases().size())); int count = 0; for (InsertStatement each : actual.getInsertStatements()) { diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/projection/ProjectionAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/projection/ProjectionAssert.java index 0e0d29775f97e..257fe519796c7 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/projection/ProjectionAssert.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/projection/ProjectionAssert.java @@ -161,9 +161,8 @@ private static void assertExpressionProjection(final SQLCaseAssertContext assert String expectedText = SQLCaseType.LITERAL == assertContext.getCaseType() && null != expected.getLiteralText() ? 
expected.getLiteralText() : expected.getText(); - assertThat(assertContext.getText("Expression projection text assertion error: "), - actual.getText(), is(expectedText)); - if (expected.getExpr() != null) { + assertThat(assertContext.getText("Expression projection text assertion error: "), actual.getText(), is(expectedText)); + if (null != expected.getExpr()) { ExpressionAssert.assertExpression(assertContext, actual.getExpr(), expected.getExpr()); } } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/table/TableAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/table/TableAssert.java index ca21f9b371cf5..9115fc2d68e0b 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/table/TableAssert.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/segment/table/TableAssert.java @@ -20,13 +20,15 @@ import com.google.common.base.Strings; import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.FunctionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.XmlTableFunctionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.CollectionTableSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.FunctionTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.JoinTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SubqueryTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.TableSegment; 
-import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.XmlTableSegment; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.SQLCaseAssertContext; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.SQLSegmentAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.column.ColumnAssert; @@ -35,13 +37,13 @@ import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.owner.OwnerAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.statement.dml.impl.SelectStatementAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.column.ExpectedColumn; -import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedXmlTableFunction; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedTableFunction; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.table.ExpectedCollectionTable; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.table.ExpectedJoinTable; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.table.ExpectedSimpleTable; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.table.ExpectedSubqueryTable; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.table.ExpectedTable; -import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.table.ExpectedXmlTable; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.table.ExpectedFunctionTable; import java.util.Collection; import java.util.List; @@ -72,8 +74,8 @@ public static void assertIs(final SQLCaseAssertContext assertContext, final Tabl assertIs(assertContext, (SimpleTableSegment) 
actual, expected.getSimpleTable()); } else if (actual instanceof SubqueryTableSegment) { assertIs(assertContext, (SubqueryTableSegment) actual, expected.getSubqueryTable()); - } else if (actual instanceof XmlTableSegment) { - assertIs(assertContext, (XmlTableSegment) actual, expected.getXmlTable()); + } else if (actual instanceof FunctionTableSegment) { + assertIs(assertContext, (FunctionTableSegment) actual, expected.getFunctionTable()); } else if (actual instanceof CollectionTableSegment) { assertIs(assertContext, (CollectionTableSegment) actual, expected.getCollectionTable()); } else { @@ -95,17 +97,15 @@ private static void assertIs(final SQLCaseAssertContext assertContext, final Col } /** - * Assert actual xml table segment is correct with expected xml table. + * Assert actual function table segment is correct with expected table. * * @param assertContext assert context - * @param actual actual xml table - * @param expected expected xml table + * @param actual actual function table + * @param expected expected function table */ - private static void assertIs(final SQLCaseAssertContext assertContext, final XmlTableSegment actual, final ExpectedXmlTable expected) { - assertXmlTableFunction(assertContext, actual.getXmlTableFunction(), expected.getXmlTableFunction()); - if (null != actual.getXmlTableFunctionAlias()) { - assertThat(assertContext.getText("Xml table function alias assertion error"), actual.getXmlTableFunctionAlias(), is(expected.getXmlTableFunctionAlias())); - } + private static void assertIs(final SQLCaseAssertContext assertContext, final FunctionTableSegment actual, final ExpectedFunctionTable expected) { + assertTableFunction(assertContext, actual.getTableFunction(), expected.getTableFunction()); + actual.getAliasName().ifPresent(optional -> assertThat(assertContext.getText("Table function alias assertion error"), optional, is(expected.getTableAlias()))); } /** @@ -200,14 +200,21 @@ private static void assertJoinType(final SQLCaseAssertContext 
assertContext, fin } /** - * Assert actual xml table function segment is correct with expected xml table function. + * Assert actual table function segment is correct with expected table function. * * @param assertContext assert context - * @param actual actual xml table function - * @param expected expected xml table function + * @param actual actual table function + * @param expected expected table function */ - private static void assertXmlTableFunction(final SQLCaseAssertContext assertContext, final XmlTableFunctionSegment actual, final ExpectedXmlTableFunction expected) { - assertThat(assertContext.getText("Function name assertion error"), actual.getFunctionName(), is(expected.getFunctionName())); - assertThat(assertContext.getText("Function text assert error"), actual.getText(), is(expected.getText())); + private static void assertTableFunction(final SQLCaseAssertContext assertContext, final ExpressionSegment actual, final ExpectedTableFunction expected) { + if (actual instanceof XmlTableFunctionSegment) { + XmlTableFunctionSegment actualXmlTableFunction = (XmlTableFunctionSegment) actual; + assertThat(assertContext.getText("Function name assertion error"), actualXmlTableFunction.getFunctionName(), is(expected.getFunctionName())); + assertThat(assertContext.getText("Function text assert error"), actual.getText(), is(expected.getText())); + } else if (actual instanceof FunctionSegment) { + FunctionSegment actualTableFunction = (FunctionSegment) actual; + assertThat(assertContext.getText("Function name assertion error"), actualTableFunction.getFunctionName(), is(expected.getFunctionName())); + assertThat(assertContext.getText("Function text assert error"), actual.getText(), is(expected.getText())); + } } } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/comment/CommentAssert.java 
b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/comment/CommentAssert.java index 645fbd305f6d5..9bd7d9aeef36a 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/comment/CommentAssert.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/comment/CommentAssert.java @@ -63,7 +63,7 @@ private static void assertCorrectComment(final SQLCaseAssertContext assertContex assertTrue(actual instanceof AbstractSQLStatement, assertContext.getText("Comment should exist.")); assertThat(assertContext.getText("Comments size assertion error: "), ((AbstractSQLStatement) actual).getCommentSegments().size(), is(expected.getComments().size())); Iterator actualIterator = ((AbstractSQLStatement) actual).getCommentSegments().iterator(); - for (final ExpectedComment each : expected.getComments()) { + for (ExpectedComment each : expected.getComments()) { assertThat(assertContext.getText("Comments assertion error: "), actualIterator.next().getText(), is(each.getText())); } } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/DDLStatementAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/DDLStatementAssert.java index 1d0ac4f616abb..67c52260951b0 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/DDLStatementAssert.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/DDLStatementAssert.java @@ -28,6 +28,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.ClusterStatement; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.CommentStatement; import 
org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.CreateIndexStatement; +import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.CreateSequenceStatement; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.CreateTableStatement; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.CreateViewStatement; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.DDLStatement; @@ -68,6 +69,7 @@ import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.statement.ddl.impl.ClusterStatementAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.statement.ddl.impl.CommentStatementAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.statement.ddl.impl.CreateIndexStatementAssert; +import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.statement.ddl.impl.CreateSequenceStatementAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.statement.ddl.impl.CreateTableStatementAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.statement.ddl.impl.CreateViewStatementAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.statement.ddl.impl.CursorStatementAssert; @@ -102,6 +104,7 @@ import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.ddl.ClusterStatementTestCase; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.ddl.CommentStatementTestCase; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.ddl.CreateIndexStatementTestCase; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.ddl.CreateSequenceStatementTestCase; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.ddl.CreateTableStatementTestCase; import 
org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.ddl.CreateViewStatementTestCase; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.ddl.CursorStatementTestCase; @@ -202,6 +205,8 @@ public static void assertIs(final SQLCaseAssertContext assertContext, final DDLS DropViewStatementAssert.assertIs(assertContext, (DropViewStatement) actual, (DropViewStatementTestCase) expected); } else if (actual instanceof AlterTablespaceStatement) { AlterTablespaceStatementAssert.assertIs(assertContext, (AlterTablespaceStatement) actual, (AlterTablespaceStatementTestCase) expected); + } else if (actual instanceof CreateSequenceStatement) { + CreateSequenceStatementAssert.assertIs(assertContext, (CreateSequenceStatement) actual, (CreateSequenceStatementTestCase) expected); } } } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/impl/AlterTableStatementAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/impl/AlterTableStatementAssert.java index f95e634f45d6a..1fe30ca563c18 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/impl/AlterTableStatementAssert.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/impl/AlterTableStatementAssert.java @@ -24,6 +24,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.alter.ChangeColumnDefinitionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.alter.DropColumnDefinitionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.alter.ModifyColumnDefinitionSegment; +import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.alter.ModifyCollectionRetrievalSegment; import 
org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.column.alter.RenameColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.constraint.alter.AddConstraintDefinitionSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.index.RenameIndexDefinitionSegment; @@ -31,6 +32,7 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.ColumnSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.table.SimpleTableSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.AlterTableStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.handler.ddl.AlterTableStatementHandler; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.SQLCaseAssertContext; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.SQLSegmentAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.charset.CharsetAssert; @@ -85,6 +87,7 @@ public static void assertIs(final SQLCaseAssertContext assertContext, final Alte assertRenameIndexDefinitions(assertContext, actual, expected); assertRenameColumnDefinitions(assertContext, actual, expected); assertConvertTable(assertContext, actual, expected); + assertModifyCollectionRetrievalDefinitions(assertContext, actual, expected); } private static void assertConvertTable(final SQLCaseAssertContext assertContext, final AlterTableStatement actual, final AlterTableStatementTestCase expected) { @@ -223,4 +226,19 @@ private static void assertRenameColumnDefinitions(final SQLCaseAssertContext ass count++; } } + + private static void assertModifyCollectionRetrievalDefinitions(final SQLCaseAssertContext assertContext, final AlterTableStatement actual, final AlterTableStatementTestCase expected) { + Optional modifyCollectionRetrieval = AlterTableStatementHandler.getModifyCollectionRetrievalSegment(actual); + if (null == expected.getModifyCollectionRetrievalDefinition()) { + 
assertFalse(modifyCollectionRetrieval.isPresent(), assertContext.getText("Actual modify collection retrieval definitions should not exist.")); + } else { + assertTrue(modifyCollectionRetrieval.isPresent(), assertContext.getText("Actual modify collection retrieval definitions should exist.")); + if (null == expected.getModifyCollectionRetrievalDefinition().getTable()) { + assertNull(modifyCollectionRetrieval.get().getNestedTable(), "Actual nested table should not exist."); + } else { + assertNotNull(modifyCollectionRetrieval.get().getNestedTable(), "Actual nested table should exist."); + TableAssert.assertIs(assertContext, modifyCollectionRetrieval.get().getNestedTable(), expected.getModifyCollectionRetrievalDefinition().getTable()); + } + } + } } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/impl/AlterViewStatementAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/impl/AlterViewStatementAssert.java index e024603f5570f..9d9e8366d9b57 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/impl/AlterViewStatementAssert.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/impl/AlterViewStatementAssert.java @@ -19,13 +19,17 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; +import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.constraint.ConstraintDefinitionSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.AlterViewStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.handler.ddl.AlterViewStatementHandler; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.SQLCaseAssertContext; +import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.definition.ConstraintDefinitionAssert; import 
org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.table.TableAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.statement.dml.impl.SelectStatementAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.ddl.AlterViewStatementTestCase; +import java.util.Optional; + import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -48,6 +52,7 @@ public static void assertIs(final SQLCaseAssertContext assertContext, final Alte assertView(assertContext, actual, expected); assertViewDefinition(assertContext, actual, expected); assertSelect(assertContext, actual, expected); + assertConstraintDefinition(assertContext, actual, expected); } private static void assertView(final SQLCaseAssertContext assertContext, final AlterViewStatement actual, final AlterViewStatementTestCase expected) { @@ -72,4 +77,14 @@ private static void assertSelect(final SQLCaseAssertContext assertContext, final SelectStatementAssert.assertIs(assertContext, AlterViewStatementHandler.getSelectStatement(actual).get(), expected.getSelectStatement()); } } + + private static void assertConstraintDefinition(final SQLCaseAssertContext assertContext, final AlterViewStatement actual, final AlterViewStatementTestCase expected) { + Optional constraintDefinition = AlterViewStatementHandler.getConstraintDefinition(actual); + if (null == expected.getConstraintDefinition()) { + assertFalse(constraintDefinition.isPresent(), "actual constraint definition should not exist"); + } else { + assertTrue(constraintDefinition.isPresent(), "actual constraint definition should exist"); + ConstraintDefinitionAssert.assertIs(assertContext, constraintDefinition.get(), expected.getConstraintDefinition()); + } + } } diff --git 
a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/impl/AnalyzeStatementAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/impl/AnalyzeStatementAssert.java index 2fb1743dc717f..fa625788696ed 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/impl/AnalyzeStatementAssert.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/impl/AnalyzeStatementAssert.java @@ -25,6 +25,9 @@ import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.table.TableAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.ddl.AnalyzeStatementTestCase; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; + /** * Analyze statement assert. */ @@ -45,13 +48,19 @@ public static void assertIs(final SQLCaseAssertContext assertContext, final Orac private static void assertTables(final SQLCaseAssertContext assertContext, final OracleAnalyzeStatement actual, final AnalyzeStatementTestCase expected) { if (null != expected.getTable()) { + assertNotNull(actual.getTable(), assertContext.getText("Table should exist.")); TableAssert.assertIs(assertContext, actual.getTable(), expected.getTable()); + } else { + assertNull(actual.getTable(), assertContext.getText("Table should not exist.")); } } private static void assertIndex(final SQLCaseAssertContext assertContext, final OracleAnalyzeStatement actual, final AnalyzeStatementTestCase expected) { if (null != expected.getIndex()) { + assertNotNull(actual.getIndex(), assertContext.getText("Index should exist.")); IndexAssert.assertIs(assertContext, actual.getIndex(), expected.getIndex()); + } else { + assertNull(actual.getIndex(), assertContext.getText("Index should not exist.")); } } } 
diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/impl/CreateSequenceStatementAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/impl/CreateSequenceStatementAssert.java new file mode 100644 index 0000000000000..ed17fd888c17c --- /dev/null +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/ddl/impl/CreateSequenceStatementAssert.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.test.it.sql.parser.internal.asserts.statement.ddl.impl; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.CreateSequenceStatement; +import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.SQLCaseAssertContext; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.ddl.CreateSequenceStatementTestCase; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; + +/** + * Create sequence statement assert. + */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class CreateSequenceStatementAssert { + + /** + * Assert create sequence statement is correct with expected parser result. + * + * @param assertContext assert context + * @param actual actual create sequence statement + * @param expected expected create sequence statement test case + */ + public static void assertIs(final SQLCaseAssertContext assertContext, final CreateSequenceStatement actual, final CreateSequenceStatementTestCase expected) { + assertSequenceName(assertContext, actual, expected); + } + + private static void assertSequenceName(final SQLCaseAssertContext assertContext, final CreateSequenceStatement actual, final CreateSequenceStatementTestCase expected) { + if (null == expected.getSequenceName()) { + assertNull(actual.getSequenceName(), assertContext.getText("Actual create sequence segment should not exist.")); + } else { + assertNotNull(actual.getSequenceName(), assertContext.getText("Actual create sequence segment should exist.")); + assertThat(assertContext.getText(String.format("`%s`'s create sequence assertion error: ", actual.getClass().getSimpleName())), + actual.getSequenceName(), is(expected.getSequenceName().getName())); + } + } +} diff 
--git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/dml/impl/InsertStatementAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/dml/impl/InsertStatementAssert.java index 16c18f66d73b0..1df0c82214a76 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/dml/impl/InsertStatementAssert.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/dml/impl/InsertStatementAssert.java @@ -22,17 +22,21 @@ import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.ReturningSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.assignment.SetAssignmentSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.column.OnDuplicateKeyColumnsSegment; -import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.InsertMultiTableElementSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.OutputSegment; import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.WithSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.InsertStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.handler.dml.InsertStatementHandler; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableConditionalIntoSegment; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableInsertIntoSegment; +import org.apache.shardingsphere.sql.parser.sql.dialect.segment.oracle.insert.MultiTableInsertType; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.SQLCaseAssertContext; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.insert.InsertColumnsClauseAssert; -import 
org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.insert.InsertMultiTableElementAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.insert.InsertValuesClauseAssert; +import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.insert.MultiTableConditionalIntoClauseAssert; +import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.insert.MultiTableInsertIntoClauseAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.insert.OnDuplicateKeyColumnsAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.output.OutputClauseAssert; +import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.parameter.ParameterMarkerAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.returning.ReturningClauseAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.set.SetClauseAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.table.TableAssert; @@ -41,6 +45,8 @@ import java.util.Optional; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -67,7 +73,9 @@ public static void assertIs(final SQLCaseAssertContext assertContext, final Inse assertOnDuplicateKeyColumns(assertContext, actual, expected); assertWithClause(assertContext, actual, expected); assertOutputClause(assertContext, actual, expected); - assertInsertMultiTableElement(assertContext, actual, expected); + assertMultiTableInsertType(assertContext, actual, expected); + assertMultiTableInsertIntoClause(assertContext, actual, expected); + assertMultiTableConditionalIntoClause(assertContext, actual, expected); assertReturningClause(assertContext, actual, 
expected); } @@ -112,6 +120,7 @@ private static void assertInsertSelectClause(final SQLCaseAssertContext assertCo assertFalse(actual.getInsertSelect().isPresent(), assertContext.getText("Actual insert select segment should not exist.")); } else { assertTrue(actual.getInsertSelect().isPresent(), assertContext.getText("Actual insert select segment should exist.")); + ParameterMarkerAssert.assertCount(assertContext, actual.getInsertSelect().get().getSelect().getParameterCount(), expected.getSelectTestCase().getParameters().size()); SelectStatementAssert.assertIs(assertContext, actual.getInsertSelect().get().getSelect(), expected.getSelectTestCase()); } } @@ -146,13 +155,34 @@ private static void assertOutputClause(final SQLCaseAssertContext assertContext, } } - private static void assertInsertMultiTableElement(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) { - Optional insertTableElementSegment = InsertStatementHandler.getInsertMultiTableElementSegment(actual); - if (null == expected.getInsertTableElement()) { - assertFalse(insertTableElementSegment.isPresent(), assertContext.getText("Actual insert multi table element segment should not exist.")); + private static void assertMultiTableInsertType(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) { + Optional multiTableInsertType = InsertStatementHandler.getMultiTableInsertType(actual); + if (null == expected.getMultiTableInsertType()) { + assertFalse(multiTableInsertType.isPresent(), assertContext.getText("Actual multi table insert type should not exist.")); } else { - assertTrue(insertTableElementSegment.isPresent(), assertContext.getText("Actual insert multi table element segment should exist.")); - InsertMultiTableElementAssert.assertIs(assertContext, insertTableElementSegment.get(), expected.getInsertTableElement()); + assertTrue(multiTableInsertType.isPresent(), 
assertContext.getText("Actual multi table insert type should exist.")); + assertThat(assertContext.getText(String.format("`%s`'s multiTableInsertType assertion error: ", actual.getClass().getSimpleName())), multiTableInsertType.get().name(), + is(expected.getMultiTableInsertType().getMultiTableInsertType())); + } + } + + private static void assertMultiTableInsertIntoClause(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) { + Optional multiTableInsertIntoSegment = InsertStatementHandler.getMultiTableInsertIntoSegment(actual); + if (null == expected.getMultiTableInsertInto()) { + assertFalse(multiTableInsertIntoSegment.isPresent(), assertContext.getText("Actual multi table insert into segment should not exist.")); + } else { + assertTrue(multiTableInsertIntoSegment.isPresent(), assertContext.getText("Actual multi table insert into segment should exist.")); + MultiTableInsertIntoClauseAssert.assertIs(assertContext, multiTableInsertIntoSegment.get(), expected.getMultiTableInsertInto()); + } + } + + private static void assertMultiTableConditionalIntoClause(final SQLCaseAssertContext assertContext, final InsertStatement actual, final InsertStatementTestCase expected) { + Optional multiTableConditionalIntoSegment = InsertStatementHandler.getMultiTableConditionalIntoSegment(actual); + if (null == expected.getMultiTableConditionalInto()) { + assertFalse(multiTableConditionalIntoSegment.isPresent(), assertContext.getText("Actual multi table conditional into segment should not exist.")); + } else { + assertTrue(multiTableConditionalIntoSegment.isPresent(), assertContext.getText("Actual multi table conditional into segment should exist.")); + MultiTableConditionalIntoClauseAssert.assertIs(assertContext, multiTableConditionalIntoSegment.get(), expected.getMultiTableConditionalInto()); } } diff --git 
a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/dml/impl/MergeStatementAssert.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/dml/impl/MergeStatementAssert.java index c691f92412beb..d65dacf166650 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/dml/impl/MergeStatementAssert.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/asserts/statement/dml/impl/MergeStatementAssert.java @@ -20,6 +20,7 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; import org.apache.shardingsphere.sql.parser.sql.common.statement.dml.MergeStatement; +import org.apache.shardingsphere.sql.parser.sql.dialect.statement.oracle.dml.OracleInsertStatement; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.SQLCaseAssertContext; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.expression.ExpressionAssert; import org.apache.shardingsphere.test.it.sql.parser.internal.asserts.segment.set.SetClauseAssert; @@ -91,13 +92,9 @@ private static void assertWhereClause(final SQLCaseAssertContext assertContext, WhereClauseAssert.assertIs(assertContext, actual.getUpdate().getWhere().get(), expected.getUpdateClause().getWhereClause()); } } - if (null != expected.getDeleteClause()) { - if (null == expected.getDeleteClause().getWhereClause()) { - assertFalse(actual.getDelete().getWhere().isPresent(), assertContext.getText("Actual delete where segment should not exist.")); - } else { - assertTrue(actual.getDelete().getWhere().isPresent(), assertContext.getText("Actual delete where segment should exist.")); - WhereClauseAssert.assertIs(assertContext, actual.getDelete().getWhere().get(), expected.getDeleteClause().getWhereClause()); - } + if (null != expected.getInsertClause() && null != expected.getInsertClause().getWhereClause() && 
actual.getInsert() instanceof OracleInsertStatement) { + assertTrue(((OracleInsertStatement) actual.getInsert()).getWhere().isPresent(), assertContext.getText("Actual insert where segment should exist.")); + WhereClauseAssert.assertIs(assertContext, ((OracleInsertStatement) actual.getInsert()).getWhere().get(), expected.getInsertClause().getWhereClause()); } } } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/assignment/ExpectedAssignmentValue.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/assignment/ExpectedAssignmentValue.java index 0ed36a71dba50..797b8ad074f16 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/assignment/ExpectedAssignmentValue.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/assignment/ExpectedAssignmentValue.java @@ -21,6 +21,8 @@ import lombok.Setter; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.AbstractExpectedSQLSegment; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.column.ExpectedColumn; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedBinaryOperationExpression; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedCaseWhenExpression; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.complex.ExpectedCommonExpression; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.simple.ExpectedLiteralExpression; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.simple.ExpectedParameterMarkerExpression; @@ -45,6 +47,12 @@ 
public final class ExpectedAssignmentValue extends AbstractExpectedSQLSegment { @XmlElement(name = "common-expression") private ExpectedCommonExpression commonExpression; + @XmlElement(name = "case-when-expression") + private ExpectedCaseWhenExpression caseWhenExpression; + + @XmlElement(name = "binary-operation-expression") + private ExpectedBinaryOperationExpression binaryOperationExpression; + @XmlElement private ExpectedColumn column; diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/definition/ExpectedModifyCollectionRetrievalDefinition.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/definition/ExpectedModifyCollectionRetrievalDefinition.java new file mode 100644 index 0000000000000..6ffbf6e3a2dc9 --- /dev/null +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/definition/ExpectedModifyCollectionRetrievalDefinition.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.definition; + +import lombok.Getter; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.AbstractExpectedSQLSegment; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.table.ExpectedSimpleTable; + +import javax.xml.bind.annotation.XmlElement; + +/** + * Expected modify collection retrieval definition. + */ +@Getter +public final class ExpectedModifyCollectionRetrievalDefinition extends AbstractExpectedSQLSegment { + + @XmlElement(name = "table") + private ExpectedSimpleTable table; +} diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedExpression.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedExpression.java index 08d66195e4bd3..9486759f47f7a 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedExpression.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedExpression.java @@ -110,4 +110,13 @@ public final class ExpectedExpression extends AbstractExpectedSQLSegment { @XmlElement(name = "interval-expression") private ExpectedIntervalExpression intervalExpression; + + @XmlElement(name = "multiset-expression") + private ExpectedMultisetExpression multisetExpression; + + @XmlElement(name = "row-expression") + private ExpectedRowExpression rowExpression; + + @XmlElement(name = "unary-operation-expression") + private ExpectedUnaryOperationExpression unaryOperationExpression; } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedMultisetExpression.java 
b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedMultisetExpression.java new file mode 100644 index 0000000000000..917ca93ebf879 --- /dev/null +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedMultisetExpression.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr; + +import lombok.Getter; +import lombok.Setter; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.AbstractExpectedSQLSegment; + +import javax.xml.bind.annotation.XmlElement; + +/** + * Expected multiset expression. 
+ */ +@Getter +@Setter +public final class ExpectedMultisetExpression extends AbstractExpectedSQLSegment implements ExpectedExpressionSegment { + + @XmlElement + private ExpectedExpression left; + + @XmlElement + private ExpectedExpression right; + + @XmlElement + private String operator; + + @XmlElement + private String keyWord; +} diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedRowExpression.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedRowExpression.java new file mode 100644 index 0000000000000..bc801b3eedf67 --- /dev/null +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedRowExpression.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr; + +import lombok.Getter; +import lombok.Setter; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.AbstractExpectedSQLSegment; + +import javax.xml.bind.annotation.XmlElement; +import java.util.ArrayList; +import java.util.List; + +@Getter +@Setter +public final class ExpectedRowExpression extends AbstractExpectedSQLSegment implements ExpectedExpressionSegment { + + @XmlElement + private final List items = new ArrayList<>(); +} diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedXmlTableFunction.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedTableFunction.java similarity index 89% rename from test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedXmlTableFunction.java rename to test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedTableFunction.java index 34340e0e68378..e050331c87abc 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedXmlTableFunction.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedTableFunction.java @@ -24,11 +24,11 @@ import javax.xml.bind.annotation.XmlAttribute; /** - * Expected xml table function. + * Expected table function. 
*/ @Getter @Setter -public final class ExpectedXmlTableFunction extends AbstractExpectedSQLSegment implements ExpectedExpressionSegment { +public final class ExpectedTableFunction extends AbstractExpectedSQLSegment implements ExpectedExpressionSegment { @XmlAttribute(name = "function-name") private String functionName; diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedUnaryOperationExpression.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedUnaryOperationExpression.java new file mode 100644 index 0000000000000..c2e7c066adac5 --- /dev/null +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/expr/ExpectedUnaryOperationExpression.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr; + +import lombok.Getter; +import lombok.Setter; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.AbstractExpectedSQLSegment; + +import javax.xml.bind.annotation.XmlElement; + +/** + * Expected unary operation expression. + */ +@Getter +@Setter +public class ExpectedUnaryOperationExpression extends AbstractExpectedSQLSegment implements ExpectedExpressionSegment { + + @XmlElement + private String operator; + + @XmlElement + private ExpectedExpression expr; +} diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableConditionalIntoClause.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableConditionalIntoClause.java new file mode 100644 index 0000000000000..fead100aaaa84 --- /dev/null +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableConditionalIntoClause.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert; + +import lombok.Getter; +import lombok.Setter; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.AbstractExpectedSQLSegment; + +import javax.xml.bind.annotation.XmlElement; +import java.util.LinkedList; +import java.util.List; + +/** + * Expected multi table conditional into clause. + */ +@Getter +@Setter +public final class ExpectedMultiTableConditionalIntoClause extends AbstractExpectedSQLSegment { + + @XmlElement(name = "conditional-into-when-then") + private final List conditionalIntoWhenThenClauses = new LinkedList<>(); + + @XmlElement(name = "conditional-into-else") + private ExpectedMultiTableConditionalIntoElseClause elseClause; +} diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableConditionalIntoElseClause.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableConditionalIntoElseClause.java new file mode 100644 index 0000000000000..ce7b3f4972597 --- /dev/null +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableConditionalIntoElseClause.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert; + +import lombok.Getter; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.AbstractExpectedSQLSegment; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.dml.InsertStatementTestCase; + +import javax.xml.bind.annotation.XmlElement; +import java.util.LinkedList; +import java.util.List; + +/** + * Expected multi table conditional into else clause. + */ +@Getter +public final class ExpectedMultiTableConditionalIntoElseClause extends AbstractExpectedSQLSegment { + + @XmlElement(name = "insert-statement") + private final List insertTestCases = new LinkedList<>(); +} diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableConditionalIntoThenClause.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableConditionalIntoThenClause.java new file mode 100644 index 0000000000000..89cabe3a45478 --- /dev/null +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableConditionalIntoThenClause.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert; + +import lombok.Getter; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.AbstractExpectedSQLSegment; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.dml.InsertStatementTestCase; + +import javax.xml.bind.annotation.XmlElement; +import java.util.LinkedList; +import java.util.List; + +/** + * Expected multi table conditional into then clause. 
+ */ +@Getter +public final class ExpectedMultiTableConditionalIntoThenClause extends AbstractExpectedSQLSegment { + + @XmlElement(name = "insert-statement") + private final List insertTestCases = new LinkedList<>(); +} diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableConditionalIntoWhenThenClause.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableConditionalIntoWhenThenClause.java new file mode 100644 index 0000000000000..d9566807e4ab4 --- /dev/null +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableConditionalIntoWhenThenClause.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert; + +import lombok.Getter; +import lombok.Setter; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.AbstractExpectedSQLSegment; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedExpression; + +import javax.xml.bind.annotation.XmlElement; + +/** + * Expected multi table conditional into when then clause. + */ +@Getter +@Setter +public final class ExpectedMultiTableConditionalIntoWhenThenClause extends AbstractExpectedSQLSegment { + + @XmlElement(name = "when") + private ExpectedExpression whenClause; + + @XmlElement(name = "then") + private ExpectedMultiTableConditionalIntoThenClause thenClause; +} diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedInsertMultiTableElement.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableInsertIntoClause.java similarity index 88% rename from test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedInsertMultiTableElement.java rename to test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableInsertIntoClause.java index 3101d5d36f452..9c379b793cfd0 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedInsertMultiTableElement.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableInsertIntoClause.java @@ -26,11 +26,11 @@ import java.util.List; /** - * Expected insert multi table element. 
+ * Expected multi table insert into clause. */ @Getter -public final class ExpectedInsertMultiTableElement extends AbstractExpectedSQLSegment { +public final class ExpectedMultiTableInsertIntoClause extends AbstractExpectedSQLSegment { - @XmlElement(name = "table-element") + @XmlElement(name = "insert-statement") private final List insertTestCases = new LinkedList<>(); } diff --git a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/OrderStatisticsInfo.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableInsertType.java similarity index 65% rename from examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/OrderStatisticsInfo.java rename to test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableInsertType.java index da222e8f6449d..4c7877309c94d 100644 --- a/examples/example-core/example-api/src/main/java/org/apache/shardingsphere/example/core/api/entity/OrderStatisticsInfo.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/insert/ExpectedMultiTableInsertType.java @@ -15,27 +15,21 @@ * limitations under the License. */ -package org.apache.shardingsphere.example.core.api.entity; +package org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert; import lombok.Getter; import lombok.Setter; -import lombok.ToString; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.AbstractExpectedSQLSegment; -import java.io.Serializable; -import java.time.LocalDate; +import javax.xml.bind.annotation.XmlAttribute; +/** + * Expected multi table insert type. 
+ */ @Getter @Setter -@ToString -public class OrderStatisticsInfo implements Serializable { - - private static final long serialVersionUID = -1770007969944794302L; - - private Long id; - - private Long userId; - - private LocalDate orderDate; +public class ExpectedMultiTableInsertType extends AbstractExpectedSQLSegment { - private int orderNum; + @XmlAttribute(name = "value") + private String multiTableInsertType; } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/sequence/ExpectedSequenceNameClause.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/sequence/ExpectedSequenceNameClause.java new file mode 100644 index 0000000000000..d5b2936cb4404 --- /dev/null +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/sequence/ExpectedSequenceNameClause.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.sequence; + +import lombok.Getter; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.AbstractExpectedIdentifierSQLSegment; + +/** + * Excepted sequence name clause. + */ +@Getter +public final class ExpectedSequenceNameClause extends AbstractExpectedIdentifierSQLSegment { +} diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/table/ExpectedXmlTable.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/table/ExpectedFunctionTable.java similarity index 79% rename from test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/table/ExpectedXmlTable.java rename to test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/table/ExpectedFunctionTable.java index d23712e7f845e..7c3fac4c24b5f 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/table/ExpectedXmlTable.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/table/ExpectedFunctionTable.java @@ -20,17 +20,17 @@ import lombok.Getter; import lombok.Setter; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.AbstractExpectedDelimiterSQLSegment; -import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedXmlTableFunction; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.expr.ExpectedTableFunction; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; /** - * Expected xml table. + * Expected function table. 
*/ @Getter @Setter -public final class ExpectedXmlTable extends AbstractExpectedDelimiterSQLSegment { +public final class ExpectedFunctionTable extends AbstractExpectedDelimiterSQLSegment { @XmlAttribute(name = "table-name") private String tableName; @@ -38,9 +38,6 @@ public final class ExpectedXmlTable extends AbstractExpectedDelimiterSQLSegment @XmlAttribute(name = "table-alias") private String tableAlias; - @XmlElement(name = "xml-table-function") - private ExpectedXmlTableFunction xmlTableFunction; - - @XmlAttribute(name = "xml-table-function-alias") - private String xmlTableFunctionAlias; + @XmlElement(name = "table-function") + private ExpectedTableFunction tableFunction; } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/table/ExpectedTable.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/table/ExpectedTable.java index e102dd9bdae4f..0917f3cfdee73 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/table/ExpectedTable.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/segment/impl/table/ExpectedTable.java @@ -39,8 +39,8 @@ public final class ExpectedTable extends AbstractExpectedDelimiterSQLSegment { @XmlElement(name = "join-table") private ExpectedJoinTable joinTable; - @XmlElement(name = "xml-table") - private ExpectedXmlTable xmlTable; + @XmlElement(name = "function-table") + private ExpectedFunctionTable functionTable; @XmlElement(name = "collection-table") private ExpectedCollectionTable collectionTable; diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/ddl/AlterTableStatementTestCase.java 
b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/ddl/AlterTableStatementTestCase.java index 89fba335be081..c968cff0ad1eb 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/ddl/AlterTableStatementTestCase.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/ddl/AlterTableStatementTestCase.java @@ -26,6 +26,7 @@ import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.definition.ExpectedConstraintDefinition; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.definition.ExpectedConvertTableDefinition; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.definition.ExpectedModifyColumnDefinition; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.definition.ExpectedModifyCollectionRetrievalDefinition; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.definition.ExpectedRenameIndexDefinition; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.definition.ExpectedRenameColumnDefinition; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.table.ExpectedSimpleTable; @@ -70,4 +71,7 @@ public final class AlterTableStatementTestCase extends SQLParserTestCase { @XmlElement(name = "drop-column") private final List dropColumns = new LinkedList<>(); + + @XmlElement(name = "modify-collection-retrieval") + private ExpectedModifyCollectionRetrievalDefinition modifyCollectionRetrievalDefinition; } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/ddl/AlterViewStatementTestCase.java 
b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/ddl/AlterViewStatementTestCase.java index d113e03563ea7..6fc06b5eb1847 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/ddl/AlterViewStatementTestCase.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/ddl/AlterViewStatementTestCase.java @@ -19,6 +19,7 @@ import lombok.Getter; import lombok.Setter; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.definition.ExpectedConstraintDefinition; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.table.ExpectedSimpleTable; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.SQLParserTestCase; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.dml.SelectStatementTestCase; @@ -44,4 +45,7 @@ public final class AlterViewStatementTestCase extends SQLParserTestCase { @XmlElement(name = "select") private SelectStatementTestCase selectStatement; + + @XmlElement(name = "constraint-definition") + private ExpectedConstraintDefinition constraintDefinition; } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/ddl/CreateSequenceStatementTestCase.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/ddl/CreateSequenceStatementTestCase.java index d6d594c2107b5..f729b040f0b0f 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/ddl/CreateSequenceStatementTestCase.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/ddl/CreateSequenceStatementTestCase.java @@ -17,10 
+17,20 @@ package org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.statement.ddl; +import lombok.Getter; +import lombok.Setter; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.SQLParserTestCase; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.sequence.ExpectedSequenceNameClause; + +import javax.xml.bind.annotation.XmlElement; /** * Create sequence statement test case. */ +@Getter +@Setter public final class CreateSequenceStatementTestCase extends SQLParserTestCase { + + @XmlElement(name = "sequence-name") + private ExpectedSequenceNameClause sequenceName; } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/dml/InsertStatementTestCase.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/dml/InsertStatementTestCase.java index 599e69a3ae3fc..404e58ad87f66 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/dml/InsertStatementTestCase.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/dml/InsertStatementTestCase.java @@ -19,16 +19,19 @@ import lombok.Getter; import lombok.Setter; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.SQLParserTestCase; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert.ExpectedInsertColumnsClause; -import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert.ExpectedInsertMultiTableElement; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert.ExpectedInsertValuesClause; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert.ExpectedMultiTableConditionalIntoClause; 
+import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert.ExpectedMultiTableInsertIntoClause; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert.ExpectedMultiTableInsertType; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert.ExpectedOnDuplicateKeyColumns; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.insert.ExpectedReturningClause; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.output.ExpectedOutputClause; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.set.ExpectedSetClause; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.table.ExpectedSimpleTable; +import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.where.ExpectedWhereClause; import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.segment.impl.with.ExpectedWithClause; -import org.apache.shardingsphere.test.it.sql.parser.internal.cases.parser.jaxb.SQLParserTestCase; import javax.xml.bind.annotation.XmlElement; @@ -63,12 +66,21 @@ public final class InsertStatementTestCase extends SQLParserTestCase { @XmlElement(name = "output") private ExpectedOutputClause outputClause; - @XmlElement(name = "multi-table-element") - private ExpectedInsertMultiTableElement insertTableElement; + @XmlElement(name = "multi-table-insert-type") + private ExpectedMultiTableInsertType multiTableInsertType; + + @XmlElement(name = "multi-table-insert-into") + private ExpectedMultiTableInsertIntoClause multiTableInsertInto; + + @XmlElement(name = "multi-table-conditional-into") + private ExpectedMultiTableConditionalIntoClause multiTableConditionalInto; @XmlElement(name = "select-subquery") private SelectStatementTestCase selectSubquery; @XmlElement(name = 
"returning") private ExpectedReturningClause returningClause; + + @XmlElement(name = "where") + private ExpectedWhereClause whereClause; } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/dml/MergeStatementTestCase.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/dml/MergeStatementTestCase.java index 066987f8a82da..61cb3cf8ad9c6 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/dml/MergeStatementTestCase.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/parser/jaxb/statement/dml/MergeStatementTestCase.java @@ -44,6 +44,6 @@ public final class MergeStatementTestCase extends SQLParserTestCase { @XmlElement(name = "update") private UpdateStatementTestCase updateClause; - @XmlElement(name = "delete") - private DeleteStatementTestCase deleteClause; + @XmlElement(name = "insert") + private InsertStatementTestCase insertClause; } diff --git a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/sql/SQLCases.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/sql/SQLCases.java index d481778339f8c..5108585965da4 100644 --- a/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/sql/SQLCases.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/internal/cases/sql/SQLCases.java @@ -80,7 +80,7 @@ private Collection getAllDatabaseTypes() { } private boolean containsSQLCaseType(final SQLCase sqlCase, final SQLCaseType caseType) { - return null == sqlCase.getCaseTypes() || Splitter.on(',').trimResults().splitToList(sqlCase.getCaseTypes()).contains(caseType.name()); + return null == sqlCase.getCaseTypes() || 
Splitter.on(',').trimResults().splitToList(sqlCase.getCaseTypes().toUpperCase()).contains(caseType.name()); } /** diff --git a/test/util/src/main/java/org/apache/shardingsphere/test/loader/ExternalCaseSettings.java b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/loader/ExternalCaseSettings.java similarity index 84% rename from test/util/src/main/java/org/apache/shardingsphere/test/loader/ExternalCaseSettings.java rename to test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/loader/ExternalCaseSettings.java index 3429d693b3a36..fd09116edbcbd 100644 --- a/test/util/src/main/java/org/apache/shardingsphere/test/loader/ExternalCaseSettings.java +++ b/test/it/parser/src/main/java/org/apache/shardingsphere/test/it/sql/parser/loader/ExternalCaseSettings.java @@ -15,7 +15,9 @@ * limitations under the License. */ -package org.apache.shardingsphere.test.loader; +package org.apache.shardingsphere.test.it.sql.parser.loader; + +import org.apache.shardingsphere.test.loader.TestParameterLoadTemplate; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; @@ -57,9 +59,9 @@ String reportType() default "CSV"; /** - * Get case loader. + * Get test parameter load template. 
* - * @return case loader + * @return test parameter load template */ - Class> caseLoader(); + Class template(); } diff --git a/test/it/parser/src/main/resources/case/dcl/alter-user.xml b/test/it/parser/src/main/resources/case/dcl/alter-user.xml index 876d202dc4e8e..f9e12310a4539 100644 --- a/test/it/parser/src/main/resources/case/dcl/alter-user.xml +++ b/test/it/parser/src/main/resources/case/dcl/alter-user.xml @@ -44,6 +44,7 @@ + diff --git a/test/it/parser/src/main/resources/case/dcl/drop-user.xml b/test/it/parser/src/main/resources/case/dcl/drop-user.xml index 2386ca48345f2..d6b32b1e1dda6 100644 --- a/test/it/parser/src/main/resources/case/dcl/drop-user.xml +++ b/test/it/parser/src/main/resources/case/dcl/drop-user.xml @@ -21,6 +21,7 @@ + diff --git a/test/it/parser/src/main/resources/case/dcl/grant.xml b/test/it/parser/src/main/resources/case/dcl/grant.xml index 1c1f6fa959dd9..aeacaf585867b 100644 --- a/test/it/parser/src/main/resources/case/dcl/grant.xml +++ b/test/it/parser/src/main/resources/case/dcl/grant.xml @@ -104,6 +104,7 @@ +

- - - - @@ -217,9 +220,6 @@ chapter = true
- Xiaoman Wang - Haisheng Sun
@@ -273,4 +274,6 @@ + + diff --git a/test/it/parser/src/main/resources/case/dcl/revoke.xml b/test/it/parser/src/main/resources/case/dcl/revoke.xml index 2066c45975612..a5969a0dc533c 100644 --- a/test/it/parser/src/main/resources/case/dcl/revoke.xml +++ b/test/it/parser/src/main/resources/case/dcl/revoke.xml @@ -228,4 +228,6 @@ + + diff --git a/test/it/parser/src/main/resources/case/ddl/alter-operator.xml b/test/it/parser/src/main/resources/case/ddl/alter-operator.xml index c472f58711fd8..9f5c5ed9ec680 100644 --- a/test/it/parser/src/main/resources/case/ddl/alter-operator.xml +++ b/test/it/parser/src/main/resources/case/ddl/alter-operator.xml @@ -22,4 +22,5 @@ + diff --git a/test/it/parser/src/main/resources/case/ddl/alter-table.xml b/test/it/parser/src/main/resources/case/ddl/alter-table.xml index b333a2721f858..adb5985c32e9c 100644 --- a/test/it/parser/src/main/resources/case/ddl/alter-table.xml +++ b/test/it/parser/src/main/resources/case/ddl/alter-table.xml @@ -17,6 +17,222 @@ --> + +
+ + + +
+ + + + + + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + +
+ + + + + + + + +
+ +
+ + + + + + + + + + + + + + +
+ +
+
+ + + + + + + + + + + +
+ + + + + + + + +
+ + + + + + + + +
+ + + + + + + + +
+ + + + + + + + +
+ + + + + + + + +
+ + + +
+ + + + + + + + +
+ + + + + + + + +
+ + + + + + + + +
+ + + + + + + + +
+ + + + + + + + +
+ + + + + + + + + + + + + + + + +
+ + + +
+ + + +
+ + + +
+ +
+
+ + + + + + +
+ +
+
+ @@ -671,6 +887,10 @@
+ +
+ +
@@ -1267,9 +1487,219 @@
- + +
+ +
+
+ + + + +
+
+ + + + +
+
+ + + + +
+
+ + + + +
+
+ +
+ + + + + + +
+ + + +
+ + + + + + + + +
+ + + +
+ + + +
+ + + +
+ + + + + + + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + + + + + + +
+ + + + + + +
+ + + + + + + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ +
+ + + + +
+ +
+ +
+ +
+ + + + + + +
+ + + +
+ + + +
+ + + +
+ + + + + + +
+ + + +
+ + + + + + + + +
+ + + + + + diff --git a/test/it/parser/src/main/resources/case/ddl/alter-type.xml b/test/it/parser/src/main/resources/case/ddl/alter-type.xml index 747f1d86f4797..ad6edc7f61293 100644 --- a/test/it/parser/src/main/resources/case/ddl/alter-type.xml +++ b/test/it/parser/src/main/resources/case/ddl/alter-type.xml @@ -20,6 +20,7 @@ + diff --git a/test/it/parser/src/main/resources/case/ddl/alter-view.xml b/test/it/parser/src/main/resources/case/ddl/alter-view.xml index edfc0706dd5bb..205e23ff859a8 100644 --- a/test/it/parser/src/main/resources/case/ddl/alter-view.xml +++ b/test/it/parser/src/main/resources/case/ddl/alter-view.xml @@ -65,4 +65,24 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/test/it/parser/src/main/resources/case/ddl/analyze.xml b/test/it/parser/src/main/resources/case/ddl/analyze.xml index 4638eb6b88bd1..985b7877003ef 100644 --- a/test/it/parser/src/main/resources/case/ddl/analyze.xml +++ b/test/it/parser/src/main/resources/case/ddl/analyze.xml @@ -58,4 +58,16 @@
+ + +
+ + + +
+ + + +
+ diff --git a/test/it/parser/src/main/resources/case/ddl/audit.xml b/test/it/parser/src/main/resources/case/ddl/audit.xml index b3c9b380f9324..d1784296d6abb 100644 --- a/test/it/parser/src/main/resources/case/ddl/audit.xml +++ b/test/it/parser/src/main/resources/case/ddl/audit.xml @@ -21,5 +21,143 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/test/it/parser/src/main/resources/case/ddl/create-sequence.xml b/test/it/parser/src/main/resources/case/ddl/create-sequence.xml index 2ba44d0cc8a4f..4a955611092dd 100644 --- a/test/it/parser/src/main/resources/case/ddl/create-sequence.xml +++ b/test/it/parser/src/main/resources/case/ddl/create-sequence.xml @@ -17,9 +17,22 @@ --> - - - - - + + + + + + + + + + + + + + + + + + diff --git a/test/it/parser/src/main/resources/case/ddl/create-table.xml b/test/it/parser/src/main/resources/case/ddl/create-table.xml index 9d1a21b2fa1f4..521c7c3cdef3a 100644 --- a/test/it/parser/src/main/resources/case/ddl/create-table.xml +++ b/test/it/parser/src/main/resources/case/ddl/create-table.xml @@ -2021,4 +2021,19 @@ + + +
+ + + + + + + + + + +
+ diff --git a/test/it/parser/src/main/resources/case/ddl/create-view.xml b/test/it/parser/src/main/resources/case/ddl/create-view.xml index 5a329c5245555..0d6972b547baf 100644 --- a/test/it/parser/src/main/resources/case/ddl/create-view.xml +++ b/test/it/parser/src/main/resources/case/ddl/create-view.xml @@ -167,4 +167,79 @@ + + + + + diff --git a/test/it/parser/src/main/resources/case/ddl/execute.xml b/test/it/parser/src/main/resources/case/ddl/execute.xml index 3d2a8e706eb8e..4251a129fbf29 100644 --- a/test/it/parser/src/main/resources/case/ddl/execute.xml +++ b/test/it/parser/src/main/resources/case/ddl/execute.xml @@ -18,4 +18,5 @@ + diff --git a/test/it/parser/src/main/resources/case/dml/delete.xml b/test/it/parser/src/main/resources/case/dml/delete.xml index a4e3e5f85207e..a717a5067bfd4 100644 --- a/test/it/parser/src/main/resources/case/dml/delete.xml +++ b/test/it/parser/src/main/resources/case/dml/delete.xml @@ -129,7 +129,17 @@ false - + + + + + + + + + + + diff --git a/test/it/parser/src/main/resources/case/dml/insert.xml b/test/it/parser/src/main/resources/case/dml/insert.xml index a5cae8cc3f00a..34c2b6b610784 100644 --- a/test/it/parser/src/main/resources/case/dml/insert.xml +++ b/test/it/parser/src/main/resources/case/dml/insert.xml @@ -1200,12 +1200,36 @@ - + - - - + + + + + + + + + VALUES(salary) + + + + + + + + + + VALUES(salary) + + + + + * + + + + @@ -1338,8 +1362,9 @@ - - + + +
@@ -1362,8 +1387,8 @@ - - + +
@@ -1386,8 +1411,8 @@ - - + +
@@ -1410,8 +1435,8 @@ - - + + +
- @@ -1532,7 +1557,7 @@ - @@ -1568,7 +1593,7 @@ - @@ -1602,7 +1627,7 @@ - @@ -2021,8 +2046,9 @@ - - + + +
@@ -2046,8 +2072,8 @@ - - + +
@@ -2067,12 +2093,20 @@ - + + + + + + + + + + - - + + - - - -
- - - -
- - - -
- - - + + + + + + + + + + <= + + + + + + + +
+ + + + + + + + + + + + + > + + + + + + AND + + + + + + <= + + + + + + + + + +
+ + + + + + + + + + + > + + + + + + + +
+ + + + + - - - -
- - - -
- - - -
- - - + + + + + + + + + + <= + + + + + + + +
+ + + + + + + + + + + + + > + + + + + + AND + + + + + + <= + + + + + + + + + +
+ + + + + + +
+ + + +
+ + + + + + + + + + + + TO_NCHAR('John Smith') + + + + + + + + + + + diff --git a/test/it/parser/src/main/resources/case/dml/merge.xml b/test/it/parser/src/main/resources/case/dml/merge.xml index cc6acd523030e..005ea1d2703f8 100644 --- a/test/it/parser/src/main/resources/case/dml/merge.xml +++ b/test/it/parser/src/main/resources/case/dml/merge.xml @@ -197,12 +197,32 @@ - - - - + + + + - + + + + + + + + + + + + + + + + + + * + + + + @@ -225,4 +245,194 @@ + + + + + + + + + + + + + + + + + + + = + + + + + + + + + + + + + + + + + <= + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + = + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + * + + + + + + + + + + + + + + + + + + > + + + + + + + + diff --git a/test/it/parser/src/main/resources/case/dml/replace.xml b/test/it/parser/src/main/resources/case/dml/replace.xml index e3dccb901e72a..415a09c21c3a9 100644 --- a/test/it/parser/src/main/resources/case/dml/replace.xml +++ b/test/it/parser/src/main/resources/case/dml/replace.xml @@ -867,7 +867,7 @@ - @@ -896,7 +896,7 @@
- @@ -931,7 +931,7 @@ - @@ -967,7 +967,7 @@ - diff --git a/test/it/parser/src/main/resources/case/dml/select-expression.xml b/test/it/parser/src/main/resources/case/dml/select-expression.xml index 44ea9fe7ba837..9a9166a21234f 100644 --- a/test/it/parser/src/main/resources/case/dml/select-expression.xml +++ b/test/it/parser/src/main/resources/case/dml/select-expression.xml @@ -1007,7 +1007,7 @@ - > ALL + > @@ -1806,7 +1809,14 @@ - + + ~ + + + + + + @@ -1883,11 +1893,26 @@ - + + + + + + + + + = - + + + + + + + + @@ -1980,7 +2005,8 @@ - + + @@ -2044,7 +2070,17 @@ - + + + + + + + + + + + @@ -2059,7 +2095,18 @@ - + + + + + + + + + + + + @@ -2687,7 +2734,21 @@ - + + cust_address_ntab MULTISET INTERSECT DISTINCT cust_address2_ntab + + + + + + + + + INTERSECT + DISTINCT + + + @@ -2709,7 +2770,25 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/test/it/parser/src/main/resources/case/dml/select-group-by.xml b/test/it/parser/src/main/resources/case/dml/select-group-by.xml index e3cb0819f1f46..688a1ef93e6ba 100644 --- a/test/it/parser/src/main/resources/case/dml/select-group-by.xml +++ b/test/it/parser/src/main/resources/case/dml/select-group-by.xml @@ -583,7 +583,14 @@ false - + + + + + + + + diff --git a/test/it/parser/src/main/resources/case/dml/select-join.xml b/test/it/parser/src/main/resources/case/dml/select-join.xml index 11a61f36b0a2c..b8dd63f2a9687 100644 --- a/test/it/parser/src/main/resources/case/dml/select-join.xml +++ b/test/it/parser/src/main/resources/case/dml/select-join.xml @@ -591,4 +591,56 @@ + + diff --git a/test/it/parser/src/main/resources/case/dml/select-special-function.xml b/test/it/parser/src/main/resources/case/dml/select-special-function.xml index 89d44df472dc3..44ef9346357b7 100644 --- a/test/it/parser/src/main/resources/case/dml/select-special-function.xml +++ b/test/it/parser/src/main/resources/case/dml/select-special-function.xml @@ -41,7 +41,11 @@ - + + + + + @@ -427,7 
+431,7 @@ - + EXTRACT(YEAR FROM TIMESTAMP '2001-02-16 20:38:40') @@ -567,4 +571,24 @@ + + + + + + + + + + + + + diff --git a/test/it/parser/src/main/resources/case/dml/select.xml b/test/it/parser/src/main/resources/case/dml/select.xml index 70d8bc7b30a2c..38059feba3ec8 100644 --- a/test/it/parser/src/main/resources/case/dml/select.xml +++ b/test/it/parser/src/main/resources/case/dml/select.xml @@ -899,7 +899,6 @@ - AND @@ -951,7 +950,6 @@ - AND @@ -3750,7 +3748,17 @@ - + + + + + + + + + + + @@ -4707,12 +4715,22 @@ @@ -5158,7 +5176,22 @@ - + + CUME_DIST(15500, .05) WITHIN GROUP (ORDER BY salary, commission_pct) + + + + + + + + + CUME_DIST(15500, .05) WITHIN GROUP (ORDER BY salary, commission_pct) + + + @@ -5521,7 +5567,22 @@ - + + XMLCOLATTVAL(e.employee_id AS EVALNAME 'ID', e.last_name AS name, e.salary) + + + + + + + + + + + + + + + + + + XMLCOLATTVAL(e.employee_id AS EVALNAME 'ID', e.last_name AS name, e.salary) + + + @@ -5629,7 +5712,7 @@ + + + + + + + + + + + diff --git a/test/it/parser/src/main/resources/case/dml/update.xml b/test/it/parser/src/main/resources/case/dml/update.xml index 88204ebc909f2..684251ad2d582 100644 --- a/test/it/parser/src/main/resources/case/dml/update.xml +++ b/test/it/parser/src/main/resources/case/dml/update.xml @@ -469,7 +469,16 @@ - + + + + + - + + + + + @@ -539,7 +548,6 @@ - @@ -620,19 +628,134 @@ - + + + + + + + = + + + + + + + + + + + + = + + + + + + + + + + + + = + + + + + + + + + + + + + + + + + + + - + + + + + + + = + + + + + + + + + + + + = + + + + + + + + + + + + = + + + + + + + + + + + + + + + + + + + - + + + + + + + = + + + + + + + + + + + @@ -1034,8 +1157,18 @@ - - + + + VALUE(q) + + + + + + VALUE(q) + + + @@ -1201,8 +1334,19 @@
- - + + + + + + + + + + + + + + diff --git a/test/it/parser/src/main/resources/case/rdl/alter.xml b/test/it/parser/src/main/resources/case/rdl/alter.xml index 72c2589464827..49f2cc02b2ee1 100644 --- a/test/it/parser/src/main/resources/case/rdl/alter.xml +++ b/test/it/parser/src/main/resources/case/rdl/alter.xml @@ -197,7 +197,7 @@ - + @@ -209,7 +209,9 @@ - + + + @@ -229,7 +231,9 @@ - + + + @@ -249,7 +253,9 @@ - + + + @@ -266,7 +272,9 @@ - + + + diff --git a/test/it/parser/src/main/resources/case/rdl/create.xml b/test/it/parser/src/main/resources/case/rdl/create.xml index 377cb397e0ab4..3a7c16391406e 100644 --- a/test/it/parser/src/main/resources/case/rdl/create.xml +++ b/test/it/parser/src/main/resources/case/rdl/create.xml @@ -381,7 +381,9 @@ - + + + @@ -398,7 +400,9 @@ - + + + @@ -415,7 +419,9 @@ - + + + @@ -435,7 +441,9 @@ - + + + @@ -455,7 +463,9 @@ - + + + @@ -670,7 +680,9 @@ - + + + diff --git a/test/it/parser/src/main/resources/sql/supported/dcl/alter-user.xml b/test/it/parser/src/main/resources/sql/supported/dcl/alter-user.xml index 049f7e87821e9..86e576f4cf405 100644 --- a/test/it/parser/src/main/resources/sql/supported/dcl/alter-user.xml +++ b/test/it/parser/src/main/resources/sql/supported/dcl/alter-user.xml @@ -44,6 +44,7 @@ + diff --git a/test/it/parser/src/main/resources/sql/supported/dcl/drop-user.xml b/test/it/parser/src/main/resources/sql/supported/dcl/drop-user.xml index f19e47d39a93a..297a520c0d044 100644 --- a/test/it/parser/src/main/resources/sql/supported/dcl/drop-user.xml +++ b/test/it/parser/src/main/resources/sql/supported/dcl/drop-user.xml @@ -20,7 +20,8 @@ - + + diff --git a/test/it/parser/src/main/resources/sql/supported/dcl/grant-user.xml b/test/it/parser/src/main/resources/sql/supported/dcl/grant-user.xml index cf7df56d6aff3..c95279f47ff27 100644 --- a/test/it/parser/src/main/resources/sql/supported/dcl/grant-user.xml +++ b/test/it/parser/src/main/resources/sql/supported/dcl/grant-user.xml @@ -46,6 +46,7 @@ + @@ -149,4 +150,6 @@ + + 
diff --git a/test/it/parser/src/main/resources/sql/supported/dcl/revoke-user.xml b/test/it/parser/src/main/resources/sql/supported/dcl/revoke-user.xml index 8ccf8f496aa16..4db968095c47a 100644 --- a/test/it/parser/src/main/resources/sql/supported/dcl/revoke-user.xml +++ b/test/it/parser/src/main/resources/sql/supported/dcl/revoke-user.xml @@ -125,4 +125,6 @@ + + diff --git a/test/it/parser/src/main/resources/sql/supported/ddl/alter-operator.xml b/test/it/parser/src/main/resources/sql/supported/ddl/alter-operator.xml index 6d133915790aa..ccd6791d4c78f 100644 --- a/test/it/parser/src/main/resources/sql/supported/ddl/alter-operator.xml +++ b/test/it/parser/src/main/resources/sql/supported/ddl/alter-operator.xml @@ -22,4 +22,5 @@ + diff --git a/test/it/parser/src/main/resources/sql/supported/ddl/alter-table.xml b/test/it/parser/src/main/resources/sql/supported/ddl/alter-table.xml index 2a96944dafb7e..12195abe562d0 100644 --- a/test/it/parser/src/main/resources/sql/supported/ddl/alter-table.xml +++ b/test/it/parser/src/main/resources/sql/supported/ddl/alter-table.xml @@ -17,6 +17,34 @@ --> + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -27,6 +55,7 @@ + @@ -158,5 +187,44 @@ - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/test/it/parser/src/main/resources/sql/supported/ddl/alter-type.xml b/test/it/parser/src/main/resources/sql/supported/ddl/alter-type.xml index 9281778e4415d..7b40f967020ea 100644 --- a/test/it/parser/src/main/resources/sql/supported/ddl/alter-type.xml +++ b/test/it/parser/src/main/resources/sql/supported/ddl/alter-type.xml @@ -20,6 +20,7 @@ + diff --git a/test/it/parser/src/main/resources/sql/supported/ddl/alter-view.xml b/test/it/parser/src/main/resources/sql/supported/ddl/alter-view.xml index a6cfb5c5a41f2..1990b3a6ddcf2 100644 --- a/test/it/parser/src/main/resources/sql/supported/ddl/alter-view.xml +++ b/test/it/parser/src/main/resources/sql/supported/ddl/alter-view.xml @@ -27,4 +27,7 @@ + + + 
diff --git a/test/it/parser/src/main/resources/sql/supported/ddl/analyze.xml b/test/it/parser/src/main/resources/sql/supported/ddl/analyze.xml index d57d4c281efac..405cecb392fa5 100644 --- a/test/it/parser/src/main/resources/sql/supported/ddl/analyze.xml +++ b/test/it/parser/src/main/resources/sql/supported/ddl/analyze.xml @@ -28,4 +28,7 @@ + + + diff --git a/test/it/parser/src/main/resources/sql/supported/ddl/audit.xml b/test/it/parser/src/main/resources/sql/supported/ddl/audit.xml index bf2f00535f79c..ce24256d545ff 100644 --- a/test/it/parser/src/main/resources/sql/supported/ddl/audit.xml +++ b/test/it/parser/src/main/resources/sql/supported/ddl/audit.xml @@ -21,5 +21,143 @@ - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/test/it/parser/src/main/resources/sql/supported/ddl/create-sequence.xml b/test/it/parser/src/main/resources/sql/supported/ddl/create-sequence.xml index f6291fe183c4b..bf8d45fa71820 100644 --- a/test/it/parser/src/main/resources/sql/supported/ddl/create-sequence.xml +++ b/test/it/parser/src/main/resources/sql/supported/ddl/create-sequence.xml @@ -33,4 +33,5 @@ INCREMENT BY 1 NOCACHE NOCYCLE;" db-types="Oracle" /> + diff --git a/test/it/parser/src/main/resources/sql/supported/ddl/create-table.xml b/test/it/parser/src/main/resources/sql/supported/ddl/create-table.xml index bfc7981844485..d9bfa0a1a656b 100644 --- a/test/it/parser/src/main/resources/sql/supported/ddl/create-table.xml +++ b/test/it/parser/src/main/resources/sql/supported/ddl/create-table.xml @@ -148,4 +148,6 @@ + + diff --git a/test/it/parser/src/main/resources/sql/supported/ddl/create-view.xml b/test/it/parser/src/main/resources/sql/supported/ddl/create-view.xml index 2a366f0410fff..eec620e4abf10 100644 --- 
a/test/it/parser/src/main/resources/sql/supported/ddl/create-view.xml +++ b/test/it/parser/src/main/resources/sql/supported/ddl/create-view.xml @@ -24,4 +24,5 @@ + diff --git a/test/it/parser/src/main/resources/sql/supported/ddl/execute.xml b/test/it/parser/src/main/resources/sql/supported/ddl/execute.xml index 5682d3c55a38f..209cdbb3199d0 100644 --- a/test/it/parser/src/main/resources/sql/supported/ddl/execute.xml +++ b/test/it/parser/src/main/resources/sql/supported/ddl/execute.xml @@ -18,4 +18,5 @@ + diff --git a/test/it/parser/src/main/resources/sql/supported/dml/insert.xml b/test/it/parser/src/main/resources/sql/supported/dml/insert.xml index c5cad9f56671b..d5ceb3b464c2e 100644 --- a/test/it/parser/src/main/resources/sql/supported/dml/insert.xml +++ b/test/it/parser/src/main/resources/sql/supported/dml/insert.xml @@ -17,7 +17,7 @@ --> - + @@ -84,8 +84,8 @@ - - + + @@ -94,4 +94,5 @@ + diff --git a/test/it/parser/src/main/resources/sql/supported/dml/merge.xml b/test/it/parser/src/main/resources/sql/supported/dml/merge.xml index 39669775ce0ec..de66626b0211e 100644 --- a/test/it/parser/src/main/resources/sql/supported/dml/merge.xml +++ b/test/it/parser/src/main/resources/sql/supported/dml/merge.xml @@ -21,4 +21,6 @@ + + diff --git a/test/it/parser/src/main/resources/sql/supported/dml/select-expression.xml b/test/it/parser/src/main/resources/sql/supported/dml/select-expression.xml index e5e31570faa27..96e30ebb469f6 100644 --- a/test/it/parser/src/main/resources/sql/supported/dml/select-expression.xml +++ b/test/it/parser/src/main/resources/sql/supported/dml/select-expression.xml @@ -70,7 +70,7 @@ - + @@ -117,4 +117,23 @@ + + + + + + + + + + + + + + + + + + + diff --git a/test/it/parser/src/main/resources/sql/supported/dml/select-join.xml b/test/it/parser/src/main/resources/sql/supported/dml/select-join.xml index 718c0a480ec6f..fd2dcd40b418e 100644 --- a/test/it/parser/src/main/resources/sql/supported/dml/select-join.xml +++ 
b/test/it/parser/src/main/resources/sql/supported/dml/select-join.xml @@ -31,4 +31,5 @@ + diff --git a/test/it/parser/src/main/resources/sql/supported/dml/select-special-function.xml b/test/it/parser/src/main/resources/sql/supported/dml/select-special-function.xml index 5f8e8b70518a1..33706e16d67fa 100644 --- a/test/it/parser/src/main/resources/sql/supported/dml/select-special-function.xml +++ b/test/it/parser/src/main/resources/sql/supported/dml/select-special-function.xml @@ -43,4 +43,5 @@ + diff --git a/test/it/parser/src/main/resources/sql/supported/dml/select.xml b/test/it/parser/src/main/resources/sql/supported/dml/select.xml index 512d6e7e07db8..47d15ee9abf1b 100644 --- a/test/it/parser/src/main/resources/sql/supported/dml/select.xml +++ b/test/it/parser/src/main/resources/sql/supported/dml/select.xml @@ -206,4 +206,11 @@ + + + + + + + diff --git a/test/it/parser/src/main/resources/sql/supported/dml/update.xml b/test/it/parser/src/main/resources/sql/supported/dml/update.xml index f47e805fc80d0..6b3fcc6b2ef93 100644 --- a/test/it/parser/src/main/resources/sql/supported/dml/update.xml +++ b/test/it/parser/src/main/resources/sql/supported/dml/update.xml @@ -30,7 +30,7 @@ + update_user=case WHEN (id=?) THEN ? WHEN (id=?) THEN ? WHEN (id=?) THEN ? end, update_time=case WHEN (id=?) THEN ? end where tenant_id = ?" 
db-types="MySQL,Oracle" /> diff --git a/test/it/parser/src/main/resources/sql/supported/rdl/alter.xml b/test/it/parser/src/main/resources/sql/supported/rdl/alter.xml index c8dc5cf0b81f5..caf39b603244f 100644 --- a/test/it/parser/src/main/resources/sql/supported/rdl/alter.xml +++ b/test/it/parser/src/main/resources/sql/supported/rdl/alter.xml @@ -26,19 +26,19 @@ - - - - + + + + - - - + + + diff --git a/test/it/parser/src/main/resources/sql/supported/rdl/create.xml b/test/it/parser/src/main/resources/sql/supported/rdl/create.xml index 7ba52f0ccefc4..a81cbdecad89d 100644 --- a/test/it/parser/src/main/resources/sql/supported/rdl/create.xml +++ b/test/it/parser/src/main/resources/sql/supported/rdl/create.xml @@ -39,10 +39,10 @@ - - - - + + + + @@ -55,23 +55,23 @@ - - - + + + - - - + + + - - + + - + diff --git a/test/it/parser/src/main/resources/sql/unsupported/unsupported.xml b/test/it/parser/src/main/resources/sql/unsupported/unsupported.xml index 0da27863a0180..a46faf20ed0ce 100644 --- a/test/it/parser/src/main/resources/sql/unsupported/unsupported.xml +++ b/test/it/parser/src/main/resources/sql/unsupported/unsupported.xml @@ -106,11 +106,6 @@ - - - - - @@ -129,7 +124,6 @@ - @@ -142,32 +136,6 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -187,193 +155,74 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -395,29 +244,16 @@ - - - - - - - - - - - - - @@ -430,59 +266,29 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/common/DefaultPipelineDataSourceManagerTest.java b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/common/DefaultPipelineDataSourceManagerTest.java index 161f9c3f9da72..8cabdb4d804e0 100644 --- 
a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/common/DefaultPipelineDataSourceManagerTest.java +++ b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/common/DefaultPipelineDataSourceManagerTest.java @@ -54,25 +54,23 @@ void setUp() { @Test void assertGetDataSource() { - PipelineDataSourceManager dataSourceManager = new DefaultPipelineDataSourceManager(); - PipelineDataSourceConfiguration source = jobConfig.getSources().values().iterator().next(); - DataSource actual = dataSourceManager.getDataSource(PipelineDataSourceConfigurationFactory.newInstance(source.getType(), source.getParameter())); - assertThat(actual, instanceOf(PipelineDataSourceWrapper.class)); + try (PipelineDataSourceManager dataSourceManager = new DefaultPipelineDataSourceManager()) { + PipelineDataSourceConfiguration source = jobConfig.getSources().values().iterator().next(); + DataSource actual = dataSourceManager.getDataSource(PipelineDataSourceConfigurationFactory.newInstance(source.getType(), source.getParameter())); + assertThat(actual, instanceOf(PipelineDataSourceWrapper.class)); + } } @Test void assertClose() throws ReflectiveOperationException { PipelineDataSourceConfiguration source = jobConfig.getSources().values().iterator().next(); - PipelineDataSourceManager dataSourceManager = new DefaultPipelineDataSourceManager(); - try { + try (PipelineDataSourceManager dataSourceManager = new DefaultPipelineDataSourceManager()) { dataSourceManager.getDataSource(PipelineDataSourceConfigurationFactory.newInstance(source.getType(), source.getParameter())); dataSourceManager.getDataSource(PipelineDataSourceConfigurationFactory.newInstance(jobConfig.getTarget().getType(), jobConfig.getTarget().getParameter())); Map cachedDataSources = (Map) Plugins.getMemberAccessor().get(DefaultPipelineDataSourceManager.class.getDeclaredField("cachedDataSources"), dataSourceManager); assertThat(cachedDataSources.size(), is(2)); 
dataSourceManager.close(); assertTrue(cachedDataSources.isEmpty()); - } finally { - dataSourceManager.close(); } } } diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/consistencycheck/algorithm/DataMatchDataConsistencyCalculateAlgorithmTest.java b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/consistencycheck/algorithm/DataMatchDataConsistencyCalculateAlgorithmTest.java deleted file mode 100644 index cbf3b5bb04a29..0000000000000 --- a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/consistencycheck/algorithm/DataMatchDataConsistencyCalculateAlgorithmTest.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.shardingsphere.test.it.data.pipeline.core.consistencycheck.algorithm; - -import com.zaxxer.hikari.HikariDataSource; -import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineColumnMetaData; -import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceWrapper; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.DataConsistencyCalculateParameter; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataMatchDataConsistencyCalculateAlgorithm; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCalculatedResult; -import org.apache.shardingsphere.infra.database.core.type.DatabaseType; -import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; -import org.apache.shardingsphere.test.util.PropertiesBuilder; -import org.apache.shardingsphere.test.util.PropertiesBuilder.Property; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.mockito.internal.configuration.plugins.Plugins; - -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.SQLException; -import java.sql.Types; -import java.util.Collections; -import java.util.Optional; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.jupiter.api.Assertions.assertTrue; - -class DataMatchDataConsistencyCalculateAlgorithmTest { - - private static PipelineDataSourceWrapper source; - - private static PipelineDataSourceWrapper target; - - @BeforeAll - static void setUp() throws Exception { - source = new PipelineDataSourceWrapper(createHikariDataSource("source_ds"), TypedSPILoader.getService(DatabaseType.class, "FIXTURE")); - createTableAndInitData(source, "t_order_copy"); - target = new PipelineDataSourceWrapper(createHikariDataSource("target_ds"), TypedSPILoader.getService(DatabaseType.class, 
"FIXTURE")); - createTableAndInitData(target, "t_order"); - } - - @AfterAll - static void tearDown() throws Exception { - source.close(); - target.close(); - } - - private static HikariDataSource createHikariDataSource(final String databaseName) { - HikariDataSource result = new HikariDataSource(); - result.setJdbcUrl(String.format("jdbc:h2:mem:%s;DATABASE_TO_UPPER=false;MODE=MySQL", databaseName)); - result.setUsername("root"); - result.setPassword("root"); - result.setMaximumPoolSize(10); - result.setMinimumIdle(2); - result.setConnectionTimeout(15 * 1000); - result.setIdleTimeout(40 * 1000); - return result; - } - - private static void createTableAndInitData(final PipelineDataSourceWrapper dataSource, final String tableName) throws SQLException { - try (Connection connection = dataSource.getConnection()) { - String sql = String.format("CREATE TABLE %s (order_id INT NOT NULL, user_id INT NOT NULL, status VARCHAR(45) NULL, PRIMARY KEY (order_id))", tableName); - connection.createStatement().execute(sql); - PreparedStatement preparedStatement = connection.prepareStatement(String.format("INSERT INTO %s (order_id, user_id, status) VALUES (?, ?, ?)", tableName)); - for (int i = 0; i < 10; i++) { - preparedStatement.setInt(1, i + 1); - preparedStatement.setInt(2, i + 1); - preparedStatement.setString(3, "test"); - preparedStatement.execute(); - } - } - } - - @Test - void assertCalculateFromBegin() throws ReflectiveOperationException { - DataMatchDataConsistencyCalculateAlgorithm calculateAlgorithm = new DataMatchDataConsistencyCalculateAlgorithm(); - Plugins.getMemberAccessor().set(DataMatchDataConsistencyCalculateAlgorithm.class.getDeclaredField("chunkSize"), calculateAlgorithm, 5); - DataConsistencyCalculateParameter sourceParam = generateParameter(source, "t_order_copy", 0); - Optional sourceCalculateResult = calculateAlgorithm.calculateChunk(sourceParam); - DataConsistencyCalculateParameter targetParam = generateParameter(target, "t_order", 0); - Optional 
targetCalculateResult = calculateAlgorithm.calculateChunk(targetParam); - assertTrue(sourceCalculateResult.isPresent()); - assertTrue(targetCalculateResult.isPresent()); - assertTrue(sourceCalculateResult.get().getMaxUniqueKeyValue().isPresent()); - assertTrue(targetCalculateResult.get().getMaxUniqueKeyValue().isPresent()); - assertThat(sourceCalculateResult.get().getMaxUniqueKeyValue().get(), is(targetCalculateResult.get().getMaxUniqueKeyValue().get())); - assertThat(targetCalculateResult.get().getMaxUniqueKeyValue().get(), is(5L)); - assertThat(sourceCalculateResult.get(), is(targetCalculateResult.get())); - } - - @Test - void assertCalculateFromMiddle() throws ReflectiveOperationException { - DataMatchDataConsistencyCalculateAlgorithm calculateAlgorithm = new DataMatchDataConsistencyCalculateAlgorithm(); - Plugins.getMemberAccessor().set(DataMatchDataConsistencyCalculateAlgorithm.class.getDeclaredField("chunkSize"), calculateAlgorithm, 5); - DataConsistencyCalculateParameter sourceParam = generateParameter(source, "t_order_copy", 5); - Optional sourceCalculateResult = calculateAlgorithm.calculateChunk(sourceParam); - DataConsistencyCalculateParameter targetParam = generateParameter(target, "t_order", 5); - Optional targetCalculateResult = calculateAlgorithm.calculateChunk(targetParam); - assertTrue(sourceCalculateResult.isPresent()); - assertTrue(targetCalculateResult.isPresent()); - assertTrue(sourceCalculateResult.get().getMaxUniqueKeyValue().isPresent()); - assertTrue(targetCalculateResult.get().getMaxUniqueKeyValue().isPresent()); - assertThat(sourceCalculateResult.get().getMaxUniqueKeyValue().get(), is(targetCalculateResult.get().getMaxUniqueKeyValue().get())); - assertThat(targetCalculateResult.get().getMaxUniqueKeyValue().get(), is(10L)); - assertThat(sourceCalculateResult.get(), is(targetCalculateResult.get())); - } - - @Test - void assertInitWithWrongProps() { - DataMatchDataConsistencyCalculateAlgorithm calculateAlgorithm = new 
DataMatchDataConsistencyCalculateAlgorithm(); - calculateAlgorithm.init(PropertiesBuilder.build(new Property("chunk-size", "wrong"))); - DataConsistencyCalculateParameter sourceParam = generateParameter(source, "t_order_copy", 0); - Optional sourceCalculateResult = calculateAlgorithm.calculateChunk(sourceParam); - DataConsistencyCalculateParameter targetParam = generateParameter(target, "t_order", 0); - Optional targetCalculateResult = calculateAlgorithm.calculateChunk(targetParam); - assertTrue(sourceCalculateResult.isPresent()); - assertTrue(targetCalculateResult.isPresent()); - assertTrue(sourceCalculateResult.get().getMaxUniqueKeyValue().isPresent()); - assertTrue(targetCalculateResult.get().getMaxUniqueKeyValue().isPresent()); - assertThat(sourceCalculateResult.get().getMaxUniqueKeyValue().get(), is(targetCalculateResult.get().getMaxUniqueKeyValue().get())); - assertThat(targetCalculateResult.get().getMaxUniqueKeyValue().get(), is(10L)); - assertThat(sourceCalculateResult.get(), is(targetCalculateResult.get())); - } - - private DataConsistencyCalculateParameter generateParameter(final PipelineDataSourceWrapper dataSource, final String logicTableName, final Object dataCheckPosition) { - DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, "H2"); - PipelineColumnMetaData uniqueKey = new PipelineColumnMetaData(1, "order_id", Types.INTEGER, "integer", false, true, true); - return new DataConsistencyCalculateParameter(dataSource, null, logicTableName, Collections.emptyList(), databaseType, uniqueKey, dataCheckPosition); - } -} diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/consistencycheck/table/calculator/RecordSingleTableInventoryCalculatorTest.java b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/consistencycheck/table/calculator/RecordSingleTableInventoryCalculatorTest.java new file mode 100644 index 0000000000000..9425f95ccba18 --- /dev/null +++ 
b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/consistencycheck/table/calculator/RecordSingleTableInventoryCalculatorTest.java @@ -0,0 +1,113 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.test.it.data.pipeline.core.consistencycheck.table.calculator; + +import com.zaxxer.hikari.HikariDataSource; +import org.apache.commons.lang3.RandomStringUtils; +import org.apache.shardingsphere.data.pipeline.api.metadata.SchemaTableName; +import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineColumnMetaData; +import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceWrapper; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.SingleTableInventoryCalculatedResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator.RecordSingleTableInventoryCalculator; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.calculator.SingleTableInventoryCalculateParameter; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; +import 
org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Types; +import java.util.Collections; +import java.util.List; +import java.util.Optional; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class RecordSingleTableInventoryCalculatorTest { + + private static PipelineDataSourceWrapper dataSource; + + @BeforeAll + static void setUp() throws Exception { + dataSource = new PipelineDataSourceWrapper(createHikariDataSource("calc_" + RandomStringUtils.randomAlphanumeric(9)), TypedSPILoader.getService(DatabaseType.class, "H2")); + createTableAndInitData(dataSource); + } + + @AfterAll + static void tearDown() throws Exception { + dataSource.close(); + } + + private static HikariDataSource createHikariDataSource(final String databaseName) { + HikariDataSource result = new HikariDataSource(); + result.setJdbcUrl(String.format("jdbc:h2:mem:%s;DATABASE_TO_UPPER=false;MODE=MySQL", databaseName)); + result.setUsername("root"); + result.setPassword("root"); + result.setMaximumPoolSize(10); + result.setMinimumIdle(2); + result.setConnectionTimeout(15 * 1000); + result.setIdleTimeout(40 * 1000); + return result; + } + + private static void createTableAndInitData(final PipelineDataSourceWrapper dataSource) throws SQLException { + try (Connection connection = dataSource.getConnection()) { + String sql = "CREATE TABLE t_order (order_id INT PRIMARY KEY, user_id INT NOT NULL, status VARCHAR(12))"; + connection.createStatement().execute(sql); + PreparedStatement preparedStatement = connection.prepareStatement("INSERT INTO t_order (order_id, user_id, status) VALUES (?, ?, ?)"); + for (int i = 0; i < 10; i++) { + preparedStatement.setInt(1, i + 1); + preparedStatement.setInt(2, i + 1); + 
preparedStatement.setString(3, "test"); + preparedStatement.execute(); + } + } + } + + @Test + void assertCalculateOfAllQueryFromBegin() { + RecordSingleTableInventoryCalculator calculator = new RecordSingleTableInventoryCalculator(5); + SingleTableInventoryCalculateParameter param = generateParameter(dataSource, 0); + Optional calculateResult = calculator.calculateChunk(param); + assertTrue(calculateResult.isPresent()); + SingleTableInventoryCalculatedResult actual = calculateResult.get(); + assertTrue(actual.getMaxUniqueKeyValue().isPresent()); + assertThat(actual.getMaxUniqueKeyValue().get(), is(5)); + } + + @Test + void assertCalculateOfAllQueryFromMiddle() { + RecordSingleTableInventoryCalculator calculator = new RecordSingleTableInventoryCalculator(5); + SingleTableInventoryCalculateParameter param = generateParameter(dataSource, 5); + Optional calculateResult = calculator.calculateChunk(param); + assertTrue(calculateResult.isPresent()); + SingleTableInventoryCalculatedResult actual = calculateResult.get(); + assertTrue(actual.getMaxUniqueKeyValue().isPresent()); + assertThat(actual.getMaxUniqueKeyValue().get(), is(10)); + } + + private SingleTableInventoryCalculateParameter generateParameter(final PipelineDataSourceWrapper dataSource, final Object dataCheckPosition) { + List uniqueKeys = Collections.singletonList(new PipelineColumnMetaData(1, "order_id", Types.INTEGER, "integer", false, true, true)); + return new SingleTableInventoryCalculateParameter(dataSource, new SchemaTableName(null, "t_order"), Collections.emptyList(), uniqueKeys, dataCheckPosition); + } +} diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/dump/ColumnValueReaderEngineTest.java b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/dump/ColumnValueReaderEngineTest.java index 913933ecdc024..a6459fcc7c5c0 100644 --- 
a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/dump/ColumnValueReaderEngineTest.java +++ b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/dump/ColumnValueReaderEngineTest.java @@ -27,7 +27,6 @@ import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; -import java.util.Objects; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; @@ -45,8 +44,8 @@ void assertReadValue() throws SQLException { connection.createStatement().executeUpdate("INSERT INTO t_order(order_id, user_id, status, c_year) VALUES (1, 2,'ok', null)"); ResultSet resultSet = connection.createStatement().executeQuery("SELECT * FROM t_order"); resultSet.next(); - assertThat(((Long) Objects.requireNonNull(columnValueReaderEngine.read(resultSet, resultSet.getMetaData(), 1))).intValue(), is(1)); - assertThat(((Long) Objects.requireNonNull(columnValueReaderEngine.read(resultSet, resultSet.getMetaData(), 2))).intValue(), is(2)); + assertThat(columnValueReaderEngine.read(resultSet, resultSet.getMetaData(), 1), is(1)); + assertThat(columnValueReaderEngine.read(resultSet, resultSet.getMetaData(), 2), is(2)); assertThat(columnValueReaderEngine.read(resultSet, resultSet.getMetaData(), 3), is("ok")); assertNull(columnValueReaderEngine.read(resultSet, resultSet.getMetaData(), 4)); } diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/FixtureDataConsistencyCalculatedResult.java b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/FixtureSingleTableInventoryCalculatedResult.java similarity index 88% rename from test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/FixtureDataConsistencyCalculatedResult.java rename to 
test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/FixtureSingleTableInventoryCalculatedResult.java index 15cbc5be97b10..ef1f39b791d90 100644 --- a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/FixtureDataConsistencyCalculatedResult.java +++ b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/FixtureSingleTableInventoryCalculatedResult.java @@ -20,14 +20,14 @@ import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.RequiredArgsConstructor; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCalculatedResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.SingleTableInventoryCalculatedResult; import java.util.Optional; @RequiredArgsConstructor @EqualsAndHashCode @Getter -public final class FixtureDataConsistencyCalculatedResult implements DataConsistencyCalculatedResult { +public final class FixtureSingleTableInventoryCalculatedResult implements SingleTableInventoryCalculatedResult { private final int recordsCount; diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/DataConsistencyCalculateAlgorithmFixture.java b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/FixtureTableDataConsistencyChecker.java similarity index 74% rename from test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/DataConsistencyCalculateAlgorithmFixture.java rename to test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/FixtureTableDataConsistencyChecker.java index cc5bc8bbbfeca..fdcc39f512033 100644 --- a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/DataConsistencyCalculateAlgorithmFixture.java +++ 
b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/FixtureTableDataConsistencyChecker.java @@ -17,36 +17,30 @@ package org.apache.shardingsphere.test.it.data.pipeline.core.fixture; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.DataConsistencyCalculateParameter; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCalculatedResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyChecker; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableInventoryCheckParameter; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableInventoryChecker; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; import org.apache.shardingsphere.infra.spi.ShardingSphereServiceLoader; import org.apache.shardingsphere.infra.spi.annotation.SPIDescription; import java.util.Collection; -import java.util.Collections; @SPIDescription("Fixture description.") -public final class DataConsistencyCalculateAlgorithmFixture implements DataConsistencyCalculateAlgorithm { +public final class FixtureTableDataConsistencyChecker implements TableDataConsistencyChecker { @Override - public Iterable calculate(final DataConsistencyCalculateParameter param) { - return Collections.singletonList(new FixtureDataConsistencyCalculatedResult(2)); + public TableInventoryChecker buildTableInventoryChecker(final TableInventoryCheckParameter param) { + return new FixtureTableInventoryChecker(); } @Override - public void cancel() { - } - - @Override - public boolean isCanceling() { - return false; + public Collection getSupportedDatabaseTypes() { + return ShardingSphereServiceLoader.getServiceInstances(DatabaseType.class); } @Override - public Collection getSupportedDatabaseTypes() { - 
return ShardingSphereServiceLoader.getServiceInstances(DatabaseType.class); + public void close() { } @Override diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/FixtureTableInventoryChecker.java b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/FixtureTableInventoryChecker.java new file mode 100644 index 0000000000000..0a9e184122069 --- /dev/null +++ b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/fixture/FixtureTableInventoryChecker.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.shardingsphere.test.it.data.pipeline.core.fixture; + +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyContentCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCountCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableInventoryChecker; + +public final class FixtureTableInventoryChecker implements TableInventoryChecker { + + @Override + public void cancel() { + } + + @Override + public boolean isCanceling() { + return false; + } + + @Override + public TableDataConsistencyCheckResult checkSingleTableInventoryData() { + return new TableDataConsistencyCheckResult(new TableDataConsistencyCountCheckResult(2, 2), new TableDataConsistencyContentCheckResult(true)); + } +} diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/job/service/GovernanceRepositoryAPIImplTest.java b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/job/service/GovernanceRepositoryAPIImplTest.java index cef3a6aebabe6..d2f8c19150f83 100644 --- a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/job/service/GovernanceRepositoryAPIImplTest.java +++ b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/core/job/service/GovernanceRepositoryAPIImplTest.java @@ -22,9 +22,9 @@ import org.apache.shardingsphere.data.pipeline.common.constant.DataPipelineConstants; import org.apache.shardingsphere.data.pipeline.common.ingest.position.PlaceholderPosition; import org.apache.shardingsphere.data.pipeline.common.registrycenter.repository.GovernanceRepositoryAPI; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckResult; -import 
org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyContentCheckResult; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCountCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyContentCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCountCheckResult; import org.apache.shardingsphere.data.pipeline.core.importer.Importer; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory; import org.apache.shardingsphere.data.pipeline.core.task.InventoryTask; @@ -97,10 +97,10 @@ void assertPersistJobItemProgress() { @Test void assertPersistJobCheckResult() { MigrationJobItemContext jobItemContext = mockJobItemContext(); - Map actual = new HashMap<>(); - actual.put("test", new DataConsistencyCheckResult(new DataConsistencyCountCheckResult(1, 1), new DataConsistencyContentCheckResult(true))); + Map actual = new HashMap<>(); + actual.put("test", new TableDataConsistencyCheckResult(new TableDataConsistencyCountCheckResult(1, 1), new TableDataConsistencyContentCheckResult(true))); governanceRepositoryAPI.persistCheckJobResult(jobItemContext.getJobId(), "j02123", actual); - Map checkResult = governanceRepositoryAPI.getCheckJobResult(jobItemContext.getJobId(), "j02123"); + Map checkResult = governanceRepositoryAPI.getCheckJobResult(jobItemContext.getJobId(), "j02123"); assertThat(checkResult.size(), is(1)); assertTrue(checkResult.get("test").getContentCheckResult().isMatched()); } diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/consistencycheck/ConsistencyCheckJobTest.java b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/consistencycheck/ConsistencyCheckJobTest.java 
index c4e90a893815c..a8c3af697f2d9 100644 --- a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/consistencycheck/ConsistencyCheckJobTest.java +++ b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/consistencycheck/ConsistencyCheckJobTest.java @@ -56,13 +56,15 @@ void assertBuildPipelineJobItemContext() { ConsistencyCheckJob consistencyCheckJob = new ConsistencyCheckJob(checkJobId); ConsistencyCheckJobItemContext actual = consistencyCheckJob.buildPipelineJobItemContext( new ShardingContext(checkJobId, "", 1, YamlEngine.marshal(createYamlConsistencyCheckJobConfiguration(checkJobId)), 0, "")); - assertThat(actual.getProgressContext().getTableCheckPositions(), is(expectTableCheckPosition)); + assertThat(actual.getProgressContext().getSourceTableCheckPositions(), is(expectTableCheckPosition)); + assertThat(actual.getProgressContext().getTargetTableCheckPositions(), is(expectTableCheckPosition)); } private YamlConsistencyCheckJobItemProgress createYamlConsistencyCheckJobItemProgress(final Map expectTableCheckPosition) { YamlConsistencyCheckJobItemProgress result = new YamlConsistencyCheckJobItemProgress(); result.setStatus(JobStatus.RUNNING.name()); - result.setTableCheckPositions(expectTableCheckPosition); + result.setSourceTableCheckPositions(expectTableCheckPosition); + result.setTargetTableCheckPositions(expectTableCheckPosition); return result; } @@ -70,6 +72,8 @@ private YamlConsistencyCheckJobConfiguration createYamlConsistencyCheckJobConfig YamlConsistencyCheckJobConfiguration result = new YamlConsistencyCheckJobConfiguration(); result.setJobId(checkJobId); result.setParentJobId(""); + result.setAlgorithmTypeName("DATA_MATCH"); + result.setSourceDatabaseType("H2"); return result; } } diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPITest.java 
b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPITest.java index 679bba5a053ef..962f3340821a5 100644 --- a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPITest.java +++ b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPITest.java @@ -19,9 +19,9 @@ import org.apache.shardingsphere.data.pipeline.common.job.JobStatus; import org.apache.shardingsphere.data.pipeline.common.registrycenter.repository.GovernanceRepositoryAPI; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckResult; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyContentCheckResult; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCountCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyContentCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCountCheckResult; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.ConsistencyCheckJobId; @@ -30,6 +30,10 @@ import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.config.ConsistencyCheckJobConfiguration; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.context.ConsistencyCheckJobItemContext; import org.apache.shardingsphere.data.pipeline.scenario.consistencycheck.util.ConsistencyCheckSequence; +import 
org.apache.shardingsphere.data.pipeline.scenario.migration.config.MigrationJobConfiguration; +import org.apache.shardingsphere.data.pipeline.yaml.job.YamlMigrationJobConfigurationSwapper; +import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.test.it.data.pipeline.core.util.JobConfigurationBuilder; import org.apache.shardingsphere.test.it.data.pipeline.core.util.PipelineContextUtils; import org.junit.jupiter.api.BeforeAll; @@ -49,6 +53,8 @@ class ConsistencyCheckJobAPITest { private final ConsistencyCheckJobAPI checkJobAPI = new ConsistencyCheckJobAPI(); + private final YamlMigrationJobConfigurationSwapper jobConfigSwapper = new YamlMigrationJobConfigurationSwapper(); + @BeforeAll public static void beforeClass() { PipelineContextUtils.mockModeConfigAndContextManager(); @@ -56,43 +62,49 @@ public static void beforeClass() { @Test void assertCreateJobConfig() { - String parentJobId = JobConfigurationBuilder.createYamlMigrationJobConfiguration().getJobId(); - String checkJobId = checkJobAPI.createJobAndStart(new CreateConsistencyCheckJobParameter(parentJobId, null, null)); - ConsistencyCheckJobConfiguration jobConfig = checkJobAPI.getJobConfiguration(checkJobId); + MigrationJobConfiguration parentJobConfig = jobConfigSwapper.swapToObject(JobConfigurationBuilder.createYamlMigrationJobConfiguration()); + String parentJobId = parentJobConfig.getJobId(); + String checkJobId = checkJobAPI.createJobAndStart(new CreateConsistencyCheckJobParameter(parentJobId, null, null, + parentJobConfig.getSourceDatabaseType(), parentJobConfig.getTargetDatabaseType())); + ConsistencyCheckJobConfiguration checkJobConfig = checkJobAPI.getJobConfiguration(checkJobId); int expectedSequence = ConsistencyCheckSequence.MIN_SEQUENCE; String expectCheckJobId = checkJobAPI.marshalJobId(new ConsistencyCheckJobId(PipelineJobIdUtils.parseContextKey(parentJobId), parentJobId, 
expectedSequence)); - assertThat(jobConfig.getJobId(), is(expectCheckJobId)); - assertNull(jobConfig.getAlgorithmTypeName()); + assertThat(checkJobConfig.getJobId(), is(expectCheckJobId)); + assertNull(checkJobConfig.getAlgorithmTypeName()); int sequence = ConsistencyCheckJobId.parseSequence(expectCheckJobId); assertThat(sequence, is(expectedSequence)); } @Test void assertGetLatestDataConsistencyCheckResult() { - String parentJobId = JobConfigurationBuilder.createYamlMigrationJobConfiguration().getJobId(); - String checkJobId = checkJobAPI.createJobAndStart(new CreateConsistencyCheckJobParameter(parentJobId, null, null)); + MigrationJobConfiguration parentJobConfig = jobConfigSwapper.swapToObject(JobConfigurationBuilder.createYamlMigrationJobConfiguration()); + String parentJobId = parentJobConfig.getJobId(); + String checkJobId = checkJobAPI.createJobAndStart(new CreateConsistencyCheckJobParameter(parentJobId, null, null, + parentJobConfig.getSourceDatabaseType(), parentJobConfig.getTargetDatabaseType())); GovernanceRepositoryAPI governanceRepositoryAPI = PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineContextUtils.getContextKey()); governanceRepositoryAPI.persistLatestCheckJobId(parentJobId, checkJobId); - Map expectedCheckResult = Collections.singletonMap("t_order", new DataConsistencyCheckResult(new DataConsistencyCountCheckResult(1, 1), - new DataConsistencyContentCheckResult(true))); + Map expectedCheckResult = Collections.singletonMap("t_order", new TableDataConsistencyCheckResult(new TableDataConsistencyCountCheckResult(1, 1), + new TableDataConsistencyContentCheckResult(true))); governanceRepositoryAPI.persistCheckJobResult(parentJobId, checkJobId, expectedCheckResult); - Map actualCheckResult = checkJobAPI.getLatestDataConsistencyCheckResult(parentJobId); + Map actualCheckResult = checkJobAPI.getLatestDataConsistencyCheckResult(parentJobId); assertThat(actualCheckResult.size(), is(expectedCheckResult.size())); 
assertThat(actualCheckResult.get("t_order").getCountCheckResult().isMatched(), is(expectedCheckResult.get("t_order").getContentCheckResult().isMatched())); } @Test void assertDropByParentJobId() { - String parentJobId = JobConfigurationBuilder.createYamlMigrationJobConfiguration().getJobId(); + MigrationJobConfiguration parentJobConfig = jobConfigSwapper.swapToObject(JobConfigurationBuilder.createYamlMigrationJobConfiguration()); + String parentJobId = parentJobConfig.getJobId(); GovernanceRepositoryAPI repositoryAPI = PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineContextUtils.getContextKey()); int expectedSequence = 1; for (int i = 0; i < 3; i++) { - String checkJobId = checkJobAPI.createJobAndStart(new CreateConsistencyCheckJobParameter(parentJobId, null, null)); + String checkJobId = checkJobAPI.createJobAndStart(new CreateConsistencyCheckJobParameter(parentJobId, null, null, + parentJobConfig.getSourceDatabaseType(), parentJobConfig.getTargetDatabaseType())); ConsistencyCheckJobItemContext checkJobItemContext = new ConsistencyCheckJobItemContext( - new ConsistencyCheckJobConfiguration(checkJobId, parentJobId, null, null), 0, JobStatus.FINISHED, null); + new ConsistencyCheckJobConfiguration(checkJobId, parentJobId, null, null, TypedSPILoader.getService(DatabaseType.class, "H2")), 0, JobStatus.FINISHED, null); checkJobAPI.persistJobItemProgress(checkJobItemContext); - Map dataConsistencyCheckResult = Collections.singletonMap("t_order", - new DataConsistencyCheckResult(new DataConsistencyCountCheckResult(0, 0), new DataConsistencyContentCheckResult(true))); + Map dataConsistencyCheckResult = Collections.singletonMap("t_order", + new TableDataConsistencyCheckResult(new TableDataConsistencyCountCheckResult(0, 0), new TableDataConsistencyContentCheckResult(true))); repositoryAPI.persistCheckJobResult(parentJobId, checkJobId, dataConsistencyCheckResult); Optional latestCheckJobId = repositoryAPI.getLatestCheckJobId(parentJobId); 
assertTrue(latestCheckJobId.isPresent()); diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java index 841deed534074..96fc9eba5c8a6 100644 --- a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java +++ b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java @@ -29,10 +29,9 @@ import org.apache.shardingsphere.data.pipeline.common.pojo.InventoryIncrementalJobItemInfo; import org.apache.shardingsphere.data.pipeline.common.util.PipelineDistributedBarrier; import org.apache.shardingsphere.data.pipeline.core.consistencycheck.ConsistencyCheckJobItemProgressContext; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckResult; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyContentCheckResult; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCountCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyContentCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCountCheckResult; import org.apache.shardingsphere.data.pipeline.core.exception.param.PipelineInvalidParameterException; import org.apache.shardingsphere.data.pipeline.core.job.PipelineJobIdUtils; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory; @@ 
-43,11 +42,11 @@ import org.apache.shardingsphere.data.pipeline.scenario.migration.context.MigrationJobItemContext; import org.apache.shardingsphere.data.pipeline.spi.datasource.creator.PipelineDataSourceCreator; import org.apache.shardingsphere.elasticjob.infra.pojo.JobConfigurationPOJO; -import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeFactory; import org.apache.shardingsphere.infra.database.core.type.DatabaseType; +import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeFactory; import org.apache.shardingsphere.infra.datanode.DataNode; import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator; -import org.apache.shardingsphere.infra.datasource.props.DataSourceProperties; +import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties; import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; import org.apache.shardingsphere.migration.distsql.statement.MigrateTableStatement; @@ -79,7 +78,6 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -105,7 +103,7 @@ static void beforeClass() { props.put("jdbcUrl", jdbcUrl); props.put("username", "root"); props.put("password", "root"); - jobAPI.addMigrationSourceResources(PipelineContextUtils.getContextKey(), Collections.singletonMap("ds_0", new DataSourceProperties("com.zaxxer.hikari.HikariDataSource", props))); + jobAPI.addMigrationSourceResources(PipelineContextUtils.getContextKey(), Collections.singletonMap("ds_0", new DataSourcePoolProperties("com.zaxxer.hikari.HikariDataSource", props))); } @AfterAll @@ 
-172,22 +170,16 @@ void assertGetProgress() { assertThat(jobProgressMap.size(), is(1)); } - @Test - void assertBuildNullDataConsistencyCalculateAlgorithm() { - DataConsistencyCalculateAlgorithm actual = jobAPI.buildDataConsistencyCalculateAlgorithm(null, null); - assertInstanceOf(DataConsistencyCalculateAlgorithm.class, actual); - } - @Test void assertDataConsistencyCheck() { MigrationJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration(); initTableData(jobConfig); Optional jobId = jobAPI.start(jobConfig); assertTrue(jobId.isPresent()); - DataConsistencyCalculateAlgorithm calculateAlgorithm = jobAPI.buildDataConsistencyCalculateAlgorithm("FIXTURE", null); - Map checkResultMap = jobAPI.dataConsistencyCheck(jobConfig, calculateAlgorithm, new ConsistencyCheckJobItemProgressContext(jobId.get(), 0)); + Map checkResultMap = jobAPI.buildPipelineDataConsistencyChecker( + jobConfig, jobAPI.buildPipelineProcessContext(jobConfig), new ConsistencyCheckJobItemProgressContext(jobId.get(), 0, "H2")).check("FIXTURE", null); assertThat(checkResultMap.size(), is(1)); - String checkKey = "ds_0.t_order"; + String checkKey = "t_order"; assertTrue(checkResultMap.get(checkKey).getCountCheckResult().isMatched()); assertThat(checkResultMap.get(checkKey).getCountCheckResult().getTargetRecordsCount(), is(2L)); assertTrue(checkResultMap.get(checkKey).getContentCheckResult().isMatched()); @@ -200,33 +192,33 @@ void assertAggregateEmptyDataConsistencyCheckResults() { @Test void assertAggregateDifferentCountDataConsistencyCheckResults() { - DataConsistencyCountCheckResult equalCountCheckResult = new DataConsistencyCountCheckResult(100, 100); - DataConsistencyCountCheckResult notEqualCountCheckResult = new DataConsistencyCountCheckResult(100, 95); - DataConsistencyContentCheckResult equalContentCheckResult = new DataConsistencyContentCheckResult(false); - Map checkResults = new LinkedHashMap<>(2, 1F); - checkResults.put("foo_tbl", new 
DataConsistencyCheckResult(equalCountCheckResult, equalContentCheckResult)); - checkResults.put("bar_tbl", new DataConsistencyCheckResult(notEqualCountCheckResult, equalContentCheckResult)); + TableDataConsistencyCountCheckResult equalCountCheckResult = new TableDataConsistencyCountCheckResult(100, 100); + TableDataConsistencyCountCheckResult notEqualCountCheckResult = new TableDataConsistencyCountCheckResult(100, 95); + TableDataConsistencyContentCheckResult equalContentCheckResult = new TableDataConsistencyContentCheckResult(false); + Map checkResults = new LinkedHashMap<>(2, 1F); + checkResults.put("foo_tbl", new TableDataConsistencyCheckResult(equalCountCheckResult, equalContentCheckResult)); + checkResults.put("bar_tbl", new TableDataConsistencyCheckResult(notEqualCountCheckResult, equalContentCheckResult)); assertFalse(jobAPI.aggregateDataConsistencyCheckResults("foo_job", checkResults)); } @Test void assertAggregateDifferentContentDataConsistencyCheckResults() { - DataConsistencyCountCheckResult equalCountCheckResult = new DataConsistencyCountCheckResult(100, 100); - DataConsistencyContentCheckResult equalContentCheckResult = new DataConsistencyContentCheckResult(true); - DataConsistencyContentCheckResult notEqualContentCheckResult = new DataConsistencyContentCheckResult(false); - Map checkResults = new LinkedHashMap<>(2, 1F); - checkResults.put("foo_tbl", new DataConsistencyCheckResult(equalCountCheckResult, equalContentCheckResult)); - checkResults.put("bar_tbl", new DataConsistencyCheckResult(equalCountCheckResult, notEqualContentCheckResult)); + TableDataConsistencyCountCheckResult equalCountCheckResult = new TableDataConsistencyCountCheckResult(100, 100); + TableDataConsistencyContentCheckResult equalContentCheckResult = new TableDataConsistencyContentCheckResult(true); + TableDataConsistencyContentCheckResult notEqualContentCheckResult = new TableDataConsistencyContentCheckResult(false); + Map checkResults = new LinkedHashMap<>(2, 1F); + 
checkResults.put("foo_tbl", new TableDataConsistencyCheckResult(equalCountCheckResult, equalContentCheckResult)); + checkResults.put("bar_tbl", new TableDataConsistencyCheckResult(equalCountCheckResult, notEqualContentCheckResult)); assertFalse(jobAPI.aggregateDataConsistencyCheckResults("foo_job", checkResults)); } @Test void assertAggregateSameDataConsistencyCheckResults() { - DataConsistencyCountCheckResult equalCountCheckResult = new DataConsistencyCountCheckResult(100, 100); - DataConsistencyContentCheckResult equalContentCheckResult = new DataConsistencyContentCheckResult(true); - Map checkResults = new LinkedHashMap<>(2, 1F); - checkResults.put("foo_tbl", new DataConsistencyCheckResult(equalCountCheckResult, equalContentCheckResult)); - checkResults.put("bar_tbl", new DataConsistencyCheckResult(equalCountCheckResult, equalContentCheckResult)); + TableDataConsistencyCountCheckResult equalCountCheckResult = new TableDataConsistencyCountCheckResult(100, 100); + TableDataConsistencyContentCheckResult equalContentCheckResult = new TableDataConsistencyContentCheckResult(true); + Map checkResults = new LinkedHashMap<>(2, 1F); + checkResults.put("foo_tbl", new TableDataConsistencyCheckResult(equalCountCheckResult, equalContentCheckResult)); + checkResults.put("bar_tbl", new TableDataConsistencyCheckResult(equalCountCheckResult, equalContentCheckResult)); assertTrue(jobAPI.aggregateDataConsistencyCheckResults("foo_job", checkResults)); } @@ -279,7 +271,7 @@ void assertRenewJobStatus() { @Test void assertAddMigrationSourceResources() { PipelineDataSourcePersistService persistService = new PipelineDataSourcePersistService(); - Map actual = persistService.load(PipelineContextUtils.getContextKey(), new MigrationJobType()); + Map actual = persistService.load(PipelineContextUtils.getContextKey(), new MigrationJobType()); assertTrue(actual.containsKey("ds_0")); } @@ -315,10 +307,10 @@ void assertCreateJobConfig() throws SQLException { } private void 
initIntPrimaryEnvironment() throws SQLException { - Map metaDataDataSource = new PipelineDataSourcePersistService().load(PipelineContextUtils.getContextKey(), new MigrationJobType()); - DataSourceProperties dataSourceProps = metaDataDataSource.get("ds_0"); + Map metaDataDataSource = new PipelineDataSourcePersistService().load(PipelineContextUtils.getContextKey(), new MigrationJobType()); + DataSourcePoolProperties props = metaDataDataSource.get("ds_0"); try ( - PipelineDataSourceWrapper dataSource = new PipelineDataSourceWrapper(DataSourcePoolCreator.create(dataSourceProps), databaseType); + PipelineDataSourceWrapper dataSource = new PipelineDataSourceWrapper(DataSourcePoolCreator.create(props), databaseType); Connection connection = dataSource.getConnection(); Statement statement = connection.createStatement()) { statement.execute("DROP TABLE IF EXISTS t_order"); @@ -336,14 +328,13 @@ void assertShowMigrationSourceResources() { @Test void assertGetJobItemInfosAtBegin() { - Optional optional = jobAPI.start(JobConfigurationBuilder.createJobConfiguration()); - assertTrue(optional.isPresent()); - String jobId = optional.get(); + Optional jobId = jobAPI.start(JobConfigurationBuilder.createJobConfiguration()); + assertTrue(jobId.isPresent()); YamlInventoryIncrementalJobItemProgress yamlJobItemProgress = new YamlInventoryIncrementalJobItemProgress(); yamlJobItemProgress.setStatus(JobStatus.RUNNING.name()); yamlJobItemProgress.setSourceDatabaseType("MySQL"); - PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineContextUtils.getContextKey()).persistJobItemProgress(jobId, 0, YamlEngine.marshal(yamlJobItemProgress)); - List jobItemInfos = jobAPI.getJobItemInfos(jobId); + PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineContextUtils.getContextKey()).persistJobItemProgress(jobId.get(), 0, YamlEngine.marshal(yamlJobItemProgress)); + List jobItemInfos = jobAPI.getJobItemInfos(jobId.get()); assertThat(jobItemInfos.size(), is(1)); InventoryIncrementalJobItemInfo jobItemInfo 
= jobItemInfos.get(0); assertThat(jobItemInfo.getJobItemProgress().getStatus(), is(JobStatus.RUNNING)); @@ -352,16 +343,15 @@ void assertGetJobItemInfosAtBegin() { @Test void assertGetJobItemInfosAtIncrementTask() { - Optional optional = jobAPI.start(JobConfigurationBuilder.createJobConfiguration()); - assertTrue(optional.isPresent()); + Optional jobId = jobAPI.start(JobConfigurationBuilder.createJobConfiguration()); + assertTrue(jobId.isPresent()); YamlInventoryIncrementalJobItemProgress yamlJobItemProgress = new YamlInventoryIncrementalJobItemProgress(); yamlJobItemProgress.setSourceDatabaseType("MySQL"); yamlJobItemProgress.setStatus(JobStatus.EXECUTE_INCREMENTAL_TASK.name()); yamlJobItemProgress.setProcessedRecordsCount(100); yamlJobItemProgress.setInventoryRecordsCount(50); - String jobId = optional.get(); - PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineContextUtils.getContextKey()).persistJobItemProgress(jobId, 0, YamlEngine.marshal(yamlJobItemProgress)); - List jobItemInfos = jobAPI.getJobItemInfos(jobId); + PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineContextUtils.getContextKey()).persistJobItemProgress(jobId.get(), 0, YamlEngine.marshal(yamlJobItemProgress)); + List jobItemInfos = jobAPI.getJobItemInfos(jobId.get()); InventoryIncrementalJobItemInfo jobItemInfo = jobItemInfos.get(0); assertThat(jobItemInfo.getJobItemProgress().getStatus(), is(JobStatus.EXECUTE_INCREMENTAL_TASK)); assertThat(jobItemInfo.getInventoryFinishedPercentage(), is(100)); diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/check/consistency/MigrationDataConsistencyCheckerTest.java b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/check/consistency/MigrationDataConsistencyCheckerTest.java index 89c5ffea665ad..3862d8c061bd5 100644 --- 
a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/check/consistency/MigrationDataConsistencyCheckerTest.java +++ b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/check/consistency/MigrationDataConsistencyCheckerTest.java @@ -19,9 +19,11 @@ import org.apache.shardingsphere.data.pipeline.api.datasource.config.PipelineDataSourceConfiguration; import org.apache.shardingsphere.data.pipeline.common.datasource.DefaultPipelineDataSourceManager; +import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceManager; +import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceWrapper; import org.apache.shardingsphere.data.pipeline.common.registrycenter.repository.GovernanceRepositoryAPI; import org.apache.shardingsphere.data.pipeline.core.consistencycheck.ConsistencyCheckJobItemProgressContext; -import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.DataConsistencyCheckResult; +import org.apache.shardingsphere.data.pipeline.core.consistencycheck.result.TableDataConsistencyCheckResult; import org.apache.shardingsphere.data.pipeline.core.job.service.PipelineAPIFactory; import org.apache.shardingsphere.data.pipeline.scenario.migration.check.consistency.MigrationDataConsistencyChecker; import org.apache.shardingsphere.data.pipeline.scenario.migration.config.MigrationJobConfiguration; @@ -30,7 +32,6 @@ import org.apache.shardingsphere.data.pipeline.yaml.job.YamlMigrationJobConfigurationSwapper; import org.apache.shardingsphere.elasticjob.infra.pojo.JobConfigurationPOJO; import org.apache.shardingsphere.infra.util.yaml.YamlEngine; -import org.apache.shardingsphere.test.it.data.pipeline.core.fixture.DataConsistencyCalculateAlgorithmFixture; import org.apache.shardingsphere.test.it.data.pipeline.core.util.JobConfigurationBuilder; import org.apache.shardingsphere.test.it.data.pipeline.core.util.PipelineContextUtils; 
import org.junit.jupiter.api.BeforeAll; @@ -62,16 +63,16 @@ void assertCountAndDataCheck() throws SQLException { GovernanceRepositoryAPI governanceRepositoryAPI = PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineContextUtils.getContextKey()); governanceRepositoryAPI.persist(String.format("/pipeline/jobs/%s/config", jobConfig.getJobId()), YamlEngine.marshal(jobConfigurationPOJO)); governanceRepositoryAPI.persistJobItemProgress(jobConfig.getJobId(), 0, ""); - Map actual = new MigrationDataConsistencyChecker(jobConfig, new MigrationProcessContext(jobConfig.getJobId(), null), - createConsistencyCheckJobItemProgressContext()).check(new DataConsistencyCalculateAlgorithmFixture()); - String checkKey = "ds_0.t_order"; + Map actual = new MigrationDataConsistencyChecker(jobConfig, new MigrationProcessContext(jobConfig.getJobId(), null), + createConsistencyCheckJobItemProgressContext()).check("FIXTURE", null); + String checkKey = "t_order"; assertTrue(actual.get(checkKey).getCountCheckResult().isMatched()); assertThat(actual.get(checkKey).getCountCheckResult().getSourceRecordsCount(), is(actual.get(checkKey).getCountCheckResult().getTargetRecordsCount())); assertTrue(actual.get(checkKey).getContentCheckResult().isMatched()); } private ConsistencyCheckJobItemProgressContext createConsistencyCheckJobItemProgressContext() { - return new ConsistencyCheckJobItemProgressContext("", 0); + return new ConsistencyCheckJobItemProgressContext("", 0, "H2"); } private MigrationJobConfiguration createJobConfiguration() throws SQLException { @@ -83,7 +84,9 @@ private MigrationJobConfiguration createJobConfiguration() throws SQLException { private void initTableData(final PipelineDataSourceConfiguration dataSourceConfig) throws SQLException { try ( - Connection connection = new DefaultPipelineDataSourceManager().getDataSource(dataSourceConfig).getConnection(); + PipelineDataSourceManager dataSourceManager = new DefaultPipelineDataSourceManager(); + PipelineDataSourceWrapper dataSource = 
dataSourceManager.getDataSource(dataSourceConfig); + Connection connection = dataSource.getConnection(); Statement statement = connection.createStatement()) { statement.execute("DROP TABLE IF EXISTS t_order"); statement.execute("CREATE TABLE t_order (order_id INT PRIMARY KEY, user_id INT(11))"); diff --git a/test/it/pipeline/src/test/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm b/test/it/pipeline/src/test/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm deleted file mode 100644 index 697e98ee190c3..0000000000000 --- a/test/it/pipeline/src/test/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.core.consistencycheck.algorithm.DataConsistencyCalculateAlgorithm +++ /dev/null @@ -1,18 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -org.apache.shardingsphere.test.it.data.pipeline.core.fixture.DataConsistencyCalculateAlgorithmFixture diff --git a/features/sharding/plugin/nanoid/src/main/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm b/test/it/pipeline/src/test/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyChecker similarity index 89% rename from features/sharding/plugin/nanoid/src/main/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm rename to test/it/pipeline/src/test/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyChecker index 759153ddffdbb..b771deadee2c5 100644 --- a/features/sharding/plugin/nanoid/src/main/resources/META-INF/services/org.apache.shardingsphere.sharding.spi.KeyGenerateAlgorithm +++ b/test/it/pipeline/src/test/resources/META-INF/services/org.apache.shardingsphere.data.pipeline.core.consistencycheck.table.TableDataConsistencyChecker @@ -15,4 +15,4 @@ # limitations under the License. 
# -org.apache.shardingsphere.sharding.nanoid.algorithm.keygen.NanoIdKeyGenerateAlgorithm +org.apache.shardingsphere.test.it.data.pipeline.core.fixture.FixtureTableDataConsistencyChecker diff --git a/test/it/pipeline/src/test/resources/config_sharding_sphere_jdbc_source.yaml b/test/it/pipeline/src/test/resources/config_sharding_sphere_jdbc_source.yaml index ac7775e120459..c48220f058a71 100644 --- a/test/it/pipeline/src/test/resources/config_sharding_sphere_jdbc_source.yaml +++ b/test/it/pipeline/src/test/resources/config_sharding_sphere_jdbc_source.yaml @@ -55,6 +55,16 @@ rules: # standard: # shardingAlgorithmName: t_order_tbl_inline # shardingColumn: order_id + t_order_item: + actualDataNodes: ds_${0..1}.t_order_item_${0..1} + tableStrategy: + standard: + shardingColumn: order_id + shardingAlgorithmName: t_order_item_inline + keyGenerateStrategy: + column: order_id + keyGeneratorName: snowflake + shardingAlgorithms: default_db_inline: type: INLINE @@ -64,6 +74,11 @@ rules: type: INLINE props: algorithm-expression: t_order + t_order_item_inline: + type: INLINE + props: + algorithm-expression: t_order_item_${order_id % 2} + keyGenerators: snowflake: type: SNOWFLAKE diff --git a/test/it/pipeline/src/test/resources/config_sharding_sphere_jdbc_target.yaml b/test/it/pipeline/src/test/resources/config_sharding_sphere_jdbc_target.yaml index 92ee83b821143..eef745c81c8f4 100644 --- a/test/it/pipeline/src/test/resources/config_sharding_sphere_jdbc_target.yaml +++ b/test/it/pipeline/src/test/resources/config_sharding_sphere_jdbc_target.yaml @@ -36,22 +36,41 @@ rules: standard: shardingAlgorithmName: default_db_inline shardingColumn: user_id - tables: + autoTables: t_order: - actualDataNodes: ds_$->{1..2}.t_order_$->{0..1} + actualDataSources: ds_1,ds_2 keyGenerateStrategy: column: order_id keyGeneratorName: snowflake + shardingStrategy: + standard: + shardingAlgorithmName: t_order_hash_mod + shardingColumn: order_id + tables: + t_order_item: + actualDataNodes: 
ds_${0..2}.t_order_item_${0..2} tableStrategy: standard: - shardingAlgorithmName: t1_tbl_inline shardingColumn: order_id + shardingAlgorithmName: new_t_order_item_inline + keyGenerateStrategy: + column: order_id + keyGeneratorName: snowflake + shardingAlgorithms: - default_db_inline: + database_inline: + props: + algorithm-expression: ds_${user_id % 2 + 1} + type: INLINE + t_order_inline: + props: + algorithm-expression: t_order_${order_id % 2} type: INLINE + t_order_hash_mod: props: - algorithm-expression: ds_$->{user_id % 2 + 1} - t_order_tbl_inline: + sharding-count: '6' + type: hash_mod + new_t_order_item_inline: type: INLINE props: - algorithm-expression: t_order_$->{order_id % 2} + algorithm-expression: t_order_item_${order_id % 4} diff --git a/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/engine/SQLRewriterIT.java b/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/engine/SQLRewriterIT.java index 99042c9442d96..d7c29fa34da09 100644 --- a/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/engine/SQLRewriterIT.java +++ b/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/engine/SQLRewriterIT.java @@ -33,6 +33,7 @@ import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase; import org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData; +import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit; import org.apache.shardingsphere.infra.metadata.database.rule.RuleMetaData; import org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema; import org.apache.shardingsphere.infra.parser.sql.SQLStatementParserEngine; @@ -86,6 +87,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static 
org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -118,9 +120,9 @@ private Collection createSQLRewriteUnits(final SQLRewriteEngineT new YamlDataSourceConfigurationSwapper().swapToDataSources(rootConfig.getDataSources()), new YamlRuleConfigurationSwapperEngine().swapToRuleConfigurations(rootConfig.getRules())); mockDataSource(databaseConfig.getDataSources()); DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, testParams.getDatabaseType()); - Map storageTypes = createStorageTypes(databaseConfig, databaseType); - ResourceMetaData resourceMetaData = mock(ResourceMetaData.class); - when(resourceMetaData.getStorageTypes()).thenReturn(storageTypes); + Map storageUnits = createStorageUnits(databaseConfig, databaseType); + ResourceMetaData resourceMetaData = mock(ResourceMetaData.class, RETURNS_DEEP_STUBS); + when(resourceMetaData.getStorageUnitMetaData().getStorageUnits()).thenReturn(storageUnits); String schemaName = new DatabaseTypeRegistry(databaseType).getDefaultSchemaName(DefaultDatabase.LOGIC_NAME); SQLStatementParserEngine sqlStatementParserEngine = new SQLStatementParserEngine(TypedSPILoader.getService(DatabaseType.class, testParams.getDatabaseType()), sqlParserRule.getSqlStatementCache(), sqlParserRule.getParseTreeCache(), sqlParserRule.isSqlCommentParseEnabled()); @@ -168,10 +170,12 @@ private Collection createGlobalRules() { return result; } - private Map createStorageTypes(final DatabaseConfiguration databaseConfig, final DatabaseType databaseType) { - Map result = new LinkedHashMap<>(databaseConfig.getDataSources().size(), 1F); + private Map createStorageUnits(final DatabaseConfiguration databaseConfig, final DatabaseType databaseType) { + Map result = new LinkedHashMap<>(databaseConfig.getDataSources().size(), 1F); for (Entry entry : databaseConfig.getDataSources().entrySet()) { - result.put(entry.getKey(), databaseType); + StorageUnit storageUnit = mock(StorageUnit.class); + 
when(storageUnit.getStorageType()).thenReturn(databaseType); + result.put(entry.getKey(), storageUnit); } return result; } diff --git a/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/engine/scenario/ShardingSQLRewriterIT.java b/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/engine/scenario/ShardingSQLRewriterIT.java index 1f3b8c54d1dd8..ca22f2c6073ab 100644 --- a/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/engine/scenario/ShardingSQLRewriterIT.java +++ b/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/engine/scenario/ShardingSQLRewriterIT.java @@ -80,18 +80,23 @@ protected Map mockSchemas(final String schemaName) tables.put("t_user_extend", new ShardingSphereTable("t_user_extend", Arrays.asList( new ShardingSphereColumn("user_id", Types.INTEGER, false, false, false, true, false, false), new ShardingSphereColumn("content", Types.VARCHAR, false, false, false, true, false, false)), Collections.emptyList(), Collections.emptyList())); - tables.put("t_single", new ShardingSphereTable("t_single", Collections.singletonList( - new ShardingSphereColumn("id", Types.INTEGER, false, false, false, true, false, false)), Collections.emptyList(), Collections.emptyList())); + tables.put("t_single", new ShardingSphereTable("t_single", Arrays.asList( + new ShardingSphereColumn("id", Types.INTEGER, false, false, false, true, false, false), + new ShardingSphereColumn("account_id", Types.INTEGER, false, false, false, true, false, false)), Collections.emptyList(), Collections.emptyList())); tables.put("t_single_extend", new ShardingSphereTable("t_single_extend", Collections.singletonList( new ShardingSphereColumn("id", Types.INTEGER, false, false, false, true, false, false)), Collections.emptyList(), Collections.emptyList())); - tables.put("t_config", new ShardingSphereTable("t_config", Collections.singletonList( - new ShardingSphereColumn("id", Types.INTEGER, false, false, false, 
true, false, false)), Collections.emptyList(), Collections.emptyList())); - tables.put("T_ROLE", new ShardingSphereTable("T_ROLE", Collections.singletonList( - new ShardingSphereColumn("id", Types.INTEGER, false, false, false, true, false, false)), Collections.emptyList(), Collections.emptyList())); - tables.put("T_ROLE_ADMIN", new ShardingSphereTable("T_ROLE_ADMIN", Collections.singletonList( - new ShardingSphereColumn("id", Types.INTEGER, false, false, false, true, false, false)), Collections.emptyList(), Collections.emptyList())); - tables.put("t_account_view", new ShardingSphereTable("t_account_view", Collections.singletonList( - new ShardingSphereColumn("id", Types.INTEGER, false, false, false, true, false, false)), Collections.emptyList(), Collections.emptyList())); + tables.put("t_config", new ShardingSphereTable("t_config", Arrays.asList( + new ShardingSphereColumn("id", Types.INTEGER, false, false, false, true, false, false), + new ShardingSphereColumn("account_id", Types.INTEGER, false, false, false, true, false, false)), Collections.emptyList(), Collections.emptyList())); + tables.put("T_ROLE", new ShardingSphereTable("T_ROLE", Arrays.asList( + new ShardingSphereColumn("id", Types.INTEGER, false, false, false, true, false, false), + new ShardingSphereColumn("ROLE_ID", Types.INTEGER, false, false, false, true, false, false)), Collections.emptyList(), Collections.emptyList())); + tables.put("T_ROLE_ADMIN", new ShardingSphereTable("T_ROLE_ADMIN", Arrays.asList( + new ShardingSphereColumn("id", Types.INTEGER, false, false, false, true, false, false), + new ShardingSphereColumn("ROLE_ID", Types.INTEGER, false, false, false, true, false, false)), Collections.emptyList(), Collections.emptyList())); + tables.put("t_account_view", new ShardingSphereTable("t_account_view", Arrays.asList( + new ShardingSphereColumn("id", Types.INTEGER, false, false, false, true, false, false), + new ShardingSphereColumn("account_id", Types.INTEGER, false, false, false, true, false, 
false)), Collections.emptyList(), Collections.emptyList())); ShardingSphereSchema result = new ShardingSphereSchema(tables, Collections.emptyMap()); return Collections.singletonMap(schemaName, result); } diff --git a/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/encrypt/RewriteNormalEncryptAlgorithmFixture.java b/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/encrypt/RewriteNormalEncryptAlgorithmFixture.java index 8c6398531ac74..00f048811786f 100644 --- a/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/encrypt/RewriteNormalEncryptAlgorithmFixture.java +++ b/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/encrypt/RewriteNormalEncryptAlgorithmFixture.java @@ -17,10 +17,10 @@ package org.apache.shardingsphere.test.it.rewrite.fixture.encrypt; -import org.apache.shardingsphere.encrypt.api.encrypt.standard.StandardEncryptAlgorithm; import org.apache.shardingsphere.encrypt.api.context.EncryptContext; +import org.apache.shardingsphere.encrypt.api.encrypt.standard.StandardEncryptAlgorithm; -public final class RewriteNormalEncryptAlgorithmFixture implements StandardEncryptAlgorithm { +public final class RewriteNormalEncryptAlgorithmFixture implements StandardEncryptAlgorithm { @Override public String encrypt(final Object plainValue, final EncryptContext encryptContext) { @@ -31,11 +31,11 @@ public String encrypt(final Object plainValue, final EncryptContext encryptConte } @Override - public Object decrypt(final String cipherValue, final EncryptContext encryptContext) { + public Object decrypt(final Object cipherValue, final EncryptContext encryptContext) { if (null == cipherValue) { return null; } - return cipherValue.replaceAll("encrypt_", ""); + return cipherValue.toString().replaceAll("encrypt_", ""); } @Override diff --git 
a/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/encrypt/RewriteQueryAssistedEncryptAlgorithmFixture.java b/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/encrypt/RewriteQueryAssistedEncryptAlgorithmFixture.java index 1381a68fe1c7c..bec30f1892655 100644 --- a/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/encrypt/RewriteQueryAssistedEncryptAlgorithmFixture.java +++ b/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/encrypt/RewriteQueryAssistedEncryptAlgorithmFixture.java @@ -20,7 +20,7 @@ import org.apache.shardingsphere.encrypt.api.context.EncryptContext; import org.apache.shardingsphere.encrypt.api.encrypt.assisted.AssistedEncryptAlgorithm; -public final class RewriteQueryAssistedEncryptAlgorithmFixture implements AssistedEncryptAlgorithm { +public final class RewriteQueryAssistedEncryptAlgorithmFixture implements AssistedEncryptAlgorithm { @Override public String encrypt(final Object plainValue, final EncryptContext encryptContext) { diff --git a/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/encrypt/RewriteQueryLikeEncryptAlgorithmFixture.java b/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/encrypt/RewriteQueryLikeEncryptAlgorithmFixture.java index 2ab6a4dce133f..6ee95b055ea5a 100644 --- a/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/encrypt/RewriteQueryLikeEncryptAlgorithmFixture.java +++ b/test/it/rewriter/src/test/java/org/apache/shardingsphere/test/it/rewrite/fixture/encrypt/RewriteQueryLikeEncryptAlgorithmFixture.java @@ -17,10 +17,10 @@ package org.apache.shardingsphere.test.it.rewrite.fixture.encrypt; -import org.apache.shardingsphere.encrypt.api.encrypt.like.LikeEncryptAlgorithm; import org.apache.shardingsphere.encrypt.api.context.EncryptContext; +import 
org.apache.shardingsphere.encrypt.api.encrypt.like.LikeEncryptAlgorithm; -public final class RewriteQueryLikeEncryptAlgorithmFixture implements LikeEncryptAlgorithm { +public final class RewriteQueryLikeEncryptAlgorithmFixture implements LikeEncryptAlgorithm { @Override public String encrypt(final Object plainValue, final EncryptContext encryptContext) { diff --git a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/count_database_rules.xml b/test/it/rewriter/src/test/resources/scenario/encrypt/case/query-with-cipher/dml/select/select-distinct.xml similarity index 66% rename from test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/count_database_rules.xml rename to test/it/rewriter/src/test/resources/scenario/encrypt/case/query-with-cipher/dml/select/select-distinct.xml index 02e4fa15d3e82..793b33f56a145 100644 --- a/test/e2e/sql/src/test/resources/cases/rql/dataset/sharding_governance/mysql/count_database_rules.xml +++ b/test/it/rewriter/src/test/resources/scenario/encrypt/case/query-with-cipher/dml/select/select-distinct.xml @@ -1,3 +1,4 @@ + - - - - - - - - - - - - - + + + + + + diff --git a/test/it/rewriter/src/test/resources/scenario/encrypt/case/query-with-cipher/dml/select/select-join.xml b/test/it/rewriter/src/test/resources/scenario/encrypt/case/query-with-cipher/dml/select/select-join.xml index 81bc6e4852d18..c7ea37f443427 100644 --- a/test/it/rewriter/src/test/resources/scenario/encrypt/case/query-with-cipher/dml/select/select-join.xml +++ b/test/it/rewriter/src/test/resources/scenario/encrypt/case/query-with-cipher/dml/select/select-join.xml @@ -38,22 +38,22 @@ - - + + - - + + - - + + - - + + diff --git a/test/it/rewriter/src/test/resources/scenario/encrypt/case/query-with-cipher/dml/select/select-order-by.xml b/test/it/rewriter/src/test/resources/scenario/encrypt/case/query-with-cipher/dml/select/select-order-by.xml deleted file mode 100644 index 6730842c6768f..0000000000000 --- 
a/test/it/rewriter/src/test/resources/scenario/encrypt/case/query-with-cipher/dml/select/select-order-by.xml +++ /dev/null @@ -1,45 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/test/it/rewriter/src/test/resources/scenario/encrypt/case/query-with-cipher/dml/select/select-subquery.xml b/test/it/rewriter/src/test/resources/scenario/encrypt/case/query-with-cipher/dml/select/select-subquery.xml index 4eafb1a5edb30..6d9e381ba956e 100644 --- a/test/it/rewriter/src/test/resources/scenario/encrypt/case/query-with-cipher/dml/select/select-subquery.xml +++ b/test/it/rewriter/src/test/resources/scenario/encrypt/case/query-with-cipher/dml/select/select-subquery.xml @@ -24,32 +24,32 @@ - + - + - + - + - + - + @@ -64,7 +64,7 @@ - + @@ -74,12 +74,12 @@ - + - + @@ -89,16 +89,16 @@ - + - + - - - + + + diff --git a/test/it/rewriter/src/test/resources/scenario/mix/case/query-with-cipher/dml/select/select-join.xml b/test/it/rewriter/src/test/resources/scenario/mix/case/query-with-cipher/dml/select/select-join.xml index c93f3f0ec4077..7fa4f6879a161 100644 --- a/test/it/rewriter/src/test/resources/scenario/mix/case/query-with-cipher/dml/select/select-join.xml +++ b/test/it/rewriter/src/test/resources/scenario/mix/case/query-with-cipher/dml/select/select-join.xml @@ -18,11 +18,11 @@ - - - - - + + + + + diff --git a/test/it/rewriter/src/test/resources/scenario/mix/case/query-with-cipher/dml/select/select-subquery.xml b/test/it/rewriter/src/test/resources/scenario/mix/case/query-with-cipher/dml/select/select-subquery.xml index e0ccbe45cde0f..90828aac50a2c 100644 --- a/test/it/rewriter/src/test/resources/scenario/mix/case/query-with-cipher/dml/select/select-subquery.xml +++ b/test/it/rewriter/src/test/resources/scenario/mix/case/query-with-cipher/dml/select/select-subquery.xml @@ -25,14 +25,14 @@ - - + + - - + + diff --git a/test/it/rewriter/src/test/resources/scenario/sharding/case/ddl/create-table.xml 
b/test/it/rewriter/src/test/resources/scenario/sharding/case/ddl/create-table.xml index 5a305b0db5e9b..2f52ce9119c6f 100644 --- a/test/it/rewriter/src/test/resources/scenario/sharding/case/ddl/create-table.xml +++ b/test/it/rewriter/src/test/resources/scenario/sharding/case/ddl/create-table.xml @@ -53,10 +53,10 @@ - - - - + + + + diff --git a/test/it/rewriter/src/test/resources/scenario/sharding/case/dml/delete.xml b/test/it/rewriter/src/test/resources/scenario/sharding/case/dml/delete.xml index b62e9963e3758..922bfa12cebfd 100644 --- a/test/it/rewriter/src/test/resources/scenario/sharding/case/dml/delete.xml +++ b/test/it/rewriter/src/test/resources/scenario/sharding/case/dml/delete.xml @@ -28,13 +28,13 @@ - + - + diff --git a/test/it/rewriter/src/test/resources/scenario/sharding/case/dml/select.xml b/test/it/rewriter/src/test/resources/scenario/sharding/case/dml/select.xml index 1f71a727ac86a..a2a47e796b954 100644 --- a/test/it/rewriter/src/test/resources/scenario/sharding/case/dml/select.xml +++ b/test/it/rewriter/src/test/resources/scenario/sharding/case/dml/select.xml @@ -127,15 +127,15 @@ - - - + + + - - - + + + @@ -187,15 +187,15 @@ - - - + + + - - - + + + @@ -222,15 +222,15 @@ - - - + + + - - - + + + diff --git a/test/util/src/main/java/org/apache/shardingsphere/test/env/env/TestUtilEnvironment.java b/test/util/src/main/java/org/apache/shardingsphere/test/env/EnvironmentContext.java similarity index 68% rename from test/util/src/main/java/org/apache/shardingsphere/test/env/env/TestUtilEnvironment.java rename to test/util/src/main/java/org/apache/shardingsphere/test/env/EnvironmentContext.java index 2c96a17aa4a65..28054d28bd1fd 100644 --- a/test/util/src/main/java/org/apache/shardingsphere/test/env/env/TestUtilEnvironment.java +++ b/test/util/src/main/java/org/apache/shardingsphere/test/env/EnvironmentContext.java @@ -15,9 +15,8 @@ * limitations under the License. 
*/ -package org.apache.shardingsphere.test.env.env; +package org.apache.shardingsphere.test.env; -import lombok.Getter; import lombok.SneakyThrows; import java.io.IOException; @@ -25,35 +24,41 @@ import java.util.Properties; /** - * Test util environment. + * environment context. */ -@Getter -public final class TestUtilEnvironment { +public final class EnvironmentContext { - private static final String SQL_PARSER_EXTERNAL_IT_GITHUB_TOKEN = "test.util.github.token"; + private static final EnvironmentContext INSTANCE = new EnvironmentContext(); - private static final TestUtilEnvironment INSTANCE = new TestUtilEnvironment(); + private final Properties props; - private final String githubToken; - - private TestUtilEnvironment() { - Properties props = loadProperties(); - githubToken = props.getProperty(SQL_PARSER_EXTERNAL_IT_GITHUB_TOKEN); + private EnvironmentContext() { + props = loadProperties(); } /** - * Get instance. + * Get GitHub environment instance. * * @return got instance */ - public static TestUtilEnvironment getInstance() { + public static EnvironmentContext getInstance() { return INSTANCE; } + /** + * Get value by key. 
+ * + * @param key key + * @return value + */ + public String getValue(final String key) { + return props.getProperty(key); + } + @SneakyThrows(IOException.class) private Properties loadProperties() { Properties result = new Properties(); - try (InputStream inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream("env/test-util-env.properties")) { + try (InputStream inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream("env/env.properties")) { result.load(inputStream); } for (String each : System.getProperties().stringPropertyNames()) { diff --git a/test/util/src/main/java/org/apache/shardingsphere/test/loader/ExternalSQLParserTestParameter.java b/test/util/src/main/java/org/apache/shardingsphere/test/loader/ExternalSQLTestParameter.java similarity index 95% rename from test/util/src/main/java/org/apache/shardingsphere/test/loader/ExternalSQLParserTestParameter.java rename to test/util/src/main/java/org/apache/shardingsphere/test/loader/ExternalSQLTestParameter.java index 051e7aef64aae..f4d4ddb9e14f8 100644 --- a/test/util/src/main/java/org/apache/shardingsphere/test/loader/ExternalSQLParserTestParameter.java +++ b/test/util/src/main/java/org/apache/shardingsphere/test/loader/ExternalSQLTestParameter.java @@ -25,7 +25,7 @@ */ @RequiredArgsConstructor @Getter -public final class ExternalSQLParserTestParameter { +public final class ExternalSQLTestParameter { private final String sqlCaseId; diff --git a/test/util/src/main/java/org/apache/shardingsphere/test/loader/TestParameterLoadTemplate.java b/test/util/src/main/java/org/apache/shardingsphere/test/loader/TestParameterLoadTemplate.java new file mode 100644 index 0000000000000..5e44ae9b19156 --- /dev/null +++ b/test/util/src/main/java/org/apache/shardingsphere/test/loader/TestParameterLoadTemplate.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.shardingsphere.test.loader; + +import java.util.Collection; +import java.util.List; + +/** + * Test parameter load template. + */ +public interface TestParameterLoadTemplate { + + /** + * Load test parameters. + * + * @param sqlCaseFileName SQL case file name + * @param sqlCaseFileContent SQL case file content + * @param resultFileContent result file content + * @param databaseType database type + * @param reportType report type + * @return loaded test parameters + */ + Collection load(String sqlCaseFileName, List sqlCaseFileContent, List resultFileContent, String databaseType, String reportType); +} diff --git a/test/util/src/main/java/org/apache/shardingsphere/test/loader/AbstractTestParameterLoader.java b/test/util/src/main/java/org/apache/shardingsphere/test/loader/TestParameterLoader.java similarity index 74% rename from test/util/src/main/java/org/apache/shardingsphere/test/loader/AbstractTestParameterLoader.java rename to test/util/src/main/java/org/apache/shardingsphere/test/loader/TestParameterLoader.java index 4ffc60e73d318..c3d3bebcf8b88 100644 --- a/test/util/src/main/java/org/apache/shardingsphere/test/loader/AbstractTestParameterLoader.java +++ b/test/util/src/main/java/org/apache/shardingsphere/test/loader/TestParameterLoader.java @@ -17,11 
+17,12 @@ package org.apache.shardingsphere.test.loader; +import com.google.common.base.Strings; import com.google.common.collect.Lists; -import lombok.AccessLevel; import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; +import org.apache.shardingsphere.test.env.EnvironmentContext; import org.apache.shardingsphere.test.loader.strategy.TestParameterLoadStrategy; import org.apache.shardingsphere.test.loader.summary.FileSummary; @@ -29,6 +30,7 @@ import java.io.IOException; import java.io.InputStreamReader; import java.net.URI; +import java.net.URLConnection; import java.util.Collection; import java.util.LinkedList; import java.util.List; @@ -42,12 +44,12 @@ /** * Test parameter loader. - * - * @param type of test parameter */ -@RequiredArgsConstructor(access = AccessLevel.PROTECTED) +@RequiredArgsConstructor @Slf4j -public abstract class AbstractTestParameterLoader { +public final class TestParameterLoader { + + private static final String TOKEN_KEY = "it.github.token"; private static final int DEFAULT_DOWNLOAD_THREADS = 4; @@ -55,6 +57,8 @@ public abstract class AbstractTestParameterLoader { private final TestParameterLoadStrategy loadStrategy; + private final TestParameterLoadTemplate loadTemplate; + /** * Load test parameters. 
* @@ -65,15 +69,15 @@ public abstract class AbstractTestParameterLoader { * @return loaded test parameters */ @SneakyThrows - public Collection load(final URI sqlCaseURI, final URI resultURI, final String databaseType, final String reportType) { - Collection result = new LinkedList<>(); + public Collection load(final URI sqlCaseURI, final URI resultURI, final String databaseType, final String reportType) { + Collection result = new LinkedList<>(); Map> sqlCaseFileContents = downloadAllBySummary(sqlCaseURI); Map> resultFileContents = downloadAllBySummary(resultURI); for (Entry> each : sqlCaseFileContents.entrySet()) { String fileName = each.getKey(); List sqlCaseFileContent = each.getValue(); List resultFileContent = resultFileContents.getOrDefault(fileName, Lists.newArrayList()); - result.addAll(createTestParameters(fileName, sqlCaseFileContent, resultFileContent, databaseType, reportType)); + result.addAll(loadTemplate.load(fileName, sqlCaseFileContent, resultFileContent, databaseType, reportType)); } return result; } @@ -87,23 +91,16 @@ private Map> downloadAllBySummary(final URI sqlCaseURI) thr return contents; } - /** - * Create test parameters. 
- * - * @param sqlCaseFileName SQL case file name - * @param sqlCaseFileContent SQL case file content - * @param resultFileContent result file content - * @param databaseType database type - * @param reportType report type - * @return test parameters - */ - public abstract Collection createTestParameters(String sqlCaseFileName, List sqlCaseFileContent, List resultFileContent, String databaseType, String reportType); - private List loadContent(final URI uri) { - try ( - InputStreamReader in = new InputStreamReader(uri.toURL().openStream()); - BufferedReader reader = new BufferedReader(in)) { - return reader.lines().collect(Collectors.toList()); + try { + URLConnection urlConnection = uri.toURL().openConnection(); + String githubToken = EnvironmentContext.getInstance().getValue(TOKEN_KEY); + if (!Strings.isNullOrEmpty(githubToken)) { + urlConnection.setRequestProperty("Authorization", "Bearer " + githubToken); + } + try (BufferedReader reader = new BufferedReader(new InputStreamReader(urlConnection.getInputStream()))) { + return reader.lines().collect(Collectors.toList()); + } } catch (final IOException ex) { log.warn("Load failed, reason is: ", ex); return Lists.newArrayList(); diff --git a/test/util/src/main/java/org/apache/shardingsphere/test/loader/strategy/impl/GitHubTestParameterLoadStrategy.java b/test/util/src/main/java/org/apache/shardingsphere/test/loader/strategy/impl/GitHubTestParameterLoadStrategy.java index 9f081999f8a16..7d96ef0948f21 100644 --- a/test/util/src/main/java/org/apache/shardingsphere/test/loader/strategy/impl/GitHubTestParameterLoadStrategy.java +++ b/test/util/src/main/java/org/apache/shardingsphere/test/loader/strategy/impl/GitHubTestParameterLoadStrategy.java @@ -17,11 +17,11 @@ package org.apache.shardingsphere.test.loader.strategy.impl; +import com.google.common.base.Strings; import com.jayway.jsonpath.DocumentContext; import com.jayway.jsonpath.JsonPath; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.StringUtils; 
-import org.apache.shardingsphere.test.env.env.TestUtilEnvironment; +import org.apache.shardingsphere.test.env.EnvironmentContext; import org.apache.shardingsphere.test.loader.strategy.TestParameterLoadStrategy; import org.apache.shardingsphere.test.loader.summary.FileSummary; @@ -42,6 +42,8 @@ @Slf4j public final class GitHubTestParameterLoadStrategy implements TestParameterLoadStrategy { + private static final String TOKEN_KEY = "it.github.token"; + @Override public Collection loadSQLCaseFileSummaries(final URI uri) { if (uri.toString().isEmpty()) { @@ -83,8 +85,9 @@ private URI getGitHubApiUri(final URI sqlCaseURI) { private String loadContent(final URI casesURI) { try { URLConnection urlConnection = casesURI.toURL().openConnection(); - if (StringUtils.isNotBlank(TestUtilEnvironment.getInstance().getGithubToken())) { - urlConnection.setRequestProperty("Authorization", "Bearer " + TestUtilEnvironment.getInstance().getGithubToken()); + String githubToken = EnvironmentContext.getInstance().getValue(TOKEN_KEY); + if (!Strings.isNullOrEmpty(githubToken)) { + urlConnection.setRequestProperty("Authorization", "Bearer " + githubToken); } try (BufferedReader reader = new BufferedReader(new InputStreamReader(urlConnection.getInputStream()))) { return reader.lines().collect(Collectors.joining(System.lineSeparator())); diff --git a/test/util/src/main/java/org/apache/shardingsphere/test/loader/strategy/impl/LocalFileTestParameterLoadStrategy.java b/test/util/src/main/java/org/apache/shardingsphere/test/loader/strategy/impl/LocalFileTestParameterLoadStrategy.java deleted file mode 100644 index 774a711679db0..0000000000000 --- a/test/util/src/main/java/org/apache/shardingsphere/test/loader/strategy/impl/LocalFileTestParameterLoadStrategy.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.shardingsphere.test.loader.strategy.impl; - -import lombok.SneakyThrows; -import org.apache.shardingsphere.test.loader.strategy.TestParameterLoadStrategy; -import org.apache.shardingsphere.test.loader.summary.FileSummary; - -import java.net.URI; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Collection; -import java.util.LinkedList; -import java.util.stream.Stream; - -/** - * Test parameter load strategy with local file. 
- */ -public final class LocalFileTestParameterLoadStrategy implements TestParameterLoadStrategy { - - @SneakyThrows - @Override - public Collection loadSQLCaseFileSummaries(final URI uri) { - final Collection result = new LinkedList<>(); - try (Stream stream = Files.walk(Paths.get(uri))) { - stream.filter(each -> each.toString().endsWith(".sql")).forEach(each -> result.add(new FileSummary(each.getFileName().toString(), each.toUri().toString()))); - } - return result; - } -} diff --git a/test/util/src/main/java/org/apache/shardingsphere/test/util/PropertiesBuilder.java b/test/util/src/main/java/org/apache/shardingsphere/test/util/PropertiesBuilder.java index 2c7f4655fb5a8..0f879bf1848b9 100644 --- a/test/util/src/main/java/org/apache/shardingsphere/test/util/PropertiesBuilder.java +++ b/test/util/src/main/java/org/apache/shardingsphere/test/util/PropertiesBuilder.java @@ -32,12 +32,12 @@ public final class PropertiesBuilder { /** * Build properties. * - * @param properties to be built properties + * @param props to be built properties * @return built properties */ - public static Properties build(final Property... properties) { + public static Properties build(final Property... props) { Properties result = new Properties(); - for (Property each : properties) { + for (Property each : props) { result.setProperty(each.key, each.value); } return result; diff --git a/test/util/src/main/resources/env/test-util-env.properties b/test/util/src/main/resources/env/env.properties similarity index 97% rename from test/util/src/main/resources/env/test-util-env.properties rename to test/util/src/main/resources/env/env.properties index 93f8f4d9078b1..42c54031a4756 100644 --- a/test/util/src/main/resources/env/test-util-env.properties +++ b/test/util/src/main/resources/env/env.properties @@ -14,4 +14,4 @@ # limitations under the License. # -test.util.github.token= +it.github.token=