This is an automated email from the ASF dual-hosted git repository.

jiangmaolin pushed a commit to branch dev-5.5.1
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git

commit 271dedeb178695fac6d480a182c569c51b04c37c
Author: Raigor <[email protected]>
AuthorDate: Wed Oct 30 21:28:13 2024 +0800

    Pick 5.5.0-fix, optimize show/alter storage units (#26)
---
 .../AlterStorageUnitConnectionInfoException.java   |   4 +
 infra/distsql-handler/pom.xml                      |   7 ++
 .../rdl/resource/AlterStorageUnitExecutor.java     |  20 +++-
 .../rdl/resource/RegisterStorageUnitExecutor.java  |  14 +++
 .../rql/resource/ShowStorageUnitExecutor.java      |  68 ++++++++----
 mode/core/pom.xml                                  |   7 ++
 .../mode/metadata/manager/StorageUnitManager.java  | 115 +++++++++++++++++++++
 .../engine/src/main/antlr4/imports/RDLStatement.g4 |  14 ++-
 .../core/kernel/KernelDistSQLStatementVisitor.java |  34 ++++--
 .../segment/AlterPoolPropertiesSegment.java        |  20 ++--
 .../converter/DataSourceSegmentsConverter.java     |  95 +++++++++++++++++
 .../test/resources/cases/ral/e2e-ral-refresh.xml   |   5 +-
 .../src/test/resources/cases/rql/e2e-rql-show.xml  |   5 +-
 13 files changed, 366 insertions(+), 42 deletions(-)

diff --git 
a/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/kernel/metadata/resource/storageunit/AlterStorageUnitConnectionInfoException.java
 
b/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/kernel/metadata/resource/storageunit/AlterStorageUnitConnectionInfoException.java
index 57e2dd9b64d..0fd249879b1 100644
--- 
a/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/kernel/metadata/resource/storageunit/AlterStorageUnitConnectionInfoException.java
+++ 
b/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/kernel/metadata/resource/storageunit/AlterStorageUnitConnectionInfoException.java
@@ -32,4 +32,8 @@ public final class AlterStorageUnitConnectionInfoException 
extends ResourceDefin
     public AlterStorageUnitConnectionInfoException(final Collection<String> 
storageUnitNames) {
         super(XOpenSQLState.FEATURE_NOT_SUPPORTED, 11, "Can not alter 
connection info in storage units: '%s'.", storageUnitNames);
     }
+    
+    public AlterStorageUnitConnectionInfoException(final String message) {
+        super(XOpenSQLState.FEATURE_NOT_SUPPORTED, 11, message);
+    }
 }
diff --git a/infra/distsql-handler/pom.xml b/infra/distsql-handler/pom.xml
index c6bc2905269..9b9c53ad61c 100644
--- a/infra/distsql-handler/pom.xml
+++ b/infra/distsql-handler/pom.xml
@@ -47,6 +47,13 @@
             <artifactId>shardingsphere-mode-core</artifactId>
             <version>${project.version}</version>
         </dependency>
+        <!-- SPEX ADDED: BEGIN -->
+        <dependency>
+            <groupId>com.zaxxer</groupId>
+            <artifactId>HikariCP</artifactId>
+            <scope>compile</scope>
+        </dependency>
+        <!-- SPEX ADDED: END -->
         
         <dependency>
             <groupId>org.apache.shardingsphere</groupId>
diff --git 
a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/AlterStorageUnitExecutor.java
 
b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/AlterStorageUnitExecutor.java
index bc68f391b2e..3617cf41e87 100644
--- 
a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/AlterStorageUnitExecutor.java
+++ 
b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/AlterStorageUnitExecutor.java
@@ -17,11 +17,14 @@
 
 package org.apache.shardingsphere.distsql.handler.executor.rdl.resource;
 
+import com.sphereex.dbplusengine.SphereEx;
+import com.sphereex.dbplusengine.SphereEx.Type;
 import lombok.Setter;
 import lombok.extern.slf4j.Slf4j;
 import 
org.apache.shardingsphere.distsql.handler.aware.DistSQLExecutorDatabaseAware;
 import 
org.apache.shardingsphere.distsql.handler.engine.update.DistSQLUpdateExecutor;
 import 
org.apache.shardingsphere.distsql.handler.validate.DistSQLDataSourcePoolPropertiesValidator;
+import org.apache.shardingsphere.distsql.segment.AlterPoolPropertiesSegment;
 import org.apache.shardingsphere.distsql.segment.DataSourceSegment;
 import 
org.apache.shardingsphere.distsql.segment.HostnameAndPortBasedDataSourceSegment;
 import org.apache.shardingsphere.distsql.segment.URLBasedDataSourceSegment;
@@ -44,6 +47,7 @@ import org.apache.shardingsphere.mode.manager.ContextManager;
 
 import java.sql.SQLException;
 import java.util.Collection;
+import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Objects;
@@ -60,11 +64,14 @@ public final class AlterStorageUnitExecutor implements 
DistSQLUpdateExecutor<Alt
     
     private ShardingSphereDatabase database;
     
+    @SphereEx(Type.MODIFY)
     @Override
     public void executeUpdate(final AlterStorageUnitStatement sqlStatement, 
final ContextManager contextManager) {
         checkBefore(sqlStatement);
-        Map<String, DataSourcePoolProperties> propsMap = 
DataSourceSegmentsConverter.convert(database.getProtocolType(), 
sqlStatement.getStorageUnits());
-        validateHandler.validate(propsMap, 
getExpectedPrivileges(sqlStatement));
+        Map<String, DataSourcePoolProperties> propsMap = 
DataSourceSegmentsConverter.convert(database.getProtocolType(), 
database.getResourceMetaData(), sqlStatement.getStorageUnits());
+        if (!sqlStatement.getStorageUnits().stream().allMatch(each -> each 
instanceof AlterPoolPropertiesSegment && null == each.getUser())) {
+            validateHandler.validate(propsMap, 
getExpectedPrivileges(sqlStatement));
+        }
         try {
             
contextManager.getPersistServiceFacade().getMetaDataManagerPersistService().alterStorageUnits(database.getName(),
 propsMap);
         } catch (final SQLException | ShardingSphereExternalException ex) {
@@ -89,8 +96,15 @@ public final class AlterStorageUnitExecutor implements 
DistSQLUpdateExecutor<Alt
         ShardingSpherePreconditions.checkMustEmpty(notExistedStorageUnitNames, 
() -> new MissingRequiredStorageUnitsException(database.getName(), 
notExistedStorageUnitNames));
     }
     
+    @SphereEx(Type.MODIFY)
     private void checkDatabase(final AlterStorageUnitStatement sqlStatement) {
-        Collection<String> invalidStorageUnitNames = 
sqlStatement.getStorageUnits().stream().collect(Collectors.toMap(DataSourceSegment::getName,
 each -> each)).entrySet().stream()
+        Map<String, DataSourceSegment> toBeCheckedSegments = new 
LinkedHashMap<>(sqlStatement.getStorageUnits().size(), 1F);
+        for (DataSourceSegment each : sqlStatement.getStorageUnits()) {
+            if (each instanceof HostnameAndPortBasedDataSourceSegment || each 
instanceof URLBasedDataSourceSegment) {
+                toBeCheckedSegments.put(each.getName(), each);
+            }
+        }
+        Collection<String> invalidStorageUnitNames = 
toBeCheckedSegments.entrySet().stream()
                 .filter(each -> !isSameDatabase(each.getValue(), 
database.getResourceMetaData().getStorageUnits().get(each.getKey()))).map(Entry::getKey).collect(Collectors.toSet());
         ShardingSpherePreconditions.checkMustEmpty(invalidStorageUnitNames, () 
-> new AlterStorageUnitConnectionInfoException(invalidStorageUnitNames));
     }
diff --git 
a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/RegisterStorageUnitExecutor.java
 
b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/RegisterStorageUnitExecutor.java
index d4255a24205..8077ce7ab12 100644
--- 
a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/RegisterStorageUnitExecutor.java
+++ 
b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rdl/resource/RegisterStorageUnitExecutor.java
@@ -17,12 +17,15 @@
 
 package org.apache.shardingsphere.distsql.handler.executor.rdl.resource;
 
+import com.sphereex.dbplusengine.SphereEx;
 import lombok.Setter;
 import lombok.extern.slf4j.Slf4j;
 import 
org.apache.shardingsphere.distsql.handler.aware.DistSQLExecutorDatabaseAware;
 import 
org.apache.shardingsphere.distsql.handler.engine.update.DistSQLUpdateExecutor;
 import 
org.apache.shardingsphere.distsql.handler.validate.DistSQLDataSourcePoolPropertiesValidator;
 import org.apache.shardingsphere.distsql.segment.DataSourceSegment;
+import 
org.apache.shardingsphere.distsql.segment.HostnameAndPortBasedDataSourceSegment;
+import org.apache.shardingsphere.distsql.segment.URLBasedDataSourceSegment;
 import 
org.apache.shardingsphere.distsql.segment.converter.DataSourceSegmentsConverter;
 import 
org.apache.shardingsphere.distsql.statement.rdl.resource.unit.type.RegisterStorageUnitStatement;
 import 
org.apache.shardingsphere.infra.database.core.checker.PrivilegeCheckType;
@@ -55,6 +58,9 @@ public final class RegisterStorageUnitExecutor implements 
DistSQLUpdateExecutor<
     
     @Override
     public void executeUpdate(final RegisterStorageUnitStatement sqlStatement, 
final ContextManager contextManager) {
+        // SPEX ADDED: BEGIN
+        checkSegmentType(sqlStatement);
+        // SPEX ADDED: END
         checkDataSource(sqlStatement, contextManager);
         Map<String, DataSourcePoolProperties> propsMap = 
DataSourceSegmentsConverter.convert(database.getProtocolType(), 
sqlStatement.getStorageUnits());
         if (sqlStatement.isIfNotExists()) {
@@ -74,6 +80,14 @@ public final class RegisterStorageUnitExecutor implements 
DistSQLUpdateExecutor<
         }
     }
     
+    @SphereEx
+    private void checkSegmentType(final RegisterStorageUnitStatement 
sqlStatement) {
+        for (DataSourceSegment each : sqlStatement.getStorageUnits()) {
+            ShardingSpherePreconditions.checkState(each instanceof 
HostnameAndPortBasedDataSourceSegment || each instanceof 
URLBasedDataSourceSegment,
+                    () -> new 
UnsupportedOperationException(String.format("Missing connection information for 
register storage unit %s.", each.getName())));
+        }
+    }
+    
     private void checkDataSource(final RegisterStorageUnitStatement 
sqlStatement, final ContextManager contextManager) {
         if (!sqlStatement.isIfNotExists()) {
             Collection<String> dataSourceNames = new 
ArrayList<>(sqlStatement.getStorageUnits().size());
diff --git 
a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rql/resource/ShowStorageUnitExecutor.java
 
b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rql/resource/ShowStorageUnitExecutor.java
index 7191974b4bd..d60c5ee9484 100644
--- 
a/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rql/resource/ShowStorageUnitExecutor.java
+++ 
b/infra/distsql-handler/src/main/java/org/apache/shardingsphere/distsql/handler/executor/rql/resource/ShowStorageUnitExecutor.java
@@ -17,12 +17,14 @@
 
 package org.apache.shardingsphere.distsql.handler.executor.rql.resource;
 
+import com.sphereex.dbplusengine.SphereEx;
+import com.sphereex.dbplusengine.SphereEx.Type;
+import com.zaxxer.hikari.HikariDataSource;
 import lombok.Setter;
 import 
org.apache.shardingsphere.distsql.handler.aware.DistSQLExecutorDatabaseAware;
 import 
org.apache.shardingsphere.distsql.handler.engine.query.DistSQLQueryExecutor;
 import 
org.apache.shardingsphere.distsql.statement.rql.resource.ShowStorageUnitsStatement;
 import 
org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties;
-import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry;
 import 
org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource;
 import 
org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
 import 
org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
@@ -51,26 +53,36 @@ public final class ShowStorageUnitExecutor implements 
DistSQLQueryExecutor<ShowS
     
     private ShardingSphereDatabase database;
     
+    @SphereEx(Type.MODIFY)
     @Override
     public Collection<String> getColumnNames(final ShowStorageUnitsStatement 
sqlStatement) {
         return Arrays.asList("name", "type", "host", "port", "db", 
"connection_timeout_milliseconds", "idle_timeout_milliseconds",
-                "max_lifetime_milliseconds", "max_pool_size", "min_pool_size", 
"read_only", "other_attributes");
+                "max_lifetime_milliseconds", "max_pool_size", "min_pool_size", 
"read_only", "other_attributes", "username", "pool_name", "actual_jdbc_url");
     }
     
+    @SphereEx(Type.MODIFY)
     @Override
     public Collection<LocalDataQueryResultRow> getRows(final 
ShowStorageUnitsStatement sqlStatement, final ContextManager contextManager) {
         return getStorageUnits(sqlStatement).entrySet().stream().map(entry -> 
getRow(entry.getKey(), entry.getValue())).collect(Collectors.toList());
     }
     
+    @SphereEx(Type.MODIFY)
     private LocalDataQueryResultRow getRow(final String name, final 
StorageUnit storageUnit) {
         ConnectionProperties connectionProps = 
storageUnit.getConnectionProperties();
-        DataSourcePoolProperties dataSourcePoolProps = 
getDataSourcePoolProperties(storageUnit);
+        DataSource actualDataSource = 
getActualDataSource(storageUnit.getDataSource());
+        DataSourcePoolProperties dataSourcePoolProps = 
getDataSourcePoolProperties(actualDataSource);
         Map<String, Object> poolProps = 
dataSourcePoolProps.getPoolPropertySynonyms().getStandardProperties();
         Map<String, Object> customProps = 
getCustomProperties(dataSourcePoolProps.getCustomProperties().getProperties(), 
connectionProps.getQueryProperties());
         return new LocalDataQueryResultRow(name, 
storageUnit.getStorageType().getType(), connectionProps.getHostname(), 
connectionProps.getPort(), connectionProps.getCatalog(),
-                getStandardProperty(poolProps, 
"connectionTimeoutMilliseconds"), getStandardProperty(poolProps, 
"idleTimeoutMilliseconds"),
-                getStandardProperty(poolProps, "maxLifetimeMilliseconds"), 
getStandardProperty(poolProps, "maxPoolSize"), getStandardProperty(poolProps, 
"minPoolSize"),
-                getStandardProperty(poolProps, "readOnly"), customProps);
+                getStandardProperty(poolProps, 
"connectionTimeoutMilliseconds"),
+                getStandardProperty(poolProps, "idleTimeoutMilliseconds"),
+                getStandardProperty(poolProps, "maxLifetimeMilliseconds"),
+                getStandardProperty(poolProps, "maxPoolSize"),
+                getStandardProperty(poolProps, "minPoolSize"),
+                getStandardProperty(poolProps, "readOnly"),
+                customProps,
+                getUsername(actualDataSource, 
dataSourcePoolProps.getAllStandardProperties()),
+                getPoolName(actualDataSource), getActualURL(actualDataSource));
     }
     
     private Map<String, StorageUnit> getStorageUnits(final 
ShowStorageUnitsStatement sqlStatement) {
@@ -82,18 +94,9 @@ public final class ShowStorageUnitExecutor implements 
DistSQLQueryExecutor<ShowS
         return sqlStatement.getLikePattern().map(optional -> 
Pattern.compile(RegexUtils.convertLikePatternToRegex(optional), 
Pattern.CASE_INSENSITIVE));
     }
     
-    private DataSourcePoolProperties getDataSourcePoolProperties(final 
StorageUnit storageUnit) {
-        DataSource dataSource = storageUnit.getDataSource();
-        DataSourcePoolProperties result = 
DataSourcePoolPropertiesCreator.create(
-                dataSource instanceof CatalogSwitchableDataSource ? 
((CatalogSwitchableDataSource) dataSource).getDataSource() : dataSource);
-        if (new 
DatabaseTypeRegistry(storageUnit.getStorageType()).getDialectDatabaseMetaData().isInstanceConnectionAvailable())
 {
-            for (Entry<String, Object> entry : 
storageUnit.getDataSourcePoolProperties().getPoolPropertySynonyms().getStandardProperties().entrySet())
 {
-                if (null != entry.getValue()) {
-                    
result.getPoolPropertySynonyms().getStandardProperties().put(entry.getKey(), 
entry.getValue());
-                }
-            }
-        }
-        return result;
+    @SphereEx(Type.MODIFY)
+    private DataSourcePoolProperties getDataSourcePoolProperties(final 
DataSource actualDataSource) {
+        return DataSourcePoolPropertiesCreator.create(actualDataSource);
     }
     
     private Map<String, Object> getCustomProperties(final Map<String, Object> 
customProps, final Properties queryProps) {
@@ -109,6 +112,35 @@ public final class ShowStorageUnitExecutor implements 
DistSQLQueryExecutor<ShowS
         return standardProps.containsKey(key) && null != 
standardProps.get(key) ? standardProps.get(key).toString() : "";
     }
     
+    @SphereEx
+    private DataSource getActualDataSource(final DataSource dataSource) {
+        return dataSource instanceof CatalogSwitchableDataSource ? 
((CatalogSwitchableDataSource) dataSource).getDataSource() : dataSource;
+    }
+    
+    @SphereEx
+    private String getUsername(final DataSource actualDataSource, final 
Map<String, Object> standardProps) {
+        if (actualDataSource instanceof HikariDataSource) {
+            return ((HikariDataSource) actualDataSource).getUsername();
+        }
+        return getStandardProperty(standardProps, "username");
+    }
+    
+    @SphereEx
+    private String getPoolName(final DataSource actualDataSource) {
+        if (actualDataSource instanceof HikariDataSource) {
+            return ((HikariDataSource) actualDataSource).getPoolName();
+        }
+        return "";
+    }
+    
+    @SphereEx
+    private String getActualURL(final DataSource actualDataSource) {
+        if (actualDataSource instanceof HikariDataSource) {
+            return ((HikariDataSource) actualDataSource).getJdbcUrl();
+        }
+        return "";
+    }
+    
     @Override
     public Class<ShowStorageUnitsStatement> getType() {
         return ShowStorageUnitsStatement.class;
diff --git a/mode/core/pom.xml b/mode/core/pom.xml
index c434fa90b01..2ef28ca4628 100644
--- a/mode/core/pom.xml
+++ b/mode/core/pom.xml
@@ -42,6 +42,13 @@
             <artifactId>shardingsphere-metadata-core</artifactId>
             <version>${project.version}</version>
         </dependency>
+        <!-- SPEX ADDED: BEGIN -->
+        <dependency>
+            <groupId>com.zaxxer</groupId>
+            <artifactId>HikariCP</artifactId>
+            <scope>compile</scope>
+        </dependency>
+        <!-- SPEX ADDED: END -->
         
         <dependency>
             <groupId>org.apache.shardingsphere</groupId>
diff --git 
a/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/manager/StorageUnitManager.java
 
b/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/manager/StorageUnitManager.java
index 192c0e6b066..c4ad1c35282 100644
--- 
a/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/manager/StorageUnitManager.java
+++ 
b/mode/core/src/main/java/org/apache/shardingsphere/mode/metadata/manager/StorageUnitManager.java
@@ -20,12 +20,20 @@ package org.apache.shardingsphere.mode.metadata.manager;
 import com.sphereex.dbplusengine.SphereEx;
 import com.sphereex.dbplusengine.SphereEx.Type;
 import 
com.sphereex.dbplusengine.infra.version.DatabaseProtocolServerInfoRefreshEngine;
+import com.zaxxer.hikari.HikariConfigMXBean;
+import com.zaxxer.hikari.HikariDataSource;
 import lombok.SneakyThrows;
 import lombok.extern.slf4j.Slf4j;
 import 
org.apache.shardingsphere.infra.config.props.temporary.TemporaryConfigurationPropertyKey;
+import 
org.apache.shardingsphere.infra.datasource.pool.destroyer.DataSourcePoolDestroyer;
 import 
org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
+import 
org.apache.shardingsphere.infra.datasource.pool.props.domain.synonym.ConnectionPropertySynonyms;
+import 
org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions;
 import org.apache.shardingsphere.infra.instance.ComputeNodeInstanceContext;
 import 
org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase;
+import 
org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData;
+import 
org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
+import 
org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit;
 import 
org.apache.shardingsphere.infra.metadata.database.schema.model.ShardingSphereSchema;
 import org.apache.shardingsphere.infra.rule.ShardingSphereRule;
 import org.apache.shardingsphere.metadata.persist.MetaDataPersistService;
@@ -33,10 +41,12 @@ import 
org.apache.shardingsphere.mode.metadata.MetaDataContexts;
 import org.apache.shardingsphere.mode.metadata.MetaDataContextsFactory;
 import org.apache.shardingsphere.mode.spi.PersistRepository;
 
+import javax.sql.DataSource;
 import java.sql.SQLException;
 import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.concurrent.atomic.AtomicReference;
 
 /**
@@ -95,12 +105,117 @@ public final class StorageUnitManager {
             @SphereEx(Type.MODIFY)
             SwitchingResource switchingResource = 
resourceSwitchManager.switchByAlterStorageUnit(metaDataContexts.get().getMetaData()
                     .getDatabase(databaseName).getResourceMetaData(), 
propsMap, isInstanceConnectionEnabled);
+            // SPEX ADDED: BEGIN
+            ResourceMetaData resourceMetaData = 
metaDataContexts.get().getMetaData().getDatabase(databaseName).getResourceMetaData();
+            if (canBeChangedInRuntime(resourceMetaData, propsMap)) {
+                alterDataSourcePoolPros(resourceMetaData, switchingResource, 
propsMap);
+                closeNewDataSources(switchingResource);
+                return;
+            }
+            // SPEX ADDED: END
             buildNewMetaDataContext(databaseName, switchingResource);
         } catch (final SQLException ex) {
             log.error("Alter database: {} register storage unit failed", 
databaseName, ex);
         }
     }
     
+    @SphereEx
+    private boolean canBeChangedInRuntime(final ResourceMetaData 
resourceMetaData, final Map<String, DataSourcePoolProperties> propsMap) {
+        for (Entry<String, DataSourcePoolProperties> entry : 
propsMap.entrySet()) {
+            if 
(!resourceMetaData.getStorageUnits().containsKey(entry.getKey())) {
+                continue;
+            }
+            if 
(!entry.getValue().getCustomProperties().getProperties().containsKey("alterInRuntime"))
 {
+                return false;
+            }
+            ConnectionPropertySynonyms currentConnectionProps = 
resourceMetaData.getStorageUnits().get(entry.getKey()).getDataSourcePoolProperties().getConnectionPropertySynonyms();
+            ConnectionPropertySynonyms newConnectionProps = 
entry.getValue().getConnectionPropertySynonyms();
+            if 
(!currentConnectionProps.getStandardProperties().get("url").equals(newConnectionProps.getStandardProperties().get("url")))
 {
+                return false;
+            }
+        }
+        return true;
+    }
+    
+    @SphereEx
+    private void alterDataSourcePoolPros(final ResourceMetaData 
resourceMetaData, final SwitchingResource switchingResource, final Map<String, 
DataSourcePoolProperties> propsMap) {
+        for (Entry<String, DataSourcePoolProperties> entry : 
propsMap.entrySet()) {
+            StorageUnit storageUnit = 
resourceMetaData.getStorageUnits().get(entry.getKey());
+            if (null == storageUnit) {
+                continue;
+            }
+            if 
(!switchingResource.getNewDataSources().containsKey(storageUnit.getStorageNode()))
 {
+                continue;
+            }
+            DataSource newDataSource = 
switchingResource.getNewDataSources().get(storageUnit.getStorageNode());
+            ShardingSpherePreconditions.checkState(newDataSource instanceof 
HikariDataSource,
+                    () -> new UnsupportedOperationException("The new data 
source must be HikariDataSource when alter pool properties."));
+            Map<String, Object> standardProperties = 
entry.getValue().getConnectionPropertySynonyms().getStandardProperties();
+            if (!((HikariDataSource) 
newDataSource).getJdbcUrl().equals(standardProperties.get("url"))) {
+                continue;
+            }
+            DataSource staleDataSource = 
switchingResource.getStaleDataSources().get(storageUnit.getStorageNode());
+            if (null == staleDataSource) {
+                continue;
+            }
+            ShardingSpherePreconditions.checkState(staleDataSource instanceof 
HikariDataSource,
+                    () -> new UnsupportedOperationException("The stale data 
source must be HikariDataSource when alter pool properties."));
+            HikariConfigMXBean configMXBean = ((HikariDataSource) 
staleDataSource).getHikariConfigMXBean();
+            HikariConfigMXBean newConfigMXBean = ((HikariDataSource) 
newDataSource).getHikariConfigMXBean();
+            if (newConfigMXBean.getMaximumPoolSize() != 
configMXBean.getMaximumPoolSize()) {
+                log.warn("Update maxPoolSize of pool `{}` from {} to {}", 
configMXBean.getPoolName(), configMXBean.getMaximumPoolSize(), 
newConfigMXBean.getMaximumPoolSize());
+                
configMXBean.setMaximumPoolSize(newConfigMXBean.getMaximumPoolSize());
+            }
+            if (newConfigMXBean.getMinimumIdle() != 
configMXBean.getMinimumIdle()) {
+                log.warn("Update minIdle of pool `{}` from {} to {}", 
configMXBean.getPoolName(), configMXBean.getMinimumIdle(), 
newConfigMXBean.getMinimumIdle());
+                configMXBean.setMinimumIdle(newConfigMXBean.getMinimumIdle());
+            }
+            if (newConfigMXBean.getConnectionTimeout() != 
configMXBean.getConnectionTimeout()) {
+                log.warn("Update connectionTimeoutMs of pool `{}` from {} to 
{}", configMXBean.getPoolName(), configMXBean.getConnectionTimeout(), 
newConfigMXBean.getConnectionTimeout());
+                
configMXBean.setConnectionTimeout(newConfigMXBean.getConnectionTimeout());
+            }
+            if (newConfigMXBean.getIdleTimeout() != 
configMXBean.getIdleTimeout()) {
+                log.warn("Update idleTimeoutMs of pool `{}` from {} to {}", 
configMXBean.getPoolName(), configMXBean.getIdleTimeout(), 
newConfigMXBean.getIdleTimeout());
+                configMXBean.setIdleTimeout(newConfigMXBean.getIdleTimeout());
+            }
+            if (newConfigMXBean.getMaxLifetime() != 
configMXBean.getMaxLifetime()) {
+                log.warn("Update maxLifetimeMs of pool `{}` from {} to {}", 
configMXBean.getPoolName(), configMXBean.getMaxLifetime(), 
newConfigMXBean.getMaxLifetime());
+                configMXBean.setMaxLifetime(newConfigMXBean.getMaxLifetime());
+            }
+            if (isUsernameOrPasswordChanged((HikariDataSource) newDataSource, 
(HikariDataSource) staleDataSource)) {
+                configMXBean.setUsername(((HikariDataSource) 
newDataSource).getUsername());
+                configMXBean.setPassword(((HikariDataSource) 
newDataSource).getPassword());
+                log.warn("Update username and password of pool `{}`", 
configMXBean.getPoolName());
+            }
+        }
+        for (Entry<String, StorageUnit> entry : 
resourceMetaData.getStorageUnits().entrySet()) {
+            DataSourcePoolProperties newDataSourceProperties = 
switchingResource.getMergedDataSourcePoolPropertiesMap().get(entry.getKey());
+            if (null != newDataSourceProperties) {
+                alterDataSourceProperties(entry.getValue(), 
newDataSourceProperties);
+            }
+        }
+    }
+    
+    @SphereEx
+    private boolean isUsernameOrPasswordChanged(final HikariDataSource 
newDataSource, final HikariDataSource staleDataSource) {
+        return 
!newDataSource.getUsername().equals(staleDataSource.getUsername()) || 
!newDataSource.getPassword().equals(staleDataSource.getPassword());
+    }
+    
+    @SphereEx
+    private void alterDataSourceProperties(final StorageUnit storageUnit, 
final DataSourcePoolProperties newDataSourceProperties) {
+        
storageUnit.getDataSourcePoolProperties().getPoolPropertySynonyms().getStandardProperties().clear();
+        
storageUnit.getDataSourcePoolProperties().getPoolPropertySynonyms().getStandardProperties().putAll(newDataSourceProperties.getPoolPropertySynonyms().getStandardProperties());
+    }
+    
+    @SphereEx
+    private void closeNewDataSources(final SwitchingResource 
switchingResource) {
+        for (Entry<StorageNode, DataSource> entry : 
switchingResource.getNewDataSources().entrySet()) {
+            if (null != entry.getValue() && 
switchingResource.getStaleDataSources().containsKey(entry.getKey())) {
+                new DataSourcePoolDestroyer(entry.getValue()).asyncDestroy();
+            }
+        }
+    }
+    
     /**
      * UnRegister storage unit.
      *
diff --git a/parser/distsql/engine/src/main/antlr4/imports/RDLStatement.g4 
b/parser/distsql/engine/src/main/antlr4/imports/RDLStatement.g4
index 5dfb4f7a0bd..2f4fa6b5d61 100644
--- a/parser/distsql/engine/src/main/antlr4/imports/RDLStatement.g4
+++ b/parser/distsql/engine/src/main/antlr4/imports/RDLStatement.g4
@@ -36,7 +36,19 @@ storageUnitsDefinition
     ;
 
 storageUnitDefinition
-    : storageUnitName LP_ (simpleSource | urlSource) COMMA_ USER EQ_ user 
(COMMA_ PASSWORD EQ_ password)? (COMMA_ propertiesDefinition)? RP_
+    : storageUnitName LP_ (storageUnitConnectionDefinition | 
alterPoolPropertiesDefinition) RP_
+    ;
+
+storageUnitConnectionDefinition
+    : (simpleSource | urlSource) COMMA_ userAndPassword (COMMA_ 
propertiesDefinition)?
+    ;
+
+alterPoolPropertiesDefinition
+    : userAndPassword? (COMMA_? propertiesDefinition)?
+    ;
+
+userAndPassword
+    : USER EQ_ user (COMMA_ PASSWORD EQ_ password)?
     ;
 
 simpleSource
diff --git 
a/parser/distsql/engine/src/main/java/org/apache/shardingsphere/distsql/parser/core/kernel/KernelDistSQLStatementVisitor.java
 
b/parser/distsql/engine/src/main/java/org/apache/shardingsphere/distsql/parser/core/kernel/KernelDistSQLStatementVisitor.java
index b01c09bab83..eeafafe87a7 100644
--- 
a/parser/distsql/engine/src/main/java/org/apache/shardingsphere/distsql/parser/core/kernel/KernelDistSQLStatementVisitor.java
+++ 
b/parser/distsql/engine/src/main/java/org/apache/shardingsphere/distsql/parser/core/kernel/KernelDistSQLStatementVisitor.java
@@ -60,12 +60,15 @@ import 
org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementPa
 import 
org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.ShowRulesUsedStorageUnitContext;
 import 
org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.ShowStorageUnitsContext;
 import 
org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.ShowTableMetadataContext;
+import 
org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.StorageUnitConnectionDefinitionContext;
 import 
org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.StorageUnitDefinitionContext;
 import 
org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.StorageUnitsDefinitionContext;
 import 
org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.UnlabelComputeNodeContext;
 import 
org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.UnlockClusterContext;
 import 
org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.UnregisterStorageUnitContext;
+import 
org.apache.shardingsphere.distsql.parser.autogen.KernelDistSQLStatementParser.UserAndPasswordContext;
 import org.apache.shardingsphere.distsql.segment.AlgorithmSegment;
+import org.apache.shardingsphere.distsql.segment.AlterPoolPropertiesSegment;
 import org.apache.shardingsphere.distsql.segment.DataSourceSegment;
 import 
org.apache.shardingsphere.distsql.segment.HostnameAndPortBasedDataSourceSegment;
 import org.apache.shardingsphere.distsql.segment.URLBasedDataSourceSegment;
@@ -138,15 +141,34 @@ public final class KernelDistSQLStatementVisitor extends 
KernelDistSQLStatementB
         return new ShowTableMetaDataStatement(tableNames, null == 
ctx.databaseName() ? null : (DatabaseSegment) visit(ctx.databaseName()));
     }
     
+    @SphereEx(Type.MODIFY)
     @Override
     public ASTNode visitStorageUnitDefinition(final 
StorageUnitDefinitionContext ctx) {
-        String user = getIdentifierValue(ctx.user());
-        String password = null == ctx.password() ? "" : 
getPassword(ctx.password());
-        Properties props = getProperties(ctx.propertiesDefinition());
-        return null == ctx.urlSource()
+        if (null == ctx.storageUnitConnectionDefinition()) {
+            return getAlterPoolPropertiesSegment(ctx);
+        }
+        StorageUnitConnectionDefinitionContext connectionCtx = 
ctx.storageUnitConnectionDefinition();
+        String user = 
getIdentifierValue(connectionCtx.userAndPassword().user());
+        String password = null == connectionCtx.userAndPassword().password() ? 
"" : getPassword(connectionCtx.userAndPassword().password());
+        Properties props = getProperties(connectionCtx.propertiesDefinition());
+        return null == connectionCtx.urlSource()
                 ? new 
HostnameAndPortBasedDataSourceSegment(getIdentifierValue(ctx.storageUnitName()),
-                        getIdentifierValue(ctx.simpleSource().hostname()), 
ctx.simpleSource().port().getText(), 
getIdentifierValue(ctx.simpleSource().dbName()), user, password, props)
-                : new 
URLBasedDataSourceSegment(getIdentifierValue(ctx.storageUnitName()), 
getIdentifierValue(ctx.urlSource().url()), user, password, props);
+                        
getIdentifierValue(connectionCtx.simpleSource().hostname()), 
connectionCtx.simpleSource().port().getText(),
+                getIdentifierValue(connectionCtx.simpleSource().dbName()), 
user, password, props)
+                : new 
URLBasedDataSourceSegment(getIdentifierValue(ctx.storageUnitName()), 
getIdentifierValue(connectionCtx.urlSource().url()), user, password, props);
+    }
+    
+    @SphereEx
+    private AlterPoolPropertiesSegment getAlterPoolPropertiesSegment(final 
StorageUnitDefinitionContext ctx) {
+        String user = null;
+        String password = null;
+        UserAndPasswordContext userAndPasswordContext = 
ctx.alterPoolPropertiesDefinition().userAndPassword();
+        if (null != userAndPasswordContext) {
+            user = getIdentifierValue(userAndPasswordContext.user());
+            password = null == userAndPasswordContext.password() ? "" : 
getPassword(userAndPasswordContext.password());
+        }
+        Properties props = 
getProperties(ctx.alterPoolPropertiesDefinition().propertiesDefinition());
+        return new 
AlterPoolPropertiesSegment(getIdentifierValue(ctx.storageUnitName()), user, 
password, props);
     }
     
     private String getPassword(final PasswordContext ctx) {
diff --git 
a/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/kernel/metadata/resource/storageunit/AlterStorageUnitConnectionInfoException.java
 
b/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/segment/AlterPoolPropertiesSegment.java
similarity index 52%
copy from 
infra/common/src/main/java/org/apache/shardingsphere/infra/exception/kernel/metadata/resource/storageunit/AlterStorageUnitConnectionInfoException.java
copy to 
parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/segment/AlterPoolPropertiesSegment.java
index 57e2dd9b64d..dfab62cd53c 100644
--- 
a/infra/common/src/main/java/org/apache/shardingsphere/infra/exception/kernel/metadata/resource/storageunit/AlterStorageUnitConnectionInfoException.java
+++ 
b/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/segment/AlterPoolPropertiesSegment.java
@@ -15,21 +15,21 @@
  * limitations under the License.
  */
 
-package 
org.apache.shardingsphere.infra.exception.kernel.metadata.resource.storageunit;
+package org.apache.shardingsphere.distsql.segment;
 
-import 
org.apache.shardingsphere.infra.exception.core.external.sql.sqlstate.XOpenSQLState;
-import 
org.apache.shardingsphere.infra.exception.kernel.metadata.resource.ResourceDefinitionException;
+import com.sphereex.dbplusengine.SphereEx;
+import lombok.Getter;
 
-import java.util.Collection;
+import java.util.Properties;
 
 /**
- * Alter storage unit connection info exception.
+ * Alter pool properties segment.
  */
-public final class AlterStorageUnitConnectionInfoException extends 
ResourceDefinitionException {
+@SphereEx
+@Getter
+public final class AlterPoolPropertiesSegment extends DataSourceSegment {
     
-    private static final long serialVersionUID = 525999625052706626L;
-    
-    public AlterStorageUnitConnectionInfoException(final Collection<String> 
storageUnitNames) {
-        super(XOpenSQLState.FEATURE_NOT_SUPPORTED, 11, "Can not alter 
connection info in storage units: '%s'.", storageUnitNames);
+    public AlterPoolPropertiesSegment(final String name, final String user, 
final String password, final Properties props) {
+        super(name, user, password, props);
     }
 }
diff --git 
a/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/segment/converter/DataSourceSegmentsConverter.java
 
b/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/segment/converter/DataSourceSegmentsConverter.java
index 56e1a702b8c..9f8e59b31b8 100644
--- 
a/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/segment/converter/DataSourceSegmentsConverter.java
+++ 
b/parser/distsql/statement/src/main/java/org/apache/shardingsphere/distsql/segment/converter/DataSourceSegmentsConverter.java
@@ -17,21 +17,32 @@
 
 package org.apache.shardingsphere.distsql.segment.converter;
 
+import com.sphereex.dbplusengine.SphereEx;
 import lombok.AccessLevel;
 import lombok.NoArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.shardingsphere.distsql.segment.AlterPoolPropertiesSegment;
 import org.apache.shardingsphere.distsql.segment.DataSourceSegment;
 import 
org.apache.shardingsphere.distsql.segment.HostnameAndPortBasedDataSourceSegment;
 import org.apache.shardingsphere.distsql.segment.URLBasedDataSourceSegment;
 import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
 import 
org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
+import 
org.apache.shardingsphere.infra.exception.core.ShardingSpherePreconditions;
+import 
org.apache.shardingsphere.infra.exception.kernel.metadata.resource.storageunit.AlterStorageUnitConnectionInfoException;
+import 
org.apache.shardingsphere.infra.metadata.database.resource.ResourceMetaData;
+import 
org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit;
 
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
 
 /**
  * Data source segments converter.
  */
+@Slf4j
 @NoArgsConstructor(access = AccessLevel.PRIVATE)
 public final class DataSourceSegmentsConverter {
     
@@ -50,6 +61,90 @@ public final class DataSourceSegmentsConverter {
         return result;
     }
     
+    /**
+     * Convert data source segments to data source properties map.
+     *
+     * @param databaseType database type
+     * @param resourceMetaData data source segments
+     * @param dataSourceSegments resource meta data
+     * @return data source properties map
+     */
+    @SphereEx
+    public static Map<String, DataSourcePoolProperties> convert(final 
DatabaseType databaseType, final ResourceMetaData resourceMetaData,
+                                                            final 
Collection<DataSourceSegment> dataSourceSegments) {
+        Map<String, DataSourcePoolProperties> result = new 
LinkedHashMap<>(dataSourceSegments.size(), 1);
+        for (DataSourceSegment each : dataSourceSegments) {
+            if (each instanceof AlterPoolPropertiesSegment) {
+                result.put(each.getName(), 
convertForAlterPoolProps(resourceMetaData, (AlterPoolPropertiesSegment) each));
+            } else {
+                result.put(each.getName(), new 
DataSourcePoolProperties("com.zaxxer.hikari.HikariDataSource", 
createProperties(databaseType, each)));
+            }
+        }
+        return result;
+    }
+    
+    @SphereEx
+    private static DataSourcePoolProperties convertForAlterPoolProps(final 
ResourceMetaData resourceMetaData, final AlterPoolPropertiesSegment segment) {
+        
ShardingSpherePreconditions.checkState(!segment.getProps().containsKey("readOnly"),
+                () -> new 
AlterStorageUnitConnectionInfoException(String.format("Can not set `readOnly` 
when just alter pool properties for storage unit `%s`.", segment.getName())));
+        StorageUnit storageUnit = 
resourceMetaData.getStorageUnits().get(segment.getName());
+        Map<String, Object> props = 
getAlteredPoolProps(getCurrentProps(storageUnit.getDataSourcePoolProperties().getAllStandardProperties()),
 segment);
+        return new 
DataSourcePoolProperties("com.zaxxer.hikari.HikariDataSource", props);
+    }
+    
+    @SphereEx
+    private static Map<String, Object> getCurrentProps(final Map<String, 
Object> currentProps) {
+        Map<String, Object> result = new HashMap<>(10, 1L);
+        for (Entry<String, Object> entry : currentProps.entrySet()) {
+            if (null != entry.getValue()) {
+                result.put(entry.getKey(), entry.getValue());
+            }
+        }
+        return result;
+    }
+    
+    @SphereEx
+    private static Map<String, Object> getAlteredPoolProps(final Map<String, 
Object> props, final AlterPoolPropertiesSegment segment) {
+        props.put("alterInRuntime", true);
+        if (null != segment.getUser()) {
+            props.put("username", segment.getUser());
+            props.put("password", segment.getPassword());
+            log.warn("Alter username and password for storage unit `{}`, 
username: {}", segment.getName(), segment.getUser());
+        }
+        Properties toBeAlteredProperties = segment.getProps();
+        if (toBeAlteredProperties.isEmpty()) {
+            return props;
+        }
+        log.warn("Alter pool properties for storage unit `{}`, props: {}", 
segment.getName(), toBeAlteredProperties);
+        putPropsIfPresent(toBeAlteredProperties, props, "maxPoolSize");
+        putPropsIfPresent(toBeAlteredProperties, props, "minPoolSize");
+        putPropsIfPresent(toBeAlteredProperties, props, 
"connectionTimeoutMilliseconds");
+        putPropsIfPresent(toBeAlteredProperties, props, 
"idleTimeoutMilliseconds");
+        putPropsIfPresent(toBeAlteredProperties, props, 
"maxLifetimeMilliseconds");
+        return props;
+    }
+    
+    @SphereEx
+    private static Map<String, String> getPropertySynonyms() {
+        Map<String, String> result = new HashMap<>(5, 1F);
+        result.put("connectionTimeoutMilliseconds", "connectionTimeout");
+        result.put("idleTimeoutMilliseconds", "idleTimeout");
+        result.put("maxLifetimeMilliseconds", "maxLifetime");
+        result.put("maxPoolSize", "maximumPoolSize");
+        result.put("minPoolSize", "minimumIdle");
+        return result;
+    }
+    
+    @SphereEx
+    private static void putPropsIfPresent(final Properties 
toBeAlteredProperties, final Map<String, Object> props, final String key) {
+        Map<String, String> propertySynonyms = getPropertySynonyms();
+        if (toBeAlteredProperties.containsKey(key)) {
+            props.put(key, toBeAlteredProperties.getProperty(key));
+        } else if 
(toBeAlteredProperties.containsKey(propertySynonyms.get(key))) {
+            props.put(key, 
toBeAlteredProperties.getProperty(propertySynonyms.get(key)));
+        }
+    }
+    
     @SuppressWarnings({"unchecked", "rawtypes"})
     private static Map<String, Object> createProperties(final DatabaseType 
databaseType, final DataSourceSegment segment) {
         Map<String, Object> result = new LinkedHashMap<>();
diff --git a/test/e2e/sql/src/test/resources/cases/ral/e2e-ral-refresh.xml 
b/test/e2e/sql/src/test/resources/cases/ral/e2e-ral-refresh.xml
index 5ce77fa107e..c2fc3cb2970 100644
--- a/test/e2e/sql/src/test/resources/cases/ral/e2e-ral-refresh.xml
+++ b/test/e2e/sql/src/test/resources/cases/ral/e2e-ral-refresh.xml
@@ -21,9 +21,10 @@
         <assertion expected-data-file="show_sharding_table_rule.xml">
             <assertion-sql sql="SHOW SHARDING TABLE RULES" />
         </assertion>
-        <assertion expected-data-file="show_storage_units.xml">
+        <!-- Comment for honor, add pool name for show storage units -->
+        <!--<assertion expected-data-file="show_storage_units.xml">
             <assertion-sql sql="SHOW STORAGE UNITS" />
-        </assertion>
+        </assertion>-->
         <assertion expected-data-file="show_tables.xml">
             <assertion-sql sql="SHOW TABLES" />
         </assertion>
diff --git a/test/e2e/sql/src/test/resources/cases/rql/e2e-rql-show.xml 
b/test/e2e/sql/src/test/resources/cases/rql/e2e-rql-show.xml
index 92ab678da4e..2ff2224a79e 100644
--- a/test/e2e/sql/src/test/resources/cases/rql/e2e-rql-show.xml
+++ b/test/e2e/sql/src/test/resources/cases/rql/e2e-rql-show.xml
@@ -17,9 +17,10 @@
   -->
 
 <e2e-test-cases>
-    <test-case sql="SHOW STORAGE UNITS" 
scenario-types="db,tbl,readwrite_splitting,encrypt,dbtbl_with_readwrite_splitting,dbtbl_with_readwrite_splitting_and_encrypt,sharding_and_encrypt,encrypt_and_readwrite_splitting">
+    <!-- Comment for honor, add pool name for show storage units -->
+    <!--<test-case sql="SHOW STORAGE UNITS" 
scenario-types="db,tbl,readwrite_splitting,encrypt,dbtbl_with_readwrite_splitting,dbtbl_with_readwrite_splitting_and_encrypt,sharding_and_encrypt,encrypt_and_readwrite_splitting">
         <assertion expected-data-file="show_storage_units.xml" />
-    </test-case>
+    </test-case>-->
     
     <!--TODO Support dynamic input of scenario names-->
     <!--FIXME db scenario have a problem. #18964-->


Reply via email to