This is an automated email from the ASF dual-hosted git repository.

totalo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git
The following commit(s) were added to refs/heads/master by this push:
     new 69b0255e378 Add StorageUnit (#28066)
69b0255e378 is described below

commit 69b0255e378d377b78a64e53328a983194b0e935
Author: Liang Zhang <zhangli...@apache.org>
AuthorDate: Mon Aug 14 00:03:13 2023 +0800

    Add StorageUnit (#28066)
    
    * Refactor StorageResourceUtils
    
    * Add StorageUnit
    
    * Add StorageUnit
    
    * Add StorageUnit
---
 .../DataSourceProvidedDatabaseConfiguration.java   |   4 +-
 .../database/resource/ResourceMetaData.java        |  14 +--
 .../database/resource/StorageUnitMetaData.java     | 109 ---------------------
 .../resource/storage/StorageResourceUtils.java     |  23 +++--
 .../database/resource/storage/StorageUnit.java     |  96 ++++++++++++++++++
 .../resource/storage/StorageUnitMetaData.java      |  75 ++++++++++++++
 6 files changed, 197 insertions(+), 124 deletions(-)

diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfiguration.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfiguration.java
index c6193de5a0b..e16db003755 100644
--- a/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfiguration.java
+++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/config/database/impl/DataSourceProvidedDatabaseConfiguration.java
@@ -46,8 +46,8 @@ public final class DataSourceProvidedDatabaseConfiguration implements DatabaseCo
 
     private final Map<String, DataSourcePoolProperties> dataSourcePoolPropertiesMap;
 
-    public DataSourceProvidedDatabaseConfiguration(final Map<String, DataSource> dataSources, final Collection<RuleConfiguration> ruleConfigurations) {
-        this.ruleConfigurations = ruleConfigurations;
+    public DataSourceProvidedDatabaseConfiguration(final Map<String, DataSource> dataSources, final Collection<RuleConfiguration> ruleConfigs) {
+        this.ruleConfigurations = ruleConfigs;
         this.storageResource = new StorageResource(StorageResourceUtils.getStorageNodeDataSources(dataSources), StorageResourceUtils.getStorageUnitNodeMappers(dataSources));
         dataSourcePoolPropertiesMap = createDataSourcePoolPropertiesMap(dataSources);
     }
diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java
index bb930c40547..c8fc7350ea3 100644
--- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java
+++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/ResourceMetaData.java
@@ -26,6 +26,8 @@ import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourceP
 import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode;
 import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResource;
 import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResourceUtils;
+import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit;
+import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnitMetaData;
 
 import javax.sql.DataSource;
 import java.util.Collection;
@@ -87,7 +89,7 @@ public final class ResourceMetaData {
      */
     public Collection<String> getAllInstanceDataSourceNames() {
         Collection<String> result = new LinkedList<>();
-        for (Entry<String, ConnectionProperties> entry : storageUnitMetaData.getConnectionPropertiesMap().entrySet()) {
+        for (Entry<String, StorageUnit> entry : storageUnitMetaData.getStorageUnits().entrySet()) {
             if (!isExisted(entry.getKey(), result)) {
                 result.add(entry.getKey());
             }
@@ -96,8 +98,8 @@
     }
 
     private boolean isExisted(final String dataSourceName, final Collection<String> existedDataSourceNames) {
-        return existedDataSourceNames.stream().anyMatch(each -> storageUnitMetaData.getConnectionPropertiesMap().get(dataSourceName)
-                .isInSameDatabaseInstance(storageUnitMetaData.getConnectionPropertiesMap().get(each)));
+        return existedDataSourceNames.stream().anyMatch(each -> storageUnitMetaData.getStorageUnits().get(dataSourceName).getConnectionProperties()
+                .isInSameDatabaseInstance(storageUnitMetaData.getStorageUnits().get(each).getConnectionProperties()));
     }
 
     /**
@@ -107,7 +109,7 @@
      * @return connection properties
      */
     public ConnectionProperties getConnectionProperties(final String dataSourceName) {
-        return storageUnitMetaData.getConnectionPropertiesMap().get(dataSourceName);
+        return storageUnitMetaData.getStorageUnits().get(dataSourceName).getConnectionProperties();
     }
 
     /**
@@ -117,7 +119,7 @@
      * @return storage type
      */
     public DatabaseType getStorageType(final String dataSourceName) {
-        return storageUnitMetaData.getStorageTypes().get(dataSourceName);
+        return storageUnitMetaData.getStorageUnits().get(dataSourceName).getStorageType();
    }
 
     /**
@@ -127,7 +129,7 @@
      * @return not existed resource names
      */
     public Collection<String> getNotExistedDataSources(final Collection<String> resourceNames) {
-        return resourceNames.stream().filter(each -> !storageUnitMetaData.getDataSources().containsKey(each)).collect(Collectors.toSet());
+        return resourceNames.stream().filter(each -> !storageUnitMetaData.getStorageUnits().containsKey(each)).collect(Collectors.toSet());
     }
 
     /**
diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/StorageUnitMetaData.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/StorageUnitMetaData.java
deleted file mode 100644
index f2ef09c1c4b..00000000000
--- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/StorageUnitMetaData.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.shardingsphere.infra.metadata.database.resource;
-
-import lombok.Getter;
-import org.apache.shardingsphere.infra.database.DatabaseTypeEngine;
-import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties;
-import org.apache.shardingsphere.infra.database.core.connector.ConnectionPropertiesParser;
-import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader;
-import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
-import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource;
-import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
-import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
-import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode;
-import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnitNodeMapper;
-import org.apache.shardingsphere.infra.state.datasource.DataSourceStateManager;
-
-import javax.sql.DataSource;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.stream.Collectors;
-
-/**
- * Storage unit meta data.
- */
-@Getter
-public final class StorageUnitMetaData {
-
-    private final Map<String, DataSource> dataSources;
-
-    private final Map<String, DataSourcePoolProperties> dataSourcePoolPropertiesMap;
-
-    private final Map<String, DatabaseType> storageTypes;
-
-    private final Map<String, StorageUnitNodeMapper> unitNodeMappers;
-
-    private final Map<String, ConnectionProperties> connectionPropertiesMap;
-
-    public StorageUnitMetaData(final String databaseName, final Map<StorageNode, DataSource> storageNodeDataSources,
-                               final Map<String, DataSourcePoolProperties> dataSourcePoolPropertiesMap, final Map<String, StorageUnitNodeMapper> unitNodeMappers) {
-        this.unitNodeMappers = unitNodeMappers;
-        this.dataSources = getStorageUnitDataSources(storageNodeDataSources, unitNodeMappers);
-        this.dataSourcePoolPropertiesMap = dataSourcePoolPropertiesMap;
-        Map<StorageNode, DataSource> enabledStorageNodeDataSources = getEnabledStorageNodeDataSources(databaseName, storageNodeDataSources);
-        storageTypes = createStorageTypes(enabledStorageNodeDataSources, unitNodeMappers);
-        connectionPropertiesMap = createConnectionPropertiesMap(enabledStorageNodeDataSources, storageTypes, unitNodeMappers);
-    }
-
-    private Map<StorageNode, DataSource> getEnabledStorageNodeDataSources(final String databaseName, final Map<StorageNode, DataSource> storageNodeDataSources) {
-        Map<String, DataSource> toBeCheckedDataSources = new LinkedHashMap<>(storageNodeDataSources.size(), 1F);
-        for (Entry<StorageNode, DataSource> entry : storageNodeDataSources.entrySet()) {
-            toBeCheckedDataSources.put(entry.getKey().getName(), entry.getValue());
-        }
-        Map<String, DataSource> enabledDataSources = DataSourceStateManager.getInstance().getEnabledDataSources(databaseName, toBeCheckedDataSources);
-        return storageNodeDataSources.entrySet().stream()
-                .filter(entry -> enabledDataSources.containsKey(entry.getKey().getName())).collect(Collectors.toMap(Entry::getKey, Entry::getValue));
-    }
-
-    private Map<String, DataSource> getStorageUnitDataSources(final Map<StorageNode, DataSource> storageNodeDataSources, final Map<String, StorageUnitNodeMapper> unitNodeMappers) {
-        Map<String, DataSource> result = new LinkedHashMap<>(unitNodeMappers.size(), 1F);
-        for (Entry<String, StorageUnitNodeMapper> entry : unitNodeMappers.entrySet()) {
-            DataSource dataSource = storageNodeDataSources.get(entry.getValue().getStorageNode());
-            result.put(entry.getKey(), new CatalogSwitchableDataSource(dataSource, entry.getValue().getCatalog(), entry.getValue().getUrl()));
-        }
-        return result;
-    }
-
-    private Map<String, DatabaseType> createStorageTypes(final Map<StorageNode, DataSource> enabledStorageNodeDataSources, final Map<String, StorageUnitNodeMapper> unitNodeMappers) {
-        Map<String, DatabaseType> result = new LinkedHashMap<>(unitNodeMappers.size(), 1F);
-        for (Entry<String, StorageUnitNodeMapper> entry : unitNodeMappers.entrySet()) {
-            result.put(entry.getKey(), DatabaseTypeEngine.getStorageType(enabledStorageNodeDataSources.containsKey(entry.getValue().getStorageNode())
-                    ? Collections.singleton(enabledStorageNodeDataSources.get(entry.getValue().getStorageNode()))
-                    : Collections.emptyList()));
-        }
-        return result;
-    }
-
-    private Map<String, ConnectionProperties> createConnectionPropertiesMap(final Map<StorageNode, DataSource> enabledStorageNodeDataSources,
-                                                                            final Map<String, DatabaseType> storageTypes, final Map<String, StorageUnitNodeMapper> unitNodeMappers) {
-        Map<String, ConnectionProperties> result = new LinkedHashMap<>(unitNodeMappers.size(), 1F);
-        for (Entry<String, StorageUnitNodeMapper> entry : unitNodeMappers.entrySet()) {
-            if (enabledStorageNodeDataSources.containsKey(entry.getValue().getStorageNode())) {
-                Map<String, Object> standardProps = DataSourcePoolPropertiesCreator.create(enabledStorageNodeDataSources.get(entry.getValue().getStorageNode()))
-                        .getConnectionPropertySynonyms().getStandardProperties();
-                DatabaseType storageType = storageTypes.get(entry.getKey());
-                ConnectionPropertiesParser parser = DatabaseTypedSPILoader.getService(ConnectionPropertiesParser.class, storageType);
-                result.put(entry.getKey(), parser.parse(standardProps.get("url").toString(), standardProps.get("username").toString(), entry.getValue().getCatalog()));
-            }
-        }
-        return result;
-    }
-}
diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResourceUtils.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResourceUtils.java
index 0d0f6e718ac..5b5a7ea9fa0 100644
--- a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResourceUtils.java
+++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageResourceUtils.java
@@ -26,6 +26,7 @@ import javax.sql.DataSource;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.stream.Collectors;
 
 /**
  * Storage utility class.
@@ -54,12 +55,20 @@
      * @return storage unit node mappers
      */
     public static Map<String, StorageUnitNodeMapper> getStorageUnitNodeMappers(final Map<String, DataSource> dataSources) {
-        Map<String, StorageUnitNodeMapper> result = new LinkedHashMap<>(dataSources.size(), 1F);
-        for (Entry<String, DataSource> entry : dataSources.entrySet()) {
-            DataSourcePoolProperties dataSourcePoolProperties = DataSourcePoolPropertiesCreator.create(entry.getValue());
-            String url = dataSourcePoolProperties.getConnectionPropertySynonyms().getStandardProperties().get("url").toString();
-            result.put(entry.getKey(), new StorageUnitNodeMapper(entry.getKey(), new StorageNode(entry.getKey()), url));
-        }
-        return result;
+        return dataSources.entrySet().stream()
+                .collect(Collectors.toMap(Entry::getKey, entry -> getStorageUnitNodeMapper(entry.getKey(), entry.getValue()), (oldValue, currentValue) -> currentValue, LinkedHashMap::new));
+    }
+
+    /**
+     * Get storage unit node mapper from provided data source.
+     *
+     * @param dataSourceName data source name
+     * @param dataSource data source
+     * @return storage unit node mapper
+     */
+    public static StorageUnitNodeMapper getStorageUnitNodeMapper(final String dataSourceName, final DataSource dataSource) {
+        DataSourcePoolProperties props = DataSourcePoolPropertiesCreator.create(dataSource);
+        String url = props.getConnectionPropertySynonyms().getStandardProperties().get("url").toString();
+        return new StorageUnitNodeMapper(dataSourceName, new StorageNode(dataSourceName), url);
     }
 }
diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageUnit.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageUnit.java
new file mode 100644
index 00000000000..2b32e913937
--- /dev/null
+++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageUnit.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.infra.metadata.database.resource.storage;
+
+import lombok.Getter;
+import org.apache.shardingsphere.infra.database.DatabaseTypeEngine;
+import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties;
+import org.apache.shardingsphere.infra.database.core.connector.ConnectionPropertiesParser;
+import org.apache.shardingsphere.infra.database.core.spi.DatabaseTypedSPILoader;
+import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
+import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource;
+import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
+import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
+import org.apache.shardingsphere.infra.state.datasource.DataSourceStateManager;
+
+import javax.sql.DataSource;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+/**
+ * Storage unit.
+ */
+@Getter
+public final class StorageUnit {
+
+    private final DataSourcePoolProperties dataSourcePoolPropertiesMap;
+
+    private final StorageUnitNodeMapper unitNodeMapper;
+
+    private final DataSource dataSource;
+
+    private final DatabaseType storageType;
+
+    private final ConnectionProperties connectionProperties;
+
+    public StorageUnit(final String databaseName, final Map<StorageNode, DataSource> storageNodeDataSources,
+                       final DataSourcePoolProperties propsMap, final StorageUnitNodeMapper unitNodeMapper) {
+        this.dataSourcePoolPropertiesMap = propsMap;
+        this.unitNodeMapper = unitNodeMapper;
+        dataSource = getStorageUnitDataSource(storageNodeDataSources, unitNodeMapper);
+        Map<StorageNode, DataSource> enabledStorageNodeDataSources = getEnabledStorageNodeDataSources(databaseName, storageNodeDataSources);
+        storageType = createStorageType(enabledStorageNodeDataSources, unitNodeMapper);
+        connectionProperties = createConnectionProperties(enabledStorageNodeDataSources, unitNodeMapper, storageType).orElse(null);
+    }
+
+    private DataSource getStorageUnitDataSource(final Map<StorageNode, DataSource> storageNodeDataSources, final StorageUnitNodeMapper unitNodeMapper) {
+        DataSource dataSource = storageNodeDataSources.get(unitNodeMapper.getStorageNode());
+        return new CatalogSwitchableDataSource(dataSource, unitNodeMapper.getCatalog(), unitNodeMapper.getUrl());
+    }
+
+    private Map<StorageNode, DataSource> getEnabledStorageNodeDataSources(final String databaseName, final Map<StorageNode, DataSource> storageNodeDataSources) {
+        Map<String, DataSource> toBeCheckedDataSources = new LinkedHashMap<>(storageNodeDataSources.size(), 1F);
+        for (Entry<StorageNode, DataSource> entry : storageNodeDataSources.entrySet()) {
+            toBeCheckedDataSources.put(entry.getKey().getName(), entry.getValue());
+        }
+        Map<String, DataSource> enabledDataSources = DataSourceStateManager.getInstance().getEnabledDataSources(databaseName, toBeCheckedDataSources);
+        return storageNodeDataSources.entrySet().stream()
+                .filter(entry -> enabledDataSources.containsKey(entry.getKey().getName())).collect(Collectors.toMap(Entry::getKey, Entry::getValue));
+    }
+
+    private DatabaseType createStorageType(final Map<StorageNode, DataSource> enabledStorageNodeDataSources, final StorageUnitNodeMapper unitNodeMapper) {
+        return DatabaseTypeEngine.getStorageType(enabledStorageNodeDataSources.containsKey(unitNodeMapper.getStorageNode())
+                ? Collections.singleton(enabledStorageNodeDataSources.get(unitNodeMapper.getStorageNode()))
+                : Collections.emptyList());
+    }
+
+    private Optional<ConnectionProperties> createConnectionProperties(final Map<StorageNode, DataSource> enabledStorageNodeDataSources,
+                                                                      final StorageUnitNodeMapper unitNodeMapper, final DatabaseType storageType) {
+        if (!enabledStorageNodeDataSources.containsKey(unitNodeMapper.getStorageNode())) {
+            return Optional.empty();
+        }
+        Map<String, Object> standardProps = DataSourcePoolPropertiesCreator.create(
+                enabledStorageNodeDataSources.get(unitNodeMapper.getStorageNode())).getConnectionPropertySynonyms().getStandardProperties();
+        ConnectionPropertiesParser parser = DatabaseTypedSPILoader.getService(ConnectionPropertiesParser.class, storageType);
+        return Optional.of(parser.parse(standardProps.get("url").toString(), standardProps.get("username").toString(), unitNodeMapper.getCatalog()));
+    }
+}
diff --git a/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageUnitMetaData.java b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageUnitMetaData.java
new file mode 100644
index 00000000000..d6525540013
--- /dev/null
+++ b/infra/common/src/main/java/org/apache/shardingsphere/infra/metadata/database/resource/storage/StorageUnitMetaData.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.shardingsphere.infra.metadata.database.resource.storage;
+
+import lombok.Getter;
+import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
+import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
+
+import javax.sql.DataSource;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+
+/**
+ * Storage unit meta data.
+ */
+@Getter
+public final class StorageUnitMetaData {
+
+    private final Map<String, DataSourcePoolProperties> dataSourcePoolPropertiesMap;
+
+    // TODO zhangliang: should refactor
+    private final Map<String, StorageUnitNodeMapper> unitNodeMappers;
+
+    private final Map<String, StorageUnit> storageUnits;
+
+    // TODO zhangliang: should refactor
+    private final Map<String, DataSource> dataSources;
+
+    // TODO zhangliang: should refactor
+    private final Map<String, DatabaseType> storageTypes;
+
+    public StorageUnitMetaData(final String databaseName, final Map<StorageNode, DataSource> storageNodeDataSources,
+                               final Map<String, DataSourcePoolProperties> dataSourcePoolPropertiesMap, final Map<String, StorageUnitNodeMapper> unitNodeMappers) {
+        this.dataSourcePoolPropertiesMap = dataSourcePoolPropertiesMap;
+        this.unitNodeMappers = unitNodeMappers;
+        storageUnits = new LinkedHashMap<>(unitNodeMappers.size(), 1F);
+        for (Entry<String, StorageUnitNodeMapper> entry : unitNodeMappers.entrySet()) {
+            storageUnits.put(entry.getKey(), new StorageUnit(databaseName, storageNodeDataSources, dataSourcePoolPropertiesMap.get(entry.getKey()), entry.getValue()));
+        }
+        dataSources = createDataSources();
+        storageTypes = createStorageTypes();
+    }
+
+    private Map<String, DataSource> createDataSources() {
+        Map<String, DataSource> result = new LinkedHashMap<>(storageUnits.size(), 1F);
+        for (Entry<String, StorageUnit> entry : storageUnits.entrySet()) {
+            result.put(entry.getKey(), entry.getValue().getDataSource());
+        }
+        return result;
+    }
+
+    private Map<String, DatabaseType> createStorageTypes() {
+        Map<String, DatabaseType> result = new LinkedHashMap<>(storageUnits.size(), 1F);
+        for (Entry<String, StorageUnit> entry : storageUnits.entrySet()) {
+            result.put(entry.getKey(), entry.getValue().getStorageType());
+        }
+        return result;
+    }
+}
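
For context, a minimal usage sketch of the API this commit introduces follows. It is not part of the commit itself: the database name, the HikariCP-backed data source and its settings are assumptions made for illustration, and the return type of StorageResourceUtils.getStorageNodeDataSources is inferred from the hunks above; only members visible in this diff (getStorageUnits(), getStorageType(), getConnectionProperties(), getDataSource()) are exercised.

    // Illustrative sketch only -- wires up the refactored StorageUnitMetaData/StorageUnit API.
    import com.zaxxer.hikari.HikariDataSource;
    import org.apache.shardingsphere.infra.database.core.connector.ConnectionProperties;
    import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
    import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
    import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
    import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageNode;
    import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageResourceUtils;
    import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnit;
    import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnitMetaData;
    import org.apache.shardingsphere.infra.metadata.database.resource.storage.StorageUnitNodeMapper;
    
    import javax.sql.DataSource;
    import java.util.LinkedHashMap;
    import java.util.Map;
    
    public final class StorageUnitUsageSketch {
        
        public static void main(final String[] args) {
            // One logical storage unit "ds_0" backed by a hypothetical MySQL instance.
            HikariDataSource hikariDataSource = new HikariDataSource();
            hikariDataSource.setJdbcUrl("jdbc:mysql://127.0.0.1:3306/demo_ds");
            hikariDataSource.setUsername("root");
            Map<String, DataSource> dataSources = new LinkedHashMap<>();
            dataSources.put("ds_0", hikariDataSource);
            // The refactored StorageResourceUtils derives node data sources and unit-node mappers per data source.
            Map<StorageNode, DataSource> storageNodeDataSources = StorageResourceUtils.getStorageNodeDataSources(dataSources);
            Map<String, StorageUnitNodeMapper> unitNodeMappers = StorageResourceUtils.getStorageUnitNodeMappers(dataSources);
            // Pool properties keyed by storage unit name, matching the new StorageUnitMetaData constructor.
            Map<String, DataSourcePoolProperties> poolPropsMap = new LinkedHashMap<>();
            dataSources.forEach((name, each) -> poolPropsMap.put(name, DataSourcePoolPropertiesCreator.create(each)));
            StorageUnitMetaData metaData = new StorageUnitMetaData("sharding_db", storageNodeDataSources, poolPropsMap, unitNodeMappers);
            // Per-unit details now hang off a single StorageUnit instead of parallel maps.
            StorageUnit unit = metaData.getStorageUnits().get("ds_0");
            DatabaseType storageType = unit.getStorageType();
            ConnectionProperties connectionProps = unit.getConnectionProperties();
            DataSource wrappedDataSource = unit.getDataSource();
            System.out.printf("ds_0 -> storage type: %s, connection properties resolved: %s, data source wrapper: %s%n",
                    storageType, null != connectionProps, wrappedDataSource.getClass().getSimpleName());
        }
    }

The point of the refactoring shows in the last few lines: callers look up one StorageUnit per unit name rather than consulting the parallel dataSources, storageTypes and connectionPropertiesMap maps that the deleted StorageUnitMetaData kept in sync by hand.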