This is an automated email from the ASF dual-hosted git repository.

ntimofeev pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/cayenne.git
The following commit(s) were added to refs/heads/master by this push:
     new 394feba6a CAY-2788 DbImport: Add fallback option for the batch attribute loader
394feba6a is described below

commit 394feba6a72db92e6543046308db4c68c43b447b
Author: Nikita Timofeev <stari...@gmail.com>
AuthorDate: Thu Dec 29 11:27:24 2022 +0300

    CAY-2788 DbImport: Add fallback option for the batch attribute loader
---
 RELEASE-NOTES.txt                                  |  1 +
 .../dbsync/reverse/dbload/AttributeLoader.java     | 90 +++-------------------
 ...ttributeLoader.java => AttributeProcessor.java} | 75 ++++++++----------
 .../cayenne/dbsync/reverse/dbload/DbLoader.java    |  1 +
 .../reverse/dbload/FallbackAttributeLoader.java    | 62 +++++++++++++++
 .../reverse/dbload/PerCatalogAndSchemaLoader.java  |  8 ++
 6 files changed, 113 insertions(+), 124 deletions(-)

diff --git a/RELEASE-NOTES.txt b/RELEASE-NOTES.txt
index 516af8519..405d24a19 100644
--- a/RELEASE-NOTES.txt
+++ b/RELEASE-NOTES.txt
@@ -34,6 +34,7 @@ CAY-2773 Switch master to "5.0"
 CAY-2776 Upgrade Gradle to 7.6
 CAY-2780 Modeler: Multiple configurations for classes generation
 CAY-2781 Enable 'Create PK properties' by default in the cgen configuration
+CAY-2788 DbImport: Add fallback option for the batch attribute loader
 
 Bug Fixes:
 
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/AttributeLoader.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/AttributeLoader.java
index 9ca2d054d..f9b89d3a1 100644
--- a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/AttributeLoader.java
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/AttributeLoader.java
@@ -21,16 +21,12 @@ package org.apache.cayenne.dbsync.reverse.dbload;
 
 import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
 
 import org.apache.cayenne.dba.DbAdapter;
-import org.apache.cayenne.dba.TypesMapping;
-import org.apache.cayenne.dbsync.model.DetectedDbAttribute;
 import org.apache.cayenne.dbsync.reverse.filters.CatalogFilter;
 import org.apache.cayenne.dbsync.reverse.filters.PatternFilter;
 import org.apache.cayenne.dbsync.reverse.filters.SchemaFilter;
-import org.apache.cayenne.map.DbAttribute;
 import org.apache.cayenne.map.DbEntity;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -39,13 +35,11 @@ class AttributeLoader extends PerCatalogAndSchemaLoader {
 
     private static final Logger LOGGER = LoggerFactory.getLogger(DbLoader.class);
 
-    private boolean firstRow;
-    private boolean supportAutoIncrement;
+    private final AttributeProcessor attributeProcessor;
 
     AttributeLoader(DbAdapter adapter, DbLoaderConfiguration config, DbLoaderDelegate delegate) {
         super(adapter, config, delegate);
-        firstRow = true;
-        supportAutoIncrement = false;
+        attributeProcessor = new AttributeProcessor(adapter);
     }
 
     protected ResultSet getResultSet(String catalogName, String schemaName, DatabaseMetaData metaData) throws SQLException {
@@ -53,12 +47,15 @@ class AttributeLoader extends PerCatalogAndSchemaLoader {
     }
 
     @Override
-    protected void processResultSetRow(CatalogFilter catalog, SchemaFilter schema, DbLoadDataStore map, ResultSet rs) throws SQLException {
-        if (firstRow) {
-            supportAutoIncrement = checkForAutoIncrement(rs);
-            firstRow = false;
-        }
+    boolean catchException(String catalogName, String schemaName, SQLException ex) {
+        String message = "Unable to load columns for the "
+                + catalogName + "/" + schemaName + ", falling back to per-entity loader.";
+        LOGGER.warn(message, ex);
+        return true;
+    }
 
+    @Override
+    protected void processResultSetRow(CatalogFilter catalog, SchemaFilter schema, DbLoadDataStore map, ResultSet rs) throws SQLException {
         // for a reason not quiet apparent to me, Oracle sometimes
         // returns duplicate record sets for the same table, messing up
         // table names. E.g. for the system table "WK$_ATTR_MAPPING" columns
@@ -70,73 +67,8 @@ class AttributeLoader extends PerCatalogAndSchemaLoader {
             return;
         }
 
-        // Filter out columns by name
         String columnName = rs.getString("COLUMN_NAME");
         PatternFilter columnFilter = schema.tables.getIncludeTableColumnFilter(tableName);
-        if (columnFilter == null || !columnFilter.isIncluded(columnName)) {
-            if (LOGGER.isDebugEnabled()) {
-                LOGGER.debug("Skip column '" + tableName + "." + columnName +
-                        "' (Path: " + catalog.name + "/" + schema.name + "; Filter: " + columnFilter + ")");
-            }
-            return;
-        }
-
-        DbAttribute attribute = createDbAttribute(rs);
-        addToDbEntity(entity, attribute);
-    }
-
-    private boolean checkForAutoIncrement(ResultSet rs) throws SQLException {
-        ResultSetMetaData rsMetaData = rs.getMetaData();
-        for (int i = 1; i <= rsMetaData.getColumnCount(); i++) {
-            if("IS_AUTOINCREMENT".equals(rsMetaData.getColumnLabel(i))) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    private void addToDbEntity(DbEntity entity, DbAttribute attribute) {
-        attribute.setEntity(entity);
-
-        // override existing attributes if it comes again
-        if (entity.getAttribute(attribute.getName()) != null) {
-            entity.removeAttribute(attribute.getName());
-        }
-        entity.addAttribute(attribute);
-    }
-
-    private DbAttribute createDbAttribute(ResultSet rs) throws SQLException {
-
-        // gets attribute's (column's) information
-        int columnType = rs.getInt("DATA_TYPE");
-
-        // ignore precision of non-decimal columns
-        int decimalDigits = -1;
-        if (TypesMapping.isDecimal(columnType)) {
-            decimalDigits = rs.getInt("DECIMAL_DIGITS");
-            if (rs.wasNull()) {
-                decimalDigits = -1;
-            }
-        }
-
-        // create attribute delegating this task to adapter
-        DetectedDbAttribute detectedDbAttribute = new DetectedDbAttribute(adapter.buildAttribute(
-                rs.getString("COLUMN_NAME"),
-                rs.getString("TYPE_NAME"),
-                columnType,
-                rs.getInt("COLUMN_SIZE"),
-                decimalDigits,
-                rs.getBoolean("NULLABLE")));
-
-        // store raw type name
-        detectedDbAttribute.setJdbcTypeName(rs.getString("TYPE_NAME"));
-
-        if (supportAutoIncrement) {
-            if ("YES".equals(rs.getString("IS_AUTOINCREMENT"))) {
-                detectedDbAttribute.setGenerated(true);
-            }
-        }
-
-        return detectedDbAttribute;
+        attributeProcessor.processAttribute(rs, columnFilter, entity);
     }
 }
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/AttributeLoader.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/AttributeProcessor.java
similarity index 68%
copy from cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/AttributeLoader.java
copy to cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/AttributeProcessor.java
index 9ca2d054d..044a528a8 100644
--- a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/AttributeLoader.java
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/AttributeProcessor.java
@@ -19,64 +19,58 @@
 
 package org.apache.cayenne.dbsync.reverse.dbload;
 
-import java.sql.DatabaseMetaData;
-import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-
 import org.apache.cayenne.dba.DbAdapter;
 import org.apache.cayenne.dba.TypesMapping;
 import org.apache.cayenne.dbsync.model.DetectedDbAttribute;
-import org.apache.cayenne.dbsync.reverse.filters.CatalogFilter;
 import org.apache.cayenne.dbsync.reverse.filters.PatternFilter;
-import org.apache.cayenne.dbsync.reverse.filters.SchemaFilter;
 import org.apache.cayenne.map.DbAttribute;
 import org.apache.cayenne.map.DbEntity;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-class AttributeLoader extends PerCatalogAndSchemaLoader {
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+
+/**
+ * Helper class to process attribute data {@link ResultSet}
+ *
+ * @since 5.0
+ */
+class AttributeProcessor {
 
     private static final Logger LOGGER = LoggerFactory.getLogger(DbLoader.class);
 
-    private boolean firstRow;
-    private boolean supportAutoIncrement;
+    private final DbAdapter adapter;
+    boolean firstRow;
+    boolean supportAutoIncrement;
 
-    AttributeLoader(DbAdapter adapter, DbLoaderConfiguration config, DbLoaderDelegate delegate) {
-        super(adapter, config, delegate);
-        firstRow = true;
-        supportAutoIncrement = false;
+    AttributeProcessor(DbAdapter adapter) {
+        this.adapter = adapter;
+        this.firstRow = true;
    }
 
-    protected ResultSet getResultSet(String catalogName, String schemaName, DatabaseMetaData metaData) throws SQLException {
-        return metaData.getColumns(catalogName, schemaName, WILDCARD, WILDCARD);
+    private boolean checkForAutoIncrement(ResultSet rs) throws SQLException {
+        ResultSetMetaData rsMetaData = rs.getMetaData();
+        for (int i = 1; i <= rsMetaData.getColumnCount(); i++) {
+            if ("IS_AUTOINCREMENT".equals(rsMetaData.getColumnLabel(i))) {
+                return true;
+            }
+        }
+        return false;
     }
 
-    @Override
-    protected void processResultSetRow(CatalogFilter catalog, SchemaFilter schema, DbLoadDataStore map, ResultSet rs) throws SQLException {
-        if (firstRow) {
+    void processAttribute(ResultSet rs, PatternFilter columnFilter, DbEntity entity) throws SQLException {
+        if(firstRow) {
             supportAutoIncrement = checkForAutoIncrement(rs);
             firstRow = false;
         }
 
-        // for a reason not quiet apparent to me, Oracle sometimes
-        // returns duplicate record sets for the same table, messing up
-        // table names. E.g. for the system table "WK$_ATTR_MAPPING" columns
-        // are returned twice - as "WK$_ATTR_MAPPING" and "WK$$_ATTR_MAPPING"...
-        // Go figure
-        String tableName = rs.getString("TABLE_NAME");
-        DbEntity entity = map.getDbEntity(tableName);
-        if(entity == null) {
-            return;
-        }
-
-        // Filter out columns by name
         String columnName = rs.getString("COLUMN_NAME");
-        PatternFilter columnFilter = schema.tables.getIncludeTableColumnFilter(tableName);
         if (columnFilter == null || !columnFilter.isIncluded(columnName)) {
             if (LOGGER.isDebugEnabled()) {
-                LOGGER.debug("Skip column '" + tableName + "." + columnName +
-                        "' (Path: " + catalog.name + "/" + schema.name + "; Filter: " + columnFilter + ")");
+                LOGGER.debug("Skip column '" + entity.getName() + "." + columnName +
+                        "' (Path: " + entity.getCatalog() + "/" + entity.getSchema() + "; Filter: " + columnFilter + ")");
             }
             return;
         }
@@ -85,16 +79,6 @@ class AttributeLoader extends PerCatalogAndSchemaLoader {
         addToDbEntity(entity, attribute);
     }
 
-    private boolean checkForAutoIncrement(ResultSet rs) throws SQLException {
-        ResultSetMetaData rsMetaData = rs.getMetaData();
-        for (int i = 1; i <= rsMetaData.getColumnCount(); i++) {
-            if("IS_AUTOINCREMENT".equals(rsMetaData.getColumnLabel(i))) {
-                return true;
-            }
-        }
-        return false;
-    }
-
     private void addToDbEntity(DbEntity entity, DbAttribute attribute) {
         attribute.setEntity(entity);
 
@@ -139,4 +123,5 @@ class AttributeLoader extends PerCatalogAndSchemaLoader {
 
         return detectedDbAttribute;
     }
-}
+
+}
\ No newline at end of file
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/DbLoader.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/DbLoader.java
index 2ce236079..90e53eef4 100644
--- a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/DbLoader.java
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/DbLoader.java
@@ -64,6 +64,7 @@ public class DbLoader {
     private void createLoaders() {
         loaders.add(new EntityLoader(adapter, config, delegate));
         loaders.add(new AttributeLoader(adapter, config, delegate));
+        loaders.add(new FallbackAttributeLoader(adapter, config, delegate));
         loaders.add(new PrimaryKeyLoader(config, delegate));
         loaders.add(new ExportedKeyLoader(config, delegate));
         loaders.add(new RelationshipLoader(config, delegate, nameGenerator));
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/FallbackAttributeLoader.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/FallbackAttributeLoader.java
new file mode 100644
index 000000000..eb99d60e0
--- /dev/null
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/FallbackAttributeLoader.java
@@ -0,0 +1,62 @@
+/*****************************************************************
+ *   Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    https://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ ****************************************************************/
+
+package org.apache.cayenne.dbsync.reverse.dbload;
+
+import org.apache.cayenne.dba.DbAdapter;
+import org.apache.cayenne.dbsync.reverse.filters.PatternFilter;
+import org.apache.cayenne.map.DbEntity;
+
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+
+/**
+ * Attribute loader that goes on per-entity level in case {@link AttributeLoader} has failed for some reason.
+ * @since 5.0
+ */
+class FallbackAttributeLoader extends PerEntityLoader {
+
+    private final AttributeProcessor attributeProcessor;
+
+    FallbackAttributeLoader(DbAdapter adapter, DbLoaderConfiguration config, DbLoaderDelegate delegate) {
+        super(adapter, config, delegate);
+        this.attributeProcessor = new AttributeProcessor(adapter);
+    }
+
+    @Override
+    ResultSet getResultSet(DbEntity dbEntity, DatabaseMetaData metaData) throws SQLException {
+        return metaData.getColumns(dbEntity.getCatalog(), dbEntity.getSchema(), dbEntity.getName(), WILDCARD);
+    }
+
+    @Override
+    boolean shouldLoad(DbEntity entity) {
+        return entity.getAttributes().size() == 0;
+    }
+
+    @Override
+    void processResultSet(DbEntity dbEntity, DbLoadDataStore map, ResultSet rs) throws SQLException {
+        PatternFilter columnFilter = config.getFiltersConfig()
+                .tableFilter(dbEntity.getCatalog(), dbEntity.getSchema())
+                .getIncludeTableColumnFilter(dbEntity.getName());
+
+        attributeProcessor.processAttribute(rs, columnFilter, dbEntity);
+    }
+}
diff --git a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/PerCatalogAndSchemaLoader.java b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/PerCatalogAndSchemaLoader.java
index 03bf5d829..5d18257b0 100644
--- a/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/PerCatalogAndSchemaLoader.java
+++ b/cayenne-dbsync/src/main/java/org/apache/cayenne/dbsync/reverse/dbload/PerCatalogAndSchemaLoader.java
@@ -43,11 +43,19 @@ public abstract class PerCatalogAndSchemaLoader extends AbstractLoader {
                 while (rs.next()) {
                     processResultSetRow(catalog, schema, map, rs);
                 }
+            } catch (SQLException ex) {
+                if(!catchException(catalog.name, schema.name, ex)) {
+                    throw ex;
+                }
             }
         }
     }
 
+    boolean catchException(String catalogName, String schemaName, SQLException ex) {
+        return false;
+    }
+
     boolean shouldLoad(CatalogFilter catalog, SchemaFilter schema) {
         return true;
     }
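For context, the control flow introduced by this patch can be restated outside of Cayenne's loader class hierarchy. The sketch below is illustrative only, not code from the commit: the ColumnLoadSketch class and its method names are made up, and it uses plain JDBC calls instead of the dbsync types. It mirrors how AttributeLoader.catchException() swallows a failed batch DatabaseMetaData.getColumns() call so that a second, per-table pass (FallbackAttributeLoader) can still populate the columns through the shared AttributeProcessor.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;

/**
 * Illustrative sketch of the fallback strategy added in CAY-2788.
 * Names and structure are simplified; only the control flow matches the commit.
 */
public class ColumnLoadSketch {

    /**
     * First try the batch call (one getColumns() per catalog/schema, as AttributeLoader does).
     * If the driver throws, log and fall back to one getColumns() call per table,
     * as FallbackAttributeLoader does for entities that still have no attributes.
     */
    void loadColumns(Connection connection, String catalog, String schema, List<String> tables) throws SQLException {
        DatabaseMetaData metaData = connection.getMetaData();
        try (ResultSet rs = metaData.getColumns(catalog, schema, "%", "%")) {
            while (rs.next()) {
                processColumn(rs); // analogous to AttributeProcessor.processAttribute()
            }
            return;
        } catch (SQLException ex) {
            // analogous to AttributeLoader.catchException(): warn and allow the fallback pass
            System.err.println("Batch column load failed for " + catalog + "/" + schema
                    + ", falling back to per-table loading: " + ex.getMessage());
        }

        // per-table fallback, analogous to FallbackAttributeLoader
        for (String table : tables) {
            try (ResultSet rs = metaData.getColumns(catalog, schema, table, "%")) {
                while (rs.next()) {
                    processColumn(rs);
                }
            }
        }
    }

    private void processColumn(ResultSet rs) throws SQLException {
        // a real implementation would build a DbAttribute here; this just reads the name
        String tableName = rs.getString("TABLE_NAME");
        String columnName = rs.getString("COLUMN_NAME");
        System.out.println(tableName + "." + columnName);
    }
}

In the commit itself the second pass is wired up in DbLoader.createLoaders(), which registers FallbackAttributeLoader right after AttributeLoader, and FallbackAttributeLoader.shouldLoad() restricts that pass to entities whose attribute list is still empty after the batch load.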