sankarh commented on a change in pull request #551: HIVE-21286: Hive should support clean-up of previously bootstrapped tables when retry from different dump.
URL: https://github.com/apache/hive/pull/551#discussion_r262345212
########## File path: ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java ##########

@@ -279,6 +292,72 @@ a database ( directory )
     return 0;
   }

+  /**
+   * Cleanup/drop tables from the given database which are bootstrapped by input dump dir.
+   * @throws HiveException Failed to drop the tables.
+   * @throws IOException File operations failure.
+   * @throws InvalidInputException Invalid input dump directory.
+   */
+  private void bootstrapRollbackTask() throws HiveException, IOException, InvalidInputException {
+    Path bootstrapDirectory = new PathBuilder(work.bootstrapDumpToRollback)
+        .addDescendant(ReplUtils.INC_BOOTSTRAP_ROOT_DIR_NAME).build();
+    FileSystem fs = bootstrapDirectory.getFileSystem(conf);
+
+    if (!fs.exists(bootstrapDirectory)) {
+      throw new InvalidInputException("Input bootstrap dump directory to rollback doesn't exist: "
+          + bootstrapDirectory);
+    }
+
+    FileStatus[] fileStatuses = fs.listStatus(bootstrapDirectory, EximUtil.getDirectoryFilter(fs));
+    if ((fileStatuses == null) || (fileStatuses.length == 0)) {
+      throw new InvalidInputException("Input bootstrap dump directory to rollback is empty: "
+          + bootstrapDirectory);
+    }
+
+    if (StringUtils.isNotBlank(work.dbNameToLoadIn) && (fileStatuses.length > 1)) {
+      throw new InvalidInputException("Multiple DB dirs in the dump: " + bootstrapDirectory
+          + " is not allowed to load to single target DB: " + work.dbNameToLoadIn);
+    }
+
+    for (FileStatus dbDir : fileStatuses) {
+      Path dbLevelPath = dbDir.getPath();
+      String dbNameInDump = dbLevelPath.getName();
+
+      List<String> tableNames = new ArrayList<>();
+      RemoteIterator<LocatedFileStatus> filesIterator = fs.listFiles(dbLevelPath, true);
+      while (filesIterator.hasNext()) {
+        Path nextFile = filesIterator.next().getPath();
+        String filePath = nextFile.toString();
+        if (filePath.endsWith(EximUtil.METADATA_NAME)) {
+          // Remove dbLevelPath from the current path to check if this _metadata file is
+          // under a DB-level or table-level directory.
+          String replacedString = filePath.replace(dbLevelPath.toString(), "");
+          if (!replacedString.equalsIgnoreCase(EximUtil.METADATA_NAME)) {
+            tableNames.add(nextFile.getParent().getName());
+          }
+        }
+      }
+
+      // No tables listed in the DB level directory to be dropped.
+      if (tableNames.isEmpty()) {
+        LOG.info("No tables are listed to be dropped for Database: {} in bootstrap dump: {}",
+            dbNameInDump, bootstrapDirectory);
+        continue;
+      }
+
+      // Drop all tables bootstrapped from the previous dump.
+      // Get the target DB in which the previously bootstrapped tables are to be dropped. If the user
+      // specified a DB name as input in the REPL LOAD command, then use it.
+      String dbName = (StringUtils.isNotBlank(work.dbNameToLoadIn) ? work.dbNameToLoadIn : dbNameInDump);
+
+      Hive db = getHive();
+      for (String table : tableNames) {
+        db.dropTable(dbName + "." + table, true);

Review comment:
   That's not a problem. It is the expected behaviour for external tables, where Hive shouldn't delete those directories. Even in the source, these directories remain after the table is dropped. Also, if the same table is found in a new bootstrap dump, this avoids re-copying those files again.
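To make the DB-level vs. table-level _metadata check in the diff above concrete, here is a minimal, self-contained sketch of the same suffix logic using plain java.nio paths. The dump layout and the names (sales_db, orders, customers, MetadataPathCheckSketch) are made up for illustration; the real patch works against org.apache.hadoop.fs.Path objects returned by FileSystem.listFiles() and against EximUtil.METADATA_NAME rather than the hard-coded strings used here.

   import java.nio.file.Path;
   import java.nio.file.Paths;
   import java.util.ArrayList;
   import java.util.Arrays;
   import java.util.List;

   public class MetadataPathCheckSketch {
     public static void main(String[] args) {
       // Hypothetical dump layout; in the patch these paths come from a recursive listing
       // of the DB-level directory inside the bootstrap dump.
       String dbLevelPath = "/repldump/incr1/_bootstrap/sales_db";
       List<String> metadataFiles = Arrays.asList(
           dbLevelPath + "/_metadata",             // DB-level _metadata: no table to drop
           dbLevelPath + "/orders/_metadata",      // table-level: collect table "orders"
           dbLevelPath + "/customers/_metadata");  // table-level: collect table "customers"

       List<String> tableNames = new ArrayList<>();
       for (String filePath : metadataFiles) {
         // Strip the DB-level prefix; if anything more than "/_metadata" remains, the file
         // sits under a table directory and that directory's name is the table to drop.
         String remainder = filePath.replace(dbLevelPath, "");
         if (!remainder.equals("/_metadata")) {
           Path metadataPath = Paths.get(filePath);
           tableNames.add(metadataPath.getParent().getFileName().toString());
         }
       }
       System.out.println(tableNames); // prints [orders, customers]
     }
   }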