[ https://issues.apache.org/jira/browse/HIVE-27234?focusedWorklogId=859548&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-859548 ]
ASF GitHub Bot logged work on HIVE-27234: ----------------------------------------- Author: ASF GitHub Bot Created on: 28/Apr/23 04:46 Start Date: 28/Apr/23 04:46 Worklog Time Spent: 10m Work Description: ayushtkn commented on code in PR #4216: URL: https://github.com/apache/hive/pull/4216#discussion_r1179920182 ########## ql/src/java/org/apache/hadoop/hive/ql/ddl/table/branch/create/AlterTableCreateBranchAnalyzer.java: ########## @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.ddl.table.branch.create; + +import java.util.Locale; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import org.apache.hadoop.hive.common.TableName; +import org.apache.hadoop.hive.ql.QueryState; +import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory; +import org.apache.hadoop.hive.ql.ddl.DDLWork; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer; +import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; +import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.AlterTableCreateBranchSpec; +import org.apache.hadoop.hive.ql.parse.HiveParser; +import org.apache.hadoop.hive.ql.parse.SemanticException; + +@DDLSemanticAnalyzerFactory.DDLType(types = HiveParser.TOK_ALTERTABLE_CREATE_BRANCH) +public class AlterTableCreateBranchAnalyzer extends AbstractAlterTableAnalyzer { + + public AlterTableCreateBranchAnalyzer(QueryState queryState) throws SemanticException { + super(queryState); + } + + @Override + protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) + throws SemanticException { + Table table = getTable(tableName); + validateAlterTableType(table, AlterTableType.CREATEBRANCH, false); + if (!"ICEBERG".equalsIgnoreCase(table.getParameters().get("table_type"))) { + throw new SemanticException("Cannot perform ALTER CREATE BRANCH statement on non-iceberg table."); + } + inputs.add(new ReadEntity(table)); + + String branchName = command.getChild(0).getText(); + Long snapshotId = null; + Long maxRefAgeMs = null; + Integer minSnapshotsToKeep = null; + Long maxSnapshotAgeMs = null; + for (int i = 1; i < command.getChildCount(); i++) { + ASTNode childNode = (ASTNode) command.getChild(i); + switch (childNode.getToken().getType()) { + case 
HiveParser.TOK_AS_OF_VERSION_BRANCH: + snapshotId = Long.valueOf(childNode.getChild(0).getText()); + break; + case HiveParser.TOK_RETAIN: + String maxRefAge = childNode.getChild(0).getText(); + String timeUnitOfBranchRetain = childNode.getChild(1).getText(); + maxRefAgeMs = TimeUnit.valueOf(timeUnitOfBranchRetain.toUpperCase(Locale.ENGLISH)).toMillis(Long.valueOf(maxRefAge)); + break; + case HiveParser.TOK_WITH_SNAPSHOT_RETENTION: + minSnapshotsToKeep = Integer.valueOf(childNode.getChild(0).getText()); + if (childNode.getChildren().size() > 1) { + String maxSnapshotAge = childNode.getChild(1).getText(); + String timeUnitOfSnapshotsRetention = childNode.getChild(2).getText(); + maxSnapshotAgeMs = TimeUnit.valueOf(timeUnitOfSnapshotsRetention.toUpperCase(Locale.ENGLISH)).toMillis(Long.valueOf(maxSnapshotAge)); Review Comment: use ``Long.parseLong(maxSnapshotAge)`` Issue Time Tracking ------------------- Worklog Id: (was: 859548) Time Spent: 2h 40m (was: 2.5h) > Iceberg: CREATE BRANCH SQL implementation > ------------------------------------------ > > Key: HIVE-27234 > URL: https://issues.apache.org/jira/browse/HIVE-27234 > Project: Hive > Issue Type: Sub-task > Components: Iceberg integration > Reporter: zhangbutao > Assignee: zhangbutao > Priority: Major > Labels: pull-request-available > Time Spent: 2h 40m > Remaining Estimate: 0h > > Maybe we can follow spark sql about branch ddl implementation > [https://github.com/apache/iceberg/pull/6617] -- This message was sent by Atlassian Jira (v8.20.10#820010)