[ https://issues.apache.org/jira/browse/HIVE-23353?focusedWorklogId=434400&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-434400 ]

ASF GitHub Bot logged work on HIVE-23353:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 18/May/20 14:11
            Start Date: 18/May/20 14:11
    Worklog Time Spent: 10m 
      Work Description: aasha commented on a change in pull request #1021:
URL: https://github.com/apache/hive/pull/1021#discussion_r426648656



##########
File path: ql/src/java/org/apache/hadoop/hive/ql/exec/repl/atlas/AtlasExportProcess.java
##########
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.repl.atlas;
+
+import org.apache.atlas.model.impexp.AtlasExportRequest;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * Performs Atlas metadata export.
+ */
+public class AtlasExportProcess extends AtlasProcess {
+  private FileSystem fileSystem = null;
+  protected static final Logger LOG = LoggerFactory.getLogger(AtlasExportProcess.class);
+  private static final int DEF_BUF_SIZE = 8 * 1024;
+
+  public void run(AtlasReplInfo atlasReplInfo) throws SemanticException {
+    LOG.info("HiveAtlasPlugin: Starting export from:{}", atlasReplInfo.getStagingDir());
+    try {
+      AtlasExportRequest exportRequest = atlasRequestBuilder.createExportRequest(atlasReplInfo,
+              getAtlasClusterName(atlasReplInfo.getSrcCluster()));
+      InputStream inputStream = exportData(atlasReplInfo.getAtlasEndpoint(), exportRequest, atlasReplInfo.getConf());
+      FileSystem fs = getFileSystem(atlasReplInfo);
+      Path exportFilePath = new Path(atlasReplInfo.getStagingDir(), ReplUtils.REPL_ATLAS_EXPORT_FILE_NAME);
+      writeDataToFile(fs, exportFilePath, inputStream);
+    } catch (SemanticException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new SemanticException(ex);
+    }
+  }
+
+  FileSystem getFileSystem(AtlasReplInfo atlasReplInfo) throws IOException {
+    if (fileSystem != null) {
+      return fileSystem;
+    }
+    return FileSystem.get(atlasReplInfo.getStagingDir().toUri(), atlasReplInfo.getConf());
+  }
+
+  protected InputStream exportData(String atlasEndpoint, AtlasExportRequest request, HiveConf conf) throws Exception {
+    return getClient(atlasEndpoint, conf).exportData(request);
+  }
+
+  private void writeDataToFile(FileSystem fs, Path exportFilePath, InputStream is) throws IOException {
+    long numBytesWritten = writeFile(fs, exportFilePath, is);
+    LOG.info("HiveAtlasPlugin: writing to {} ({} bytes)", exportFilePath, numBytesWritten);
+  }
+
+  private long writeFile(FileSystem fs, Path exportFilePath, InputStream is) throws IOException {

Review comment:
       This could be moved to a shared utils class.
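
For illustration, such a utility might look like the following sketch (the method name copyToFile and its placement are assumptions, not part of this patch):

    // Hypothetical shared utility -- name and location are assumptions.
    public static long copyToFile(FileSystem fs, Path target, InputStream in, int bufSize)
        throws IOException {
      long bytesWritten = 0;
      try (FSDataOutputStream out = fs.create(target)) {
        byte[] buf = new byte[bufSize];
        int read;
        while ((read = in.read(buf)) != -1) {
          out.write(buf, 0, read);
          bytesWritten += read;
        }
      }
      return bytesWritten;
    }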

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/exec/repl/atlas/AtlasProcess.java
##########
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.repl.atlas;
+
+import org.apache.atlas.model.impexp.AtlasServer;
+import org.apache.atlas.model.instance.AtlasObjectId;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Base class for Atlas Processes, viz. Export & Import
+ */
+public abstract class AtlasProcess {
+  private static final String CLUSTER_NAME_SEPARATOR = "$";
+  protected static final Logger LOG = LoggerFactory.getLogger(AtlasProcess.class);
+
+  private RESTClientBuilder builder = new RESTClientBuilder();
+  protected AtlasRequestBuilder atlasRequestBuilder = new AtlasRequestBuilder();
+
+  protected AtlasRESTClient getClient(String atlasEndpoint, HiveConf conf) throws SemanticException {
+    if (conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST_REPL)) {
+      return new DummyAtlasRESTClient();
+    }
+    return builder.baseUrl(atlasEndpoint).create();
+  }
+
+  public String getEntityGuid(String atlasEndpoint, String typeName, String attributeName,
+                              String attributeValue, HiveConf conf) throws SemanticException {
+    return getClient(atlasEndpoint, conf).getEntityGuid(typeName, attributeName, attributeValue);
+  }
+
+  public boolean getStatus(String atlasEndpoint, HiveConf conf) throws SemanticException {
+    return getClient(atlasEndpoint, conf).getStatus();
+  }
+
+  public abstract void run(AtlasReplInfo atlasReplInfo) throws SemanticException;
+
+  public long getCurrentTimestamp(AtlasReplInfo atlasReplInfo, String entityGuid) throws SemanticException {
+    AtlasRESTClient client = getClient(atlasReplInfo.getAtlasEndpoint(), atlasReplInfo.getConf());
+    AtlasServer atlasServer = client.getServer(atlasReplInfo.getSrcCluster());
+    long ret = (atlasServer == null || atlasServer.getAdditionalInfoRepl(entityGuid) == null)
+            ? 0L : (long) atlasServer.getAdditionalInfoRepl(entityGuid);
+    LOG.debug("HiveAtlasPlugin: fromTimestamp: {}", ret);
+    return ret;
+  }
+
+  public String checkHiveEntityGuid(String atlasEndpoint, String fullyQualifiedClusterName, String srcDb, HiveConf conf)
+          throws SemanticException {
+    String clusterName = getAtlasClusterName(fullyQualifiedClusterName);
+    AtlasObjectId objectId = atlasRequestBuilder.getItemToExport(clusterName, srcDb);
+    Set<Map.Entry<String, Object>> entries = objectId.getUniqueAttributes().entrySet();
+    if (entries == null || entries.isEmpty()) {
+      throw new SemanticException("HiveAtlasPlugin: Could not find entries in objectId for:" + clusterName);
+    }
+    Map.Entry<String, Object> item = entries.iterator().next();
+    String guid = getEntityGuid(atlasEndpoint, objectId.getTypeName(), item.getKey(), (String) item.getValue(), conf);
+    if (guid == null || guid.isEmpty()) {
+      throw new SemanticException("HiveAtlasPlugin: Entity not found:" + objectId);
+    }
+    return guid;
+  }
+
+  protected static String getAtlasClusterName(String clusterName) {

Review comment:
       Is this needed?
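
The method body is truncated in this hunk; presumably it strips the suffix after the '$' separator, roughly as in this sketch (an assumption based on CLUSTER_NAME_SEPARATOR, not the actual patch):

    protected static String getAtlasClusterName(String clusterName) {
      // Assumed behavior: keep only the part before the '$' separator.
      int idx = clusterName.indexOf(CLUSTER_NAME_SEPARATOR);
      return idx >= 0 ? clusterName.substring(0, idx) : clusterName;
    }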

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/exec/repl/AtlasDumpTask.java
##########
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.repl;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.repl.atlas.AtlasProcess;
+import org.apache.hadoop.hive.ql.exec.repl.atlas.AtlasReplInfo;
+import org.apache.hadoop.hive.ql.exec.repl.atlas.AtlasExportProcess;
+import org.apache.hadoop.hive.ql.parse.EximUtil;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.repl.dump.Utils;
+import org.apache.hadoop.hive.ql.plan.api.StageType;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Atlas Metadata Replication Task.
+ **/
+public class AtlasDumpTask extends Task<AtlasDumpWork> implements Serializable {
+
+  private static final long serialVersionUID = 1L;
+
+  @Override
+  public int execute() {
+    try {
+      AtlasReplInfo atlasReplInfo = work.getAtlasReplInfo();
+      LOG.info("Dumping Atlas metadata of srcDb: {}, for TgtDb: {} to staging 
location:",
+              atlasReplInfo.getSrcDB(), atlasReplInfo.getTgtDB(), 
atlasReplInfo.getStagingDir());
+      AtlasProcess atlasProcess = new AtlasExportProcess();
+      String entityGuid = atlasProcess.checkHiveEntityGuid(atlasReplInfo.getAtlasEndpoint(),
+              atlasReplInfo.getSrcCluster(), atlasReplInfo.getSrcDB(), conf);
+      long currentModifiedTime = atlasProcess.getCurrentTimestamp(atlasReplInfo, entityGuid);
+      atlasProcess.run(atlasReplInfo);
+      createDumpMetadata(atlasReplInfo, currentModifiedTime);
+      return 0;
+    } catch (Exception e) {
+      LOG.error("Exception during AtlasDumpTask.execute", e);
+      setException(e);
+      return ErrorMsg.getErrorMsg(e.getMessage()).getErrorCode();
+    }
+  }
+
+  private void createDumpMetadata(AtlasReplInfo atlasReplInfo, long lastModifiedTime) throws SemanticException {
+    Path dumpFile = new Path(atlasReplInfo.getStagingDir(), EximUtil.METADATA_NAME);
+    List<List<String>> listValues = new ArrayList<>();
+    listValues.add(
+            Arrays.asList(
+                    atlasReplInfo.getSrcFsUri(),
+                    String.valueOf(lastModifiedTime)
+            )
+    );
+    Utils.writeOutput(listValues, dumpFile, conf, true);

Review comment:
       Is retry present here?
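
If it is not, a generic retry wrapper could be applied around the write, along these lines (an illustrative sketch; the helper name and linear backoff are assumptions, not the patch's actual retry mechanism):

    // Hypothetical retry helper.
    static <T> T withRetry(Callable<T> action, int maxAttempts, long baseSleepMs) throws Exception {
      Exception last = null;
      for (int attempt = 1; attempt <= maxAttempts; attempt++) {
        try {
          return action.call();
        } catch (Exception e) {
          last = e;
          if (attempt < maxAttempts) {
            Thread.sleep(baseSleepMs * attempt);  // back off before the next attempt
          }
        }
      }
      throw last;
    }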

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/exec/repl/atlas/AtlasExportProcess.java
##########
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.repl.atlas;
+
+import org.apache.atlas.model.impexp.AtlasExportRequest;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * Performs Atlas metadata export.
+ */
+public class AtlasExportProcess extends AtlasProcess {
+  private FileSystem fileSystem = null;
+  protected static final Logger LOG = LoggerFactory.getLogger(AtlasExportProcess.class);
+  private static final int DEF_BUF_SIZE = 8 * 1024;
+
+  public void run(AtlasReplInfo atlasReplInfo) throws SemanticException {
+    LOG.info("HiveAtlasPlugin: Starting export from:{}", atlasReplInfo.getStagingDir());
+    try {
+      AtlasExportRequest exportRequest = atlasRequestBuilder.createExportRequest(atlasReplInfo,
+              getAtlasClusterName(atlasReplInfo.getSrcCluster()));
+      InputStream inputStream = exportData(atlasReplInfo.getAtlasEndpoint(), exportRequest, atlasReplInfo.getConf());
+      FileSystem fs = getFileSystem(atlasReplInfo);
+      Path exportFilePath = new Path(atlasReplInfo.getStagingDir(), ReplUtils.REPL_ATLAS_EXPORT_FILE_NAME);
+      writeDataToFile(fs, exportFilePath, inputStream);
+    } catch (SemanticException ex) {
+      throw ex;
+    } catch (Exception ex) {
+      throw new SemanticException(ex);
+    }
+  }
+
+  FileSystem getFileSystem(AtlasReplInfo atlasReplInfo) throws IOException {
+    if (fileSystem != null) {
+      return fileSystem;
+    }
+    return FileSystem.get(atlasReplInfo.getStagingDir().toUri(), atlasReplInfo.getConf());
+  }
+
+  protected InputStream exportData(String atlasEndpoint, AtlasExportRequest request, HiveConf conf) throws Exception {
+    return getClient(atlasEndpoint, conf).exportData(request);
+  }
+
+  private void writeDataToFile(FileSystem fs, Path exportFilePath, InputStream is) throws IOException {
+    long numBytesWritten = writeFile(fs, exportFilePath, is);
+    LOG.info("HiveAtlasPlugin: writing to {} ({} bytes)", exportFilePath, numBytesWritten);
+  }
+
+  private long writeFile(FileSystem fs, Path exportFilePath, InputStream is) throws IOException {

Review comment:
       Retry is needed here as well.
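
Using a helper like the withRetry sketch shown earlier, the call site could look like this (illustrative only; note the InputStream would have to be re-obtained on every attempt, so the retry has to wrap the export call as well):

    long numBytesWritten = withRetry(() -> {
      // Re-export on each attempt so the stream is fresh.
      InputStream fresh = exportData(atlasReplInfo.getAtlasEndpoint(), exportRequest, atlasReplInfo.getConf());
      return writeFile(fs, exportFilePath, fresh);
    }, 3, 1000L);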

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/exec/repl/atlas/RESTClientBuilder.java
##########
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.repl.atlas;
+
+import org.apache.atlas.ApplicationProperties;
+import org.apache.atlas.AtlasClientV2;
+import org.apache.atlas.AtlasException;
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.configuration.ConfigurationConverter;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Properties;
+
+/**
+ * RestClientBuilder for AtlasRESTClient.
+ */
+public class RESTClientBuilder {
+  private static final Logger LOG = LoggerFactory.getLogger(RESTClientBuilder.class);
+  private static final String ATLAS_PROPERTY_CLIENT_HA_RETRIES_KEY = "atlas.client.ha.retries";
+  private static final String ATLAS_PROPERTY_CLIENT_HA_SLEEP_INTERVAL_MS_KEY = "atlas.client.ha.sleep.interval.ms";
+  private static final String ATLAS_PROPERTY_REST_ADDRESS = "atlas.rest.address";
+  private static final String ATLAS_PROPERTY_AUTH_KERBEROS = "atlas.authentication.method.kerberos";
+  private static final String URL_SEPERATOR = ",";
+
+  private AuthStrategy authStrategy;
+  private UserGroupInformation userGroupInformation;
+  protected String incomingUrl;
+  protected String[] baseUrls;
+
+  enum AuthStrategy {
+    KERBEROS
+  }
+
+  public RESTClientBuilder() {
+  }
+
+  public RESTClientBuilder baseUrl(String urls) {
+    this.incomingUrl = urls;
+    if (urls.contains(URL_SEPERATOR)) {
+      this.baseUrls = urls.split(URL_SEPERATOR);
+    } else {
+      this.baseUrls = new String[]{urls};
+    }
+
+    return this;
+  }
+
+  public RESTClientBuilder setAuthStrategy() throws SemanticException {
+    return inferKerberosAuthStrategy();
+  }
+
+  private RESTClientBuilder inferKerberosAuthStrategy() throws SemanticException {

Review comment:
       This will always be Kerberos, so there is no need to infer it.
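
A simplified version along the reviewer's suggestion might set it directly, for example (a sketch of the suggestion, not the submitted change):

    // Sketch: no inference, Kerberos is the only strategy.
    private void setKerberosAuthStrategy() throws SemanticException {
      try {
        this.authStrategy = AuthStrategy.KERBEROS;
        this.userGroupInformation = UserGroupInformation.getLoginUser();
      } catch (IOException e) {
        throw new SemanticException("Failed to resolve login user for Kerberos auth", e);
      }
    }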

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/exec/repl/atlas/AtlasRESTClientImpl.java
##########
@@ -0,0 +1,173 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.repl.atlas;
+
+import org.apache.atlas.AtlasClientV2;
+import org.apache.atlas.AtlasServiceException;
+import org.apache.atlas.model.impexp.AtlasExportRequest;
+import org.apache.atlas.model.impexp.AtlasImportRequest;
+import org.apache.atlas.model.impexp.AtlasImportResult;
+import org.apache.atlas.model.impexp.AtlasServer;
+import org.apache.atlas.model.instance.AtlasEntity;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import static com.sun.jersey.api.client.ClientResponse.Status.NOT_FOUND;
+
+/**
+ * Implementation of RESTClient, encapsulates Atlas' REST APIs.
+ */
+public class AtlasRESTClientImpl extends RetryingClient implements AtlasRESTClient {
+  private static final Logger LOG = LoggerFactory.getLogger(AtlasRESTClientImpl.class);
+  private final AtlasClientV2 clientV2;
+
+  public AtlasRESTClientImpl(AtlasClientV2 clientV2) {
+    this.clientV2 = clientV2;
+  }
+
+  private static <T> T runWithTimeout(Callable<T> callable, long timeout, TimeUnit timeUnit) throws Exception {
+    final ExecutorService executor = Executors.newSingleThreadExecutor();
+    final Future<T> future = executor.submit(callable);
+    executor.shutdown();
+    try {
+      return future.get(timeout, timeUnit);
+    } catch (TimeoutException e) {
+      future.cancel(true);
+      throw e;
+    } catch (ExecutionException e) {
+      Throwable t = e.getCause();
+      if (t instanceof Error) {
+        throw (Error) t;
+      } else if (t instanceof Exception) {
+        throw (Exception) t;
+      } else {
+        throw new IllegalStateException(t);
+      }
+    }
+  }
+
+  public InputStream exportData(AtlasExportRequest request) throws Exception {
+    LOG.debug("exportData: {}" + request);
+    return invokeWithRetry(new Callable<InputStream>() {
+      @Override
+      public InputStream call() throws Exception {
+        return clientV2.exportData(request);
+      }
+    }, null);
+  }
+
+  public AtlasImportResult importData(AtlasImportRequest request, AtlasReplInfo atlasReplInfo) throws Exception {
+    AtlasImportResult defaultResult = getDefaultAtlasImportResult(request);
+    Path exportFilePath = new Path(atlasReplInfo.getStagingDir(), ReplUtils.REPL_ATLAS_EXPORT_FILE_NAME);
+    FileSystem fs = FileSystem.get(exportFilePath.toUri(), atlasReplInfo.getConf());
+    if (!fs.exists(exportFilePath)) {
+      return defaultResult;
+    }
+    LOG.debug("HiveAtlasPlugin:importData: {}" + request);
+    return invokeWithRetry(new Callable<AtlasImportResult>() {
+      @Override
+      public AtlasImportResult call() throws Exception {
+        InputStream is = null;
+        try {
+          is = fs.open(exportFilePath);
+          return clientV2.importData(request, is);
+        } finally {
+          if (is != null) {
+            is.close();
+          }
+        }
+      }
+    }, defaultResult);
+  }
+
+  private AtlasImportResult getDefaultAtlasImportResult(AtlasImportRequest request) {

Review comment:
       Can this be part of NoOp client?
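
A sketch of what that could look like (the no-op client is hypothetical, the AtlasImportResult constructor arguments are assumptions, and the interface's other methods are omitted):

    // On a hypothetical no-op client: skip the REST call and return a default result.
    @Override
    public AtlasImportResult importData(AtlasImportRequest request, AtlasReplInfo atlasReplInfo) {
      return new AtlasImportResult(request, "", "", "", 0L);
    }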

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/exec/repl/atlas/RESTClientBuilder.java
##########
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.repl.atlas;
+
+import org.apache.atlas.ApplicationProperties;
+import org.apache.atlas.AtlasClientV2;
+import org.apache.atlas.AtlasException;
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.configuration.ConfigurationConverter;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Properties;
+
+/**
+ * RestClientBuilder for AtlasRESTClient.
+ */
+public class RESTClientBuilder {
+  private static final Logger LOG = LoggerFactory.getLogger(RESTClientBuilder.class);
+  private static final String ATLAS_PROPERTY_CLIENT_HA_RETRIES_KEY = "atlas.client.ha.retries";
+  private static final String ATLAS_PROPERTY_CLIENT_HA_SLEEP_INTERVAL_MS_KEY = "atlas.client.ha.sleep.interval.ms";
+  private static final String ATLAS_PROPERTY_REST_ADDRESS = "atlas.rest.address";
+  private static final String ATLAS_PROPERTY_AUTH_KERBEROS = "atlas.authentication.method.kerberos";
+  private static final String URL_SEPERATOR = ",";
+
+  private AuthStrategy authStrategy;
+  private UserGroupInformation userGroupInformation;
+  protected String incomingUrl;
+  protected String[] baseUrls;
+
+  enum AuthStrategy {
+    KERBEROS
+  }
+
+  public RESTClientBuilder() {
+  }
+
+  public RESTClientBuilder baseUrl(String urls) {
+    this.incomingUrl = urls;
+    if (urls.contains(URL_SEPERATOR)) {
+      this.baseUrls = urls.split(URL_SEPERATOR);
+    } else {
+      this.baseUrls = new String[]{urls};
+    }
+
+    return this;
+  }
+
+  public RESTClientBuilder setAuthStrategy() throws SemanticException {
+    return inferKerberosAuthStrategy();
+  }
+
+  private RESTClientBuilder inferKerberosAuthStrategy() throws SemanticException {
+    try {
+      authStrategy = AuthStrategy.KERBEROS;
+      this.userGroupInformation = UserGroupInformation.getLoginUser();
+      LOG.info("HiveAtlasPlugin: authStrategy: {} : urls: {}: userGroupInformation: {}",
+              authStrategy, baseUrls, userGroupInformation);
+    } catch (Exception e) {
+      throw new SemanticException("Error: setAuthStrategy: UserGroupInformation.getLoginUser: failed!", e);
+    }
+    return this;
+  }
+
+  public AtlasRESTClient create() throws SemanticException {
+    if (baseUrls == null || baseUrls.length == 0) {
+      throw new SemanticException("baseUrls is not set.");
+    }
+    setAuthStrategy();
+    initializeAtlasApplicationProperties();
+    AtlasClientV2 clientV2;
+    LOG.info("HiveAtlasPlugin: authStrategyUsed: {} : {}", authStrategy, 
baseUrls);
+    switch (authStrategy) {
+      case KERBEROS:
+        clientV2 = new AtlasClientV2(this.userGroupInformation,
+                this.userGroupInformation.getShortUserName(), baseUrls);
+        return new AtlasRESTClientImpl(clientV2);
+      default:
+        throw new SemanticException("AtlasRESTClient: unsupported auth strategy:" + authStrategy);
+    }
+  }
+
+  private void initializeAtlasApplicationProperties() throws SemanticException {
+    try {
+      ApplicationProperties.set(getClientProperties());

Review comment:
       Does this have to be static?

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/exec/repl/atlas/RESTClientBuilder.java
##########
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.repl.atlas;
+
+import org.apache.atlas.ApplicationProperties;
+import org.apache.atlas.AtlasClientV2;
+import org.apache.atlas.AtlasException;
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.configuration.ConfigurationConverter;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Properties;
+
+/**
+ * RestClientBuilder for AtlasRESTClient.
+ */
+public class RESTClientBuilder {
+  private static final Logger LOG = LoggerFactory.getLogger(RESTClientBuilder.class);
+  private static final String ATLAS_PROPERTY_CLIENT_HA_RETRIES_KEY = "atlas.client.ha.retries";
+  private static final String ATLAS_PROPERTY_CLIENT_HA_SLEEP_INTERVAL_MS_KEY = "atlas.client.ha.sleep.interval.ms";
+  private static final String ATLAS_PROPERTY_REST_ADDRESS = "atlas.rest.address";
+  private static final String ATLAS_PROPERTY_AUTH_KERBEROS = "atlas.authentication.method.kerberos";
+  private static final String URL_SEPERATOR = ",";
+
+  private AuthStrategy authStrategy;
+  private UserGroupInformation userGroupInformation;
+  protected String incomingUrl;
+  protected String[] baseUrls;
+
+  enum AuthStrategy {
+    KERBEROS
+  }
+
+  public RESTClientBuilder() {
+  }
+
+  public RESTClientBuilder baseUrl(String urls) {
+    this.incomingUrl = urls;
+    if (urls.contains(URL_SEPERATOR)) {
+      this.baseUrls = urls.split(URL_SEPERATOR);
+    } else {
+      this.baseUrls = new String[]{urls};
+    }
+
+    return this;
+  }
+
+  public RESTClientBuilder setAuthStrategy() throws SemanticException {
+    return inferKerberosAuthStrategy();
+  }
+
+  private RESTClientBuilder inferKerberosAuthStrategy() throws SemanticException {
+    try {
+      authStrategy = AuthStrategy.KERBEROS;
+      this.userGroupInformation = UserGroupInformation.getLoginUser();
+      LOG.info("HiveAtlasPlugin: authStrategy: {} : urls: {}: userGroupInformation: {}",
+              authStrategy, baseUrls, userGroupInformation);
+    } catch (Exception e) {
+      throw new SemanticException("Error: setAuthStrategy: UserGroupInformation.getLoginUser: failed!", e);
+    }
+    return this;
+  }
+
+  public AtlasRESTClient create() throws SemanticException {
+    if (baseUrls == null || baseUrls.length == 0) {
+      throw new SemanticException("baseUrls is not set.");
+    }
+    setAuthStrategy();
+    initializeAtlasApplicationProperties();
+    AtlasClientV2 clientV2;
+    LOG.info("HiveAtlasPlugin: authStrategyUsed: {} : {}", authStrategy, 
baseUrls);
+    switch (authStrategy) {
+      case KERBEROS:

Review comment:
       Only Kerberos is supported




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


Issue Time Tracking
-------------------

    Worklog Id:     (was: 434400)
    Time Spent: 40m  (was: 0.5h)

> Atlas metadata replication scheduling
> -------------------------------------
>
>                 Key: HIVE-23353
>                 URL: https://issues.apache.org/jira/browse/HIVE-23353
>             Project: Hive
>          Issue Type: Task
>            Reporter: PRAVIN KUMAR SINHA
>            Assignee: PRAVIN KUMAR SINHA
>            Priority: Major
>              Labels: pull-request-available
>         Attachments: HIVE-23353.01.patch
>
>          Time Spent: 40m
>  Remaining Estimate: 0h
>




--
This message was sent by Atlassian Jira
(v8.3.4#803005)
