This is an automated email from the ASF dual-hosted git repository.

roryqi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/gravitino.git


The following commit(s) were added to refs/heads/main by this push:
     new 1297713992 [#6536] improvement(authz): Create Ranger service if 
service is absent (#6575)
1297713992 is described below

commit 1297713992dfd376fc2a6fba805a6cdee61c4373
Author: roryqi <h...@datastrato.com>
AuthorDate: Fri Mar 7 16:40:53 2025 +0800

    [#6536] improvement(authz): Create Ranger service if service is absent 
(#6575)
    
    ### What changes were proposed in this pull request?
    
     Create Ranger service if service is absent
    
    ### Why are the changes needed?
    
    Fix: #6536
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, I will add the documentation.
    
    ### How was this patch tested?
    
    Added a unit test (UT) covering automatic Ranger service creation.
---
 .../test/TestChainedAuthorizationIT.java           |  34 ++++++
 .../common/RangerAuthorizationProperties.java      |  35 ++++--
 .../ranger/RangerAuthorizationHDFSPlugin.java      |  42 +++++++
 .../ranger/RangerAuthorizationHadoopSQLPlugin.java |  30 +++++
 .../ranger/RangerAuthorizationPlugin.java          |  57 ++++++++++
 .../ranger/integration/test/RangerHiveE2EIT.java   |  41 +++++++
 .../ranger/integration/test/RangerITEnv.java       | 122 +--------------------
 .../apache/gravitino/connector/BaseCatalog.java    |   3 +-
 .../gravitino/hook/CatalogHookDispatcher.java      |   9 +-
 docs/security/authorization-pushdown.md            |  24 ++--
 10 files changed, 258 insertions(+), 139 deletions(-)

diff --git 
a/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/integration/test/TestChainedAuthorizationIT.java
 
b/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/integration/test/TestChainedAuthorizationIT.java
index a59a80601d..a7e1dc465f 100644
--- 
a/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/integration/test/TestChainedAuthorizationIT.java
+++ 
b/authorizations/authorization-chain/src/test/java/org/apache/gravitino/authorization/chain/integration/test/TestChainedAuthorizationIT.java
@@ -55,6 +55,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.kyuubi.plugin.spark.authz.AccessControlException;
 import org.apache.ranger.RangerServiceException;
 import org.apache.ranger.plugin.model.RangerPolicy;
+import org.apache.ranger.plugin.model.RangerService;
 import org.apache.spark.sql.SparkSession;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.AfterEach;
@@ -210,6 +211,39 @@ public class TestChainedAuthorizationIT extends 
RangerBaseE2EIT {
     metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, "hive", 
"comment", catalogConf);
     catalog = metalake.loadCatalog(catalogName);
     LOG.info("Catalog created: {}", catalog);
+
+    // Test to create chained authorization plugin automatically
+    Map<String, String> autoProperties = new HashMap<>();
+    autoProperties.put(HiveConstants.METASTORE_URIS, HIVE_METASTORE_URIS);
+    autoProperties.put(IMPERSONATION_ENABLE, "true");
+    autoProperties.put(Catalog.AUTHORIZATION_PROVIDER, "chain");
+    
autoProperties.put(ChainedAuthorizationProperties.CHAIN_PLUGINS_PROPERTIES_KEY, 
"hive1,hdfs1");
+    autoProperties.put("authorization.chain.hive1.provider", "ranger");
+    autoProperties.put("authorization.chain.hive1.ranger.auth.type", 
RangerContainer.authType);
+    autoProperties.put("authorization.chain.hive1.ranger.admin.url", 
RangerITEnv.RANGER_ADMIN_URL);
+    autoProperties.put("authorization.chain.hive1.ranger.username", 
RangerContainer.rangerUserName);
+    autoProperties.put("authorization.chain.hive1.ranger.password", 
RangerContainer.rangerPassword);
+    autoProperties.put("authorization.chain.hive1.ranger.service.type", 
"HadoopSQL");
+    autoProperties.put("authorization.chain.hive1.ranger.service.name", 
"test899");
+    
autoProperties.put("authorization.chain.hive1.ranger.service.create-if-absent", 
"true");
+    autoProperties.put("authorization.chain.hdfs1.provider", "ranger");
+    autoProperties.put("authorization.chain.hdfs1.ranger.auth.type", 
RangerContainer.authType);
+    autoProperties.put("authorization.chain.hdfs1.ranger.admin.url", 
RangerITEnv.RANGER_ADMIN_URL);
+    autoProperties.put("authorization.chain.hdfs1.ranger.username", 
RangerContainer.rangerUserName);
+    autoProperties.put("authorization.chain.hdfs1.ranger.password", 
RangerContainer.rangerPassword);
+    autoProperties.put("authorization.chain.hdfs1.ranger.service.type", 
"HDFS");
+    autoProperties.put("authorization.chain.hdfs1.ranger.service.name", 
"test833");
+    
autoProperties.put("authorization.chain.hdfs1.ranger.service.create-if-absent", 
"true");
+    metalake.createCatalog("test", Catalog.Type.RELATIONAL, "hive", "comment", 
autoProperties);
+    try {
+      RangerService rangerService = 
RangerITEnv.rangerClient.getService("test833");
+      Assertions.assertNotNull(rangerService);
+      rangerService = RangerITEnv.rangerClient.getService("test899");
+      Assertions.assertNotNull(rangerService);
+    } catch (Exception e) {
+      Assertions.fail();
+    }
+    metalake.dropCatalog("test", true);
   }
 
   private String storageLocation(String dirName) {
diff --git 
a/authorizations/authorization-common/src/main/java/org/apache/gravitino/authorization/common/RangerAuthorizationProperties.java
 
b/authorizations/authorization-common/src/main/java/org/apache/gravitino/authorization/common/RangerAuthorizationProperties.java
index 73af3bc377..71cf686efa 100644
--- 
a/authorizations/authorization-common/src/main/java/org/apache/gravitino/authorization/common/RangerAuthorizationProperties.java
+++ 
b/authorizations/authorization-common/src/main/java/org/apache/gravitino/authorization/common/RangerAuthorizationProperties.java
@@ -23,6 +23,8 @@ import java.util.Map;
 
 /** The properties for Ranger authorization plugin. */
 public class RangerAuthorizationProperties extends AuthorizationProperties {
+  public static final String RANGER_PREFIX = "authorization.ranger";
+
   /** Ranger admin web URIs */
   public static final String RANGER_ADMIN_URL = 
"authorization.ranger.admin.url";
 
@@ -46,13 +48,34 @@ public class RangerAuthorizationProperties extends 
AuthorizationProperties {
    */
   public static final String RANGER_PASSWORD = "authorization.ranger.password";
 
+  public static final String RANGER_SERVICE_CREATE_IF_ABSENT =
+      "authorization.ranger.service.create-if-absent";
+
+  public static final String HADOOP_SECURITY_AUTHENTICATION =
+      "authorization.ranger.hadoop.security.authentication";
+  public static final String DEFAULT_HADOOP_SECURITY_AUTHENTICATION = "simple";
+  public static final String HADOOP_RPC_PROTECTION = 
"authorization.ranger.hadoop.rpc.protection";
+  public static final String DEFAULT_HADOOP_RPC_PROTECTION = "authentication";
+  public static final String HADOOP_SECURITY_AUTHORIZATION =
+      "authorization.ranger.hadoop.security.authorization";
+  public static final String FS_DEFAULT_NAME = 
"authorization.ranger.fs.default.name";
+  public static final String FS_DEFAULT_VALUE = "hdfs://127.0.0.1:8090";
+
+  public static final String JDBC_DRIVER_CLASS_NAME = 
"authorization.ranger.jdbc.driverClassName";
+
+  public static final String DEFAULT_JDBC_DRIVER_CLASS_NAME = 
"org.apache.hive.jdbc.HiveDriver";
+
+  public static final String JDBC_URL = "authorization.ranger.jdbc.url";
+
+  public static final String DEFAULT_JDBC_URL = "jdbc:hive2://127.0.0.1:8081";
+
   public RangerAuthorizationProperties(Map<String, String> properties) {
     super(properties);
   }
 
   @Override
   public String getPropertiesPrefix() {
-    return "authorization.ranger";
+    return RANGER_PREFIX;
   }
 
   @Override
@@ -63,9 +86,6 @@ public class RangerAuthorizationProperties extends 
AuthorizationProperties {
     Preconditions.checkArgument(
         properties.containsKey(RANGER_SERVICE_TYPE),
         String.format("%s is required", RANGER_SERVICE_TYPE));
-    Preconditions.checkArgument(
-        properties.containsKey(RANGER_SERVICE_NAME),
-        String.format("%s is required", RANGER_SERVICE_NAME));
     Preconditions.checkArgument(
         properties.containsKey(RANGER_AUTH_TYPE),
         String.format("%s is required", RANGER_AUTH_TYPE));
@@ -76,9 +96,6 @@ public class RangerAuthorizationProperties extends 
AuthorizationProperties {
     Preconditions.checkArgument(
         properties.get(RANGER_ADMIN_URL) != null,
         String.format("%s is required", RANGER_ADMIN_URL));
-    Preconditions.checkArgument(
-        properties.get(RANGER_SERVICE_NAME) != null,
-        String.format("%s is required", RANGER_SERVICE_NAME));
     Preconditions.checkArgument(
         properties.get(RANGER_AUTH_TYPE) != null,
         String.format("%s is required", RANGER_AUTH_TYPE));
@@ -86,5 +103,9 @@ public class RangerAuthorizationProperties extends 
AuthorizationProperties {
         properties.get(RANGER_USERNAME) != null, String.format("%s is 
required", RANGER_USERNAME));
     Preconditions.checkArgument(
         properties.get(RANGER_PASSWORD) != null, String.format("%s is 
required", RANGER_PASSWORD));
+
+    Preconditions.checkArgument(
+        properties.get(RANGER_SERVICE_NAME) != null,
+        String.format("%s is required", RANGER_SERVICE_NAME));
   }
 }
diff --git 
a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java
 
b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java
index 162a1bf308..1807484061 100644
--- 
a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java
+++ 
b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java
@@ -51,6 +51,7 @@ import org.apache.gravitino.authorization.Privilege;
 import org.apache.gravitino.authorization.SecurableObject;
 import org.apache.gravitino.authorization.common.PathBasedMetadataObject;
 import org.apache.gravitino.authorization.common.PathBasedSecurableObject;
+import org.apache.gravitino.authorization.common.RangerAuthorizationProperties;
 import org.apache.gravitino.authorization.ranger.reference.RangerDefines;
 import org.apache.gravitino.exceptions.AuthorizationPluginException;
 import org.apache.gravitino.utils.MetadataObjectUtil;
@@ -679,4 +680,45 @@ public class RangerAuthorizationHDFSPlugin extends 
RangerAuthorizationPlugin {
     }
     return Boolean.TRUE;
   }
+
+  @Override
+  protected String getServiceType() {
+    return HDFS_SERVICE_TYPE;
+  }
+
+  @Override
+  protected Map<String, String> getServiceConfigs(Map<String, String> config) {
+    return ImmutableMap.<String, String>builder()
+        .put(
+            
RangerAuthorizationProperties.RANGER_USERNAME.substring(getPrefixLength()),
+            config.get(RangerAuthorizationProperties.RANGER_USERNAME))
+        .put(
+            
RangerAuthorizationProperties.RANGER_PASSWORD.substring(getPrefixLength()),
+            config.get(RangerAuthorizationProperties.RANGER_PASSWORD))
+        .put(
+            
RangerAuthorizationProperties.HADOOP_SECURITY_AUTHENTICATION.substring(
+                getPrefixLength()),
+            getConfValue(
+                config,
+                RangerAuthorizationProperties.HADOOP_SECURITY_AUTHENTICATION,
+                
RangerAuthorizationProperties.DEFAULT_HADOOP_SECURITY_AUTHENTICATION))
+        .put(
+            
RangerAuthorizationProperties.HADOOP_RPC_PROTECTION.substring(getPrefixLength()),
+            getConfValue(
+                config,
+                RangerAuthorizationProperties.HADOOP_RPC_PROTECTION,
+                RangerAuthorizationProperties.DEFAULT_HADOOP_RPC_PROTECTION))
+        .put(
+            
RangerAuthorizationProperties.HADOOP_SECURITY_AUTHORIZATION.substring(
+                getPrefixLength()),
+            getConfValue(
+                config, 
RangerAuthorizationProperties.HADOOP_SECURITY_AUTHORIZATION, "false"))
+        .put(
+            
RangerAuthorizationProperties.FS_DEFAULT_NAME.substring(getPrefixLength()),
+            getConfValue(
+                config,
+                RangerAuthorizationProperties.FS_DEFAULT_NAME,
+                RangerAuthorizationProperties.FS_DEFAULT_VALUE))
+        .build();
+  }
 }
diff --git 
a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHadoopSQLPlugin.java
 
b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHadoopSQLPlugin.java
index e67864f85d..b4f8c14c86 100644
--- 
a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHadoopSQLPlugin.java
+++ 
b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHadoopSQLPlugin.java
@@ -45,6 +45,7 @@ import 
org.apache.gravitino.authorization.MetadataObjectChange;
 import org.apache.gravitino.authorization.Privilege;
 import org.apache.gravitino.authorization.SecurableObject;
 import org.apache.gravitino.authorization.SecurableObjects;
+import org.apache.gravitino.authorization.common.RangerAuthorizationProperties;
 import 
org.apache.gravitino.authorization.ranger.RangerPrivileges.RangerHadoopSQLPrivilege;
 import 
org.apache.gravitino.authorization.ranger.reference.RangerDefines.PolicyResource;
 import org.apache.gravitino.exceptions.AuthorizationPluginException;
@@ -802,4 +803,33 @@ public class RangerAuthorizationHadoopSQLPlugin extends 
RangerAuthorizationPlugi
     }
     return Boolean.TRUE;
   }
+
+  @Override
+  protected String getServiceType() {
+    return HADOOP_SQL_SERVICE_TYPE;
+  }
+
+  @Override
+  protected Map<String, String> getServiceConfigs(Map<String, String> config) {
+    return ImmutableMap.<String, String>builder()
+        .put(
+            
RangerAuthorizationProperties.RANGER_USERNAME.substring(getPrefixLength()),
+            config.get(RangerAuthorizationProperties.RANGER_USERNAME))
+        .put(
+            
RangerAuthorizationProperties.RANGER_PASSWORD.substring(getPrefixLength()),
+            config.get(RangerAuthorizationProperties.RANGER_PASSWORD))
+        .put(
+            
RangerAuthorizationProperties.JDBC_DRIVER_CLASS_NAME.substring(getPrefixLength()),
+            getConfValue(
+                config,
+                RangerAuthorizationProperties.JDBC_DRIVER_CLASS_NAME,
+                RangerAuthorizationProperties.DEFAULT_JDBC_DRIVER_CLASS_NAME))
+        .put(
+            
RangerAuthorizationProperties.JDBC_URL.substring(getPrefixLength()),
+            getConfValue(
+                config,
+                RangerAuthorizationProperties.JDBC_URL,
+                RangerAuthorizationProperties.DEFAULT_JDBC_URL))
+        .build();
+  }
 }
diff --git 
a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java
 
b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java
index 01e743173d..b7189ba6a6 100644
--- 
a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java
+++ 
b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java
@@ -21,6 +21,7 @@ package org.apache.gravitino.authorization.ranger;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
+import com.sun.jersey.api.client.ClientResponse;
 import java.io.IOException;
 import java.time.Instant;
 import java.util.Arrays;
@@ -56,6 +57,7 @@ import org.apache.gravitino.meta.UserEntity;
 import org.apache.gravitino.utils.PrincipalUtils;
 import org.apache.ranger.RangerServiceException;
 import org.apache.ranger.plugin.model.RangerPolicy;
+import org.apache.ranger.plugin.model.RangerService;
 import org.apache.ranger.plugin.util.GrantRevokeRoleRequest;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -74,6 +76,8 @@ import org.slf4j.LoggerFactory;
 public abstract class RangerAuthorizationPlugin
     implements AuthorizationPlugin, AuthorizationPrivilegesMappingProvider {
   private static final Logger LOG = 
LoggerFactory.getLogger(RangerAuthorizationPlugin.class);
+  protected static final String HDFS_SERVICE_TYPE = "hdfs";
+  protected static final String HADOOP_SQL_SERVICE_TYPE = "hive";
 
   protected String metalake;
   protected final String rangerServiceName;
@@ -87,13 +91,22 @@ public abstract class RangerAuthorizationPlugin
         new RangerAuthorizationProperties(config);
     rangerAuthorizationProperties.validate();
     String rangerUrl = 
config.get(RangerAuthorizationProperties.RANGER_ADMIN_URL);
+
     String authType = 
config.get(RangerAuthorizationProperties.RANGER_AUTH_TYPE);
+
     rangerAdminName = 
config.get(RangerAuthorizationProperties.RANGER_USERNAME);
+
     // Apache Ranger Password should be minimum 8 characters with min one 
alphabet and one numeric.
     String password = 
config.get(RangerAuthorizationProperties.RANGER_PASSWORD);
+
     rangerServiceName = 
config.get(RangerAuthorizationProperties.RANGER_SERVICE_NAME);
     rangerClient = new RangerClientExtension(rangerUrl, authType, 
rangerAdminName, password);
 
+    if (Boolean.parseBoolean(
+        
config.get(RangerAuthorizationProperties.RANGER_SERVICE_CREATE_IF_ABSENT))) {
+      createRangerServiceIfNecessary(config, rangerServiceName);
+    }
+
     rangerHelper =
         new RangerHelper(
             rangerClient,
@@ -769,6 +782,34 @@ public abstract class RangerAuthorizationPlugin
     return Boolean.TRUE;
   }
 
+  private void createRangerServiceIfNecessary(Map<String, String> config, 
String serviceName) {
+    try {
+      rangerClient.getService(serviceName);
+    } catch (RangerServiceException rse) {
+      if (rse.getStatus().equals(ClientResponse.Status.NOT_FOUND)) {
+        try {
+          RangerService rangerService = new RangerService();
+          rangerService.setType(getServiceType());
+          rangerService.setName(serviceName);
+          rangerService.setConfigs(getServiceConfigs(config));
+          rangerClient.createService(rangerService);
+          // We should remove some default policies, they will cause users to 
get more policies
+          // than they should do.
+          List<RangerPolicy> policies = 
rangerClient.getPoliciesInService(serviceName);
+          for (RangerPolicy policy : policies) {
+            rangerClient.deletePolicy(policy.getId());
+          }
+        } catch (RangerServiceException crse) {
+          throw new AuthorizationPluginException(
+              "Fail to create ranger service %s, exception: %s", serviceName, 
crse.getMessage());
+        }
+      } else {
+        throw new AuthorizationPluginException(
+            "Fail to get ranger service name %s, exception: %s", serviceName, 
rse.getMessage());
+      }
+    }
+  }
+
   /**
    * Add the securable object's privilege to the Ranger policy. <br>
    * 1. Find the policy base the metadata object. <br>
@@ -959,6 +1000,22 @@ public abstract class RangerAuthorizationPlugin
     }
   }
 
+  protected String getConfValue(Map<String, String> conf, String key, String 
defaultValue) {
+    if (conf.containsKey(key)) {
+      return conf.get(key);
+    }
+    return defaultValue;
+  }
+
+  protected abstract String getServiceType();
+
+  protected abstract Map<String, String> getServiceConfigs(Map<String, String> 
config);
+
+  protected int getPrefixLength() {
+    // We should consider `.`. We need to add 1
+    return RangerAuthorizationProperties.RANGER_PREFIX.length() + 1;
+  }
+
   @Override
   public void close() throws IOException {}
 
diff --git 
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
 
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
index 2b305a6750..a90b0d86bb 100644
--- 
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
+++ 
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
@@ -23,6 +23,7 @@ import static 
org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENAB
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
+import java.util.List;
 import java.util.Map;
 import org.apache.gravitino.Catalog;
 import org.apache.gravitino.Configs;
@@ -35,6 +36,7 @@ import 
org.apache.gravitino.integration.test.container.HiveContainer;
 import org.apache.gravitino.integration.test.container.RangerContainer;
 import org.apache.gravitino.integration.test.util.GravitinoITUtils;
 import org.apache.kyuubi.plugin.spark.authz.AccessControlException;
+import org.apache.ranger.plugin.model.RangerService;
 import org.apache.spark.SparkUnsupportedOperationException;
 import org.apache.spark.sql.AnalysisException;
 import org.apache.spark.sql.SparkSession;
@@ -196,6 +198,45 @@ public class RangerHiveE2EIT extends RangerBaseE2EIT {
     metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, provider, 
"comment", properties);
     catalog = metalake.loadCatalog(catalogName);
     LOG.info("Catalog created: {}", catalog);
+
+    // Test to create catalog automatically
+    Map<String, String> uuidProperties =
+        ImmutableMap.of(
+            HiveConstants.METASTORE_URIS,
+            HIVE_METASTORE_URIS,
+            IMPERSONATION_ENABLE,
+            "true",
+            AUTHORIZATION_PROVIDER,
+            "ranger",
+            RangerAuthorizationProperties.RANGER_SERVICE_TYPE,
+            "HadoopSQL",
+            RangerAuthorizationProperties.RANGER_ADMIN_URL,
+            RangerITEnv.RANGER_ADMIN_URL,
+            RangerAuthorizationProperties.RANGER_AUTH_TYPE,
+            RangerContainer.authType,
+            RangerAuthorizationProperties.RANGER_USERNAME,
+            RangerContainer.rangerUserName,
+            RangerAuthorizationProperties.RANGER_PASSWORD,
+            RangerContainer.rangerPassword,
+            RangerAuthorizationProperties.RANGER_SERVICE_NAME,
+            "test555",
+            RangerAuthorizationProperties.RANGER_SERVICE_CREATE_IF_ABSENT,
+            "true");
+
+    try {
+      List<RangerService> serviceList = 
RangerITEnv.rangerClient.findServices(Maps.newHashMap());
+      int expectServiceCount = serviceList.size() + 1;
+      Catalog catalogTest =
+          metalake.createCatalog(
+              "test", Catalog.Type.RELATIONAL, provider, "comment", 
uuidProperties);
+      Map<String, String> newProperties = catalogTest.properties();
+      
Assertions.assertTrue(newProperties.containsKey("authorization.ranger.service.name"));
+      serviceList = RangerITEnv.rangerClient.findServices(Maps.newHashMap());
+      Assertions.assertEquals(expectServiceCount, serviceList.size());
+      metalake.dropCatalog("test", true);
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
   }
 
   protected void checkTableAllPrivilegesExceptForCreating() {
diff --git 
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
 
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
index 1cbf076c12..0d00733437 100644
--- 
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
+++ 
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
@@ -116,7 +116,9 @@ public class RangerITEnv {
                 RangerAuthorizationProperties.RANGER_SERVICE_TYPE,
                 "HadoopSQL",
                 RangerAuthorizationProperties.RANGER_SERVICE_NAME,
-                RangerITEnv.RANGER_HIVE_REPO_NAME));
+                RangerITEnv.RANGER_HIVE_REPO_NAME,
+                RangerAuthorizationProperties.RANGER_SERVICE_CREATE_IF_ABSENT,
+                "true"));
 
     RangerAuthorizationHDFSPlugin spyRangerAuthorizationHDFSPlugin =
         Mockito.spy(
@@ -137,7 +139,9 @@ public class RangerITEnv {
                     RangerAuthorizationProperties.RANGER_SERVICE_TYPE,
                     "HDFS",
                     RangerAuthorizationProperties.RANGER_SERVICE_NAME,
-                    RangerITEnv.RANGER_HDFS_REPO_NAME)));
+                    RangerITEnv.RANGER_HDFS_REPO_NAME,
+                    
RangerAuthorizationProperties.RANGER_SERVICE_CREATE_IF_ABSENT,
+                    "true")));
     rangerAuthHDFSPlugin = spyRangerAuthorizationHDFSPlugin;
 
     rangerHelper =
@@ -158,9 +162,6 @@ public class RangerITEnv {
 
     if (!initRangerService) {
       synchronized (RangerITEnv.class) {
-        // No IP address set, no impact on testing
-        createRangerHdfsRepository("", true);
-        createRangerHiveRepository("", true);
         if (allowAnyoneAccessHDFS) {
           allowAnyoneAccessHDFS();
         }
@@ -301,117 +302,6 @@ public class RangerITEnv {
     }
   }
 
-  public static void createRangerHiveRepository(String hiveIp, boolean 
cleanAllPolicy) {
-    try {
-      if (null != rangerClient.getService(RANGER_HIVE_REPO_NAME)) {
-        return;
-      }
-    } catch (RangerServiceException e) {
-      LOG.warn("Error while fetching service: {}", e.getMessage());
-    }
-
-    String usernameKey = "username";
-    String usernameVal = "admin";
-    String passwordKey = "password";
-    String passwordVal = "admin";
-    String jdbcKey = "jdbc.driverClassName";
-    String jdbcVal = "org.apache.hive.jdbc.HiveDriver";
-    String jdbcUrlKey = "jdbc.url";
-    String jdbcUrlVal =
-        String.format("jdbc:hive2://%s:%d", hiveIp, 
HiveContainer.HIVE_SERVICE_PORT);
-
-    RangerService service = new RangerService();
-    service.setType(RANGER_HIVE_TYPE);
-    service.setName(RANGER_HIVE_REPO_NAME);
-    service.setConfigs(
-        ImmutableMap.<String, String>builder()
-            .put(usernameKey, usernameVal)
-            .put(passwordKey, passwordVal)
-            .put(jdbcKey, jdbcVal)
-            .put(jdbcUrlKey, jdbcUrlVal)
-            .build());
-
-    try {
-      RangerService createdService = rangerClient.createService(service);
-      Assertions.assertNotNull(createdService);
-
-      Map<String, String> filter =
-          ImmutableMap.of(SearchFilter.SERVICE_NAME, RANGER_HIVE_REPO_NAME);
-      List<RangerService> services = rangerClient.findServices(filter);
-      Assertions.assertEquals(RANGER_HIVE_TYPE, services.get(0).getType());
-      Assertions.assertEquals(RANGER_HIVE_REPO_NAME, 
services.get(0).getName());
-      Assertions.assertEquals(usernameVal, 
services.get(0).getConfigs().get(usernameKey));
-      Assertions.assertEquals(jdbcVal, 
services.get(0).getConfigs().get(jdbcKey));
-      Assertions.assertEquals(jdbcUrlVal, 
services.get(0).getConfigs().get(jdbcUrlKey));
-
-      if (cleanAllPolicy) {
-        cleanAllPolicy(RANGER_HIVE_REPO_NAME);
-      }
-    } catch (RangerServiceException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  public static void createRangerHdfsRepository(String hdfsIp, boolean 
cleanAllPolicy) {
-    try {
-      if (null != rangerClient.getService(RANGER_HDFS_REPO_NAME)) {
-        return;
-      }
-    } catch (RangerServiceException e) {
-      LOG.warn("Error while fetching service: {}", e.getMessage());
-    }
-
-    String usernameKey = "username";
-    String usernameVal = "admin";
-    String passwordKey = "password";
-    String passwordVal = "admin";
-    String authenticationKey = "hadoop.security.authentication";
-    String authenticationVal = "simple";
-    String protectionKey = "hadoop.rpc.protection";
-    String protectionVal = "authentication";
-    String authorizationKey = "hadoop.security.authorization";
-    String authorizationVal = "false";
-    String fsDefaultNameKey = "fs.default.name";
-    String fsDefaultNameVal =
-        String.format("hdfs://%s:%d", hdfsIp, 
HiveContainer.HDFS_DEFAULTFS_PORT);
-
-    RangerService service = new RangerService();
-    service.setType(RANGER_HDFS_TYPE);
-    service.setName(RANGER_HDFS_REPO_NAME);
-    service.setConfigs(
-        ImmutableMap.<String, String>builder()
-            .put(usernameKey, usernameVal)
-            .put(passwordKey, passwordVal)
-            .put(authenticationKey, authenticationVal)
-            .put(protectionKey, protectionVal)
-            .put(authorizationKey, authorizationVal)
-            .put(fsDefaultNameKey, fsDefaultNameVal)
-            .build());
-
-    try {
-      RangerService createdService = rangerClient.createService(service);
-      Assertions.assertNotNull(createdService);
-
-      Map<String, String> filter =
-          ImmutableMap.of(SearchFilter.SERVICE_NAME, RANGER_HDFS_REPO_NAME);
-      List<RangerService> services = rangerClient.findServices(filter);
-      Assertions.assertEquals(RANGER_HDFS_TYPE, services.get(0).getType());
-      Assertions.assertEquals(RANGER_HDFS_REPO_NAME, 
services.get(0).getName());
-      Assertions.assertEquals(usernameVal, 
services.get(0).getConfigs().get(usernameKey));
-      Assertions.assertEquals(
-          authenticationVal, 
services.get(0).getConfigs().get(authenticationKey));
-      Assertions.assertEquals(protectionVal, 
services.get(0).getConfigs().get(protectionKey));
-      Assertions.assertEquals(authorizationVal, 
services.get(0).getConfigs().get(authorizationKey));
-      Assertions.assertEquals(fsDefaultNameVal, 
services.get(0).getConfigs().get(fsDefaultNameKey));
-
-      if (cleanAllPolicy) {
-        cleanAllPolicy(RANGER_HDFS_REPO_NAME);
-      }
-    } catch (RangerServiceException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
   protected static void verifyRoleInRanger(
       RangerAuthorizationPlugin rangerAuthPlugin,
       Role role,
diff --git a/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java 
b/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java
index 444e89062a..53005bf9a0 100644
--- a/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java
+++ b/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java
@@ -209,8 +209,6 @@ public abstract class BaseCatalog<T extends BaseCatalog>
             BaseAuthorization<?> authorization =
                 BaseAuthorization.createAuthorization(classLoader, 
authorizationProvider);
 
-            // Load the authorization plugin with the class loader of the 
catalog.
-            // Because the JDBC authorization plugin may load JDBC driver 
using the class loader.
             authorizationPlugin =
                 classLoader.withClassLoader(
                     cl ->
@@ -354,6 +352,7 @@ public abstract class BaseCatalog<T extends BaseCatalog>
           tempProperties.putIfAbsent(
               PROPERTY_IN_USE,
               
catalogPropertiesMetadata().getDefaultValue(PROPERTY_IN_USE).toString());
+
           properties = tempProperties;
         }
       }
diff --git 
a/core/src/main/java/org/apache/gravitino/hook/CatalogHookDispatcher.java 
b/core/src/main/java/org/apache/gravitino/hook/CatalogHookDispatcher.java
index cc350a15cc..07dc4f079a 100644
--- a/core/src/main/java/org/apache/gravitino/hook/CatalogHookDispatcher.java
+++ b/core/src/main/java/org/apache/gravitino/hook/CatalogHookDispatcher.java
@@ -131,18 +131,17 @@ public class CatalogHookDispatcher implements 
CatalogDispatcher {
       return false;
     }
 
-    // If we call the authorization plugin after dropping catalog, we can't 
load the plugin of the
-    // catalog
     Catalog catalog = dispatcher.loadCatalog(ident);
-    boolean dropped = dispatcher.dropCatalog(ident, force);
 
-    if (dropped && catalog != null) {
+    if (catalog != null) {
       List<String> locations =
           AuthorizationUtils.getMetadataObjectLocation(ident, 
Entity.EntityType.CATALOG);
       AuthorizationUtils.removeCatalogPrivileges(catalog, locations);
     }
 
-    return dropped;
+    // We should call the authorization plugin before dropping the catalog, 
because dropping the
+    // catalog will close the authorization plugin.
+    return dispatcher.dropCatalog(ident, force);
   }
 
   @Override
diff --git a/docs/security/authorization-pushdown.md 
b/docs/security/authorization-pushdown.md
index 9c8e972193..22b1f62aab 100644
--- a/docs/security/authorization-pushdown.md
+++ b/docs/security/authorization-pushdown.md
@@ -17,15 +17,21 @@ This module translates Gravitino's authorization model into 
the permission rules
 
 In order to use the Ranger Hadoop SQL Plugin, you need to configure the 
following properties:
 
-| Property Name                       | Description                            
                                                                                
                              | Default Value | Required | Since Version    |
-|-------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------|---------------|----------|------------------|
-| `authorization-provider`            | Providers to use to implement 
authorization plugin such as `ranger`.                                          
                                       | (none)        | No       | 
0.6.0-incubating |
-| `authorization.ranger.admin.url`    | The Apache Ranger web URIs.            
                                                                                
                              | (none)        | No       | 0.6.0-incubating |
-| `authorization.ranger.service.type` | The Apache Ranger service type, 
Currently only supports `HadoopSQL` or `HDFS`                                   
                                     | (none)        | No       | 
0.8.0-incubating |
-| `authorization.ranger.auth.type`    | The Apache Ranger authentication type 
`simple` or `kerberos`.                                                         
                               | `simple`      | No       | 0.6.0-incubating |
-| `authorization.ranger.username`     | The Apache Ranger admin web login 
username (auth type=simple), or kerberos principal(auth type=kerberos), Need 
have Ranger administrator permission. | (none)        | No       | 
0.6.0-incubating |
-| `authorization.ranger.password`     | The Apache Ranger admin web login user 
password (auth type=simple), or path of the keytab file(auth type=kerberos)     
                              | (none)        | No       | 0.6.0-incubating |
-| `authorization.ranger.service.name` | The Apache Ranger service name.        
                                                                                
                              | (none)        | No       | 0.6.0-incubating |
+| Property Name                                         | Description          
                                                                                
                                                | Default Value                 
    | Required | Since Version    |
+|-------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------|----------|------------------|
+| `authorization-provider`                              | Providers to use to 
implement authorization plugin such as `ranger`.                                
                                                 | (none)                       
     | No       | 0.6.0-incubating |
+| `authorization.ranger.admin.url`                      | The Apache Ranger 
web URIs.                                                                       
                                                   | (none)                     
       | No       | 0.6.0-incubating |
+| `authorization.ranger.service.type`                   | The Apache Ranger 
service type. Currently only supports `HadoopSQL` or `HDFS`                     
                                                   | (none)                     
       | No       | 0.8.0-incubating |
+| `authorization.ranger.auth.type`                      | The Apache Ranger 
authentication type `simple` or `kerberos`.                                     
                                                   | `simple`                   
       | No       | 0.6.0-incubating |
+| `authorization.ranger.username`                       | The Apache Ranger 
admin web login username (auth type=simple), or kerberos principal(auth 
type=kerberos). Needs to have Ranger administrator permission. | (none)             
               | No       | 0.6.0-incubating |
+| `authorization.ranger.password`                       | The Apache Ranger 
admin web login user password (auth type=simple), or path of the keytab 
file(auth type=kerberos)                                   | (none)             
               | No       | 0.6.0-incubating |
+| `authorization.ranger.service.name`                   | The Apache Ranger 
service name.                                                                   
                                                   | (none)                     
       | No       | 0.6.0-incubating |
+| `authorization.ranger.service.create-if-absent`       | If this property is 
true and the Ranger service doesn't exist, Gravitino will create a Ranger 
service                                                | false                  
           | No       | 0.9.0-incubating |
+| `authorization.ranger.jdbc.driverClassName`           | The property is used 
to specify driver class name when creating Ranger HadoopSQL service             
                                                | 
`org.apache.hive.jdbc.HiveDriver`  | No       | 0.9.0-incubating |
+| `authorization.ranger.jdbc.url`                       | The property is used 
to specify jdbc url when creating Ranger HadoopSQL service                      
                                                | `jdbc:hive2://127.0.0.1:8081` 
    | No       | 0.9.0-incubating |
+| `authorization.ranger.hadoop.security.authentication` | The property is used 
to specify Hadoop security authentication when creating Ranger HDFS service     
                                                | `simple`                      
    | No       | 0.9.0-incubating |
+| `authorization.ranger.hadoop.rpc.protection`          | The property is used 
to specify Hadoop rpc protection when creating Ranger HDFS service              
                                                | `authentication`              
    | No       | 0.9.0-incubating |
+| `authorization.ranger.fs.default.name`                | The property is used 
to specify default filesystem when creating Ranger HDFS service                 
                                                | `hdfs://127.0.0.1:8090`       
    | No       | 0.9.0-incubating |
 
 :::caution
 The Gravitino Ranger authorization plugin only supports the Apache Ranger 
HadoopSQL Plugin and Apache Ranger HDFS Plugin.


Reply via email to