This is an automated email from the ASF dual-hosted git repository.

liuxun pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/gravitino.git


The following commit(s) were added to refs/heads/main by this push:
     new 983ce4be0 [#5790] auth(chain): Chain authorization properties (#5791)
983ce4be0 is described below

commit 983ce4be0535d719f1b76c56abf4493969d53f79
Author: Xun <x...@datastrato.com>
AuthorDate: Tue Dec 17 16:20:00 2024 +0800

    [#5790] auth(chain): Chain authorization properties (#5791)
    
    ### What changes were proposed in this pull request?
    
    Add a `ChainAuthorizationProperties` class that validates the
    `authorization.chain.*` catalog properties and extracts each chained
    plugin's own Ranger properties.
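
    For reference, a chained setup is configured with one group of properties per
    plugin. The plugin names `hive1` and `hdfs1` below are only examples, taken from
    the Javadoc of the new class, and each plugin also carries the usual
    `authorization.chain.<plugin>.ranger.*` connection settings:

        authorization.chain.plugins = hive1,hdfs1
        authorization.chain.hive1.provider = ranger
        authorization.chain.hive1.ranger.service.type = HadoopSQL
        authorization.chain.hive1.ranger.service.name = hiveDev
        authorization.chain.hdfs1.provider = ranger
        authorization.chain.hdfs1.ranger.service.type = HDFS
        authorization.chain.hdfs1.ranger.service.name = hdfsDev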
    
    ### Why are the changes needed?
    
    Fix: #5790
    
    ### Does this PR introduce _any_ user-facing change?
    
    N/A
    
    ### How was this patch tested?
    
    Add unit tests (`TestChainAuthorizationProperties`, `TestRangerAuthorizationProperties`).
---
 .../ranger/ChainAuthorizationProperties.java       | 160 ++++++++++++++++
 .../authorization/ranger/RangerAuthorization.java  |  23 ++-
 .../ranger/RangerAuthorizationPlugin.java          |  17 +-
 .../ranger/RangerAuthorizationProperties.java      |  80 ++++++++
 .../ranger/TestChainAuthorizationProperties.java   | 213 +++++++++++++++++++++
 .../ranger/TestRangerAuthorizationProperties.java  | 110 +++++++++++
 .../ranger/integration/test/RangerFilesetIT.java   |  18 +-
 .../ranger/integration/test/RangerHiveE2EIT.java   |  18 +-
 .../ranger/integration/test/RangerITEnv.java       |  26 +--
 .../integration/test/RangerIcebergE2EIT.java       |  51 ++---
 .../ranger/integration/test/RangerPaimonE2EIT.java |  18 +-
 .../apache/gravitino/catalog/hive/HiveCatalog.java |   8 +-
 .../catalog/hive/HiveCatalogOperations.java        |  12 +-
 ...eta.java => HiveCatalogPropertiesMetadata.java} |   4 +-
 .../gravitino/catalog/hive/TestHiveCatalog.java    |   2 +-
 .../catalog/hive/TestHiveCatalogOperations.java    |  32 +---
 .../gravitino/catalog/hive/TestHiveSchema.java     |   2 +-
 .../gravitino/catalog/hive/TestHiveTable.java      |   2 +-
 .../hive/integration/test/CatalogHiveIT.java       |   2 +-
 .../integration/test/HiveUserAuthenticationIT.java |   8 +-
 .../hive/integration/test/ProxyCatalogHiveIT.java  |   4 +-
 .../connector/AuthorizationPropertiesMeta.java     |  68 -------
 docs/security/authorization-pushdown.md            |   2 +
 23 files changed, 675 insertions(+), 205 deletions(-)

diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/ChainAuthorizationProperties.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/ChainAuthorizationProperties.java
new file mode 100644
index 000000000..edaa37574
--- /dev/null
+++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/ChainAuthorizationProperties.java
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.gravitino.authorization.ranger;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+/**
+ * The properties for Chain authorization plugin. <br>
+ * <br>
+ * Configuration Example: <br>
+ * "authorization.chain.plugins" = "hive1,hdfs1" <br>
+ * "authorization.chain.hive1.provider" = "ranger"; <br>
+ * "authorization.chain.hive1.ranger.service.type" = "HadoopSQL"; <br>
+ * "authorization.chain.hive1.ranger.service.name" = "hiveDev"; <br>
+ * "authorization.chain.hive1.ranger.auth.type" = "simple"; <br>
+ * "authorization.chain.hive1.ranger.admin.url" = "http://localhost:6080";; <br>
+ * "authorization.chain.hive1.ranger.username" = "admin"; <br>
+ * "authorization.chain.hive1.ranger.password" = "admin"; <br>
+ * "authorization.chain.hdfs1.provider" = "ranger"; <br>
+ * "authorization.chain.hdfs1.ranger.service.type" = "HDFS"; <br>
+ * "authorization.chain.hdfs1.ranger.service.name" = "hdfsDev"; <br>
+ * "authorization.chain.hdfs1.ranger.auth.type" = "simple"; <br>
+ * "authorization.chain.hdfs1.ranger.admin.url" = "http://localhost:6080";; <br>
+ * "authorization.chain.hdfs1.ranger.username" = "admin"; <br>
+ * "authorization.chain.hdfs1.ranger.password" = "admin"; <br>
+ */
+public class ChainAuthorizationProperties {
+  public static final String PLUGINS_SPLITTER = ",";
+  /** Chain authorization plugin names */
+  public static final String CHAIN_PLUGINS_PROPERTIES_KEY = "authorization.chain.plugins";
+
+  /** Chain authorization plugin provider */
+  public static final String CHAIN_PROVIDER = "authorization.chain.*.provider";
+
+  static Map<String, String> fetchAuthPluginProperties(
+      String pluginName, Map<String, String> properties) {
+    Preconditions.checkArgument(
+        properties.containsKey(CHAIN_PLUGINS_PROPERTIES_KEY)
+            && properties.get(CHAIN_PLUGINS_PROPERTIES_KEY) != null,
+        String.format("%s is required", CHAIN_PLUGINS_PROPERTIES_KEY));
+
+    String[] pluginNames = properties.get(CHAIN_PLUGINS_PROPERTIES_KEY).split(PLUGINS_SPLITTER);
+    Preconditions.checkArgument(
+        Arrays.asList(pluginNames).contains(pluginName),
+        String.format("pluginName %s must be one of %s", pluginName, Arrays.toString(pluginNames)));
+
+    String regex = "^authorization\\.chain\\.(" + pluginName + ")\\..*";
+    Pattern pattern = Pattern.compile(regex);
+
+    Map<String, String> filteredProperties = new HashMap<>();
+    for (Map.Entry<String, String> entry : properties.entrySet()) {
+      Matcher matcher = pattern.matcher(entry.getKey());
+      if (matcher.matches()) {
+        filteredProperties.put(entry.getKey(), entry.getValue());
+      }
+    }
+
+    String removeRegex = "^authorization\\.chain\\.(" + pluginName + ")\\.";
+    Pattern removePattern = Pattern.compile(removeRegex);
+
+    Map<String, String> resultProperties = new HashMap<>();
+    for (Map.Entry<String, String> entry : filteredProperties.entrySet()) {
+      Matcher removeMatcher = removePattern.matcher(entry.getKey());
+      if (removeMatcher.find()) {
+        resultProperties.put(removeMatcher.replaceFirst("authorization."), entry.getValue());
+      }
+    }
+
+    return resultProperties;
+  }
+
+  public static void validate(Map<String, String> properties) {
+    Preconditions.checkArgument(
+        properties.containsKey(CHAIN_PLUGINS_PROPERTIES_KEY),
+        String.format("%s is required", CHAIN_PLUGINS_PROPERTIES_KEY));
+    List<String> pluginNames =
+        Arrays.stream(properties.get(CHAIN_PLUGINS_PROPERTIES_KEY).split(PLUGINS_SPLITTER))
+            .map(String::trim)
+            .collect(Collectors.toList());
+    Preconditions.checkArgument(
+        !pluginNames.isEmpty(),
+        String.format("%s must have at least one plugin name", 
CHAIN_PLUGINS_PROPERTIES_KEY));
+    Preconditions.checkArgument(
+        pluginNames.size() == pluginNames.stream().distinct().count(),
+        "Duplicate plugin name in %s: %s",
+        CHAIN_PLUGINS_PROPERTIES_KEY,
+        pluginNames);
+    pluginNames.stream()
+        .filter(v -> v.contains("."))
+        .forEach(
+            v -> {
+              throw new IllegalArgumentException(
+                  String.format(
+                      "Plugin name cannot be contain `.` character in the `%s 
= %s`.",
+                      CHAIN_PLUGINS_PROPERTIES_KEY, 
properties.get(CHAIN_PLUGINS_PROPERTIES_KEY)));
+            });
+
+    Pattern pattern = Pattern.compile("^authorization\\.chain\\..*\\..*$");
+    Map<String, String> filteredProperties =
+        properties.entrySet().stream()
+            .filter(entry -> pattern.matcher(entry.getKey()).matches())
+            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+
+    String pluginNamesPattern = String.join("|", pluginNames);
+    Pattern patternPluginNames =
+        Pattern.compile("^authorization\\.chain\\.(" + pluginNamesPattern + 
")\\..*$");
+    for (String key : filteredProperties.keySet()) {
+      Matcher matcher = patternPluginNames.matcher(key);
+      Preconditions.checkArgument(
+          matcher.matches(),
+          "The key %s does not match the pattern %s",
+          key,
+          patternPluginNames.pattern());
+    }
+
+    // Generate regex patterns from wildcardProperties
+    List<String> wildcardProperties = ImmutableList.of(CHAIN_PROVIDER);
+    for (String pluginName : pluginNames) {
+      List<Pattern> patterns =
+          wildcardProperties.stream()
+              .map(wildcard -> "^" + wildcard.replace("*", pluginName) + "$")
+              .map(Pattern::compile)
+              .collect(Collectors.toList());
+      // Validate properties keys
+      for (Pattern pattern1 : patterns) {
+        boolean matches =
+            filteredProperties.keySet().stream().anyMatch(key -> pattern1.matcher(key).matches());
+        Preconditions.checkArgument(
+            matches,
+            "Missing required properties %s for plugin: %s",
+            filteredProperties,
+            pattern1.pattern());
+      }
+    }
+  }
+}
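
The net effect of fetchAuthPluginProperties() is a prefix rewrite: every key under
"authorization.chain.<plugin>." is re-exposed under "authorization.", so each wrapped Ranger plugin
receives the plain "authorization.ranger.*" keys it already understands. A minimal sketch of that
behavior, assuming a hypothetical ChainPropertiesSketch class placed in the same package (the
method is package-private):

    package org.apache.gravitino.authorization.ranger;

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical sketch, not part of this commit: shows the key rewriting performed by
    // ChainAuthorizationProperties.fetchAuthPluginProperties.
    public class ChainPropertiesSketch {
      public static void main(String[] args) {
        Map<String, String> catalogProps = new HashMap<>();
        catalogProps.put("authorization.chain.plugins", "hive1");
        catalogProps.put("authorization.chain.hive1.provider", "ranger");
        catalogProps.put("authorization.chain.hive1.ranger.admin.url", "http://localhost:6080");

        Map<String, String> hiveProps =
            ChainAuthorizationProperties.fetchAuthPluginProperties("hive1", catalogProps);

        // Prints {authorization.provider=ranger, authorization.ranger.admin.url=http://localhost:6080}
        // (entry order may vary).
        System.out.println(hiveProps);
      }
    }
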
diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java
index 04c40e219..cd27d9f12 100644
--- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java
+++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorization.java
@@ -18,6 +18,9 @@
  */
 package org.apache.gravitino.authorization.ranger;
 
+import static org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties.RANGER_SERVICE_TYPE;
+
+import com.google.common.base.Preconditions;
 import java.util.Map;
 import org.apache.gravitino.connector.authorization.AuthorizationPlugin;
 import org.apache.gravitino.connector.authorization.BaseAuthorization;
@@ -31,16 +34,18 @@ public class RangerAuthorization extends BaseAuthorization<RangerAuthorization>
 
   @Override
   protected AuthorizationPlugin newPlugin(
-      String metalake, String catalogProvider, Map<String, String> config) {
-    switch (catalogProvider) {
-      case "hive":
-      case "lakehouse-iceberg":
-      case "lakehouse-paimon":
-        return RangerAuthorizationHadoopSQLPlugin.getInstance(metalake, config);
-      case "hadoop":
-        return RangerAuthorizationHDFSPlugin.getInstance(metalake, config);
+      String metalake, String catalogProvider, Map<String, String> properties) {
+    Preconditions.checkArgument(
+        properties.containsKey(RANGER_SERVICE_TYPE),
+        String.format("%s is required", RANGER_SERVICE_TYPE));
+    String serviceType = properties.get(RANGER_SERVICE_TYPE).toUpperCase();
+    switch (serviceType) {
+      case "HADOOPSQL":
+        return RangerAuthorizationHadoopSQLPlugin.getInstance(metalake, properties);
+      case "HDFS":
+        return RangerAuthorizationHDFSPlugin.getInstance(metalake, properties);
       default:
-        throw new IllegalArgumentException("Unknown catalog provider: " + 
catalogProvider);
+        throw new IllegalArgumentException("Unsupported service type: " + 
serviceType);
     }
   }
 }
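
With this change the Ranger plugin to instantiate is selected by the new
"authorization.ranger.service.type" property instead of the catalog provider name; the value is
upper-cased before matching, so "HadoopSQL" and "HDFS" are accepted case-insensitively. A rough,
illustrative fragment of the relevant catalog properties (the remaining authorization.ranger.*
connection settings are the ones validated by RangerAuthorizationProperties below):

    // Illustrative fragment only: the service type decides which Ranger plugin is created.
    Map<String, String> props = new HashMap<>();
    props.put(RangerAuthorizationProperties.RANGER_SERVICE_TYPE, "HadoopSQL"); // or "HDFS"
    props.put(RangerAuthorizationProperties.RANGER_SERVICE_NAME, "hiveDev");
    // ... plus authorization.ranger.admin.url, auth.type, username and password.
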
diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java
index a3ce047aa..9c30ee119 100644
--- a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java
+++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java
@@ -52,7 +52,6 @@ import org.apache.gravitino.authorization.ranger.reference.VXGroup;
 import org.apache.gravitino.authorization.ranger.reference.VXGroupList;
 import org.apache.gravitino.authorization.ranger.reference.VXUser;
 import org.apache.gravitino.authorization.ranger.reference.VXUserList;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
 import org.apache.gravitino.connector.authorization.AuthorizationPlugin;
 import org.apache.gravitino.exceptions.AuthorizationPluginException;
 import org.apache.gravitino.meta.AuditInfo;
@@ -88,17 +87,13 @@ public abstract class RangerAuthorizationPlugin
 
   protected RangerAuthorizationPlugin(String metalake, Map<String, String> config) {
     this.metalake = metalake;
-    String rangerUrl = config.get(AuthorizationPropertiesMeta.RANGER_ADMIN_URL);
-    String authType = config.get(AuthorizationPropertiesMeta.RANGER_AUTH_TYPE);
-    rangerAdminName = config.get(AuthorizationPropertiesMeta.RANGER_USERNAME);
+    RangerAuthorizationProperties.validate(config);
+    String rangerUrl = config.get(RangerAuthorizationProperties.RANGER_ADMIN_URL);
+    String authType = config.get(RangerAuthorizationProperties.RANGER_AUTH_TYPE);
+    rangerAdminName = config.get(RangerAuthorizationProperties.RANGER_USERNAME);
     // Apache Ranger Password should be minimum 8 characters with min one alphabet and one numeric.
-    String password = config.get(AuthorizationPropertiesMeta.RANGER_PASSWORD);
-    rangerServiceName = config.get(AuthorizationPropertiesMeta.RANGER_SERVICE_NAME);
-    Preconditions.checkArgument(rangerUrl != null, "Ranger admin URL is required");
-    Preconditions.checkArgument(authType != null, "Ranger auth type is required");
-    Preconditions.checkArgument(rangerAdminName != null, "Ranger username is required");
-    Preconditions.checkArgument(password != null, "Ranger password is required");
-    Preconditions.checkArgument(rangerServiceName != null, "Ranger service name is required");
+    String password = config.get(RangerAuthorizationProperties.RANGER_PASSWORD);
+    rangerServiceName = config.get(RangerAuthorizationProperties.RANGER_SERVICE_NAME);
     rangerClient = new RangerClientExtension(rangerUrl, authType, rangerAdminName, password);
 
     rangerHelper =
diff --git a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationProperties.java b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationProperties.java
new file mode 100644
index 000000000..e7fee3088
--- /dev/null
+++ b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationProperties.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.gravitino.authorization.ranger;
+
+import com.google.common.base.Preconditions;
+import java.util.Map;
+
+/** The properties for Ranger authorization plugin. */
+public class RangerAuthorizationProperties {
+  /** Ranger admin web URIs */
+  public static final String RANGER_ADMIN_URL = "authorization.ranger.admin.url";
+
+  /** Ranger service type */
+  public static final String RANGER_SERVICE_TYPE = "authorization.ranger.service.type";
+
+  /** Ranger service name */
+  public static final String RANGER_SERVICE_NAME = "authorization.ranger.service.name";
+
+  /** Ranger authentication type kerberos or simple */
+  public static final String RANGER_AUTH_TYPE = "authorization.ranger.auth.type";
+
+  /**
+   * Ranger admin web login username(auth_type=simple), or kerberos principal(auth_type=kerberos)
+   */
+  public static final String RANGER_USERNAME = "authorization.ranger.username";
+
+  /**
+   * Ranger admin web login user password(auth_type=simple), or path of the keytab
+   * file(auth_type=kerberos)
+   */
+  public static final String RANGER_PASSWORD = "authorization.ranger.password";
+
+  public static void validate(Map<String, String> properties) {
+    Preconditions.checkArgument(
+        properties.containsKey(RANGER_ADMIN_URL),
+        String.format("%s is required", RANGER_ADMIN_URL));
+    Preconditions.checkArgument(
+        properties.containsKey(RANGER_SERVICE_TYPE),
+        String.format("%s is required", RANGER_SERVICE_TYPE));
+    Preconditions.checkArgument(
+        properties.containsKey(RANGER_SERVICE_NAME),
+        String.format("%s is required", RANGER_SERVICE_NAME));
+    Preconditions.checkArgument(
+        properties.containsKey(RANGER_AUTH_TYPE),
+        String.format("%s is required", RANGER_AUTH_TYPE));
+    Preconditions.checkArgument(
+        properties.containsKey(RANGER_USERNAME), String.format("%s is required", RANGER_USERNAME));
+    Preconditions.checkArgument(
+        properties.containsKey(RANGER_PASSWORD), String.format("%s is required", RANGER_PASSWORD));
+    Preconditions.checkArgument(
+        properties.get(RANGER_ADMIN_URL) != null,
+        String.format("%s is required", RANGER_ADMIN_URL));
+    Preconditions.checkArgument(
+        properties.get(RANGER_SERVICE_NAME) != null,
+        String.format("%s is required", RANGER_SERVICE_NAME));
+    Preconditions.checkArgument(
+        properties.get(RANGER_AUTH_TYPE) != null,
+        String.format("%s is required", RANGER_AUTH_TYPE));
+    Preconditions.checkArgument(
+        properties.get(RANGER_USERNAME) != null, String.format("%s is required", RANGER_USERNAME));
+    Preconditions.checkArgument(
+        properties.get(RANGER_PASSWORD) != null, String.format("%s is required", RANGER_PASSWORD));
+  }
+}
diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestChainAuthorizationProperties.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestChainAuthorizationProperties.java
new file mode 100644
index 000000000..5d19f2340
--- /dev/null
+++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestChainAuthorizationProperties.java
@@ -0,0 +1,213 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.gravitino.authorization.ranger;
+
+import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER;
+import static org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE;
+
+import com.google.common.collect.Maps;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.gravitino.catalog.hive.HiveConstants;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+public class TestChainAuthorizationProperties {
+  @Test
+  void testChainOnePlugin() {
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put("authorization.chain.plugins", "hive1");
+    properties.put("authorization.chain.hive1.provider", "ranger");
+    properties.put("authorization.chain.hive1.ranger.auth.type", "simple");
+    properties.put("authorization.chain.hive1.ranger.admin.url", 
"http://localhost:6080";);
+    properties.put("authorization.chain.hive1.ranger.username", "admin");
+    properties.put("authorization.chain.hive1.ranger.password", "admin");
+    properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+    properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev");
+    Assertions.assertDoesNotThrow(() -> ChainAuthorizationProperties.validate(properties));
+  }
+
+  @Test
+  void testChainTwoPlugins() {
+    Map<String, String> properties = new HashMap<>();
+    properties.put(HiveConstants.METASTORE_URIS, "thrift://localhost:9083");
+    properties.put("gravitino.bypass.hive.metastore.client.capability.check", 
"true");
+    properties.put(IMPERSONATION_ENABLE, "true");
+    properties.put(AUTHORIZATION_PROVIDER, "chain");
+    properties.put("authorization.chain.plugins", "hive1,hdfs1");
+    properties.put("authorization.chain.hive1.provider", "ranger");
+    properties.put("authorization.chain.hive1.ranger.auth.type", "simple");
+    properties.put("authorization.chain.hive1.ranger.admin.url", 
"http://localhost:6080";);
+    properties.put("authorization.chain.hive1.ranger.username", "admin");
+    properties.put("authorization.chain.hive1.ranger.password", "admin");
+    properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+    properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev");
+    properties.put("authorization.chain.hdfs1.provider", "ranger");
+    properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple");
+    properties.put("authorization.chain.hdfs1.ranger.admin.url", 
"http://localhost:6080";);
+    properties.put("authorization.chain.hdfs1.ranger.username", "admin");
+    properties.put("authorization.chain.hdfs1.ranger.password", "admin");
+    properties.put("authorization.chain.hdfs1.ranger.service.type", "hadoop");
+    properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev");
+    Assertions.assertDoesNotThrow(() -> ChainAuthorizationProperties.validate(properties));
+  }
+
+  @Test
+  void testPluginsHasSpace() {
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put("authorization.chain.plugins", "hive1, hdfs1");
+    properties.put("authorization.chain.hive1.provider", "ranger");
+    properties.put("authorization.chain.hive1.ranger.auth.type", "simple");
+    properties.put("authorization.chain.hive1.ranger.admin.url", 
"http://localhost:6080";);
+    properties.put("authorization.chain.hive1.ranger.username", "admin");
+    properties.put("authorization.chain.hive1.ranger.password", "admin");
+    properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+    properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev");
+    properties.put("authorization.chain.hdfs1.provider", "ranger");
+    properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple");
+    properties.put("authorization.chain.hdfs1.ranger.admin.url", 
"http://localhost:6080";);
+    properties.put("authorization.chain.hdfs1.ranger.username", "admin");
+    properties.put("authorization.chain.hdfs1.ranger.password", "admin");
+    properties.put("authorization.chain.hdfs1.ranger.service.type", "hadoop");
+    properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev");
+    Assertions.assertDoesNotThrow(() -> ChainAuthorizationProperties.validate(properties));
+  }
+
+  @Test
+  void testPluginsOneButHasTwoPluginConfig() {
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put("authorization.chain.plugins", "hive1");
+    properties.put("authorization.chain.hive1.provider", "ranger");
+    properties.put("authorization.chain.hive1.ranger.auth.type", "simple");
+    properties.put("authorization.chain.hive1.ranger.admin.url", 
"http://localhost:6080";);
+    properties.put("authorization.chain.hive1.ranger.username", "admin");
+    properties.put("authorization.chain.hive1.ranger.password", "admin");
+    properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+    properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev");
+    properties.put("authorization.chain.hdfs1.provider", "ranger");
+    properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple");
+    properties.put("authorization.chain.hdfs1.ranger.admin.url", 
"http://localhost:6080";);
+    properties.put("authorization.chain.hdfs1.ranger.username", "admin");
+    properties.put("authorization.chain.hdfs1.ranger.password", "admin");
+    properties.put("authorization.chain.hdfs1.ranger.service.type", "hadoop");
+    properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev");
+    Assertions.assertThrows(
+        IllegalArgumentException.class, () -> ChainAuthorizationProperties.validate(properties));
+  }
+
+  @Test
+  void testPluginsHasPoint() {
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put("authorization.chain.plugins", "hive.1,hdfs1");
+    properties.put("authorization.chain.hive.1.provider", "ranger");
+    properties.put("authorization.chain.hive.1.ranger.auth.type", "simple");
+    properties.put("authorization.chain.hive.1.ranger.admin.url", 
"http://localhost:6080";);
+    properties.put("authorization.chain.hive.1.ranger.username", "admin");
+    properties.put("authorization.chain.hive.1.ranger.password", "admin");
+    properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+    properties.put("authorization.chain.hive.1.ranger.service.name", 
"hiveDev");
+    properties.put("authorization.chain.hdfs1.provider", "ranger");
+    properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple");
+    properties.put("authorization.chain.hdfs1.ranger.admin.url", 
"http://localhost:6080";);
+    properties.put("authorization.chain.hdfs1.ranger.username", "admin");
+    properties.put("authorization.chain.hdfs1.ranger.password", "admin");
+    properties.put("authorization.chain.hdfs1.ranger.service.type", "hadoop");
+    properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev");
+    Assertions.assertThrows(
+        IllegalArgumentException.class, () -> ChainAuthorizationProperties.validate(properties));
+  }
+
+  @Test
+  void testErrorPluginName() {
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put("authorization.chain.plugins", "hive1,hdfs1");
+    properties.put("authorization.chain.hive1.provider", "ranger");
+    properties.put("authorization.chain.hive1.ranger.auth.type", "simple");
+    properties.put("authorization.chain.hive1.ranger.admin.url", 
"http://localhost:6080";);
+    properties.put("authorization.chain.hive1.ranger.username", "admin");
+    properties.put("authorization.chain.hive1.ranger.password", "admin");
+    properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+    properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev");
+    properties.put("authorization.chain.hdfs1.provider", "ranger");
+    properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple");
+    properties.put("authorization.chain.hdfs1.ranger.admin.url", 
"http://localhost:6080";);
+    properties.put("authorization.chain.hdfs1.ranger.username", "admin");
+    properties.put("authorization.chain.hdfs1.ranger.password", "admin");
+    properties.put("authorization.chain.hdfs1.ranger.service.type", "hadoop");
+    properties.put("authorization.chain.plug3.ranger.service.name", "hdfsDev");
+    Assertions.assertThrows(
+        IllegalArgumentException.class, () -> ChainAuthorizationProperties.validate(properties));
+  }
+
+  @Test
+  void testDuplicationPluginName() {
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put("authorization.chain.plugins", "hive1,hive1,hdfs1");
+    properties.put("authorization.chain.hive1.provider", "ranger");
+    properties.put("authorization.chain.hive1.ranger.auth.type", "simple");
+    properties.put("authorization.chain.hive1.ranger.admin.url", 
"http://localhost:6080";);
+    properties.put("authorization.chain.hive1.ranger.username", "admin");
+    properties.put("authorization.chain.hive1.ranger.password", "admin");
+    properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+    properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev");
+    properties.put("authorization.chain.hdfs1.provider", "ranger");
+    properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple");
+    properties.put("authorization.chain.hdfs1.ranger.admin.url", 
"http://localhost:6080";);
+    properties.put("authorization.chain.hdfs1.ranger.username", "admin");
+    properties.put("authorization.chain.hdfs1.ranger.password", "admin");
+    properties.put("authorization.chain.hdfs1.ranger.service.type", "hadoop");
+    properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev");
+    Assertions.assertThrows(
+        IllegalArgumentException.class, () -> ChainAuthorizationProperties.validate(properties));
+  }
+
+  @Test
+  void testFetchRangerProperties() {
+    Map<String, String> properties = new HashMap<>();
+    properties.put(HiveConstants.METASTORE_URIS, "thrift://localhost:9083");
+    properties.put("gravitino.bypass.hive.metastore.client.capability.check", 
"true");
+    properties.put(IMPERSONATION_ENABLE, "true");
+    properties.put(AUTHORIZATION_PROVIDER, "chain");
+    properties.put("authorization.chain.plugins", "hive1,hdfs1");
+    properties.put("authorization.chain.hive1.provider", "ranger");
+    properties.put("authorization.chain.hive1.ranger.auth.type", "simple");
+    properties.put("authorization.chain.hive1.ranger.admin.url", 
"http://localhost:6080";);
+    properties.put("authorization.chain.hive1.ranger.username", "admin");
+    properties.put("authorization.chain.hive1.ranger.password", "admin");
+    properties.put("authorization.chain.hive1.ranger.service.type", "hive");
+    properties.put("authorization.chain.hive1.ranger.service.name", "hiveDev");
+    properties.put("authorization.chain.hdfs1.provider", "ranger");
+    properties.put("authorization.chain.hdfs1.ranger.auth.type", "simple");
+    properties.put("authorization.chain.hdfs1.ranger.admin.url", 
"http://localhost:6080";);
+    properties.put("authorization.chain.hdfs1.ranger.username", "admin");
+    properties.put("authorization.chain.hdfs1.ranger.password", "admin");
+    properties.put("authorization.chain.hdfs1.ranger.service.type", "hadoop");
+    properties.put("authorization.chain.hdfs1.ranger.service.name", "hdfsDev");
+
+    Map<String, String> rangerHiveProperties =
+        ChainAuthorizationProperties.fetchAuthPluginProperties("hive1", 
properties);
+    Assertions.assertDoesNotThrow(
+        () -> RangerAuthorizationProperties.validate(rangerHiveProperties));
+
+    Map<String, String> rangerHDFSProperties =
+        ChainAuthorizationProperties.fetchAuthPluginProperties("hdfs1", 
properties);
+    Assertions.assertDoesNotThrow(
+        () -> RangerAuthorizationProperties.validate(rangerHDFSProperties));
+  }
+}
diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestRangerAuthorizationProperties.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestRangerAuthorizationProperties.java
new file mode 100644
index 000000000..a90b164a2
--- /dev/null
+++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/TestRangerAuthorizationProperties.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.gravitino.authorization.ranger;
+
+import com.google.common.collect.Maps;
+import java.util.Map;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+public class TestRangerAuthorizationProperties {
+  @Test
+  void testRangerProperties() {
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put("authorization.ranger.auth.type", "simple");
+    properties.put("authorization.ranger.admin.url", "http://localhost:6080";);
+    properties.put("authorization.ranger.username", "admin");
+    properties.put("authorization.ranger.password", "admin");
+    properties.put("authorization.ranger.service.type", "hive");
+    properties.put("authorization.ranger.service.name", "hiveDev");
+    Assertions.assertDoesNotThrow(() -> RangerAuthorizationProperties.validate(properties));
+  }
+
+  @Test
+  void testRangerPropertiesLoseAuthType() {
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put("authorization.ranger.admin.url", "http://localhost:6080";);
+    properties.put("authorization.ranger.username", "admin");
+    properties.put("authorization.ranger.password", "admin");
+    properties.put("authorization.ranger.service.type", "hive");
+    properties.put("authorization.ranger.service.name", "hiveDev");
+    Assertions.assertThrows(
+        IllegalArgumentException.class, () -> RangerAuthorizationProperties.validate(properties));
+  }
+
+  @Test
+  void testRangerPropertiesLoseAdminUrl() {
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put("authorization.ranger.auth.type", "simple");
+    properties.put("authorization.ranger.username", "admin");
+    properties.put("authorization.ranger.password", "admin");
+    properties.put("authorization.ranger.service.type", "hive");
+    properties.put("authorization.ranger.service.name", "hiveDev");
+    Assertions.assertThrows(
+        IllegalArgumentException.class, () -> RangerAuthorizationProperties.validate(properties));
+  }
+
+  @Test
+  void testRangerPropertiesLoseUserName() {
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put("authorization.ranger.auth.type", "simple");
+    properties.put("authorization.ranger.admin.url", "http://localhost:6080";);
+    properties.put("authorization.ranger.password", "admin");
+    properties.put("authorization.ranger.service.type", "hive");
+    properties.put("authorization.ranger.service.name", "hiveDev");
+    Assertions.assertThrows(
+        IllegalArgumentException.class, () -> RangerAuthorizationProperties.validate(properties));
+  }
+
+  @Test
+  void testRangerPropertiesLosePassword() {
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put("authorization.ranger.auth.type", "simple");
+    properties.put("authorization.ranger.admin.url", "http://localhost:6080";);
+    properties.put("authorization.ranger.username", "admin");
+    properties.put("authorization.ranger.service.type", "hive");
+    properties.put("authorization.ranger.service.name", "hiveDev");
+    Assertions.assertThrows(
+        IllegalArgumentException.class, () -> RangerAuthorizationProperties.validate(properties));
+  }
+
+  @Test
+  void testRangerPropertiesLoseServiceType() {
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put("authorization.ranger.auth.type", "simple");
+    properties.put("authorization.ranger.admin.url", "http://localhost:6080";);
+    properties.put("authorization.ranger.username", "admin");
+    properties.put("authorization.ranger.password", "admin");
+    properties.put("authorization.ranger.service.name", "hiveDev");
+    Assertions.assertThrows(
+        IllegalArgumentException.class, () -> RangerAuthorizationProperties.validate(properties));
+  }
+
+  @Test
+  void testRangerPropertiesLoseServiceName() {
+    Map<String, String> properties = Maps.newHashMap();
+    properties.put("authorization.ranger.auth.type", "simple");
+    properties.put("authorization.ranger.admin.url", "http://localhost:6080";);
+    properties.put("authorization.ranger.username", "admin");
+    properties.put("authorization.ranger.password", "admin");
+    properties.put("authorization.ranger.service.type", "hive");
+    Assertions.assertThrows(
+        IllegalArgumentException.class, () -> RangerAuthorizationProperties.validate(properties));
+  }
+}
diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java
index bbaae3278..56f097815 100644
--- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java
+++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerFilesetIT.java
@@ -23,10 +23,6 @@ import static org.apache.gravitino.authorization.ranger.integration.test.RangerI
 import static org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.rangerClient;
 import static org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.rangerHelper;
 import static org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_AUTH_TYPE;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_PASSWORD;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_SERVICE_NAME;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_USERNAME;
 import static org.apache.gravitino.integration.test.container.RangerContainer.RANGER_SERVER_PORT;
 
 import com.google.common.collect.ImmutableMap;
@@ -49,10 +45,10 @@ import org.apache.gravitino.auth.AuthenticatorType;
 import org.apache.gravitino.authorization.Privileges;
 import org.apache.gravitino.authorization.SecurableObject;
 import org.apache.gravitino.authorization.SecurableObjects;
+import org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties;
 import org.apache.gravitino.authorization.ranger.RangerHelper;
 import org.apache.gravitino.authorization.ranger.RangerPrivileges;
 import org.apache.gravitino.client.GravitinoMetalake;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
 import org.apache.gravitino.file.Fileset;
 import org.apache.gravitino.integration.test.container.HiveContainer;
 import org.apache.gravitino.integration.test.container.RangerContainer;
@@ -540,15 +536,17 @@ public class RangerFilesetIT extends BaseIT {
             "true",
             AUTHORIZATION_PROVIDER,
             "ranger",
-            RANGER_SERVICE_NAME,
+            RangerAuthorizationProperties.RANGER_SERVICE_TYPE,
+            "HDFS",
+            RangerAuthorizationProperties.RANGER_SERVICE_NAME,
             RangerITEnv.RANGER_HDFS_REPO_NAME,
-            AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
+            RangerAuthorizationProperties.RANGER_ADMIN_URL,
             RANGER_ADMIN_URL,
-            RANGER_AUTH_TYPE,
+            RangerAuthorizationProperties.RANGER_AUTH_TYPE,
             RangerContainer.authType,
-            RANGER_USERNAME,
+            RangerAuthorizationProperties.RANGER_USERNAME,
             RangerContainer.rangerUserName,
-            RANGER_PASSWORD,
+            RangerAuthorizationProperties.RANGER_PASSWORD,
             RangerContainer.rangerPassword));
 
     catalog = metalake.loadCatalog(catalogName);
diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
index 600463fbc..baec9434c 100644
--- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
+++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
@@ -20,10 +20,6 @@ package org.apache.gravitino.authorization.ranger.integration.test;
 
 import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER;
 import static org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_AUTH_TYPE;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_PASSWORD;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_SERVICE_NAME;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_USERNAME;
 import static org.apache.gravitino.integration.test.container.RangerContainer.RANGER_SERVER_PORT;
 
 import com.google.common.collect.ImmutableMap;
@@ -33,8 +29,8 @@ import org.apache.gravitino.Catalog;
 import org.apache.gravitino.Configs;
 import org.apache.gravitino.auth.AuthConstants;
 import org.apache.gravitino.auth.AuthenticatorType;
+import org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties;
 import org.apache.gravitino.catalog.hive.HiveConstants;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
 import org.apache.gravitino.integration.test.container.HiveContainer;
 import org.apache.gravitino.integration.test.container.RangerContainer;
 import org.apache.gravitino.integration.test.util.GravitinoITUtils;
@@ -179,15 +175,17 @@ public class RangerHiveE2EIT extends RangerBaseE2EIT {
             "true",
             AUTHORIZATION_PROVIDER,
             "ranger",
-            RANGER_SERVICE_NAME,
+            RangerAuthorizationProperties.RANGER_SERVICE_TYPE,
+            "HadoopSQL",
+            RangerAuthorizationProperties.RANGER_SERVICE_NAME,
             RangerITEnv.RANGER_HIVE_REPO_NAME,
-            AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
+            RangerAuthorizationProperties.RANGER_ADMIN_URL,
             RANGER_ADMIN_URL,
-            RANGER_AUTH_TYPE,
+            RangerAuthorizationProperties.RANGER_AUTH_TYPE,
             RangerContainer.authType,
-            RANGER_USERNAME,
+            RangerAuthorizationProperties.RANGER_USERNAME,
             RangerContainer.rangerUserName,
-            RANGER_PASSWORD,
+            RangerAuthorizationProperties.RANGER_PASSWORD,
             RangerContainer.rangerPassword);
 
     metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, provider, "comment", properties);
diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
index f6b83bb9d..b3be410ea 100644
--- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
+++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
@@ -35,10 +35,10 @@ import org.apache.gravitino.authorization.Role;
 import org.apache.gravitino.authorization.ranger.RangerAuthorizationHDFSPlugin;
 import org.apache.gravitino.authorization.ranger.RangerAuthorizationHadoopSQLPlugin;
 import org.apache.gravitino.authorization.ranger.RangerAuthorizationPlugin;
+import org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties;
 import org.apache.gravitino.authorization.ranger.RangerHelper;
 import org.apache.gravitino.authorization.ranger.RangerPrivileges;
 import org.apache.gravitino.authorization.ranger.reference.RangerDefines;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
 import org.apache.gravitino.integration.test.container.ContainerSuite;
 import org.apache.gravitino.integration.test.container.HiveContainer;
 import org.apache.gravitino.integration.test.container.RangerContainer;
@@ -98,18 +98,20 @@ public class RangerITEnv {
         RangerAuthorizationHadoopSQLPlugin.getInstance(
             "metalake",
             ImmutableMap.of(
-                AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
+                RangerAuthorizationProperties.RANGER_ADMIN_URL,
                 String.format(
                     "http://%s:%d";,
                     
containerSuite.getRangerContainer().getContainerIpAddress(),
                     RangerContainer.RANGER_SERVER_PORT),
-                AuthorizationPropertiesMeta.RANGER_AUTH_TYPE,
+                RangerAuthorizationProperties.RANGER_AUTH_TYPE,
                 RangerContainer.authType,
-                AuthorizationPropertiesMeta.RANGER_USERNAME,
+                RangerAuthorizationProperties.RANGER_USERNAME,
                 RangerContainer.rangerUserName,
-                AuthorizationPropertiesMeta.RANGER_PASSWORD,
+                RangerAuthorizationProperties.RANGER_PASSWORD,
                 RangerContainer.rangerPassword,
-                AuthorizationPropertiesMeta.RANGER_SERVICE_NAME,
+                RangerAuthorizationProperties.RANGER_SERVICE_TYPE,
+                "HadoopSQL",
+                RangerAuthorizationProperties.RANGER_SERVICE_NAME,
                 RangerITEnv.RANGER_HIVE_REPO_NAME));
 
     RangerAuthorizationHDFSPlugin spyRangerAuthorizationHDFSPlugin =
@@ -117,18 +119,20 @@ public class RangerITEnv {
             RangerAuthorizationHDFSPlugin.getInstance(
                 "metalake",
                 ImmutableMap.of(
-                    AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
+                    RangerAuthorizationProperties.RANGER_ADMIN_URL,
                     String.format(
                         "http://%s:%d";,
                         
containerSuite.getRangerContainer().getContainerIpAddress(),
                         RangerContainer.RANGER_SERVER_PORT),
-                    AuthorizationPropertiesMeta.RANGER_AUTH_TYPE,
+                    RangerAuthorizationProperties.RANGER_AUTH_TYPE,
                     RangerContainer.authType,
-                    AuthorizationPropertiesMeta.RANGER_USERNAME,
+                    RangerAuthorizationProperties.RANGER_USERNAME,
                     RangerContainer.rangerUserName,
-                    AuthorizationPropertiesMeta.RANGER_PASSWORD,
+                    RangerAuthorizationProperties.RANGER_PASSWORD,
                     RangerContainer.rangerPassword,
-                    AuthorizationPropertiesMeta.RANGER_SERVICE_NAME,
+                    RangerAuthorizationProperties.RANGER_SERVICE_TYPE,
+                    "HDFS",
+                    RangerAuthorizationProperties.RANGER_SERVICE_NAME,
                     RangerITEnv.RANGER_HDFS_REPO_NAME)));
     doReturn("/test").when(spyRangerAuthorizationHDFSPlugin).getFileSetPath(Mockito.any());
     rangerAuthHDFSPlugin = spyRangerAuthorizationHDFSPlugin;
diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java
index a4fc1253e..d8bd70c64 100644
--- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java
+++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerIcebergE2EIT.java
@@ -21,16 +21,12 @@ package org.apache.gravitino.authorization.ranger.integration.test;
 import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER;
 import static org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.currentFunName;
 import static org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_AUTH_TYPE;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_PASSWORD;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_SERVICE_NAME;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_USERNAME;
 import static org.apache.gravitino.integration.test.container.RangerContainer.RANGER_SERVER_PORT;
 
-import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.Map;
 import org.apache.gravitino.Catalog;
 import org.apache.gravitino.Configs;
@@ -39,8 +35,8 @@ import org.apache.gravitino.auth.AuthenticatorType;
 import org.apache.gravitino.authorization.Privileges;
 import org.apache.gravitino.authorization.SecurableObject;
 import org.apache.gravitino.authorization.SecurableObjects;
+import org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties;
 import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergConstants;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
 import org.apache.gravitino.integration.test.container.HiveContainer;
 import org.apache.gravitino.integration.test.container.RangerContainer;
 import org.apache.gravitino.integration.test.util.GravitinoITUtils;
@@ -168,31 +164,24 @@ public class RangerIcebergE2EIT extends RangerBaseE2EIT {
   }
 
   private static void createCatalog() {
-    Map<String, String> properties =
-        ImmutableMap.of(
-            IcebergConstants.URI,
-            HIVE_METASTORE_URIS,
-            IcebergConstants.CATALOG_BACKEND,
-            "hive",
-            IcebergConstants.WAREHOUSE,
-            String.format(
-                "hdfs://%s:%d/user/hive/warehouse",
-                containerSuite.getHiveRangerContainer().getContainerIpAddress(),
-                HiveContainer.HDFS_DEFAULTFS_PORT),
-            IMPERSONATION_ENABLE,
-            "true",
-            AUTHORIZATION_PROVIDER,
-            "ranger",
-            RANGER_SERVICE_NAME,
-            RangerITEnv.RANGER_HIVE_REPO_NAME,
-            AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
-            RANGER_ADMIN_URL,
-            RANGER_AUTH_TYPE,
-            RangerContainer.authType,
-            RANGER_USERNAME,
-            RangerContainer.rangerUserName,
-            RANGER_PASSWORD,
-            RangerContainer.rangerPassword);
+    Map<String, String> properties = new HashMap<>();
+    properties.put(IcebergConstants.URI, HIVE_METASTORE_URIS);
+    properties.put(IcebergConstants.CATALOG_BACKEND, "hive");
+    properties.put(
+        IcebergConstants.WAREHOUSE,
+        String.format(
+            "hdfs://%s:%d/user/hive/warehouse",
+            containerSuite.getHiveRangerContainer().getContainerIpAddress(),
+            HiveContainer.HDFS_DEFAULTFS_PORT));
+    properties.put(IMPERSONATION_ENABLE, "true");
+    properties.put(AUTHORIZATION_PROVIDER, "ranger");
+    properties.put(RangerAuthorizationProperties.RANGER_SERVICE_TYPE, "HadoopSQL");
+    properties.put(
+        RangerAuthorizationProperties.RANGER_SERVICE_NAME, RangerITEnv.RANGER_HIVE_REPO_NAME);
+    properties.put(RangerAuthorizationProperties.RANGER_ADMIN_URL, RANGER_ADMIN_URL);
+    properties.put(RangerAuthorizationProperties.RANGER_AUTH_TYPE, RangerContainer.authType);
+    properties.put(RangerAuthorizationProperties.RANGER_USERNAME, RangerContainer.rangerUserName);
+    properties.put(RangerAuthorizationProperties.RANGER_PASSWORD, RangerContainer.rangerPassword);
 
     metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, provider, "comment", properties);
     catalog = metalake.loadCatalog(catalogName);
diff --git a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java
index b2529837e..79d1eb187 100644
--- a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java
+++ b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerPaimonE2EIT.java
@@ -20,10 +20,6 @@ package org.apache.gravitino.authorization.ranger.integration.test;
 
 import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER;
 import static org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.currentFunName;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_AUTH_TYPE;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_PASSWORD;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_SERVICE_NAME;
-import static org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_USERNAME;
 import static org.apache.gravitino.integration.test.container.RangerContainer.RANGER_SERVER_PORT;
 
 import com.google.common.collect.ImmutableMap;
@@ -38,7 +34,7 @@ import org.apache.gravitino.auth.AuthenticatorType;
 import org.apache.gravitino.authorization.Privileges;
 import org.apache.gravitino.authorization.SecurableObject;
 import org.apache.gravitino.authorization.SecurableObjects;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
+import org.apache.gravitino.authorization.ranger.RangerAuthorizationProperties;
 import org.apache.gravitino.integration.test.container.HiveContainer;
 import org.apache.gravitino.integration.test.container.RangerContainer;
 import org.apache.gravitino.integration.test.util.GravitinoITUtils;
@@ -197,15 +193,17 @@ public class RangerPaimonE2EIT extends RangerBaseE2EIT {
                 HiveContainer.HDFS_DEFAULTFS_PORT),
             AUTHORIZATION_PROVIDER,
             "ranger",
-            RANGER_SERVICE_NAME,
+            RangerAuthorizationProperties.RANGER_SERVICE_TYPE,
+            "HadoopSQL",
+            RangerAuthorizationProperties.RANGER_SERVICE_NAME,
             RangerITEnv.RANGER_HIVE_REPO_NAME,
-            AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
+            RangerAuthorizationProperties.RANGER_ADMIN_URL,
             RANGER_ADMIN_URL,
-            RANGER_AUTH_TYPE,
+            RangerAuthorizationProperties.RANGER_AUTH_TYPE,
             RangerContainer.authType,
-            RANGER_USERNAME,
+            RangerAuthorizationProperties.RANGER_USERNAME,
             RangerContainer.rangerUserName,
-            RANGER_PASSWORD,
+            RangerAuthorizationProperties.RANGER_PASSWORD,
             RangerContainer.rangerPassword);
 
     metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, provider, "comment", properties);
diff --git a/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalog.java b/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalog.java
index 717694e18..98711f98a 100644
--- a/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalog.java
+++ b/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalog.java
@@ -29,8 +29,8 @@ import org.apache.gravitino.connector.capability.Capability;
 /** Implementation of an Apache Hive catalog in Apache Gravitino. */
 public class HiveCatalog extends BaseCatalog<HiveCatalog> {
 
-  static final HiveCatalogPropertiesMeta CATALOG_PROPERTIES_METADATA =
-      new HiveCatalogPropertiesMeta();
+  static final HiveCatalogPropertiesMetadata CATALOG_PROPERTIES_METADATA =
+      new HiveCatalogPropertiesMetadata();
 
   static final HiveSchemaPropertiesMetadata SCHEMA_PROPERTIES_METADATA =
       new HiveSchemaPropertiesMetadata();
@@ -69,8 +69,8 @@ public class HiveCatalog extends BaseCatalog<HiveCatalog> {
   protected Optional<ProxyPlugin> newProxyPlugin(Map<String, String> config) {
     boolean impersonationEnabled =
         (boolean)
-            new HiveCatalogPropertiesMeta()
-                .getOrDefault(config, HiveCatalogPropertiesMeta.IMPERSONATION_ENABLE);
+            new HiveCatalogPropertiesMetadata()
+                .getOrDefault(config, HiveCatalogPropertiesMetadata.IMPERSONATION_ENABLE);
     if (!impersonationEnabled) {
       return Optional.empty();
     }
diff --git a/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogOperations.java b/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogOperations.java
index bb7d06f6b..902fce377 100644
--- a/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogOperations.java
+++ b/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogOperations.java
@@ -18,9 +18,9 @@
  */
 package org.apache.gravitino.catalog.hive;
 
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.LIST_ALL_TABLES;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.PRINCIPAL;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.LIST_ALL_TABLES;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.PRINCIPAL;
 import static org.apache.gravitino.catalog.hive.HiveTable.SUPPORT_TABLE_TYPES;
 import static org.apache.gravitino.catalog.hive.HiveTablePropertiesMetadata.COMMENT;
 import static org.apache.gravitino.catalog.hive.HiveTablePropertiesMetadata.TABLE_TYPE;
@@ -200,7 +200,7 @@ public class HiveCatalogOperations implements CatalogOperations, SupportsSchemas
             (String)
                 propertiesMetadata
                     .catalogPropertiesMetadata()
-                    .getOrDefault(conf, HiveCatalogPropertiesMeta.KEY_TAB_URI);
+                    .getOrDefault(conf, HiveCatalogPropertiesMetadata.KEY_TAB_URI);
         Preconditions.checkArgument(StringUtils.isNotBlank(keytabUri), "Keytab uri can't be blank");
         // TODO: Support to download the file from Kerberos HDFS
         Preconditions.checkArgument(
@@ -210,7 +210,7 @@ public class HiveCatalogOperations implements CatalogOperations, SupportsSchemas
             (int)
                 propertiesMetadata
                     .catalogPropertiesMetadata()
-                    .getOrDefault(conf, HiveCatalogPropertiesMeta.FETCH_TIMEOUT_SEC);
+                    .getOrDefault(conf, HiveCatalogPropertiesMetadata.FETCH_TIMEOUT_SEC);
 
         FetchFileUtils.fetchFileFromUri(
             keytabUri, keytabPath.toFile(), fetchKeytabFileTimeout, hadoopConf);
@@ -244,7 +244,7 @@ public class HiveCatalogOperations implements CatalogOperations, SupportsSchemas
             (int)
                 propertiesMetadata
                     .catalogPropertiesMetadata()
-                    .getOrDefault(conf, HiveCatalogPropertiesMeta.CHECK_INTERVAL_SEC);
+                    .getOrDefault(conf, HiveCatalogPropertiesMetadata.CHECK_INTERVAL_SEC);
 
         checkTgtExecutor.scheduleAtFixedRate(
             () -> {
diff --git a/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMeta.java b/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMetadata.java
similarity index 95%
rename from catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMeta.java
rename to catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMetadata.java
index dc532e601..8897d7705 100644
--- a/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMeta.java
+++ b/catalogs/catalog-hive/src/main/java/org/apache/gravitino/catalog/hive/HiveCatalogPropertiesMetadata.java
@@ -21,12 +21,11 @@ package org.apache.gravitino.catalog.hive;
 
 import com.google.common.collect.ImmutableMap;
 import java.util.Map;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
 import org.apache.gravitino.connector.BaseCatalogPropertiesMetadata;
 import org.apache.gravitino.connector.PropertyEntry;
 import org.apache.gravitino.hive.ClientPropertiesMetadata;
 
-public class HiveCatalogPropertiesMeta extends BaseCatalogPropertiesMetadata {
+public class HiveCatalogPropertiesMetadata extends BaseCatalogPropertiesMetadata {
 
   public static final String CLIENT_POOL_SIZE = HiveConstants.CLIENT_POOL_SIZE;
   public static final String METASTORE_URIS = HiveConstants.METASTORE_URIS;
@@ -110,7 +109,6 @@ public class HiveCatalogPropertiesMeta extends BaseCatalogPropertiesMetadata {
                   DEFAULT_LIST_ALL_TABLES,
                   false /* hidden */,
                   false /* reserved */))
-          .putAll(AuthorizationPropertiesMeta.RANGER_AUTHORIZATION_PROPERTY_ENTRIES)
           .putAll(CLIENT_PROPERTIES_METADATA.propertyEntries())
           .build();
 
diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalog.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalog.java
index 7b3f944b9..ddf761631 100644
--- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalog.java
+++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalog.java
@@ -21,7 +21,7 @@ package org.apache.gravitino.catalog.hive;
 import static org.apache.gravitino.catalog.hive.HiveCatalog.CATALOG_PROPERTIES_METADATA;
 import static org.apache.gravitino.catalog.hive.HiveCatalog.SCHEMA_PROPERTIES_METADATA;
 import static org.apache.gravitino.catalog.hive.HiveCatalog.TABLE_PROPERTIES_METADATA;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
 
 import com.google.common.collect.Maps;
 import java.time.Instant;
diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalogOperations.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalogOperations.java
index 9e355ed04..2c87bfd58 100644
--- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalogOperations.java
+++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveCatalogOperations.java
@@ -23,15 +23,15 @@ import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER;
 import static org.apache.gravitino.Catalog.CLOUD_NAME;
 import static org.apache.gravitino.Catalog.CLOUD_REGION_CODE;
 import static org.apache.gravitino.Catalog.PROPERTY_IN_USE;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.CHECK_INTERVAL_SEC;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.CLIENT_POOL_CACHE_EVICTION_INTERVAL_MS;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.CLIENT_POOL_SIZE;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.FETCH_TIMEOUT_SEC;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.IMPERSONATION_ENABLE;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.KEY_TAB_URI;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.LIST_ALL_TABLES;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.PRINCIPAL;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.CHECK_INTERVAL_SEC;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.CLIENT_POOL_CACHE_EVICTION_INTERVAL_MS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.CLIENT_POOL_SIZE;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.FETCH_TIMEOUT_SEC;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.IMPERSONATION_ENABLE;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.KEY_TAB_URI;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.LIST_ALL_TABLES;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.PRINCIPAL;
 import static org.apache.gravitino.catalog.hive.TestHiveCatalog.HIVE_PROPERTIES_METADATA;
 import static org.apache.gravitino.connector.BaseCatalog.CATALOG_BYPASS_PREFIX;
 import static org.mockito.ArgumentMatchers.any;
@@ -43,7 +43,6 @@ import com.google.common.collect.Maps;
 import java.util.Map;
 import org.apache.gravitino.Catalog;
 import org.apache.gravitino.NameIdentifier;
-import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
 import org.apache.gravitino.connector.BaseCatalog;
 import org.apache.gravitino.connector.PropertyEntry;
 import org.apache.gravitino.exceptions.ConnectionFailedException;
@@ -74,7 +73,7 @@ class TestHiveCatalogOperations {
     Map<String, PropertyEntry<?>> propertyEntryMap =
         HIVE_PROPERTIES_METADATA.catalogPropertiesMetadata().propertyEntries();
 
-    Assertions.assertEquals(21, propertyEntryMap.size());
+    Assertions.assertEquals(16, propertyEntryMap.size());
     Assertions.assertTrue(propertyEntryMap.containsKey(METASTORE_URIS));
     Assertions.assertTrue(propertyEntryMap.containsKey(Catalog.PROPERTY_PACKAGE));
     Assertions.assertTrue(propertyEntryMap.containsKey(BaseCatalog.CATALOG_OPERATION_IMPL));
@@ -83,17 +82,6 @@ class TestHiveCatalogOperations {
     Assertions.assertTrue(propertyEntryMap.containsKey(CLIENT_POOL_SIZE));
     Assertions.assertTrue(propertyEntryMap.containsKey(IMPERSONATION_ENABLE));
     Assertions.assertTrue(propertyEntryMap.containsKey(LIST_ALL_TABLES));
-    Assertions.assertTrue(
-        propertyEntryMap.containsKey(AuthorizationPropertiesMeta.RANGER_ADMIN_URL));
-    Assertions.assertTrue(
-        propertyEntryMap.containsKey(AuthorizationPropertiesMeta.RANGER_AUTH_TYPE));
-    Assertions.assertTrue(
-        propertyEntryMap.containsKey(AuthorizationPropertiesMeta.RANGER_USERNAME));
-    Assertions.assertTrue(
-        propertyEntryMap.containsKey(AuthorizationPropertiesMeta.RANGER_PASSWORD));
-    Assertions.assertTrue(
-        propertyEntryMap.containsKey(AuthorizationPropertiesMeta.RANGER_SERVICE_NAME));
-
     Assertions.assertTrue(propertyEntryMap.get(METASTORE_URIS).isRequired());
     Assertions.assertFalse(propertyEntryMap.get(Catalog.PROPERTY_PACKAGE).isRequired());
     Assertions.assertFalse(propertyEntryMap.get(CLIENT_POOL_SIZE).isRequired());
diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveSchema.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveSchema.java
index d3bfb1e3c..337600a63 100644
--- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveSchema.java
+++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveSchema.java
@@ -18,7 +18,7 @@
  */
 package org.apache.gravitino.catalog.hive;
 
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
 import static org.apache.gravitino.connector.BaseCatalog.CATALOG_BYPASS_PREFIX;
 
 import com.google.common.collect.Maps;
diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveTable.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveTable.java
index 2823bf276..cd143b1e8 100644
--- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveTable.java
+++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/TestHiveTable.java
@@ -18,7 +18,7 @@
  */
 package org.apache.gravitino.catalog.hive;
 
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
 import static org.apache.gravitino.catalog.hive.HiveTablePropertiesMetadata.TABLE_TYPE;
 import static org.apache.gravitino.connector.BaseCatalog.CATALOG_BYPASS_PREFIX;
 import static org.apache.gravitino.rel.expressions.transforms.Transforms.day;
diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveIT.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveIT.java
index d9e6fe70d..7d8079d1e 100644
--- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveIT.java
+++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/CatalogHiveIT.java
@@ -18,7 +18,7 @@
  */
 package org.apache.gravitino.catalog.hive.integration.test;
 
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
 import static org.apache.gravitino.catalog.hive.HiveTablePropertiesMetadata.COMMENT;
 import static org.apache.gravitino.catalog.hive.HiveTablePropertiesMetadata.EXTERNAL;
 import static org.apache.gravitino.catalog.hive.HiveTablePropertiesMetadata.FORMAT;
diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/HiveUserAuthenticationIT.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/HiveUserAuthenticationIT.java
index c333cf351..861bb44ed 100644
--- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/HiveUserAuthenticationIT.java
+++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/HiveUserAuthenticationIT.java
@@ -19,10 +19,10 @@
 
 package org.apache.gravitino.catalog.hive.integration.test;
 
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.IMPERSONATION_ENABLE;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.KEY_TAB_URI;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.PRINCIPAL;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.IMPERSONATION_ENABLE;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.KEY_TAB_URI;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.PRINCIPAL;
 import static org.apache.gravitino.connector.BaseCatalog.CATALOG_BYPASS_PREFIX;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;
 
diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java
index b7d61582e..3d71948b7 100644
--- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java
+++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java
@@ -18,8 +18,8 @@
  */
 package org.apache.gravitino.catalog.hive.integration.test;
 
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.IMPERSONATION_ENABLE;
-import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.IMPERSONATION_ENABLE;
+import static org.apache.gravitino.catalog.hive.HiveCatalogPropertiesMetadata.METASTORE_URIS;
 import static org.apache.gravitino.server.GravitinoServer.WEBSERVER_CONF_PREFIX;
 
 import com.google.common.collect.ImmutableMap;
diff --git a/core/src/main/java/org/apache/gravitino/connector/AuthorizationPropertiesMeta.java b/core/src/main/java/org/apache/gravitino/connector/AuthorizationPropertiesMeta.java
deleted file mode 100644
index e1b389f7c..000000000
--- a/core/src/main/java/org/apache/gravitino/connector/AuthorizationPropertiesMeta.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.gravitino.connector;
-
-import com.google.common.collect.ImmutableMap;
-import java.util.Map;
-
-public class AuthorizationPropertiesMeta {
-  /** Ranger admin web URIs */
-  public static final String RANGER_ADMIN_URL = "authorization.ranger.admin.url";
-  /** Ranger authentication type kerberos or simple */
-  public static final String RANGER_AUTH_TYPE = "authorization.ranger.auth.type";
-  /**
-   * Ranger admin web login username(auth_type=simple), or kerberos principal(auth_type=kerberos)
-   */
-  public static final String RANGER_USERNAME = "authorization.ranger.username";
-  /**
-   * Ranger admin web login user password(auth_type=simple), or path of the keytab
-   * file(auth_type=kerberos)
-   */
-  public static final String RANGER_PASSWORD = "authorization.ranger.password";
-  /** Ranger service name */
-  public static final String RANGER_SERVICE_NAME = "authorization.ranger.service.name";
-
-  public static final Map<String, PropertyEntry<?>> RANGER_AUTHORIZATION_PROPERTY_ENTRIES =
-      ImmutableMap.<String, PropertyEntry<?>>builder()
-          .put(
-              RANGER_SERVICE_NAME,
-              PropertyEntry.stringOptionalPropertyEntry(
-                  RANGER_SERVICE_NAME, "The Ranger service name", true, null, false))
-          .put(
-              RANGER_ADMIN_URL,
-              PropertyEntry.stringOptionalPropertyEntry(
-                  RANGER_ADMIN_URL, "The Ranger admin web URIs", true, null, false))
-          .put(
-              RANGER_AUTH_TYPE,
-              PropertyEntry.stringOptionalPropertyEntry(
-                  RANGER_AUTH_TYPE,
-                  "The Ranger admin web auth type (kerberos/simple)",
-                  true,
-                  "simple",
-                  false))
-          .put(
-              RANGER_USERNAME,
-              PropertyEntry.stringOptionalPropertyEntry(
-                  RANGER_USERNAME, "The Ranger admin web login username", true, null, false))
-          .put(
-              RANGER_PASSWORD,
-              PropertyEntry.stringOptionalPropertyEntry(
-                  RANGER_PASSWORD, "The Ranger admin web login password", true, null, false))
-          .build();
-}
diff --git a/docs/security/authorization-pushdown.md b/docs/security/authorization-pushdown.md
index 43c1096bd..fe42a0955 100644
--- a/docs/security/authorization-pushdown.md
+++ b/docs/security/authorization-pushdown.md
@@ -24,6 +24,7 @@ In order to use the Ranger Hadoop SQL Plugin, you need to configure the followin
 | `authorization.ranger.auth.type`    | The Apache Ranger authentication type `simple` or `kerberos`.                                                                                        | `simple`      | No       | 0.6.0-incubating |
 | `authorization.ranger.username`     | The Apache Ranger admin web login username (auth type=simple), or kerberos principal(auth type=kerberos), Need have Ranger administrator permission. | (none)        | No       | 0.6.0-incubating |
 | `authorization.ranger.password`     | The Apache Ranger admin web login user password (auth type=simple), or path of the keytab file(auth type=kerberos)                                   | (none)        | No       | 0.6.0-incubating |
+| `authorization.ranger.service.type` | The Apache Ranger service type.                                                                                                                      | (none)        | No       | 0.8.0-incubating |
 | `authorization.ranger.service.name` | The Apache Ranger service name.                                                                                                                      | (none)        | No       | 0.6.0-incubating |
 
 Once you have used the correct configuration, you can perform authorization operations by calling Gravitino [authorization RESTful API](https://gravitino.apache.org/docs/latest/api/rest/grant-roles-to-a-user).
@@ -46,6 +47,7 @@ authorization.ranger.admin.url=172.0.0.100:6080
 authorization.ranger.auth.type=simple
 authorization.ranger.username=Jack
 authorization.ranger.password=PWD123
+authorization.ranger.service.type=HadoopSQL
 authorization.ranger.service.name=hiveRepo
 ```
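
The properties in the table and snippet above can equally be supplied through the Gravitino Java client when the catalog is created. A hedged sketch follows, reusing the values from the docs example; the GravitinoMetalake handle, catalog name, and provider string ("hive") are assumptions for illustration, not values prescribed by this patch.

```java
import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.apache.gravitino.Catalog;
import org.apache.gravitino.client.GravitinoMetalake;

public class RangerPushdownConfigSketch {

  // "metalake" is assumed to be an already-connected client-side handle.
  static Catalog createHiveCatalogWithRangerPushdown(GravitinoMetalake metalake) {
    Map<String, String> properties =
        ImmutableMap.<String, String>builder()
            .put(Catalog.AUTHORIZATION_PROVIDER, "ranger")
            // Values below mirror the docs example; replace them with your Ranger admin settings.
            .put("authorization.ranger.admin.url", "172.0.0.100:6080")
            .put("authorization.ranger.auth.type", "simple")
            .put("authorization.ranger.username", "Jack")
            .put("authorization.ranger.password", "PWD123")
            // The property added in this change; the docs example uses "HadoopSQL".
            .put("authorization.ranger.service.type", "HadoopSQL")
            .put("authorization.ranger.service.name", "hiveRepo")
            .build();
    return metalake.createCatalog(
        "hive_catalog", Catalog.Type.RELATIONAL, "hive", "comment", properties);
  }
}
```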
 
