This is an automated email from the ASF dual-hosted git repository.

liuxun pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/gravitino.git


The following commit(s) were added to refs/heads/main by this push:
     new b803a565b [#4922] improvement(test): Add the integration test for client withSimpleAuth(String) (#4924)
b803a565b is described below

commit b803a565b6af022a8cf7b7b8611d0e06a49b846d
Author: roryqi <ror...@apache.org>
AuthorDate: Wed Sep 18 14:59:35 2024 +0800

    [#4922] improvement(test): Add the integration test for client withSimpleAuth(String) (#4924)
    
    ### What changes were proposed in this pull request?
    
    Add an integration test for the client's withSimpleAuth(String) method.
    
    ### Why are the changes needed?
    
    Fix: #4922
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Added more test cases.
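
For context, the pattern exercised by this test is GravitinoAdminClient.builder(uri).withSimpleAuth(userName), which, as shown in the diff below, authenticates as the explicitly supplied user name instead of the current process user. A minimal, self-contained sketch of that usage follows; the server URI, user name, metalake name, and catalog name are placeholder values for illustration only, not values taken from this commit:

    import org.apache.gravitino.client.GravitinoAdminClient;
    import org.apache.gravitino.client.GravitinoMetalake;

    public class SimpleAuthExample {
      public static void main(String[] args) throws Exception {
        String uri = "http://localhost:8090"; // placeholder Gravitino server address
        String user = "some-user";            // placeholder user, analogous to EXPECT_USER in the test
        // Build an admin client that uses simple auth with the given user name.
        try (GravitinoAdminClient client =
            GravitinoAdminClient.builder(uri).withSimpleAuth(user).build()) {
          // Load a metalake and catalog, mirroring loadCatalogWithAnotherClient() in the test.
          GravitinoMetalake metalake = client.loadMetalake("some_metalake"); // placeholder metalake
          metalake.loadCatalog("some_catalog");                              // placeholder catalog
        }
      }
    }

In the test itself, this builder call is made once with EXPECT_USER and once with a non-existing user, to verify that Hive/HDFS ownership and permission checks follow the supplied name.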
---
 .../hive/integration/test/ProxyCatalogHiveIT.java  | 101 +++++++++++++++++++++
 1 file changed, 101 insertions(+)

diff --git a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java
index 73139a05d..24c3c2cf4 100644
--- a/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java
+++ b/catalogs/catalog-hive/src/test/java/org/apache/gravitino/catalog/hive/integration/test/ProxyCatalogHiveIT.java
@@ -84,7 +84,11 @@ public class ProxyCatalogHiveIT extends AbstractIT {
   private static FileSystem hdfs;
   private static String originHadoopUser;
   private static GravitinoAdminClient anotherClient;
+  private static GravitinoAdminClient anotherClientWithUsername;
+  private static GravitinoAdminClient anotherClientWithNotExistingName;
   private static Catalog anotherCatalog;
+  private static Catalog anotherCatalogWithUsername;
+  private static Catalog anotherCatatlogWithNotExistingName;
 
   @BeforeAll
   public static void startIntegrationTest() throws Exception {
@@ -123,6 +127,10 @@ public class ProxyCatalogHiveIT extends AbstractIT {
     String uri = "http://" + jettyServerConfig.getHost() + ":" + jettyServerConfig.getHttpPort();
     System.setProperty("user.name", "test");
     anotherClient = GravitinoAdminClient.builder(uri).withSimpleAuth().build();
+    anotherClientWithUsername =
+        GravitinoAdminClient.builder(uri).withSimpleAuth(EXPECT_USER).build();
+    anotherClientWithNotExistingName =
+        GravitinoAdminClient.builder(uri).withSimpleAuth("not-exist").build();
     createMetalake();
     createCatalog();
     loadCatalogWithAnotherClient();
@@ -132,6 +140,8 @@ public class ProxyCatalogHiveIT extends AbstractIT {
   public static void stop() {
     setEnv(HADOOP_USER_NAME, originHadoopUser);
     anotherClient.close();
+    anotherClientWithUsername.close();
+    anotherClientWithNotExistingName.close();
 
     AbstractIT.client = null;
   }
@@ -165,6 +175,30 @@ public class ProxyCatalogHiveIT extends AbstractIT {
             RuntimeException.class,
             () -> schemas.createSchema(anotherSchemaName, comment, properties));
     Assertions.assertTrue(e.getMessage().contains("AccessControlException Permission denied"));
+
+    // Test the client using `withSimpleAuth(expectUser)`.
+    anotherCatalogWithUsername.asSchemas().createSchema(anotherSchemaName, comment, properties);
+    db = hiveClientPool.run(client -> client.getDatabase(schemaName));
+    Assertions.assertEquals(EXPECT_USER, db.getOwnerName());
+    Assertions.assertEquals(
+        EXPECT_USER, hdfs.getFileStatus(new Path(db.getLocationUri())).getOwner());
+
+    // Test the client using `withSimpleAuth(unknownUser)`
+    properties.put(
+        "location",
+        String.format(
+            "hdfs://%s:%d/user/hive/warehouse/%s.db",
+            containerSuite.getHiveContainer().getContainerIpAddress(),
+            HiveContainer.HDFS_DEFAULTFS_PORT,
+            "new_schema"));
+    e =
+        Assertions.assertThrows(
+            RuntimeException.class,
+            () ->
+                anotherCatatlogWithNotExistingName
+                    .asSchemas()
+                    .createSchema("new_schema", comment, properties));
+    Assertions.assertTrue(e.getMessage().contains("AccessControlException Permission denied"));
   }
 
   @Test
@@ -203,6 +237,35 @@ public class ProxyCatalogHiveIT extends AbstractIT {
                   anotherNameIdentifier, columns, comment, of, Partitioning.EMPTY_PARTITIONING);
             });
     Assertions.assertTrue(e.getMessage().contains("AccessControlException Permission denied"));
+
+    // Test the client using `withSimpleAuth(String)`.
+    anotherCatalogWithUsername
+        .asTableCatalog()
+        .createTable(anotherNameIdentifier, columns, comment, of, Partitioning.EMPTY_PARTITIONING);
+    Table anotherCreatedTable =
+        anotherCatalogWithUsername.asTableCatalog().loadTable(nameIdentifier);
+    String anotherLocation = anotherCreatedTable.properties().get("location");
+    Assertions.assertEquals(EXPECT_USER, hdfs.getFileStatus(new Path(anotherLocation)).getOwner());
+    org.apache.hadoop.hive.metastore.api.Table anotherHiveTab =
+        hiveClientPool.run(client -> client.getTable(schemaName, anotherTableName));
+    Assertions.assertEquals(EXPECT_USER, anotherHiveTab.getOwner());
+
+    // Test the client using `withSimpleAuth(not-existing)`
+    NameIdentifier anotherIdentWithNotExisting = NameIdentifier.of(schemaName, "new_table");
+    e =
+        Assertions.assertThrows(
+            RuntimeException.class,
+            () -> {
+              anotherCatatlogWithNotExistingName
+                  .asTableCatalog()
+                  .createTable(
+                      anotherIdentWithNotExisting,
+                      columns,
+                      comment,
+                      of,
+                      Partitioning.EMPTY_PARTITIONING);
+            });
+    Assertions.assertTrue(e.getMessage().contains("AccessControlException Permission denied"));
   }
 
   private static void createSchema(String schemaName, String comment) {
@@ -281,6 +344,38 @@ public class ProxyCatalogHiveIT extends AbstractIT {
                     .supportPartitions()
                     .addPartition(anotherIdentity));
     Assertions.assertTrue(e.getMessage().contains("AccessControlException Permission denied"));
+
+    // Test the client using `withSimpleAuth(String)`.
+    Partition anotherPartition =
+        anotherCatalogWithUsername
+            .asTableCatalog()
+            .loadTable(nameIdentifier)
+            .supportPartitions()
+            .addPartition(anotherIdentity);
+    org.apache.hadoop.hive.metastore.api.Partition anotherPartitionGot =
+        hiveClientPool.run(
+            client -> client.getPartition(schemaName, tableName, anotherPartition.name()));
+
+    Assertions.assertEquals(
+        EXPECT_USER,
+        hdfs.getFileStatus(new Path(anotherPartitionGot.getSd().getLocation())).getOwner());
+
+    // Test the client using `withSimpleAuth(not-existing)`.
+    Literal<?> anotherNewSecondaryPartition = Literals.stringLiteral("gravitino_it_test4");
+    Partition anotherNewIdentity =
+        Partitions.identity(
+            new String[][] {field1, field2},
+            new Literal<?>[] {primaryPartition, anotherNewSecondaryPartition});
+    e =
+        Assertions.assertThrows(
+            RuntimeException.class,
+            () ->
+                anotherCatatlogWithNotExistingName
+                    .asTableCatalog()
+                    .loadTable(nameIdentifier)
+                    .supportPartitions()
+                    .addPartition(anotherNewIdentity));
+    Assertions.assertTrue(e.getMessage().contains("AccessControlException Permission denied"));
   }
 
   private Column[] createColumns() {
@@ -322,6 +417,12 @@ public class ProxyCatalogHiveIT extends AbstractIT {
   private static void loadCatalogWithAnotherClient() {
     GravitinoMetalake metaLake = anotherClient.loadMetalake(METALAKE_NAME);
     anotherCatalog = metaLake.loadCatalog(CATALOG_NAME);
+
+    anotherCatalogWithUsername =
+        anotherClientWithUsername.loadMetalake(METALAKE_NAME).loadCatalog(CATALOG_NAME);
+
+    anotherCatatlogWithNotExistingName =
+        anotherClientWithNotExistingName.loadMetalake(METALAKE_NAME).loadCatalog(CATALOG_NAME);
   }
 
   public static void setEnv(String key, String value) {
