[ 
https://issues.apache.org/jira/browse/HIVE-25214?focusedWorklogId=621043&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-621043
 ]

ASF GitHub Bot logged work on HIVE-25214:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 09/Jul/21 15:52
            Start Date: 09/Jul/21 15:52
    Worklog Time Spent: 10m 
      Work Description: nrg4878 commented on a change in pull request #2384:
URL: https://github.com/apache/hive/pull/2384#discussion_r667035410



##########
File path: standalone-metastore/metastore-server/src/main/sql/derby/upgrade-3.2.0-to-4.0.0.derby.sql
##########
@@ -148,15 +148,21 @@ ALTER TABLE COMPLETED_COMPACTIONS ADD CC_INITIATOR_VERSION varchar(128);
 ALTER TABLE COMPLETED_COMPACTIONS ADD CC_WORKER_VERSION varchar(128);
 
 -- HIVE-24396
-CREATE TABLE "APP"."DATACONNECTORS" ("DC_NAME" VARCHAR(128) NOT NULL, "TYPE" VARCHAR(128) NOT NULL, "COMMENT" VARCHAR(256), "OWNER_NAME" VARCHAR(256), "OWNER_TYPE" VARCHAR(10), "CREATE_TIME" INTEGER NOT NULL);
-CREATE TABLE "APP"."DATACONNECTOR_PARAMS" ("DC_NAME" VARCHAR(128) NOT NULL, "PARAM_KEY" VARCHAR(180) NOT NULL, "PARAM_VALUE" VARCHAR(4000), "COMMENT" VARCHAR(256));
+CREATE TABLE "APP"."DATACONNECTORS" ("NAME" VARCHAR(128) NOT NULL, "TYPE" VARCHAR(128) NOT NULL, "COMMENT" VARCHAR(256), "OWNER_NAME" VARCHAR(256), "OWNER_TYPE" VARCHAR(10), "CREATE_TIME" INTEGER NOT NULL);

Review comment:
       Good catch. Thanks for fixing this.

##########
File path: standalone-metastore/metastore-server/src/main/sql/derby/upgrade-3.2.0-to-4.0.0.derby.sql
##########
@@ -148,15 +148,21 @@ ALTER TABLE COMPLETED_COMPACTIONS ADD CC_INITIATOR_VERSION varchar(128);
 ALTER TABLE COMPLETED_COMPACTIONS ADD CC_WORKER_VERSION varchar(128);
 
 -- HIVE-24396
-CREATE TABLE "APP"."DATACONNECTORS" ("DC_NAME" VARCHAR(128) NOT NULL, "TYPE" VARCHAR(128) NOT NULL, "COMMENT" VARCHAR(256), "OWNER_NAME" VARCHAR(256), "OWNER_TYPE" VARCHAR(10), "CREATE_TIME" INTEGER NOT NULL);
-CREATE TABLE "APP"."DATACONNECTOR_PARAMS" ("DC_NAME" VARCHAR(128) NOT NULL, "PARAM_KEY" VARCHAR(180) NOT NULL, "PARAM_VALUE" VARCHAR(4000), "COMMENT" VARCHAR(256));
+CREATE TABLE "APP"."DATACONNECTORS" ("NAME" VARCHAR(128) NOT NULL, "TYPE" VARCHAR(128) NOT NULL, "COMMENT" VARCHAR(256), "OWNER_NAME" VARCHAR(256), "OWNER_TYPE" VARCHAR(10), "CREATE_TIME" INTEGER NOT NULL);
+CREATE TABLE "APP"."DATACONNECTOR_PARAMS" ("NAME" VARCHAR(128) NOT NULL, "PARAM_KEY" VARCHAR(180) NOT NULL, "PARAM_VALUE" VARCHAR(4000), "COMMENT" VARCHAR(256));
 ALTER TABLE "APP"."DBS" ADD COLUMN "TYPE" VARCHAR(32) DEFAULT 'NATIVE' NOT NULL;
 ALTER TABLE "APP"."DBS" ADD COLUMN "DATACONNECTOR_NAME" VARCHAR(128);
 ALTER TABLE "APP"."DBS" ADD COLUMN "REMOTE_DBNAME" VARCHAR(128);
 UPDATE "APP"."DBS" SET TYPE='NATIVE' WHERE TYPE IS NULL;
-ALTER TABLE "APP"."DATACONNECTORS" ADD CONSTRAINT "DATACONNECTORS_KEY_PK" PRIMARY KEY ("DC_NAME");
-ALTER TABLE "APP"."DATACONNECTOR_PARAMS" ADD CONSTRAINT "DATACONNECTOR_PARAMS_KEY_PK" PRIMARY KEY ("DC_NAME", "PARAM_KEY");
-ALTER TABLE "APP"."DATACONNECTOR_PARAMS" ADD CONSTRAINT "DC_NAME_FK1" FOREIGN KEY ("DC_NAME") REFERENCES "APP"."DATACONNECTORS" ("DC_NAME") ON DELETE NO ACTION ON UPDATE NO ACTION;
+ALTER TABLE "APP"."DATACONNECTORS" ADD CONSTRAINT "DATACONNECTORS_KEY_PK" PRIMARY KEY ("NAME");
+ALTER TABLE "APP"."DATACONNECTOR_PARAMS" ADD CONSTRAINT "DATACONNECTOR_PARAMS_KEY_PK" PRIMARY KEY ("NAME", "PARAM_KEY");
+ALTER TABLE "APP"."DATACONNECTOR_PARAMS" ADD CONSTRAINT "NAME_FK1" FOREIGN KEY ("NAME") REFERENCES "APP"."DATACONNECTORS" ("NAME") ON DELETE NO ACTION ON UPDATE NO ACTION;
+
+CREATE TABLE "APP"."DC_PRIVS" ("DC_GRANT_ID" BIGINT NOT NULL, "CREATE_TIME" INTEGER NOT NULL, "DC_NAME" VARCHAR(128), "GRANT_OPTION" SMALLINT NOT NULL, "GRANTOR" VARCHAR(128), "GRANTOR_TYPE" VARCHAR(128), "PRINCIPAL_NAME" VARCHAR(128), "PRINCIPAL_TYPE" VARCHAR(128), "DC_PRIV" VARCHAR(128), "AUTHORIZER" VARCHAR(128));

Review comment:
       So here we are using DC_NAME, whereas we used NAME in the DATACONNECTORS and DATACONNECTOR_PARAMS tables. Should we make it consistent so it is not confusing?
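
       For illustration only (a sketch, not taken from the patch): if DC_PRIVS were aligned with the NAME column used by DATACONNECTORS and DATACONNECTOR_PARAMS, its Derby DDL would read along these lines:

           CREATE TABLE "APP"."DC_PRIVS" ("DC_GRANT_ID" BIGINT NOT NULL, "CREATE_TIME" INTEGER NOT NULL, "NAME" VARCHAR(128), "GRANT_OPTION" SMALLINT NOT NULL, "GRANTOR" VARCHAR(128), "GRANTOR_TYPE" VARCHAR(128), "PRINCIPAL_NAME" VARCHAR(128), "PRINCIPAL_TYPE" VARCHAR(128), "DC_PRIV" VARCHAR(128), "AUTHORIZER" VARCHAR(128));

       The opposite choice (keeping DC_NAME everywhere, including in DATACONNECTORS) would be equally consistent; the point is only that the two spellings should not be mixed.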

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
##########
@@ -35,6 +35,7 @@
 import org.apache.hadoop.hive.ql.hooks.Entity.Type;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType;
+import org.apache.hadoop.hive.ql.metadata.Hive;

Review comment:
       Is this import necessary?

##########
File path: standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
##########
@@ -6496,6 +6497,88 @@ public PrincipalPrivilegeSet getDBPrivilegeSet(String catName, String dbName,
     return ret;
   }
 
+  private List<PrivilegeGrantInfo> getConnectorPrivilege(String catName, String connectorName,
+     String principalName, PrincipalType principalType) {
+
+    // normalize string name
+    catName = normalizeIdentifier(catName);
+    connectorName = normalizeIdentifier(connectorName);
+
+    if (principalName != null) {
+      // get all data connector granted privilege
+      List<MDCPrivilege> userNameDcPriv = this.listPrincipalMDCGrants(
+              principalName, principalType, catName, connectorName);
+
+      // populate and return grantInfos
+      if (CollectionUtils.isNotEmpty(userNameDcPriv)) {
+        List<PrivilegeGrantInfo> grantInfos = new ArrayList<>(
+                userNameDcPriv.size());
+        for (int i = 0; i < userNameDcPriv.size(); i++) {
+          MDCPrivilege item = userNameDcPriv.get(i);
+          grantInfos.add(new PrivilegeGrantInfo(item.getPrivilege(), item
+                  .getCreateTime(), item.getGrantor(), getPrincipalTypeFromStr(item
+                  .getGrantorType()), item.getGrantOption()));
+        }
+        return grantInfos;
+      }
+    }
+
+    // return empty list if no principalName
+    return Collections.emptyList();
+  }
+
+  @Override
+  public PrincipalPrivilegeSet getConnectorPrivilegeSet (String catName, String connectorName,
+      String userName, List<String> groupNames)  throws InvalidObjectException,
+      MetaException {
+
+    boolean commited = false;
+    catName = normalizeIdentifier(catName);
+    connectorName = normalizeIdentifier(connectorName);
+
+    PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet();
+    try {
+      openTransaction();
+
+      // get user privileges
+      if (userName != null) {
+        Map<String, List<PrivilegeGrantInfo>> connectorUserPriv = new HashMap<>();
+        connectorUserPriv.put(userName, getConnectorPrivilege(catName, connectorName, userName,
+                PrincipalType.USER));
+        ret.setUserPrivileges(connectorUserPriv);
+      }
+
+      // get group privileges
+      if (CollectionUtils.isNotEmpty(groupNames)) {
+        Map<String, List<PrivilegeGrantInfo>> dbGroupPriv = new HashMap<>();
+        for (String groupName : groupNames) {
+          dbGroupPriv.put(groupName, getConnectorPrivilege(catName, connectorName, groupName,
+                  PrincipalType.GROUP));
+        }
+        ret.setGroupPrivileges(dbGroupPriv);
+      }
+
+      // get role privileges
+      Set<String> roleNames = listAllRolesInHierarchy(userName, groupNames);
+      if (CollectionUtils.isNotEmpty(roleNames)) {
+        Map<String, List<PrivilegeGrantInfo>> dbRolePriv = new HashMap<>();
+        for (String roleName : roleNames) {
+          dbRolePriv

Review comment:
       Nit: re-indent these two lines of code if possible.

##########
File path: ql/src/test/results/clientnegative/dataconnector_authfail_create.q.out
##########
@@ -0,0 +1 @@
+FAILED: HiveAccessControlException Permission denied: Principal [name=hive_test_user, type=USER] does not have following privileges for operation CREATEDATACONNECTOR [[ADMIN PRIVILEGE] on Object [type=DATACONNECTOR, name=mysql_auth]]

Review comment:
       This test output seems too short. I would have expected this to contain 
the PREHOOK output that prints out the query. But maybe the PREHOOK is run 
after the authorization. if this is the case, then it makes sense. Just asking 
you to double check.
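
       For comparison, a negative-test .q.out that does include the hook output usually looks roughly like the sketch below (the query text and operation type here are assumptions for illustration, not taken from the patch):

           PREHOOK: query: CREATE CONNECTOR mysql_auth TYPE 'mysql' URL 'jdbc:mysql://localhost:3306/hive'
           PREHOOK: type: CREATEDATACONNECTOR
           FAILED: HiveAccessControlException Permission denied: Principal [name=hive_test_user, type=USER] does not have following privileges for operation CREATEDATACONNECTOR [[ADMIN PRIVILEGE] on Object [type=DATACONNECTOR, name=mysql_auth]]

       If authorization really does fail before the PREHOOK runs, only the FAILED line would appear, as in this file.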

##########
File path: standalone-metastore/metastore-server/src/main/sql/derby/hive-schema-4.0.0.derby.sql
##########
@@ -48,6 +48,8 @@ CREATE TABLE "APP"."PARTITION_KEY_VALS" ("PART_ID" BIGINT NOT NULL, "PART_KEY_VA
 
 CREATE TABLE "APP"."DB_PRIVS" ("DB_GRANT_ID" BIGINT NOT NULL, "CREATE_TIME" INTEGER NOT NULL, "DB_ID" BIGINT, "GRANT_OPTION" SMALLINT NOT NULL, "GRANTOR" VARCHAR(128), "GRANTOR_TYPE" VARCHAR(128), "PRINCIPAL_NAME" VARCHAR(128), "PRINCIPAL_TYPE" VARCHAR(128), "DB_PRIV" VARCHAR(128), "AUTHORIZER" VARCHAR(128));
 
+CREATE TABLE "APP"."DC_PRIVS" ("DC_GRANT_ID" BIGINT NOT NULL, "CREATE_TIME" INTEGER NOT NULL, "DC_NAME" VARCHAR(128), "GRANT_OPTION" SMALLINT NOT NULL, "GRANTOR" VARCHAR(128), "GRANTOR_TYPE" VARCHAR(128), "PRINCIPAL_NAME" VARCHAR(128), "PRINCIPAL_TYPE" VARCHAR(128), "DC_PRIV" VARCHAR(128), "AUTHORIZER" VARCHAR(128));

Review comment:
       NAME vs. DC_NAME -- same naming-consistency question as above.

##########
File path: standalone-metastore/metastore-server/src/main/sql/postgres/hive-schema-4.0.0.postgres.sql
##########
@@ -108,6 +108,24 @@ CREATE TABLE "DB_PRIVS" (
 );
 
 
+--
+-- Name: DC_PRIVS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace:
+--
+
+CREATE TABLE "DC_PRIVS" (

Review comment:
       Nit: re-indent to match the other tables, please.
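
       For illustration, re-indented in the same four-space style as the surrounding table definitions. The column names are assumed from the Derby DC_PRIVS DDL shown earlier, and the PostgreSQL types are an assumption by analogy with DB_PRIVS, since the rest of this CREATE TABLE is not visible in the hunk:

           CREATE TABLE "DC_PRIVS" (
               "DC_GRANT_ID" bigint NOT NULL,
               "CREATE_TIME" bigint NOT NULL,
               "DC_NAME" character varying(128),
               "GRANT_OPTION" smallint NOT NULL,
               "GRANTOR" character varying(128),
               "GRANTOR_TYPE" character varying(128),
               "PRINCIPAL_NAME" character varying(128),
               "PRINCIPAL_TYPE" character varying(128),
               "DC_PRIV" character varying(128),
               "AUTHORIZER" character varying(128)
           );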

##########
File path: standalone-metastore/metastore-server/src/main/sql/derby/upgrade-3.2.0-to-4.0.0.derby.sql
##########
@@ -148,15 +148,21 @@ ALTER TABLE COMPLETED_COMPACTIONS ADD CC_INITIATOR_VERSION varchar(128);
 ALTER TABLE COMPLETED_COMPACTIONS ADD CC_WORKER_VERSION varchar(128);
 
 -- HIVE-24396
-CREATE TABLE "APP"."DATACONNECTORS" ("DC_NAME" VARCHAR(128) NOT NULL, "TYPE" VARCHAR(128) NOT NULL, "COMMENT" VARCHAR(256), "OWNER_NAME" VARCHAR(256), "OWNER_TYPE" VARCHAR(10), "CREATE_TIME" INTEGER NOT NULL);
-CREATE TABLE "APP"."DATACONNECTOR_PARAMS" ("DC_NAME" VARCHAR(128) NOT NULL, "PARAM_KEY" VARCHAR(180) NOT NULL, "PARAM_VALUE" VARCHAR(4000), "COMMENT" VARCHAR(256));
+CREATE TABLE "APP"."DATACONNECTORS" ("NAME" VARCHAR(128) NOT NULL, "TYPE" VARCHAR(128) NOT NULL, "COMMENT" VARCHAR(256), "OWNER_NAME" VARCHAR(256), "OWNER_TYPE" VARCHAR(10), "CREATE_TIME" INTEGER NOT NULL);
+CREATE TABLE "APP"."DATACONNECTOR_PARAMS" ("NAME" VARCHAR(128) NOT NULL, "PARAM_KEY" VARCHAR(180) NOT NULL, "PARAM_VALUE" VARCHAR(4000), "COMMENT" VARCHAR(256));
 ALTER TABLE "APP"."DBS" ADD COLUMN "TYPE" VARCHAR(32) DEFAULT 'NATIVE' NOT NULL;
 ALTER TABLE "APP"."DBS" ADD COLUMN "DATACONNECTOR_NAME" VARCHAR(128);
 ALTER TABLE "APP"."DBS" ADD COLUMN "REMOTE_DBNAME" VARCHAR(128);
 UPDATE "APP"."DBS" SET TYPE='NATIVE' WHERE TYPE IS NULL;
-ALTER TABLE "APP"."DATACONNECTORS" ADD CONSTRAINT "DATACONNECTORS_KEY_PK" PRIMARY KEY ("DC_NAME");
-ALTER TABLE "APP"."DATACONNECTOR_PARAMS" ADD CONSTRAINT "DATACONNECTOR_PARAMS_KEY_PK" PRIMARY KEY ("DC_NAME", "PARAM_KEY");
-ALTER TABLE "APP"."DATACONNECTOR_PARAMS" ADD CONSTRAINT "DC_NAME_FK1" FOREIGN KEY ("DC_NAME") REFERENCES "APP"."DATACONNECTORS" ("DC_NAME") ON DELETE NO ACTION ON UPDATE NO ACTION;
+ALTER TABLE "APP"."DATACONNECTORS" ADD CONSTRAINT "DATACONNECTORS_KEY_PK" PRIMARY KEY ("NAME");
+ALTER TABLE "APP"."DATACONNECTOR_PARAMS" ADD CONSTRAINT "DATACONNECTOR_PARAMS_KEY_PK" PRIMARY KEY ("NAME", "PARAM_KEY");
+ALTER TABLE "APP"."DATACONNECTOR_PARAMS" ADD CONSTRAINT "NAME_FK1" FOREIGN KEY ("NAME") REFERENCES "APP"."DATACONNECTORS" ("NAME") ON DELETE NO ACTION ON UPDATE NO ACTION;
+
+CREATE TABLE "APP"."DC_PRIVS" ("DC_GRANT_ID" BIGINT NOT NULL, "CREATE_TIME" INTEGER NOT NULL, "DC_NAME" VARCHAR(128), "GRANT_OPTION" SMALLINT NOT NULL, "GRANTOR" VARCHAR(128), "GRANTOR_TYPE" VARCHAR(128), "PRINCIPAL_NAME" VARCHAR(128), "PRINCIPAL_TYPE" VARCHAR(128), "DC_PRIV" VARCHAR(128), "AUTHORIZER" VARCHAR(128));
+ALTER TABLE "APP"."DC_PRIVS" ADD CONSTRAINT "DC_PRIVS_PK" PRIMARY KEY ("DC_GRANT_ID");
+ALTER TABLE "APP"."DC_PRIVS" ADD CONSTRAINT "DC_PRIVS_FK1" FOREIGN KEY ("DC_NAME") REFERENCES "APP"."DATACONNECTORS" ("NAME") ON DELETE NO ACTION ON UPDATE NO ACTION;

Review comment:
       If we are changing the column name, this FK reference changes as well.
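
       For example (a sketch only, not from the patch): if DC_PRIVS also used NAME, as in the earlier sketch, this statement would become:

           ALTER TABLE "APP"."DC_PRIVS" ADD CONSTRAINT "DC_PRIVS_FK1" FOREIGN KEY ("NAME") REFERENCES "APP"."DATACONNECTORS" ("NAME") ON DELETE NO ACTION ON UPDATE NO ACTION;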

##########
File path: standalone-metastore/metastore-server/src/main/resources/package.jdo
##########
@@ -1655,6 +1655,49 @@
           </value>
        </field>
     </class>
+    <class name="MDCPrivilege" table="DC_PRIVS" identity-type="datastore" detachable="true">
+          <index name="DCPrivilegeIndex" unique="true">
+            <column name="AUTHORIZER"/>
+            <column name="DC_NAME"/>
+            <column name="PRINCIPAL_NAME"/>
+            <column name="PRINCIPAL_TYPE"/>
+            <column name="DC_PRIV"/>
+            <column name="GRANTOR"/>
+            <column name="GRANTOR_TYPE"/>
+          </index>
+
+          <datastore-identity>
+            <column name="DC_GRANT_ID"/>
+          </datastore-identity>
+
+          <field name="principalName">
+            <column name="PRINCIPAL_NAME" length="128" jdbc-type="VARCHAR"/>
+          </field>
+          <field name="principalType">
+            <column name="PRINCIPAL_TYPE" length="128" jdbc-type="VARCHAR"/>
+          </field>
+          <field name="dataConnector">

Review comment:
       so MDCPrivilege.dataConnector is of type MDataConnector whereas this 
mapping indicates it is a string field. Shouldnt these types match? Am I 
missing something here? 
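
       For reference, a sketch of the two shapes this mapping could take (neither is quoted from the patch; the column name is assumed from the DC_PRIVS DDL above). An object reference to the connector would normally map only the FK column, while a plain string field would carry length/jdbc-type attributes:

           <!-- field type MDataConnector: the column holds the reference to DATACONNECTORS -->
           <field name="dataConnector">
             <column name="DC_NAME"/>
           </field>

           <!-- field type String: the column is just a VARCHAR holding the connector name -->
           <field name="dataConnectorName">
             <column name="DC_NAME" length="128" jdbc-type="VARCHAR"/>
           </field>

       Whichever shape is chosen, the Java field type in MDCPrivilege and this mapping should agree.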

##########
File path: standalone-metastore/metastore-server/src/main/sql/postgres/upgrade-3.2.0-to-4.0.0.postgres.sql
##########
@@ -303,6 +303,31 @@ ALTER TABLE "DBS" ADD "DATACONNECTOR_NAME" character varying(128);
 ALTER TABLE "DBS" ADD "REMOTE_DBNAME" character varying(128);
 UPDATE "DBS" SET "TYPE"= 'NATIVE' WHERE "TYPE" IS NULL;
 
+
+CREATE TABLE "DC_PRIVS" (
+                            "DC_GRANT_ID" bigint NOT NULL,

Review comment:
       Nit: re-indent to match the other tables, please.

##########
File path: standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/model/MDCPrivilege.java
##########
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.model;
+
+public class MDCPrivilege {
+    private String principalName;
+
+    private String principalType;
+
+    private MDataConnector dataConnector;

Review comment:
       Should the type match the mapping in package.jdo?
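
       A minimal sketch of the name-based alternative, assuming the column really is meant to hold just the connector name (hypothetical fragment, not from the patch; the other option is to keep MDataConnector here and map it as an object reference in package.jdo):

           // Hypothetical: a String-typed field matching a VARCHAR DC_NAME column directly.
           private String dataConnectorName;

           public String getDataConnectorName() {
             return dataConnectorName;
           }

           public void setDataConnectorName(String dataConnectorName) {
             this.dataConnectorName = dataConnectorName;
           }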




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: gitbox-unsubscr...@hive.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


Issue Time Tracking
-------------------

    Worklog Id:     (was: 621043)
    Time Spent: 20m  (was: 10m)

> Add hive authorization support for Data connectors.
> ---------------------------------------------------
>
>                 Key: HIVE-25214
>                 URL: https://issues.apache.org/jira/browse/HIVE-25214
>             Project: Hive
>          Issue Type: Sub-task
>            Reporter: Naveen Gangam
>            Assignee: Dantong Dong
>            Priority: Major
>              Labels: pull-request-available
>          Time Spent: 20m
>  Remaining Estimate: 0h
>
> We need to add authorization support for data connectors in Hive. The default behavior should be:
> 1) Connectors can be created/dropped by users in the admin role.
> 2) Connectors have READ and WRITE permissions.
> *   READ permissions are required to fetch a connector object or to fetch all connector names. So to create a REMOTE database using a connector, users will need READ permission on the connector. DDL queries like "show connectors" and "describe <connector>" will check for read access on the connector as well.
> *   WRITE permissions are required to alter/drop a connector. DDL queries like "alter connector" and "drop connector" will need WRITE access on the connector.
> With this support in place, Ranger can integrate with it.
>    
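
For a concrete picture of the statements the description refers to, a rough sketch (connector and database names are made up; the exact DDL syntax is not quoted from the patch):

    -- create: admin role only
    CREATE CONNECTOR mysql_auth TYPE 'mysql' URL 'jdbc:mysql://localhost:3306/hive';

    -- READ on the connector:
    SHOW CONNECTORS;
    DESCRIBE CONNECTOR mysql_auth;
    CREATE REMOTE DATABASE mysql_db USING mysql_auth;

    -- WRITE on the connector:
    ALTER CONNECTOR mysql_auth SET URL 'jdbc:mysql://otherhost:3306/hive';
    DROP CONNECTOR mysql_auth;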



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
