[ 
https://issues.apache.org/jira/browse/HIVE-25243?focusedWorklogId=612624&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-612624
 ]

ASF GitHub Bot logged work on HIVE-25243:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 21/Jun/21 13:34
            Start Date: 21/Jun/21 13:34
    Worklog Time Spent: 10m 
      Work Description: maheshk114 commented on a change in pull request #2391:
URL: https://github.com/apache/hive/pull/2391#discussion_r655374537



##########
File path: itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestMiniLlapVectorArrowWithLlapIODisabled.java
##########
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.llap.LlapArrowRowInputFormat;
+import org.apache.hadoop.hive.llap.Row;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.InputFormat;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * TestMiniLlapVectorArrowWithLlapIODisabled - turns off llap io while testing LLAP external client flow.
+ * The aim of turning off LLAP IO is -
+ * when we create table through this test, LLAP caches them and returns the same

Review comment:
       You mean, when tested with LLAP IO turned on?

##########
File path: itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestMiniLlapVectorArrowWithLlapIODisabled.java
##########
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.llap.LlapArrowRowInputFormat;
+import org.apache.hadoop.hive.llap.Row;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.InputFormat;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * TestMiniLlapVectorArrowWithLlapIODisabled - turns off llap io while testing LLAP external client flow.
+ * The aim of turning off LLAP IO is -
+ * when we create table through this test, LLAP caches them and returns the same
+ * when we do a read query, due to this we miss some code paths which may have been hit otherwise.
+ */
+public class TestMiniLlapVectorArrowWithLlapIODisabled extends BaseJdbcWithMiniLlap {
+
+  @BeforeClass
+  public static void beforeTest() throws Exception {
+    HiveConf conf = defaultConf();
+    conf.setBoolVar(ConfVars.LLAP_OUTPUT_FORMAT_ARROW, true);
+    conf.setBoolVar(ConfVars.HIVE_VECTORIZATION_FILESINK_ARROW_NATIVE_ENABLED, true);
+    conf.set(ConfVars.LLAP_IO_ENABLED.varname, "false");
+    BaseJdbcWithMiniLlap.beforeTest(conf);
+  }
+
+  @Override
+  protected InputFormat<NullWritable, Row> getInputFormat() {
+    //For unit testing, no harm in hard-coding allocator ceiling to LONG.MAX_VALUE
+    return new LlapArrowRowInputFormat(Long.MAX_VALUE);
+  }
+
+  @Test
+  public void testNullsInStructFields() throws Exception {
+    createDataTypesTable("datatypes");
+    RowCollector2 rowCollector = new RowCollector2();
+    // c8 struct<r:string,s:int,t:double>
+    // c15 struct<r:int,s:struct<a:int,b:string>>
+    // c16 array<struct<m:map<string,string>,n:int>>
+    String query = "select c8, c15, c16 from datatypes";
+    int rowCount = processQuery(query, 1, rowCollector);
+    assertEquals(3, rowCount);
+  }
+
+  @Override
+  public void testDataTypes() throws Exception {
+    // the test should be exactly identical to TestJdbcWithMiniLlapVectorArrow
+    TestJdbcWithMiniLlapVectorArrow testJdbcWithMiniLlapVectorArrow = new TestJdbcWithMiniLlapVectorArrow();

Review comment:
       All the tests using this method are ignored. Better to create new data as per your need.
   Add a test for a nested struct (a struct inside a struct) and set the parent struct to null.
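
   A minimal sketch of such a test, assuming it sits in this same class and reuses
   processQuery/RowCollector2 from BaseJdbcWithMiniLlap as the diff above does; the
   table name, column layout and row count below are made up for illustration, not
   taken from the patch:

       @Test
       public void testNullParentOfNestedStruct() throws Exception {
         // assumes a table nested_tbl with a single column
         //   c1 struct<a:int, b:struct<x:int, y:string>>
         // and a single row whose parent struct is NULL (e.g. loaded from a text
         // file with a missing value, the same way createDataTypesTable seeds data)
         RowCollector2 rowCollector = new RowCollector2();
         int rowCount = processQuery("select c1 from nested_tbl", 1, rowCollector);
         // the NULL parent row must be read back without the serializer throwing,
         // and the nested struct b and its fields should come back as null
         assertEquals(1, rowCount);
       }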

##########
File path: itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniLlapVectorArrow.java
##########
@@ -238,7 +238,10 @@ public void testDataTypes() throws Exception {
     assertEquals(Date.valueOf("2013-01-01"), rowValues[19]);
     assertEquals("abc123", rowValues[20]);
     assertEquals("abc123         ", rowValues[21]);
-    assertArrayEquals("X'01FF'".getBytes("UTF-8"), (byte[]) rowValues[22]);
+
+    // one of the above assertions already has assertEquals(null, rowValues[22])
+    // and below assertion fails with - java.lang.AssertionError: actual array was null
+    // assertArrayEquals("X'01FF'".getBytes("UTF-8"), (byte[]) rowValues[22]);

Review comment:
        assertEquals(null, rowValues[22]) is done for the second row. This is the third row:
   // Third row    rowValues = rowCollector.rows.get(2);

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/io/arrow/Serializer.java
##########
@@ -347,6 +347,21 @@ private void writeStruct(NonNullableStructVector arrowVector, StructColumnVector
     final ColumnVector[] hiveFieldVectors = hiveVector == null ? null : hiveVector.fields;
     final int fieldSize = fieldTypeInfos.size();
 
+    // This is to handle following scenario -
+    // if any struct value itself is NULL, we get structVector.isNull[i]=true
+    // but we don't get the same for it's child fields which later causes exceptions while setting to arrow vectors
+    // see - https://issues.apache.org/jira/browse/HIVE-25243
+    if (hiveVector != null && hiveFieldVectors != null) {
+      for (int i = 0; i < size; i++) {
+        if (hiveVector.isNull[i]) {
+          for (ColumnVector fieldVector : hiveFieldVectors) {
+            fieldVector.isNull[i] = true;

Review comment:
       Shouldn't it set only the specific field vector to null? For every i, why is it iterating over all the hiveFieldVectors?
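
   A standalone sketch of what that loop in the excerpt does, pulled out of writeStruct
   for readability (the method and parameter names here are invented; it uses
   org.apache.hadoop.hive.ql.exec.vector.ColumnVector/StructColumnVector, and the
   noNulls reset is an addition for completeness that the excerpt above does not show):

       // For every row i where the parent struct is NULL, mark the same row NULL
       // in every child field vector, so the later per-field Arrow writes skip it.
       static void propagateParentNulls(StructColumnVector structVector, int batchSize) {
         for (int i = 0; i < batchSize; i++) {
           if (structVector.isNull[i]) {
             for (ColumnVector field : structVector.fields) {
               field.isNull[i] = true;
               field.noNulls = false; // not in the excerpt; conventional when setting isNull
             }
           }
         }
       }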




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


Issue Time Tracking
-------------------

    Worklog Id:     (was: 612624)
    Time Spent: 20m  (was: 10m)

> Llap external client - Handle nested values when the parent struct is null
> --------------------------------------------------------------------------
>
>                 Key: HIVE-25243
>                 URL: https://issues.apache.org/jira/browse/HIVE-25243
>             Project: Hive
>          Issue Type: Bug
>          Components: Serializers/Deserializers
>            Reporter: Shubham Chaurasia
>            Assignee: Shubham Chaurasia
>            Priority: Major
>              Labels: pull-request-available
>          Time Spent: 20m
>  Remaining Estimate: 0h
>
> Consider the following table in text format - 
> {code}
> +-------------------------------+
> |              c8               |
> +-------------------------------+
> | NULL                          |
> | {"r":null,"s":null,"t":null}  |
> | {"r":"a","s":9,"t":2.2}       |
> +-------------------------------+
> {code}
> When we query the above table via the LLAP external client, it throws the following exception -
> {code:java}
> Caused by: java.lang.NullPointerException: src
>         at io.netty.util.internal.ObjectUtil.checkNotNull(ObjectUtil.java:33)
>         at io.netty.buffer.UnsafeByteBufUtil.setBytes(UnsafeByteBufUtil.java:537)
>         at io.netty.buffer.PooledUnsafeDirectByteBuf.setBytes(PooledUnsafeDirectByteBuf.java:199)
>         at io.netty.buffer.WrappedByteBuf.setBytes(WrappedByteBuf.java:486)
>         at io.netty.buffer.UnsafeDirectLittleEndian.setBytes(UnsafeDirectLittleEndian.java:34)
>         at io.netty.buffer.ArrowBuf.setBytes(ArrowBuf.java:933)
>         at org.apache.arrow.vector.BaseVariableWidthVector.setBytes(BaseVariableWidthVector.java:1191)
>         at org.apache.arrow.vector.BaseVariableWidthVector.setSafe(BaseVariableWidthVector.java:1026)
>         at org.apache.hadoop.hive.ql.io.arrow.Serializer.lambda$static$15(Serializer.java:834)
>         at org.apache.hadoop.hive.ql.io.arrow.Serializer.writeGeneric(Serializer.java:777)
>         at org.apache.hadoop.hive.ql.io.arrow.Serializer.writePrimitive(Serializer.java:581)
>         at org.apache.hadoop.hive.ql.io.arrow.Serializer.write(Serializer.java:290)
>         at org.apache.hadoop.hive.ql.io.arrow.Serializer.writeStruct(Serializer.java:359)
>         at org.apache.hadoop.hive.ql.io.arrow.Serializer.write(Serializer.java:296)
>         at org.apache.hadoop.hive.ql.io.arrow.Serializer.serializeBatch(Serializer.java:213)
>         at org.apache.hadoop.hive.ql.exec.vector.filesink.VectorFileSinkArrowOperator.process(VectorFileSinkArrowOperator.java:135)
> {code}
> Created a test to repro it - 
> {code:java}
> /**
>  * TestMiniLlapVectorArrowWithLlapIODisabled - turns off llap io while testing LLAP external client flow.
>  * The aim of turning off LLAP IO is -
>  * when we create table through this test, LLAP caches them and returns the same
>  * when we do a read query, due to this we miss some code paths which may have been hit otherwise.
>  */
> public class TestMiniLlapVectorArrowWithLlapIODisabled extends BaseJdbcWithMiniLlap {
>   @BeforeClass
>   public static void beforeTest() throws Exception {
>     HiveConf conf = defaultConf();
>     conf.setBoolVar(ConfVars.LLAP_OUTPUT_FORMAT_ARROW, true);
>     conf.setBoolVar(ConfVars.HIVE_VECTORIZATION_FILESINK_ARROW_NATIVE_ENABLED, true);
>     conf.set(ConfVars.LLAP_IO_ENABLED.varname, "false");
>     BaseJdbcWithMiniLlap.beforeTest(conf);
>   }
>   @Override
>   protected InputFormat<NullWritable, Row> getInputFormat() {
>     //For unit testing, no harm in hard-coding allocator ceiling to LONG.MAX_VALUE
>     return new LlapArrowRowInputFormat(Long.MAX_VALUE);
>   }
>   @Test
>   public void testNullsInStructFields() throws Exception {
>     createDataTypesTable("datatypes");
>     RowCollector2 rowCollector = new RowCollector2();
>     // c8 struct<r:string,s:int,t:double>
>     String query = "select c8 from datatypes";
>     int rowCount = processQuery(query, 1, rowCollector);
>     assertEquals(3, rowCount);
>   }
> }
> {code}
> Cause - As we see in the table above, the first row of the table is NULL, and
> correspondingly we get {{structVector.isNull[i]=true}} in the Arrow serializer,
> but we don't get {{isNull[i]=true}} for the fields of the struct. The code later
> goes on to set those fields in the Arrow vectors and we hit the above exception.
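> A simplified reading of that failure path (the variable names below are illustrative,
> not the exact Serializer internals): because the child field is not flagged null, the
> serializer still copies its backing byte array, which is null for that row, into the
> Arrow vector.
> {code:java}
> // hypothetical, condensed view of the string write that blows up:
> byte[] src = bytesColumnVector.vector[rowIndex];    // null for the NULL-struct row
> arrowVarCharVector.setSafe(arrowIndex, src, 0, 0);  // -> NullPointerException: src
> {code}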



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
