[ 
https://issues.apache.org/jira/browse/HIVE-25090?focusedWorklogId=599924&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-599924
 ]

ASF GitHub Bot logged work on HIVE-25090:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 20/May/21 17:13
            Start Date: 20/May/21 17:13
    Worklog Time Spent: 10m 
      Work Description: soumyakanti3578 commented on a change in pull request 
#2302:
URL: https://github.com/apache/hive/pull/2302#discussion_r636302618



##########
File path: ql/src/test/results/clientpositive/llap/subquery_corr_join.q.out
##########
@@ -0,0 +1,212 @@
+PREHOOK: query: create table alltypestiny(
+id int,
+int_col int,
+bigint_col bigint,
+bool_col boolean
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@alltypestiny
+POSTHOOK: query: create table alltypestiny(
+id int,
+int_col int,
+bigint_col bigint,
+bool_col boolean
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@alltypestiny
+PREHOOK: query: insert into alltypestiny(id, int_col, bigint_col, bool_col) 
values
+(1, 1, 10, true),
+(2, 4, 5, false),
+(3, 5, 15, true),
+(10, 10, 30, false)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@alltypestiny
+POSTHOOK: query: insert into alltypestiny(id, int_col, bigint_col, bool_col) 
values
+(1, 1, 10, true),
+(2, 4, 5, false),
+(3, 5, 15, true),
+(10, 10, 30, false)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@alltypestiny
+POSTHOOK: Lineage: alltypestiny.bigint_col SCRIPT []
+POSTHOOK: Lineage: alltypestiny.bool_col SCRIPT []
+POSTHOOK: Lineage: alltypestiny.id SCRIPT []
+POSTHOOK: Lineage: alltypestiny.int_col SCRIPT []
+PREHOOK: query: create table alltypesagg(
+id int,
+int_col int,
+bool_col boolean
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@alltypesagg
+POSTHOOK: query: create table alltypesagg(
+id int,
+int_col int,
+bool_col boolean
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@alltypesagg
+PREHOOK: query: insert into alltypesagg(id, int_col, bool_col) values
+(1, 1, true),
+(2, 4, false),
+(5, 6, true),
+(null, null, false)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@alltypesagg
+POSTHOOK: query: insert into alltypesagg(id, int_col, bool_col) values
+(1, 1, true),
+(2, 4, false),
+(5, 6, true),
+(null, null, false)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@alltypesagg
+POSTHOOK: Lineage: alltypesagg.bool_col SCRIPT []
+POSTHOOK: Lineage: alltypesagg.id SCRIPT []
+POSTHOOK: Lineage: alltypesagg.int_col SCRIPT []
+Warning: Shuffle Join MERGEJOIN[64][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in 
Stage 'Reducer 3' is a cross product
+PREHOOK: query: explain cbo select *
+from alltypesagg t1
+where t1.id not in
+    (select tt1.id
+     from alltypestiny tt1 left JOIN alltypesagg tt2
+     on tt1.int_col = tt2.int_col)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesagg
+PREHOOK: Input: default@alltypestiny
+#### A masked pattern was here ####
+POSTHOOK: query: explain cbo select *
+from alltypesagg t1
+where t1.id not in
+    (select tt1.id
+     from alltypestiny tt1 left JOIN alltypesagg tt2
+     on tt1.int_col = tt2.int_col)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesagg
+POSTHOOK: Input: default@alltypestiny
+#### A masked pattern was here ####
+CBO PLAN:
+HiveProject(id=[$0], int_col=[$1], bool_col=[$2])
+  HiveFilter(condition=[OR(=($3, 0), AND(IS NULL($6), >=($4, $3), IS NOT 
NULL($0)))])
+    HiveProject(id=[$0], int_col=[$1], bool_col=[$2], c=[$5], ck=[$6], 
id0=[$3], literalTrue=[$4])
+      HiveJoin(condition=[true], joinType=[inner], algorithm=[none], cost=[not 
available])
+        HiveJoin(condition=[=($0, $3)], joinType=[left], algorithm=[none], 
cost=[not available])
+          HiveProject(id=[$0], int_col=[$1], bool_col=[$2])
+            HiveTableScan(table=[[default, alltypesagg]], table:alias=[t1])
+          HiveProject(id=[$0], literalTrue=[true])
+            HiveAggregate(group=[{0}])
+              HiveJoin(condition=[=($1, $2)], joinType=[left], 
algorithm=[none], cost=[not available])
+                HiveProject(id=[$0], int_col=[$1])
+                  HiveFilter(condition=[IS NOT NULL($0)])
+                    HiveTableScan(table=[[default, alltypestiny]], 
table:alias=[tt1])
+                HiveProject(int_col=[$1])
+                  HiveFilter(condition=[IS NOT NULL($1)])
+                    HiveTableScan(table=[[default, alltypesagg]], 
table:alias=[tt2])
+        HiveProject(c=[$0], ck=[$1])
+          HiveAggregate(group=[{}], c=[COUNT()], ck=[COUNT($0)])
+            HiveJoin(condition=[=($1, $2)], joinType=[left], algorithm=[none], 
cost=[not available])
+              HiveProject(id=[$0], int_col=[$1])
+                HiveTableScan(table=[[default, alltypestiny]], 
table:alias=[tt1])
+              HiveProject(int_col=[$1])
+                HiveFilter(condition=[IS NOT NULL($1)])
+                  HiveTableScan(table=[[default, alltypesagg]], 
table:alias=[tt2])
+
+Warning: Shuffle Join MERGEJOIN[64][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in 
Stage 'Reducer 3' is a cross product
+PREHOOK: query: select *
+from alltypesagg t1
+where t1.id not in
+    (select tt1.id
+     from alltypestiny tt1 left JOIN alltypesagg tt2
+     on tt1.int_col = tt2.int_col)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesagg
+PREHOOK: Input: default@alltypestiny
+#### A masked pattern was here ####
+POSTHOOK: query: select *
+from alltypesagg t1
+where t1.id not in
+    (select tt1.id
+     from alltypestiny tt1 left JOIN alltypesagg tt2
+     on tt1.int_col = tt2.int_col)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesagg
+POSTHOOK: Input: default@alltypestiny
+#### A masked pattern was here ####
+5      6       true

Review comment:
       This is correct.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


Issue Time Tracking
-------------------

    Worklog Id:     (was: 599924)
    Time Spent: 0.5h  (was: 20m)

> Join condition parsing error in subquery
> ----------------------------------------
>
>                 Key: HIVE-25090
>                 URL: https://issues.apache.org/jira/browse/HIVE-25090
>             Project: Hive
>          Issue Type: Bug
>          Components: Parser
>            Reporter: Soumyakanti Das
>            Assignee: Soumyakanti Das
>            Priority: Major
>              Labels: pull-request-available
>          Time Spent: 0.5h
>  Remaining Estimate: 0h
>
>  
> The following query fails
> {code:java}
> select *
> from alltypesagg t1
> where t1.id not in
>     (select tt1.id
>      from alltypesagg tt1 LEFT JOIN alltypestiny tt2
>      on t1.int_col = tt2.int_col){code}
> Stack trace:
> {code:java}
>  
> org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSubquerySemanticException: 
> Line 8:8 Invalid table alias or column reference 't1': (possible column names 
> are: tt1.id, tt1.int_col, tt1.bool_col, tt2.id, tt2.int_col, tt2.bigint_col, 
> tt2.bool_col) 
> org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSubquerySemanticException: 
> Line 8:8 Invalid table alias or column reference 't1': (possible column names 
> are: tt1.id, tt1.int_col, tt1.bool_col, tt2.id, tt2.int_col, tt2.bigint_col, 
> tt2.bool_col) at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlannerAction.genSubQueryRelNode(CalcitePlanner.java:3886)
>  at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlannerAction.genFilterRelNode(CalcitePlanner.java:3899)
>  at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlannerAction.genFilterLogicalPlan(CalcitePlanner.java:3927)
>  at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlannerAction.genLogicalPlan(CalcitePlanner.java:5489)
>  at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlannerAction.apply(CalcitePlanner.java:2018)
>  at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner$CalcitePlannerAction.apply(CalcitePlanner.java:1964)
>  at 
> org.apache.calcite.tools.Frameworks.lambda$withPlanner$0(Frameworks.java:130) 
> at 
> org.apache.calcite.prepare.CalcitePrepareImpl.perform(CalcitePrepareImpl.java:915)
>  at org.apache.calcite.tools.Frameworks.withPrepare(Frameworks.java:179) at 
> org.apache.calcite.tools.Frameworks.withPlanner(Frameworks.java:125) at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner.logicalPlan(CalcitePlanner.java:1725)
>  at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner.genOPTree(CalcitePlanner.java:565)
>  at 
> org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:12486)
>  at 
> org.apache.hadoop.hive.ql.parse.CalcitePlanner.analyzeInternal(CalcitePlanner.java:458)
>  at 
> org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:316)
>  at org.apache.hadoop.hive.ql.Compiler.analyze(Compiler.java:223) at 
> org.apache.hadoop.hive.ql.Compiler.compile(Compiler.java:104) at 
> org.apache.hadoop.hive.ql.Driver.compile(Driver.java:492) at 
> org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:445) at 
> org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:409) at 
> org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:403) at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.compileAndRespond(ReExecDriver.java:125)
>  at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:229) 
> at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:258) 
> at org.apache.hadoop.hive.cli.CliDriver.processCmd1(CliDriver.java:203) at 
> org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:129) at 
> org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:424) at 
> org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:355) at 
> org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:744) 
> at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:714) at 
> org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:170)
>  at 
> org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:157) at 
> org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver(TestMiniLlapLocalCliDriver.java:62)
>  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
> at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.lang.reflect.Method.invoke(Method.java:498) at 
> org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
>  at 
> org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>  at 
> org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
>  at 
> org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
>  at 
> org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:135)
>  at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) at 
> org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
>  at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
>  at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
>  at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) at 
> org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) at 
> org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) at 
> org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) at 
> org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) at 
> org.junit.runners.ParentRunner.run(ParentRunner.java:413) at 
> org.junit.runners.Suite.runChild(Suite.java:128) at 
> org.junit.runners.Suite.runChild(Suite.java:27) at 
> org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) at 
> org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) at 
> org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) at 
> org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) at 
> org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) at 
> org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:95)
>  at org.junit.rules.RunRules.evaluate(RunRules.java:20) at 
> org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) at 
> org.junit.runners.ParentRunner.run(ParentRunner.java:413) at 
> org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365)
>  at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273)
>  at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238)
>  at 
> org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159)
>  at 
> org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:377)
>  at 
> org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:138) 
> at org.apache.maven.surefire.booter.ForkedBooter.run(ForkedBooter.java:465) 
> at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:451)
> {code}
>  
>  
>  



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

Reply via email to