[ https://issues.apache.org/jira/browse/HIVE-13723?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15377138#comment-15377138 ]
Hive QA commented on HIVE-13723:
--------------------------------

Here are the results of testing the latest attachment:
https://issues.apache.org/jira/secure/attachment/12817800/HIVE-13723.4.patch.txt

{color:green}SUCCESS:{color} +1 due to 2 test(s) being added or modified.

{color:red}ERROR:{color} -1 due to 10 failed/errored test(s), 10320 tests executed

*Failed tests:*
{noformat}
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_acid_globallimit
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_list_bucket_dml_12
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_stats_list_bucket
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_subquery_multiinsert
org.apache.hadoop.hive.cli.TestMiniTezCliDriver.testCliDriver_acid_globallimit
org.apache.hadoop.hive.cli.TestMinimrCliDriver.org.apache.hadoop.hive.cli.TestMinimrCliDriver
org.apache.hadoop.hive.llap.daemon.impl.TestLlapTokenChecker.testCheckPermissions
org.apache.hadoop.hive.llap.daemon.impl.TestLlapTokenChecker.testGetToken
org.apache.hadoop.hive.llap.tezplugins.TestLlapTaskSchedulerService.testDelayedLocalityNodeCommErrorImmediateAllocation
org.apache.hive.jdbc.TestJdbcWithMiniHS2.testAddJarConstructorUnCaching
{noformat}

Test results: https://builds.apache.org/job/PreCommit-HIVE-MASTER-Build/510/testReport
Console output: https://builds.apache.org/job/PreCommit-HIVE-MASTER-Build/510/console
Test logs: http://ec2-204-236-174-241.us-west-1.compute.amazonaws.com/logs/PreCommit-HIVE-MASTER-Build-510/

Messages:
{noformat}
Executing org.apache.hive.ptest.execution.TestCheckPhase
Executing org.apache.hive.ptest.execution.PrepPhase
Executing org.apache.hive.ptest.execution.ExecutionPhase
Executing org.apache.hive.ptest.execution.ReportingPhase
Tests exited with: TestsFailedException: 10 tests failed
{noformat}

This message is automatically generated.

ATTACHMENT ID: 12817800 - PreCommit-HIVE-MASTER-Build


> Executing join query on type Float using Thrift Serde will result in Float cast to Double error
> ------------------------------------------------------------------------------------------------
>
>                 Key: HIVE-13723
>                 URL: https://issues.apache.org/jira/browse/HIVE-13723
>             Project: Hive
>          Issue Type: Sub-task
>          Components: HiveServer2, JDBC, Serializers/Deserializers
>    Affects Versions: 2.1.0
>            Reporter: Ziyang Zhao
>            Assignee: Ziyang Zhao
>            Priority: Critical
>         Attachments: HIVE-13723.4.patch.txt
>
>
> After enabling the Thrift SerDe, execute the following queries in Beeline:
> >create table test1 (a int);
> >create table test2 (b float);
> >insert into test1 values (1);
> >insert into test2 values (1);
> >select * from test1 join test2 on test1.a=test2.b;
> This will give the following error:
> java.lang.Exception: java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"b":1.0}
>         at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462) ~[hadoop-mapreduce-client-common-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522) [hadoop-mapreduce-client-common-2.7.1.2.4.0.0-169.jar:?]
> Caused by: java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"b":1.0}
>         at org.apache.hadoop.hive.ql.exec.mr.ExecMapper.map(ExecMapper.java:168) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243) ~[hadoop-mapreduce-client-common-2.7.1.2.4.0.0-169.jar:?]
>         at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) ~[?:1.7.0_95]
>         at java.util.concurrent.FutureTask.run(FutureTask.java:262) ~[?:1.7.0_95]
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) ~[?:1.7.0_95]
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) ~[?:1.7.0_95]
>         at java.lang.Thread.run(Thread.java:745) ~[?:1.7.0_95]
> Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"b":1.0}
>         at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:568) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.mr.ExecMapper.map(ExecMapper.java:159) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243) ~[hadoop-mapreduce-client-common-2.7.1.2.4.0.0-169.jar:?]
>         at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) ~[?:1.7.0_95]
>         at java.util.concurrent.FutureTask.run(FutureTask.java:262) ~[?:1.7.0_95]
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) ~[?:1.7.0_95]
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) ~[?:1.7.0_95]
>         at java.lang.Thread.run(Thread.java:745) ~[?:1.7.0_95]
> Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Unexpected exception from MapJoinOperator : org.apache.hadoop.hive.serde2.SerDeException: java.lang.ClassCastException: java.lang.Float cannot be cast to java.lang.Double
>         at org.apache.hadoop.hive.ql.exec.MapJoinOperator.process(MapJoinOperator.java:454) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.FilterOperator.process(FilterOperator.java:126) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.TableScanOperator.process(TableScanOperator.java:130) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.forward(MapOperator.java:164) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:558) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.mr.ExecMapper.map(ExecMapper.java:159) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243) ~[hadoop-mapreduce-client-common-2.7.1.2.4.0.0-169.jar:?]
>         at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) ~[?:1.7.0_95]
>         at java.util.concurrent.FutureTask.run(FutureTask.java:262) ~[?:1.7.0_95]
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) ~[?:1.7.0_95]
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) ~[?:1.7.0_95]
>         at java.lang.Thread.run(Thread.java:745) ~[?:1.7.0_95]
> Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hadoop.hive.serde2.SerDeException: java.lang.ClassCastException: java.lang.Float cannot be cast to java.lang.Double
>         at org.apache.hadoop.hive.ql.exec.FileSinkOperator.process(FileSinkOperator.java:796) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:95) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.CommonJoinOperator.internalForward(CommonJoinOperator.java:647) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.CommonJoinOperator.genAllOneUniqueJoinObject(CommonJoinOperator.java:679) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.CommonJoinOperator.checkAndGenObject(CommonJoinOperator.java:757) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.MapJoinOperator.process(MapJoinOperator.java:441) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.FilterOperator.process(FilterOperator.java:126) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.TableScanOperator.process(TableScanOperator.java:130) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.forward(MapOperator.java:164) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:558) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.mr.ExecMapper.map(ExecMapper.java:159) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243) ~[hadoop-mapreduce-client-common-2.7.1.2.4.0.0-169.jar:?]
>         at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) ~[?:1.7.0_95]
>         at java.util.concurrent.FutureTask.run(FutureTask.java:262) ~[?:1.7.0_95]
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) ~[?:1.7.0_95]
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) ~[?:1.7.0_95]
>         at java.lang.Thread.run(Thread.java:745) ~[?:1.7.0_95]
> Caused by: org.apache.hadoop.hive.serde2.SerDeException: java.lang.ClassCastException: java.lang.Float cannot be cast to java.lang.Double
>         at org.apache.hadoop.hive.serde2.thrift.ThriftJDBCBinarySerDe.serialize(ThriftJDBCBinarySerDe.java:150) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.FileSinkOperator.process(FileSinkOperator.java:721) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:95) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.CommonJoinOperator.internalForward(CommonJoinOperator.java:647) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.CommonJoinOperator.genAllOneUniqueJoinObject(CommonJoinOperator.java:679) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.CommonJoinOperator.checkAndGenObject(CommonJoinOperator.java:757) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.MapJoinOperator.process(MapJoinOperator.java:441) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.FilterOperator.process(FilterOperator.java:126) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.TableScanOperator.process(TableScanOperator.java:130) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.forward(MapOperator.java:164) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:558) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.mr.ExecMapper.map(ExecMapper.java:159) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243) ~[hadoop-mapreduce-client-common-2.7.1.2.4.0.0-169.jar:?]
>         at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) ~[?:1.7.0_95]
>         at java.util.concurrent.FutureTask.run(FutureTask.java:262) ~[?:1.7.0_95]
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) ~[?:1.7.0_95]
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) ~[?:1.7.0_95]
>         at java.lang.Thread.run(Thread.java:745) ~[?:1.7.0_95]
> Caused by: java.lang.ClassCastException: java.lang.Float cannot be cast to java.lang.Double
>         at org.apache.hadoop.hive.serde2.thrift.ColumnBuffer.addValue(ColumnBuffer.java:372) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.serde2.thrift.ColumnBuffer.addValue(ColumnBuffer.java:341) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.serde2.thrift.ThriftJDBCBinarySerDe.serialize(ThriftJDBCBinarySerDe.java:147) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.FileSinkOperator.process(FileSinkOperator.java:721) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:95) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.CommonJoinOperator.internalForward(CommonJoinOperator.java:647) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.CommonJoinOperator.genAllOneUniqueJoinObject(CommonJoinOperator.java:679) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.CommonJoinOperator.checkAndGenObject(CommonJoinOperator.java:757) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.MapJoinOperator.process(MapJoinOperator.java:441) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.FilterOperator.process(FilterOperator.java:126) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.TableScanOperator.process(TableScanOperator.java:130) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.forward(MapOperator.java:164) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:558) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.mr.ExecMapper.map(ExecMapper.java:159) ~[hive-exec-2.1.0-SNAPSHOT.jar:2.1.0-SNAPSHOT]
>         at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343) ~[hadoop-mapreduce-client-core-2.7.1.2.4.0.0-169.jar:?]
>         at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243) ~[hadoop-mapreduce-client-common-2.7.1.2.4.0.0-169.jar:?]
>         at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) ~[?:1.7.0_95]
>         at java.util.concurrent.FutureTask.run(FutureTask.java:262) ~[?:1.7.0_95]
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) ~[?:1.7.0_95]
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) ~[?:1.7.0_95]
>         at java.lang.Thread.run(Thread.java:745) ~[?:1.7.0_95]


--
This message was sent by Atlassian JIRA
(v6.3.4#6332)
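For readers working back through the trace above: the innermost frame shows ColumnBuffer.addValue casting the column value straight to java.lang.Double, while the FLOAT column in the joined result hands back a java.lang.Float (that is exactly what the ClassCastException says). Since the Thrift result-set representation has no separate float column and carries FLOAT values as doubles, the value appears to need an explicit widening conversion rather than a raw cast. The snippet below is only a minimal, self-contained sketch of that failure mode and of the kind of guard a fix might add; the class and method names (FloatColumnSketch, addValueUnsafe, addValueGuarded) are illustrative stand-ins, not the real Hive ColumnBuffer API.

{code:java}
import java.util.ArrayList;
import java.util.List;

// Illustrative stand-in for a Thrift-style column buffer that stores
// FLOAT/DOUBLE column values as doubles. This is NOT the real
// org.apache.hadoop.hive.serde2.thrift.ColumnBuffer; it just sketches
// the failure seen in the trace and one possible guard.
public class FloatColumnSketch {

  private final List<Double> doubleVals = new ArrayList<>();

  // Failure mode from the stack trace: a blind cast to Double throws
  // ClassCastException when the row object is a java.lang.Float.
  public void addValueUnsafe(Object field) {
    doubleVals.add((Double) field);   // java.lang.Float cannot be cast to java.lang.Double
  }

  // Sketch of a guard: widen any numeric value (Float included) to double
  // instead of casting, so FLOAT columns can be packed into the buffer.
  public void addValueGuarded(Object field) {
    if (field == null) {
      doubleVals.add(null);
    } else if (field instanceof Number) {
      doubleVals.add(((Number) field).doubleValue());
    } else {
      throw new IllegalArgumentException(
          "Expected a numeric value, got " + field.getClass());
    }
  }

  public static void main(String[] args) {
    FloatColumnSketch col = new FloatColumnSketch();
    Object floatFromRow = Float.valueOf(1.0f);   // what the FLOAT column "b" yields

    col.addValueGuarded(floatFromRow);           // ok: stored as 1.0d
    try {
      col.addValueUnsafe(floatFromRow);          // reproduces the exception in the trace
    } catch (ClassCastException e) {
      System.out.println("Same failure as in the trace: " + e);
    }
  }
}
{code}

This mirrors the usual pattern for HiveServer2 result serialization, where FLOAT values are widened to double before being written into the Thrift column; the actual HIVE-13723 patch may of course implement the conversion differently.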