[ https://issues.apache.org/jira/browse/HIVE-8874?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Brock Noland updated HIVE-8874:
-------------------------------
       Resolution: Fixed
    Fix Version/s: 0.15.0
           Status: Resolved  (was: Patch Available)

Thank you [~ychena]! I have committed your contribution to trunk!

> Error Accessing HBase from Hive via Oozie on Kerberos 5.0.1 cluster
> -------------------------------------------------------------------
>
>                 Key: HIVE-8874
>                 URL: https://issues.apache.org/jira/browse/HIVE-8874
>             Project: Hive
>          Issue Type: Bug
>          Components: HBase Handler
>            Reporter: Yongzhi Chen
>            Assignee: Yongzhi Chen
>             Fix For: 0.15.0
>
>         Attachments: HIVE-8874.1.patch
>
>
> A Hive action workflow on a secure cluster that does an INSERT INTO <regular table> FROM <hbase table> as part of its script will reproduce the issue. It can also be reproduced on a Hive 0.13 cluster.
> {noformat}
> 10309 [main] ERROR org.apache.hadoop.hive.ql.Driver - FAILED: SemanticException Error while configuring input job properties
> org.apache.hadoop.hive.ql.parse.SemanticException: Error while configuring input job properties
>     at org.apache.hadoop.hive.ql.optimizer.SimpleFetchOptimizer.transform(SimpleFetchOptimizer.java:94)
>     at org.apache.hadoop.hive.ql.optimizer.Optimizer.optimize(Optimizer.java:146)
>     at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:9261)
>     at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:206)
>     at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:434)
>     at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:332)
>     at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:988)
>     at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1053)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:924)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:914)
>     at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:269)
>     at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:221)
>     at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:431)
>     at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:367)
>     at org.apache.hadoop.hive.cli.CliDriver.processReader(CliDriver.java:464)
>     at org.apache.hadoop.hive.cli.CliDriver.processFile(CliDriver.java:474)
>     at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:756)
>     at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:694)
>     at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:633)
>     at org.apache.oozie.action.hadoop.HiveMain.runHive(HiveMain.java:323)
>     at org.apache.oozie.action.hadoop.HiveMain.run(HiveMain.java:284)
>     at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:39)
>     at org.apache.oozie.action.hadoop.HiveMain.main(HiveMain.java:66)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:606)
>     at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:227)
>     at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
>     at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:450)
>     at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
>     at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:168)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:415)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1614)
>     at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:163)
> Caused by: java.lang.IllegalStateException: Error while configuring input job properties
>     at org.apache.hadoop.hive.hbase.HBaseStorageHandler.configureTableJobProperties(HBaseStorageHandler.java:343)
>     at org.apache.hadoop.hive.hbase.HBaseStorageHandler.configureInputJobProperties(HBaseStorageHandler.java:279)
>     at org.apache.hadoop.hive.ql.plan.PlanUtils.configureJobPropertiesForStorageHandler(PlanUtils.java:804)
>     at org.apache.hadoop.hive.ql.plan.PlanUtils.configureInputJobPropertiesForStorageHandler(PlanUtils.java:774)
>     at org.apache.hadoop.hive.ql.optimizer.SimpleFetchOptimizer$FetchData.convertToWork(SimpleFetchOptimizer.java:241)
>     at org.apache.hadoop.hive.ql.optimizer.SimpleFetchOptimizer$FetchData.access$000(SimpleFetchOptimizer.java:207)
>     at org.apache.hadoop.hive.ql.optimizer.SimpleFetchOptimizer.optimize(SimpleFetchOptimizer.java:112)
>     at org.apache.hadoop.hive.ql.optimizer.SimpleFetchOptimizer.transform(SimpleFetchOptimizer.java:83)
>     ... 35 more
> Caused by: org.apache.hadoop.hbase.security.AccessDeniedException: org.apache.hadoop.hbase.security.AccessDeniedException: Token generation only allowed for Kerberos authenticated clients
>     at org.apache.hadoop.hbase.security.token.TokenProvider.getAuthenticationToken(TokenProvider.java:124)
>     at org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos$AuthenticationService$1.getAuthenticationToken(AuthenticationProtos.java:4267)
>     at org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos$AuthenticationService.callMethod(AuthenticationProtos.java:4387)
>     at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:5602)
>     at org.apache.hadoop.hbase.regionserver.HRegionServer.execServiceOnRegion(HRegionServer.java:3416)
>     at org.apache.hadoop.hbase.regionserver.HRegionServer.execService(HRegionServer.java:3398)
>     at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:29591)
>     at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2031)
>     at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:108)
>     at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:114)
>     at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:94)
>     at java.lang.Thread.run(Thread.java:745)
>
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
>     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>     at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
>     at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
>     at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
>     at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:304)
>     at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1627)
>     at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:93)
>     at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:90)
>     at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:114)
>     at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:90)
>     at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:96)
>     at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callBlockingMethod(CoprocessorRpcChannel.java:74)
>     at org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos$AuthenticationService$BlockingStub.getAuthenticationToken(AuthenticationProtos.java:4512)
>     at org.apache.hadoop.hbase.security.token.TokenUtil.obtainToken(TokenUtil.java:62)
>     at org.apache.hadoop.hbase.security.token.TokenUtil$2.run(TokenUtil.java:138)
>     at org.apache.hadoop.hbase.security.token.TokenUtil$2.run(TokenUtil.java:136)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:415)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1614)
>     at org.apache.hadoop.hbase.security.token.TokenUtil.obtainTokenForJob(TokenUtil.java:135)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:606)
>     at org.apache.hadoop.hbase.util.Methods.call(Methods.java:39)
>     at org.apache.hadoop.hbase.security.User$SecureHadoopUser.obtainAuthTokenForJob(User.java:296)
>     at org.apache.hadoop.hive.hbase.HBaseStorageHandler.addHBaseDelegationToken(HBaseStorageHandler.java:371)
>     at org.apache.hadoop.hive.hbase.HBaseStorageHandler.configureTableJobProperties(HBaseStorageHandler.java:340)
>     ... 42 more
> Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.security.AccessDeniedException): org.apache.hadoop.hbase.security.AccessDeniedException: Token generation only allowed for Kerberos authenticated clients
>     at org.apache.hadoop.hbase.security.token.TokenProvider.getAuthenticationToken(TokenProvider.java:124)
>     at org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos$AuthenticationService$1.getAuthenticationToken(AuthenticationProtos.java:4267)
>     at org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos$AuthenticationService.callMethod(AuthenticationProtos.java:4387)
>     at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:5602)
>     at org.apache.hadoop.hbase.regionserver.HRegionServer.execServiceOnRegion(HRegionServer.java:3416)
>     at org.apache.hadoop.hbase.regionserver.HRegionServer.execService(HRegionServer.java:3398)
>     at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:29591)
>     at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2031)
>     at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:108)
>     at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:114)
>     at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:94)
>     at java.lang.Thread.run(Thread.java:745)
>
>     at org.apache.hadoop.hbase.ipc.RpcClient.call(RpcClient.java:1457)
>     at org.apache.hadoop.hbase.ipc.RpcClient.callBlockingMethod(RpcClient.java:1661)
>     at org.apache.hadoop.hbase.ipc.RpcClient$BlockingRpcChannelImplementation.callBlockingMethod(RpcClient.java:1719)
>     at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:30014)
>     at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1623)
>     ... 64 more
> {noformat}
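For readers hitting this trace: the relevant frames are HBaseStorageHandler.addHBaseDelegationToken -> TokenUtil.obtainTokenForJob -> HBase's TokenProvider coprocessor, which refuses to mint a new authentication token because the Oozie-launched task authenticates with delegation tokens rather than a Kerberos TGT. The authoritative change is the attached HIVE-8874.1.patch; the snippet below is only a rough sketch of the kind of Kerberos-credential check involved (the class and method names are invented for illustration, and it assumes hadoop-common and hbase-client on the classpath).

{code:java}
// Illustrative sketch only -- NOT the committed HIVE-8874.1.patch.
// Shows the kind of guard that avoids asking HBase's TokenProvider for a new
// token when the current user holds no Kerberos credentials (the Oozie case).
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.security.UserGroupInformation;

public class HBaseTokenGuardSketch {  // hypothetical helper, not a Hive class

  /**
   * Returns true only when it should be safe to call HBase's TokenProvider.
   * Inside an Oozie launcher task the current UGI carries delegation tokens
   * but no TGT, so hasKerberosCredentials() is false and the RPC would fail
   * with the AccessDeniedException shown in the stack trace above.
   */
  static boolean canRequestHBaseAuthToken(Configuration hbaseConf) throws IOException {
    if (!User.isHBaseSecurityEnabled(hbaseConf)) {
      return false;  // insecure cluster: no HBase auth token is needed at all
    }
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    return ugi.hasKerberosCredentials();
  }
}
{code}

When such a check returns false in the Oozie scenario, the job would typically rely on an HBase token that was already obtained for it (for example through Oozie's credentials configuration) rather than requesting a fresh one.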
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)