Increase the client-side Phoenix timeout (phoenix.query.timeoutMs) and the server-side HBase timeout (hbase.regionserver.lease.period).
Thanks, James On Fri, Jun 20, 2014 at 6:30 PM, Andrew <a...@starfishzone.com> wrote: > Using Phoenix 4 & the bundled SqlLine client I am attempting the following > long-running command, where the source table has > 300M rows split over 3 > region servers. I expect this to take a long time; how can I avoid the Phoenix > timeout? > > (SqlLine is executing on one of my 3 HBase boxes.) > > UPSERT INTO PP_USER (USER_ID, US.US_LOAD_STATE) > SELECT USER_ID, 'P' FROM PP_INTERACTION; > Error: (state=08000,code=101) > org.apache.phoenix.exception.PhoenixIOException > at > org.apache.phoenix.util.ServerUtil.parseServerException(ServerUtil.java:101) > at > org.apache.phoenix.iterate.ParallelIterators.getIterators(ParallelIterators.java:289) > at > org.apache.phoenix.iterate.MergeSortResultIterator.getIterators(MergeSortResultIterator.java:48) > at > org.apache.phoenix.iterate.MergeSortResultIterator.minIterator(MergeSortResultIterator.java:63) > at > org.apache.phoenix.iterate.MergeSortResultIterator.next(MergeSortResultIterator.java:90) > at > org.apache.phoenix.compile.UpsertCompiler$2.execute(UpsertCompiler.java:607) > at > org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:226) > at > org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:919) > at sqlline.SqlLine$Commands.execute(SqlLine.java:3673) > at sqlline.SqlLine$Commands.sql(SqlLine.java:3584) > at sqlline.SqlLine.dispatch(SqlLine.java:821) > at sqlline.SqlLine.runCommands(SqlLine.java:1793) > at sqlline.SqlLine$Commands.run(SqlLine.java:4161) > at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) > at > sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) > at > sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) > at java.lang.reflect.Method.invoke(Method.java:606) > at sqlline.SqlLine$ReflectiveCommandHandler.execute(SqlLine.java:2810) > at sqlline.SqlLine.dispatch(SqlLine.java:817) > at 
sqlline.SqlLine.initArgs(SqlLine.java:657) > at sqlline.SqlLine.begin(SqlLine.java:680) > at sqlline.SqlLine.mainWithInputRedirection(SqlLine.java:441) > at sqlline.SqlLine.main(SqlLine.java:424) > Caused by: java.util.concurrent.TimeoutException > at java.util.concurrent.FutureTask.get(FutureTask.java:201) > at > org.apache.phoenix.iterate.ParallelIterators.getIterators(ParallelIterators.java:283) > ... 21 more > Aborting command set because "force" is false and command failed: "UPSERT > INTO PP_USER (USER_ID, US.US_LOAD_STATE) > SELECT USER_ID, 'P' FROM PP_INTERACTION;" > Closing: org.apache.phoenix.jdbc.PhoenixConnection > sqlline version 1.1.2 > 14/06/20 16:07:57 WARN client.AsyncProcess: #29000, not sent: 190 > operations, > region=PP_USER,\x0B\x00\x00\x00\x00\x00\x00\x00\x00,1403278220195.4106dc6be97d99db53fbc7cb3bf2373e., > hostname=EC2-EUW1-MARKETINGCLOUD-DEV-HADOOP02.host.com,60020,1403004778862, > seqNum=1 > java.lang.InterruptedException: sleep interrupted > at java.lang.Thread.sleep(Native Method) > at > org.apache.hadoop.hbase.client.AsyncProcess.logAndResubmit(AsyncProcess.java:700) > at > org.apache.hadoop.hbase.client.AsyncProcess.receiveGlobalFailure(AsyncProcess.java:654) > at > org.apache.hadoop.hbase.client.AsyncProcess.access$100(AsyncProcess.java:90) > at > org.apache.hadoop.hbase.client.AsyncProcess$1.run(AsyncProcess.java:538) > at > java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) > at java.util.concurrent.FutureTask.run(FutureTask.java:262) > at > java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) > at > java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) > at java.lang.Thread.run(Thread.java:744) > 14/06/20 16:07:57 WARN client.ScannerCallable: Ignore, probably already > closed > org.apache.hadoop.hbase.ipc.StoppedRpcClientException > at > org.apache.hadoop.hbase.ipc.RpcClient.getConnection(RpcClient.java:1516) > at 
org.apache.hadoop.hbase.ipc.RpcClient.call(RpcClient.java:1435) > at > org.apache.hadoop.hbase.ipc.RpcClient.callBlockingMethod(RpcClient.java:1654) > at > org.apache.hadoop.hbase.ipc.RpcClient$BlockingRpcChannelImplementation.callBlockingMethod(RpcClient.java:1712) > at > org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.scan(ClientProtos.java:29900) > at > org.apache.hadoop.hbase.client.ScannerCallable.close(ScannerCallable.java:285) > at > org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:153) > at > org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:57) > at > org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:114) > at > org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:90) > at > org.apache.hadoop.hbase.client.ClientScanner.close(ClientScanner.java:431) > at > org.apache.phoenix.iterate.ScanningResultIterator.close(ScanningResultIterator.java:41) > at > org.apache.phoenix.iterate.TableResultIterator.close(TableResultIterator.java:64) > at > org.apache.phoenix.compile.UpsertCompiler.upsertSelect(UpsertCompiler.java:168) > at > org.apache.phoenix.compile.UpsertCompiler.access$000(UpsertCompiler.java:93) > at > org.apache.phoenix.compile.UpsertCompiler$UpsertingParallelIteratorFactory.mutate(UpsertCompiler.java:188) > at > org.apache.phoenix.compile.MutatingParallelIteratorFactory.newIterator(MutatingParallelIteratorFactory.java:63) > at > org.apache.phoenix.iterate.ParallelIterators$2.call(ParallelIterators.java:255) > at > org.apache.phoenix.iterate.ParallelIterators$2.call(ParallelIterators.java:245) > at java.util.concurrent.FutureTask.run(FutureTask.java:262) > at > java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) > at > java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) > at java.lang.Thread.run(Thread.java:744) > > plus 2 further similar stacks, 
probably from the 2 other region servers. > > As an aside I tried the below with a DISTINCT, since this is really what I > want - but I figured that this would keep the HBase servers busier without > communication back to Phoenix. It fails in a similar way. > UPSERT INTO PP_USER (USER_ID, US.US_LOAD_STATE) > SELECT DISTINCT USER_ID, 'P' FROM PP_INTERACTION; > > Andrew.