[ https://issues.apache.org/jira/browse/HIVE-27133?focusedWorklogId=851197&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-851197 ]
ASF GitHub Bot logged work on HIVE-27133:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 15/Mar/23 19:04
            Start Date: 15/Mar/23 19:04
    Worklog Time Spent: 10m

      Work Description: vamshikolanu commented on PR #4110:
URL: https://github.com/apache/hive/pull/4110#issuecomment-1470623258

   I agree with your comments @jfsii @SourabhBadhya. I'm going to update this to add support for bigint in the LIMIT clause and close this PR.

Issue Time Tracking
-------------------

    Worklog Id: (was: 851197)
    Time Spent: 1h 10m (was: 1h)

> Round off limit value greater than int_max to int_max
> ------------------------------------------------------
>
>                 Key: HIVE-27133
>                 URL: https://issues.apache.org/jira/browse/HIVE-27133
>             Project: Hive
>          Issue Type: Task
>            Reporter: vamshi kolanu
>            Assignee: vamshi kolanu
>            Priority: Major
>              Labels: pull-request-available
>          Time Spent: 1h 10m
>  Remaining Estimate: 0h
>
> Currently, when the LIMIT clause has a bigint value, the query fails with the following error. As part of this task, we are going to round off any value greater than int_max to int_max.
>
> select string_col from alltypes order by 1 limit 9223372036854775807
>
> java.lang.NumberFormatException: For input string: "9223372036854775807"
>         at java.lang.NumberFormatException.forInputString(NumberFormatException.java:65)
>         at java.lang.Integer.parseInt(Integer.java:583)
>         at java.lang.Integer.<init>(Integer.java:867)
>         at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.doPhase1(SemanticAnalyzer.java:1803)
>         at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.doPhase1(SemanticAnalyzer.java:1911)
>         at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.doPhase1(SemanticAnalyzer.java:1911)
>         at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genResolvedParseTree(SemanticAnalyzer.java:12616)
>         at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:12718)
>         at org.apache.hadoop.hive.ql.parse.CalcitePlanner.analyzeInternal(CalcitePlanner.java:450)
>         at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:299)
>         at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:650)
>         at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1503)
>         at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:1450)
>         at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:1445)
>         at org.apache.hadoop.hive.ql.reexec.ReExecDriver.compileAndRespond(ReExecDriver.java:126)
>         at org.apache.hive.service.cli.operation.SQLOperation.prepare(SQLOperation.java:200)
>         at org.apache.hive.service.cli.operation.SQLOperation.runInternal(SQLOperation.java:265)
>         at org.apache.hive.service.cli.operation.Operation.run(Operation.java:274)
>         at org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementInternal(HiveSessionImpl.java:565)
>         at org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementAsync(HiveSessionImpl.java:551)
>         at org.apache.hive.service.cli.CLIService.executeStatementAsync(CLIService.java:315)
>         at org.apache.hive.service.cli.thrift.ThriftCLIService.ExecuteStatement(ThriftCLIService.java:567)
>         at org.apache.hive.service.rpc.thrift.TCLIService$Processor$ExecuteStatement.getResult(TCLIService.java:1557)
>         at org.apache.hive.service.rpc.thrift.TCLIService$Processor$ExecuteStatement.getResult(TCLIService.java:1542)
>         at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
>         at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
>         at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:56)
>         at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:286)
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>         at java.lang.Thread.run(Thread.java:748)
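For illustration, the rounding behaviour described in the issue could look like the minimal Java sketch below: parse the LIMIT literal as a long instead of an int, then cap anything above Integer.MAX_VALUE. LimitClampSketch and parseLimit are hypothetical names used only for this example; this is not Hive's actual SemanticAnalyzer code, and the PR discussion above ultimately moved toward supporting bigint in the LIMIT clause instead.

{code:java}
// Minimal sketch of the "round off to int_max" idea, under the assumptions above.
public final class LimitClampSketch {

    static int parseLimit(String literal) {
        // Long.parseLong accepts values up to 9223372036854775807,
        // where Integer.parseInt throws NumberFormatException.
        long value = Long.parseLong(literal);
        if (value > Integer.MAX_VALUE) {
            return Integer.MAX_VALUE; // round off to int_max
        }
        return (int) value;
    }

    public static void main(String[] args) {
        System.out.println(parseLimit("100"));                 // 100
        System.out.println(parseLimit("9223372036854775807")); // 2147483647
    }
}
{code}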
--
This message was sent by Atlassian Jira
(v8.20.10#820010)