[ https://issues.apache.org/jira/browse/HIVE-25040?focusedWorklogId=586774&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-586774 ]
ASF GitHub Bot logged work on HIVE-25040: ----------------------------------------- Author: ASF GitHub Bot Created on: 21/Apr/21 17:51 Start Date: 21/Apr/21 17:51 Worklog Time Spent: 10m Work Description: pgaref commented on a change in pull request #2200: URL: https://github.com/apache/hive/pull/2200#discussion_r617760731 ########## File path: ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java ########## @@ -553,21 +553,19 @@ private void addFunction(String functionName, FunctionInfo function) { Integer refCount = persistent.get(functionClass); persistent.put(functionClass, Integer.valueOf(refCount == null ? 1 : refCount + 1)); } + } catch (ClassNotFoundException e) { Review comment: got it, just realized its been called from here -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org Issue Time Tracking ------------------- Worklog Id: (was: 586774) Time Spent: 40m (was: 0.5h) > Drop database cascade cannot remove persistent functions > -------------------------------------------------------- > > Key: HIVE-25040 > URL: https://issues.apache.org/jira/browse/HIVE-25040 > Project: Hive > Issue Type: Bug > Reporter: Mustafa İman > Assignee: Mustafa İman > Priority: Major > Labels: pull-request-available > Time Spent: 40m > Remaining Estimate: 0h > > Add a persistent custom function to a database using a Jar file: CREATE > FUNCTION myfunction USING JAR 'x.jar'; > Restart the session and immediately issue DROP DATABASE mydb CASCADE. 
It > throws ClassNotFoundException: > {code:java} > java.lang.ClassNotFoundException: DummyUDF > at java.net.URLClassLoader.findClass(URLClassLoader.java:382) > ~[?:1.8.0_282] > at java.lang.ClassLoader.loadClass(ClassLoader.java:418) ~[?:1.8.0_282] > at java.lang.ClassLoader.loadClass(ClassLoader.java:351) ~[?:1.8.0_282] > at java.lang.Class.forName0(Native Method) ~[?:1.8.0_282] > at java.lang.Class.forName(Class.java:348) ~[?:1.8.0_282] > at > org.apache.hadoop.hive.ql.exec.Registry.getPermanentUdfClass(Registry.java:549) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at > org.apache.hadoop.hive.ql.exec.Registry.removePersistentFunctionUnderLock(Registry.java:586) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at > org.apache.hadoop.hive.ql.exec.Registry.unregisterFunction(Registry.java:577) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at > org.apache.hadoop.hive.ql.exec.Registry.unregisterFunctions(Registry.java:607) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at > org.apache.hadoop.hive.ql.exec.FunctionRegistry.unregisterPermanentFunctions(FunctionRegistry.java:1731) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at > org.apache.hadoop.hive.ql.ddl.database.drop.DropDatabaseOperation.execute(DropDatabaseOperation.java:62) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at org.apache.hadoop.hive.ql.ddl.DDLTask.execute(DDLTask.java:80) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:213) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at > org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:105) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at org.apache.hadoop.hive.ql.Executor.launchTask(Executor.java:357) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at org.apache.hadoop.hive.ql.Executor.launchTasks(Executor.java:330) > 
~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at org.apache.hadoop.hive.ql.Executor.runTasks(Executor.java:246) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at org.apache.hadoop.hive.ql.Executor.execute(Executor.java:109) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:748) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at org.apache.hadoop.hive.ql.Driver.run(Driver.java:497) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at org.apache.hadoop.hive.ql.Driver.run(Driver.java:491) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at > org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:166) > ~[hive-exec-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at > org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:225) > ~[hive-service-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at > org.apache.hive.service.cli.operation.SQLOperation.access$700(SQLOperation.java:87) > ~[hive-service-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at > org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork$1.run(SQLOperation.java:322) > ~[hive-service-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at java.security.AccessController.doPrivileged(Native Method) > ~[?:1.8.0_282] > at javax.security.auth.Subject.doAs(Subject.java:422) ~[?:1.8.0_282] > at > org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1898) > ~[hadoop-common-3.1.1.7.2.10.0-36.jar:?] 
> at > org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork.run(SQLOperation.java:340) > ~[hive-service-3.1.3000.7.2.10.0-36.jar:3.1.3000.7.2.10.0-36] > at > java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) > ~[?:1.8.0_282] > at java.util.concurrent.FutureTask.run(FutureTask.java:266) > ~[?:1.8.0_282] > at > java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) > ~[?:1.8.0_282] > at java.util.concurrent.FutureTask.run(FutureTask.java:266) > ~[?:1.8.0_282] > at > java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) > ~[?:1.8.0_282] > at > java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) > ~[?:1.8.0_282] > at java.lang.Thread.run(Thread.java:748) [?:1.8.0_282] > {code} > > Since the new session did not use the custom UDF before trying to drop it, > the session state does not have the class loaded. Therefore it throws a > ClassNotFoundException. In this case we can ignore this exception, since we > do not need to reload the class just to remove it afterwards. > The same thing does not happen with DROP FUNCTION, because the compiler loads the jar > explicitly before trying to drop the function. -- This message was sent by Atlassian Jira (v8.3.4#803005)