[ https://issues.apache.org/jira/browse/HIVE-6113?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15026135#comment-15026135 ]
Hive QA commented on HIVE-6113:
-------------------------------

Here are the results of testing the latest attachment:
https://issues.apache.org/jira/secure/attachment/12773890/HIVE-6113.4.patch

{color:red}ERROR:{color} -1 due to build exiting with an error

Test results: http://ec2-174-129-184-35.compute-1.amazonaws.com/jenkins/job/PreCommit-HIVE-TRUNK-Build/6120/testReport
Console output: http://ec2-174-129-184-35.compute-1.amazonaws.com/jenkins/job/PreCommit-HIVE-TRUNK-Build/6120/console
Test logs: http://ec2-174-129-184-35.compute-1.amazonaws.com/logs/PreCommit-HIVE-TRUNK-Build-6120/

Messages:
{noformat}
**** This message was trimmed, see log for full details ****

main:
[INFO] Executed tasks
[INFO]
[INFO] --- maven-compiler-plugin:3.1:compile (default-compile) @ hive-it-util ---
[INFO] Compiling 51 source files to /data/hive-ptest/working/apache-github-source-source/itests/util/target/classes
[WARNING] /data/hive-ptest/working/apache-github-source-source/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java: Some input files use or override a deprecated API.
[WARNING] /data/hive-ptest/working/apache-github-source-source/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java: Recompile with -Xlint:deprecation for details.
[INFO]
[INFO] --- maven-resources-plugin:2.6:testResources (default-testResources) @ hive-it-util ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory /data/hive-ptest/working/apache-github-source-source/itests/util/src/test/resources
[INFO] Copying 3 resources
[INFO]
[INFO] --- maven-antrun-plugin:1.7:run (setup-test-dirs) @ hive-it-util ---
[INFO] Executing tasks

main:
[mkdir] Created dir: /data/hive-ptest/working/apache-github-source-source/itests/util/target/tmp
[mkdir] Created dir: /data/hive-ptest/working/apache-github-source-source/itests/util/target/warehouse
[mkdir] Created dir: /data/hive-ptest/working/apache-github-source-source/itests/util/target/tmp/conf
[copy] Copying 14 files to /data/hive-ptest/working/apache-github-source-source/itests/util/target/tmp/conf
[INFO] Executed tasks
[INFO]
[INFO] --- maven-compiler-plugin:3.1:testCompile (default-testCompile) @ hive-it-util ---
[INFO] No sources to compile
[INFO]
[INFO] --- maven-surefire-plugin:2.16:test (default-test) @ hive-it-util ---
[INFO] Tests are skipped.
[INFO]
[INFO] --- maven-jar-plugin:2.2:jar (default-jar) @ hive-it-util ---
[INFO] Building jar: /data/hive-ptest/working/apache-github-source-source/itests/util/target/hive-it-util-2.0.0-SNAPSHOT.jar
[INFO]
[INFO] --- maven-site-plugin:3.3:attach-descriptor (attach-descriptor) @ hive-it-util ---
[INFO]
[INFO] --- maven-install-plugin:2.4:install (default-install) @ hive-it-util ---
[INFO] Installing /data/hive-ptest/working/apache-github-source-source/itests/util/target/hive-it-util-2.0.0-SNAPSHOT.jar to /data/hive-ptest/working/maven/org/apache/hive/hive-it-util/2.0.0-SNAPSHOT/hive-it-util-2.0.0-SNAPSHOT.jar
[INFO] Installing /data/hive-ptest/working/apache-github-source-source/itests/util/pom.xml to /data/hive-ptest/working/maven/org/apache/hive/hive-it-util/2.0.0-SNAPSHOT/hive-it-util-2.0.0-SNAPSHOT.pom
[INFO]
[INFO] ------------------------------------------------------------------------
[INFO] Building Hive Integration - Unit Tests 2.0.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO]
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ hive-it-unit ---
[INFO] Deleting /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/target
[INFO] Deleting /data/hive-ptest/working/apache-github-source-source/itests/hive-unit (includes = [datanucleus.log, derby.log], excludes = [])
[INFO]
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (enforce-no-snapshots) @ hive-it-unit ---
[INFO]
[INFO] --- maven-antrun-plugin:1.7:run (download-spark) @ hive-it-unit ---
[INFO] Executing tasks

main:
[exec] + /bin/pwd
[exec] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit
[exec] + BASE_DIR=./target
[exec] + HIVE_ROOT=./target/../../../
[exec] + DOWNLOAD_DIR=./../thirdparty
[exec] + mkdir -p ./../thirdparty
[exec] + download http://d3jw87u4immizc.cloudfront.net/spark-tarball/spark-1.5.0-bin-hadoop2-without-hive.tgz spark
[exec] + url=http://d3jw87u4immizc.cloudfront.net/spark-tarball/spark-1.5.0-bin-hadoop2-without-hive.tgz
[exec] + finalName=spark
[exec] ++ basename http://d3jw87u4immizc.cloudfront.net/spark-tarball/spark-1.5.0-bin-hadoop2-without-hive.tgz
[exec] + tarName=spark-1.5.0-bin-hadoop2-without-hive.tgz
[exec] + rm -rf ./target/spark
[exec] + [[ ! -f ./../thirdparty/spark-1.5.0-bin-hadoop2-without-hive.tgz ]]
[exec] + tar -zxf ./../thirdparty/spark-1.5.0-bin-hadoop2-without-hive.tgz -C ./target
[exec] + mv ./target/spark-1.5.0-bin-hadoop2-without-hive ./target/spark
[exec] + cp -f ./target/../../..//data/conf/spark/log4j2.xml ./target/spark/conf/
[exec] + sed '/package /d' /data/hive-ptest/working/apache-github-source-source/itests/../contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java
[exec] + javac -cp /data/hive-ptest/working/maven/org/apache/hive/hive-exec/2.0.0-SNAPSHOT/hive-exec-2.0.0-SNAPSHOT.jar /tmp/UDFExampleAdd.java -d /tmp
[exec] + jar -cf /tmp/udfexampleadd-1.0.jar -C /tmp UDFExampleAdd.class
[INFO] Executed tasks
[INFO]
[INFO] --- maven-remote-resources-plugin:1.5:process (default) @ hive-it-unit ---
[INFO]
[INFO] --- maven-resources-plugin:2.6:resources (default-resources) @ hive-it-unit ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/main/resources
[INFO] Copying 3 resources
[INFO]
[INFO] --- maven-antrun-plugin:1.7:run (define-classpath) @ hive-it-unit ---
[INFO] Executing tasks

main:
[INFO] Executed tasks
[INFO]
[INFO] --- maven-compiler-plugin:3.1:compile (default-compile) @ hive-it-unit ---
[INFO] Compiling 2 source files to /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/target/classes
[INFO]
[INFO] --- maven-resources-plugin:2.6:testResources (default-testResources) @ hive-it-unit ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] skip non existing resourceDirectory /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/resources
[INFO] Copying 3 resources
[INFO]
[INFO] --- maven-antrun-plugin:1.7:run (setup-test-dirs) @ hive-it-unit ---
[INFO] Executing tasks

main:
[mkdir] Created dir: /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/target/tmp
[mkdir] Created dir: /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/target/warehouse
[mkdir] Created dir: /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/target/tmp/conf
[copy] Copying 14 files to /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/target/tmp/conf
[INFO] Executed tasks
[INFO]
[INFO] --- maven-antrun-plugin:1.7:run (setup-metastore-scripts) @ hive-it-unit ---
[INFO] Executing tasks

main:
[mkdir] Created dir: /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/target/tmp/scripts/metastore
[copy] Copying 235 files to /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/target/tmp/scripts/metastore
[INFO] Executed tasks
[INFO]
[INFO] --- maven-compiler-plugin:3.1:testCompile (default-testCompile) @ hive-it-unit ---
[INFO] Compiling 98 source files to /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/target/test-classes
[INFO] -------------------------------------------------------------
[WARNING] COMPILATION WARNING :
[INFO] -------------------------------------------------------------
[WARNING] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRetryingHMSHandler.java: Some input files use or override a deprecated API.
[WARNING] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRetryingHMSHandler.java: Recompile with -Xlint:deprecation for details.
[WARNING] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java: Some input files use unchecked or unsafe operations.
[WARNING] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java: Recompile with -Xlint:unchecked for details.
[INFO] 4 warnings
[INFO] -------------------------------------------------------------
[INFO] -------------------------------------------------------------
[ERROR] COMPILATION ERROR :
[INFO] -------------------------------------------------------------
[ERROR] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java:[54,41] cannot find symbol
  symbol:   variable METASTORE_AUTO_CREATE_SCHEMA
  location: class org.apache.hadoop.hive.conf.HiveConf.ConfVars
[ERROR] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java:[55,41] cannot find symbol
  symbol:   variable METASTORE_FIXED_DATASTORE
  location: class org.apache.hadoop.hive.conf.HiveConf.ConfVars
[ERROR] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java:[84,53] cannot find symbol
  symbol:   variable METASTORE_AUTO_CREATE_SCHEMA
  location: class org.apache.hadoop.hive.conf.HiveConf.ConfVars
[ERROR] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java:[85,54] cannot find symbol
  symbol:   variable METASTORE_FIXED_DATASTORE
  location: class org.apache.hadoop.hive.conf.HiveConf.ConfVars
[ERROR] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java:[96,54] cannot find symbol
  symbol:   variable METASTORE_AUTO_CREATE_SCHEMA
  location: class org.apache.hadoop.hive.conf.HiveConf.ConfVars
[ERROR] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java:[97,53] cannot find symbol
  symbol:   variable METASTORE_FIXED_DATASTORE
  location: class org.apache.hadoop.hive.conf.HiveConf.ConfVars
[INFO] 6 errors
[INFO] -------------------------------------------------------------
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO]
[INFO] Hive Integration - Parent ......................... SUCCESS [7.774s]
[INFO] Hive Integration - Custom Serde ................... SUCCESS [14.734s]
[INFO] Hive Integration - HCatalog Unit Tests ............ SUCCESS [24.616s]
[INFO] Hive Integration - Testing Utilities .............. SUCCESS [18.257s]
[INFO] Hive Integration - Unit Tests ..................... FAILURE [28.255s]
[INFO] Hive Integration - Test Serde ..................... SKIPPED
[INFO] Hive Integration - QFile Tests .................... SKIPPED
[INFO] Hive Integration - QFile Accumulo Tests ........... SKIPPED
[INFO] JMH benchmark: Hive ............................... SKIPPED
[INFO] Hive Integration - Unit Tests - Hadoop 2 .......... SKIPPED
[INFO] Hive Integration - Unit Tests with miniKdc ........ SKIPPED
[INFO] Hive Integration - QFile Spark Tests .............. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 1:39.450s
[INFO] Finished at: Tue Nov 24 23:04:48 EST 2015
[INFO] Final Memory: 84M/327M
[INFO] ------------------------------------------------------------------------
[WARNING] The requested profile "hadoop-2" could not be activated because it does not exist.
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-compiler-plugin:3.1:testCompile (default-testCompile) on project hive-it-unit: Compilation failure: Compilation failure:
[ERROR] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java:[54,41] cannot find symbol
[ERROR] symbol: variable METASTORE_AUTO_CREATE_SCHEMA
[ERROR] location: class org.apache.hadoop.hive.conf.HiveConf.ConfVars
[ERROR] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java:[55,41] cannot find symbol
[ERROR] symbol: variable METASTORE_FIXED_DATASTORE
[ERROR] location: class org.apache.hadoop.hive.conf.HiveConf.ConfVars
[ERROR] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java:[84,53] cannot find symbol
[ERROR] symbol: variable METASTORE_AUTO_CREATE_SCHEMA
[ERROR] location: class org.apache.hadoop.hive.conf.HiveConf.ConfVars
[ERROR] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java:[85,54] cannot find symbol
[ERROR] symbol: variable METASTORE_FIXED_DATASTORE
[ERROR] location: class org.apache.hadoop.hive.conf.HiveConf.ConfVars
[ERROR] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java:[96,54] cannot find symbol
[ERROR] symbol: variable METASTORE_AUTO_CREATE_SCHEMA
[ERROR] location: class org.apache.hadoop.hive.conf.HiveConf.ConfVars
[ERROR] /data/hive-ptest/working/apache-github-source-source/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java:[97,53] cannot find symbol
[ERROR] symbol: variable METASTORE_FIXED_DATASTORE
[ERROR] location: class org.apache.hadoop.hive.conf.HiveConf.ConfVars
[ERROR] -> [Help 1]
[ERROR]
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR]
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR]
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hive-it-unit
+ exit 1
'
{noformat}

This message is automatically generated.

ATTACHMENT ID: 12773890 - PreCommit-HIVE-TRUNK-Build

> Unable to instantiate org.apache.hadoop.hive.metastore.HiveMetaStoreClient
> --------------------------------------------------------------------------
>
>                 Key: HIVE-6113
>                 URL: https://issues.apache.org/jira/browse/HIVE-6113
>             Project: Hive
>          Issue Type: Bug
>          Components: Database/Schema
>    Affects Versions: 0.12.0, 0.13.0, 0.14.0, 1.0.0, 1.2.1
>         Environment: hadoop-0.20.2-cdh3u3, hive-0.12.0
>            Reporter: William Stone
>            Assignee: Oleksiy Sayankin
>            Priority: Critical
>              Labels: HiveMetaStoreClient, metastore, unable_instantiate
>         Attachments: HIVE-6113-2.patch, HIVE-6113.3.patch, HIVE-6113.4.patch, HIVE-6113.patch
>
>
> When I execute the SQL "use fdm; desc formatted fdm.tableName;" from Python, the error below is thrown, but when I try it again it succeeds.
> 2013-12-25 03:01:32,290 ERROR exec.DDLTask (DDLTask.java:execute(435)) - org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.metastore.HiveMetaStoreClient
> 	at org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1143)
> 	at org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1128)
> 	at org.apache.hadoop.hive.ql.exec.DDLTask.switchDatabase(DDLTask.java:3479)
> 	at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:237)
> 	at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:151)
> 	at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:65)
> 	at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1414)
> 	at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1192)
> 	at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1020)
> 	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:888)
> 	at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:260)
> 	at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:217)
> 	at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:507)
> 	at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:875)
> 	at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:769)
> 	at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:708)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> 	at java.lang.reflect.Method.invoke(Method.java:597)
> 	at org.apache.hadoop.util.RunJar.main(RunJar.java:197)
> Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.metastore.HiveMetaStoreClient
> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1217)
> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:62)
> 	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:72)
> 	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:2372)
> 	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:2383)
> 	at org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1139)
> 	... 20 more
> Caused by: java.lang.reflect.InvocationTargetException
> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:39)
> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:27)
> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:513)
> 	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1210)
> 	... 25 more
> Caused by: javax.jdo.JDODataStoreException: Exception thrown flushing changes to datastore
> NestedThrowables:
> java.sql.BatchUpdateException: Duplicate entry 'default' for key 'UNIQUE_DATABASE'
> 	at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:451)
> 	at org.datanucleus.api.jdo.JDOTransaction.commit(JDOTransaction.java:165)
> 	at org.apache.hadoop.hive.metastore.ObjectStore.commitTransaction(ObjectStore.java:358)
> 	at org.apache.hadoop.hive.metastore.ObjectStore.createDatabase(ObjectStore.java:404)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> 	at java.lang.reflect.Method.invoke(Method.java:597)
> 	at org.apache.hadoop.hive.metastore.RetryingRawStore.invoke(RetryingRawStore.java:124)
> 	at $Proxy9.createDatabase(Unknown Source)
> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB_core(HiveMetaStore.java:422)
> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:441)
> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:326)
> 	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.<init>(HiveMetaStore.java:286)
> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:54)
> 	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:59)
> 	at org.apache.hadoop.hive.metastore.HiveMetaStore.newHMSHandler(HiveMetaStore.java:4060)
> 	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:121)
> 	... 30 more
> Caused by: java.sql.BatchUpdateException: Duplicate entry 'default' for key 'UNIQUE_DATABASE'
> 	at com.mysql.jdbc.PreparedStatement.executeBatchSerially(PreparedStatement.java:2028)
> 	at com.mysql.jdbc.PreparedStatement.executeBatch(PreparedStatement.java:1451)
> 	at com.jolbox.bonecp.StatementHandle.executeBatch(StatementHandle.java:469)
> 	at org.datanucleus.store.rdbms.ParamLoggingPreparedStatement.executeBatch(ParamLoggingPreparedStatement.java:372)
> 	at org.datanucleus.store.rdbms.SQLController.processConnectionStatement(SQLController.java:628)
> 	at org.datanucleus.store.rdbms.SQLController.processStatementsForConnection(SQLController.java:596)
> 	at org.datanucleus.store.rdbms.SQLController$1.transactionFlushed(SQLController.java:683)
> 	at org.datanucleus.store.connection.AbstractManagedConnection.transactionFlushed(AbstractManagedConnection.java:86)
> 	at org.datanucleus.store.connection.ConnectionManagerImpl$2.transactionFlushed(ConnectionManagerImpl.java:454)
> 	at org.datanucleus.TransactionImpl.flush(TransactionImpl.java:199)
> 	at org.datanucleus.TransactionImpl.commit(TransactionImpl.java:263)
> 	at org.datanucleus.api.jdo.JDOTransaction.commit(JDOTransaction.java:98)
> 	... 46 more
> Caused by: com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException: Duplicate entry 'default' for key 'UNIQUE_DATABASE'
> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> 	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:39)
> 	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:27)
> 	at java.lang.reflect.Constructor.newInstance(Constructor.java:513)
> 	at com.mysql.jdbc.Util.handleNewInstance(Util.java:411)
> 	at com.mysql.jdbc.Util.getInstance(Util.java:386)
> 	at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:1039)
> 	at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3609)
> 	at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3541)
> 	at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:2002)
> 	at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2163)
> 	at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2624)
> 	at com.mysql.jdbc.PreparedStatement.executeInternal(PreparedStatement.java:2127)
> 	at com.mysql.jdbc.PreparedStatement.executeUpdate(PreparedStatement.java:2427)
> 	at com.mysql.jdbc.PreparedStatement.executeBatchSerially(PreparedStatement.java:1980)
> 	... 57 more

--
This message was sent by Atlassian JIRA
(v6.3.4#6332)
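For readers of the trace above: the failure is a race in which several metastore handlers initialize concurrently, each finds no 'default' database, and the losing insert trips the UNIQUE_DATABASE constraint, which then surfaces as "Unable to instantiate org.apache.hadoop.hive.metastore.HiveMetaStoreClient". The following is only a minimal, hypothetical sketch of the tolerant-create pattern that avoids failing on such a race; the class and method names are illustrative and are not taken from the attached patches or from HiveMetaStore itself.

{code:java}
import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;

// Hypothetical helper, for illustration only: seed the 'default' database
// without propagating the "Duplicate entry 'default' for key
// 'UNIQUE_DATABASE'" error when another handler creates it first.
public class DefaultDatabaseSeeder {

  public static void ensureDefaultDatabase(RawStore store, Database defaultDb)
      throws MetaException {
    try {
      store.getDatabase(defaultDb.getName());
      return;                                  // already present, nothing to do
    } catch (NoSuchObjectException missing) {
      // not there yet, fall through and try to create it
    }
    try {
      store.createDatabase(defaultDb);
    } catch (Exception e) {
      // A concurrent handler may have created it between the check and the
      // insert; if the database exists now, treat the unique-key violation
      // as success instead of failing client instantiation.
      try {
        store.getDatabase(defaultDb.getName());
      } catch (NoSuchObjectException stillMissing) {
        throw new MetaException("Failed to create default database: " + e.getMessage());
      }
    }
  }
}
{code}

The code path actually involved, per the trace, is HiveMetaStore$HMSHandler.createDefaultDB(); the snippet above is only meant to make that failure mode easier to follow.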