Hi all, could you please give me a hint on how to overcome the following problem? I am using the Hive 3.0.0 source, MySQL 5.7, IntelliJ and Windows 7. I am trying to debug a simple test program under the hive-exec module, and I keep getting errors from the metastore. I have configured a MySQL server on localhost, run the Hive 3 schema SQL script against it, and added the necessary entries to hive-site.xml.
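For what it's worth, a quick JDBC check like the one below (just a sketch, reusing the connection settings from my hive-site.xml; VERSION and SCHEMA_VERSION are the standard metastore table/column names that ObjectStore.checkSchema reads) should show whether the schema script actually populated the version row:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class CheckMetastoreVersion {
    public static void main(String[] args) throws Exception {
        // Connection settings copied from the hive-site.xml entries below.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:mysql://localhost/hive3", "hive", "hive");
             Statement stmt = conn.createStatement();
             // VERSION is the table the metastore schema check reads; if it has
             // no row, Hive reports "Version information not found in metastore."
             ResultSet rs = stmt.executeQuery("SELECT SCHEMA_VERSION FROM VERSION")) {
            if (rs.next()) {
                System.out.println("Metastore schema version: " + rs.getString(1));
            } else {
                System.out.println("VERSION table is empty");
            }
        }
    }
}

If that query comes back empty, the schema script presumably never populated the hive3 database, which would be consistent with the error below.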
hive-site.xml:

  <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://localhost/hive3</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.jdbc.Driver</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>hive</value>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>hive</value>
  </property>

Stack trace:

[main] ERROR org.apache.hadoop.hive.metastore.RetryingHMSHandler - MetaException(message:Version information not found in metastore.)
    at org.apache.hadoop.hive.metastore.ObjectStore.checkSchema(ObjectStore.java:8820)
    at org.apache.hadoop.hive.metastore.ObjectStore.verifySchema(ObjectStore.java:8798)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:97)
    at com.sun.proxy.$Proxy17.verifySchema(Unknown Source)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:684)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:677)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:754)
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:525)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:147)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:108)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:80)
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:93)
    at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:8547)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:166)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:94)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.hive.metastore.utils.JavaUtils.newInstance(JavaUtils.java:84)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:95)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:148)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:119)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:4181)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:4249)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:4229)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:4485)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:296)
    at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:279)
    at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:440)
    at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:380)
    at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:360)
    at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:406)
    at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:402)
    at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:626)
    at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:486)
    at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:474)
    at org.apache.hadoop.hive.Test.main(Test.java:90)

Test code (imports and class wrapper added here so the snippet is complete; the class is org.apache.hadoop.hive.Test, as in the stack trace above):

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;

public class Test {

    public static void main(String[] args) throws Exception {
        System.setProperty("hadoop.home.dir", "E:/Hadoop/hadoop-2.8.1");
        Path path = new Path("E:/code/apache-hive-3.0.0-src/data/hive-site.xml");

        // Build a HiveConf that points at the hive-site.xml above and runs everything locally.
        HiveConf conf = new HiveConf(Driver.class);
        HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
        conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
                "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
        conf.addResource(path);
        conf.set(String.valueOf(HiveConf.ConfVars.HIVE_CBO_ENABLED), "false");
        conf.set("hadoop.tmp.dir", "/tmp/hdfs");
        conf.set("fs.defaultFS", "file:///");
        conf.set("mapred.job.tracker", "local");
        conf.set("mapreduce.framework.name", "local");
        conf.set("dfs.permissions.enabled", "false");

        SessionState.start(conf);
        Driver driver = new Driver(conf);

        // Compiling this statement is what triggers the metastore error above.
        String q = "create table tbl (i int, j int) stored as orc";
        int rc = driver.compile(q);
        CommandProcessorResponse cpr = driver.run();
    }
}

Thanks!