[ https://issues.apache.org/jira/browse/HIVE-25717?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17624859#comment-17624859 ]
zhangbutao commented on HIVE-25717:
-----------------------------------

{code:java}
java.sql.SQLException: Parameter metadata not available for the given statement
	at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:129)
	at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:97)
	at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:89)
	at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:63)
	at com.mysql.cj.jdbc.MysqlParameterMetadata.checkAvailable(MysqlParameterMetadata.java:86)
	at com.mysql.cj.jdbc.MysqlParameterMetadata.getParameterType(MysqlParameterMetadata.java:138)
	at org.apache.hive.storage.jdbc.DBRecordWritable.write(DBRecordWritable.java:67)
{code}

The exception is thrown when the MySQL JDBC property *generateSimpleParameterMetadata=true* is not set; its default value is false. Without it, *MysqlParameterMetadata::getParameterType* always fails with the exception above. I think most users will not bother to set *generateSimpleParameterMetadata=true*, and I am not sure whether setting it to true has side effects (fetching parameter metadata for every row may hurt write performance). https://issues.apache.org/jira/browse/HIVE-22706 seems to be a fix for Derby, so I think we can create a *DerbyRecordWritable* which extends *DBRecordWritable* for Derby, while other JDBC data sources (e.g. MySQL, PostgreSQL) keep using the original *DBRecordWritable* (see the sketches after the quoted issue below). [~zabetak] wdyt?

> INSERT INTO on external MariaDB/MySQL table fails silently
> -----------------------------------------------------------
>
>          Key: HIVE-25717
>          URL: https://issues.apache.org/jira/browse/HIVE-25717
>      Project: Hive
>   Issue Type: Bug
>   Components: HiveServer2
> Affects Versions: 4.0.0
>     Reporter: Stamatis Zampetakis
>     Assignee: Stamatis Zampetakis
>     Priority: Major
>       Labels: pull-request-available
>  Attachments: jdbc_table_dml_mysql.q
>
>   Time Spent: 10m
>  Remaining Estimate: 0h
>
> +MariaDB/MySQL+
> {code:sql}
> CREATE TABLE country (id int, name varchar(20));
> insert into country values (1, 'India');
> insert into country values (2, 'Russia');
> insert into country values (3, 'USA');
> {code}
> +Hive+
> {code:sql}
> CREATE EXTERNAL TABLE country (id int, name varchar(20))
> STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
> TBLPROPERTIES (
> "hive.sql.database.type" = "MYSQL",
> "hive.sql.jdbc.driver" = "com.mysql.jdbc.Driver",
> "hive.sql.jdbc.url" = "jdbc:mysql://localhost:3306/qtestDB",
> "hive.sql.dbcp.username" = "root",
> "hive.sql.dbcp.password" = "qtestpassword",
> "hive.sql.table" = "country"
> );
> INSERT INTO country VALUES (8, 'Hungary');
> SELECT * FROM country;
> {code}
> +Expected results+
> ||ID||NAME||
> |1| India|
> |2| Russia|
> |3| USA|
> |8| Hungary|
> +Actual results+
> ||ID||NAME||
> |1| India|
> |2| Russia|
> |3| USA|
> The {{INSERT INTO}} statement finishes without showing any kind of problem in
> the logs but the row is not inserted in the table.
> Running the test it comes back green although the following exception is
> printed in the System.err (not in the logs).
> {noformat}
> java.sql.SQLException: Parameter metadata not available for the given statement
> 	at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:129)
> 	at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:97)
> 	at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:89)
> 	at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:63)
> 	at com.mysql.cj.jdbc.MysqlParameterMetadata.checkAvailable(MysqlParameterMetadata.java:86)
> 	at com.mysql.cj.jdbc.MysqlParameterMetadata.getParameterType(MysqlParameterMetadata.java:138)
> 	at org.apache.hive.storage.jdbc.DBRecordWritable.write(DBRecordWritable.java:67)
> 	at org.apache.hadoop.mapreduce.lib.db.DBOutputFormat$DBRecordWriter.write(DBOutputFormat.java:122)
> 	at org.apache.hive.storage.jdbc.JdbcRecordWriter.write(JdbcRecordWriter.java:47)
> 	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.process(FileSinkOperator.java:1160)
> 	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
> 	at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:94)
> 	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
> 	at org.apache.hadoop.hive.ql.exec.UDTFOperator.forwardUDTFOutput(UDTFOperator.java:133)
> 	at org.apache.hadoop.hive.ql.udf.generic.UDTFCollector.collect(UDTFCollector.java:45)
> 	at org.apache.hadoop.hive.ql.udf.generic.GenericUDTF.forward(GenericUDTF.java:110)
> 	at org.apache.hadoop.hive.ql.udf.generic.GenericUDTFInline.process(GenericUDTFInline.java:64)
> 	at org.apache.hadoop.hive.ql.exec.UDTFOperator.process(UDTFOperator.java:116)
> 	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
> 	at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:94)
> 	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
> 	at org.apache.hadoop.hive.ql.exec.TableScanOperator.process(TableScanOperator.java:173)
> 	at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.forward(MapOperator.java:154)
> 	at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:552)
> 	at org.apache.hadoop.hive.ql.exec.tez.MapRecordSource.processRow(MapRecordSource.java:101)
> 	at org.apache.hadoop.hive.ql.exec.tez.MapRecordSource.pushRecord(MapRecordSource.java:83)
> 	at org.apache.hadoop.hive.ql.exec.tez.MapRecordProcessor.run(MapRecordProcessor.java:414)
> 	at org.apache.hadoop.hive.ql.exec.tez.TezProcessor.initializeAndRunProcessor(TezProcessor.java:311)
> 	at org.apache.hadoop.hive.ql.exec.tez.TezProcessor.run(TezProcessor.java:277)
> 	at org.apache.tez.runtime.LogicalIOProcessorRuntimeTask.run(LogicalIOProcessorRuntimeTask.java:381)
> 	at org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:82)
> 	at org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:69)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:422)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1682)
> 	at org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:69)
> 	at org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:39)
> 	at org.apache.tez.common.CallableWithNdc.call(CallableWithNdc.java:36)
> 	at org.apache.hadoop.hive.llap.daemon.impl.StatsRecordingThreadPool$WrappedCallable.call(StatsRecordingThreadPool.java:118)
> 	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
> 	at java.lang.Thread.run(Thread.java:748)
> {noformat}

--
This message was sent by Atlassian Jira
(v8.20.10#820010)
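
Sketch 1 (referenced in the comment above): a minimal standalone check of the *generateSimpleParameterMetadata* behaviour. It reuses the connection details from the reproduction in the issue (qtestDB, root/qtestpassword) and assumes MySQL Connector/J is on the classpath; the class name is illustrative only, not part of Hive.

{code:java}
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;

public class ParameterMetadataCheck {
  public static void main(String[] args) throws Exception {
    // Without "generateSimpleParameterMetadata=true" in the URL, the getParameterType()
    // call below is expected to throw "Parameter metadata not available for the given
    // statement", i.e. the same SQLException that surfaces in DBRecordWritable.write().
    String url = "jdbc:mysql://localhost:3306/qtestDB?generateSimpleParameterMetadata=true";
    try (Connection conn = DriverManager.getConnection(url, "root", "qtestpassword");
         PreparedStatement ps = conn.prepareStatement("INSERT INTO country VALUES (?, ?)")) {
      ParameterMetaData md = ps.getParameterMetaData();
      for (int i = 1; i <= md.getParameterCount(); i++) {
        // With the property enabled, Connector/J returns "simple" metadata instead of
        // failing; the reported types are generic (e.g. VARCHAR), not the real column types.
        System.out.println("parameter " + i + " -> java.sql.Types value " + md.getParameterType(i));
      }
    }
  }
}
{code}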
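Sketch 2: a rough, hypothetical illustration of the class split proposed in the comment. These are not the actual Hive classes; the field name, the set() helper, and both class names are invented for illustration, and only the org.apache.hadoop.mapreduce.lib.db.DBWritable interface methods are real.

{code:java}
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;

// Hypothetical stand-in for DBRecordWritable: bind values without touching
// ParameterMetaData, so drivers that cannot provide it (e.g. MySQL Connector/J
// without generateSimpleParameterMetadata=true) are unaffected.
class BaseRecordWritable implements DBWritable {
  protected Object[] columnValues;            // illustrative field, not the real Hive field

  void set(Object[] values) {                 // illustrative helper
    this.columnValues = values;
  }

  @Override
  public void write(PreparedStatement statement) throws SQLException {
    for (int i = 0; i < columnValues.length; i++) {
      statement.setObject(i + 1, columnValues[i]);
    }
  }

  @Override
  public void readFields(ResultSet resultSet) throws SQLException {
    // Population of columnValues from the ResultSet is omitted in this sketch.
  }
}

// Hypothetical Derby-specific subclass: keep the ParameterMetaData-driven binding
// (the behaviour HIVE-22706 relied on) only for the Derby data source.
class DerbyRecordWritableSketch extends BaseRecordWritable {
  @Override
  public void write(PreparedStatement statement) throws SQLException {
    ParameterMetaData metaData = statement.getParameterMetaData();
    for (int i = 0; i < columnValues.length; i++) {
      statement.setObject(i + 1, columnValues[i], metaData.getParameterType(i + 1));
    }
  }
}
{code}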