[ https://issues.apache.org/jira/browse/HIVE-25717?focusedWorklogId=821868&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-821868 ]
ASF GitHub Bot logged work on HIVE-25717:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 31/Oct/22 06:26
            Start Date: 31/Oct/22 06:26
    Worklog Time Spent: 10m
      Work Description: zhangbutao commented on code in PR #3709:
URL: https://github.com/apache/hive/pull/3709#discussion_r1009065886


########## jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/DBRecordWritable.java:
##########
@@ -61,16 +60,30 @@ public void write(PreparedStatement statement) throws SQLException {
     if (columnValues == null) {
       throw new SQLException("No data available to be written");
     }
-    ParameterMetaData parameterMetaData = statement.getParameterMetaData();
     for (int i = 0; i < columnValues.length; i++) {
       Object value = columnValues[i];
-      if ((parameterMetaData.getParameterType(i + 1) == Types.CHAR) && value != null && value instanceof Boolean) {
+      if ((checkParamMeta(statement) && statement.getParameterMetaData().getParameterType(i + 1) == Types.CHAR)
+          && value != null && value instanceof Boolean) {
         value = ((Boolean) value).booleanValue() ? "1" : "0";
       }
       statement.setObject(i + 1, value);
     }
   }
+  private Boolean checkParamMeta(PreparedStatement statement) throws SQLException {
+    String dbType = statement.getConnection().toString().split(":")[1].toUpperCase();

Review Comment:
   Yes, I agree this change is not very elegant. I have moved the code logic into `jdbcSerDe:initialize`. Please check again.


Issue Time Tracking
-------------------

    Worklog Id:     (was: 821868)
    Time Spent: 2h  (was: 1h 50m)
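As an illustration of the direction mentioned in the review comment above, here is a minimal sketch that decides once, up front (for example from the `hive.sql.database.type` table property when the SerDe is initialized), whether parameter metadata can be consulted, instead of parsing `Connection#toString()` on every row. All class, field, and constructor names below are hypothetical and are not taken from the actual patch.

{code:java}
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;

/**
 * Hypothetical sketch only: the flag is decided once at initialization time
 * (e.g. from the "hive.sql.database.type" table property) rather than by
 * inspecting the connection for every record written.
 */
public class DbRecordWriteSketch {

  /** True when the target driver can report parameter types; decided once. */
  private final boolean paramMetadataSupported;
  private final Object[] columnValues;

  public DbRecordWriteSketch(String dbType, Object[] columnValues) {
    // MySQL's default client-side prepared statements cannot report parameter
    // types, so skip the metadata lookup entirely for that database type.
    this.paramMetadataSupported = !"MYSQL".equalsIgnoreCase(dbType);
    this.columnValues = columnValues;
  }

  public void write(PreparedStatement statement) throws SQLException {
    if (columnValues == null) {
      throw new SQLException("No data available to be written");
    }
    for (int i = 0; i < columnValues.length; i++) {
      Object value = columnValues[i];
      // Only touch getParameterMetaData() when the driver is known to support it.
      if (paramMetadataSupported
          && statement.getParameterMetaData().getParameterType(i + 1) == Types.CHAR
          && value instanceof Boolean) {
        value = ((Boolean) value).booleanValue() ? "1" : "0";
      }
      statement.setObject(i + 1, value);
    }
  }
}
{code}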
> INSERT INTO on external MariaDB/MySQL table fails silently
> ----------------------------------------------------------
>
>                 Key: HIVE-25717
>                 URL: https://issues.apache.org/jira/browse/HIVE-25717
>             Project: Hive
>          Issue Type: Bug
>          Components: HiveServer2
>    Affects Versions: 4.0.0
>            Reporter: Stamatis Zampetakis
>            Assignee: Stamatis Zampetakis
>            Priority: Major
>              Labels: pull-request-available
>         Attachments: jdbc_table_dml_mysql.q
>
>          Time Spent: 2h
>  Remaining Estimate: 0h
>
> +MariaDB/MySQL+
> {code:sql}
> CREATE TABLE country (id int, name varchar(20));
> insert into country values (1, 'India');
> insert into country values (2, 'Russia');
> insert into country values (3, 'USA');
> {code}
>
> +Hive+
> {code:sql}
> CREATE EXTERNAL TABLE country (id int, name varchar(20))
> STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
> TBLPROPERTIES (
>     "hive.sql.database.type" = "MYSQL",
>     "hive.sql.jdbc.driver" = "com.mysql.jdbc.Driver",
>     "hive.sql.jdbc.url" = "jdbc:mysql://localhost:3306/qtestDB",
>     "hive.sql.dbcp.username" = "root",
>     "hive.sql.dbcp.password" = "qtestpassword",
>     "hive.sql.table" = "country"
> );
> INSERT INTO country VALUES (8, 'Hungary');
> SELECT * FROM country;
> {code}
>
> +Expected results+
> ||ID||NAME||
> |1| India|
> |2| Russia|
> |3| USA|
> |8| Hungary|
>
> +Actual results+
> ||ID||NAME||
> |1| India|
> |2| Russia|
> |3| USA|
>
> The {{INSERT INTO}} statement finishes without showing any kind of problem in the logs, but the row is not inserted into the table.
> Running the test, it comes back green, although the following exception is printed to System.err (not in the logs).
> {noformat}
> java.sql.SQLException: Parameter metadata not available for the given statement
>     at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:129)
>     at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:97)
>     at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:89)
>     at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:63)
>     at com.mysql.cj.jdbc.MysqlParameterMetadata.checkAvailable(MysqlParameterMetadata.java:86)
>     at com.mysql.cj.jdbc.MysqlParameterMetadata.getParameterType(MysqlParameterMetadata.java:138)
>     at org.apache.hive.storage.jdbc.DBRecordWritable.write(DBRecordWritable.java:67)
>     at org.apache.hadoop.mapreduce.lib.db.DBOutputFormat$DBRecordWriter.write(DBOutputFormat.java:122)
>     at org.apache.hive.storage.jdbc.JdbcRecordWriter.write(JdbcRecordWriter.java:47)
>     at org.apache.hadoop.hive.ql.exec.FileSinkOperator.process(FileSinkOperator.java:1160)
>     at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
>     at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:94)
>     at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
>     at org.apache.hadoop.hive.ql.exec.UDTFOperator.forwardUDTFOutput(UDTFOperator.java:133)
>     at org.apache.hadoop.hive.ql.udf.generic.UDTFCollector.collect(UDTFCollector.java:45)
>     at org.apache.hadoop.hive.ql.udf.generic.GenericUDTF.forward(GenericUDTF.java:110)
>     at org.apache.hadoop.hive.ql.udf.generic.GenericUDTFInline.process(GenericUDTFInline.java:64)
>     at org.apache.hadoop.hive.ql.exec.UDTFOperator.process(UDTFOperator.java:116)
>     at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
>     at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:94)
>     at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
>     at org.apache.hadoop.hive.ql.exec.TableScanOperator.process(TableScanOperator.java:173)
>     at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.forward(MapOperator.java:154)
>     at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:552)
>     at org.apache.hadoop.hive.ql.exec.tez.MapRecordSource.processRow(MapRecordSource.java:101)
>     at org.apache.hadoop.hive.ql.exec.tez.MapRecordSource.pushRecord(MapRecordSource.java:83)
>     at org.apache.hadoop.hive.ql.exec.tez.MapRecordProcessor.run(MapRecordProcessor.java:414)
>     at org.apache.hadoop.hive.ql.exec.tez.TezProcessor.initializeAndRunProcessor(TezProcessor.java:311)
>     at org.apache.hadoop.hive.ql.exec.tez.TezProcessor.run(TezProcessor.java:277)
>     at org.apache.tez.runtime.LogicalIOProcessorRuntimeTask.run(LogicalIOProcessorRuntimeTask.java:381)
>     at org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:82)
>     at org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:69)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1682)
>     at org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:69)
>     at org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:39)
>     at org.apache.tez.common.CallableWithNdc.call(CallableWithNdc.java:36)
>     at org.apache.hadoop.hive.llap.daemon.impl.StatsRecordingThreadPool$WrappedCallable.call(StatsRecordingThreadPool.java:118)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>     at java.lang.Thread.run(Thread.java:748)
> {noformat}
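For reference, a minimal standalone sketch of the call pattern that triggers the quoted exception; it assumes the same MySQL Connector/J driver and reuses the qtestDB connection settings from the table definition above. {{DBRecordWritable.write}} makes the same {{getParameterType}} call once per row, and the resulting exception surfaces only on System.err, as described above.

{code:java}
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.SQLException;

/**
 * Hypothetical reproducer: Connector/J uses client-side prepared statements
 * by default, and their ParameterMetaData cannot report parameter types.
 * URL and credentials below mirror the qtest setup and are assumptions.
 */
public class ParameterMetadataRepro {
  public static void main(String[] args) throws SQLException {
    try (Connection conn = DriverManager.getConnection(
             "jdbc:mysql://localhost:3306/qtestDB", "root", "qtestpassword");
         PreparedStatement ps = conn.prepareStatement(
             "INSERT INTO country (id, name) VALUES (?, ?)")) {
      ParameterMetaData meta = ps.getParameterMetaData();
      // Throws: java.sql.SQLException: Parameter metadata not available
      // for the given statement
      meta.getParameterType(1);
    }
  }
}
{code}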
--
This message was sent by Atlassian Jira
(v8.20.10#820010)