[ https://issues.apache.org/jira/browse/HIVE-25717?focusedWorklogId=821395&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-821395 ]
ASF GitHub Bot logged work on HIVE-25717:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 28/Oct/22 11:19
            Start Date: 28/Oct/22 11:19
    Worklog Time Spent: 10m
      Work Description: zabetak commented on code in PR #3709:
URL: https://github.com/apache/hive/pull/3709#discussion_r1007945976


##########
jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/DBRecordWritable.java:
##########
@@ -61,16 +60,30 @@ public void write(PreparedStatement statement) throws SQLException {
     if (columnValues == null) {
       throw new SQLException("No data available to be written");
     }
-    ParameterMetaData parameterMetaData = statement.getParameterMetaData();
     for (int i = 0; i < columnValues.length; i++) {
       Object value = columnValues[i];
-      if ((parameterMetaData.getParameterType(i + 1) == Types.CHAR) && value != null && value instanceof Boolean) {
+      if ((checkParamMeta(statement) && statement.getParameterMetaData().getParameterType(i + 1) == Types.CHAR)
+          && value != null && value instanceof Boolean) {
         value = ((Boolean) value).booleanValue() ? "1" : "0";
       }
       statement.setObject(i + 1, value);
     }
   }

+  private Boolean checkParamMeta(PreparedStatement statement) throws SQLException {
+    String dbType = statement.getConnection().toString().split(":")[1].toUpperCase();

Review Comment:
   Relying on `getConnection().toString()` is risky since the result can change at any time without notice. Moreover, I am not sure the snippet here is going to work for every supported database.



##########
jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/DBRecordWritable.java:
##########
@@ -61,16 +60,30 @@ public void write(PreparedStatement statement) throws SQLException {
     if (columnValues == null) {
       throw new SQLException("No data available to be written");
     }
-    ParameterMetaData parameterMetaData = statement.getParameterMetaData();
     for (int i = 0; i < columnValues.length; i++) {
       Object value = columnValues[i];
-      if ((parameterMetaData.getParameterType(i + 1) == Types.CHAR) && value != null && value instanceof Boolean) {
+      if ((checkParamMeta(statement) && statement.getParameterMetaData().getParameterType(i + 1) == Types.CHAR)
+          && value != null && value instanceof Boolean) {
         value = ((Boolean) value).booleanValue() ? "1" : "0";
       }
       statement.setObject(i + 1, value);

Review Comment:
   Here we are doing a conversion based on the expected and actual type. Instead of relying on parameterMetaData, which may not be available in every JDBC driver, can't we do the conversion earlier, e.g., when we set up `columnValues`?
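For reference, a minimal sketch of the direction the two comments above point at, assuming the database family can be detected through standard JDBC `DatabaseMetaData` and that MySQL/MariaDB are the drivers lacking parameter metadata. The class wrapper and the `isMySqlFamily` helper are hypothetical names for illustration, not part of the patch under review:

{code:java}
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;

// Hypothetical sketch, not the actual patch: skip the ParameterMetaData lookup
// for drivers known not to provide it, and identify the driver through standard
// DatabaseMetaData instead of parsing Connection#toString().
public class DBRecordWritableSketch {

  private Object[] columnValues; // populated elsewhere, as in DBRecordWritable

  public void write(PreparedStatement statement) throws SQLException {
    if (columnValues == null) {
      throw new SQLException("No data available to be written");
    }
    // Assumption: only the MySQL/MariaDB driver throws
    // "Parameter metadata not available for the given statement" here.
    boolean useParamMeta = !isMySqlFamily(statement);
    for (int i = 0; i < columnValues.length; i++) {
      Object value = columnValues[i];
      // Keep the existing Boolean -> "1"/"0" conversion for CHAR parameters,
      // but only where ParameterMetaData can actually be consulted.
      if (useParamMeta && value instanceof Boolean
          && statement.getParameterMetaData().getParameterType(i + 1) == Types.CHAR) {
        value = ((Boolean) value) ? "1" : "0";
      }
      statement.setObject(i + 1, value);
    }
  }

  // Illustrative helper: uses the standard DatabaseMetaData#getDatabaseProductName()
  // call rather than parsing the driver-specific output of Connection#toString().
  private static boolean isMySqlFamily(PreparedStatement statement) throws SQLException {
    String product = statement.getConnection().getMetaData().getDatabaseProductName();
    if (product == null) {
      return false;
    }
    String upper = product.toUpperCase();
    return upper.contains("MYSQL") || upper.contains("MARIADB");
  }
}
{code}

Moving the conversion to the point where `columnValues` is populated, as the second comment proposes, would remove the ParameterMetaData dependency altogether; the sketch above only narrows when it is consulted.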
##########
ql/src/test/queries/clientpositive/jdbc_table_dml_mysql.q:
##########
@@ -0,0 +1,15 @@
+--! qt:database:mysql:q_test_country_table.sql
+
+CREATE EXTERNAL TABLE country (id int, name varchar(20))
+    STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+    "hive.sql.database.type" = "MYSQL",
+    "hive.sql.jdbc.driver" = "com.mysql.jdbc.Driver",
+    "hive.sql.jdbc.url" = "jdbc:mysql://localhost:3306/qtestDB",
+    "hive.sql.dbcp.username" = "root",
+    "hive.sql.dbcp.password" = "qtestpassword",
+    "hive.sql.table" = "country"
+    );
+
+INSERT INTO country VALUES (8, 'Hungary');
+SELECT * FROM country;

Review Comment:
   Please add a new line at the end of the file (see https://unix.stackexchange.com/questions/18743/whats-the-point-in-adding-a-new-line-to-the-end-of-a-file).



##########
ql/src/test/queries/clientpositive/jdbc_handler.q:
##########
@@ -1,5 +1,4 @@
 --! qt:dataset:src
---! qt:disabled:flaky HIVE-23709

Review Comment:
   We have no proof that this is not flaky anymore, so leave it disabled for the time being and add new tests if necessary.




Issue Time Tracking
-------------------

    Worklog Id:     (was: 821395)
    Time Spent: 1.5h  (was: 1h 20m)

> INSERT INTO on external MariaDB/MySQL table fails silently
> ----------------------------------------------------------
>
>                 Key: HIVE-25717
>                 URL: https://issues.apache.org/jira/browse/HIVE-25717
>             Project: Hive
>          Issue Type: Bug
>          Components: HiveServer2
>    Affects Versions: 4.0.0
>            Reporter: Stamatis Zampetakis
>            Assignee: Stamatis Zampetakis
>            Priority: Major
>              Labels: pull-request-available
>         Attachments: jdbc_table_dml_mysql.q
>
>          Time Spent: 1.5h
>  Remaining Estimate: 0h
>
> +MariaDB/MySQL+
> {code:sql}
> CREATE TABLE country (id int, name varchar(20));
> insert into country values (1, 'India');
> insert into country values (2, 'Russia');
> insert into country values (3, 'USA');
> {code}
> +Hive+
> {code:sql}
> CREATE EXTERNAL TABLE country (id int, name varchar(20))
>     STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
> TBLPROPERTIES (
>     "hive.sql.database.type" = "MYSQL",
>     "hive.sql.jdbc.driver" = "com.mysql.jdbc.Driver",
>     "hive.sql.jdbc.url" = "jdbc:mysql://localhost:3306/qtestDB",
>     "hive.sql.dbcp.username" = "root",
>     "hive.sql.dbcp.password" = "qtestpassword",
>     "hive.sql.table" = "country"
>     );
> INSERT INTO country VALUES (8, 'Hungary');
> SELECT * FROM country;
> {code}
> +Expected results+
> ||ID||NAME||
> |1| India|
> |2| Russia|
> |3| USA|
> |8| Hungary|
> +Actual results+
> ||ID||NAME||
> |1| India|
> |2| Russia|
> |3| USA|
> The {{INSERT INTO}} statement finishes without showing any kind of problem in
> the logs, but the row is not inserted in the table.
> Running the test, it comes back green, although the following exception is
> printed in System.err (not in the logs).
> {noformat}
> java.sql.SQLException: Parameter metadata not available for the given statement
>   at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:129)
>   at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:97)
>   at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:89)
>   at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:63)
>   at com.mysql.cj.jdbc.MysqlParameterMetadata.checkAvailable(MysqlParameterMetadata.java:86)
>   at com.mysql.cj.jdbc.MysqlParameterMetadata.getParameterType(MysqlParameterMetadata.java:138)
>   at org.apache.hive.storage.jdbc.DBRecordWritable.write(DBRecordWritable.java:67)
>   at org.apache.hadoop.mapreduce.lib.db.DBOutputFormat$DBRecordWriter.write(DBOutputFormat.java:122)
>   at org.apache.hive.storage.jdbc.JdbcRecordWriter.write(JdbcRecordWriter.java:47)
>   at org.apache.hadoop.hive.ql.exec.FileSinkOperator.process(FileSinkOperator.java:1160)
>   at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
>   at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:94)
>   at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
>   at org.apache.hadoop.hive.ql.exec.UDTFOperator.forwardUDTFOutput(UDTFOperator.java:133)
>   at org.apache.hadoop.hive.ql.udf.generic.UDTFCollector.collect(UDTFCollector.java:45)
>   at org.apache.hadoop.hive.ql.udf.generic.GenericUDTF.forward(GenericUDTF.java:110)
>   at org.apache.hadoop.hive.ql.udf.generic.GenericUDTFInline.process(GenericUDTFInline.java:64)
>   at org.apache.hadoop.hive.ql.exec.UDTFOperator.process(UDTFOperator.java:116)
>   at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
>   at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:94)
>   at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
>   at org.apache.hadoop.hive.ql.exec.TableScanOperator.process(TableScanOperator.java:173)
>   at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.forward(MapOperator.java:154)
>   at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:552)
>   at org.apache.hadoop.hive.ql.exec.tez.MapRecordSource.processRow(MapRecordSource.java:101)
>   at org.apache.hadoop.hive.ql.exec.tez.MapRecordSource.pushRecord(MapRecordSource.java:83)
>   at org.apache.hadoop.hive.ql.exec.tez.MapRecordProcessor.run(MapRecordProcessor.java:414)
>   at org.apache.hadoop.hive.ql.exec.tez.TezProcessor.initializeAndRunProcessor(TezProcessor.java:311)
>   at org.apache.hadoop.hive.ql.exec.tez.TezProcessor.run(TezProcessor.java:277)
>   at org.apache.tez.runtime.LogicalIOProcessorRuntimeTask.run(LogicalIOProcessorRuntimeTask.java:381)
>   at org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:82)
>   at org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:69)
>   at java.security.AccessController.doPrivileged(Native Method)
>   at javax.security.auth.Subject.doAs(Subject.java:422)
>   at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1682)
>   at org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:69)
>   at org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:39)
>   at org.apache.tez.common.CallableWithNdc.call(CallableWithNdc.java:36)
>   at org.apache.hadoop.hive.llap.daemon.impl.StatsRecordingThreadPool$WrappedCallable.call(StatsRecordingThreadPool.java:118)
>   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>   at java.lang.Thread.run(Thread.java:748)
> {noformat}



--
This message was sent by Atlassian Jira
(v8.20.10#820010)