This is an automated email from the ASF dual-hosted git repository.

joemcdonnell pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/impala.git

commit 907c1738a0e722809dd3befb36c0cf0bfc223c83
Author: Pranav Lodha <[email protected]>
AuthorDate: Mon Jun 24 15:24:11 2024 -0700

    IMPALA-12789: Fix unit-test code JdbcDataSourceTest.java
    
    The unit test `JdbcDataSourceTest.java` was originally
    implemented using the H2 database, which is no longer
    available in Impala's environment. The test code was
    also outdated and erroneous.
    
    This commit addresses the failure of
    JdbcDataSourceTest.java by rewriting it against
    Postgres, which ensures compatibility with Impala's
    current environment and aligns with the JDBC and
    external data source APIs. Please note, this test is
    moved to the fe folder to fix the "BackendConfig
    instance not initialized" error.
    
    To test this file, run the following command:
    pushd fe && mvn -fae test -Dtest=JdbcDataSourceTest
    
    Please note that the tests in JdbcDataSourceTest have a
    dependency on previous tests, and individual tests cannot be
    run separately for this class.
    Change-Id: Ie07173d256d73c88f5a6c041f087db16b6ff3127
    Reviewed-on: http://gerrit.cloudera.org:8080/21805
    Reviewed-by: Impala Public Jenkins <[email protected]>
    Tested-by: Impala Public Jenkins <[email protected]>
---
 .../extdatasource/jdbc/JdbcDataSourceTest.java     | 66 +++++++++++++---------
 java/ext-data-source/jdbc/pom.xml                  |  7 ---
 .../jdbc/src/test/resources/test_script.sql        | 47 ---------------
 testdata/bin/create-ext-data-source-table.sql      | 20 +++++++
 testdata/bin/load-ext-data-sources.sh              | 28 ++++++++-
 5 files changed, 87 insertions(+), 81 deletions(-)

diff --git 
a/java/ext-data-source/jdbc/src/test/java/org/apache/impala/extdatasource/jdbc/JdbcDataSourceTest.java
 b/fe/src/test/java/org/apache/impala/extdatasource/jdbc/JdbcDataSourceTest.java
similarity index 86%
rename from 
java/ext-data-source/jdbc/src/test/java/org/apache/impala/extdatasource/jdbc/JdbcDataSourceTest.java
rename to 
fe/src/test/java/org/apache/impala/extdatasource/jdbc/JdbcDataSourceTest.java
index 229367c8d..00a65d674 100644
--- 
a/java/ext-data-source/jdbc/src/test/java/org/apache/impala/extdatasource/jdbc/JdbcDataSourceTest.java
+++ 
b/fe/src/test/java/org/apache/impala/extdatasource/jdbc/JdbcDataSourceTest.java
@@ -33,6 +33,8 @@ import org.apache.impala.extdatasource.thrift.TPrepareParams;
 import org.apache.impala.extdatasource.thrift.TPrepareResult;
 import org.apache.impala.extdatasource.thrift.TRowBatch;
 import org.apache.impala.extdatasource.thrift.TTableSchema;
+import org.apache.impala.service.BackendConfig;
+import org.apache.impala.thrift.TBackendGflags;
 import org.apache.impala.thrift.TColumnData;
 import org.apache.impala.thrift.TColumnType;
 import org.apache.impala.thrift.TColumnValue;
@@ -44,6 +46,8 @@ import org.apache.impala.thrift.TTypeNodeType;
 import org.apache.impala.thrift.TUniqueId;
 import org.junit.Assert;
 import org.junit.FixMethodOrder;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runners.MethodSorters;
 import org.slf4j.Logger;
@@ -51,13 +55,32 @@ import org.slf4j.LoggerFactory;
 
 @FixMethodOrder(MethodSorters.NAME_ASCENDING)
 public class JdbcDataSourceTest {
+  private static TBackendGflags origFlags;
+
+  @BeforeClass
+  public static void setup() {
+    // The original BackendConfig need to be mocked, we are saving the values 
here, so
+    // they can be restored and not break other tests
+    if (BackendConfig.INSTANCE == null) {
+      BackendConfig.create(new TBackendGflags());
+    }
+    origFlags = BackendConfig.INSTANCE.getBackendCfg();
+  }
+
+  @AfterClass
+  public static void teardown() {
+    BackendConfig.create(origFlags);
+  }
 
   private static final Logger LOG = 
LoggerFactory.getLogger(JdbcDataSourceTest.class);
 
-  private static String initString_ = "CACHE_CLASS::{\"database.type\":\"H2\", 
"
-      + "\"jdbc.url\":\"jdbc:h2:mem:test;MODE=MySQL;INIT=runscript from "
-      + "'classpath:test_script.sql'\", "
-      + "\"jdbc.driver\":\"org.h2.Driver\", "
+  private static String initString_ = "{\"database.type\":\"POSTGRES\", "
+      + "\"jdbc.url\":\"jdbc:postgresql://localhost:5432/functional\", "
+      + "\"jdbc.driver\":\"org.postgresql.Driver\", "
+      + "\"driver.url\":\"hdfs://localhost:20500/test-warehouse/data-sources/"
+      + "jdbc-drivers/postgresql-jdbc.jar\", "
+      + "\"dbcp.username\":\"hiveuser\", "
+      + "\"dbcp.password\":\"password\", "
       + "\"table\":\"test_strategy\","
       + "\"column.mapping\":\"id=strategy_id\"}";
 
@@ -66,9 +89,12 @@ public class JdbcDataSourceTest {
   private static String scanHandle_;
   private static TTableSchema schema_;
   private static List<List<TBinaryPredicate>> predicates_ = 
Lists.newArrayList();
-  private static List<List<TBinaryPredicate>> acceptedPredicates_ = 
Lists.newArrayList();
+  private static List<List<TBinaryPredicate>> acceptedPredicates_ =
+      Lists.newArrayList();
   private static long expectReturnRows_ = 5L;
 
+  // Please note that the unit test cases in this class have a dependency on
+  // previous tests. They must be ran sequentially.
   @Test
   public void test01Init() {
     String colName = "id";
@@ -91,7 +117,8 @@ public class JdbcDataSourceTest {
     // predicates filter
     predicates_.add(Lists.newArrayList(idPredicate));
     expectReturnRows_ = 3L;
-    LOG.info("setup predicates:{}, expectReturnRows: {}", predicates_, 
expectReturnRows_);
+    LOG.info("setup predicates:{}, expectReturnRows: {}", predicates_,
+        expectReturnRows_);
 
     boolean ret = 
jdbcDataSource_.convertInitStringToConfiguration(initString_);
     Assert.assertTrue(ret);
@@ -102,7 +129,6 @@ public class JdbcDataSourceTest {
     TPrepareParams params = new TPrepareParams();
     params.setTable_name("test_strategy");
     params.setInit_string(initString_);
-    params.setPredicates(Lists.newArrayList());
     params.setPredicates(predicates_);
     TPrepareResult resp = jdbcDataSource_.prepare(params);
     Assert.assertEquals(TErrorCode.OK, resp.getStatus().status_code);
@@ -133,7 +159,6 @@ public class JdbcDataSourceTest {
     TUniqueId unique_id = new TUniqueId();
     unique_id.hi = 0xfeedbeeff00d7777L;
     unique_id.lo = 0x2020202020202020L;
-    String str = "feedbeeff00d7777:2020202020202020";
     params.setQuery_id(unique_id);
     params.setTable_name("test_strategy");
     params.setInit_string(initString_);
@@ -177,7 +202,7 @@ public class JdbcDataSourceTest {
   private static TTableSchema initSchema() {
     // strategy_id int, name string, referrer string, landing string, priority 
 int,
     // implementation string, last_modified timestamp
-    TTableSchema schema_ = new TTableSchema();
+    TTableSchema schema = new TTableSchema();
     TColumnDesc col = new TColumnDesc();
     col.setName("id");
     TTypeNode typeNode = new TTypeNode();
@@ -188,7 +213,7 @@ public class JdbcDataSourceTest {
     TColumnType colType = new TColumnType();
     colType.setTypes(Lists.newArrayList(typeNode));
     col.setType(colType);
-    schema_.addToCols(col);
+    schema.addToCols(col);
 
     col = new TColumnDesc();
     col.setName("name");
@@ -200,7 +225,7 @@ public class JdbcDataSourceTest {
     colType = new TColumnType();
     colType.setTypes(Lists.newArrayList(typeNode));
     col.setType(colType);
-    schema_.addToCols(col);
+    schema.addToCols(col);
 
     col = new TColumnDesc();
     col.setName("priority");
@@ -212,7 +237,7 @@ public class JdbcDataSourceTest {
     colType = new TColumnType();
     colType.setTypes(Lists.newArrayList(typeNode));
     col.setType(colType);
-    schema_.addToCols(col);
+    schema.addToCols(col);
 
     col = new TColumnDesc();
     col.setName("implementation");
@@ -224,7 +249,7 @@ public class JdbcDataSourceTest {
     colType = new TColumnType();
     colType.setTypes(Lists.newArrayList(typeNode));
     col.setType(colType);
-    schema_.addToCols(col);
+    schema.addToCols(col);
 
     col = new TColumnDesc();
     col.setName("last_modified");
@@ -236,18 +261,7 @@ public class JdbcDataSourceTest {
     colType = new TColumnType();
     colType.setTypes(Lists.newArrayList(typeNode));
     col.setType(colType);
-    schema_.addToCols(col);
-    return schema_;
+    schema.addToCols(col);
+    return schema;
   }
-
-  public static void printData(List<TColumnDesc> colDescs, List<TColumnData> 
colDatas) {
-    for (int i = 0; i < colDatas.size(); ++i) {
-      TColumnDesc colDesc = colDescs.get(i);
-      TColumnData colData = colDatas.get(i);
-      System.out.println("idx: " + i);
-      System.out.println(" Name: " + colDesc);
-      System.out.println(" Data: " + colData);
-    }
-  }
-
 }
diff --git a/java/ext-data-source/jdbc/pom.xml 
b/java/ext-data-source/jdbc/pom.xml
index 2db3c863b..6b39daf96 100644
--- a/java/ext-data-source/jdbc/pom.xml
+++ b/java/ext-data-source/jdbc/pom.xml
@@ -33,7 +33,6 @@
 
   <properties>
     <commons-dbcp2.version>2.9.0</commons-dbcp2.version>
-    <h2database.version>1.3.166</h2database.version>
   </properties>
 
   <dependencies>
@@ -54,12 +53,6 @@
       <artifactId>commons-dbcp2</artifactId>
       <version>${commons-dbcp2.version}</version>
     </dependency>
-    <dependency>
-      <groupId>com.h2database</groupId>
-      <artifactId>h2</artifactId>
-      <version>${h2database.version}</version>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
diff --git a/java/ext-data-source/jdbc/src/test/resources/test_script.sql 
b/java/ext-data-source/jdbc/src/test/resources/test_script.sql
deleted file mode 100644
index f88cb5144..000000000
--- a/java/ext-data-source/jdbc/src/test/resources/test_script.sql
+++ /dev/null
@@ -1,47 +0,0 @@
---
--- Licensed to the Apache Software Foundation (ASF) under one
--- or more contributor license agreements.  See the NOTICE file
--- distributed with this work for additional information
--- regarding copyright ownership.  The ASF licenses this file
--- to you under the Apache License, Version 2.0 (the
--- "License"); you may not use this file except in compliance
--- with the License.  You may obtain a copy of the License at
---
---   http://www.apache.org/licenses/LICENSE-2.0
---
--- Unless required by applicable law or agreed to in writing,
--- software distributed under the License is distributed on an
--- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
--- KIND, either express or implied.  See the License for the
--- specific language governing permissions and limitations
--- under the License.
-
-DROP TABLE IF EXISTS test_strategy;
-
-CREATE TABLE IF NOT EXISTS test_strategy (
-  strategy_id int(11) NOT NULL,
-  name varchar(50) NOT NULL,
-  referrer varchar(1024) DEFAULT NULL,
-  landing varchar(1024) DEFAULT NULL,
-  priority int(11) DEFAULT NULL,
-  implementation varchar(512) DEFAULT NULL,
-  last_modified timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
-  PRIMARY KEY (strategy_id)
-);
-
-
-INSERT INTO test_strategy (strategy_id, name, referrer, landing, priority, 
implementation,
-                           last_modified)
-VALUES (1, 'S1', 'aaa', 'abc', 1000, NULL, '2012-05-08 15:01:15');
-INSERT INTO test_strategy (strategy_id, name, referrer, landing, priority, 
implementation,
-                           last_modified)
-VALUES (2, 'S2', 'bbb', 'def', 990, NULL, '2012-05-08 15:01:15');
-INSERT INTO test_strategy (strategy_id, name, referrer, landing, priority, 
implementation,
-                           last_modified)
-VALUES (3, 'S3', 'ccc', 'ghi', 1000, NULL, '2012-05-08 15:01:15');
-INSERT INTO test_strategy (strategy_id, name, referrer, landing, priority, 
implementation,
-                           last_modified)
-VALUES (4, 'S4', 'ddd', 'jkl', 980, NULL, '2012-05-08 15:01:15');
-INSERT INTO test_strategy (strategy_id, name, referrer, landing, priority, 
implementation,
-                           last_modified)
-VALUES (5, 'S5', 'eee', NULL, NULL, NULL, '2012-05-08 15:01:15');
diff --git a/testdata/bin/create-ext-data-source-table.sql 
b/testdata/bin/create-ext-data-source-table.sql
index aa20d9f28..efedfd78b 100644
--- a/testdata/bin/create-ext-data-source-table.sql
+++ b/testdata/bin/create-ext-data-source-table.sql
@@ -109,3 +109,23 @@ TBLPROPERTIES (
 "dbcp.username"="hiveuser",
 "dbcp.password"="password",
 "table"="decimal_tbl");
+
+DROP TABLE IF EXISTS test_strategy;
+CREATE EXTERNAL TABLE IF NOT EXISTS test_strategy (
+  strategy_id INT,
+  name STRING,
+  referrer STRING,
+  landing STRING,
+  priority INT,
+  implementation STRING,
+  last_modified timestamp,
+  PRIMARY KEY (strategy_id) )
+STORED BY JDBC
+TBLPROPERTIES (
+"database.type"="POSTGRES",
+"jdbc.url"="jdbc:postgresql://localhost:5432/functional",
+"jdbc.driver"="org.postgresql.Driver",
+"driver.url"="/test-warehouse/data-sources/jdbc-drivers/postgresql-jdbc.jar",
+"dbcp.username"="hiveuser",
+"dbcp.password"="password",
+"table"="test_strategy");
\ No newline at end of file
diff --git a/testdata/bin/load-ext-data-sources.sh 
b/testdata/bin/load-ext-data-sources.sh
index 1ddd7687e..246961bb6 100755
--- a/testdata/bin/load-ext-data-sources.sh
+++ b/testdata/bin/load-ext-data-sources.sh
@@ -83,6 +83,31 @@ CREATE TABLE decimal_tbl
 __EOT__
 sudo -u postgres psql -U hiveuser -d functional -f /tmp/jdbc_decimal_tbl.sql
 
+# Create test_strategy1 table for unit test
+cat > /tmp/jdbc_test_strategy.sql << __EOT__
+DROP TABLE IF EXISTS test_strategy;
+CREATE TABLE test_strategy
+(
+  strategy_id INT,
+  name VARCHAR(50),
+  referrer VARCHAR(1024),
+  landing VARCHAR(1024),
+  priority INT,
+  implementation VARCHAR(512),
+  last_modified timestamp,
+  PRIMARY KEY (strategy_id)
+);
+
+INSERT INTO test_strategy (strategy_id, name, referrer, landing, priority,
+  implementation, last_modified) VALUES
+  (1, 'S1', 'aaa', 'abc', 1000, NULL, '2012-05-08 15:01:15'),
+  (2, 'S2', 'bbb', 'def', 990, NULL, '2012-05-08 15:01:15'),
+  (3, 'S3', 'ccc', 'ghi', 1000, NULL, '2012-05-08 15:01:15'),
+  (4, 'S4', 'ddd', 'jkl', 980, NULL, '2012-05-08 15:01:15'),
+  (5, 'S5', 'eee', NULL, NULL, NULL, '2012-05-08 15:01:15');
+__EOT__
+sudo -u postgres psql -U hiveuser -d functional -f /tmp/jdbc_test_strategy.sql
+
 # Load data to jdbc table
 cat ${IMPALA_HOME}/testdata/target/AllTypes/* > /tmp/jdbc_alltypes.csv
 loadCmd="COPY alltypes FROM '/tmp/jdbc_alltypes.csv' DELIMITER ',' CSV"
@@ -128,4 +153,5 @@ ${IMPALA_HOME}/bin/impala-shell.sh -i ${IMPALAD} -f 
/tmp/impala_jdbc_alltypes.sq
 rm /tmp/jdbc_alltypes.*
 rm /tmp/jdbc_alltypes_with_quote.*
 rm /tmp/jdbc_decimal_tbl.*
-rm /tmp/impala_jdbc_alltypes.sql
+rm /tmp/jdbc_test_strategy.*
+rm /tmp/impala_jdbc_alltypes.sql
\ No newline at end of file

Reply via email to