This is an automated email from the ASF dual-hosted git repository.

stigahuang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/impala.git

commit edd1e214932b7eb2d4eaa3ac66cfe005951c0af0
Author: wzhou-code <[email protected]>
AuthorDate: Wed Feb 7 08:46:21 2024 -0800

    IMPALA-12793: Create JDBC table without data source
    
    This patch changes syntax of creating JDBC table statement as
      CREATE TABLE [IF NOT EXISTS] [db_name.]table_name
      (col_name data_type
        [constraint_specification]
        [COMMENT 'col_comment']
        [, ...]
      )
      [COMMENT 'table_comment']
      STORED BY JDBC
      TBLPROPERTIES ('key1'='value1', 'key2'='value2', ...)
    
    Both "STORED BY JDBC" and "STORED AS JDBC" are acceptable. A table
    property '__IMPALA_DATA_SOURCE_NAME' is added to the JDBC table with
    value 'impalajdbcdatasource', which is shown in the output of the
    command 'show create table'.
    The following required JDBC parameters must be specified as table
    properties: database.type, jdbc.url, jdbc.driver, driver.url, and table.
    Otherwise, an AnalysisException will be thrown.
    
    Testing:
     - Added frontend unit tests for new syntax of creating JDBC table.
     - Updated end-to-end unit tests to create JDBC tables without data
       source.
     - Passed core tests
    
    Change-Id: I765aa86b430246786ad85ab6857cefaf4332c920
    Reviewed-on: http://gerrit.cloudera.org:8080/21016
    Reviewed-by: Joe McDonnell <[email protected]>
    Tested-by: Impala Public Jenkins <[email protected]>
---
 be/src/service/query-options-test.cc               |  2 +-
 common/thrift/CatalogObjects.thrift                |  1 +
 fe/src/main/cup/sql-parser.cup                     | 10 ++-
 .../apache/impala/analysis/CreateDataSrcStmt.java  |  5 +-
 .../impala/analysis/CreateTableAsSelectStmt.java   |  3 +
 .../impala/analysis/CreateTableLikeFileStmt.java   |  3 +
 .../impala/analysis/CreateTableLikeStmt.java       |  2 +
 .../apache/impala/analysis/CreateTableStmt.java    | 37 +++++++-
 .../java/org/apache/impala/analysis/TableDef.java  |  3 +-
 .../org/apache/impala/catalog/DataSourceTable.java | 98 ++++++++++++++++++----
 .../org/apache/impala/catalog/HdfsFileFormat.java  | 11 ++-
 .../impala/catalog/local/LocalDataSourceTable.java | 72 ++++++++++++----
 .../main/java/org/apache/impala/util/JsonUtil.java | 79 +++++++++++++++++
 fe/src/main/jflex/sql-scanner.flex                 |  1 +
 .../org/apache/impala/analysis/AnalyzeDDLTest.java | 33 +++++++-
 .../org/apache/impala/analysis/ParserTest.java     |  9 ++
 .../apache/impala/customcluster/LdapHS2Test.java   | 78 ++++++++---------
 testdata/bin/create-ext-data-source-table.sql      | 42 +++++-----
 .../impala-ext-jdbc-tables-predicates.test         | 44 +++-------
 .../queries/QueryTest/impala-ext-jdbc-tables.test  | 67 +++++----------
 .../QueryTest/jdbc-data-source-with-keystore.test  | 60 +++++--------
 .../queries/QueryTest/jdbc-data-source.test        | 63 +++++---------
 .../queries/QueryTest/mysql-ext-jdbc-tables.test   | 63 +++++---------
 .../functional-query/queries/QueryTest/set.test    |  2 +-
 tests/query_test/test_ext_data_sources.py          | 11 +--
 25 files changed, 476 insertions(+), 323 deletions(-)

diff --git a/be/src/service/query-options-test.cc 
b/be/src/service/query-options-test.cc
index 75bd1867a..badeac2e7 100644
--- a/be/src/service/query-options-test.cc
+++ b/be/src/service/query-options-test.cc
@@ -231,7 +231,7 @@ TEST(QueryOptions, SetEnumOptions) {
       (THREE_LEVEL, TWO_LEVEL, TWO_LEVEL_THEN_THREE_LEVEL)), true);
   TestEnumCase(options, CASE(default_file_format, THdfsFileFormat,
       (TEXT, RC_FILE, SEQUENCE_FILE, AVRO, PARQUET, KUDU, ORC, HUDI_PARQUET, 
ICEBERG,
-      JSON)), true);
+      JSON, JDBC)), true);
   TestEnumCase(options, CASE(runtime_filter_mode, TRuntimeFilterMode,
       (OFF, LOCAL, GLOBAL)), true);
   TestEnumCase(options, CASE(kudu_read_mode, TKuduReadMode,
diff --git a/common/thrift/CatalogObjects.thrift 
b/common/thrift/CatalogObjects.thrift
index 158ee9385..6d0858c78 100644
--- a/common/thrift/CatalogObjects.thrift
+++ b/common/thrift/CatalogObjects.thrift
@@ -78,6 +78,7 @@ enum THdfsFileFormat {
   HUDI_PARQUET = 7
   ICEBERG = 8
   JSON = 9
+  JDBC = 10
 }
 
 enum TVirtualColumnType {
diff --git a/fe/src/main/cup/sql-parser.cup b/fe/src/main/cup/sql-parser.cup
index 905e7bb23..fb1b9b25f 100755
--- a/fe/src/main/cup/sql-parser.cup
+++ b/fe/src/main/cup/sql-parser.cup
@@ -315,8 +315,8 @@ terminal
   KW_GROUP, KW_GROUPING, KW_HASH, KW_HUDIPARQUET, KW_IGNORE, KW_HAVING, 
KW_ICEBERG, KW_IF,
   KW_ILIKE, KW_IN, KW_INCREMENTAL, KW_INIT_FN, KW_INNER, KW_INPATH, KW_INSERT, 
KW_INT,
   KW_INTERMEDIATE, KW_INTERSECT, KW_INTERVAL, KW_INTO, KW_INVALIDATE, 
KW_IREGEXP, KW_IS,
-  KW_JOIN, KW_JSONFILE, KW_KUDU, KW_LAST, KW_LEFT, KW_LEXICAL, KW_LIKE, 
KW_LIMIT, KW_LINES,
-  KW_LOAD, KW_LOCATION, KW_LOGICAL_OR, KW_MANAGED_LOCATION, KW_MAP, 
KW_MERGE_FN,
+  KW_JDBC, KW_JOIN, KW_JSONFILE, KW_KUDU, KW_LAST, KW_LEFT, KW_LEXICAL, 
KW_LIKE, KW_LIMIT,
+  KW_LINES, KW_LOAD, KW_LOCATION, KW_LOGICAL_OR, KW_MANAGED_LOCATION, KW_MAP, 
KW_MERGE_FN,
   KW_METADATA, KW_MINUS, KW_NON, KW_NORELY, KW_NOT,
   KW_NOVALIDATE, KW_NULL, KW_NULLS, KW_OF, KW_OFFSET, KW_ON, KW_OPTIMIZE, 
KW_OR,
   KW_ORC, KW_ORDER, KW_OUTER,
@@ -2172,6 +2172,8 @@ file_format_val ::=
   {: RESULT = THdfsFileFormat.ICEBERG; :}
   | KW_JSONFILE
   {: RESULT = THdfsFileFormat.JSON; :}
+  | KW_JDBC
+  {: RESULT = THdfsFileFormat.JDBC; :}
   ;
 
 storage_engine_val ::=
@@ -2179,6 +2181,8 @@ storage_engine_val ::=
   {: RESULT = THdfsFileFormat.KUDU; :}
   | KW_ICEBERG
   {: RESULT = THdfsFileFormat.ICEBERG; :}
+  | KW_JDBC
+  {: RESULT = THdfsFileFormat.JDBC; :}
   ;
 
 tbl_properties ::=
@@ -4397,6 +4401,8 @@ word ::=
   {: RESULT = r.toString(); :}
   | KW_IS:r
   {: RESULT = r.toString(); :}
+  | KW_JDBC:r
+  {: RESULT = r.toString(); :}
   | KW_JOIN:r
   {: RESULT = r.toString(); :}
   | KW_JSONFILE:r
diff --git a/fe/src/main/java/org/apache/impala/analysis/CreateDataSrcStmt.java 
b/fe/src/main/java/org/apache/impala/analysis/CreateDataSrcStmt.java
index 55301019c..e6c80a959 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateDataSrcStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateDataSrcStmt.java
@@ -19,6 +19,7 @@ package org.apache.impala.analysis;
 
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.impala.authorization.Privilege;
+import org.apache.impala.catalog.DataSourceTable;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.compat.MetastoreShim;
 import org.apache.impala.extdatasource.ApiVersion;
@@ -53,7 +54,9 @@ public class CreateDataSrcStmt extends StatementBase {
 
   @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
-    if (!MetastoreShim.validateName(dataSrcName_)) {
+    if 
(dataSrcName_.equalsIgnoreCase(DataSourceTable.IMPALA_BUILTIN_JDBC_DATASOURCE)) 
{
+      throw new AnalysisException("Built-in data source name: " + 
dataSrcName_);
+    } else if (!MetastoreShim.validateName(dataSrcName_)) {
       throw new AnalysisException("Invalid data source name: " + dataSrcName_);
     }
     if (!ifNotExists_ && analyzer.getCatalog().getDataSource(dataSrcName_) != 
null) {
diff --git 
a/fe/src/main/java/org/apache/impala/analysis/CreateTableAsSelectStmt.java 
b/fe/src/main/java/org/apache/impala/analysis/CreateTableAsSelectStmt.java
index 4df4a01de..c6b33e644 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateTableAsSelectStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateTableAsSelectStmt.java
@@ -136,6 +136,9 @@ public class CreateTableAsSelectStmt extends StatementBase {
       // TODO: Add support for CTAS on external Kudu tables (see IMPALA-4318)
       throw new AnalysisException(String.format("CREATE TABLE AS SELECT is not 
" +
           "supported for external Kudu tables."));
+    } else if (createStmt_.getFileFormat() == THdfsFileFormat.JDBC) {
+      throw new AnalysisException("CREATE TABLE AS SELECT is not supported for 
" +
+          "JDBC tables.");
     }
 
     // The analysis for CTAS happens in two phases - the first phase happens 
before
diff --git 
a/fe/src/main/java/org/apache/impala/analysis/CreateTableLikeFileStmt.java 
b/fe/src/main/java/org/apache/impala/analysis/CreateTableLikeFileStmt.java
index 48571a4cb..00341fcbf 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateTableLikeFileStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateTableLikeFileStmt.java
@@ -69,6 +69,9 @@ public class CreateTableLikeFileStmt extends CreateTableStmt {
     if (getFileFormat() == THdfsFileFormat.KUDU) {
       throw new AnalysisException("CREATE TABLE LIKE FILE statement is not 
supported " +
           "for Kudu tables.");
+    } else if (getFileFormat() == THdfsFileFormat.JDBC) {
+      throw new AnalysisException("CREATE TABLE LIKE FILE statement is not 
supported " +
+          "for JDBC tables.");
     }
     schemaLocation_.analyze(analyzer, Privilege.ALL, FsAction.READ);
     switch (schemaFileFormat_) {
diff --git 
a/fe/src/main/java/org/apache/impala/analysis/CreateTableLikeStmt.java 
b/fe/src/main/java/org/apache/impala/analysis/CreateTableLikeStmt.java
index 0b66bd1a6..c5327ec2e 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateTableLikeStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateTableLikeStmt.java
@@ -181,6 +181,8 @@ public class CreateTableLikeStmt extends StatementBase {
         srcTable.getMetaStoreTable())) {
       throw new AnalysisException(srcTable.getFullName() + " cannot be cloned 
into an "
           + "Iceberg table because it is not an Iceberg table.");
+    } else if (fileFormat_ == THdfsFileFormat.JDBC) {
+      throw new AnalysisException("CREATE TABLE LIKE is not supported for JDBC 
tables.");
     }
 
     srcDbName_ = srcTable.getDb().getName();
diff --git a/fe/src/main/java/org/apache/impala/analysis/CreateTableStmt.java 
b/fe/src/main/java/org/apache/impala/analysis/CreateTableStmt.java
index 8f8422555..6d74a669d 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateTableStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateTableStmt.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
 import org.apache.iceberg.TableProperties;
 import org.apache.iceberg.mr.Catalogs;
 import org.apache.impala.authorization.AuthorizationConfig;
+import org.apache.impala.catalog.DataSourceTable;
 import org.apache.impala.catalog.KuduTable;
 import org.apache.impala.catalog.IcebergTable;
 import org.apache.impala.catalog.RowFormat;
@@ -268,7 +269,7 @@ public class CreateTableStmt extends StatementBase {
     // schema. Likewise for external Kudu tables, the schema can be read from 
Kudu.
     if (getColumnDefs().isEmpty() && getFileFormat() != THdfsFileFormat.AVRO
         && getFileFormat() != THdfsFileFormat.KUDU && getFileFormat() !=
-        THdfsFileFormat.ICEBERG) {
+        THdfsFileFormat.ICEBERG && getFileFormat() != THdfsFileFormat.JDBC) {
       throw new AnalysisException("Table requires at least 1 column");
     }
     if (getRowFormat() != null) {
@@ -311,6 +312,10 @@ public class CreateTableStmt extends StatementBase {
       }
     }
 
+    if (getFileFormat() == THdfsFileFormat.JDBC) {
+      analyzeJdbcSchema(analyzer);
+    }
+
     // If lineage logging is enabled, compute minimal lineage graph.
     if (BackendConfig.INSTANCE.getComputeLineage() || 
RuntimeEnv.INSTANCE.isTestEnv()) {
        computeLineageGraph(analyzer);
@@ -885,6 +890,36 @@ public class CreateTableStmt extends StatementBase {
     getPartitionColumnDefs().clear();
   }
 
+  /**
+   * Analyzes the parameters of a CREATE TABLE ... STORED BY JDBC statement. 
Adds the
+   * table properties of DataSource so that JDBC table is stored as 
DataSourceTable in
+   * HMS.
+   */
+  private void analyzeJdbcSchema(Analyzer analyzer) throws AnalysisException {
+    for (ColumnDef col: getColumnDefs()) {
+      if (!DataSourceTable.isSupportedColumnType(col.getType())) {
+        throw new AnalysisException("Tables stored by JDBC do not support the 
column " +
+            "type: " + col.getType());
+      }
+    }
+
+    AnalysisUtils.throwIfNotNull(getCachingOp(),
+        "A JDBC table cannot be cached in HDFS.");
+    AnalysisUtils.throwIfNotNull(getLocation(), "LOCATION cannot be specified 
for a " +
+        "JDBC table.");
+    AnalysisUtils.throwIfNotEmpty(tableDef_.getPartitionColumnDefs(),
+        "PARTITIONED BY cannot be used in a JDBC table.");
+
+    // Set table properties of the DataSource to make the table saved as 
DataSourceTable
+    // in HMS.
+    try {
+      DataSourceTable.setJdbcDataSourceProperties(getTblProperties());
+    } catch (ImpalaRuntimeException e) {
+      throw new AnalysisException(String.format(
+          "Cannot create table '%s': %s", getTbl(), e.getMessage()));
+    }
+  }
+
   /**
    * @return true for external tables that don't have "external.table.purge" 
set to true.
    */
diff --git a/fe/src/main/java/org/apache/impala/analysis/TableDef.java 
b/fe/src/main/java/org/apache/impala/analysis/TableDef.java
index 7f83565d1..428de0a0c 100644
--- a/fe/src/main/java/org/apache/impala/analysis/TableDef.java
+++ b/fe/src/main/java/org/apache/impala/analysis/TableDef.java
@@ -417,7 +417,8 @@ class TableDef {
   boolean isBucketableFormat() {
     return options_.fileFormat != THdfsFileFormat.KUDU
         && options_.fileFormat != THdfsFileFormat.ICEBERG
-        && options_.fileFormat != THdfsFileFormat.HUDI_PARQUET;
+        && options_.fileFormat != THdfsFileFormat.HUDI_PARQUET
+        && options_.fileFormat != THdfsFileFormat.JDBC;
   }
 
   /**
diff --git a/fe/src/main/java/org/apache/impala/catalog/DataSourceTable.java 
b/fe/src/main/java/org/apache/impala/catalog/DataSourceTable.java
index ecc6a36e7..7b31badf6 100644
--- a/fe/src/main/java/org/apache/impala/catalog/DataSourceTable.java
+++ b/fe/src/main/java/org/apache/impala/catalog/DataSourceTable.java
@@ -17,14 +17,20 @@
 
 package org.apache.impala.catalog;
 
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
+import java.util.stream.Collectors;
 
 import org.apache.hadoop.hive.common.ValidWriteIdList;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.apache.impala.common.ImpalaRuntimeException;
+import org.apache.impala.extdatasource.ApiVersion;
+import org.apache.impala.extdatasource.jdbc.conf.JdbcStorageConfig;
 import org.apache.impala.extdatasource.v1.ExternalDataSource;
 import org.apache.impala.thrift.TCatalogObjectType;
 import org.apache.impala.thrift.TColumn;
@@ -35,6 +41,7 @@ import org.apache.impala.thrift.TResultSetMetadata;
 import org.apache.impala.thrift.TTable;
 import org.apache.impala.thrift.TTableDescriptor;
 import org.apache.impala.thrift.TTableType;
+import org.apache.impala.util.JsonUtil;
 import org.apache.impala.util.TResultRowBuilder;
 import com.google.common.base.Preconditions;
 
@@ -72,6 +79,17 @@ public class DataSourceTable extends Table implements 
FeDataSourceTable {
    */
   public static final String TBL_PROP_API_VER = 
"__IMPALA_DATA_SOURCE_API_VERSION";
 
+  /**
+   * Name of Impala built-in JDBC data source .
+   */
+  public static final String IMPALA_BUILTIN_JDBC_DATASOURCE = 
"impalajdbcdatasource";
+
+  /**
+   * Classname of Impala external JDBC data source.
+   */
+  public static final String IMPALA_JDBC_DATA_SRC_CLASSNAME =
+      "org.apache.impala.extdatasource.jdbc.JdbcDataSource";
+
   private String initString_;
   private TDataSource dataSource_;
 
@@ -144,6 +162,28 @@ public class DataSourceTable extends Table implements 
FeDataSourceTable {
     }
   }
 
+  /**
+   * Set table property with builtin JDBC DataSource name and check if the 
keys of input
+   * table properties are valid.
+   */
+  public static void setJdbcDataSourceProperties(Map<String, String> 
tblProperties)
+      throws ImpalaRuntimeException {
+    // Check if required JDBC/DBCP parameters are set in the table properties.
+    Set<String> tblPropertyKeys = tblProperties.keySet().stream()
+        .map(String::toLowerCase).collect(Collectors.toSet());
+    for (JdbcStorageConfig config : JdbcStorageConfig.values()) {
+      if (config.isRequired() && 
!tblPropertyKeys.contains(config.getPropertyName())) {
+        throw new ImpalaRuntimeException(String.format("Required JDBC config 
'%s' is " +
+            "not present in table properties.", config.getPropertyName()));
+      }
+    }
+    // DataSourceTable is still represented as HDFS tables in the metastore 
but has a
+    // special table property __IMPALA_DATA_SOURCE_NAME to indicate that 
Impala should
+    // use an external data source so we need to add the table property with 
the name
+    // of builtin JDBC DataSource.
+    tblProperties.put(TBL_PROP_DATA_SRC_NAME, IMPALA_BUILTIN_JDBC_DATASOURCE);
+  }
+
   /**
    * Create columns corresponding to fieldSchemas.
    * Throws a TableLoadingException if the metadata is incompatible with what 
we
@@ -179,13 +219,36 @@ public class DataSourceTable extends Table implements 
FeDataSourceTable {
     if (LOG.isTraceEnabled()) {
       LOG.trace("load table: " + db_.getName() + "." + name_);
     }
-    String dataSourceName = getRequiredTableProperty(msTbl, 
TBL_PROP_DATA_SRC_NAME, null);
-    String location = getRequiredTableProperty(msTbl, TBL_PROP_LOCATION, 
dataSourceName);
-    String className = getRequiredTableProperty(msTbl, TBL_PROP_CLASS, 
dataSourceName);
-    String apiVersionString = getRequiredTableProperty(msTbl, TBL_PROP_API_VER,
-        dataSourceName);
-    dataSource_ = new TDataSource(dataSourceName, location, className, 
apiVersionString);
-    initString_ = getRequiredTableProperty(msTbl, TBL_PROP_INIT_STRING, 
dataSourceName);
+    String dataSourceName = getTableProperty(msTbl, TBL_PROP_DATA_SRC_NAME, 
null, true);
+    if (dataSourceName.equals(IMPALA_BUILTIN_JDBC_DATASOURCE)) {
+      // The table is created with "STORED BY JDBC".
+      dataSource_ = new TDataSource(dataSourceName, /* location */ "",
+          /* className */ IMPALA_JDBC_DATA_SRC_CLASSNAME,
+          /* apiVersionString */ ApiVersion.V1.name());
+      // Serialize table properties to JSON string as initString for data 
source.
+      Map<String, String> tblProperties = new HashMap<String, String>();
+      for (JdbcStorageConfig config : JdbcStorageConfig.values()) {
+        String propertyValue = getTableProperty(msTbl, 
config.getPropertyName(),
+            IMPALA_BUILTIN_JDBC_DATASOURCE, false);
+        if (propertyValue != null) {
+          tblProperties.put(config.getPropertyName(), propertyValue);
+        }
+      }
+      try {
+        initString_ = JsonUtil.convertPropertyMapToJSON(tblProperties);
+      } catch (ImpalaRuntimeException e) {
+        throw new TableLoadingException(e.getMessage());
+      }
+    } else {
+      // The table is created with "PRODUCED BY DATA SOURCE".
+      String location = getTableProperty(msTbl, TBL_PROP_LOCATION, 
dataSourceName, true);
+      String className = getTableProperty(msTbl, TBL_PROP_CLASS, 
dataSourceName, true);
+      String apiVersionString = getTableProperty(msTbl, TBL_PROP_API_VER,
+          dataSourceName, true);
+      dataSource_ =
+          new TDataSource(dataSourceName, location, className, 
apiVersionString);
+      initString_ = getTableProperty(msTbl, TBL_PROP_INIT_STRING, 
dataSourceName, true);
+    }
 
     if (msTbl.getPartitionKeysSize() > 0) {
       Table.LOADING_TABLES.decrementAndGet();
@@ -210,14 +273,21 @@ public class DataSourceTable extends Table implements 
FeDataSourceTable {
     }
   }
 
-  private String getRequiredTableProperty(
-      org.apache.hadoop.hive.metastore.api.Table msTbl, String key, String 
dataSourceName)
-      throws TableLoadingException {
+  private String getTableProperty(org.apache.hadoop.hive.metastore.api.Table 
msTbl,
+      String key, String dataSourceName, boolean required) throws 
TableLoadingException {
     String val = msTbl.getParameters().get(key);
-    if (val == null) {
-      throw new TableLoadingException(String.format("Failed to load table %s 
produced " +
-          "by external data source %s. Missing required metadata: %s", name_,
-          dataSourceName == null ? "<unknown>" : dataSourceName, key));
+    if (val == null && required) {
+      if (key.equals(TBL_PROP_DATA_SRC_NAME)) {
+        throw new TableLoadingException(String.format("Failed to load table 
%s. " +
+            "Missing required metadata: %s", name_, key));
+      } else if (dataSourceName.equals(IMPALA_BUILTIN_JDBC_DATASOURCE)) {
+        throw new TableLoadingException(String.format("Failed to load table %s 
" +
+            "stored by JDBC. Missing required metadata: %s", name_, key));
+      } else {
+        throw new TableLoadingException(String.format("Failed to load table %s 
" +
+            "produced by external data source %s. Missing required metadata: 
%s",
+            name_, dataSourceName, key));
+      }
     }
     return val;
   }
diff --git a/fe/src/main/java/org/apache/impala/catalog/HdfsFileFormat.java 
b/fe/src/main/java/org/apache/impala/catalog/HdfsFileFormat.java
index 0d29d06f5..79d9a24cd 100644
--- a/fe/src/main/java/org/apache/impala/catalog/HdfsFileFormat.java
+++ b/fe/src/main/java/org/apache/impala/catalog/HdfsFileFormat.java
@@ -82,7 +82,11 @@ public enum HdfsFileFormat {
       "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe", true, 
true, true),
   ICEBERG("org.apache.iceberg.mr.hive.HiveIcebergInputFormat",
       "org.apache.iceberg.mr.hive.HiveIcebergOutputFormat",
-      "org.apache.iceberg.mr.hive.HiveIcebergSerDe", false, false, false);
+      "org.apache.iceberg.mr.hive.HiveIcebergSerDe", false, false, false),
+  JDBC("org.apache.hadoop.hive.jdbc.JdbcInputFormat",
+      "org.apache.hadoop.hive.jdbc.JdbcOutputFormat",
+      "org.apache.hadoop.hive.jdbc.JdbcSerDe", false, false, true);
+
 
   private final String inputFormat_;
   private final String outputFormat_;
@@ -192,6 +196,7 @@ public enum HdfsFileFormat {
       case KUDU: return HdfsFileFormat.KUDU;
       case ICEBERG: return HdfsFileFormat.ICEBERG;
       case JSON: return HdfsFileFormat.JSON;
+      case JDBC: return HdfsFileFormat.JDBC;
       default:
         throw new RuntimeException("Unknown THdfsFileFormat: "
             + thriftFormat + " - should never happen!");
@@ -210,6 +215,7 @@ public enum HdfsFileFormat {
       case KUDU: return THdfsFileFormat.KUDU;
       case ICEBERG: return THdfsFileFormat.ICEBERG;
       case JSON: return THdfsFileFormat.JSON;
+      case JDBC: return THdfsFileFormat.JDBC;
       default:
         throw new RuntimeException("Unknown HdfsFormat: "
             + this + " - should never happen!");
@@ -236,6 +242,7 @@ public enum HdfsFileFormat {
       case HUDI_PARQUET: return "HUDIPARQUET";
       case ICEBERG: return "ICEBERG";
       case JSON: return "JSONFILE";
+      case JDBC: return "JDBC";
       default:
         throw new RuntimeException("Unknown HdfsFormat: "
             + this + " - should never happen!");
@@ -260,6 +267,8 @@ public enum HdfsFileFormat {
         return true;
       case KUDU:
         return false;
+      case JDBC:
+        return false;
       default:
         throw new RuntimeException("Unknown HdfsFormat: "
             + this + " - should never happen!");
diff --git 
a/fe/src/main/java/org/apache/impala/catalog/local/LocalDataSourceTable.java 
b/fe/src/main/java/org/apache/impala/catalog/local/LocalDataSourceTable.java
index 93a2d6924..dea93bcc5 100644
--- a/fe/src/main/java/org/apache/impala/catalog/local/LocalDataSourceTable.java
+++ b/fe/src/main/java/org/apache/impala/catalog/local/LocalDataSourceTable.java
@@ -17,6 +17,8 @@
 
 package org.apache.impala.catalog.local;
 
+import java.util.HashMap;
+import java.util.Map;
 import java.util.Set;
 
 import org.apache.hadoop.hive.metastore.api.Table;
@@ -26,6 +28,9 @@ import org.apache.impala.catalog.FeDataSourceTable;
 import org.apache.impala.catalog.local.MetaProvider.TableMetaRef;
 import org.apache.impala.catalog.TableLoadingException;
 import org.apache.impala.catalog.Type;
+import org.apache.impala.common.ImpalaRuntimeException;
+import org.apache.impala.extdatasource.ApiVersion;
+import org.apache.impala.extdatasource.jdbc.conf.JdbcStorageConfig;
 import org.apache.impala.thrift.TColumn;
 import org.apache.impala.thrift.TDataSource;
 import org.apache.impala.thrift.TDataSourceTable;
@@ -33,6 +38,7 @@ import org.apache.impala.thrift.TResultSet;
 import org.apache.impala.thrift.TResultSetMetadata;
 import org.apache.impala.thrift.TTableDescriptor;
 import org.apache.impala.thrift.TTableType;
+import org.apache.impala.util.JsonUtil;
 import org.apache.impala.util.TResultRowBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -70,27 +76,57 @@ public class LocalDataSourceTable extends LocalTable 
implements FeDataSourceTabl
       throws TableLoadingException {
     super(db, msTbl, ref);
 
-    String dataSourceName = getRequiredTableProperty(
-        msTbl, DataSourceTable.TBL_PROP_DATA_SRC_NAME, null);
-    String location = getRequiredTableProperty(
-        msTbl, DataSourceTable.TBL_PROP_LOCATION, dataSourceName);
-    String className = getRequiredTableProperty(
-        msTbl, DataSourceTable.TBL_PROP_CLASS, dataSourceName);
-    String apiVersionString = getRequiredTableProperty(
-        msTbl, DataSourceTable.TBL_PROP_API_VER, dataSourceName);
-    dataSource_ = new TDataSource(dataSourceName, location, className, 
apiVersionString);
-    initString_ = getRequiredTableProperty(
-        msTbl, DataSourceTable.TBL_PROP_INIT_STRING, dataSourceName);
+    String dataSourceName = getTableProperty(
+        msTbl, DataSourceTable.TBL_PROP_DATA_SRC_NAME, null, true);
+    if (dataSourceName.equals(DataSourceTable.IMPALA_BUILTIN_JDBC_DATASOURCE)) 
{
+      // The table is created with "STORED BY JDBC".
+      dataSource_ = new TDataSource(dataSourceName, /* location */ "",
+          /* className */ DataSourceTable.IMPALA_JDBC_DATA_SRC_CLASSNAME,
+          /* apiVersionString */ ApiVersion.V1.name());
+      // Serialize table properties to JSON string as initString for data 
source.
+      Map<String, String> tblProperties = new HashMap<String, String>();
+      for (JdbcStorageConfig config : JdbcStorageConfig.values()) {
+        String propertyValue = getTableProperty(msTbl, 
config.getPropertyName(),
+            DataSourceTable.IMPALA_BUILTIN_JDBC_DATASOURCE, false);
+        if (propertyValue != null) {
+          tblProperties.put(config.getPropertyName(), propertyValue);
+        }
+      }
+      try {
+        initString_ = JsonUtil.convertPropertyMapToJSON(tblProperties);
+      } catch (ImpalaRuntimeException e) {
+        throw new TableLoadingException(e.getMessage());
+      }
+    } else {
+      // The table is created with "PRODUCED BY DATA SOURCE".
+      String location = getTableProperty(
+          msTbl, DataSourceTable.TBL_PROP_LOCATION, dataSourceName, true);
+      String className = getTableProperty(
+          msTbl, DataSourceTable.TBL_PROP_CLASS, dataSourceName, true);
+      String apiVersionString = getTableProperty(
+          msTbl, DataSourceTable.TBL_PROP_API_VER, dataSourceName, true);
+      dataSource_ =
+          new TDataSource(dataSourceName, location, className, 
apiVersionString);
+      initString_ = getTableProperty(
+          msTbl, DataSourceTable.TBL_PROP_INIT_STRING, dataSourceName, true);
+    }
   }
 
-  private String getRequiredTableProperty(Table msTbl, String key, String 
dataSourceName)
-      throws TableLoadingException {
+  private String getTableProperty(Table msTbl, String key, String 
dataSourceName,
+      boolean required) throws TableLoadingException {
     String val = msTbl.getParameters().get(key);
-    if (val == null) {
-      throw new TableLoadingException(String.format("Failed to load table %s " 
+
-          "produced by external data source %s. Missing required metadata: %s",
-          msTbl.getTableName(),
-          dataSourceName == null ? "<unknown>" : dataSourceName, key));
+    if (val == null && required) {
+      if (key.equals(DataSourceTable.TBL_PROP_DATA_SRC_NAME)) {
+        throw new TableLoadingException(String.format("Failed to load table 
%s. " +
+            "Missing required metadata: %s", msTbl.getTableName(), key));
+      } else if 
(dataSourceName.equals(DataSourceTable.IMPALA_BUILTIN_JDBC_DATASOURCE)) {
+        throw new TableLoadingException(String.format("Failed to load table %s 
stored " +
+            "by JDBC. Missing required metadata: %s", msTbl.getTableName(), 
key));
+      } else {
+        throw new TableLoadingException(String.format("Failed to load table %s 
" +
+            "produced by external data source %s. Missing required metadata: 
%s",
+            msTbl.getTableName(), dataSourceName, key));
+      }
     }
     return val;
   }
diff --git a/fe/src/main/java/org/apache/impala/util/JsonUtil.java 
b/fe/src/main/java/org/apache/impala/util/JsonUtil.java
new file mode 100644
index 000000000..4ffbaf50a
--- /dev/null
+++ b/fe/src/main/java/org/apache/impala/util/JsonUtil.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.impala.util;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Strings;
+
+import org.apache.impala.common.ImpalaRuntimeException;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Utility class that converts between JSON string and property map.
+ */
+public class JsonUtil {
+
+  private static final Logger LOG = LoggerFactory.getLogger(JsonUtil.class);
+
+  /**
+   * Convert string in JSON format to property map.
+   */
+  public static Map<String, String> convertJSONToPropertyMap(String 
propertyString)
+      throws ImpalaRuntimeException {
+    Map<String, String> propertyMap = null;
+    if (!Strings.isNullOrEmpty(propertyString)) {
+      try {
+        TypeReference<HashMap<String, String>> typeRef =
+            new TypeReference<HashMap<String, String>>() {};
+        propertyMap = new ObjectMapper().readValue(propertyString, typeRef);
+      } catch (JsonProcessingException e) {
+        String errorMessage = String.format(
+            "Invalid JSON string for property: '%s'", propertyString);
+        LOG.error(errorMessage, e);
+        throw new ImpalaRuntimeException(errorMessage);
+      }
+    }
+    return propertyMap;
+  }
+
+  /**
+   * Convert property map to string in JSON format.
+   */
+  public static String convertPropertyMapToJSON(Map<String, String> 
propertyMap)
+      throws ImpalaRuntimeException {
+    if (propertyMap != null && propertyMap.size() > 0) {
+      try {
+        return new ObjectMapper().writeValueAsString(propertyMap);
+      } catch (JsonProcessingException e) {
+        String errorMessage = String.format(
+            "Failed to convert property map to JSON string: %s", 
e.getMessage());
+        LOG.error(errorMessage);
+        throw new ImpalaRuntimeException(errorMessage);
+      }
+    }
+    return new String("");
+  }
+}
+
diff --git a/fe/src/main/jflex/sql-scanner.flex 
b/fe/src/main/jflex/sql-scanner.flex
index 56598c1fa..a679897b8 100644
--- a/fe/src/main/jflex/sql-scanner.flex
+++ b/fe/src/main/jflex/sql-scanner.flex
@@ -173,6 +173,7 @@ import org.apache.impala.thrift.TReservedWordsVersion;
     keywordMap.put("invalidate", SqlParserSymbols.KW_INVALIDATE);
     keywordMap.put("iregexp", SqlParserSymbols.KW_IREGEXP);
     keywordMap.put("is", SqlParserSymbols.KW_IS);
+    keywordMap.put("jdbc", SqlParserSymbols.KW_JDBC);
     keywordMap.put("join", SqlParserSymbols.KW_JOIN);
     keywordMap.put("jsonfile", SqlParserSymbols.KW_JSONFILE);
     keywordMap.put("kudu", SqlParserSymbols.KW_KUDU);
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java 
b/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
index b4424097c..f84bb342f 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
@@ -2194,6 +2194,9 @@ public class AnalyzeDDLTest extends FrontendTestBase {
     AnalysisError("create table newtbl_kudu like parquet " +
         "'/test-warehouse/schemas/alltypestiny.parquet' stored as kudu",
         "CREATE TABLE LIKE FILE statement is not supported for Kudu tables.");
+    AnalysisError("create table newtbl_jdbc like parquet " +
+        "'/test-warehouse/schemas/alltypestiny.parquet' stored as JDBC",
+        "CREATE TABLE LIKE FILE statement is not supported for JDBC tables.");
   }
 
   @Test
@@ -2421,6 +2424,12 @@ public class AnalyzeDDLTest extends FrontendTestBase {
         "BIGINT_COL, DATE_STRING_COL, STRING_COL, TIMESTAMP_COL, YEAR, " +
         "MONTH FROM functional.alltypes");
 
+    // CTAS is not supported for JDBC tables.
+    AnalysisError("create table t stored as JDBC as select id, bool_col, 
tinyint_col " +
+        "from functional.alltypestiny",
+        "CREATE TABLE AS SELECT does not support the (JDBC) file format. " +
+        "Supported formats are: (PARQUET, TEXTFILE, KUDU, ICEBERG)");
+
     // IMPALA-7679: Inserting a null column type without an explicit type 
should
     // throw an error.
     AnalyzesOk("create table t as select cast(null as int) as new_col");
@@ -2570,6 +2579,10 @@ public class AnalyzeDDLTest extends FrontendTestBase {
     AnalysisError("create table kudu_jointbl_clone like 
functional_kudu.jointbl",
         "CREATE TABLE LIKE is not supported for Kudu tables having range 
partitions.");
 
+    // CREATE TABLE LIKE is not supported for JDBC tables.
+    AnalysisError("create table jdbc_tbl like functional.alltypestiny stored 
as JDBC",
+        "CREATE TABLE LIKE is not supported for JDBC tables.");
+
     // Test sort columns.
     AnalyzesOk("create table tbl sort by (int_col,id) like 
functional.alltypes");
     AnalysisError("create table tbl sort by (int_col,foo) like 
functional.alltypes",
@@ -2859,6 +2872,21 @@ public class AnalyzeDDLTest extends FrontendTestBase {
     AnalysisError("ALTER TABLE functional_seq_snap.alltypes SET LOCATION " +
         "'  '", "URI path cannot be empty.");
 
+    // Create JDBC tables
+    AnalyzesOk("CREATE TABLE Foo (i int) STORED BY JDBC " +
+        "TBLPROPERTIES ('database.type'='a', 'jdbc.url'='b', " +
+        "'jdbc.driver'='c', 'driver.url'='d', 'dbcp.username'='e', " +
+        "'dbcp.password'='f', 'table'='g')");
+    AnalysisError("CREATE TABLE Foo (i int) STORED BY JDBC TBLPROPERTIES 
('a'='b')",
+        "Cannot create table 'Foo': Required JDBC config 'database.type' is 
not " +
+        "present in table properties.");
+    AnalysisError("CREATE TABLE Foo (i int) STORED BY JDBC CACHED IN 
'testPool'",
+        "A JDBC table cannot be cached in HDFS.");
+    AnalysisError("CREATE TABLE Foo (i int) STORED BY JDBC LOCATION " +
+        "'/test-warehouse/new_table'", "LOCATION cannot be specified for a 
JDBC table.");
+    AnalysisError("CREATE TABLE Foo (i int) PARTITIONED BY (d decimal) STORED 
BY JDBC ",
+        "PARTITIONED BY cannot be used in a JDBC table.");
+
     // Create table PRODUCED BY DATA SOURCE
     final String DATA_SOURCE_NAME = "TestDataSource1";
     catalog_.addDataSource(new DataSource(DATA_SOURCE_NAME, "/foo.jar",
@@ -2939,13 +2967,16 @@ public class AnalyzeDDLTest extends FrontendTestBase {
     AnalyzesOk("CREATE TABLE functional.bucket (i int COMMENT 'hello', s 
string) " +
         "CLUSTERED BY(i) SORT BY (s) INTO 24 BUCKETS");
 
-    // Bucketed table not supported for Kudu and ICEBERG table
+    // Bucketed table not supported for Kudu, ICEBERG and JDBC table
     AnalysisError("CREATE TABLE functional.bucket (i int COMMENT 'hello', s 
string) " +
         "CLUSTERED BY (i) INTO 24 BUCKETS STORED BY KUDU", "CLUSTERED BY not " 
+
         "support fileformat: 'KUDU'");
     AnalysisError("CREATE TABLE functional.bucket (i int COMMENT 'hello', s 
string) " +
         "CLUSTERED BY (i) INTO 24 BUCKETS STORED BY ICEBERG",
         "CLUSTERED BY not support fileformat: 'ICEBERG'");
+    AnalysisError("CREATE TABLE functional.bucket (i int COMMENT 'hello', s 
string) " +
+        "CLUSTERED BY (i) INTO 24 BUCKETS STORED BY JDBC",
+        "CLUSTERED BY not support fileformat: 'JDBC'");
     // Bucketed columns must not contain partition column and don't duplicate
     AnalysisError("CREATE TABLE functional.bucket (i int COMMENT 'hello', s 
string) " +
         "PARTITIONED BY(dt string) CLUSTERED BY (dt) INTO 24 BUCKETS",
diff --git a/fe/src/test/java/org/apache/impala/analysis/ParserTest.java 
b/fe/src/test/java/org/apache/impala/analysis/ParserTest.java
index 9d9397b59..d32e8b749 100755
--- a/fe/src/test/java/org/apache/impala/analysis/ParserTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/ParserTest.java
@@ -2816,9 +2816,16 @@ public class ParserTest extends FrontendTestBase {
     ParserError("CREATE TABLE foo (i INT, NON UNIQUE PRIMARY KEY) STORED AS 
KUDU");
     ParserError("CREATE TABLE foo (NON UNIQUE PRIMARY KEY(a), a INT) STORED AS 
KUDU");
 
+    // Create external JDBC tables.
+    ParsesOk("CREATE TABLE foo (i INT, j STRING) STORED AS JDBC");
+    ParsesOk("CREATE TABLE IF NOT EXISTS foo (i INT) COMMENT 'comment' " +
+        "STORED AS JDBC TBLPROPERTIES ('key1'='value1', 'key2'='value2')");
+    ParserError("CREATE TABLE foo (i INT) PRIMARY KEY (i) STORED AS JDBC");
+
     // Supported storage engines
     ParsesOk("CREATE TABLE foo (i INT) STORED BY KUDU");
     ParsesOk("CREATE TABLE foo (i INT) STORED BY ICEBERG");
+    ParsesOk("CREATE TABLE foo (i INT) STORED BY JDBC");
     ParserError("CREATE TABLE foo (i INT) STORED BY PARQUET");
     ParserError("CREATE TABLE foo (i INT) STORED BY FOOBAR");
     ParserError("CREATE TABLE foo (i INT) STORED BY");
@@ -2865,6 +2872,8 @@ public class ParserTest extends FrontendTestBase {
     }
     ParsesOk("CREATE TABLE Foo (i int) WITH SERDEPROPERTIES ('a'='b') " +
         "TBLPROPERTIES ('c'='d', 'e'='f')");
+    ParsesOk("CREATE TABLE Foo (i int) STORED BY JDBC " +
+        "TBLPROPERTIES ('c'='d', 'e'='f')");
     // TBLPROPERTIES must go after SERDEPROPERTIES
     ParserError("CREATE TABLE Foo (i int) TBLPROPERTIES ('c'='d', 'e'='f') " +
         "WITH SERDEPROPERTIES ('a'='b')");
diff --git a/fe/src/test/java/org/apache/impala/customcluster/LdapHS2Test.java 
b/fe/src/test/java/org/apache/impala/customcluster/LdapHS2Test.java
index 4e5d012fa..3385d7c19 100644
--- a/fe/src/test/java/org/apache/impala/customcluster/LdapHS2Test.java
+++ b/fe/src/test/java/org/apache/impala/customcluster/LdapHS2Test.java
@@ -738,11 +738,6 @@ public class LdapHS2Test {
     // Define queries.
     String fileSystemPrefix = System.getenv("FILESYSTEM_PREFIX");
     String internalListenHost = System.getenv("INTERNAL_LISTEN_HOST");
-
-    String dropDSQuery = "DROP DATA SOURCE IF EXISTS impala_jdbc_test_ds";
-    String createDSQuery = String.format("CREATE DATA SOURCE 
impala_jdbc_test_ds " +
-        "CLASS 'org.apache.impala.extdatasource.jdbc.JdbcDataSource' " +
-        "API_VERSION 'V1'");
     String dropTableQuery = "DROP TABLE IF EXISTS %s";
     // Set JDBC authentication mechanisms as LDAP (3) with username/password as
     // TEST_USER_1/TEST_PASSWORD_1.
@@ -750,57 +745,55 @@ public class LdapHS2Test {
         "id INT, bool_col BOOLEAN, tinyint_col TINYINT, smallint_col SMALLINT, 
" +
         "int_col INT, bigint_col BIGINT, float_col FLOAT, double_col DOUBLE, " 
+
         "date_string_col STRING, string_col STRING, timestamp_col TIMESTAMP) " 
+
-        "PRODUCED BY DATA SOURCE impala_jdbc_test_ds(" +
-        "'{\"database.type\":\"IMPALA\", " +
-          "\"jdbc.url\":\"jdbc:impala://%s:21050/functional\", " +
-          "\"jdbc.auth\":\"AuthMech=3\", " +
-          "\"jdbc.driver\":\"com.cloudera.impala.jdbc.Driver\", " +
-          "\"driver.url\":\"%s/test-warehouse/data-sources/jdbc-drivers/" +
-          "ImpalaJDBC42.jar\", " +
-          "\"dbcp.username\":\"%s\", " +
-          "\"dbcp.password\":\"%s\", " +
-          "\"table\":\"alltypes\"}')",
-          internalListenHost, fileSystemPrefix, TEST_USER_1, TEST_PASSWORD_1);
+        "STORED BY JDBC TBLPROPERTIES (" +
+        "\"database.type\"=\"IMPALA\", " +
+        "\"jdbc.url\"=\"jdbc:impala://%s:21050/functional\", " +
+        "\"jdbc.auth\"=\"AuthMech=3\", " +
+        "\"jdbc.driver\"=\"com.cloudera.impala.jdbc.Driver\", " +
+        "\"driver.url\"=\"%s/test-warehouse/data-sources/jdbc-drivers/" +
+        "ImpalaJDBC42.jar\", " +
+        "\"dbcp.username\"=\"%s\", " +
+        "\"dbcp.password\"=\"%s\", " +
+        "\"table\"=\"alltypes\")",
+        internalListenHost, fileSystemPrefix, TEST_USER_1, TEST_PASSWORD_1);
     // Set JDBC authentication mechanisms as LDAP with wrong password.
     String createTableWithWrongPassword =
         String.format("CREATE TABLE impala_jdbc_tbl_wrong_password (" +
         "id INT, bool_col BOOLEAN, tinyint_col TINYINT, smallint_col SMALLINT, 
" +
         "int_col INT, bigint_col BIGINT, float_col FLOAT, double_col DOUBLE, " 
+
         "date_string_col STRING, string_col STRING, timestamp_col TIMESTAMP) " 
+
-        "PRODUCED BY DATA SOURCE impala_jdbc_test_ds(" +
-        "'{\"database.type\":\"IMPALA\", " +
-          "\"jdbc.url\":\"jdbc:impala://%s:21050/functional\", " +
-          "\"jdbc.auth\":\"AuthMech=3\", " +
-          "\"jdbc.driver\":\"com.cloudera.impala.jdbc.Driver\", " +
-          "\"driver.url\":\"%s/test-warehouse/data-sources/jdbc-drivers/" +
-          "ImpalaJDBC42.jar\", " +
-          "\"dbcp.username\":\"%s\", " +
-          "\"dbcp.password\":\"wrong-password\", " +
-          "\"table\":\"alltypes\"}')",
-          internalListenHost, fileSystemPrefix, TEST_USER_1);
+        "STORED BY JDBC TBLPROPERTIES (" +
+        "\"database.type\"=\"IMPALA\", " +
+        "\"jdbc.url\"=\"jdbc:impala://%s:21050/functional\", " +
+        "\"jdbc.auth\"=\"AuthMech=3\", " +
+        "\"jdbc.driver\"=\"com.cloudera.impala.jdbc.Driver\", " +
+        "\"driver.url\"=\"%s/test-warehouse/data-sources/jdbc-drivers/" +
+        "ImpalaJDBC42.jar\", " +
+        "\"dbcp.username\"=\"%s\", " +
+        "\"dbcp.password\"=\"wrong-password\", " +
+        "\"table\"=\"alltypes\")",
+        internalListenHost, fileSystemPrefix, TEST_USER_1);
     // Set JDBC authentication mechanisms as LDAP without AuthMech.
     String createTableWithoutAuthMech =
         String.format("CREATE TABLE impala_jdbc_tbl_without_auth_mech (" +
         "id INT, bool_col BOOLEAN, tinyint_col TINYINT, smallint_col SMALLINT, 
" +
         "int_col INT, bigint_col BIGINT, float_col FLOAT, double_col DOUBLE, " 
+
         "date_string_col STRING, string_col STRING, timestamp_col TIMESTAMP) " 
+
-        "PRODUCED BY DATA SOURCE impala_jdbc_test_ds(" +
-        "'{\"database.type\":\"IMPALA\", " +
-          "\"jdbc.url\":\"jdbc:impala://%s:21050/functional\", " +
-          "\"jdbc.driver\":\"com.cloudera.impala.jdbc.Driver\", " +
-          "\"driver.url\":\"%s/test-warehouse/data-sources/jdbc-drivers/" +
-          "ImpalaJDBC42.jar\", " +
-          "\"dbcp.username\":\"%s\", " +
-          "\"dbcp.password\":\"%s\", " +
-          "\"table\":\"alltypes\"}')",
-          internalListenHost, fileSystemPrefix, TEST_USER_1, TEST_PASSWORD_1);
+        "STORED BY JDBC TBLPROPERTIES (" +
+        "\"database.type\"=\"IMPALA\", " +
+        "\"jdbc.url\"=\"jdbc:impala://%s:21050/functional\", " +
+        "\"jdbc.driver\"=\"com.cloudera.impala.jdbc.Driver\", " +
+        "\"driver.url\"=\"%s/test-warehouse/data-sources/jdbc-drivers/" +
+        "ImpalaJDBC42.jar\", " +
+        "\"dbcp.username\"=\"%s\", " +
+        "\"dbcp.password\"=\"%s\", " +
+        "\"table\"=\"alltypes\")",
+        internalListenHost, fileSystemPrefix, TEST_USER_1, TEST_PASSWORD_1);
     String selectQuery = "select string_col from %s where id=9";
 
     // Run queries.
     //
-    // Create data source and tables.
-    execAndFetch(client, session, dropDSQuery, null);
-    execAndFetch(client, session, createDSQuery, "Data source has been 
created.");
+    // Create JDBC tables.
     execAndFetch(client, session,
         String.format(dropTableQuery, "impala_jdbc_ext_test_table"), null);
     execAndFetch(client, session, createTableQuery, "Table has been created.");
@@ -835,8 +828,7 @@ public class LdapHS2Test {
           e.getMessage().contains(expectedError));
     }
 
-    // Drop data source and tables.
-    execAndFetch(client, session, dropDSQuery, "Data source has been 
dropped.");
+    // Drop JDBC tables.
     execAndFetch(client, session,
         String.format(dropTableQuery, "impala_jdbc_ext_test_table"),
         "Table has been dropped.");
@@ -848,6 +840,6 @@ public class LdapHS2Test {
         "Table has been dropped.");
 
     // Two successful authentications for each ExecAndFetch().
-    verifyMetrics(31, 0);
+    verifyMetrics(25, 0);
   }
 }
diff --git a/testdata/bin/create-ext-data-source-table.sql 
b/testdata/bin/create-ext-data-source-table.sql
index 803e4660d..30930142b 100644
--- a/testdata/bin/create-ext-data-source-table.sql
+++ b/testdata/bin/create-ext-data-source-table.sql
@@ -46,12 +46,6 @@ CREATE TABLE alltypes_datasource (
   date_col DATE)
 PRODUCED BY DATA SOURCE AllTypesDataSource("TestInitString");
 
-DROP DATA SOURCE IF EXISTS JdbcDataSource;
-CREATE DATA SOURCE JdbcDataSource
-LOCATION '/test-warehouse/data-sources/jdbc-data-source.jar'
-CLASS 'org.apache.impala.extdatasource.jdbc.JdbcDataSource'
-API_VERSION 'V1';
-
 DROP TABLE IF EXISTS alltypes_jdbc_datasource;
 CREATE TABLE alltypes_jdbc_datasource (
  id INT,
@@ -65,14 +59,15 @@ CREATE TABLE alltypes_jdbc_datasource (
  date_col DATE,
  string_col STRING,
  timestamp_col TIMESTAMP)
-PRODUCED BY DATA SOURCE JdbcDataSource(
-'{"database.type":"POSTGRES",
-"jdbc.url":"jdbc:postgresql://localhost:5432/functional",
-"jdbc.driver":"org.postgresql.Driver",
-"driver.url":"/test-warehouse/data-sources/jdbc-drivers/postgresql-jdbc.jar",
-"dbcp.username":"hiveuser",
-"dbcp.password":"password",
-"table":"alltypes"}');
+STORED BY JDBC
+TBLPROPERTIES (
+"database.type"="POSTGRES",
+"jdbc.url"="jdbc:postgresql://localhost:5432/functional",
+"jdbc.driver"="org.postgresql.Driver",
+"driver.url"="/test-warehouse/data-sources/jdbc-drivers/postgresql-jdbc.jar",
+"dbcp.username"="hiveuser",
+"dbcp.password"="password",
+"table"="alltypes");
 
 DROP TABLE IF EXISTS alltypes_jdbc_datasource_2;
 CREATE TABLE alltypes_jdbc_datasource_2 (
@@ -87,12 +82,13 @@ CREATE TABLE alltypes_jdbc_datasource_2 (
  date_col DATE,
  string_col STRING,
  timestamp_col TIMESTAMP)
-PRODUCED BY DATA SOURCE JdbcDataSource(
-'{"database.type":"POSTGRES",
-"jdbc.url":"jdbc:postgresql://localhost:5432/functional",
-"jdbc.driver":"org.postgresql.Driver",
-"driver.url":"hdfs://localhost:20500/test-warehouse/data-sources/jdbc-drivers/postgresql-jdbc.jar",
-"dbcp.username":"hiveuser",
-"dbcp.password":"password",
-"table":"AllTypesWithQuote",
-"column.mapping":"id=id, bool_col=Bool_col, tinyint_col=Tinyint_col, 
smallint_col=Smallint_col, int_col=Int_col, bigint_col=Bigint_col, 
float_col=Float_col, double_col=Double_col, date_string_col=Date_string_col, 
string_col=String_col, timestamp=Timestamp"}');
+STORED BY JDBC
+TBLPROPERTIES (
+"database.type"="POSTGRES",
+"jdbc.url"="jdbc:postgresql://localhost:5432/functional",
+"jdbc.driver"="org.postgresql.Driver",
+"driver.url"="hdfs://localhost:20500/test-warehouse/data-sources/jdbc-drivers/postgresql-jdbc.jar",
+"dbcp.username"="hiveuser",
+"dbcp.password"="password",
+"table"="AllTypesWithQuote",
+"column.mapping"="id=id, bool_col=Bool_col, tinyint_col=Tinyint_col, 
smallint_col=Smallint_col, int_col=Int_col, bigint_col=Bigint_col, 
float_col=Float_col, double_col=Double_col, date_string_col=Date_string_col, 
string_col=String_col, timestamp=Timestamp");
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/impala-ext-jdbc-tables-predicates.test
 
b/testdata/workloads/functional-query/queries/QueryTest/impala-ext-jdbc-tables-predicates.test
index ec8c7c638..4a8f49e8d 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/impala-ext-jdbc-tables-predicates.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/impala-ext-jdbc-tables-predicates.test
@@ -1,24 +1,5 @@
 ====
 ---- QUERY
-# Create DataSource
-DROP DATA SOURCE IF EXISTS TestJdbcDataSource;
-CREATE DATA SOURCE TestJdbcDataSource
-CLASS 'org.apache.impala.extdatasource.jdbc.JdbcDataSource'
-API_VERSION 'V1';
----- RESULTS
-'Data source has been created.'
-====
----- QUERY
-# Show created DataSource
-SHOW DATA SOURCES LIKE 'testjdbcdatasource';
----- LABELS
-NAME,LOCATION,CLASS NAME,API VERSION
----- RESULTS
-'testjdbcdatasource','','org.apache.impala.extdatasource.jdbc.JdbcDataSource','V1'
----- TYPES
-STRING,STRING,STRING,STRING
-====
----- QUERY
 # Create external JDBC DataSource table
 DROP TABLE IF EXISTS alltypes_jdbc_datasource;
 CREATE TABLE alltypes_jdbc_datasource (
@@ -33,15 +14,16 @@ CREATE TABLE alltypes_jdbc_datasource (
  date_col DATE,
  string_col STRING,
  timestamp_col TIMESTAMP)
-PRODUCED BY DATA SOURCE TestJdbcDataSource(
-'{"database.type":"IMPALA",
-"jdbc.url":"jdbc:impala://$INTERNAL_LISTEN_HOST:21050/functional",
-"jdbc.auth":"AuthMech=0",
-"jdbc.driver":"com.cloudera.impala.jdbc.Driver",
-"driver.url":"$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/ImpalaJDBC42.jar",
-"dbcp.username":"impala",
-"dbcp.password":"cloudera",
-"table":"alltypes_with_date"}');
+STORED BY JDBC
+TBLPROPERTIES (
+"database.type"="IMPALA",
+"jdbc.url"="jdbc:impala://$INTERNAL_LISTEN_HOST:21050/functional",
+"jdbc.auth"="AuthMech=0",
+"jdbc.driver"="com.cloudera.impala.jdbc.Driver",
+"driver.url"="$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/ImpalaJDBC42.jar",
+"dbcp.username"="impala",
+"dbcp.password"="cloudera",
+"table"="alltypes_with_date");
 ---- RESULTS
 'Table has been created.'
 ====
@@ -173,9 +155,3 @@ DROP TABLE alltypes_jdbc_datasource;
 ---- RESULTS
 'Table has been dropped.'
 ====
----- QUERY
-# Drop DataSource
-DROP DATA SOURCE TestJdbcDataSource;
----- RESULTS
-'Data source has been dropped.'
-====
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/impala-ext-jdbc-tables.test
 
b/testdata/workloads/functional-query/queries/QueryTest/impala-ext-jdbc-tables.test
index 9ca4b0909..526423296 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/impala-ext-jdbc-tables.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/impala-ext-jdbc-tables.test
@@ -1,24 +1,5 @@
 ====
 ---- QUERY
-# Create DataSource
-DROP DATA SOURCE IF EXISTS TestJdbcDataSource;
-CREATE DATA SOURCE TestJdbcDataSource
-CLASS 'org.apache.impala.extdatasource.jdbc.JdbcDataSource'
-API_VERSION 'V1';
----- RESULTS
-'Data source has been created.'
-====
----- QUERY
-# Show created DataSource
-SHOW DATA SOURCES LIKE 'testjdbcdatasource';
----- LABELS
-NAME,LOCATION,CLASS NAME,API VERSION
----- RESULTS
-'testjdbcdatasource','','org.apache.impala.extdatasource.jdbc.JdbcDataSource','V1'
----- TYPES
-STRING,STRING,STRING,STRING
-====
----- QUERY
 # Create external JDBC DataSource table
 DROP TABLE IF EXISTS alltypes_jdbc_datasource;
 CREATE TABLE alltypes_jdbc_datasource (
@@ -33,16 +14,17 @@ CREATE TABLE alltypes_jdbc_datasource (
  date_string_col STRING,
  string_col STRING,
  timestamp_col TIMESTAMP)
-PRODUCED BY DATA SOURCE TestJdbcDataSource(
-'{"database.type":"IMPALA",
-"jdbc.url":"jdbc:impala://$INTERNAL_LISTEN_HOST:21050/functional",
-"jdbc.auth":"AuthMech=0",
-"jdbc.properties":"MEM_LIMIT=1000000000, MAX_ERRORS = 10000, 
ENABLED_RUNTIME_FILTER_TYPES=\"BLOOM,MIN_MAX\"",
-"jdbc.driver":"com.cloudera.impala.jdbc.Driver",
-"driver.url":"$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/ImpalaJDBC42.jar",
-"dbcp.username":"impala",
-"dbcp.password":"cloudera",
-"table":"alltypes"}');
+STORED BY JDBC
+TBLPROPERTIES (
+"database.type"="IMPALA",
+"jdbc.url"="jdbc:impala://$INTERNAL_LISTEN_HOST:21050/functional",
+"jdbc.auth"="AuthMech=0",
+"jdbc.properties"="MEM_LIMIT=1000000000, MAX_ERRORS = 10000, 
ENABLED_RUNTIME_FILTER_TYPES=\"BLOOM,MIN_MAX\"",
+"jdbc.driver"="com.cloudera.impala.jdbc.Driver",
+"driver.url"="$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/ImpalaJDBC42.jar",
+"dbcp.username"="impala",
+"dbcp.password"="cloudera",
+"table"="alltypes");
 ---- RESULTS
 'Table has been created.'
 ====
@@ -61,16 +43,17 @@ CREATE TABLE alltypes_jdbc_datasource_2 (
  date_string_col STRING,
  string_col STRING,
  timestamp_col TIMESTAMP)
-PRODUCED BY DATA SOURCE TestJdbcDataSource(
-'{"database.type":"IMPALA",
-"jdbc.url":"jdbc:impala://$INTERNAL_LISTEN_HOST:21050/functional",
-"jdbc.auth":"AuthMech=0",
-"jdbc.properties":"QUERY_TIMEOUT_S=600, REQUEST_POOL= \"default-pool\", 
DEBUG_ACTION",
-"jdbc.driver":"com.cloudera.impala.jdbc.Driver",
-"driver.url":"$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/ImpalaJDBC42.jar",
-"dbcp.username":"impala",
-"dbcp.password":"cloudera",
-"table":"alltypes"}');
+STORED BY JDBC
+TBLPROPERTIES (
+"database.type"="IMPALA",
+"jdbc.url"="jdbc:impala://$INTERNAL_LISTEN_HOST:21050/functional",
+"jdbc.auth"="AuthMech=0",
+"jdbc.properties"="QUERY_TIMEOUT_S=600, REQUEST_POOL= \"default-pool\", 
DEBUG_ACTION",
+"jdbc.driver"="com.cloudera.impala.jdbc.Driver",
+"driver.url"="$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/ImpalaJDBC42.jar",
+"dbcp.username"="impala",
+"dbcp.password"="cloudera",
+"table"="alltypes");
 ---- RESULTS
 'Table has been created.'
 ====
@@ -230,9 +213,3 @@ DROP TABLE alltypes_jdbc_datasource_2;
 ---- RESULTS
 'Table has been dropped.'
 ====
----- QUERY
-# Drop DataSource
-DROP DATA SOURCE TestJdbcDataSource;
----- RESULTS
-'Data source has been dropped.'
-====
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/jdbc-data-source-with-keystore.test
 
b/testdata/workloads/functional-query/queries/QueryTest/jdbc-data-source-with-keystore.test
index 7e1d1bcd4..b3f8d0a26 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/jdbc-data-source-with-keystore.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/jdbc-data-source-with-keystore.test
@@ -1,24 +1,5 @@
 ====
 ---- QUERY
-# Create DataSource
-DROP DATA SOURCE IF EXISTS TestJdbcDataSourceWithKeystore;
-CREATE DATA SOURCE TestJdbcDataSourceWithKeystore
-CLASS 'org.apache.impala.extdatasource.jdbc.JdbcDataSource'
-API_VERSION 'V1';
----- RESULTS
-'Data source has been created.'
-====
----- QUERY
-# Show created DataSource
-SHOW DATA SOURCES LIKE 'testjdbcdatasourcewithkeystore';
----- LABELS
-NAME,LOCATION,CLASS NAME,API VERSION
----- RESULTS
-'testjdbcdatasourcewithkeystore','','org.apache.impala.extdatasource.jdbc.JdbcDataSource','V1'
----- TYPES
-STRING,STRING,STRING,STRING
-====
----- QUERY
 # Create external JDBC DataSource table with username, key and keystore
 DROP TABLE IF EXISTS alltypes_jdbc_datasource_keystore;
 CREATE TABLE alltypes_jdbc_datasource_keystore (
@@ -33,15 +14,16 @@ CREATE TABLE alltypes_jdbc_datasource_keystore (
  date_string_col STRING,
  string_col STRING,
  timestamp_col TIMESTAMP)
-PRODUCED BY DATA SOURCE TestJdbcDataSourceWithKeystore(
-'{"database.type":"POSTGRES",
-"jdbc.url":"jdbc:postgresql://$INTERNAL_LISTEN_HOST:5432/functional",
-"jdbc.driver":"org.postgresql.Driver",
-"driver.url":"$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/postgresql-jdbc.jar",
-"dbcp.username":"hiveuser",
-"dbcp.password.keystore":"jceks://$FILESYSTEM_URI_SCHEME/test-warehouse/data-sources/test.jceks",
-"dbcp.password.key":"hiveuser",
-"table":"alltypes"}');
+STORED BY JDBC
+TBLPROPERTIES (
+"database.type"="POSTGRES",
+"jdbc.url"="jdbc:postgresql://$INTERNAL_LISTEN_HOST:5432/functional",
+"jdbc.driver"="org.postgresql.Driver",
+"driver.url"="$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/postgresql-jdbc.jar",
+"dbcp.username"="hiveuser",
+"dbcp.password.keystore"="jceks://$FILESYSTEM_URI_SCHEME/test-warehouse/data-sources/test.jceks",
+"dbcp.password.key"="hiveuser",
+"table"="alltypes");
 ---- RESULTS
 'Table has been created.'
 ====
@@ -84,14 +66,15 @@ CREATE TABLE alltypes_jdbc_datasource_keystore (
  date_string_col STRING,
  string_col STRING,
  timestamp_col TIMESTAMP)
-PRODUCED BY DATA SOURCE TestJdbcDataSourceWithKeystore(
-'{"database.type":"POSTGRES",
-"jdbc.url":"jdbc:postgresql://$INTERNAL_LISTEN_HOST:5432/functional",
-"jdbc.driver":"org.postgresql.Driver",
-"driver.url":"$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/postgresql-jdbc.jar",
-"dbcp.username":"hiveuser",
-"dbcp.password.keystore":"jceks://$FILESYSTEM_URI_SCHEME/test-warehouse/data-sources/test.jceks",
-"table":"alltypes"}');
+STORED BY JDBC
+TBLPROPERTIES (
+"database.type"="POSTGRES",
+"jdbc.url"="jdbc:postgresql://$INTERNAL_LISTEN_HOST:5432/functional",
+"jdbc.driver"="org.postgresql.Driver",
+"driver.url"="$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/postgresql-jdbc.jar",
+"dbcp.username"="hiveuser",
+"dbcp.password.keystore"="jceks://$FILESYSTEM_URI_SCHEME/test-warehouse/data-sources/test.jceks",
+"table"="alltypes");
 ---- RESULTS
 'Table has been created.'
 ====
@@ -118,9 +101,4 @@ BIGINT
 DROP TABLE alltypes_jdbc_datasource_keystore;
 ---- RESULTS
 'Table has been dropped.'
----- QUERY
-# Drop DataSource
-DROP DATA SOURCE TestJdbcDataSourceWithKeystore;
----- RESULTS
-'Data source has been dropped.'
 ====
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/jdbc-data-source.test 
b/testdata/workloads/functional-query/queries/QueryTest/jdbc-data-source.test
index dd8d32cd2..7117b8380 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/jdbc-data-source.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/jdbc-data-source.test
@@ -1,24 +1,5 @@
 ====
 ---- QUERY
-# Create DataSource
-DROP DATA SOURCE IF EXISTS TestJdbcDataSource;
-CREATE DATA SOURCE TestJdbcDataSource
-CLASS 'org.apache.impala.extdatasource.jdbc.JdbcDataSource'
-API_VERSION 'V1';
----- RESULTS
-'Data source has been created.'
-====
----- QUERY
-# Show created DataSource
-SHOW DATA SOURCES LIKE 'testjdbcdatasource';
----- LABELS
-NAME,LOCATION,CLASS NAME,API VERSION
----- RESULTS
-'testjdbcdatasource','','org.apache.impala.extdatasource.jdbc.JdbcDataSource','V1'
----- TYPES
-STRING,STRING,STRING,STRING
-====
----- QUERY
 # Create external JDBC DataSource table
 DROP TABLE IF EXISTS alltypes_jdbc_datasource;
 CREATE TABLE alltypes_jdbc_datasource (
@@ -33,15 +14,16 @@ CREATE TABLE alltypes_jdbc_datasource (
  date_col DATE,
  string_col STRING,
  timestamp_col TIMESTAMP)
-PRODUCED BY DATA SOURCE TestJdbcDataSource(
-'{"database.type":"POSTGRES",
-"jdbc.url":"jdbc:postgresql://$INTERNAL_LISTEN_HOST:5432/functional",
-"jdbc.properties":"connect_timeout=20, application_name=\"myapp\"",
-"jdbc.driver":"org.postgresql.Driver",
-"driver.url":"$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/postgresql-jdbc.jar",
-"dbcp.username":"hiveuser",
-"dbcp.password":"password",
-"table":"alltypes"}');
+STORED BY JDBC
+TBLPROPERTIES (
+"database.type"="POSTGRES",
+"jdbc.url"="jdbc:postgresql://$INTERNAL_LISTEN_HOST:5432/functional",
+"jdbc.properties"="connect_timeout=20, application_name=\"myapp\"",
+"jdbc.driver"="org.postgresql.Driver",
+"driver.url"="$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/postgresql-jdbc.jar",
+"dbcp.username"="hiveuser",
+"dbcp.password"="password",
+"table"="alltypes");
 ---- RESULTS
 'Table has been created.'
 ====
@@ -60,15 +42,16 @@ CREATE TABLE alltypes_jdbc_datasource_2 (
  date_col DATE,
  string_col STRING,
  timestamp_col TIMESTAMP)
-PRODUCED BY DATA SOURCE TestJdbcDataSource(
-'{"database.type":"POSTGRES",
-"jdbc.url":"jdbc:postgresql://$INTERNAL_LISTEN_HOST:5432/functional",
-"jdbc.driver":"org.postgresql.Driver",
-"driver.url":"$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/postgresql-jdbc.jar",
-"dbcp.username":"hiveuser",
-"dbcp.password":"password",
-"table":"AllTypesWithQuote",
-"column.mapping":"id=id, bool_col=Bool_col, tinyint_col=Tinyint_col, 
smallint_col=Smallint_col, int_col=Int_col, bigint_col=Bigint_col, 
float_col=Float_col, double_col=Double_col, date_col=date_col, 
string_col=String_col, timestamp=Timestamp"}');
+STORED BY JDBC
+TBLPROPERTIES (
+"database.type"="POSTGRES",
+"jdbc.url"="jdbc:postgresql://$INTERNAL_LISTEN_HOST:5432/functional",
+"jdbc.driver"="org.postgresql.Driver",
+"driver.url"="$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/postgresql-jdbc.jar",
+"dbcp.username"="hiveuser",
+"dbcp.password"="password",
+"table"="AllTypesWithQuote",
+"column.mapping"="id=id, bool_col=Bool_col, tinyint_col=Tinyint_col, 
smallint_col=Smallint_col, int_col=Int_col, bigint_col=Bigint_col, 
float_col=Float_col, double_col=Double_col, date_col=date_col, 
string_col=String_col, timestamp=Timestamp");
 ---- RESULTS
 'Table has been created.'
 ====
@@ -309,9 +292,3 @@ DROP TABLE alltypes_jdbc_datasource_2;
 ---- RESULTS
 'Table has been dropped.'
 ====
----- QUERY
-# Drop DataSource
-DROP DATA SOURCE TestJdbcDataSource;
----- RESULTS
-'Data source has been dropped.'
-====
diff --git 
a/testdata/workloads/functional-query/queries/QueryTest/mysql-ext-jdbc-tables.test
 
b/testdata/workloads/functional-query/queries/QueryTest/mysql-ext-jdbc-tables.test
index 172719348..4f29a712b 100644
--- 
a/testdata/workloads/functional-query/queries/QueryTest/mysql-ext-jdbc-tables.test
+++ 
b/testdata/workloads/functional-query/queries/QueryTest/mysql-ext-jdbc-tables.test
@@ -1,24 +1,5 @@
 ====
 ---- QUERY
-# Create DataSource
-DROP DATA SOURCE IF EXISTS TestJdbcDataSource;
-CREATE DATA SOURCE TestJdbcDataSource
-CLASS 'org.apache.impala.extdatasource.jdbc.JdbcDataSource'
-API_VERSION 'V1';
----- RESULTS
-'Data source has been created.'
-====
----- QUERY
-# Show created DataSource
-SHOW DATA SOURCES LIKE 'testjdbcdatasource';
----- LABELS
-NAME,LOCATION,CLASS NAME,API VERSION
----- RESULTS
-'testjdbcdatasource','','org.apache.impala.extdatasource.jdbc.JdbcDataSource','V1'
----- TYPES
-STRING,STRING,STRING,STRING
-====
----- QUERY
 # Create external JDBC DataSource table
 DROP TABLE IF EXISTS alltypes_jdbc_datasource;
 CREATE TABLE alltypes_jdbc_datasource (
@@ -33,15 +14,16 @@ CREATE TABLE alltypes_jdbc_datasource (
  date_col DATE,
  string_col STRING,
  timestamp_col TIMESTAMP)
-PRODUCED BY DATA SOURCE TestJdbcDataSource(
-'{"database.type":"MYSQL",
-"jdbc.url":"jdbc:mysql://localhost:3306/functional",
-"jdbc.properties":"autoReconnect=false, useUnicode=false",
-"jdbc.driver":"com.mysql.cj.jdbc.Driver",
-"driver.url":"$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/mysql-jdbc.jar",
-"dbcp.username":"hiveuser",
-"dbcp.password":"password",
-"table":"alltypes"}');
+STORED BY JDBC
+TBLPROPERTIES (
+"database.type"="MYSQL",
+"jdbc.url"="jdbc:mysql://localhost:3306/functional",
+"jdbc.properties"="autoReconnect=false, useUnicode=false",
+"jdbc.driver"="com.mysql.cj.jdbc.Driver",
+"driver.url"="$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/mysql-jdbc.jar",
+"dbcp.username"="hiveuser",
+"dbcp.password"="password",
+"table"="alltypes");
 ---- RESULTS
 'Table has been created.'
 ====
@@ -60,15 +42,16 @@ CREATE TABLE alltypes_jdbc_datasource_2 (
  date_col DATE,
  string_col STRING,
  timestamp_col TIMESTAMP)
-PRODUCED BY DATA SOURCE TestJdbcDataSource(
-'{"database.type":"MYSQL",
-"jdbc.url":"jdbc:mysql://localhost:3306/functional",
-"jdbc.driver":"com.mysql.cj.jdbc.Driver",
-"driver.url":"$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/mysql-jdbc.jar",
-"dbcp.username":"hiveuser",
-"dbcp.password":"password",
-"table":"AllTypesCaseSensitiveNames",
-"column.mapping":"id=id, bool_col=Bool_col, tinyint_col=Tinyint_col, 
smallint_col=Smallint_col, int_col=Int_col, bigint_col=Bigint_col, 
float_col=Float_col, double_col=Double_col, date_col=Date_col, 
string_col=String_col, timestamp=Timestamp"}');
+STORED BY JDBC
+TBLPROPERTIES (
+"database.type"="MYSQL",
+"jdbc.url"="jdbc:mysql://localhost:3306/functional",
+"jdbc.driver"="com.mysql.cj.jdbc.Driver",
+"driver.url"="$FILESYSTEM_PREFIX/test-warehouse/data-sources/jdbc-drivers/mysql-jdbc.jar",
+"dbcp.username"="hiveuser",
+"dbcp.password"="password",
+"table"="AllTypesCaseSensitiveNames",
+"column.mapping"="id=id, bool_col=Bool_col, tinyint_col=Tinyint_col, 
smallint_col=Smallint_col, int_col=Int_col, bigint_col=Bigint_col, 
float_col=Float_col, double_col=Double_col, date_col=Date_col, 
string_col=String_col, timestamp=Timestamp");
 ---- RESULTS
 'Table has been created.'
 ====
@@ -309,9 +292,3 @@ DROP TABLE alltypes_jdbc_datasource_2;
 ---- RESULTS
 'Table has been dropped.'
 ====
----- QUERY
-# Drop DataSource
-DROP DATA SOURCE TestJdbcDataSource;
----- RESULTS
-'Data source has been dropped.'
-====
diff --git a/testdata/workloads/functional-query/queries/QueryTest/set.test b/testdata/workloads/functional-query/queries/QueryTest/set.test
index c0e0ad67c..b042bd937 100644
--- a/testdata/workloads/functional-query/queries/QueryTest/set.test
+++ b/testdata/workloads/functional-query/queries/QueryTest/set.test
@@ -147,7 +147,7 @@ Invalid Kudu read mode: 'bar'. Valid values are DEFAULT(0), 
READ_LATEST(1), READ
 ---- QUERY
 set default_file_format=bar
 ---- CATCH
-Invalid default file format: 'bar'. Valid values are TEXT(0), RC_FILE(1), 
SEQUENCE_FILE(2), AVRO(3), PARQUET(4), KUDU(5), ORC(6), HUDI_PARQUET(7), 
ICEBERG(8), JSON(9).
+Invalid default file format: 'bar'. Valid values are TEXT(0), RC_FILE(1), 
SEQUENCE_FILE(2), AVRO(3), PARQUET(4), KUDU(5), ORC(6), HUDI_PARQUET(7), 
ICEBERG(8), JSON(9), JDBC(10).
 ====
 ---- QUERY
 set default_transactional_type=bar
diff --git a/tests/query_test/test_ext_data_sources.py b/tests/query_test/test_ext_data_sources.py
index 773ec2698..fe727e5c8 100644
--- a/tests/query_test/test_ext_data_sources.py
+++ b/tests/query_test/test_ext_data_sources.py
@@ -64,16 +64,7 @@ class TestExtDataSources(ImpalaTestSuite):
     jdbc_tbl_name = "functional.alltypes_jdbc_datasource"
     properties = self._get_tbl_properties(jdbc_tbl_name)
     # Verify data source related table properties
-    assert properties['__IMPALA_DATA_SOURCE_NAME'] == 'jdbcdatasource'
-    expected_location = "/test-warehouse/data-sources/jdbc-data-source.jar"
-    assert re.search(expected_location, 
properties['__IMPALA_DATA_SOURCE_LOCATION'])
-    assert properties['__IMPALA_DATA_SOURCE_CLASS'] == \
-        'org.apache.impala.extdatasource.jdbc.JdbcDataSource'
-    assert properties['__IMPALA_DATA_SOURCE_API_VERSION'] == 'V1'
-    assert 'database.type\\":\\"POSTGRES' \
-        in properties['__IMPALA_DATA_SOURCE_INIT_STRING']
-    assert 'table\\":\\"alltypes' \
-        in properties['__IMPALA_DATA_SOURCE_INIT_STRING']
+    assert properties['__IMPALA_DATA_SOURCE_NAME'] == 'impalajdbcdatasource'
 
   def test_data_source_tables(self, vector, unique_database):
     self.run_test_case('QueryTest/data-source-tables', vector, 
use_db=unique_database)

Reply via email to