This is an automated email from the ASF dual-hosted git repository.

jshao pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/gravitino.git


The following commit(s) were added to refs/heads/main by this push:
     new 78c0114ab7 [#6623] fix(spark): Load JDBC driver explicitly to fix `No 
suitable driver found for  mysql:xx` error (#6621)
78c0114ab7 is described below

commit 78c0114ab7eb2eb97d85ae5f54b5774a01185a26
Author: FANNG <xiaoj...@datastrato.com>
AuthorDate: Tue Mar 11 19:24:04 2025 +0800

    [#6623] fix(spark): Load JDBC driver explicitly to fix `No suitable driver 
found for  mysql:xx` error (#6621)
    
    ### What changes were proposed in this pull request?
    If a user sets `spark.sql.hive.metastore.jars` to `path` in the Spark
    configuration and the Hive metastore URI is not set explicitly, Spark will
    use an isolated client class loader to load JDBC drivers, which prevents
    Iceberg from loading the corresponding JDBC driver.
    
    ### Why are the changes needed?
    Fix: #6623
    
    ### Does this PR introduce _any_ user-facing change?
    no
    
    ### How was this patch tested?
    test locally
---
 .../spark/connector/iceberg/GravitinoIcebergCatalog.java     | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java
 
b/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java
index ccadc69e49..e6d59c853b 100644
--- 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java
+++ 
b/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java
@@ -22,6 +22,8 @@ package org.apache.gravitino.spark.connector.iceberg;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.util.Map;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergConstants;
 import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergPropertiesUtils;
 import org.apache.gravitino.rel.Table;
 import org.apache.gravitino.spark.connector.PropertiesConverter;
@@ -58,6 +60,16 @@ public class GravitinoIcebergCatalog extends BaseCatalog
   @Override
   protected TableCatalog createAndInitSparkCatalog(
       String name, CaseInsensitiveStringMap options, Map<String, String> 
properties) {
+    String jdbcDriver = properties.get(IcebergConstants.GRAVITINO_JDBC_DRIVER);
+    if (StringUtils.isNotBlank(jdbcDriver)) {
+      // If `spark.sql.hive.metastore.jars` is set, Spark will use an isolated 
client class loader
+      // to load JDBC drivers, which makes Iceberg could not find 
corresponding JDBC driver.
+      try {
+        Class.forName(jdbcDriver);
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+    }
     String catalogBackendName = 
IcebergPropertiesUtils.getCatalogBackendName(properties);
     Map<String, String> all =
         getPropertiesConverter().toSparkCatalogProperties(options, properties);

Reply via email to