This is an automated email from the ASF dual-hosted git repository.

jshao pushed a commit to branch branch-0.8
in repository https://gitbox.apache.org/repos/asf/gravitino.git


The following commit(s) were added to refs/heads/branch-0.8 by this push:
     new 92600c6b88 [#6623] fix(spark): Load JDBC driver explicitly to fix `No suitable driver found for mysql:xx` error (#6673)
92600c6b88 is described below

commit 92600c6b88df4ca298994a07ba48c73477faec8d
Author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
AuthorDate: Tue Mar 11 20:12:02 2025 +0800

    [#6623] fix(spark): Load JDBC driver explicitly to fix `No suitable driver found for mysql:xx` error (#6673)
    
    ### What changes were proposed in this pull request?
    If the user sets `spark.sql.hive.metastore.jars` to `path` in the Spark
    configuration and the Hive metastore URI is not set explicitly, Spark will
    use an isolated client class loader to load JDBC drivers, which makes
    Iceberg unable to load the corresponding JDBC driver.
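    
    As a rough sketch of the setup that triggers this, the snippet below builds a
    Spark session with `spark.sql.hive.metastore.jars` set to `path`; the class
    name, app name, and jar path are hypothetical and not part of this change:
    
    ```java
    import org.apache.spark.sql.SparkSession;
    
    // Minimal sketch: the `path` setting makes Spark load Hive metastore classes,
    // and any JDBC drivers placed on that path, through an isolated client class
    // loader instead of the application class loader.
    public class IsolatedMetastoreJarsExample {
      public static void main(String[] args) {
        SparkSession spark =
            SparkSession.builder()
                .appName("gravitino-iceberg-example") // hypothetical app name
                .config("spark.sql.hive.metastore.jars", "path")
                .config("spark.sql.hive.metastore.jars.path", "/opt/hive/lib/*") // hypothetical path
                .enableHiveSupport()
                .getOrCreate();
        spark.stop();
      }
    }
    ```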
    
    ### Why are the changes needed?
    Fix: #6623
    
    ### Does this PR introduce _any_ user-facing change?
    no
    
    ### How was this patch tested?
    Tested locally.
    
    Co-authored-by: FANNG <xiaoj...@datastrato.com>
---
 .../spark/connector/iceberg/GravitinoIcebergCatalog.java     | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java b/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java
index ccadc69e49..e6d59c853b 100644
--- a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java
+++ b/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java
@@ -22,6 +22,8 @@ package org.apache.gravitino.spark.connector.iceberg;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.util.Map;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergConstants;
 import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergPropertiesUtils;
 import org.apache.gravitino.rel.Table;
 import org.apache.gravitino.spark.connector.PropertiesConverter;
@@ -58,6 +60,16 @@ public class GravitinoIcebergCatalog extends BaseCatalog
   @Override
   protected TableCatalog createAndInitSparkCatalog(
       String name, CaseInsensitiveStringMap options, Map<String, String> properties) {
+    String jdbcDriver = properties.get(IcebergConstants.GRAVITINO_JDBC_DRIVER);
+    if (StringUtils.isNotBlank(jdbcDriver)) {
+      // If `spark.sql.hive.metastore.jars` is set, Spark will use an isolated client class loader
+      // to load JDBC drivers, which makes Iceberg unable to find the corresponding JDBC driver.
+      try {
+        Class.forName(jdbcDriver);
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+    }
     String catalogBackendName = IcebergPropertiesUtils.getCatalogBackendName(properties);
     Map<String, String> all =
         getPropertiesConverter().toSparkCatalogProperties(options, properties);
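
The explicit `Class.forName(jdbcDriver)` call added above works because loading a
JDBC driver class runs its static initializer, which registers the driver with
`java.sql.DriverManager`. A minimal standalone sketch of that mechanism, using a
hypothetical driver class and JDBC URL rather than anything from this commit:

```java
import java.sql.Driver;
import java.sql.DriverManager;

public class DriverPreloadSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical driver class and URL, for illustration only.
    String jdbcDriver = "com.mysql.cj.jdbc.Driver";
    String jdbcUrl = "jdbc:mysql://localhost:3306/iceberg_catalog";

    // Loading the class runs its static initializer, which registers the driver
    // with DriverManager.
    Class.forName(jdbcDriver);

    // Without the explicit load above, this lookup can fail with
    // "No suitable driver found" when the driver jar is only visible to an
    // isolated class loader.
    Driver driver = DriverManager.getDriver(jdbcUrl);
    System.out.println("Resolved driver: " + driver.getClass().getName());
  }
}
```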
