This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new 65af89257c0 branch-3.0: [fix](hive) Incorrect location conversion #46362 (#46515)
65af89257c0 is described below

commit 65af89257c06d4963eef5068dfe049b270635bd2
Author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
AuthorDate: Fri Jan 10 13:08:28 2025 +0800

    branch-3.0: [fix](hive) Incorrect location conversion #46362 (#46515)
    
    Cherry-picked from #46362
    
    Co-authored-by: wuwenchi <wuwen...@selectdb.com>
---
 .../org/apache/doris/planner/HiveTableSink.java    |   2 +-
 .../apache/doris/planner/HiveTableSinkTest.java    | 145 +++++++++++++++++++++
 2 files changed, 146 insertions(+), 1 deletion(-)

diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/HiveTableSink.java b/fe/fe-core/src/main/java/org/apache/doris/planner/HiveTableSink.java
index 168f92c113c..3635d10633f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/HiveTableSink.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/HiveTableSink.java
@@ -128,7 +128,7 @@ public class HiveTableSink extends BaseExternalTableDataSink {
         TFileType fileType = locationPath.getTFileTypeForBE();
         if (fileType == TFileType.FILE_S3) {
             locationParams.setWritePath(storageLocation);
-            locationParams.setOriginalWritePath(location);
+            locationParams.setOriginalWritePath(sd.getLocation());
             locationParams.setTargetPath(location);
             if (insertCtx.isPresent()) {
                 HiveInsertCommandContext context = (HiveInsertCommandContext) 
insertCtx.get();
diff --git a/fe/fe-core/src/test/java/org/apache/doris/planner/HiveTableSinkTest.java b/fe/fe-core/src/test/java/org/apache/doris/planner/HiveTableSinkTest.java
new file mode 100644
index 00000000000..8794a56eac9
--- /dev/null
+++ b/fe/fe-core/src/test/java/org/apache/doris/planner/HiveTableSinkTest.java
@@ -0,0 +1,145 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.doris.planner;
+
+import org.apache.doris.catalog.Column;
+import org.apache.doris.catalog.PrimitiveType;
+import org.apache.doris.common.AnalysisException;
+import org.apache.doris.datasource.hive.HMSCachedClient;
+import org.apache.doris.datasource.hive.HMSExternalCatalog;
+import org.apache.doris.datasource.hive.HMSExternalDatabase;
+import org.apache.doris.datasource.hive.HMSExternalTable;
+import org.apache.doris.datasource.hive.ThriftHMSCachedClient;
+
+import mockit.Mock;
+import mockit.MockUp;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.junit.Test;
+import org.locationtech.jts.util.Assert;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Optional;
+import java.util.Set;
+
+public class HiveTableSinkTest {
+
+    @Test
+    public void testBindDataSink() throws AnalysisException {
+
+        new MockUp<ThriftHMSCachedClient>() {
+            @Mock
+            List<Partition> listPartitions(String dbName, String tblName) {
+                return new ArrayList<Partition>() {{
+                        add(new Partition() {{
+                                setValues(new ArrayList<String>() {{
+                                        add("a");
+                                    }
+                                });
+                                setSd(new StorageDescriptor() {{
+                                        setInputFormat("orc");
+                                    }
+                                });
+                            }
+                        });
+                    }
+                };
+            }
+        };
+
+        new MockUp<HMSExternalCatalog>() {
+            @Mock
+            public HMSCachedClient getClient() {
+                return new ThriftHMSCachedClient(null, 2);
+            }
+        };
+
+        ArrayList<String> locations = new ArrayList<String>() {{
+                add("gs://abc/def");
+                add("s3://abc/def");
+                add("s3a://abc/def");
+                add("s3n://abc/def");
+                add("bos://abc/def");
+                add("oss://abc/def");
+                add("cos://abc/def");
+            }
+        };
+        for (String location : locations) {
+            mockDifferLocationTable(location);
+
+            HMSExternalCatalog hmsExternalCatalog = new HMSExternalCatalog();
+            hmsExternalCatalog.setInitialized(true);
+            HMSExternalDatabase db = new HMSExternalDatabase(hmsExternalCatalog, 10000, "hive_db1", "hive_db1");
+            HMSExternalTable tbl = new HMSExternalTable(10001, "hive_tbl1", "hive_db1", hmsExternalCatalog, db);
+            HiveTableSink hiveTableSink = new HiveTableSink(tbl);
+            hiveTableSink.bindDataSink(Optional.empty());
+
+            Assert.equals(hiveTableSink.tDataSink.hive_table_sink.location.original_write_path, location);
+        }
+    }
+
+    private void mockDifferLocationTable(String location) {
+        new MockUp<HMSExternalTable>() {
+            @Mock
+            public Set<String> getPartitionColumnNames() {
+                return new HashSet<String>() {{
+                        add("a");
+                        add("b");
+                    }
+                };
+            }
+
+            @Mock
+            public List<Column> getColumns() {
+                Column a = new Column("a", PrimitiveType.INT);
+                Column b = new Column("b", PrimitiveType.INT);
+                return new ArrayList<Column>() {{
+                        add(a);
+                        add(b);
+                    }
+                };
+            }
+
+            @Mock
+            public org.apache.hadoop.hive.metastore.api.Table getRemoteTable() {
+                Table table = new Table();
+                table.setSd(new StorageDescriptor() {{
+                        setInputFormat("orc");
+                        setBucketCols(new ArrayList<>());
+                        setNumBuckets(1);
+                        setSerdeInfo(new SerDeInfo() {{
+                                setParameters(new HashMap<>());
+                            }
+                        });
+                        setLocation(location);
+                    }
+                });
+                table.setParameters(new HashMap<String, String>() {{
+                        put("orc.compress", "lzo");
+                    }
+                });
+                return table;
+            }
+        };
+    }
+}
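
The one-line change above sets original_write_path from sd.getLocation(), i.e. the location exactly as it is stored in the table's StorageDescriptor, rather than from the `location` value, which could already carry a converted scheme; the new test asserts that original_write_path matches the raw metastore location for each object-store scheme in its list. The following is a minimal, self-contained sketch of that expectation, assuming a hypothetical toS3Scheme helper for the scheme conversion applied to the path handed to the BE; the class and helper names are illustrative only and are not part of the Doris code base.

// A minimal sketch of the behavior the new test pins down (assumed names,
// not Doris APIs): the path handed to the BE may be rewritten to an
// S3-compatible scheme, while original_write_path must keep the raw
// metastore location untouched.
public class LocationConversionSketch {

    // Hypothetical stand-in for the scheme conversion applied to the BE
    // write path (e.g. cos://bucket/key -> s3://bucket/key).
    static String toS3Scheme(String location) {
        int idx = location.indexOf("://");
        return idx < 0 ? location : "s3" + location.substring(idx);
    }

    public static void main(String[] args) {
        String[] locations = {
            "gs://abc/def", "s3://abc/def", "s3a://abc/def", "s3n://abc/def",
            "bos://abc/def", "oss://abc/def", "cos://abc/def"
        };
        for (String original : locations) {
            String writePath = toS3Scheme(original);  // converted for the BE
            String originalWritePath = original;      // unconverted, as in sd.getLocation()
            System.out.printf("write_path=%s original_write_path=%s%n",
                    writePath, originalWritePath);
        }
    }
}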

