This is an automated email from the ASF dual-hosted git repository.
dataroaring pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new 50f10d16df6 (cloud-merge) Fix the inconsistency between invertedIndex and
catalog (#33183)
50f10d16df6 is described below
commit 50f10d16df632deaae3b57475c585fbadd629539
Author: Lightman <[email protected]>
AuthorDate: Thu Apr 4 10:09:59 2024 +0800
(cloud-merge) Fix the inconsistency between invertedIndex and catalog (#33183)
---
.../src/main/java/org/apache/doris/alter/CloudRollupJobV2.java | 4 +++-
.../main/java/org/apache/doris/alter/CloudSchemaChangeJobV2.java | 7 ++++++-
2 files changed, 9 insertions(+), 2 deletions(-)
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/alter/CloudRollupJobV2.java
b/fe/fe-core/src/main/java/org/apache/doris/alter/CloudRollupJobV2.java
index 71ba5ef35cf..2a7a2bc2f55 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/alter/CloudRollupJobV2.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/alter/CloudRollupJobV2.java
@@ -62,7 +62,9 @@ public class CloudRollupJobV2 extends RollupJobV2 {
job.write(dos);
ByteArrayInputStream bais = new
ByteArrayInputStream(baos.toByteArray());
DataInputStream dis = new DataInputStream(bais);
- return CloudRollupJobV2.read(dis);
+ CloudRollupJobV2 ret = (CloudRollupJobV2) CloudRollupJobV2.read(dis);
+ ret.partitionIdToRollupIndex = job.partitionIdToRollupIndex;
+ return ret;
}
private CloudRollupJobV2() {}
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/alter/CloudSchemaChangeJobV2.java
b/fe/fe-core/src/main/java/org/apache/doris/alter/CloudSchemaChangeJobV2.java
index 8e3a198c2a4..4f474d188d4 100644
---
a/fe/fe-core/src/main/java/org/apache/doris/alter/CloudSchemaChangeJobV2.java
+++
b/fe/fe-core/src/main/java/org/apache/doris/alter/CloudSchemaChangeJobV2.java
@@ -55,12 +55,17 @@ public class CloudSchemaChangeJobV2 extends
SchemaChangeJobV2 {
private static final Logger LOG =
LogManager.getLogger(SchemaChangeJobV2.class);
public static AlterJobV2 buildCloudSchemaChangeJobV2(SchemaChangeJobV2
job) throws IOException {
+ // deep copy to save repeated assignments
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
job.write(dos);
ByteArrayInputStream bais = new
ByteArrayInputStream(baos.toByteArray());
DataInputStream dis = new DataInputStream(bais);
- return CloudSchemaChangeJobV2.read(dis);
+ // partitionIndexMap cannot be deep-copied because it is referenced
+ // by `SchemaChangeJobV2#addShadowIndexToCatalog` and
`SchemaChangeHandler.createJob`
+ CloudSchemaChangeJobV2 ret = (CloudSchemaChangeJobV2)
CloudSchemaChangeJobV2.read(dis);
+ ret.partitionIndexMap = job.partitionIndexMap;
+ return ret;
}
public CloudSchemaChangeJobV2(String rawSql, long jobId, long dbId, long
tableId,
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]