This is an automated email from the ASF dual-hosted git repository.
linxinyuan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/texera.git
The following commit(s) were added to refs/heads/master by this push:
new f836a1e966 fix(ui): improve upload behavior to prevent progress
confusion (#3658)
f836a1e966 is described below
commit f836a1e966ab15189755899754bba8125a962fb1
Author: Xuan Gu <[email protected]>
AuthorDate: Wed Aug 13 17:18:50 2025 -0700
fix(ui): improve upload behavior to prevent progress confusion (#3658)
### **Purpose**
This PR improves the file upload UI to prevent progress confusion and
provide smoother progress bar updates. It now shows only the latest task
for each file, tracks chunk progress independently, and removes canceled
uploads from the list while cleaning up subscriptions.
### **Changes**
- dataset-detail.component.ts
- Before starting a new upload, cancel and remove any existing task with
the same file path
- Update task list filtering to replace old entries for the same file
- dataset.service.ts
- Track chunk progress independently for smoother progress bar updates
- Cap displayed upload progress at 99% until finalization to avoid
confusion from showing 100% before the process is truly complete
### **Demonstration**
**Before – same file could be displayed twice:**
<img width="388" height="512" alt="prev"
src="https://github.com/user-attachments/assets/a8a27401-dcfe-43c8-b340-617f4ca8b1df"
/>
**After – behavior with this PR:**
1. Start uploading a file
2. Cancel midway
3. Re-upload the same file (old entry is replaced)
https://github.com/user-attachments/assets/36be1dd3-6bcf-4d16-bef7-a000bb5917be
---
.../dataset-detail.component.ts | 21 +++++-
.../service/user/dataset/dataset.service.ts | 81 ++++++++++++----------
2 files changed, 61 insertions(+), 41 deletions(-)
diff --git
a/core/gui/src/app/dashboard/component/user/user-dataset/user-dataset-explorer/dataset-detail.component.ts
b/core/gui/src/app/dashboard/component/user/user-dataset/user-dataset-explorer/dataset-detail.component.ts
index fb40c946bc..5329037b08 100644
---
a/core/gui/src/app/dashboard/component/user/user-dataset/user-dataset-explorer/dataset-detail.component.ts
+++
b/core/gui/src/app/dashboard/component/user/user-dataset/user-dataset-explorer/dataset-detail.component.ts
@@ -40,6 +40,7 @@ import { DatasetStagedObject } from
"../../../../../common/type/dataset-staged-o
import { NzModalService } from "ng-zorro-antd/modal";
import { UserDatasetVersionCreatorComponent } from
"./user-dataset-version-creator/user-dataset-version-creator.component";
import { AdminSettingsService } from
"../../../../service/admin/settings/admin-settings.service";
+import { Subscription } from "rxjs";
export const THROTTLE_TIME_MS = 1000;
@@ -80,6 +81,7 @@ export class DatasetDetailComponent implements OnInit {
// Uploading setting
chunkSizeMB: number = 50;
maxConcurrentChunks: number = 10;
+ private uploadSubscriptions = new Map<string, Subscription>();
// List of upload tasks – each task tracked by its filePath
public uploadTasks: Array<
@@ -328,7 +330,12 @@ export class DatasetDetailComponent implements OnInit {
onNewUploadFilesChanged(files: FileUploadItem[]) {
if (this.did) {
files.forEach((file, idx) => {
- // Add an initializing task placeholder to uploadTasks.
+ // Cancel any existing upload for the same file to prevent progress
confusion
+ this.uploadSubscriptions.get(file.name)?.unsubscribe();
+ this.uploadSubscriptions.delete(file.name);
+ this.uploadTasks = this.uploadTasks.filter(t => t.filePath !==
file.name);
+
+ // Add an initializing task placeholder to uploadTasks
this.uploadTasks.push({
filePath: file.name,
percentage: 0,
@@ -337,7 +344,7 @@ export class DatasetDetailComponent implements OnInit {
physicalAddress: "",
});
// Start multipart upload
- this.datasetService
+ const subscription = this.datasetService
.multipartUpload(
this.datasetName,
file.name,
@@ -388,16 +395,19 @@ export class DatasetDetailComponent implements OnInit {
}
},
});
+ // Store the subscription for later cleanup
+ this.uploadSubscriptions.set(file.name, subscription);
});
}
}
- // Hide a task row after 3s (stores timer to clear on destroy)
+ // Hide a task row after 3s (stores timer to clear on destroy) and clean up
its subscription
private scheduleHide(idx: number) {
if (idx === -1) {
return;
}
const key = this.uploadTasks[idx].filePath;
+ this.uploadSubscriptions.delete(key);
const handle = window.setTimeout(() => {
this.uploadTasks = this.uploadTasks.filter(t => t.filePath !== key);
}, 3000);
@@ -405,6 +415,11 @@ export class DatasetDetailComponent implements OnInit {
}
onClickAbortUploadProgress(task: MultipartUploadProgress & { filePath:
string }) {
+ const subscription = this.uploadSubscriptions.get(task.filePath);
+ if (subscription) {
+ subscription.unsubscribe();
+ this.uploadSubscriptions.delete(task.filePath);
+ }
this.datasetService
.finalizeMultipartUpload(
this.datasetName,
diff --git a/core/gui/src/app/dashboard/service/user/dataset/dataset.service.ts
b/core/gui/src/app/dashboard/service/user/dataset/dataset.service.ts
index 333be35fa3..7a239a0df8 100644
--- a/core/gui/src/app/dashboard/service/user/dataset/dataset.service.ts
+++ b/core/gui/src/app/dashboard/service/user/dataset/dataset.service.ts
@@ -169,12 +169,11 @@ export class DatasetService {
): Observable<MultipartUploadProgress> {
const partCount = Math.ceil(file.size / partSize);
- // track progress bar
- let totalBytesUploaded = 0;
- let lastReportedProgress = 0;
-
return new Observable(observer => {
- this.initiateMultipartUpload(datasetName, filePath, partCount)
+ // Track upload progress for each part independently
+ const partProgress = new Map<number, number>();
+
+ const subscription = this.initiateMultipartUpload(datasetName, filePath,
partCount)
.pipe(
switchMap(initiateResponse => {
const { uploadId, presignedUrls, physicalAddress } =
initiateResponse;
@@ -197,6 +196,7 @@ export class DatasetService {
return from(presignedUrls).pipe(
// 2) Use mergeMap with concurrency limit to upload chunk by
chunk
mergeMap((url, index) => {
+ const partNumber = index + 1;
const start = index * partSize;
const end = Math.min(start + partSize, file.size);
const chunk = file.slice(start, end);
@@ -207,20 +207,21 @@ export class DatasetService {
xhr.upload.addEventListener("progress", event => {
if (event.lengthComputable) {
- const currentTotalUploaded = totalBytesUploaded +
event.loaded;
- const currentProgress = (currentTotalUploaded /
file.size) * 100;
-
- // Prevent backward progress
- if (currentProgress > lastReportedProgress) {
- lastReportedProgress = currentProgress;
- observer.next({
- filePath,
- percentage: Math.round(currentProgress),
- status: "uploading",
- uploadId,
- physicalAddress,
- });
- }
+ // Update this specific part's progress
+ partProgress.set(partNumber, event.loaded);
+
+ // Calculate total progress across all parts
+ let totalUploaded = 0;
+ partProgress.forEach(bytes => (totalUploaded += bytes));
+ const percentage = Math.round((totalUploaded /
file.size) * 100);
+
+ observer.next({
+ filePath,
+ percentage: Math.min(percentage, 99), // Cap at 99%
until finalized
+ status: "uploading",
+ uploadId,
+ physicalAddress,
+ });
}
});
@@ -228,33 +229,36 @@ export class DatasetService {
if (xhr.status === 200 || xhr.status === 201) {
const etag =
xhr.getResponseHeader("ETag")?.replace(/"/g, "");
if (!etag) {
- partObserver.error(new Error(`Missing ETag for part
${index + 1}`));
+ partObserver.error(new Error(`Missing ETag for part
${partNumber}`));
return;
}
- totalBytesUploaded += chunk.size;
- uploadedParts.push({ PartNumber: index + 1, ETag: etag
});
-
- const finalProgress = (totalBytesUploaded / file.size) *
100;
-
- // Prevent backward progress
- if (finalProgress > lastReportedProgress) {
- lastReportedProgress = finalProgress;
- observer.next({
- filePath,
- percentage: Math.round(finalProgress),
- status: "uploading",
- uploadId,
- physicalAddress,
- });
- }
+
+ // Mark this part as fully uploaded
+ partProgress.set(partNumber, chunk.size);
+ uploadedParts.push({ PartNumber: partNumber, ETag: etag
});
+
+ // Recalculate progress
+ let totalUploaded = 0;
+ partProgress.forEach(bytes => (totalUploaded += bytes));
+ const percentage = Math.round((totalUploaded /
file.size) * 100);
+
+ observer.next({
+ filePath,
+ percentage: Math.min(percentage, 99),
+ status: "uploading",
+ uploadId,
+ physicalAddress,
+ });
partObserver.complete();
} else {
- partObserver.error(new Error(`Failed to upload part
${index + 1}`));
+ partObserver.error(new Error(`Failed to upload part
${partNumber}`));
}
});
xhr.addEventListener("error", () => {
- partObserver.error(new Error(`Failed to upload part
${index + 1}`));
+ // Remove failed part from progress
+ partProgress.delete(partNumber);
+ partObserver.error(new Error(`Failed to upload part
${partNumber}`));
});
xhr.open("PUT", url);
@@ -303,6 +307,7 @@ export class DatasetService {
.subscribe({
error: (err: unknown) => observer.error(err),
});
+ return () => subscription.unsubscribe();
});
}