Skip to content

Commit 7662a3b

Browse files
committed
Consolidates upload submission logic
Removes the `useUploadQueue` composable and centralizes its responsibilities within `useUploadSubmission` and `useUploadBatchOperations`. This refactoring provides clearer separation of concerns and streamlines the upload process:

- Item-level validation moves to `uploadItemTypes.ts`.
- Batch-specific operations, including a new `processDirectBatch` for atomic collection creation via `/api/tools/fetch`, are now handled by `useUploadBatchOperations`.
- `useUploadSubmission` now orchestrates the entire upload flow, clearly distinguishing between direct collection creation and two-step collection creation (for items, such as data library copies, that require individual processing).
1 parent a15779c commit 7662a3b

8 files changed

Lines changed: 299 additions & 989 deletions

File tree

client/src/components/Panels/Upload/methods/CompositeFileUpload.vue

Lines changed: 60 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -6,10 +6,15 @@ import { computed, ref, watch } from "vue";
66
import { useUploadDefaults } from "@/composables/upload/uploadDefaults";
77
import { useUploadStaging } from "@/composables/upload/useUploadStaging";
88
import type { ExtensionDetails } from "@/composables/uploadConfigurations";
9-
import { useUploadQueue } from "@/composables/uploadQueue";
9+
import {
10+
buildPreparedUploadWithOptions,
11+
createFileUploadItem,
12+
createPastedUploadItem,
13+
createUrlUploadItem,
14+
} from "@/utils/upload";
1015
import { mapToCompositeFileUpload } from "@/utils/upload/itemMappers";
1116
12-
import type { UploadMethodComponent, UploadMethodConfig } from "../types";
17+
import type { PreparedUpload, UploadMethodComponent, UploadMethodConfig } from "../types";
1318
import type { CompositeFileItem, CompositeSlot } from "../types/uploadItem";
1419
1520
import UploadTableDbKeyCell from "../shared/UploadTableDbKeyCell.vue";
@@ -30,7 +35,6 @@ const emit = defineEmits<{
3035
(e: "ready", ready: boolean): void;
3136
}>();
3237
33-
const uploadQueue = useUploadQueue();
3438
const { compositeExtensions, listDbKeys, configurationsReady, defaultDbKey } = useUploadDefaults();
3539
3640
const compositeItems = ref<CompositeFileItem[]>([]);
@@ -146,20 +150,68 @@ function clearAll() {
146150
clearStaging();
147151
}
148152
149-
function startUpload() {
153+
function prepareUpload(): PreparedUpload | null {
150154
const item = currentItem.value;
151155
if (!item) {
152-
return;
156+
return null;
157+
}
158+
159+
const uploadItem = mapToCompositeFileUpload(item, props.targetHistoryId);
160+
const baseOptions = {
161+
dbkey: uploadItem.dbkey,
162+
ext: uploadItem.extension,
163+
space_to_tab: uploadItem.spaceToTab,
164+
to_posix_lines: uploadItem.toPosixLines,
165+
deferred: false,
166+
};
167+
168+
const apiItems = uploadItem.slots
169+
.filter((slot) => slot.src !== "files" || !!slot.file)
170+
.map((slot) => {
171+
const slotOptions = {
172+
name: slot.slotName,
173+
...baseOptions,
174+
};
175+
176+
if (slot.src === "files") {
177+
return createFileUploadItem(slot.file!, uploadItem.targetHistoryId, {
178+
...slotOptions,
179+
size: slot.fileSize ?? slot.file!.size,
180+
});
181+
}
182+
183+
if (slot.src === "url") {
184+
return createUrlUploadItem(slot.url ?? "", uploadItem.targetHistoryId, {
185+
...slotOptions,
186+
size: slot.fileSize ?? 0,
187+
});
188+
}
189+
190+
return createPastedUploadItem(slot.content ?? "", uploadItem.targetHistoryId, {
191+
...slotOptions,
192+
size: slot.fileSize ?? (slot.content ?? "").length,
193+
});
194+
});
195+
196+
if (apiItems.length === 0) {
197+
return null;
153198
}
154199
155-
const queueItem = mapToCompositeFileUpload(item, props.targetHistoryId);
156-
uploadQueue.enqueue([queueItem]);
200+
return buildPreparedUploadWithOptions([uploadItem], undefined, {
201+
apiItems,
202+
uploadOptions: {
203+
composite: true,
204+
compositeName: uploadItem.name,
205+
},
206+
});
207+
}
157208
209+
function reset() {
158210
compositeItems.value = [];
159211
clearStaging();
160212
}
161213
162-
defineExpose<UploadMethodComponent>({ startUpload });
214+
defineExpose<UploadMethodComponent>({ prepareUpload, reset });
163215
</script>
164216

165217
<template>

client/src/components/Panels/Upload/types.ts

Lines changed: 7 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -115,6 +115,13 @@ export interface PreparedUpload {
115115
collectionConfig?: UploadCollectionConfig;
116116
/** Items used for progress tracking in the upload state store. */
117117
uploadItems?: NewUploadItem[];
118+
/** Optional upload behavior flags forwarded to uploadDatasets. */
119+
uploadOptions?: {
120+
/** Treat multiple API items as one composite dataset. */
121+
composite?: boolean;
122+
/** Display name for the composite dataset. */
123+
compositeName?: string;
124+
};
118125
}
119126

120127
/**

client/src/composables/upload/uploadItemTypes.ts

Lines changed: 40 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -128,6 +128,46 @@ export type UploadItem = NewUploadItem & UploadState;
128128
/** Sources returned for uploaded history contents. */
129129
export type UploadedDatasetSource = Extract<HistoryContentSource, "hda" | "hdca">;
130130

131+
/**
132+
* Validates a UI upload item before submission.
133+
* Returns an error message if invalid, undefined if valid.
134+
*/
135+
export function validateUploadItem(item: NewUploadItem): string | undefined {
136+
switch (item.uploadMode) {
137+
case "local-file":
138+
if (!item.fileData) {
139+
return `No file selected for "${item.name}"`;
140+
}
141+
if (item.fileData.size === 0) {
142+
return `File "${item.name}" is empty`;
143+
}
144+
break;
145+
146+
case "paste-content":
147+
if (!item.content || item.content.trim().length === 0) {
148+
return `No content provided for "${item.name}"`;
149+
}
150+
break;
151+
152+
case "paste-links":
153+
case "remote-files":
154+
if (!item.url || item.url.trim().length === 0) {
155+
return `No URL provided for "${item.name}"`;
156+
}
157+
break;
158+
159+
case "data-library":
160+
if (!item.lddaId) {
161+
return `No library dataset ID provided for "${item.name}"`;
162+
}
163+
break;
164+
165+
default:
166+
return `Unknown upload mode: ${(item as NewUploadItem).uploadMode}`;
167+
}
168+
return undefined;
169+
}
170+
131171
/**
132172
* Represents a dataset that was successfully uploaded.
133173
*/

client/src/composables/upload/useUploadBatchOperations.ts

Lines changed: 95 additions & 12 deletions
Original file line number | Diff line number | Diff line change
@@ -1,8 +1,12 @@
11
import { createHistoryDatasetCollectionInstanceFull } from "@/api/datasetCollections";
22
import { useUploadState } from "@/components/Panels/Upload/uploadState";
33
import { buildCollectionElements } from "@/composables/upload/collectionElements";
4-
import type { UploadItem } from "@/composables/upload/uploadItemTypes";
4+
import type { NewUploadItem, UploadItem } from "@/composables/upload/uploadItemTypes";
5+
import { validateUploadItem } from "@/composables/upload/uploadItemTypes";
6+
import { useHistoryStore } from "@/stores/historyStore";
7+
import { getHistoryUploadActionErrorMessage, getHistoryUploadBlockReason } from "@/utils/historyUpload";
58
import { errorMessageAsString } from "@/utils/simple-error";
9+
import { toApiUploadItem, uploadCollectionDatasets } from "@/utils/upload";
610

711
interface UploadBatchOperationsOptions {
812
autoRecover?: boolean;
@@ -17,42 +21,58 @@ export function useUploadBatchOperations(options: UploadBatchOperationsOptions =
1721
return uploadState.activeItems.value.find((item) => item.id === id);
1822
}
1923

24+
async function validateTargetHistory(targetHistoryId: string): Promise<string | null> {
25+
const historyStore = useHistoryStore();
26+
let history = historyStore.getHistoryById(targetHistoryId, false) ?? null;
27+
if (!history) {
28+
await historyStore.loadHistoryById(targetHistoryId);
29+
history = historyStore.getHistoryById(targetHistoryId, false) ?? null;
30+
}
31+
if (!history) {
32+
return null;
33+
}
34+
const blockReason = getHistoryUploadBlockReason(history);
35+
return blockReason ? getHistoryUploadActionErrorMessage(blockReason) : null;
36+
}
37+
2038
/**
2139
* Creates a dataset collection from uploaded datasets.
2240
*
2341
* @param batchId - Batch ID in upload state
42+
* @throws {Error} If the batch is not found, has missing data, or collection creation fails
2443
*/
2544
async function createCollection(batchId: string): Promise<void> {
2645
const batch = uploadState.getBatch(batchId);
2746
if (!batch) {
28-
console.error(`Batch not found: ${batchId}`);
29-
return;
47+
const errorMsg = `Batch not found: ${batchId}`;
48+
console.error(errorMsg);
49+
throw new Error(errorMsg);
3050
}
3151

3252
if (batch.collectionId) {
3353
return;
3454
}
3555

3656
if (!batch.datasetIds || batch.datasetIds.length === 0) {
37-
uploadState.setBatchError(batchId, "No dataset IDs available for collection creation");
38-
return;
57+
const errorMsg = "No dataset IDs available for collection creation";
58+
uploadState.setBatchError(batchId, errorMsg);
59+
throw new Error(errorMsg);
3960
}
4061

4162
const items = batch.uploadIds
4263
.map((id) => findUploadItem(id))
4364
.filter((item): item is UploadItem => item !== undefined);
4465

4566
if (items.length === 0) {
46-
uploadState.setBatchError(batchId, "No upload items available for collection creation");
47-
return;
67+
const errorMsg = "No upload items available for collection creation";
68+
uploadState.setBatchError(batchId, errorMsg);
69+
throw new Error(errorMsg);
4870
}
4971

5072
if (items.length !== batch.uploadIds.length) {
51-
uploadState.setBatchError(
52-
batchId,
53-
`Cannot create collection: only ${items.length} of ${batch.uploadIds.length} upload items found. This can happen after a page refresh. Please re-upload the files or manually create the collection.`,
54-
);
55-
return;
73+
const errorMsg = `Cannot create collection: only ${items.length} of ${batch.uploadIds.length} upload items found. This can happen after a page refresh. Please re-upload the files or manually create the collection.`;
74+
uploadState.setBatchError(batchId, errorMsg);
75+
throw new Error(errorMsg);
5676
}
5777

5878
uploadState.updateBatchStatus(batchId, "creating-collection");
@@ -88,6 +108,68 @@ export function useUploadBatchOperations(options: UploadBatchOperationsOptions =
88108
item.error = "Uploaded successfully, but collection creation failed";
89109
}
90110
});
111+
112+
throw new Error(errorMsg);
113+
}
114+
}
115+
116+
/**
117+
* Processes a collection batch using the direct HDCA creation path.
118+
* All items are uploaded together in one /api/tools/fetch request that
119+
* creates the collection atomically — no separate collection creation step.
120+
*
121+
* Used for non-library batches where all items can be fed to the upload API directly.
122+
*/
123+
async function processDirectBatch(batchId: string, ids: string[], items: NewUploadItem[]): Promise<void> {
124+
const batch = uploadState.getBatch(batchId);
125+
if (!batch) {
126+
console.error(`Batch not found: ${batchId}`);
127+
return;
128+
}
129+
130+
ids.forEach((id) => uploadState.setStatus(id, "uploading"));
131+
uploadState.updateBatchStatus(batchId, "uploading");
132+
133+
try {
134+
const historyError = await validateTargetHistory(batch.historyId);
135+
if (historyError) {
136+
throw new Error(historyError);
137+
}
138+
139+
for (const item of items) {
140+
const validationError = validateUploadItem(item);
141+
if (validationError) {
142+
throw new Error(validationError);
143+
}
144+
}
145+
146+
const apiItems = items.map((item) => toApiUploadItem(item));
147+
148+
await uploadCollectionDatasets(
149+
apiItems,
150+
{
151+
collectionName: batch.name,
152+
collectionType: batch.type,
153+
},
154+
{
155+
progress: (percentage) => {
156+
ids.forEach((id) => uploadState.updateProgress(id, percentage));
157+
},
158+
success: () => {
159+
ids.forEach((id) => uploadState.updateProgress(id, 100));
160+
uploadState.updateBatchStatus(batchId, "completed");
161+
},
162+
error: (err) => {
163+
const errorMsg = errorMessageAsString(err);
164+
ids.forEach((id) => uploadState.setError(id, errorMsg));
165+
uploadState.setBatchError(batchId, errorMsg);
166+
},
167+
},
168+
);
169+
} catch (err) {
170+
const errorMsg = errorMessageAsString(err);
171+
ids.forEach((id) => uploadState.setError(id, errorMsg));
172+
uploadState.setBatchError(batchId, errorMsg);
91173
}
92174
}
93175

@@ -166,6 +248,7 @@ export function useUploadBatchOperations(options: UploadBatchOperationsOptions =
166248
clearAll,
167249
clearCompleted,
168250
createCollection,
251+
processDirectBatch,
169252
recoverIncompleteBatches,
170253
retryCollectionCreation,
171254
};

0 commit comments

Comments (0)