import * as FileUtilities from "~/common/file-utilities";
import * as Logging from "~/common/logging";
import * as Actions from "~/common/actions";
// NOTE(amine): utilities

// Builds a stable identity key for a file from its lastModified timestamp
// and name — used everywhere as the key into queue/cache/uploaded maps.
export const getFileKey = (file) => {
  const { lastModified, name } = file;
  return `${lastModified}-${name}`;
};
// NOTE(review): module-level mutable singleton — all providers created via
// createUploadProvider share this one upload pipeline.
let UploadStore = {
  // Pending upload descriptors, drained FIFO by scheduleQueueUpload.
  queue: [],
  // Failed uploads keyed by getFileKey(file), kept so `retry` can re-enqueue them.
  failedFilesCache: {},
  // True while a file is in flight; prevents concurrent queue drains.
  isUploading: false,
  // Keys (getFileKey) of files already uploaded (including server-side
  // duplicates); used to skip re-adding them to the queue.
  uploadedFiles: {},
};
// NOTE(review): tracks the file currently in flight and its abort handle.
// Reassigned wholesale by resetAbortUploadState after each upload settles.
let UploadAbort = {
  // getFileKey of the file currently uploading, or null when idle.
  currentUploadingFile: null,
  // Abort callback — presumably installed by FileUtilities.upload, which
  // receives this object as `uploadAbort` (TODO confirm); null when no
  // upload is cancellable.
  abort: null,
};
// NOTE(amine): queue utilities

// Returns the live queue array (mutated in place by shift/push elsewhere).
const getUploadQueue = () => UploadStore.queue;

// Enqueue an upload descriptor.
// FIX(review): this used to destructure and store `bucket`, but every caller
// passes `bucketName` (see addToUploadQueue) and the consumer in
// scheduleQueueUpload reads `bucketName` off the shifted entry — so the bucket
// name was silently dropped. Store it under the key the pipeline expects.
const pushToUploadQueue = ({ file, slate, bucketName }) =>
  UploadStore.queue.push({ file, slate, bucketName });

// Drops every queued entry.
const resetUploadQueue = () => (UploadStore.queue = []);

// Removes the queued entry whose file maps (via getFileKey) to `fileKey`.
const removeFromUploadQueue = ({ fileKey }) =>
  (UploadStore.queue = UploadStore.queue.filter(({ file }) => getFileKey(file) !== fileKey));
// NOTE(amine): failedFilesCache utilities

// Remember a failed upload (keyed by getFileKey) so `retry` can find it later.
const storeFileInCache = ({ file, slate, bucketName }) => {
  const entry = { file, slate, bucketName };
  UploadStore.failedFilesCache[getFileKey(file)] = entry;
  return entry;
};

// Forget a previously failed upload.
const removeFileFromCache = ({ fileKey }) => {
  return delete UploadStore.failedFilesCache[fileKey];
};

// Look up a failed upload; yields an empty object when the key is unknown.
const getFileFromCache = ({ fileKey }) => {
  const cached = UploadStore.failedFilesCache[fileKey];
  return cached || {};
};
// NOTE(amine): UploadAbort utilities

// Mark `fileKey` as the file currently in flight.
const registerFileUploading = ({ fileKey }) => {
  UploadAbort.currentUploadingFile = fileKey;
  return fileKey;
};

// Replace the abort state with a fresh idle object.
const resetAbortUploadState = () => {
  UploadAbort = { currentUploadingFile: null, abort: null };
  return UploadAbort;
};

// Invoke the abort handle for the in-flight upload (callers must check
// canCurrentFileBeAborted / isFileCurrentlyUploading first).
const abortCurrentFileUpload = () => {
  return UploadAbort.abort();
};

// Truthy only when a file is in flight AND an abort handle is installed.
const canCurrentFileBeAborted = () => {
  const { currentUploadingFile, abort } = UploadAbort;
  return currentUploadingFile && abort;
};

// Truthy only when `fileKey` is the in-flight file and it can be aborted.
const isFileCurrentlyUploading = ({ fileKey }) => {
  if (fileKey !== UploadAbort.currentUploadingFile) return false;
  return UploadAbort.abort;
};
// NOTE(amine): upload factory function

/**
 * Creates an upload provider around the module-level upload queue.
 * Files are uploaded one at a time, FIFO; all callbacks are optional.
 *
 * @param {Function} [onStart] called once when the queue starts draining
 * @param {Function} [onFinish] called when the queue is fully drained
 * @param {Function} [onAddedToQueue] called with each file accepted into the queue
 * @param {Function} [onProgress] called with { fileKey, loaded } during an upload
 * @param {Function} [onSuccess] called with { fileKey, cid } after a new upload
 * @param {Function} [onError] called with { fileKey } when an upload fails
 * @param {Function} [onCancel] called with { fileKeys } when uploads are cancelled
 * @param {Function} [onDuplicate] called with { fileKey, cid } when the server
 *   reports the file already exists (createFile `skipped` > 0)
 * @returns {{ upload: Function, retry: Function, cancel: Function, cancelAll: Function }}
 */
export function createUploadProvider({
  onStart,
  onFinish,
  onAddedToQueue,
  onProgress,
  onSuccess,
  onError,
  onCancel,
  onDuplicate,
}) {
  // Uploads the head of the queue, then recurses until the queue is empty.
  // Only one instance runs at a time (guarded by UploadStore.isUploading).
  const scheduleQueueUpload = async () => {
    const uploadQueue = getUploadQueue();
    if (UploadStore.isUploading || uploadQueue.length === 0) return;

    const { file, slate, bucketName } = uploadQueue.shift() || {};
    const fileKey = getFileKey(file);

    UploadStore.isUploading = true;
    registerFileUploading({ fileKey });

    try {
      const response = await FileUtilities.upload({
        file,
        bucketName,
        uploadAbort: UploadAbort,
        onProgress: (e) => onProgress({ fileKey, loaded: e.loaded }),
      });

      // FIX(review): validate the response before reading `.aborted` — a null
      // response previously crashed with a TypeError, and `new Error(response)`
      // produced an unreadable "[object Object]" message.
      if (!response || response.error) {
        throw new Error(response?.error || "upload failed");
      }

      if (!response.aborted) {
        // TODO(amine): merge createFile and upload endpoints
        const createResponse = await Actions.createFile({ files: [response], slate });
        if (!createResponse || createResponse.error) {
          throw new Error(createResponse?.error || "createFile failed");
        }

        const isDuplicate = createResponse?.data?.skipped > 0;
        const fileCid = createResponse.data?.cid;

        // Either way the file now exists server-side; remember it so it is
        // skipped if added again.
        UploadStore.uploadedFiles[fileKey] = true;
        if (isDuplicate) {
          if (onDuplicate) onDuplicate({ fileKey, cid: fileCid });
        } else {
          if (onSuccess) onSuccess({ fileKey, cid: fileCid });
        }
      }
    } catch (e) {
      // Keep the descriptor around so the user can retry this file.
      storeFileInCache({ file, slate, bucketName });
      if (onError) onError({ fileKey });
      Logging.error(e);
    }

    UploadStore.isUploading = false;
    resetAbortUploadState();

    if (getUploadQueue().length > 0) {
      scheduleQueueUpload();
      return;
    }

    if (onFinish) onFinish();
  };

  // Public entry point: enqueue files and kick off the queue if idle.
  const addToUploadQueue = ({ files, slate, bucketName }) => {
    if (!files || !files.length) return;

    for (let i = 0; i < files.length; i++) {
      const fileKey = getFileKey(files[i]);
      const doesQueueIncludeFile = getUploadQueue().some(
        ({ file }) => fileKey === getFileKey(file)
      );
      const isUploaded = fileKey in UploadStore.uploadedFiles;
      // NOTE(amine): skip the file if already uploaded or is in queue
      if (doesQueueIncludeFile || isUploaded) continue;

      // NOTE(amine): if the added file has failed before, remove it from failedFilesCache
      if (fileKey in UploadStore.failedFilesCache) removeFileFromCache({ fileKey });

      if (onAddedToQueue) onAddedToQueue(files[i]);
      pushToUploadQueue({ file: files[i], slate, bucketName });
    }

    const isQueueEmpty = getUploadQueue().length === 0;
    if (!UploadStore.isUploading && !isQueueEmpty) {
      // FIX(review): the queue drain used to be gated on `onStart` being
      // provided, so callers without that callback never started uploading.
      if (onStart) onStart();
      scheduleQueueUpload();
    }
  };

  // Re-enqueue a previously failed file.
  const retry = ({ fileKey }) => {
    const { file, slate, bucketName } = getFileFromCache({ fileKey });
    // FIX(review): getFileFromCache returns {} for unknown keys; bail out
    // instead of crashing on getFileKey(undefined) inside addToUploadQueue.
    if (!file) return;
    addToUploadQueue({ files: [file], slate, bucketName });
  };

  // Cancel a single file: abort it if it is in flight, otherwise dequeue it.
  const cancel = ({ fileKey }) => {
    if (onCancel) onCancel({ fileKeys: [fileKey] });

    if (isFileCurrentlyUploading({ fileKey })) {
      abortCurrentFileUpload();
      return;
    }

    removeFromUploadQueue({ fileKey });
  };

  // Cancel everything: abort the in-flight upload and clear the queue.
  const cancelAll = () => {
    const queuedFileKeys = getUploadQueue().map(({ file }) => getFileKey(file));
    // FIX(review): don't report a null key when nothing is currently uploading.
    const currentFileKey = UploadAbort.currentUploadingFile;
    const fileKeys = currentFileKey ? [currentFileKey, ...queuedFileKeys] : queuedFileKeys;
    if (onCancel) onCancel({ fileKeys });

    if (canCurrentFileBeAborted()) abortCurrentFileUpload();
    resetUploadQueue();
  };

  return {
    upload: addToUploadQueue,
    retry,
    cancel,
    cancelAll,
  };
}