 // See the License for the specific language governing permissions and
 // limitations under the License.

-import { useEffect, useState } from 'react';
+import { useState } from 'react';
 import { getLastKnownConfig } from '../../../config/hueConfig';
 import useSaveData from '../useSaveData/useSaveData';
 import useQueueProcessor from '../useQueueProcessor/useQueueProcessor';
 import {
   DEFAULT_CHUNK_SIZE,
-  DEFAULT_CONCURRENT_MAX_CONNECTIONS,
-  FileUploadStatus
+  DEFAULT_CONCURRENT_MAX_CONNECTIONS
 } from '../../constants/storageBrowser';
 import useLoadData from '../useLoadData/useLoadData';
 import { TaskServerResponse, TaskStatus } from '../../../reactComponents/TaskBrowser/TaskBrowser';
 import {
-  createChunks,
   getChunksCompletePayload,
+  getItemProgress,
+  getItemsTotalProgress,
   getChunkItemPayload,
-  getChunkSinglePayload,
+  createChunks,
   getStatusHashMap,
-  getTotalChunk,
-  UploadChunkItem,
-  UploadItem
-} from './util';
-import { get } from '../../../api/utils';
-import { UPLOAD_AVAILABLE_SPACE_URL } from '../../../apps/storageBrowser/api';
-
-interface UseUploadQueueResponse {
-  addFiles: (item: UploadItem[]) => void;
-  removeFile: (item: UploadItem) => void;
+  addChunkToInProcess,
+  isSpaceAvailableInServer,
+  isAllChunksOfFileUploaded
+} from './utils';
+import {
+  RegularFile,
+  ChunkedFile,
+  FileVariables,
+  FileStatus,
+  ChunkedFilesInProgress
+} from './types';
+
+interface UseChunkUploadResponse {
+  addFiles: (item: RegularFile[]) => void;
+  cancelFile: (item: RegularFile['uuid']) => void;
   isLoading: boolean;
 }

 interface ChunkUploadOptions {
   concurrentProcess?: number;
-  onStatusUpdate: (item: UploadItem, newStatus: FileUploadStatus) => void;
+  updateFileVariables: (itemId: ChunkedFile['uuid'], variables: FileVariables) => void;
   onComplete: () => void;
 }

 const useChunkUpload = ({
   concurrentProcess = DEFAULT_CONCURRENT_MAX_CONNECTIONS,
-  onStatusUpdate,
+  updateFileVariables,
   onComplete
-}: ChunkUploadOptions): UseUploadQueueResponse => {
+}: ChunkUploadOptions): UseChunkUploadResponse => {
   const config = getLastKnownConfig();
   const chunkSize = config?.storage_browser?.file_upload_chunk_size ?? DEFAULT_CHUNK_SIZE;
-  const [processingItem, setProcessingItem] = useState<UploadItem>();
-  const [pendingUploadItems, setPendingUploadItems] = useState<UploadItem[]>([]);
-  const [awaitingStatusItems, setAwaitingStatusItems] = useState<UploadItem[]>([]);
-
-  const onError = () => {
-    if (processingItem) {
-      onStatusUpdate(processingItem, FileUploadStatus.Failed);
-      setProcessingItem(undefined);
-    }
-  };
-
-  const onSuccess = (item: UploadItem) => () => {
-    setAwaitingStatusItems(prev => [...prev, item]);
-    setProcessingItem(undefined);
-  };
+  const [filesWaitingFinalStatus, setFilesWaitingFinalStatus] = useState<ChunkedFile['uuid'][]>([]);
+  const [filesInProgress, setFilesInProgress] = useState<ChunkedFilesInProgress>({});

   const { save } = useSaveData(undefined, {
     postOptions: {
       qsEncodeData: false,
       headers: { 'Content-Type': 'multipart/form-data' }
-    },
-    onError
+    }
   });

-  const updateItemStatus = (serverResponse: TaskServerResponse[]) => {
-    const statusMap = getStatusHashMap(serverResponse);
-
-    const remainingItems = awaitingStatusItems.filter(item => {
-      const status = statusMap[item.uuid];
-      if (status === TaskStatus.Success || status === TaskStatus.Failure) {
-        const ItemStatus =
-          status === TaskStatus.Success ? FileUploadStatus.Uploaded : FileUploadStatus.Failed;
-        onStatusUpdate(item, ItemStatus);
-        return false;
+  const processTaskServerResponse = (response: TaskServerResponse[]) => {
+    const statusMap = getStatusHashMap(response);
+    setFilesWaitingFinalStatus(prev => {
+      const remainingFiles = prev.filter(uuid => {
+        const fileStatus = statusMap[uuid];
+        if (fileStatus === TaskStatus.Success || fileStatus === TaskStatus.Failure) {
+          const mappedStatus =
+            fileStatus === TaskStatus.Success ? FileStatus.Uploaded : FileStatus.Failed;
+          updateFileVariables(uuid, { status: mappedStatus });
+          return false; // remove the file as final status is received
+        }
+        return true;
+      });
+      if (remainingFiles.length === 0) {
+        onComplete();
       }
-      return true;
+      return remainingFiles;
     });
-    if (remainingItems.length === 0) {
-      onComplete();
-    }
-    setAwaitingStatusItems(remainingItems);
   };

-  const { data: tasksStatus } = useLoadData<TaskServerResponse[]>(
-    '/desktop/api2/taskserver/get_taskserver_tasks/',
-    {
-      pollInterval: awaitingStatusItems.length ? 5000 : undefined,
-      skip: !awaitingStatusItems.length,
-      transformKeys: 'none'
-    }
-  );
+  useLoadData<TaskServerResponse[]>('/desktop/api2/taskserver/get_taskserver_tasks/', {
+    pollInterval: 5000,
+    skip: filesWaitingFinalStatus.length === 0,
+    onSuccess: processTaskServerResponse,
+    transformKeys: 'none'
+  });

-  useEffect(() => {
-    if (tasksStatus) {
-      updateItemStatus(tasksStatus);
-    }
-  }, [tasksStatus]);
-
-  const onChunksUploadComplete = async () => {
-    if (processingItem) {
-      const { url, payload } = getChunksCompletePayload(processingItem, chunkSize);
-      return save(payload, {
-        url,
-        onSuccess: onSuccess(processingItem)
-      });
-    }
+  const handleAllChunksUploaded = (chunk: ChunkedFile) => {
+    const { url, payload } = getChunksCompletePayload(chunk);
+    return save(payload, {
+      url,
+      onSuccess: () => setFilesWaitingFinalStatus(prev => [...prev, chunk.uuid]),
+      onError: error => updateFileVariables(chunk.uuid, { status: FileStatus.Failed, error })
+    });
   };

-  const uploadChunk = async (chunkItem: UploadChunkItem) => {
-    const { url, payload } = getChunkItemPayload(chunkItem, chunkSize);
-    return save(payload, { url });
+  const onChunkUploadSuccess = (chunk: ChunkedFile) => () => {
+    setFilesInProgress(prev => {
+      const isAllChunksUploaded = isAllChunksOfFileUploaded(prev, chunk);
+      if (isAllChunksUploaded) {
+        handleAllChunksUploaded(chunk);
+        delete prev[chunk.uuid];
+      }
+
+      return prev;
+    });
   };

-  const { enqueue } = useQueueProcessor<UploadChunkItem>(uploadChunk, {
-    concurrentProcess,
-    onSuccess: onChunksUploadComplete
-  });
+  const onUploadProgress = (chunk: ChunkedFile) => (chunkProgress: ProgressEvent) => {
+    setFilesInProgress(prev => {
+      const allChunks = prev[chunk.uuid] || [];
+      const chunk1 = allChunks.find(c => c.chunkNumber === chunk.chunkNumber);
+      if (!chunk1) {
+        return prev;
+      }
+      chunk1.progress = getItemProgress(chunkProgress);

-  const uploadItemInChunks = (item: UploadItem) => {
-    const chunks = createChunks(item, chunkSize);
-    return enqueue(chunks);
+      const totalProgress = getItemsTotalProgress(chunk, allChunks);
+      updateFileVariables(chunk.uuid, { progress: totalProgress });
+      return { ...prev, [chunk.uuid]: allChunks };
+    });
   };

-  const uploadItemInSingleChunk = async (item: UploadItem) => {
-    const { url, payload } = getChunkSinglePayload(item, chunkSize);
+  const uploadChunkToServer = async (chunk: ChunkedFile) => {
+    const { url, payload } = getChunkItemPayload(chunk);
     return save(payload, {
       url,
-      onSuccess: onSuccess(item)
+      onSuccess: onChunkUploadSuccess(chunk),
+      onError: error => updateFileVariables(chunk.uuid, { status: FileStatus.Failed, error }),
+      postOptions: { onUploadProgress: onUploadProgress(chunk) }
     });
   };

-  const checkAvailableSpace = async (fileSize: number) => {
-    const { upload_available_space: availableSpace } = await get<{
-      upload_available_space: number;
-    }>(UPLOAD_AVAILABLE_SPACE_URL);
-    return availableSpace >= fileSize;
-  };
-
-  const uploadItem = async (item: UploadItem) => {
-    const isSpaceAvailable = await checkAvailableSpace(item.file.size);
-    if (!isSpaceAvailable) {
-      onStatusUpdate(item, FileUploadStatus.Failed);
-      return Promise.resolve();
+  const processChunkedFile = async (chunk: ChunkedFile): Promise<void> => {
+    const isFirstChunk = !filesInProgress[chunk.uuid];
+    if (isFirstChunk) {
+      updateFileVariables(chunk.uuid, { status: FileStatus.Uploading });
+      const isUploadPossible = await isSpaceAvailableInServer(chunk.totalSize);
+      if (!isUploadPossible) {
+        const error = new Error('Upload server ran out of space. Try again later.');
+        cancelFile(chunk.uuid);
+        return updateFileVariables(chunk.uuid, { status: FileStatus.Failed, error });
+      }
     }
+    setFilesInProgress(prev => addChunkToInProcess(prev, chunk));

-    onStatusUpdate(item, FileUploadStatus.Uploading);
-    const chunks = getTotalChunk(item.file.size, chunkSize);
-    if (chunks === 1) {
-      return uploadItemInSingleChunk(item);
-    }
-    return uploadItemInChunks(item);
+    return uploadChunkToServer(chunk);
   };

-  const addFiles = (newItems: UploadItem[]) => {
-    setPendingUploadItems(prev => [...prev, ...newItems]);
-  };
+  const { enqueue, dequeue } = useQueueProcessor<ChunkedFile>(processChunkedFile, {
+    concurrentProcess
+  });

-  const removeFile = (item: UploadItem) => {
-    setPendingUploadItems(prev => prev.filter(i => i.uuid !== item.uuid));
+  const addFiles = (newFiles: RegularFile[]) => {
+    newFiles.forEach(file => {
+      const chunks = createChunks(file, chunkSize);
+      enqueue(chunks);
+    });
   };

-  useEffect(() => {
-    // Ensures one file is broken down in chunks and uploaded to the server
-    if (!processingItem && pendingUploadItems.length) {
-      const item = pendingUploadItems[0];
-      setProcessingItem(item);
-      setPendingUploadItems(prev => prev.slice(1));
-      uploadItem(item);
-    }
-  }, [pendingUploadItems, processingItem]);
+  const cancelFile = (fileUuid: ChunkedFile['uuid']) => dequeue(fileUuid, 'uuid');

-  return { addFiles, removeFile, isLoading: !!processingItem || !!pendingUploadItems.length };
+  return {
+    addFiles,
+    cancelFile,
+    isLoading: !!(filesWaitingFinalStatus.length || filesInProgress.length)
+  };
 };

 export default useChunkUpload;
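
For context, here is a minimal, hypothetical sketch of how a caller could wire up the reworked hook, showing how the new addFiles / cancelFile / updateFileVariables contract fits together. The consumer hook name (useUploadTracker), the relative import paths, and the assumption that FileVariables fields can be merged into a local record are illustrative only and are not part of this change:

import { useState } from 'react';
import useChunkUpload from './useChunkUpload';
import { RegularFile, FileVariables } from './types';

// Hypothetical consumer hook (not part of this change): it keeps a local map of
// upload rows keyed by uuid and lets useChunkUpload push status/progress/error
// updates into that map through updateFileVariables.
const useUploadTracker = () => {
  const [files, setFiles] = useState<Record<string, RegularFile & Partial<FileVariables>>>({});

  // Called by useChunkUpload whenever a file's status, progress or error changes.
  const updateFileVariables = (uuid: RegularFile['uuid'], variables: FileVariables) => {
    setFiles(prev => (prev[uuid] ? { ...prev, [uuid]: { ...prev[uuid], ...variables } } : prev));
  };

  const { addFiles, cancelFile, isLoading } = useChunkUpload({
    updateFileVariables,
    onComplete: () => {
      // Every enqueued file has reached a final Uploaded/Failed status.
    }
  });

  // Register the selected files locally, then hand them to the chunked upload queue.
  const upload = (selected: RegularFile[]) => {
    setFiles(prev => {
      const next = { ...prev };
      selected.forEach(file => {
        next[file.uuid] = { ...file };
      });
      return next;
    });
    addFiles(selected);
  };

  return { files, upload, cancelFile, isLoading };
};

export default useUploadTracker;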