diff --git a/desktop/core/src/desktop/settings.py b/desktop/core/src/desktop/settings.py
index 5229dc5d8bb..6823578c990 100644
--- a/desktop/core/src/desktop/settings.py
+++ b/desktop/core/src/desktop/settings.py
@@ -660,7 +660,7 @@ def is_oidc_configured():
 file_upload_handlers = []
 if is_chunked_fileuploader_enabled():
   file_upload_handlers = [
-    'hadoop.fs.upload.FineUploaderChunkedUploadHandler',
+    'hadoop.fs.upload.CustomDocumentsUploadHandler',
     'django.core.files.uploadhandler.MemoryFileUploadHandler',
     'django.core.files.uploadhandler.TemporaryFileUploadHandler',
   ]
diff --git a/desktop/core/src/desktop/urls.py b/desktop/core/src/desktop/urls.py
index 33d7d68939f..d1daad6a061 100644
--- a/desktop/core/src/desktop/urls.py
+++ b/desktop/core/src/desktop/urls.py
@@ -165,7 +165,7 @@
   re_path(r'^desktop/api2/user_preferences(?:/(?P<key>\w+))?/?$', desktop_api2.user_preferences, name="desktop.api2.user_preferences"),
   re_path(r'^desktop/api2/doc/export/?$', desktop_api2.export_documents),
-  re_path(r'^desktop/api2/doc/import/?$', desktop_api2.import_documents),
+  re_path(r'^desktop/api2/doc/import/?$', desktop_api2.import_documents, name='import_documents'),
   re_path(r'^desktop/api2/gist/create/?$', desktop_api2.gist_create),
   re_path(r'^desktop/api2/gist/open/?$', desktop_api2.gist_get),
diff --git a/desktop/libs/hadoop/src/hadoop/fs/upload.py b/desktop/libs/hadoop/src/hadoop/fs/upload.py
index d97f9eed767..271f8c7d46d 100644
--- a/desktop/libs/hadoop/src/hadoop/fs/upload.py
+++ b/desktop/libs/hadoop/src/hadoop/fs/upload.py
@@ -28,6 +28,7 @@
 from builtins import object
 
 from django.core.files.uploadhandler import FileUploadHandler, SkipFile, StopFutureHandlers, StopUpload, UploadFileException
+from django.urls import reverse
 from django.utils.translation import gettext as _
 
 import hadoop.cluster
@@ -221,6 +222,44 @@ def close(self):
     self._file.close()
 
 
+class CustomDocumentsUploadHandler(FileUploadHandler):
+  """
+  Delegates upload handling based on the request URL.
+
+  When the request URL starts with "/desktop/api2/doc/import" (indicating a document
+  import), all processing is delegated to HDFSfileUploadHandler.
+  Otherwise, it is delegated to FineUploaderChunkedUploadHandler.
+  """
+
+  def __init__(self, request, *args, **kwargs):
+    super().__init__(request, *args, **kwargs)
+    import_path = reverse('import_documents')
+
+    if request.path.startswith(import_path):
+      self.delegate = HDFSfileUploadHandler(request)
+    else:
+      self.delegate = FineUploaderChunkedUploadHandler(request, *args, **kwargs)
+
+  def new_file(self, field_name, file_name, *args, **kwargs):
+    result = None
+    try:
+      if hasattr(self.delegate, 'new_file'):
+        result = self.delegate.new_file(field_name, file_name, *args, **kwargs)
+    except StopFutureHandlers:
+      result = None
+    return result
+
+  def receive_data_chunk(self, raw_data, start):
+    if hasattr(self.delegate, 'receive_data_chunk'):
+      return self.delegate.receive_data_chunk(raw_data, start)
+    return raw_data
+
+  def file_complete(self, file_size):
+    if hasattr(self.delegate, 'file_complete'):
+      return self.delegate.file_complete(file_size)
+    return None
+
+
 class FineUploaderChunkedUploadHandler(FileUploadHandler):
   """
   A custom file upload handler for handling chunked uploads using FineUploader.
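
Note for reviewers: the short sketch below is illustrative only and not part of the patch. It shows how the routing decision inside CustomDocumentsUploadHandler is expected to resolve, using Django's RequestFactory. It assumes a configured Hue/Django environment (for example a "build/env/bin/hue shell" session) so that the 'import_documents' URL name added in urls.py can be reversed; the non-import path used here is just an arbitrary example of another upload URL.

    # Illustrative sketch, not part of the patch. Assumes a configured
    # Hue/Django environment so that reverse('import_documents') resolves.
    from django.test import RequestFactory
    from django.urls import reverse

    factory = RequestFactory()
    import_path = reverse('import_documents')

    # A document-import request matches the reversed import path, so
    # CustomDocumentsUploadHandler would pick HDFSfileUploadHandler as its delegate.
    assert factory.post('/desktop/api2/doc/import/').path.startswith(import_path)

    # Any other upload path (arbitrary example) falls through to
    # FineUploaderChunkedUploadHandler.
    assert not factory.post('/filebrowser/upload/file/').path.startswith(import_path)

Since settings.py now places CustomDocumentsUploadHandler first in the upload handler chain (replacing FineUploaderChunkedUploadHandler), document imports are handled by HDFSfileUploadHandler while all other chunked uploads continue through FineUploaderChunkedUploadHandler; naming the import URL lets the handler resolve the path via reverse('import_documents') instead of hard-coding '/desktop/api2/doc/import'.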