diff --git a/frontend/src/components/file-uploader/file-uploader.js b/frontend/src/components/file-uploader/file-uploader.js
index c6728478b50..6c296b2acb1 100644
--- a/frontend/src/components/file-uploader/file-uploader.js
+++ b/frontend/src/components/file-uploader/file-uploader.js
@@ -84,6 +84,7 @@ class FileUploader extends React.Component {
     if (this.props.dragAndDrop === true) {
       this.resumable.enableDropOnDocument();
     }
+    this.resumable.opts.target = seafileAPI.server + '/api2/repos/' + this.props.repoID + '/upload-file/';
 
     this.bindCallbackHandler();
     this.bindEventHandler();
@@ -200,27 +201,13 @@ class FileUploader extends React.Component {
         });
       } else {
         this.setUploadFileList(this.resumable.files);
-        let { repoID, path } = this.props;
-        seafileAPI.getFileServerUploadLink(repoID, path).then(res => {
-          this.resumable.opts.target = res.data + '?ret-json=1';
-          this.resumableUpload(resumableFile);
-        }).catch(error => {
-          let errMessage = Utils.getErrorMsg(error);
-          toaster.danger(errMessage);
-        });
+        this.resumableUpload(resumableFile);
       }
     } else {
       this.setUploadFileList(this.resumable.files);
       if (!this.isUploadLinkLoaded) {
         this.isUploadLinkLoaded = true;
-        let { repoID, path } = this.props;
-        seafileAPI.getFileServerUploadLink(repoID, path).then(res => {
-          this.resumable.opts.target = res.data + '?ret-json=1';
-          this.resumable.upload();
-        }).catch(error => {
-          let errMessage = Utils.getErrorMsg(error);
-          toaster.danger(errMessage);
-        });
+        this.resumable.upload();
       }
     }
   };
@@ -555,50 +542,35 @@ class FileUploader extends React.Component {
   };
 
   onUploadRetry = (resumableFile) => {
+    let retryFileList = this.state.retryFileList.filter(item => {
+      return item.uniqueIdentifier !== resumableFile.uniqueIdentifier;
+    });
+    let uploadFileList = this.state.uploadFileList.map(item => {
+      if (item.uniqueIdentifier === resumableFile.uniqueIdentifier) {
+        item.error = null;
+        this.retryUploadFile(item);
+      }
+      return item;
+    });
-    seafileAPI.getFileServerUploadLink(this.props.repoID, this.props.path).then(res => {
-      this.resumable.opts.target = res.data + '?ret-json=1';
-
-      let retryFileList = this.state.retryFileList.filter(item => {
-        return item.uniqueIdentifier !== resumableFile.uniqueIdentifier;
-      });
-      let uploadFileList = this.state.uploadFileList.map(item => {
-        if (item.uniqueIdentifier === resumableFile.uniqueIdentifier) {
-          item.error = null;
-          this.retryUploadFile(item);
-        }
-        return item;
-      });
-
-      this.setState({
-        retryFileList: retryFileList,
-        uploadFileList: uploadFileList
-      });
-    }).catch(error => {
-      let errMessage = Utils.getErrorMsg(error);
-      toaster.danger(errMessage);
+    this.setState({
+      retryFileList: retryFileList,
+      uploadFileList: uploadFileList
     });
   };
 
   onUploadRetryAll = () => {
+    this.state.retryFileList.forEach(item => {
+      item.error = false;
+      this.retryUploadFile(item);
+    });
-    seafileAPI.getFileServerUploadLink(this.props.repoID, this.props.path).then(res => {
-      this.resumable.opts.target = res.data + '?ret-json=1';
-      this.state.retryFileList.forEach(item => {
-        item.error = false;
-        this.retryUploadFile(item);
-      });
-
-      let uploadFileList = this.state.uploadFileList.slice(0);
-      this.setState({
-        retryFileList: [],
-        uploadFileList: uploadFileList
-      });
-
-    }).catch(error => {
-      let errMessage = Utils.getErrorMsg(error);
-      toaster.danger(errMessage);
+    let uploadFileList = this.state.uploadFileList.slice(0);
+    this.setState({
+      retryFileList: [],
+      uploadFileList: uploadFileList
     });
+
   };
 
   retryUploadFile = (resumableFile) => {
@@ -634,40 +606,24 @@ class FileUploader extends React.Component {
   };
 
   replaceRepetitionFile = () => {
-    let { repoID, path } = this.props;
-    seafileAPI.getUpdateLink(repoID, path).then(res => {
-      this.resumable.opts.target = res.data;
-
-      let resumableFile = this.resumable.files[this.resumable.files.length - 1];
-      resumableFile.formData['replace'] = 1;
-      resumableFile.formData['target_file'] = resumableFile.formData.parent_dir + resumableFile.fileName;
-      this.setState({ isUploadRemindDialogShow: false });
-      this.setUploadFileList(this.resumable.files);
-      this.resumable.upload();
-    }).catch(error => {
-      let errMessage = Utils.getErrorMsg(error);
-      toaster.danger(errMessage);
-    });
+    let resumableFile = this.resumable.files[this.resumable.files.length - 1];
+    resumableFile.formData['replace'] = 1;
+    resumableFile.formData['target_file'] = resumableFile.formData.parent_dir + resumableFile.fileName;
+    this.setState({ isUploadRemindDialogShow: false });
+    this.setUploadFileList(this.resumable.files);
+    this.resumable.upload();
   };
 
   uploadFile = () => {
     let resumableFile = this.resumable.files[this.resumable.files.length - 1];
-    let { repoID, path } = this.props;
-    seafileAPI.getFileServerUploadLink(repoID, path).then((res) => { // get upload link
-      this.resumable.opts.target = res.data + '?ret-json=1';
-      this.setState({
-        isUploadRemindDialogShow: false,
-        isUploadProgressDialogShow: true,
-        uploadFileList: [...this.state.uploadFileList, resumableFile]
-      }, () => {
-        this.resumable.upload();
-      });
-      Utils.registerGlobalVariable('uploader', 'isUploadProgressDialogShow', true);
-
-    }).catch(error => {
-      let errMessage = Utils.getErrorMsg(error);
-      toaster.danger(errMessage);
+    this.setState({
+      isUploadRemindDialogShow: false,
+      isUploadProgressDialogShow: true,
+      uploadFileList: [...this.state.uploadFileList, resumableFile]
+    }, () => {
+      this.resumable.upload();
     });
+    Utils.registerGlobalVariable('uploader', 'isUploadProgressDialogShow', true);
   };
 
   cancelFileUpload = () => {
diff --git a/seahub/api2/authentication.py b/seahub/api2/authentication.py
index 460ec4ce38c..9b6e20a9c6a 100644
--- a/seahub/api2/authentication.py
+++ b/seahub/api2/authentication.py
@@ -2,7 +2,7 @@
 import datetime
 import logging
 from rest_framework import status
-from rest_framework.authentication import BaseAuthentication
+from rest_framework.authentication import BaseAuthentication, SessionAuthentication
 from rest_framework.exceptions import APIException
 from seaserv import ccnet_api
 
@@ -216,3 +216,19 @@ def authenticate(self, request):
             return None
 
         return user, auth[1]
+
+
+class CsrfExemptSessionAuthentication(SessionAuthentication):
+    """
+    request.POST is accessed by CsrfViewMiddleware, which is enabled by default,
+    so you would normally need csrf_exempt() on the view in order to be able to
+    change the upload handlers.
+
+    DRF's SessionAuthentication uses Django's session framework for
+    authentication, which requires the CSRF check.
+
+    This class overrides enforce_csrf to solve the above problem.
+    """
+
+    def enforce_csrf(self, request):
+        return  # skip the CSRF check that SessionAuthentication would otherwise perform
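For context on how the class above is meant to be used: enforce_csrf() is the hook SessionAuthentication calls to run Django's CSRF validation, so returning early disables only that check while keeping session-based user lookup. Below is a minimal sketch of the intended pattern; the view name and route are hypothetical and only illustrate why the exemption is needed when swapping upload handlers.

    from django.core.files.uploadhandler import TemporaryFileUploadHandler
    from rest_framework.permissions import IsAuthenticated
    from rest_framework.response import Response
    from rest_framework.views import APIView

    from seahub.api2.authentication import CsrfExemptSessionAuthentication, TokenAuthentication

    class ExampleUploadView(APIView):
        # Session auth without CSRF enforcement, plus token auth for API clients.
        authentication_classes = (TokenAuthentication, CsrfExemptSessionAuthentication)
        permission_classes = (IsAuthenticated,)

        def post(self, request):
            # Changing upload handlers must happen before the request body is
            # parsed; Django's CSRF check would read request.POST too early,
            # hence the exemption above.
            request.upload_handlers = [TemporaryFileUploadHandler(request=request)]
            uploaded = request.FILES.get('file')
            # TemporaryFileUploadHandler always spools to disk, so the uploaded
            # file exposes temporary_file_path().
            return Response({'tmp_path': uploaded.temporary_file_path()})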
diff --git a/seahub/api2/urls.py b/seahub/api2/urls.py
index b8b07f9d069..9f96a7a8315 100644
--- a/seahub/api2/urls.py
+++ b/seahub/api2/urls.py
@@ -50,6 +50,7 @@
     re_path(r'^repos/(?P<repo_id>[-0-9a-f]{36})/upload-shared-links/$', RepoUploadSharedLinks.as_view(), name="api2-repo-upload-shared-links"),
     re_path(r'^repos/(?P<repo_id>[-0-9a-f]{36})/upload-shared-links/(?P<token>[a-f0-9]+)/$', RepoUploadSharedLink.as_view(), name="api2-repo-upload-shared-link"),
     re_path(r'^repos/(?P<repo_id>[-0-9a-f]{36})/upload-link/$', UploadLinkView.as_view()),
+    re_path(r'^repos/(?P<repo_id>[-0-9a-f]{36})/upload-file/$', UploadFile.as_view()),
     re_path(r'^repos/(?P<repo_id>[-0-9a-f]{36})/update-link/$', UpdateLinkView.as_view()),
     re_path(r'^repos/(?P<repo_id>[-0-9a-f]{36})/upload-blks-link/$', UploadBlksLinkView.as_view()),
     re_path(r'^repos/(?P<repo_id>[-0-9a-f]{36})/update-blks-link/$', UpdateBlksLinkView.as_view()),
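The new route gives clients a single, stable upload URL per library instead of a short-lived fileserver link. A minimal sketch of exercising it with the requests library follows; the server address, token, and repo id are placeholders, and the response simply relays fileserver's ret-json answer.

    import requests

    SERVER = 'https://seafile.example.com'   # placeholder
    TOKEN = 'your-api-token'                 # placeholder
    REPO_ID = 'repo-uuid-36-chars'           # placeholder library id

    with open('report.sdoc', 'rb') as fp:
        resp = requests.post(
            '%s/api2/repos/%s/upload-file/' % (SERVER, REPO_ID),
            headers={'Authorization': 'Token %s' % TOKEN},
            data={'parent_dir': '/', 'relative_path': '', 'replace': 'false'},
            files={'file': fp},
        )
    # On success the view returns the JSON list produced by fileserver,
    # e.g. [{"name": "report.sdoc", "id": "...", "size": 1024}].
    print(resp.status_code, resp.json())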
diff --git a/seahub/api2/views.py b/seahub/api2/views.py
index c566349723a..627f77f23b6 100644
--- a/seahub/api2/views.py
+++ b/seahub/api2/views.py
@@ -8,8 +8,12 @@
 import datetime
 import posixpath
 import re
+import uuid
 from dateutil.relativedelta import relativedelta
 from urllib.parse import quote
+import requests
+import shutil
+from zipfile import is_zipfile, ZipFile
 
 from rest_framework import parsers
 from rest_framework import status
@@ -29,9 +33,10 @@
 from django.template.defaultfilters import filesizeformat
 from django.utils import timezone
 from django.utils.translation import gettext as _
+from django.core.files.uploadhandler import TemporaryFileUploadHandler
 
 from .throttling import ScopedRateThrottle, AnonRateThrottle, UserRateThrottle
-from .authentication import TokenAuthentication
+from .authentication import TokenAuthentication, CsrfExemptSessionAuthentication
 from .serializers import AuthTokenSerializer
 from .utils import get_diff_details, to_python_boolean, \
     api_error, get_file_size, prepare_starred_files, is_web_request, \
@@ -109,6 +114,7 @@
     ENABLE_RESET_ENCRYPTED_REPO_PASSWORD, SHARE_LINK_EXPIRE_DAYS_MAX, \
     SHARE_LINK_EXPIRE_DAYS_MIN, SHARE_LINK_EXPIRE_DAYS_DEFAULT
 from seahub.subscription.utils import subscription_check
+from seahub.seadoc.utils import get_seadoc_file_uuid, gen_seadoc_image_parent_path, get_seadoc_asset_upload_link
 
 try:
     from seahub.settings import CLOUD_MODE
@@ -1999,6 +2005,145 @@ def get(self, request, repo_id, format=None):
 
         return Response(url)
 
+
+class UploadFile(APIView):
+    authentication_classes = (TokenAuthentication, CsrfExemptSessionAuthentication)
+    permission_classes = (IsAuthenticated,)
+    throttle_classes = (UserRateThrottle, )
+
+    def post(self, request, repo_id):
+        # Use TemporaryFileUploadHandler, which produces TemporaryUploadedFile
+        # objects; TemporaryUploadedFile provides temporary_file_path().
+        # Replacing upload_handlers requires exempting the CSRF check.
+        request.upload_handlers = [TemporaryFileUploadHandler(request=request)]
+        username = request.user.username
+        relative_path = request.data.get('relative_path', '/').strip('/')
+        parent_dir = request.data.get('parent_dir', '/')
+        replace = request.data.get('replace', 'False')
+        try:
+            replace = to_python_boolean(replace)
+        except ValueError:
+            error_msg = 'replace invalid.'
+            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
+
+        file = request.FILES.get('file', None)
+        if not file:
+            error_msg = 'file can not be found.'
+            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
+
+        repo = seafile_api.get_repo(repo_id)
+        if not repo:
+            error_msg = 'Library %s not found.' % repo_id
+            return api_error(status.HTTP_404_NOT_FOUND, error_msg)
+
+        dir_id = seafile_api.get_dir_id_by_path(repo_id, parent_dir)
+        if not dir_id:
+            error_msg = 'Folder %s not found.' % parent_dir
+            return api_error(status.HTTP_404_NOT_FOUND, error_msg)
+
+        if parse_repo_perm(check_folder_permission(request, repo_id, parent_dir)).can_upload is False:
+            return api_error(status.HTTP_403_FORBIDDEN, 'You do not have permission to access this folder.')
+
+        if check_quota(repo_id) < 0:
+            return api_error(HTTP_443_ABOVE_QUOTA, _("Out of quota."))
+
+        uploaded_temp_path = file.temporary_file_path()
+
+        filename = file.name
+        extension = filename.split('.')[-1].lower()
+
+        obj_id = json.dumps({'parent_dir': parent_dir})
+        try:
+            token = seafile_api.get_fileserver_access_token(repo_id, obj_id, 'upload', username, use_onetime=False)
+        except Exception as e:
+            if str(e) == 'Too many files in library.':
+                error_msg = _("The number of files in library exceeds the limit")
+                return api_error(HTTP_447_TOO_MANY_FILES_IN_LIBRARY, error_msg)
+            else:
+                logger.error(e)
+                error_msg = 'Internal Server Error'
+                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
+
+        if not token:
+            error_msg = 'Internal Server Error'
+            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
+
+        upload_link = gen_file_upload_url(token, 'upload-api')
+        upload_link += '?ret-json=1'
+        if replace:
+            upload_link += '&replace=1'
+
+        # upload file
+        if extension == 'zsdoc' and is_zipfile(uploaded_temp_path):
+            tmp_dir = str(uuid.uuid4())
+            tmp_extracted_path = os.path.join('/tmp/seahub', str(repo_id), 'sdoc_zip_extracted/', tmp_dir)
+            try:
+                with ZipFile(uploaded_temp_path) as zip_file:
+                    zip_file.extractall(tmp_extracted_path)
+            except Exception as e:
+                logger.error(e)
+                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
+
+            sdoc_file_name = filename.replace('zsdoc', 'sdoc')
+            new_file_path = os.path.join(parent_dir, relative_path, sdoc_file_name)
+
+            data = {'parent_dir': parent_dir, 'target_file': new_file_path, 'relative_path': relative_path}
+            if replace:
+                data['replace'] = 1
+            sdoc_file_path = os.path.join(tmp_extracted_path, 'content.json')
+            new_sdoc_file_path = os.path.join(tmp_extracted_path, sdoc_file_name)
+            os.rename(sdoc_file_path, new_sdoc_file_path)
+
+            files = {'file': open(new_sdoc_file_path, 'rb')}
+            resp = requests.post(upload_link, files=files, data=data)
+            if not resp.ok:
+                logger.error('save file: %s failed: %s' % (filename, resp.text))
+                return api_error(resp.status_code, resp.content)
+
+            sdoc_name = json.loads(resp.content)[0].get('name')
+            new_sdoc_file_path = os.path.join(parent_dir, relative_path, sdoc_name)
+            doc_uuid = get_seadoc_file_uuid(repo, new_sdoc_file_path)
+
+            # upload sdoc images
+            image_dir = os.path.join(tmp_extracted_path, 'images/')
+            batch_upload_sdoc_images(doc_uuid, repo_id, username, image_dir)
+
+            # remove tmp file
+            if os.path.exists(tmp_extracted_path):
+                shutil.rmtree(tmp_extracted_path)
+
+            return Response(json.loads(resp.content))
+        else:
+            files = {'file': file}
+            new_file_path = posixpath.join(parent_dir, filename)
+            data = {'parent_dir': parent_dir, 'target_file': new_file_path, 'relative_path': relative_path}
+            if replace:
+                data['replace'] = 1
+            resp = requests.post(upload_link, files=files, data=data)
+            if not resp.ok:
+                logger.error('save file: %s failed: %s' % (filename, resp.text))
+                return api_error(resp.status_code, resp.content)
+
+            return Response(json.loads(resp.content))
+
+
+def batch_upload_sdoc_images(doc_uuid, repo_id, username, image_dir):
+    # the package may contain no images/ folder at all
+    if not os.path.isdir(image_dir):
+        return
+
+    parent_path = gen_seadoc_image_parent_path(doc_uuid, repo_id, username)
+    upload_link = get_seadoc_asset_upload_link(repo_id, parent_path, username)
+
+    file_list = os.listdir(image_dir)
+
+    for filename in file_list:
+        file_path = posixpath.join(parent_path, filename)
+        image_path = os.path.join(image_dir, filename)
+        image_file = open(image_path, 'rb')
+        files = {'file': image_file}
+        data = {'parent_dir': parent_path, 'filename': filename, 'target_file': file_path}
+        resp = requests.post(upload_link, files=files, data=data)
+        if not resp.ok:
+            logger.warning('upload sdoc image: %s failed: %s', filename, resp.text)
+
+
 class UpdateLinkView(APIView):
     authentication_classes = (TokenAuthentication, SessionAuthentication)
     permission_classes = (IsAuthenticated,)
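The zsdoc branch above treats the upload as a zip package: content.json at the archive root becomes the .sdoc file, and anything under images/ is pushed to the document's asset folder. A small sketch of assembling such a package locally is shown below; the document body here is a made-up placeholder, not the real sdoc schema.

    import json
    import os
    from zipfile import ZipFile

    doc = {'placeholder': 'minimal stand-in for real sdoc content'}
    with open('content.json', 'w') as f:
        json.dump(doc, f)

    with ZipFile('notes.zsdoc', 'w') as zf:
        zf.write('content.json')
        # images/ is optional; include it only if the document has assets
        if os.path.isdir('images'):
            for name in os.listdir('images'):
                zf.write(os.path.join('images', name))

    # POSTing notes.zsdoc to /api2/repos/<repo_id>/upload-file/ stores it as
    # notes.sdoc and uploads the bundled images alongside it.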
diff --git a/seahub/seadoc/apis.py b/seahub/seadoc/apis.py
index 994dbdd8b68..4c892796f39 100644
--- a/seahub/seadoc/apis.py
+++ b/seahub/seadoc/apis.py
@@ -8,6 +8,7 @@
 import posixpath
 from urllib.parse import unquote
 import time
+import shutil
 from datetime import datetime, timedelta
 
 from pypinyin import lazy_pinyin
@@ -17,7 +18,7 @@
 from rest_framework.authentication import SessionAuthentication
 from rest_framework.permissions import IsAuthenticated
 from django.utils.translation import gettext as _
-from django.http import HttpResponseRedirect, HttpResponse
+from django.http import HttpResponseRedirect, HttpResponse, FileResponse
 from django.core.files.base import ContentFile
 from django.utils import timezone
 from django.db import transaction
@@ -32,7 +33,7 @@
 from seahub.seadoc.utils import is_valid_seadoc_access_token, get_seadoc_upload_link, \
     get_seadoc_download_link, get_seadoc_file_uuid, gen_seadoc_access_token, \
     gen_seadoc_image_parent_path, get_seadoc_asset_upload_link, get_seadoc_asset_download_link, \
-    can_access_seadoc_asset, is_seadoc_revision
+    can_access_seadoc_asset, is_seadoc_revision, ZSDOC, export_sdoc
 from seahub.seadoc.settings import SDOC_REVISIONS_DIR, SDOC_IMAGES_DIR
 from seahub.utils.file_types import SEADOC, IMAGE
 from seahub.utils.file_op import if_locked_by_online_office
@@ -3023,3 +3024,46 @@ def get(self, request, file_uuid):
             f['doc_uuid'] = get_seadoc_file_uuid(repo, e['fullpath'])
 
         return Response(resp_json, resp.status_code)
+
+
+class SeadocExportView(APIView):
+    authentication_classes = (SdocJWTTokenAuthentication, TokenAuthentication, SessionAuthentication)
+    permission_classes = (IsAuthenticated,)
+    throttle_classes = (UserRateThrottle, )
+
+    def get(self, request, file_uuid):
+        username = request.user.username
+        uuid_map = FileUUIDMap.objects.get_fileuuidmap_by_uuid(file_uuid)
+        if not uuid_map:
+            error_msg = 'seadoc uuid %s not found.' % file_uuid
+            return api_error(status.HTTP_404_NOT_FOUND, error_msg)
+
+        filetype, fileext = get_file_type_and_ext(uuid_map.filename)
+        if filetype != SEADOC:
+            error_msg = 'seadoc file type %s invalid.' % filetype
+            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
+
+        # permission check
+        permission = check_folder_permission(request, uuid_map.repo_id, uuid_map.parent_path)
+        if not permission:
+            error_msg = 'Permission denied.'
+            return api_error(status.HTTP_403_FORBIDDEN, error_msg)
+
+        try:
+            tmp_zip_path = export_sdoc(uuid_map, username)
+        except Exception as e:
+            logger.error(e)
+            error_msg = 'Internal Server Error'
+            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
+        if not os.path.exists(tmp_zip_path):
+            error_msg = 'Internal Server Error'
+            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
+
+        response = FileResponse(open(tmp_zip_path, 'rb'), content_type="application/x-zip-compressed", as_attachment=True)
+        response['Content-Disposition'] = 'attachment;filename*=UTF-8\'\'' + quote(uuid_map.filename[:-4] + ZSDOC)
+
+        tmp_dir = os.path.join('/tmp/sdoc', str(uuid_map.uuid))
+        if os.path.exists(tmp_dir):
+            shutil.rmtree(tmp_dir)
+
+        return response
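SeadocExportView streams the staged zip back as a .zsdoc attachment. Below is a sketch of downloading it with the requests library, assuming the api/v2.1/seadoc/ prefix noted in seahub/seadoc/urls.py; server, token, and uuid values are placeholders.

    import requests

    SERVER = 'https://seafile.example.com'        # placeholder
    TOKEN = 'your-api-token'                      # placeholder
    FILE_UUID = 'sdoc-file-uuid-36-chars'         # placeholder

    resp = requests.get(
        '%s/api/v2.1/seadoc/export/%s/' % (SERVER, FILE_UUID),
        headers={'Authorization': 'Token %s' % TOKEN},
        stream=True,
    )
    resp.raise_for_status()

    # The body is a zip archive; the suggested filename ends in .zsdoc.
    with open('exported.zsdoc', 'wb') as f:
        for chunk in resp.iter_content(chunk_size=8192):
            f.write(chunk)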
diff --git a/seahub/seadoc/urls.py b/seahub/seadoc/urls.py
index 7f63c29931c..bfcbf889c9e 100644
--- a/seahub/seadoc/urls.py
+++ b/seahub/seadoc/urls.py
@@ -5,7 +5,7 @@
     SeadocCommentRepliesView, SeadocCommentReplyView, SeadocFileView, SeadocFileUUIDView, SeadocDirView, SdocRevisionBaseVersionContent, SeadocRevisionView, \
     SdocRepoTagsView, SdocRepoTagView, SdocRepoFileTagsView, SdocRepoFileTagView, SeadocNotificationsView, SeadocNotificationView, \
     SeadocFilesInfoView, DeleteSeadocOtherRevision, SeadocPublishedRevisionContent, SdocParticipantsView, SdocParticipantView, SdocRelatedUsers, SeadocEditorCallBack, \
-    SeadocDailyHistoryDetail, SeadocSearchFilenameView
+    SeadocDailyHistoryDetail, SeadocSearchFilenameView, SeadocExportView
 
 # api/v2.1/seadoc/
 urlpatterns = [
@@ -51,4 +51,5 @@
     re_path(r'^notifications/(?P<file_uuid>[-0-9a-f]{36})/$', SeadocNotificationsView.as_view(), name='seadoc_notifications'),
     re_path(r'^notifications/(?P<file_uuid>[-0-9a-f]{36})/(?P\d+)/$', SeadocNotificationView.as_view(), name='seadoc_notification'),
     re_path(r'^search-filename/(?P[-0-9a-f]{36})/$', SeadocSearchFilenameView.as_view(), name='seadoc_search_filename'),
+    re_path(r'^export/(?P<file_uuid>[-0-9a-f]{36})/$', SeadocExportView.as_view(), name='seadoc_export'),
 ]
diff --git a/seahub/seadoc/utils.py b/seahub/seadoc/utils.py
index 289cc12203e..039d36abc12 100644
--- a/seahub/seadoc/utils.py
+++ b/seahub/seadoc/utils.py
@@ -1,16 +1,21 @@
 import os
+import io
 import jwt
 import json
 import time
 import uuid
 import logging
 import posixpath
+import shutil
+import requests
+from zipfile import ZipFile, is_zipfile
 
 from seaserv import seafile_api
 
 from seahub.tags.models import FileUUIDMap
 from seahub.settings import SEADOC_PRIVATE_KEY
-from seahub.utils import normalize_file_path, gen_file_get_url, gen_file_upload_url, gen_inner_file_get_url
+from seahub.utils import normalize_file_path, gen_file_get_url, gen_file_upload_url, gen_inner_file_get_url, \
+    get_inner_fileserver_root
 from seahub.utils.auth import AUTHORIZATION_PREFIX
 from seahub.views import check_folder_permission
 from seahub.base.templatetags.seahub_tags import email2nickname
@@ -20,6 +25,8 @@
 
 logger = logging.getLogger(__name__)
 
+ZSDOC = 'zsdoc'
+
 
 def uuid_str_to_32_chars(file_uuid):
     if len(file_uuid) == 36:
@@ -320,3 +327,95 @@ def move_sdoc_images(src_repo_id, src_path, dst_repo_id, dst_path, username, is_
         need_progress=need_progress, synchronous=synchronous,
     )
     return
+
+
+def export_sdoc_clear_tmp_files_and_dirs(tmp_file_path, tmp_zip_path):
+    # delete tmp files/dirs
+    if os.path.exists(tmp_file_path):
+        shutil.rmtree(tmp_file_path)
+    if os.path.exists(tmp_zip_path):
+        os.remove(tmp_zip_path)
+
+
+def export_sdoc_prepare_images_folder(repo_id, doc_uuid, images_dir_id, username):
+    # get a fileserver access token for zipping the images folder
+    fake_obj_id = {
+        'obj_id': images_dir_id,
+        'dir_name': 'images',  # after download and zip, the folder root name is images
+        'is_windows': 0
+    }
+    try:
+        token = seafile_api.get_fileserver_access_token(
+            repo_id, json.dumps(fake_obj_id), 'download-dir', username, use_onetime=False
+        )
+    except Exception as e:
+        raise e
+
+    progress = {'zipped': 0, 'total': 1}
+    while progress['zipped'] != progress['total']:
+        time.sleep(0.5)  # sleep 0.5 second
+        try:
+            progress = json.loads(seafile_api.query_zip_progress(token))
+        except Exception as e:
+            raise e
+
+    asset_url = '%s/zip/%s' % (get_inner_fileserver_root(), token)
+    try:
+        resp = requests.get(asset_url)
+    except Exception as e:
+        raise e
+    file_obj = io.BytesIO(resp.content)
+    if is_zipfile(file_obj):
+        with ZipFile(file_obj) as zp:
+            zp.extractall(os.path.join('/tmp/sdoc', doc_uuid, 'sdoc_asset'))
+    return
+
+
+def export_sdoc(uuid_map, username):
+    """
+    /tmp/sdoc/<doc_uuid>/sdoc_asset/
+        |- images/
+        |- content.json
+
+    zip /tmp/sdoc/<doc_uuid>/sdoc_asset/ to /tmp/sdoc/<doc_uuid>/zip_file.zip
+    """
+    doc_uuid = str(uuid_map.uuid)
+    repo_id = uuid_map.repo_id
+
+    logger.info('Start prepare /tmp/sdoc/{}/zip_file.zip for export sdoc.'.format(doc_uuid))
+
+    tmp_file_path = os.path.join('/tmp/sdoc', doc_uuid, 'sdoc_asset/')  # stores asset files and json fetched from fileserver
+    tmp_zip_path = os.path.join('/tmp/sdoc', doc_uuid, 'zip_file') + '.zip'  # path of the resulting zip
+
+    logger.info('Clear tmp dirs and files before prepare.')
+    export_sdoc_clear_tmp_files_and_dirs(tmp_file_path, tmp_zip_path)
+    os.makedirs(tmp_file_path, exist_ok=True)
+
+    try:
+        download_link = get_seadoc_download_link(uuid_map, is_inner=True)
+        resp = requests.get(download_link)
+        file_obj = io.BytesIO(resp.content)
+        with open(os.path.join(tmp_file_path, 'content.json'), 'wb') as f:
+            f.write(file_obj.read())
+    except Exception as e:
+        logger.error('prepare sdoc failed. ERROR: {}'.format(e))
+        raise Exception('prepare sdoc failed. ERROR: {}'.format(e))
+
+    # get the images folder; it may be empty or absent
+    parent_path = '/images/sdoc/' + doc_uuid + '/'
+    images_dir_id = seafile_api.get_dir_id_by_path(repo_id, parent_path)
+    if images_dir_id:
+        logger.info('Create images folder.')
+        try:
+            export_sdoc_prepare_images_folder(
+                repo_id, doc_uuid, images_dir_id, username)
+        except Exception as e:
+            logger.warning('create images folder failed. ERROR: {}'.format(e))
+
+    logger.info('Make zip file for download...')
+    try:
+        shutil.make_archive('/tmp/sdoc/' + doc_uuid + '/zip_file', "zip", root_dir=tmp_file_path)
+    except Exception as e:
+        logger.error('make zip failed. ERROR: {}'.format(e))
+        raise Exception('make zip failed. ERROR: {}'.format(e))
+    logger.info('Create /tmp/sdoc/{}/zip_file.zip success!'.format(doc_uuid))
+    return tmp_zip_path
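export_sdoc stages everything under /tmp/sdoc/<doc_uuid>/sdoc_asset/ and returns the path of the finished zip. The sketch below, with a hypothetical uuid, just lists the archive to show the expected layout.

    from zipfile import ZipFile

    # hypothetical path; in practice export_sdoc returns it
    tmp_zip_path = '/tmp/sdoc/00000000-0000-0000-0000-000000000000/zip_file.zip'

    with ZipFile(tmp_zip_path) as zf:
        # Expected entries:
        #   content.json   the document body fetched from fileserver
        #   images/...     present only when the document has image assets
        print(zf.namelist())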
diff --git a/seahub/views/file.py b/seahub/views/file.py
index aafc4224c65..d010c93290e 100644
--- a/seahub/views/file.py
+++ b/seahub/views/file.py
@@ -504,6 +504,13 @@ def view_lib_file(request, repo_id, path):
     if parse_repo_perm(permission).can_download is False:
         raise Http404
 
+    # redirect to sdoc export
+    filetype, fileext = get_file_type_and_ext(filename)
+    if filetype == SEADOC:
+        file_uuid = get_seadoc_file_uuid(repo, path)
+        file_url = reverse('seadoc_export', args=[file_uuid])
+        return HttpResponseRedirect(file_url)
+
     operation = 'download' if dl else 'view'
     token = seafile_api.get_fileserver_access_token(
         repo_id, file_id, operation, username,