diff --git a/frontend/src/components/dirent-grid-view/dirent-grid-view.js b/frontend/src/components/dirent-grid-view/dirent-grid-view.js index e18d3183e9f..bea0bc19e5f 100644 --- a/frontend/src/components/dirent-grid-view/dirent-grid-view.js +++ b/frontend/src/components/dirent-grid-view/dirent-grid-view.js @@ -123,6 +123,14 @@ class DirentGridView extends React.Component { this.props.onItemConvert(currentObject, dstType); }; + exportDocx = () => { + const serviceUrl = window.app.config.serviceURL; + let repoID = this.props.repoID; + let filePath = this.getDirentPath(this.props.dirent); + let exportToDocxUrl = serviceUrl + '/repo/sdoc_export_to_docx/' + repoID + '/?file_path=' + encodeURIComponent(filePath); + window.location.href = exportToDocxUrl; + }; + onMenuItemClick = (operation, currentObject, event) => { hideMenu(); switch(operation) { @@ -150,6 +158,12 @@ class DirentGridView extends React.Component { case 'Convert to Markdown': this.onItemConvert(currentObject, event, 'markdown'); break; + case 'Convert to docx': + this.onItemConvert(currentObject, event, 'docx'); + break; + case 'Export docx': + this.exportDocx(); + break; case 'Convert to sdoc': this.onItemConvert(currentObject, event, 'sdoc'); break; diff --git a/frontend/src/components/dirent-list-view/dirent-list-item.js b/frontend/src/components/dirent-list-view/dirent-list-item.js index fdc12e326e4..404ba036607 100644 --- a/frontend/src/components/dirent-list-view/dirent-list-item.js +++ b/frontend/src/components/dirent-list-view/dirent-list-item.js @@ -230,6 +230,14 @@ class DirentListItem extends React.Component { this.setState({isShareDialogShow: !this.state.isShareDialogShow}); }; + exportDocx = () => { + const serviceUrl = window.app.config.serviceURL; + let repoID = this.props.repoID; + let filePath = this.getDirentPath(this.props.dirent); + let exportToDocxUrl = serviceUrl + '/repo/sdoc_export_to_docx/' + repoID + '/?file_path=' + encodeURIComponent(filePath); + window.location.href = exportToDocxUrl; + }; + closeSharedDialog = () 
=> { this.setState({isShareDialogShow: !this.state.isShareDialogShow}); }; @@ -277,6 +285,12 @@ class DirentListItem extends React.Component { case 'Convert to Markdown': this.onItemConvert(event, 'markdown'); break; + case 'Convert to docx': + this.onItemConvert(event, 'docx'); + break; + case 'Export docx': + this.exportDocx(); + break; case 'Convert to sdoc': this.onItemConvert(event, 'sdoc'); break; diff --git a/frontend/src/utils/text-translation.js b/frontend/src/utils/text-translation.js index fd7962383a1..5ad34fbdc1e 100644 --- a/frontend/src/utils/text-translation.js +++ b/frontend/src/utils/text-translation.js @@ -25,6 +25,8 @@ const TextTranslation = { 'UNLOCK' : {key : 'Unlock', value : gettext('Unlock')}, 'CONVERT_TO_MARKDOWN' : {key : 'Convert to Markdown', value : gettext('Convert to Markdown')}, 'CONVERT_TO_SDOC' : {key : 'Convert to sdoc', value : gettext('Convert to sdoc')}, + 'CONVERT_TO_DOCX' : {key : 'Convert to docx', value : gettext('Convert to docx')}, + 'EXPORT_DOCX' : {key : 'Export docx', value : gettext('Export docx')}, 'MARK_AS_DRAFT' : {key : 'Mark as draft', value : gettext('Mark as draft')}, 'UNMARK_AS_DRAFT' : {key : 'Unmark as draft', value : gettext('Unmark as draft')}, 'HISTORY' : {key : 'History', value : gettext('History')}, diff --git a/frontend/src/utils/utils.js b/frontend/src/utils/utils.js index 75600cef606..b3c67d92639 100644 --- a/frontend/src/utils/utils.js +++ b/frontend/src/utils/utils.js @@ -530,7 +530,7 @@ export const Utils = { getFileOperationList: function(isRepoOwner, currentRepoInfo, dirent, isContextmenu) { let list = []; const { SHARE, DOWNLOAD, DELETE, RENAME, MOVE, COPY, TAGS, UNLOCK, LOCK, FREEZE_DOCUMENT, - HISTORY, ACCESS_LOG, PROPERTIES, OPEN_VIA_CLIENT, ONLYOFFICE_CONVERT, CONVERT_TO_MARKDOWN, CONVERT_TO_SDOC } = TextTranslation; + HISTORY, ACCESS_LOG, PROPERTIES, OPEN_VIA_CLIENT, ONLYOFFICE_CONVERT, CONVERT_TO_MARKDOWN, CONVERT_TO_DOCX, EXPORT_DOCX, CONVERT_TO_SDOC } = TextTranslation; const 
permission = dirent.permission; const { isCustomPermission, customPermission } = Utils.getUserPermission(permission); @@ -613,6 +613,8 @@ export const Utils = { if (dirent.name.endsWith('.sdoc')) { list.push(CONVERT_TO_MARKDOWN); + list.push(CONVERT_TO_DOCX); + list.push(EXPORT_DOCX); } } diff --git a/requirements.txt b/requirements.txt index 8954eadd4d7..d7a0311bade 100644 --- a/requirements.txt +++ b/requirements.txt @@ -25,3 +25,4 @@ openpyxl==3.0.* Markdown==3.4.* bleach==5.0.* python-ldap==3.4.* +python-docx==1.1.* diff --git a/seahub/api2/endpoints/file.py b/seahub/api2/endpoints/file.py index 52d1ffb9cd0..6bdfe10190d 100644 --- a/seahub/api2/endpoints/file.py +++ b/seahub/api2/endpoints/file.py @@ -33,7 +33,7 @@ from seahub.seadoc.models import SeadocHistoryName, SeadocDraft, SeadocCommentReply from seahub.base.models import FileComment from seahub.settings import MAX_UPLOAD_FILE_NAME_LEN, OFFICE_TEMPLATE_ROOT -from seahub.api2.endpoints.utils import convert_file +from seahub.api2.endpoints.utils import convert_file, sdoc_convert_to_docx from seahub.seadoc.utils import get_seadoc_file_uuid from seahub.drafts.models import Draft @@ -572,13 +572,16 @@ def post(self, request, repo_id, format=None): if extension == '.md': src_type = 'markdown' - filename = filename[:-2] + 'sdoc' + new_filename = filename[:-2] + 'sdoc' elif extension == '.sdoc': src_type = 'sdoc' - filename = filename[:-4] + 'md' + if dst_type == 'markdown': + new_filename = filename[:-4] + 'md' + if dst_type == 'docx': + new_filename = filename[:-4] + 'docx' - new_file_name = check_filename_or_rename(repo_id, parent_dir, filename) - new_file_path = posixpath.join(parent_dir, new_file_name) + new_filename = check_filename_or_rename(repo_id, parent_dir, new_filename) + new_file_path = posixpath.join(parent_dir, new_filename) download_token = seafile_api.get_fileserver_access_token(repo_id, file_id, 'download', username) @@ -587,15 +590,37 @@ def post(self, request, repo_id, format=None): 
use_onetime=True) doc_uuid = get_seadoc_file_uuid(repo, path) - try: - resp = convert_file(path, username, doc_uuid, download_token, upload_token, src_type, dst_type) + if dst_type != 'docx': + try: + resp = convert_file(path, username, doc_uuid, + download_token, upload_token, + src_type, dst_type) + except Exception as e: + logger.error(e) + error_msg = 'Internal Server Error' + return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg) + if resp.status_code == 500: - logger.error('convert file error status: %s body: %s', resp.status_code, resp.text) - return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error') - except Exception as e: - logger.error(e) - error_msg = 'Internal Server Error' - return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg) + logger.error('convert file error status: %s body: %s', + resp.status_code, resp.text) + return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, + 'Internal Server Error') + else: + + try: + resp = sdoc_convert_to_docx(path, username, doc_uuid, + download_token, upload_token, + src_type, dst_type) + except Exception as e: + logger.error(e) + error_msg = 'Internal Server Error' + return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg) + + if resp.status_code != 200: + logger.error('convert file error status: %s body: %s', + resp.status_code, resp.text) + return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, + 'Internal Server Error') file_info = self.get_file_info(username, repo_id, new_file_path) return Response(file_info) diff --git a/seahub/api2/endpoints/utils.py b/seahub/api2/endpoints/utils.py index a096a67dd26..70814839147 100644 --- a/seahub/api2/endpoints/utils.py +++ b/seahub/api2/endpoints/utils.py @@ -1,21 +1,33 @@ # Copyright (c) 2012-2016 Seafile Ltd. 
+import os +import io import re -import datetime +import jwt +import docx import time -import urllib.request, urllib.parse, urllib.error import logging import requests -import jwt -from urllib.parse import urljoin +import datetime +import urllib.request +import urllib.parse +import urllib.error +from docx import Document +from docx.shared import Pt, Inches +from docx.enum.text import WD_COLOR_INDEX + +from django.urls import reverse +from urllib.parse import urljoin from rest_framework import status from seaserv import ccnet_api, seafile_api -from pysearpc import SearpcError from seahub.api2.utils import api_error +from seahub.tags.models import FileUUIDMap from seahub.base.templatetags.seahub_tags import email2nickname, email2contact_email -from seahub.utils import get_log_events_by_time, is_pro_version, is_org_context +from seahub.utils import get_log_events_by_time, is_pro_version, is_org_context, \ + gen_inner_file_get_url, get_service_url + from seahub.settings import SEADOC_PRIVATE_KEY, FILE_CONVERTER_SERVER_URL try: @@ -25,15 +37,16 @@ logger = logging.getLogger(__name__) + def api_check_group(func): """ Decorator for check if group valid """ def _decorated(view, request, group_id, *args, **kwargs): - group_id = int(group_id) # Checked by URL Conf + group_id = int(group_id) # Checked by URL Conf try: group = ccnet_api.get_group(int(group_id)) - except SearpcError as e: + except Exception as e: logger.error(e) error_msg = 'Internal Server Error' return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg) @@ -46,6 +59,7 @@ def _decorated(view, request, group_id, *args, **kwargs): return _decorated + def add_org_context(func): def _decorated(view, request, *args, **kwargs): if is_org_context(request): @@ -56,6 +70,7 @@ def _decorated(view, request, *args, **kwargs): return _decorated + def is_org_user(username, org_id=None): """ Check if an user is an org user. 
@@ -78,6 +93,7 @@ def is_org_user(username, org_id=None): logger.error(e) return False + def get_user_contact_email_dict(email_list): email_list = set(email_list) user_contact_email_dict = {} @@ -87,6 +103,7 @@ def get_user_contact_email_dict(email_list): return user_contact_email_dict + def get_user_name_dict(email_list): email_list = set(email_list) user_name_dict = {} @@ -96,6 +113,7 @@ def get_user_name_dict(email_list): return user_name_dict + def get_repo_dict(repo_id_list): repo_id_list = set(repo_id_list) repo_dict = {} @@ -116,7 +134,6 @@ def get_group_dict(group_id_list): if group_id not in group_dict: group_dict[group_id] = '' group = ccnet_api.get_group(int(group_id)) - print(group) if group: group_dict[group_id] = group @@ -134,6 +151,7 @@ def check_time_period_valid(start, end): return True + def get_log_events_by_type_and_time(log_type, start, end): start_struct_time = datetime.datetime.strptime(start, "%Y-%m-%d") start_timestamp = time.mktime(start_struct_time.timetuple()) @@ -146,6 +164,7 @@ def get_log_events_by_type_and_time(log_type, start, end): events = events if events else [] return events + def generate_links_header_for_paginator(base_url, page, per_page, total_count, option_dict={}): def is_first_page(page): @@ -236,3 +255,309 @@ def convert_file(path, username, doc_uuid, download_token, upload_token, src_typ resp = requests.post(url, json=params, headers=headers, timeout=30) return resp + + +def sdoc_convert_to_docx(path, username, doc_uuid, download_token, + upload_token, src_type, dst_type): + + headers = convert_file_gen_headers() + params = { + 'path': path, + 'username': username, + 'doc_uuid': doc_uuid, + 'download_token': download_token, + 'upload_token': upload_token, + 'src_type': src_type, + 'dst_type': dst_type, + } + url = urljoin(FILE_CONVERTER_SERVER_URL, '/api/v1/sdoc-convert-to-docx/') + resp = requests.post(url, json=params, headers=headers, timeout=30) + + return resp + + +def sdoc_export_to_docx(path, username, 
doc_uuid, download_token, + src_type, dst_type): + + headers = convert_file_gen_headers() + params = { + 'path': path, + 'username': username, + 'doc_uuid': doc_uuid, + 'download_token': download_token, + 'src_type': src_type, + 'dst_type': dst_type, + } + url = urljoin(FILE_CONVERTER_SERVER_URL, '/api/v1/sdoc-export-to-docx/') + resp = requests.post(url, json=params, headers=headers, timeout=30) + + return resp + + +def convert_sdoc_to_docx(file_content_json, file_uuid, username): + + def add_hyperlink(paragraph, url, text, color): + """ + A function that places a hyperlink within a paragraph object. + + :param paragraph: The paragraph we are adding the hyperlink to. + :param url: A string containing the required url + :param text: The text displayed for the url + :return: The hyperlink object + """ + + # This gets access to the document.xml.rels file and gets a new relation id value + part = paragraph.part + r_id = part.relate_to(url, docx.opc.constants.RELATIONSHIP_TYPE.HYPERLINK, is_external=True) + + # Create the w:hyperlink tag and add needed values + hyperlink = docx.oxml.shared.OxmlElement('w:hyperlink') + hyperlink.set(docx.oxml.shared.qn('r:id'), r_id, ) + + # Create a w:r element + new_run = docx.oxml.shared.OxmlElement('w:r') + + # Create a new w:rPr element + rPr = docx.oxml.shared.OxmlElement('w:rPr') + + # Add color if it is given + if color: + c = docx.oxml.shared.OxmlElement('w:color') + c.set(docx.oxml.shared.qn('w:val'), color) + rPr.append(c) + + # Join all the xml elements together add add the required text to the w:r element + new_run.append(rPr) + new_run.text = text + hyperlink.append(new_run) + + paragraph._p.append(hyperlink) + + return hyperlink + + def extract_text_in_table_recursively(data): + + text_list = [] + if isinstance(data, list): + for item in data: + text_list.extend(extract_text_in_table_recursively(item)) + elif isinstance(data, dict): + if 'text' in data: + text_list.append(data['text']) + for key, value in data.items(): + 
text_list.extend(extract_text_in_table_recursively(value)) + + return text_list + + def search_sdoc_node_recursively(children_list, type_sq=[], top_type=''): + + if 'text' in children_list[0]: + if top_type == "ordered_list" and type_sq.count('ordered_list') == 1: + type_content_list.append(['ordered_list_2', children_list]) + elif top_type == "ordered_list" and type_sq.count('ordered_list') >= 2: + type_content_list.append(['ordered_list_3', children_list]) + elif top_type == "unordered_list" and type_sq.count('unordered_list') == 1: + type_content_list.append(['unordered_list_2', children_list]) + elif top_type == "unordered_list" and type_sq.count('unordered_list') >= 2: + type_content_list.append(['unordered_list_3', children_list]) + else: + type_content_list.append([top_type, children_list]) + else: + if top_type == 'table': + table_text_list = extract_text_in_table_recursively(children_list) + sub_length = len(children_list[0]['children']) + new_table_text_list = [] + for i in range(0, len(table_text_list), sub_length): + new_table_text_list.append(table_text_list[i:i + sub_length]) + + type_content_list.append([top_type, new_table_text_list]) + else: + for children in children_list: + current_type = children.get('type', 'no type') + sub_children_list = children.get('children', []) + search_sdoc_node_recursively(sub_children_list, + type_sq + [current_type], + top_type=top_type) + + sdoc_node_list = file_content_json.get('children', []) + type_content_list = [] + for sdoc_node in sdoc_node_list: + top_sdoc_type = sdoc_node.get('type', '') + children_list = sdoc_node.get('children', '') + search_sdoc_node_recursively(children_list, top_type=top_sdoc_type) + + document = Document() + + for type_content in type_content_list: + + sdoc_type = type_content[0] + content = type_content[1] + + if sdoc_type == 'title': + docx_paragraph = document.add_heading(level=0) + if sdoc_type == 'subtitle': + docx_paragraph = document.add_paragraph(style="Subtitle") + if sdoc_type 
== 'header1': + docx_paragraph = document.add_heading(level=1) + if sdoc_type == 'header2': + docx_paragraph = document.add_heading(level=2) + if sdoc_type == 'header3': + docx_paragraph = document.add_heading(level=3) + if sdoc_type == 'header4': + docx_paragraph = document.add_heading(level=4) + if sdoc_type == 'header5': + docx_paragraph = document.add_heading(level=5) + if sdoc_type == 'header6': + docx_paragraph = document.add_heading(level=6) + if sdoc_type == 'paragraph': + docx_paragraph = document.add_paragraph() + if sdoc_type == 'blockquote': + docx_paragraph = document.add_paragraph(style="Intense Quote") + if sdoc_type == 'ordered_list': + docx_paragraph = document.add_paragraph(style="List Number") + if sdoc_type == 'ordered_list_2': + docx_paragraph = document.add_paragraph(style="List Number 2") + if sdoc_type == 'ordered_list_3': + docx_paragraph = document.add_paragraph(style="List Number 3") + if sdoc_type in ('unordered_list', 'check_list_item'): + docx_paragraph = document.add_paragraph(style="List Bullet") + if sdoc_type == 'unordered_list_2': + docx_paragraph = document.add_paragraph(style="List Bullet 2") + if sdoc_type == 'unordered_list_3': + docx_paragraph = document.add_paragraph(style="List Bullet 3") + + if sdoc_type == 'code_block': + + docx_paragraph = document.add_paragraph(style="No Spacing") + docx_paragraph.paragraph_format.left_indent = Inches(0.2) + + for text_dict in content: + text = text_dict.get('text', '') + run = docx_paragraph.add_run(text) + run.font.size = Pt(10) + run.font.name = 'Courier New' + + elif sdoc_type == 'paragraph' and \ + any(item.get('type') == 'link' for item in content): + + # add hyperlink to docx + + # ['paragraph', + # [{'id': 'TQdHtyxhQfm8ipm76cVKKg', 'text': ''}, + # {'children': [{'id': 'VFGENWpbTNeMRb-16QgdNA', + # 'text': '127.0.0.1 link title'}], + # 'href': 'http://127.0.0.1/link-address/', + # 'id': 'Co9L-c-SQmWk4yxHSXu5tg', + # 'title': '127.0.0.1 link title', + # 'type': 'link'}, + # 
{'id': 'Pwqf3nbSTWmIFbwrFo1Eow', 'text': ''}]], + + link_href = '' + link_title = '' + for item in content: + if 'href' in item: + link_href = item['href'] + if 'title' in item: + link_title = item['title'] + + docx_paragraph = document.add_paragraph() + add_hyperlink(docx_paragraph, link_href, link_title, "0000FF") + + elif sdoc_type == 'paragraph' and \ + any(item.get('type') in ('sdoc_link', 'file_link') for item in content): + + # add sdoc/file link to docx + + # ['paragraph', + # [{'id': 'D8omdcCLR4eLB3o4f0yOxw', 'text': ' '}, + # {'children': [{'id': 'KFM5z7zvTaOcZyaT1zBhHQ', 'text': '987.sdoc'}], + # 'display_type': 'icon_link', + # 'doc_uuid': '45b266e4-17a5-475d-b601-10aa8001ea80', + # 'id': 'bIwxx0mMQVKRFo3LlYwf6A', + # 'title': '987.sdoc', + # 'type': 'sdoc_link'}, + # {'id': 'G5WmlQ4tSpO4IH5CDFCdUA', 'text': ' '}]], + + doc_uuid = '' + doc_title = '' + for item in content: + if 'doc_uuid' in item: + doc_uuid = item['doc_uuid'] + if 'title' in item: + doc_title = item['title'] + doc_url = get_service_url() + reverse('seadoc_file_view', args=[doc_uuid]) + docx_paragraph = document.add_paragraph() + add_hyperlink(docx_paragraph, doc_url, doc_title, "0000FF") + + elif sdoc_type in ('paragraph', 'image_block') and \ + any(item.get('type') == 'image' for item in content): + + # add image to docx + + # ['paragraph', + # [{'id': 'VL579VQRQdOjJCKkjRXXNA', 'text': ''}, + # {'children': [{'id': 'dp7gIr5aSEa6GtK3-vi68g', 'text': ''}], + # 'data': {'src': '/image-1702627227876.png'}, + # 'id': 'TEPevi-FQo-unZRBSlnd3A', + # 'type': 'image'}, + # {'id': 'SQjLfnvBSimn695OZtyGnw', 'text': ''}]], + + image_file_path = '' + for item in content: + if 'data' in item: + image_file_path = item['data']['src'] + + uuid_map = FileUUIDMap.objects.get_fileuuidmap_by_uuid(file_uuid) + repo_id = uuid_map.repo_id + image_file_path = f"/images/sdoc/{file_uuid}{image_file_path}" + image_file_id = seafile_api.get_file_id_by_path(repo_id, + image_file_path) + download_token = 
seafile_api.get_fileserver_access_token(repo_id, image_file_id, + 'download', username, + use_onetime=False) + image_file_name = os.path.basename(image_file_path) + image_url = gen_inner_file_get_url(download_token, image_file_name) + resp = requests.get(image_url) + image_content = resp.content + document.add_picture(io.BytesIO(image_content), width=Inches(5)) + + elif sdoc_type == 'table': + + # add table to docx + + # ['table', [['1', '2', '3', '4'], ['a', 'b', 'c', 'd']]] + + table = document.add_table(rows=len(content), cols=len(content[0])) + + def fulfill_table(table, content): + for i, row in enumerate(content): + for j, value in enumerate(row): + table.cell(i, j).text = value + + fulfill_table(table, content) + + elif sdoc_type == 'callout': + + docx_paragraph = document.add_paragraph() + for text_dict in content: + text = text_dict.get('text', '') + run = docx_paragraph.add_run(text) + run.font.highlight_color = WD_COLOR_INDEX.GRAY_25 + + else: + + for text_dict in content: + + text = text_dict.get('text', '') or text_dict.get('href', '') + run = docx_paragraph.add_run(text) + + bold = text_dict.get('bold', False) + run.bold = True if bold else False + + italic = text_dict.get('italic', False) + run.italic = True if italic else False + + memory_stream = io.BytesIO() + document.save(memory_stream) + docx_content = memory_stream.getvalue() + return docx_content diff --git a/seahub/seadoc/apis.py b/seahub/seadoc/apis.py index 6c61e496077..2984a79f87f 100644 --- a/seahub/seadoc/apis.py +++ b/seahub/seadoc/apis.py @@ -36,14 +36,14 @@ from seahub.utils.file_types import SEADOC, IMAGE from seahub.utils.file_op import if_locked_by_online_office from seahub.utils import get_file_type_and_ext, normalize_file_path, \ - normalize_dir_path, PREVIEW_FILEEXT, get_file_history, \ - gen_inner_file_get_url, gen_inner_file_upload_url, \ - get_service_url, is_valid_username, is_pro_version, get_file_history_by_day, get_file_daily_history_detail + normalize_dir_path, 
PREVIEW_FILEEXT, \ + gen_inner_file_get_url, gen_inner_file_upload_url, gen_file_get_url, \ + get_service_url, is_valid_username, is_pro_version, \ + get_file_history_by_day, get_file_daily_history_detail from seahub.tags.models import FileUUIDMap from seahub.utils.error_msg import file_type_error_msg from seahub.utils.repo import parse_repo_perm from seahub.seadoc.models import SeadocHistoryName, SeadocDraft, SeadocRevision, SeadocCommentReply, SeadocNotification -from seahub.utils.file_revisions import get_file_revisions_within_limit from seahub.avatar.templatetags.avatar_tags import api_avatar_url from seahub.base.templatetags.seahub_tags import email2nickname, \ email2contact_email @@ -243,6 +243,44 @@ def get(self, request, file_uuid): return Response({'download_link': download_link}) +class SeadocImageDownloadLink(APIView): + + authentication_classes = () + throttle_classes = (UserRateThrottle,) + + def get(self, request, file_uuid): + + # jwt permission check + auth = request.headers.get('authorization', '').split() + if not is_valid_seadoc_access_token(auth, file_uuid): + error_msg = 'Permission denied.' + return api_error(status.HTTP_403_FORBIDDEN, error_msg) + + # argument check + image_name = request.GET.get('image_name') + if not image_name: + error_msg = 'image_name invalid.' + return api_error(status.HTTP_400_BAD_REQUEST, error_msg) + + # resource check + uuid_map = FileUUIDMap.objects.get_fileuuidmap_by_uuid(file_uuid) + if not uuid_map: + error_msg = 'seadoc uuid %s not found.' 
% file_uuid + return api_error(status.HTTP_404_NOT_FOUND, error_msg) + + # TODO + username = "" + repo_id = uuid_map.repo_id + image_path = f'{SDOC_IMAGES_DIR}{file_uuid}/{image_name}' + + file_id = seafile_api.get_file_id_by_path(repo_id, image_path) + token = seafile_api.get_fileserver_access_token(repo_id, file_id, + 'download', username) + + download_link = gen_file_get_url(token, image_name) + return Response({'download_link': download_link}) + + class SeadocOriginFileContent(APIView): authentication_classes = () throttle_classes = (UserRateThrottle,) diff --git a/seahub/seadoc/urls.py b/seahub/seadoc/urls.py index a169234c031..f715a4285e7 100644 --- a/seahub/seadoc/urls.py +++ b/seahub/seadoc/urls.py @@ -1,5 +1,5 @@ from django.urls import re_path -from .apis import SeadocAccessToken, SeadocUploadLink, SeadocDownloadLink, SeadocOriginFileContent, SeadocUploadFile, \ +from .apis import SeadocAccessToken, SeadocUploadLink, SeadocDownloadLink, SeadocImageDownloadLink, SeadocOriginFileContent, SeadocUploadFile, \ SeadocUploadImage, SeadocDownloadImage, SeadocAsyncCopyImages, SeadocQueryCopyMoveProgressView, SeadocCopyHistoryFile, SeadocHistory, SeadocDrafts, SeadocMaskAsDraft, \ SeadocCommentsView, SeadocCommentView, SeadocStartRevise, SeadocPublishRevision, SeadocRevisionsCount, SeadocRevisions, \ SeadocCommentRepliesView, SeadocCommentReplyView, SeadocFileView, SeadocFileUUIDView, SeadocDirView, SdocRevisionBaseVersionContent, SeadocRevisionView, \ @@ -13,6 +13,7 @@ re_path(r'^upload-file/(?P[-0-9a-f]{36})/$', SeadocUploadFile.as_view(), name='seadoc_upload_file'), re_path(r'^upload-link/(?P[-0-9a-f]{36})/$', SeadocUploadLink.as_view(), name='seadoc_upload_link'), re_path(r'^download-link/(?P[-0-9a-f]{36})/$', SeadocDownloadLink.as_view(), name='seadoc_download_link'), + re_path(r'^image-download-link/(?P[-0-9a-f]{36})/$', SeadocImageDownloadLink.as_view(), name='seadoc_image_download_link'), re_path(r'^upload-image/(?P[-0-9a-f]{36})/$', 
SeadocUploadImage.as_view(), name='seadoc_upload_image'), re_path(r'^download-image/(?P[-0-9a-f]{36})/(?P.*)$', SeadocDownloadImage.as_view(), name='seadoc_download_image'), re_path(r'^async-copy-images/(?P[-0-9a-f]{36})/$', SeadocAsyncCopyImages.as_view(), name='seadoc_async_copy_images'), diff --git a/seahub/seadoc/views.py b/seahub/seadoc/views.py index 4b949d76c29..09d06e0599d 100644 --- a/seahub/seadoc/views.py +++ b/seahub/seadoc/views.py @@ -1,16 +1,19 @@ import os +import json +from urllib.parse import quote from django.shortcuts import render +from django.http import HttpResponse from django.utils.translation import gettext as _ -from seaserv import get_repo -from urllib.parse import quote -import json + +from seaserv import get_repo, seafile_api from seahub.auth.decorators import login_required -from seahub.utils import render_error +from seahub.utils import render_error, normalize_file_path from seahub.views import check_folder_permission, validate_owner, get_seadoc_file_uuid from seahub.tags.models import FileUUIDMap from seahub.seadoc.models import SeadocRevision +from seahub.api2.endpoints.utils import sdoc_export_to_docx from .utils import is_seadoc_revision, get_seadoc_download_link, gen_path_link @@ -137,3 +140,48 @@ def sdoc_revisions(request, repo_id): 'per_page': per_page, 'page_next': page_next, }) + + +@login_required +def sdoc_to_docx(request, repo_id): + + # argument check + file_path = request.GET.get('file_path') + file_path = normalize_file_path(file_path) + if not file_path: + error_msg = _("File path invalid.") + return render_error(request, error_msg) + + # resource check + repo = seafile_api.get_repo(repo_id) + if not repo: + error_msg = _("Library does not exist") + return render_error(request, error_msg) + + file_id = seafile_api.get_file_id_by_path(repo_id, file_path) + if not file_id: + error_msg = 'File %s not found.' 
% file_path + return render_error(request, error_msg) + + # permission check + if not check_folder_permission(request, repo_id, '/'): + error_msg = _("Permission denied.") + return render_error(request, error_msg) + + username = request.user.username + filename = os.path.basename(file_path) + doc_uuid = get_seadoc_file_uuid(repo, file_path) + download_token = seafile_api.get_fileserver_access_token(repo_id, file_id, + 'download', username) + + src_type = 'sdoc' + dst_type = 'docx' + resp_with_docx_file = sdoc_export_to_docx(file_path, username, doc_uuid, + download_token, src_type, dst_type) + + docx_mime_type = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document' + response = HttpResponse(content_type=docx_mime_type) + new_file_name = quote(f'{filename[:-5]}.docx') + response['Content-Disposition'] = f'attachment; filename={new_file_name}' + response.write(resp_with_docx_file.content) + return response diff --git a/seahub/urls.py b/seahub/urls.py index 2296c369fc8..cded3858cda 100644 --- a/seahub/urls.py +++ b/seahub/urls.py @@ -198,8 +198,7 @@ from seahub.api2.endpoints.file_participants import FileParticipantsView, FileParticipantView from seahub.api2.endpoints.repo_related_users import RepoRelatedUsersView from seahub.api2.endpoints.repo_auto_delete import RepoAutoDeleteView -from seahub.seadoc.views import sdoc_revision, sdoc_revisions - +from seahub.seadoc.views import sdoc_revision, sdoc_revisions, sdoc_to_docx from seahub.ocm.settings import OCM_ENDPOINT from seahub.ai.apis import LibrarySdocIndexes, Search, LibrarySdocIndex, TaskStatus, \ @@ -231,6 +230,7 @@ re_path(r'^repo/file_revisions/(?P[-0-9a-f]{36})/$', file_revisions, name='file_revisions'), re_path(r'^repo/sdoc_revision/(?P[-0-9a-f]{36})/$', sdoc_revision, name='sdoc_revision'), re_path(r'^repo/sdoc_revisions/(?P[-0-9a-f]{36})/$', sdoc_revisions, name='sdoc_revisions'), + re_path(r'^repo/sdoc_export_to_docx/(?P[-0-9a-f]{36})/$', sdoc_to_docx, name='sdoc_export_to_docx'), 
re_path(r'^repo/file-access/(?P[-0-9a-f]{36})/$', file_access, name='file_access'), re_path(r'^repo/text_diff/(?P[-0-9a-f]{36})/$', text_diff, name='text_diff'), re_path(r'^repo/history/(?P[-0-9a-f]{36})/$', repo_history, name='repo_history'), diff --git a/seahub/utils/file_types.py b/seahub/utils/file_types.py index 878fd4061db..18961eab795 100644 --- a/seahub/utils/file_types.py +++ b/seahub/utils/file_types.py @@ -13,4 +13,4 @@ MARKDOWN_SUPPORT_CONVERT_TYPES = ['sdoc'] -SDOC_SUPPORT_CONVERT_TYPES = ['markdown'] +SDOC_SUPPORT_CONVERT_TYPES = ['markdown', 'docx']