Skip to content

Commit

Permalink
Merge pull request #45 from unbekanntes-pferd/bugfix/1.4.1
Browse files Browse the repository at this point in the history
fixed bugs: stuck uploads, folder upload, types
  • Loading branch information
unbekanntes-pferd authored May 17, 2022
2 parents aeeab5f + 0a2d70d commit 8b0e71a
Show file tree
Hide file tree
Showing 3 changed files with 164 additions and 21 deletions.
20 changes: 10 additions & 10 deletions dracoon/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,43 +57,43 @@ def __init__(self, base_url: str, client_id: str = 'dracoon_legacy_scripting', c
self.user_info = None

@property
def nodes(self):
def nodes(self) -> DRACOONNodes:
return DRACOONNodes(self.client)

@property
def public(self):
def public(self) -> DRACOONPublic:
return DRACOONPublic(self.client)

@property
def user(self):
def user(self) -> DRACOONUser:
return DRACOONUser(self.client)

@property
def reports(self):
def reports(self) -> DRACOONReports:
return DRACOONReports(self.client)

@property
def settings(self):
def settings(self) -> DRACOONSettings:
return DRACOONSettings(self.client)

@property
def users(self):
def users(self) -> DRACOONUsers:
return DRACOONUsers(self.client)

@property
def groups(self):
def groups(self) -> DRACOONGroups:
return DRACOONGroups(self.client)

@property
def eventlog(self):
def eventlog(self) -> DRACOONEvents:
return DRACOONEvents(self.client)

@property
def shares(self):
def shares(self) -> DRACOONShares:
return DRACOONShares(self.client)

@property
def downloads(self):
def downloads(self) -> DRACOONDownloads:
return DRACOONDownloads(self.client)


Expand Down
40 changes: 29 additions & 11 deletions dracoon/nodes/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
FileConflictError, InvalidFileError, InvalidArgumentError)
from dracoon.uploads.models import UploadChannelResponse
from .models import (CompleteS3Upload, CompleteUpload, ConfigRoom, CreateFolder, CreateRoom, CreateUploadChannel,
GetS3Urls, LogEventList, MissingKeysResponse, Node, Permissions, ProcessRoomPendingUsers, S3Part,
GetS3Urls, LogEventList, MissingKeysResponse, Node, NodeItem, Permissions, ProcessRoomPendingUsers, S3Part,
SetFileKeys, SetFileKeysItem, TransferNode, CommentNode, RestoreNode, UpdateFile, UpdateFiles,
UpdateFolder, UpdateRoom, UpdateRoomGroupItem, UpdateRoomGroups, UpdateRoomHooks,
UpdateRoomUserItem, UpdateRoomUsers)
Expand Down Expand Up @@ -238,6 +238,11 @@ async def complete_s3_upload(self, upload_id: str, upload: CompleteS3Upload, rai
await self.dracoon.handle_http_error(err=e, raise_on_err=raise_on_err)

self.logger.info("Completed S3 upload.")

# handle resolutionStrategy fail and raise_on_err True with conflict
if res.status_code == 409:
return res.json()

return None

def make_s3_upload_complete(self, parts: List[S3Part], resolution_strategy: str = None, keep_share_links: str = None, file_name: str = None,
Expand Down Expand Up @@ -371,7 +376,7 @@ async def upload_unencrypted(self, file_path: str, upload_channel: CreateFileUpl
if display_progress: progress.close()


if display_progress: progress.close()
if display_progress: progress.close()

complete_upload = self.make_upload_complete(file_name=file_name, keep_shares=keep_shares,
resolution_strategy=resolution_strategy)
Expand Down Expand Up @@ -488,10 +493,15 @@ async def upload_encrypted(self, file_path: str, upload_channel: CreateFileUploa

if display_progress: progress.close()


# encrypt file key
file_key = encrypt_file_key(plain_file_key=plain_file_key, keypair=plain_keypair)

# handle file conflicts on raise_on_err False
if res.status_code == 409 and resolution_strategy == 'fail':
self.logger.debug('Upload failed: File already exists')
return

complete_upload = self.make_upload_complete(file_name=file_name, keep_shares=keep_shares,
resolution_strategy=resolution_strategy, file_key=file_key)

Expand Down Expand Up @@ -530,7 +540,7 @@ async def complete_upload(self, upload_channel: UploadChannelResponse, payload:
self.logger.error("Uploading file failed.")
await self.dracoon.http.delete(upload_channel.uploadUrl)
await self.dracoon.handle_http_error(err=e, raise_on_err=raise_on_err)

return Node(**res.json())

async def upload_s3_unencrypted(self, file_path: str, upload_channel: CreateFileUploadResponse, keep_shares: bool = False,
Expand Down Expand Up @@ -659,7 +669,11 @@ async def upload_s3_unencrypted(self, file_path: str, upload_channel: CreateFile
s3_complete = self.make_s3_upload_complete(parts=parts, file_name=file_name, keep_share_links=keep_shares,
resolution_strategy=resolution_strategy)

await self.complete_s3_upload(upload_id=upload_channel.uploadId, upload=s3_complete, raise_on_err=raise_on_err)
upload = await self.complete_s3_upload(upload_id=upload_channel.uploadId, upload=s3_complete, raise_on_err=raise_on_err)

# handle resolutionStrategy fail and raise_on_err True with conflict (409)
if upload is not None:
return

time = POLL_WAIT

Expand Down Expand Up @@ -827,7 +841,11 @@ async def upload_s3_encrypted(self, file_path: str, upload_channel: CreateFileUp
s3_complete = self.make_s3_upload_complete(parts=parts, file_name=file_name, keep_share_links=keep_shares,
resolution_strategy=resolution_strategy, file_key=file_key)

await self.complete_s3_upload(upload_id=upload_channel.uploadId, upload=s3_complete, raise_on_err=raise_on_err)
upload = await self.complete_s3_upload(upload_id=upload_channel.uploadId, upload=s3_complete, raise_on_err=raise_on_err)

# handle resolutionStrategy fail and raise_on_err True with conflict (409)
if upload is not None:
return

time = POLL_WAIT

Expand Down Expand Up @@ -1056,17 +1074,17 @@ async def copy_nodes(self, target_id: int, copy_node: TransferNode, raise_on_err

self.logger.info("Copied nodes.")
return Node(**res.json())



def make_node_transfer(self, items: List[int], resolution_strategy: str = None, keep_share_links: bool = None, parent_id: int = None) -> TransferNode:
def make_node_transfer(self, items: List[NodeItem], resolution_strategy: str = None, keep_share_links: bool = None, parent_id: int = None) -> TransferNode:
""" make a node transfer payload for copy_nodes() and move_nodes() """
node_transfer = {
"items": items
}

if resolution_strategy: node_transfer["resolutionStrategy"] = resolution_strategy
if resolution_strategy is not None: node_transfer["resolutionStrategy"] = resolution_strategy
if keep_share_links is not None: node_transfer["keepShareLinks"] = keep_share_links
if parent_id: node_transfer["parentId"] = parent_id
if parent_id is not None: node_transfer["parentId"] = parent_id

return TransferNode(**node_transfer)

Expand Down Expand Up @@ -1534,7 +1552,7 @@ def make_folder_update(self, name: str = None, notes: str = None, creation_date:
"""" make a folder update payload for update_folder() """
folder = {}

if name: folder["name"] = notes
if name: folder["name"] = name
if notes: folder["notes"] = notes
if creation_date: folder["timestampCreation"] = creation_date
if modified_date: folder["timestampModification"] = modified_date
Expand Down
125 changes: 125 additions & 0 deletions examples/csv_permissions_standalone.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
""""
Script to generate CSV files with all permissions per node
Outputs individual CSVs per room with permissions
Usage: csv_permissions_standalone.py -p /your/output/path
17.05.2022 Octavio Simone
"""

import argparse
import sys
from typing import List
from dracoon import DRACOON, OAuth2ConnectionType
import asyncio
import csv
from pathlib import Path

from dracoon.eventlog.responses import AuditNodeInfo, AuditNodeResponse


# OAuth2 client credentials and target DRACOON instance.
# NOTE(review): placeholder values — replace with real credentials before running;
# avoid committing real secrets to source control.
client_id = 'xxxxxxxxxxxxxxxxxxx'
client_secret = 'xxxxxxxxxxxxxxxxxxx'
base_url = "https://staging.dracoon.com"

# get all rooms
async def get_rooms(room_id: int, dracoon: "DRACOON") -> "List[AuditNodeInfo]":
    """
    Recursively collect all rooms below the given parent node id.

    Fetches the first page of sub rooms, pulls any remaining pages in
    parallel batches, then recurses into every sub room and merges the
    results into one flat list.
    """
    subroom_list = await dracoon.eventlog.get_rooms(parent_id=room_id, offset=0)

    if subroom_list.range.total > 500:
        # collect remaining pages if more than 500 items (one request per 500er page)
        subroom_reqs = [dracoon.eventlog.get_rooms(parent_id=room_id, offset=offset) for offset in range(500, subroom_list.range.total, 500)]
        for batch in dracoon.batch_process(subroom_reqs):
            responses = await asyncio.gather(*batch)
            for response in responses:
                # paged responses carry their rooms in '.items'
                # (the previous check '"items" in response' is never true for a
                # response model, so extra pages were silently dropped)
                if response.items:
                    subroom_list.items.extend(response.items)

    # collect all sub rooms (requests) for given node id
    sub_reqs = [get_rooms(room_id=subroom.nodeId, dracoon=dracoon) for subroom in subroom_list.items]

    # batches of 10 / get parallel
    for batch in dracoon.batch_process(coro_list=sub_reqs, batch_size=10):
        responses = await asyncio.gather(*batch)
        for response in responses:
            # each recursive call returns a plain list of AuditNodeInfo — extend
            # with the list itself ('"items" in response' was always False for a
            # list, so nested sub rooms were never included in the result)
            if response:
                subroom_list.items.extend(response)

    return subroom_list.items

# get room permissions
async def get_room_permissions(room_id: int, dracoon: DRACOON) -> List[AuditNodeResponse]:
    """ Fetch the audited permissions for the rooms whose parent is the given room id """
    return await dracoon.eventlog.get_permissions(filter=f'nodeParentId:eq:{str(room_id)}')


def create_csv(permissions: "List[AuditNodeResponse]", path: str):
    """
    Write room permissions to '<path>/<nodeId>_<nodeName>.csv'.

    Only the first entry of *permissions* is written (one CSV per room).
    NOTE(review): entries beyond permissions[0] are ignored — confirm the
    caller only ever passes a single room per call.

    Raises ValueError when there are no permissions to write.
    """
    if len(permissions) < 1:
        raise ValueError('No content.')

    if len(permissions[0].auditUserPermissionList) < 1:
        raise ValueError('No content.')

    # normalize the output directory: strip a trailing separator and use
    # forward slashes so the f-string below builds a valid path on any OS
    if path[-1] == '/' or path[-1] == '\\':
        path = path[:-1]

    path = path.replace('\\', '/')

    file_path = f'{path}/{permissions[0].nodeId}_{permissions[0].nodeName}.csv'
    # newline='' is required by the csv module — without it every row gets an
    # extra blank line on Windows (csv handles line endings itself)
    with open(file_path, 'w', encoding='utf-8', newline='') as f:
        csv_writer = csv.writer(f, delimiter=',')
        csv_writer.writerow(['roomId', 'roomName', 'path', 'userId', 'firstName', 'lastName', 'login', 'manage', 'read', 'create', 'change', 'delete',
                             'manageShares', 'manageFileRequests', 'readRecycleBin', 'restoreRecycleBin', 'deleteRecycleBin'])

        # one row per user permission entry; 'manageShares'/'manageFileRequests'
        # columns hold manageDownloadShare/manageUploadShare values
        for permission in permissions[0].auditUserPermissionList:
            csv_writer.writerow([permissions[0].nodeId, permissions[0].nodeName, permissions[0].nodeParentPath, permission.userId, permission.userFirstName,
                                 permission.userLastName, permission.userLogin,
                                 permission.permissions.manage, permission.permissions.read, permission.permissions.create,
                                 permission.permissions.change, permission.permissions.delete, permission.permissions.manageDownloadShare, permission.permissions.manageUploadShare,
                                 permission.permissions.readRecycleBin, permission.permissions.restoreRecycleBin, permission.permissions.deleteRecycleBin])


# parse CLI arguments
def parse_arguments() -> str:
    """
    Parse CLI arguments and return the validated output directory path.

    Exits with status 1 when the provided path is not an existing directory
    (argparse itself exits when -p/--path is missing).
    """
    ap = argparse.ArgumentParser()
    ap.add_argument("-p", "--path", required=True, help="Path to store CSV files with permissions")

    # required=True makes argparse error out before we get here if -p is
    # missing, so no extra None check is needed (the old one was dead code)
    args = vars(ap.parse_args())

    path = args['path']

    target_folder = Path(path)

    if not target_folder.is_dir():
        print("Provided path is not a valid directory.")
        sys.exit(1)

    return path

async def main():
    """ Authorize against DRACOON and dump one permissions CSV per room to the given path """
    out_dir = parse_arguments()

    dracoon = DRACOON(base_url=base_url, client_id=client_id, client_secret=client_secret, raise_on_err=True)

    # OAuth2 authorization code flow: the user opens the printed URL,
    # authorizes, and pastes the resulting code back here
    print(dracoon.get_code_url())
    auth_code = input("Enter auth code: ")
    await dracoon.connect(connection_type=OAuth2ConnectionType.auth_code, auth_code=auth_code)

    room_list = await get_rooms(room_id=0, dracoon=dracoon)
    for room in room_list:
        room_permissions = await get_room_permissions(room_id=room.nodeId, dracoon=dracoon)
        try:
            create_csv(permissions=room_permissions, path=out_dir)
        except ValueError:
            # room without permission entries — nothing to write, skip it
            continue

# script entry point: run the async workflow on a fresh event loop
if __name__ == '__main__':
    asyncio.run(main())

0 comments on commit 8b0e71a

Please sign in to comment.