diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 8e72bf3..1ddd8e4 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,6 +1,12 @@
 Unreleased
 ==========
 
+0.2.6 (2020-05-12)
+==================
+- fix api cannot access cloudian with secure url
+- refactor upload file to use tempdir
+- change acl message when success
+
 0.2.5 (2020-04-29)
 ==================
 - Add directory download function
diff --git a/obs/api/app/controllers/api/storage.py b/obs/api/app/controllers/api/storage.py
index 0337645..ebdb27e 100644
--- a/obs/api/app/controllers/api/storage.py
+++ b/obs/api/app/controllers/api/storage.py
@@ -4,6 +4,7 @@
 import zipfile
 import tempfile
 import xmltodict
+import mimetypes
 
 from obs.libs import bucket
 from obs.libs import gmt
@@ -17,7 +18,7 @@
 
 
 def get_resources(access_key, secret_key):
-    endpoint = auth.get_endpoint()
+    endpoint = auth.get_endpoint("storage")
     sess = boto3.Session(aws_access_key_id=access_key, aws_secret_access_key=secret_key)
     s3_resource = sess.resource("s3", endpoint_url=endpoint)
     return s3_resource
@@ -301,37 +302,38 @@
         args = parser.parse_args()
         secret_key = args["secret_key"].replace(" ", "+")
 
-        file = request.files["files"]
-        filename = secure_filename(file.filename)
-        file.save(filename)
-
-        object_name = args["object_name"] if args["object_name"] else filename
-
-        try:
-            regex = r"[\"\{}^%`\]\[~<>|#]|[^\x00-\x7F]"
-            object_name = re.sub(regex, "", object_name)
+        with tempfile.TemporaryDirectory() as tempdir:
+            file = request.files["files"]
+            filename = secure_filename(file.filename)
+            object_name = args["object_name"] if args["object_name"] else filename
+            filename = os.path.join(tempdir, filename)
+            file.save(filename)
 
-            result = bucket.upload_object(
-                resource=get_resources(args["access_key"], secret_key),
-                bucket_name=bucket_name,
-                local_path=filename,
-                object_name=object_name,
-                content_type=file.content_type,
-            )
-            os.remove(filename)
+            try:
+                regex = r"[\"\{}^%`\]\[~<>|#]|[^\x00-\x7F]"
+                object_name = re.sub(regex, "", object_name)
 
-            if args["acl"]:
-                bucket.set_acl(
+                result = bucket.upload_object(
                     resource=get_resources(args["access_key"], secret_key),
                     bucket_name=bucket_name,
+                    local_path=filename,
                     object_name=object_name,
-                    acl_type="object",
-                    acl=args["acl"],
+                    content_type=mimetypes.guess_type(filename)[0],
                 )
-            return response(201, f"Object {object_name} uploaded successfully.", result)
-        except Exception as e:
-            current_app.logger.error(f"{e}")
-            return response(500, f"{e}")
+                if args["acl"]:
+                    bucket.set_acl(
+                        resource=get_resources(args["access_key"], secret_key),
+                        bucket_name=bucket_name,
+                        object_name=object_name,
+                        acl_type="object",
+                        acl=args["acl"],
+                    )
+                return response(
+                    201, f"Object {object_name} uploaded successfully.", result
+                )
+            except Exception as e:
+                current_app.logger.error(f"{e}")
+                return response(500, f"{e}")
 
 
 class usage(Resource):
@@ -395,7 +397,9 @@ def post(self):
                 acl_type=acl_type,
                 acl=args["acl"],
             )
-            return response(200, f"Added {acl} access to {acl_type} {name}.", result)
+            return response(
+                200, f"Added {args['acl']} access to {acl_type} {name}.", result
+            )
         except Exception as e:
             current_app.logger.error(f"{e}")
             return response(500, f"{e}")
diff --git a/obs/libs/auth.py b/obs/libs/auth.py
index a975484..727b91d 100644
--- a/obs/libs/auth.py
+++ b/obs/libs/auth.py
@@ -5,17 +5,20 @@
 from requests_aws4auth import AWS4Auth
 
 
-def get_endpoint(bucket=None):
+def get_endpoint(url, bucket=None):
     """generate endpoint.
 
    Use `http` if ssl False otherwise `https`
    Use `example.com` if bucket False otherwise `bucketname.example.com`
    """
-    hostname = os.environ.get("OBS_USER_URL")
+    hostname = {
+        "storage": os.environ.get("OBS_USER_URL"),
+        "admin": os.environ.get("OBS_ADMIN_URL"),
+    }
     ssl = strtobool(os.environ.get("OBS_USE_HTTPS"))
     protocol = f"http{ssl and 's' or ''}://"
     bucket_name = f"{bucket and bucket or ''}{bucket and '.' or ''}"
-    endpoint = f"{protocol}{bucket_name}{hostname}"
+    endpoint = f"{protocol}{bucket_name}{hostname[url]}"
     return endpoint
 
 
@@ -26,7 +29,7 @@ def resource():
     """
     access_key = os.environ.get("OBS_USER_ACCESS_KEY")
     secret_key = os.environ.get("OBS_USER_SECRET_KEY")
-    endpoint = get_endpoint()
+    endpoint = get_endpoint("storage")
     sess = boto3.Session(aws_access_key_id=access_key, aws_secret_access_key=secret_key)
     s3_resource = sess.resource("s3", endpoint_url=endpoint)
 
@@ -50,8 +53,7 @@ def admin_client():
     """:return: CloudianApiClient object"""
     user = os.environ.get("OBS_ADMIN_USERNAME")
     passwd = os.environ.get("OBS_ADMIN_PASSWORD")
-    endpoint = os.environ.get("OBS_ADMIN_URL")
-    endpoint_url = f"http://{endpoint}"
+    endpoint = get_endpoint("admin")
     port = os.environ.get("OBS_ADMIN_PORT")
-    client = CloudianAPIClient(url=endpoint_url, user=user, key=passwd, port=port)
+    client = CloudianAPIClient(url=endpoint, user=user, key=passwd, port=port)
     return client
diff --git a/obs/libs/bucket.py b/obs/libs/bucket.py
index b360ade..6f50977 100644
--- a/obs/libs/bucket.py
+++ b/obs/libs/bucket.py
@@ -36,7 +36,7 @@ def create_bucket(**kwargs):
     if kwargs.get("random_name"):
         bucket_name = gen_random_name(bucket_name)
 
-    endpoint = auth_lib.get_endpoint(bucket_name)
+    endpoint = auth_lib.get_endpoint("storage", bucket_name)
     headers = {"x-gmt-policyid": policy_id, "x-amz-acl": acl}
     response = requests.put(endpoint, auth=auth, headers=headers)
 
@@ -123,9 +123,13 @@ def upload_object(**kwargs):
     resource = kwargs.get("resource")
     bucket_name = kwargs.get("bucket_name")
 
-    resource.Object(bucket_name, filename).upload_file(
-        Filename=local_path, ExtraArgs={"ContentType": kwargs.get("content_type")}
-    )
+    resource_upload = resource.Object(bucket_name, filename)
+    if kwargs.get("content_type"):
+        resource_upload.upload_file(
+            Filename=local_path, ExtraArgs={"ContentType": kwargs.get("content_type")}
+        )
+    else:
+        resource_upload.upload_file(Filename=local_path)
 
 
 def copy_object(resource, src_bucket, src_object_name, dest_bucket, dest_object_name):
diff --git a/obs/libs/gmt.py b/obs/libs/gmt.py
index c83a932..4dd783d 100644
--- a/obs/libs/gmt.py
+++ b/obs/libs/gmt.py
@@ -28,7 +28,7 @@ def get_policies():
 
 def policy_id(bucket_name, auth):
     """Get GMT-Policy id from S3 API response headers."""
-    endpoint = auth_lib.get_endpoint(bucket_name)
+    endpoint = auth_lib.get_endpoint("storage", bucket_name)
     response = requests.get(endpoint, auth=auth)
     policy_id = response.headers.get("x-gmt-policyid")
 
diff --git a/tests/test_libs/test_auth.py b/tests/test_libs/test_auth.py
index 7637ab6..2619223 100644
--- a/tests/test_libs/test_auth.py
+++ b/tests/test_libs/test_auth.py
@@ -17,5 +17,5 @@ def fake_session(**kwargs):
 def test_resource(monkeypatch):
     monkeypatch.setattr(config, "load_config_file", fake_config)
     monkeypatch.setattr(boto3, "Session", fake_session)
-    monkeypatch.setattr(auth, "get_endpoint", lambda: None)
+    monkeypatch.setattr(auth, "get_endpoint", lambda url: None)
     assert auth.resource() == "s3_resource"
diff --git a/tests/test_libs/test_gmt.py b/tests/test_libs/test_gmt.py
index 9156606..5e489d7 100644
--- a/tests/test_libs/test_gmt.py
+++ b/tests/test_libs/test_gmt.py
@@ -82,7 +82,7 @@ def fake_auth():
 
 def test_policy_id(monkeypatch):
     monkeypatch.setattr(requests, "get", fake_request)
-    monkeypatch.setattr(auth_lib, "get_endpoint", lambda bucket_name: None)
+    monkeypatch.setattr(auth_lib, "get_endpoint", lambda url, bucket_name: None)
     assert "dd7e84cfe467c0fc11b5b075ac9acd73" == gmt.policy_id(
         "awesome-bucket", fake_auth()
     )
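
A minimal sketch (not part of the patch) of how the reworked get_endpoint(url, bucket=None) is expected to resolve endpoints after this change, assuming the obs package and its dependencies are importable; the hostnames and environment values below are hypothetical.

# Sketch only: illustrates the new two-argument get_endpoint() from obs/libs/auth.py.
import os

from obs.libs import auth

# Hypothetical configuration; get_endpoint() reads these at call time.
os.environ["OBS_USER_URL"] = "storage.example.com"
os.environ["OBS_ADMIN_URL"] = "admin.example.com"
os.environ["OBS_USE_HTTPS"] = "true"

print(auth.get_endpoint("storage"))                    # https://storage.example.com
print(auth.get_endpoint("storage", "awesome-bucket"))  # https://awesome-bucket.storage.example.com
print(auth.get_endpoint("admin"))                      # https://admin.example.com

Because admin_client() now builds its URL through get_endpoint("admin") instead of hard-coding "http://", the admin API is reached over HTTPS whenever OBS_USE_HTTPS is enabled, which corresponds to the "fix api cannot access cloudian with secure url" entry in the CHANGELOG.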