Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use newer versions of AWS libraries #4

Open
wants to merge 16 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion .github/workflows/build.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,9 @@ jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: [3.8, 3.9, "3.10", "3.11"]
python-version: [3.9, "3.10", "3.11"]

steps:
- uses: actions/checkout@v3
Expand Down
19 changes: 10 additions & 9 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,8 @@ def readme():
description='Thumbor AWS extensions',
long_description=readme(),
long_description_content_type='text/markdown',
author='Thumbor-Community & William King',
author_email='h.briand@gmail.com', # Original author email is: willtrking@gmail.com
author='Noun Project & Thumbor-Community & William King',
author_email='drew@thenounproject.com', # Original author email is: willtrking@gmail.com, thumbor-community email h.briand@gmail.com
zip_safe=False,
include_package_data=True,
packages=find_packages(),
Expand All @@ -50,23 +50,24 @@ def readme():
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
],
keywords='thumbor aws',
install_requires=[
'python-dateutil>=2.8',
'thumbor>=7.0.0a2,<8',
'aiobotocore==0.12.0',
'boto3>=1.9,<1.13',
"python-dateutil>=2.8",
"thumbor>=7.0.0a2,<8",
"aiobotocore>=2.0",
"boto3",
],
extras_require={
'tests': [
'coverage>=6.5',
'moto[server]>=4.0',
'mock>=4.0',
'pytest>=7.2',
'pytest-retry',
],
},
)
86 changes: 46 additions & 40 deletions tc_aws/aws/bucket.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
# found in the LICENSE file.

import aiobotocore
from aiobotocore.session import ClientCreatorContext
from botocore.client import Config
from thumbor.utils import logger
from thumbor.engines import BaseEngine
Expand All @@ -26,6 +27,7 @@ def __new__(cls, bucket, region, endpoint, *args, **kwargs):
"""
This handles all communication with AWS API
"""

def __init__(self, bucket, region, endpoint, max_retry=None):
"""
Constructor
Expand All @@ -38,30 +40,25 @@ def __init__(self, bucket, region, endpoint, max_retry=None):

config = None
if max_retry is not None:
config = Config(
retries=dict(
max_attempts=max_retry
)
)
config = Config(retries=dict(max_attempts=max_retry))

if self._client is None:
self._client = aiobotocore.get_session().create_client(
's3',
region_name=region,
endpoint_url=endpoint,
config=config
)
self.region = region
self.config = config
self.endpoint = endpoint

async def exists(self, path):
"""
Checks if an object exists at a given path
:param string path: Path or 'key' to retrieve AWS object
"""
try:
await self._client.head_object(
Bucket=self._bucket,
Key=self._clean_key(path),
)
async with aiobotocore.session.get_session().create_client(
"s3", region_name=self.region, endpoint_url=self.endpoint, config=self.config,
) as client:
await client.head_object(
Bucket=self._bucket,
Key=self._clean_key(path),
)
except Exception:
return False
return True
Expand All @@ -71,31 +68,35 @@ async def get(self, path):
Returns object at given path
:param string path: Path or 'key' to retrieve AWS object
"""
async with aiobotocore.session.get_session().create_client(
"s3", region_name=self.region, endpoint_url=self.endpoint, config=self.config,
) as client:
return await client.get_object(
Bucket=self._bucket,
Key=self._clean_key(path),
)

return await self._client.get_object(
Bucket=self._bucket,
Key=self._clean_key(path),
)

async def get_url(self, path, method='GET', expiry=3600):
async def get_url(self, path, method="GET", expiry=3600):
"""
Generates the presigned url for given key & methods
:param string path: Path or 'key' for requested object
:param string method: Method for requested URL
:param int expiry: URL validity time
"""
async with aiobotocore.session.get_session().create_client(
"s3", region_name=self.region, endpoint_url=self.endpoint, config=self.config,
) as client:
url = await client.generate_presigned_url(
ClientMethod="get_object",
Params={
"Bucket": self._bucket,
"Key": self._clean_key(path),
},
ExpiresIn=expiry,
HttpMethod=method,
)

url = await self._client.generate_presigned_url(
ClientMethod='get_object',
Params={
'Bucket': self._bucket,
'Key': self._clean_key(path),
},
ExpiresIn=expiry,
HttpMethod=method,
)

return url
return url

async def put(self, path, data, metadata=None, reduced_redundancy=False, encrypt_key=False):
"""
Expand All @@ -121,19 +122,24 @@ async def put(self, path, data, metadata=None, reduced_redundancy=False, encrypt
args['ServerSideEncryption'] = 'AES256'

if metadata is not None:
args['Metadata'] = metadata

return await self._client.put_object(**args)
args["Metadata"] = metadata
async with aiobotocore.session.get_session().create_client(
"s3", region_name=self.region, endpoint_url=self.endpoint, config=self.config,
) as client:
return await client.put_object(**args)

async def delete(self, path):
"""
Deletes key at given path
:param string path: Path or 'key' to delete
"""
return await self._client.delete_object(
Bucket=self._bucket,
Key=self._clean_key(path),
)
async with aiobotocore.session.get_session().create_client(
"s3", region_name=self.region, endpoint_url=self.endpoint, config=self.config,
) as client:
return await client.delete_object(
Bucket=self._bucket,
Key=self._clean_key(path),
)

def _clean_key(self, path):
logger.debug('Cleaning key: {path!r}'.format(path=path))
Expand Down
14 changes: 9 additions & 5 deletions tests/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,9 @@
from tc_aws.aws.bucket import Bucket
from tests.fixtures.storage_fixture import s3_bucket

logging.basicConfig(level=logging.CRITICAL)
logging.basicConfig(level=logging.DEBUG)

LOG = logging.getLogger(__name__)

os.environ["TEST_SERVER_MODE"] = "true"

Expand All @@ -34,12 +36,13 @@ def start_service(host, port):
args = [sys.executable, "-m", "moto.server", "-H", host,
"-p", str(port)]

process = sp.Popen(args, stderr=sp.PIPE)
process = sp.Popen(args, stderr=sp.PIPE, stdout=sp.PIPE)
url = "http://{host}:{port}".format(host=host, port=port)

for i in range(0, 30):
if process.poll() is not None:
process.communicate()
out, err = process.communicate()
LOG.debug('Received stdout %s, stderr %s', out, err)
break

try:
Expand All @@ -54,10 +57,11 @@ def start_service(host, port):
return process


def stop_process(process):
def stop_process(process: sp.Popen):
try:
process.send_signal(signal.SIGTERM)
process.communicate()
stdout, stderr = process.communicate()
LOG.debug('Received while stopping: stdout %s, stderr %s', stdout, stderr)
except Exception:
process.kill()
outs, errors = process.communicate()
Expand Down
10 changes: 7 additions & 3 deletions tests/test_result_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from thumbor.config import Config
from thumbor.context import Context
from tornado.testing import gen_test
from pytest import mark

from .fixtures.storage_fixture import IMAGE_BYTES, get_server, s3_bucket
from tc_aws.result_storages.s3_storage import Storage
Expand All @@ -23,7 +24,8 @@ class Request(object):

class S3StorageTestCase(S3MockedAsyncTestCase):

@gen_test
@mark.flaky
@gen_test(timeout=10)
async def test_can_get_image(self):
config = Config(TC_AWS_RESULT_STORAGE_BUCKET=s3_bucket)
ctx = Context(config=config, server=get_server('ACME-SEC'))
Expand All @@ -37,7 +39,8 @@ async def test_can_get_image(self):

self.assertEqual(topic.buffer, IMAGE_BYTES)

@gen_test
@mark.flaky
@gen_test(timeout=10)
async def test_can_get_randomized_image(self):
config = Config(TC_AWS_RESULT_STORAGE_BUCKET=s3_bucket, TC_AWS_RANDOMIZE_KEYS=True)
ctx = Context(config=config, server=get_server('ACME-SEC'))
Expand All @@ -51,7 +54,8 @@ async def test_can_get_randomized_image(self):

self.assertEqual(topic.buffer, IMAGE_BYTES)

@gen_test
@mark.flaky
@gen_test(timeout=10)
async def test_can_get_image_with_metadata(self):
config = Config(TC_AWS_RESULT_STORAGE_BUCKET=s3_bucket, TC_AWS_STORE_METADATA=True)
ctx = Context(config=config, server=get_server('ACME-SEC'))
Expand Down
16 changes: 11 additions & 5 deletions tests/test_s3_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from thumbor.context import Context
from thumbor.loaders import LoaderResult
from tornado.testing import gen_test
from pytest import mark

from .fixtures.storage_fixture import IMAGE_PATH, IMAGE_BYTES, s3_bucket
from tc_aws.loaders import s3_loader
Expand All @@ -18,7 +19,8 @@

class S3LoaderTestCase(S3MockedAsyncTestCase):

@gen_test
@mark.flaky
@gen_test(timeout=10)
async def test_can_load_image(self):
client = botocore.session.get_session().create_client('s3', endpoint_url='http://localhost:5000')

Expand All @@ -39,7 +41,8 @@ async def test_can_load_image(self):
self.assertTrue('size' in loader_result.metadata)
self.assertIsNone(loader_result.error)

@gen_test
@mark.flaky
@gen_test(timeout=10)
async def test_returns_404_on_no_image(self):
conf = Config(
TC_AWS_LOADER_BUCKET=s3_bucket,
Expand All @@ -51,7 +54,8 @@ async def test_returns_404_on_no_image(self):
self.assertIsNone(loader_result.buffer)
self.assertEqual(loader_result.error, LoaderResult.ERROR_NOT_FOUND)

@gen_test
@mark.flaky
@gen_test(timeout=10)
async def test_can_validate_buckets(self):
conf = Config(
TC_AWS_ALLOWED_BUCKETS=['whitelist_bucket'],
Expand All @@ -62,14 +66,16 @@ async def test_can_validate_buckets(self):
self.assertIsNone(image.buffer)

@patch('thumbor.loaders.http_loader.load')
@gen_test
@mark.flaky
@gen_test(timeout=10)
async def test_should_use_http_loader(self, load_sync_patch):
conf = Config(TC_AWS_ENABLE_HTTP_LOADER=True)
await s3_loader.load(Context(config=conf), 'http://foo.bar')
self.assertTrue(load_sync_patch.called)

@patch('thumbor.loaders.http_loader.load')
@gen_test
@mark.flaky
@gen_test(timeout=10)
async def test_should_not_use_http_loader_if_not_prefixed_with_scheme(self, load_sync_patch):
conf = Config(TC_AWS_ENABLE_HTTP_LOADER=True)
await s3_loader.load(Context(config=conf), 'foo/bar')
Expand Down
Loading
Loading