import json
from io import BytesIO
from os import path
from urllib.parse import quote_plus, unquote_plus

import boto3
from botocore.client import ClientError
from PIL import Image


class Resize4Me():
    """
    Resizes and uploads images to an S3 bucket.
    """
    def __init__(self, config_file='resize4me_settings.json'):
        self.s3 = boto3.resource('s3')
        self.source_bucket = None
        self.destination_buckets = None
        self.config = self.parse_config(config_file)

    def parse_config(self, config_file):
        """
        Parses and validates the configuration file, which should live
        in the project root directory and be named resize4me_settings.json.

        Returns:
            <dict> config - the configuration file as a dictionary.
        """
        with open(config_file, 'r') as file:
            config = json.load(file)

        self.source_bucket = config.get('source_bucket')
        self.destination_buckets = config.get('destination_buckets')

        if not self.source_bucket:
            raise ValueError('A source bucket must be configured')

        if not self.destination_buckets:
            raise ValueError('At least one destination bucket must be configured')

        for bucket in self.destination_buckets:
            if not bucket.get('name'):
                raise ValueError('A destination bucket must have a name')
            if not bucket.get('width_size'):
                raise ValueError('A destination bucket must have a width size')

        return config
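
    # Example resize4me_settings.json (a minimal sketch; the bucket names
    # below are placeholders, not values from this repository):
    #
    # {
    #     "source_bucket": "resize4me-source",
    #     "destination_buckets": [
    #         {"name": "resize4me-300px", "width_size": 300},
    #         {"name": "resize4me-600px", "width_size": 600}
    #     ]
    # }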

    def verify_buckets(self):
        """
        Verifies that the buckets specified in the configuration file
        are accessible.
        """
        buckets = [bucket.get('name') for bucket in self.destination_buckets]
        buckets.append(self.source_bucket)

        for bucket in buckets:
            try:
                self.s3.meta.client.head_bucket(Bucket=bucket)
            except ClientError as e:
                raise Exception('Bucket {}: {}'.format(bucket, e))

    def check_extension(self, key):
        """
        Verifies that the file extension is valid.
        Valid formats are JPG and PNG.

        Args:
            <str> key - a filename (usually an S3 object key).

        Returns:
            <str> extension - the file/key extension, including the dot.
        """
        extension = path.splitext(key)[1].lower()

        if extension in ['.jpg', '.jpeg', '.png']:
            return extension
        else:
            raise ValueError('File format not supported')
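
    # For example, check_extension('photo.JPG') returns '.jpg', while
    # check_extension('notes.txt') raises ValueError.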

    def resize_image(self, body, extension, size):
        """
        Proportionally resizes an image using `size` as the base width.

        Args:
            <bytes> body - the image content as bytes.
            <str> extension - the image extension.
            <int> size - base width used for the resize.

        Returns:
            <BytesIO> buffer - the resized image content.
        """
        img = Image.open(BytesIO(body))
        wpercent = size / float(img.size[0])
        hsize = int(float(img.size[1]) * wpercent)
        # Image.LANCZOS replaces the deprecated Image.ANTIALIAS alias,
        # which was removed in Pillow 10.
        img = img.resize((size, hsize), Image.LANCZOS)

        buffer = BytesIO()
        if extension in ['.jpeg', '.jpg']:
            img_format = 'JPEG'
        elif extension == '.png':
            img_format = 'PNG'
        img.save(buffer, img_format)
        buffer.seek(0)

        return buffer
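
    # Worked example of the proportional resize above: for a 1024x768 image
    # and size=300, wpercent = 300 / 1024 and
    # hsize = int(768 * 300 / 1024) = 225, so the output is 300x225.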

    def upload(self, bucket_name, key, body):
        """
        Uploads a file to an S3 bucket with a `public-read` ACL.

        Args:
            <str> bucket_name - S3 bucket name.
            <str> key - S3 object key.
            <binary> body - the content of the file to be uploaded.
        """
        obj = self.s3.Object(
            bucket_name=bucket_name,
            key=key,
        )
        obj.put(ACL='public-read', Body=body)

        print('File saved at {}/{}'.format(
            bucket_name,
            key,
        ))

    def response(self, key):
        """
        Generates a dictionary response with all objects generated.

        Args:
            <str> key - S3 key of the uploaded file.

        Returns:
            <dict> response - dictionary with URLs for all generated files.
        """
        aws_domain = 'https://s3.amazonaws.com'
        response = {
            self.source_bucket: '{}/{}/{}'.format(
                aws_domain,
                self.source_bucket,
                quote_plus(key),
            )
        }

        for bucket in self.destination_buckets:
            dict_key = 'resized-{}px'.format(bucket.get('width_size'))
            response[dict_key] = '{}/{}/{}'.format(
                aws_domain,
                bucket.get('name'),
                quote_plus(key),
            )

        return response
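
    # Example return value (a sketch assuming the key 'cat.jpg' and the
    # sample configuration shown above):
    #
    # {
    #     "resize4me-source": "https://s3.amazonaws.com/resize4me-source/cat.jpg",
    #     "resized-300px": "https://s3.amazonaws.com/resize4me-300px/cat.jpg",
    #     "resized-600px": "https://s3.amazonaws.com/resize4me-600px/cat.jpg"
    # }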


def lambda_handler(event, context):
    """
    Given a configuration file with source_bucket and destination_buckets,
    resizes any valid file uploaded to the source_bucket and saves the
    results into the destination buckets.
    """
    r4me = Resize4Me()
    r4me.verify_buckets()

    for record in event.get('Records'):
        object_key = unquote_plus(record['s3']['object']['key'])
        object_extension = r4me.check_extension(object_key)

        # Source file
        obj = r4me.s3.Object(
            bucket_name=r4me.source_bucket,
            key=object_key,
        )
        obj_body = obj.get()['Body'].read()

        # Resized files
        for bucket in r4me.destination_buckets:
            bucket_name = bucket.get('name')
            bucket_size = bucket.get('width_size')

            resized_image = r4me.resize_image(
                obj_body,
                object_extension,
                bucket_size,
            )
            r4me.upload(bucket_name, object_key, resized_image)
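

# Minimal local smoke test (a sketch, not part of the Lambda deployment):
# builds a fake S3 event in the shape AWS delivers for object-created
# notifications and invokes the handler. Assumes the configured source
# bucket already contains an object named 'example.jpg' and that valid
# AWS credentials are available locally.
if __name__ == '__main__':
    fake_event = {
        'Records': [
            {'s3': {'object': {'key': 'example.jpg'}}}
        ]
    }
    lambda_handler(fake_event, None)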