Skip to content

Commit

Permalink
Added custom docker action
Browse files Browse the repository at this point in the history
  • Loading branch information
mfkimbell committed Nov 6, 2023
1 parent 3f901fe commit 3a761a8
Show file tree
Hide file tree
Showing 5 changed files with 74 additions and 1 deletion.
9 changes: 9 additions & 0 deletions .github/actions/deploy-s3-docker/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# Container image for the deploy-s3-docker GitHub Action.
FROM python:3

# Copy and install dependencies first so this layer stays cached
# when only deployment.py changes.
COPY requirements.txt /requirements.txt

# --no-cache-dir keeps pip's download cache out of the image layer.
RUN pip install --no-cache-dir -r requirements.txt

COPY deployment.py /deployment.py

# GitHub runs this entrypoint when the action is invoked.
CMD ["python", "/deployment.py"]
19 changes: 19 additions & 0 deletions .github/actions/deploy-s3-docker/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# Metadata for the Docker-based S3 deployment action.
name: 'Deploy to AWS S3'
description: 'Deploy a static website via AWS S3.'
inputs:
  bucket:
    description: 'The S3 bucket name.'
    required: true
  bucket-region:
    description: 'The S3 bucket region.'
    required: false
    default: 'us-east-1'
  dist-folder:
    description: 'The folder containing the deployable files.'
    required: true
outputs:
  # Set by deployment.py via the GITHUB_OUTPUT file.
  website-url:
    description: 'The URL of the deployed website.'
runs:
  using: 'docker'
  # Built from the Dockerfile that sits next to this file.
  image: 'Dockerfile'
37 changes: 37 additions & 0 deletions .github/actions/deploy-s3-docker/deployment.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
import os
import boto3
import mimetypes
from botocore.config import Config


def run():
    """Upload the contents of the dist folder to an S3 bucket and emit the site URL.

    Reads the action inputs from environment variables: GitHub exposes each
    Docker-action input as INPUT_<NAME> with the name upper-cased (dashes are
    kept, hence the unusual "INPUT_BUCKET-REGION" key). Credentials are picked
    up automatically by boto3 from the standard AWS_ACCESS_KEY_ID /
    AWS_SECRET_ACCESS_KEY environment variables set on the runner.

    Writes the 'website-url' action output to the GITHUB_OUTPUT file.
    """
    bucket = os.environ["INPUT_BUCKET"]
    bucket_region = os.environ["INPUT_BUCKET-REGION"]
    dist_folder = os.environ["INPUT_DIST-FOLDER"]

    configuration = Config(region_name=bucket_region)
    s3_client = boto3.client("s3", config=configuration)

    for root, _subdirs, files in os.walk(dist_folder):
        for file in files:
            local_path = os.path.join(root, file)
            # Object key must be relative to the dist folder and use forward
            # slashes; relpath is safer than string-replacing the prefix
            # (which would also break on Windows path separators).
            object_key = os.path.relpath(local_path, dist_folder).replace(os.sep, "/")
            # guess_type returns (None, None) for unknown extensions and
            # boto3 rejects ContentType=None, so fall back to a generic type.
            content_type = mimetypes.guess_type(file)[0] or "application/octet-stream"
            s3_client.upload_file(
                local_path,
                bucket,
                object_key,
                ExtraArgs={"ContentType": content_type},
            )

    website_url = f"http://{bucket}.s3-website-{bucket_region}.amazonaws.com"
    # Set the 'website-url' output via the GITHUB_OUTPUT file
    # (the old ::set-output workflow command is no longer supported).
    with open(os.environ["GITHUB_OUTPUT"], "a") as gh_output:
        print(f"website-url={website_url}", file=gh_output)


if __name__ == "__main__":
    run()
7 changes: 7 additions & 0 deletions .github/actions/deploy-s3-docker/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Pinned dependencies for deployment.py: the boto3 S3 client plus its
# transitive dependencies (botocore 1.27.x is the matching pair for
# boto3 1.24.x).
boto3==1.24.71
botocore==1.27.71
jmespath==1.0.1
python-dateutil==2.8.2
s3transfer==0.6.0
six==1.16.0
urllib3==1.26.12
3 changes: 2 additions & 1 deletion .github/workflows/deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,8 @@ jobs:
run: ls
- name: Deploy site
id: deploy
uses: ./.github/actions/deploy-s3-javascript
# uses: ./.github/actions/deploy-s3-javascript # if I wanted the JavaScript action
uses: ./.github/actions/deploy-s3-docker
# here we add secret values to the environment of
# the GitHub runner; the AWS CLI looks for keys with
# these names when making calls
Expand Down

0 comments on commit 3a761a8

Please sign in to comment.