Replace mocked S3 with S3 in docker-compose
1 parent ad43296, commit daa9348
Showing 5 changed files with 96 additions and 22 deletions.
@@ -0,0 +1,87 @@
import boto3
import json
import logging
import os
from behave import when
from botocore.exceptions import ClientError
from src import kafka_util

def create_presigned_url(s3_client, bucket_name, object_name, expiration=3600):
    """Generate a presigned URL to share an S3 object.

    :param bucket_name: string
    :param object_name: string
    :param expiration: Time in seconds for the presigned URL to remain valid
    :return: Presigned URL as string. If error, returns None.
    """
    # Generate a presigned URL for the S3 object
    try:
        response = s3_client.generate_presigned_url(
            'get_object',
            Params={'Bucket': bucket_name, 'Key': object_name},
            ExpiresIn=expiration)
    except ClientError as e:
        logging.error(e)
        return None

    # The response contains the presigned URL
    return response

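The presigned URL returned above is later consumed by the service under test. As a quick sanity check in the test environment, the URL can also be fetched directly; the helper below is a minimal sketch, not part of this commit, assuming the MinIO endpoint is reachable from the test runner.

import urllib.request

def presigned_url_is_reachable(url, timeout=10):
    """Return True if the presigned URL serves the object with HTTP 200."""
    try:
        with urllib.request.urlopen(url, timeout=timeout) as response:
            return response.status == 200
    except Exception:
        return False
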
def use_real_storage(context, archive_key, msg_path):
    s3_host = os.getenv("S3_HOST", default="localhost")
    s3_port = os.getenv("S3_PORT", default="9000")
    s3_access_key = os.getenv("S3_ACCESS_KEY")
    s3_secret_access_key = os.getenv("S3_SECRET_ACCESS_KEY")

    s3_client = boto3.client('s3',
                             endpoint_url=f'http://{s3_host}:{s3_port}',
                             aws_access_key_id=s3_access_key,
                             aws_secret_access_key=s3_secret_access_key)

    # Make sure the test bucket exists before uploading the archive
    try:
        s3_client.head_bucket(Bucket='test')
    except ClientError:
        s3_client.create_bucket(Bucket='test')

    with open(f"test_data/{archive_key}.tar.gz", 'rb') as archive:
        s3_client.put_object(Body=archive, Bucket="test", Key=archive_key)

    topic_name = context.__dict__["_stack"][0]["incoming_topic"]
    presigned_url = create_presigned_url(s3_client, "test", archive_key)

    # Rewrite the "url" field of the upload message so the consumer fetches the
    # archive from the real S3 endpoint, then publish the event to Kafka
    with open(msg_path, "r") as f:
        msg = f.read().encode("utf-8")
        event_data = json.loads(msg)
        event_data["url"] = presigned_url
        event_data = json.dumps(event_data).encode('utf-8')
        headers = [("service", b"testareno")]
        kafka_util.send_event(context.hostname, topic_name, event_data, headers)

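To confirm that the upload in use_real_storage actually landed in the bucket, a test could read the object back and compare it with the local fixture. The helper below is a hypothetical check, not part of the commit; it reuses the same environment variables and the hard-coded "test" bucket.

import os
import boto3

def uploaded_archive_matches_local(archive_key):
    """Fetch the uploaded archive back from S3 and compare it with the local fixture."""
    s3_client = boto3.client(
        's3',
        endpoint_url=f"http://{os.getenv('S3_HOST', 'localhost')}:{os.getenv('S3_PORT', '9000')}",
        aws_access_key_id=os.getenv("S3_ACCESS_KEY"),
        aws_secret_access_key=os.getenv("S3_SECRET_ACCESS_KEY"))
    remote = s3_client.get_object(Bucket="test", Key=archive_key)["Body"].read()
    with open(f"test_data/{archive_key}.tar.gz", "rb") as f:
        return remote == f.read()
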
def use_mock_storage(context, archive_key, msg_path):
    topic_name = context.__dict__["_stack"][0]["incoming_topic"]

    # With the mocked storage the upload message is sent to Kafka unchanged
    with open(msg_path, "r") as f:
        event_data = f.read().encode("utf-8")
        headers = [("service", b"testareno")]
        kafka_util.send_event(context.hostname, topic_name, event_data, headers)

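Both helpers publish the upload event through src.kafka_util.send_event, whose implementation is not part of this diff. A minimal sketch of such a helper, assuming the kafka-python package and that context.hostname is a "host:port" bootstrap server string:

from kafka import KafkaProducer  # assumption: kafka-python is the client in use

def send_event(hostname, topic, event_data, headers=None):
    """Publish a single message with optional headers and block until it is delivered."""
    producer = KafkaProducer(bootstrap_servers=hostname)
    producer.send(topic, value=event_data, headers=headers or [])
    producer.flush()
    producer.close()
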
@when('S3 and Kafka are populated with an archive {with_or_without} workload_info')
def populate_s3(context, with_or_without):
    if with_or_without == "with":
        archive_key = "archive"
        msg_path = "test_data/upload.json"
    else:
        archive_key = "archive_no_workloadinfo"
        msg_path = "test_data/upload_no_workloadinfo.json"

    # Prefer the real S3 storage from docker-compose; fall back to the mock if
    # it is not available
    try:
        use_real_storage(context, archive_key, msg_path)
    except Exception:
        use_mock_storage(context, archive_key, msg_path)

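The step above reads incoming_topic from behave's internal context stack and hostname directly from the context; how these values are provided is not shown in this diff. A hypothetical environment.py fragment that would supply them (names and defaults here are assumptions for illustration only):

import os

def before_all(context):
    # Assumed values; the real configuration mechanism is outside this commit
    context.hostname = os.getenv("KAFKA_HOST", "localhost:9092")
    context.incoming_topic = os.getenv("INCOMING_TOPIC", "incoming_topic")
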
@@ -28,7 +28,7 @@ function prepare_venv() {

if [[ ! -d $PATH_TO_LOCAL_SHA_EXTRACTOR ]] ; then
    git clone --depth=1 [email protected]:ccx/ccx-sha-extractor.git $PATH_TO_LOCAL_SHA_EXTRACTOR
    add_trap "rm -rf ./ccx-sha-extractor"
    add_trap "rm -rf ./ccx-sha-extractor"
fi
cwd=$(pwd)
cd $PATH_TO_LOCAL_SHA_EXTRACTOR || exit

@@ -88,9 +88,9 @@ function add_trap() {

if ! [ "$ENV_DOCKER" ] ; then
    run_kafka
    run_mock_s3
fi

run_mock_s3
prepare_venv

# shellcheck disable=SC2068

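The shell diff above changes where run_mock_s3 is invoked relative to the ENV_DOCKER check, so the Python step's try/except fallback is what decides between real and mock storage at runtime. One way to make that decision explicit, sketched here as a hypothetical helper rather than anything in this commit, is to probe the configured S3 endpoint first:

import os
import socket

def real_s3_available(timeout=2):
    """Return True if something is listening on the configured S3 endpoint."""
    host = os.getenv("S3_HOST", "localhost")
    port = int(os.getenv("S3_PORT", "9000"))
    try:
        with socket.create_connection((host, port), timeout=timeout):
            return True
    except OSError:
        return False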