Replace mocked S3 with S3 in docker-compose
JiriPapousek committed Oct 31, 2023
1 parent ad43296 commit daa9348
Showing 5 changed files with 96 additions and 22 deletions.
2 changes: 2 additions & 0 deletions docker-compose.yml
@@ -2,6 +2,7 @@ version: "3.9"
 services:
   # "pod" with the tests
   bddtests:
+    build: .
     image: quay.io/cloudservices/insights-behavioral-spec:latest
     entrypoint:
     - /bin/sh
@@ -37,6 +38,7 @@ services:
   minio:
     profiles:
     - test-exporter
+    - test-sha-extractor
     image: minio/minio
     command:
     - server
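
For context: Compose services guarded by "profiles" are started only when one of their profiles is activated, so adding "test-sha-extractor" here is what brings MinIO up for the SHA extractor suite. A minimal sketch of the invocation (assuming Compose v2; the profile name comes from the diff above):

    docker compose --profile test-sha-extractor up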
8 changes: 4 additions & 4 deletions features/SHA_Extractor/sha_extractor.feature
@@ -19,7 +19,7 @@ Feature: SHA Extractor
 
 
   Scenario: Check if SHA extractor is able to consume messages from Kafka
-    When an archive without workload info is announced in "incoming_topic" topic
+    When S3 and Kafka are populated with an archive without workload_info
     And SHA extractor service is started in group "check"
     Then SHA extractor should consume message about this event
     And this message should contain following attributes
@@ -35,7 +35,7 @@ Feature: SHA Extractor
 
   Scenario: Check if SHA extractor is able to consume messages from Kafka and then download tarball
     Given SHA extractor service is started
-    When an archive without workload info is announced in "incoming_topic" topic
+    When S3 and Kafka are populated with an archive without workload_info
     Then SHA extractor should consume message about this event
     And this message should contain following attributes
       | Attribute | Description | Type |
@@ -51,7 +51,7 @@ Feature: SHA Extractor
 
   Scenario: Check if SHA extractor is able to consume messages from Kafka, download tarball, and take SHA images
     Given SHA extractor service is started
-    When an archive without workload info is announced in "incoming_topic" topic
+    When S3 and Kafka are populated with an archive without workload_info
     Then SHA extractor should consume message about this event
     And this message should contain following attributes
       | Attribute | Description | Type |
@@ -69,7 +69,7 @@ Feature: SHA Extractor
 
   Scenario: Check if SHA extractor is able to finish the processing of SHA images
     Given SHA extractor service is started
-    When an archive with workload info is announced in "incoming_topic" topic
+    When S3 and Kafka are populated with an archive with workload_info
     Then SHA extractor should consume message about this event
     And this message should contain following attributes
       | Attribute | Description | Type |
17 changes: 1 addition & 16 deletions features/steps/insights_sha_extractor.py
@@ -87,21 +87,6 @@ def start_sha_extractor(context, group_id=None):
     context.sha_extractor = sha_extractor
 
 
-@when('an archive {with_or_without} workload info is announced in "{topic_var}" topic')
-def produce_event(context, with_or_without, topic_var):
-    """Produce an event into specified topic."""
-    topic_name = context.__dict__["_stack"][0][topic_var]
-    if with_or_without == "with":
-        msg_path = "test_data/upload.json"
-    else:
-        msg_path = "test_data/upload_no_workloadinfo.json"
-
-    with open(msg_path, "r") as f:
-        event_data = f.read().encode("utf-8")
-    headers = [("service", b"testareno")]
-    kafka_util.send_event(context.hostname, topic_name, event_data, headers)
-
-
 @when('the file "config/workload_info.json" is not found')
 def check_workload_info_not_present(context):
     """Step when workload_info.json is not in the archive."""
@@ -184,7 +169,7 @@ def check_url(context):
 @then("SHA extractor should download tarball from given URL attribute")
 def check_start_download(context):
     """Check that sha extractor is able to start download."""
-    expected_msg = "Downloading http://localhost:8000/"
+    expected_msg = "Downloading"
     assert message_in_buffer(
         expected_msg, context.sha_extractor.stdout
     ), "download not started"
87 changes: 87 additions & 0 deletions features/steps/insights_sha_extractor_s3.py
@@ -0,0 +1,87 @@
+import boto3
+import json
+import logging
+import os
+from behave import when
+from botocore.exceptions import ClientError
+from src import kafka_util
+
+
+def create_presigned_url(s3_client, bucket_name, object_name, expiration=3600):
+    """Generate a presigned URL to share an S3 object.
+
+    :param s3_client: boto3 client used to talk to the storage
+    :param bucket_name: string
+    :param object_name: string
+    :param expiration: Time in seconds for the presigned URL to remain valid
+    :return: Presigned URL as string. If error, returns None.
+    """
+    # Generate a presigned URL for the S3 object
+    try:
+        response = s3_client.generate_presigned_url(
+            'get_object',
+            Params={'Bucket': bucket_name, 'Key': object_name},
+            ExpiresIn=expiration)
+    except ClientError as e:
+        logging.error(e)
+        return None
+
+    # The response contains the presigned URL
+    return response
+
+
+def use_real_storage(context, archive_key, msg_path):
+    """Upload the archive to S3 and announce it in Kafka via a presigned URL."""
+    s3_host = os.getenv("S3_HOST", default="localhost")
+    s3_port = os.getenv("S3_PORT", default="9000")
+    s3_access_key = os.getenv("S3_ACCESS_KEY")
+    s3_secret_access_key = os.getenv("S3_SECRET_ACCESS_KEY")
+
+    s3_client = boto3.client(
+        's3',
+        endpoint_url=f'http://{s3_host}:{s3_port}',
+        aws_access_key_id=s3_access_key,
+        aws_secret_access_key=s3_secret_access_key)
+
+    # Make sure the "test" bucket exists before uploading into it
+    try:
+        s3_client.head_bucket(Bucket='test')
+    except ClientError:
+        s3_client.create_bucket(Bucket='test')
+
+    with open(f"test_data/{archive_key}.tar.gz", 'rb') as archive:
+        s3_client.put_object(Body=archive, Bucket="test", Key=archive_key)
+
+    topic_name = context.__dict__["_stack"][0]["incoming_topic"]
+    presigned_url = create_presigned_url(s3_client, "test", archive_key)
+
+    # Point the upload event at the presigned URL before sending it to Kafka
+    with open(msg_path, "r") as f:
+        msg = f.read().encode("utf-8")
+    event_data = json.loads(msg)
+    event_data["url"] = presigned_url
+    event_data = json.dumps(event_data).encode('utf-8')
+    headers = [("service", b"testareno")]
+    kafka_util.send_event(context.hostname, topic_name, event_data, headers)
+
+
+def use_mock_storage(context, archive_key, msg_path):
+    """Announce the prepackaged archive in Kafka, relying on the mocked storage."""
+    topic_name = context.__dict__["_stack"][0]["incoming_topic"]
+
+    with open(msg_path, "r") as f:
+        event_data = f.read().encode("utf-8")
+    headers = [("service", b"testareno")]
+    kafka_util.send_event(context.hostname, topic_name, event_data, headers)
+
+
+@when('S3 and Kafka are populated with an archive {with_or_without} workload_info')
+def populate_s3(context, with_or_without):
+    """Upload the archive and produce the matching upload event into the incoming topic."""
+    if with_or_without == "with":
+        archive_key = "archive"
+        msg_path = "test_data/upload.json"
+    else:
+        archive_key = "archive_no_workloadinfo"
+        msg_path = "test_data/upload_no_workloadinfo.json"
+
+    try:
+        use_real_storage(context, archive_key, msg_path)
+    except Exception:
+        # Fall back to the mocked storage when real S3 is not reachable
+        use_mock_storage(context, archive_key, msg_path)
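
For context, use_real_storage reads its S3 connection settings from the environment. A minimal sketch of a local run against the docker-compose MinIO (the variable names come from the code above; the credential values are placeholders, and the behave invocation is an assumption about how this suite is driven):

    export S3_HOST=localhost
    export S3_PORT=9000
    export S3_ACCESS_KEY=test_access_key          # placeholder
    export S3_SECRET_ACCESS_KEY=test_secret_key   # placeholder
    behave features/SHA_Extractor/sha_extractor.feature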
4 changes: 2 additions & 2 deletions insights_sha_extractor_test.sh
@@ -28,7 +28,7 @@ function prepare_venv() {
 
     if [[ ! -d $PATH_TO_LOCAL_SHA_EXTRACTOR ]] ; then
         git clone --depth=1 git@github.com:ccx/ccx-sha-extractor.git $PATH_TO_LOCAL_SHA_EXTRACTOR
-	add_trap "rm -rf ./ccx-sha-extractor"
+        add_trap "rm -rf ./ccx-sha-extractor"
     fi
     cwd=$(pwd)
     cd $PATH_TO_LOCAL_SHA_EXTRACTOR || exit
@@ -88,9 +88,9 @@ function add_trap() {
 
 if ! [ "$ENV_DOCKER" ] ; then
     run_kafka
+    run_mock_s3
 fi
 
-run_mock_s3
 prepare_venv
 
 # shellcheck disable=SC2068
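
With this move, the mocked S3 server is started only for local (non-container) runs; under docker-compose the real MinIO service takes its place. A sketch of the two modes (assuming ENV_DOCKER is the only switch, as the snippet above suggests):

    ./insights_sha_extractor_test.sh                # local: starts Kafka and the mocked S3
    ENV_DOCKER=1 ./insights_sha_extractor_test.sh   # compose: Kafka and MinIO provided by docker-compose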
