Merge pull request #519 from JiriPapousek/fix-sha-extractor-bdd
[CCXDEV-11895] Fix SHA extractor tests
JiriPapousek authored Nov 1, 2023
2 parents 619e2ea + faa6a20 commit 22aaa0a
Showing 6 changed files with 137 additions and 35 deletions.
3 changes: 3 additions & 0 deletions docker-compose.yml
@@ -2,6 +2,7 @@ version: "3.9"
services:
  # "pod" with the tests
  bddtests:
+    build: .
    image: quay.io/cloudservices/insights-behavioral-spec:latest
    entrypoint:
      - /bin/sh
@@ -26,6 +27,7 @@ services:
    profiles:
      - test-notification-services
      - test-aggregator
+      - test-sha-extractor
    image: quay.io/ccxdev/kafka-no-zk:latest
    ports:
      - 9092:9092
@@ -36,6 +38,7 @@
  minio:
    profiles:
      - test-exporter
+      - test-sha-extractor
    image: minio/minio
    command:
      - server
8 changes: 4 additions & 4 deletions features/SHA_Extractor/sha_extractor.feature
@@ -19,7 +19,7 @@ Feature: SHA Extractor


Scenario: Check if SHA extractor is able to consume messages from Kafka
-When an archive without workload info is announced in "incoming_topic" topic
+When S3 and Kafka are populated with an archive without workload_info
And SHA extractor service is started in group "check"
Then SHA extractor should consume message about this event
And this message should contain following attributes
@@ -35,7 +35,7 @@

Scenario: Check if SHA extractor is able to consume messages from Kafka and then download tarball
Given SHA extractor service is started
-When an archive without workload info is announced in "incoming_topic" topic
+When S3 and Kafka are populated with an archive without workload_info
Then SHA extractor should consume message about this event
And this message should contain following attributes
| Attribute | Description | Type |
@@ -51,7 +51,7 @@

Scenario: Check if SHA extractor is able to consume messages from Kafka, download tarball, and take SHA images
Given SHA extractor service is started
-When an archive without workload info is announced in "incoming_topic" topic
+When S3 and Kafka are populated with an archive without workload_info
Then SHA extractor should consume message about this event
And this message should contain following attributes
| Attribute | Description | Type |
Expand All @@ -69,7 +69,7 @@ Feature: SHA Extractor

Scenario: Check if SHA extractor is able to finish the processing of SHA images
Given SHA extractor service is started
-When an archive with workload info is announced in "incoming_topic" topic
+When S3 and Kafka are populated with an archive with workload_info
Then SHA extractor should consume message about this event
And this message should contain following attributes
| Attribute | Description | Type |
17 changes: 1 addition & 16 deletions features/steps/insights_sha_extractor.py
@@ -87,21 +87,6 @@ def start_sha_extractor(context, group_id=None):
    context.sha_extractor = sha_extractor


-@when('an archive {with_or_without} workload info is announced in "{topic_var}" topic')
-def produce_event(context, with_or_without, topic_var):
-    """Produce an event into specified topic."""
-    topic_name = context.__dict__["_stack"][0][topic_var]
-    if with_or_without == "with":
-        msg_path = "test_data/upload.json"
-    else:
-        msg_path = "test_data/upload_no_workloadinfo.json"
-
-    with open(msg_path, "r") as f:
-        event_data = f.read().encode("utf-8")
-    headers = [("service", b"testareno")]
-    kafka_util.send_event(context.hostname, topic_name, event_data, headers)


@when('the file "config/workload_info.json" is not found')
def check_workload_info_not_present(context):
    """Step when workload_info.json is not in the archive."""
@@ -184,7 +169,7 @@ def check_url(context):
@then("SHA extractor should download tarball from given URL attribute")
def check_start_download(context):
    """Check that sha extractor is able to start download."""
-    expected_msg = "Downloading http://localhost:8000/"
+    expected_msg = "Downloading"
    assert message_in_buffer(
        expected_msg, context.sha_extractor.stdout
    ), "download not started"
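The relaxed assertion above only requires that some "Downloading" line appears on the service's stdout, since the tarball URL is now a presigned S3 address rather than a fixed http://localhost:8000/ one. The message_in_buffer helper it calls is not part of this diff; a minimal sketch of what such a scan could look like, assuming a byte-oriented stdout pipe and a timeout parameter (names and defaults here are illustrative, not the repository's actual implementation):

import time


def message_in_buffer(expected_msg, buffer, timeout=10.0):
    """Scan a process stdout pipe for expected_msg, giving up after timeout seconds."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        line = buffer.readline()  # note: blocks until the service emits a line
        if not line:
            return False  # pipe closed before the message appeared
        if expected_msg in line.decode("utf-8", errors="replace"):
            return True
    return False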
97 changes: 97 additions & 0 deletions features/steps/insights_sha_extractor_s3.py
@@ -0,0 +1,97 @@
+# Copyright © 2023, Jiří Papoušek, Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Implementation of logic behind usage of S3 and Kafka in SHA extractor tests."""
+
+import boto3
+import json
+import logging
+import os
+from behave import when
+from botocore.exceptions import ClientError
+from src import kafka_util
+
+
+def create_presigned_url(s3_client, bucket_name, object_name, expiration=3600):
+    """Generate a presigned URL to share an S3 object."""
+    try:
+        response = s3_client.generate_presigned_url('get_object',
+                                                    Params={'Bucket': bucket_name,
+                                                            'Key': object_name},
+                                                    ExpiresIn=expiration)
+    except ClientError as e:
+        logging.error(e)
+        return None
+
+    # The response contains the presigned URL
+    return response
+
+
+def use_real_storage(context, archive_key, msg_path):
+    """Load data to real S3 storage and publish the JSON message to Kafka."""
+    s3_host = os.getenv("S3_HOST", default="localhost")
+    s3_port = os.getenv("S3_PORT", default="9000")
+    s3_access_key = os.getenv("S3_ACCESS_KEY")
+    s3_secret_access_key = os.getenv("S3_SECRET_ACCESS_KEY")
+
+    s3_client = boto3.client('s3',
+                             endpoint_url=f'http://{s3_host}:{s3_port}',
+                             aws_access_key_id=s3_access_key,
+                             aws_secret_access_key=s3_secret_access_key)
+
+    try:
+        s3_client.head_bucket(Bucket='test')
+    except ClientError:
+        s3_client.create_bucket(Bucket='test')
+
+    with open(f"test_data/{archive_key}.tar.gz", 'rb') as archive:
+        s3_client.put_object(Body=archive, Bucket="test", Key=archive_key)
+
+    topic_name = context.__dict__["_stack"][0]["incoming_topic"]
+    presigned_url = create_presigned_url(s3_client, "test", archive_key)
+
+    with open(msg_path, "r") as f:
+        msg = f.read().encode("utf-8")
+    event_data = json.loads(msg)
+    event_data["url"] = presigned_url
+    event_data = json.dumps(event_data).encode('utf-8')
+    headers = [("service", b"testareno")]
+    kafka_util.send_event(context.hostname, topic_name, event_data, headers)
+
+
+def use_mock_storage(context, archive_key, msg_path):
+    """Publish JSON messages to Kafka with URLs for mock storage."""
+    topic_name = context.__dict__["_stack"][0]["incoming_topic"]
+
+    with open(msg_path, "r") as f:
+        event_data = f.read().encode("utf-8")
+    headers = [("service", b"testareno")]
+    kafka_util.send_event(context.hostname, topic_name, event_data, headers)
+
+
+@when('S3 and Kafka are populated with an archive {with_or_without} workload_info')
+def populate_s3(context, with_or_without):
+    """Try to load archive to real S3 storage and publish JSON message to Kafka."""
+    if with_or_without == "with":
+        archive_key = "archive"
+        msg_path = "test_data/upload.json"
+    else:
+        archive_key = "archive_no_workloadinfo"
+        msg_path = "test_data/upload_no_workloadinfo.json"
+
+    # use real storage and in case of failure, use mock s3 instead
+    try:
+        use_real_storage(context, archive_key, msg_path)
+    except Exception:
+        use_mock_storage(context, archive_key, msg_path)
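The new step tries the real MinIO-backed path first and falls back to publishing the stock mock-storage message if anything fails. The presigned-URL round trip at the heart of use_real_storage can be sanity-checked in isolation; a minimal sketch, assuming a fresh local MinIO on localhost:9000 with the default minioadmin credentials (the bucket, key, and payload below are illustrative):

import urllib.request

import boto3

s3 = boto3.client("s3",
                  endpoint_url="http://localhost:9000",
                  aws_access_key_id="minioadmin",
                  aws_secret_access_key="minioadmin")
s3.create_bucket(Bucket="test")
s3.put_object(Body=b"dummy archive bytes", Bucket="test", Key="archive")

# the signed URL lets the SHA extractor download the object without
# holding any S3 credentials of its own
url = s3.generate_presigned_url("get_object",
                                Params={"Bucket": "test", "Key": "archive"},
                                ExpiresIn=3600)
assert urllib.request.urlopen(url).read() == b"dummy archive bytes"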
40 changes: 30 additions & 10 deletions insights_sha_extractor_test.sh
@@ -14,6 +14,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

+export PATH_TO_LOCAL_SHA_EXTRACTOR=${PATH_TO_LOCAL_SHA_EXTRACTOR:="../ccx-sha-extractor"}
+exit_trap_command=""

function prepare_venv() {
    echo "Preparing environment"
    # shellcheck disable=SC1091
@@ -23,12 +26,16 @@ function prepare_venv() {
    python3 "$(which pip3)" install --no-cache -r requirements.in || exit 1
    python3 "$(which pip3)" install --no-cache -r requirements/insights_sha_extractor.txt || exit 1

-    git clone --depth=1 [email protected]:ccx/ccx-sha-extractor.git
-    cd ccx-sha-extractor || exit
+    if [[ ! -d $PATH_TO_LOCAL_SHA_EXTRACTOR ]] ; then
+        git clone --depth=1 [email protected]:ccx/ccx-sha-extractor.git $PATH_TO_LOCAL_SHA_EXTRACTOR
+        add_trap "rm -rf ./ccx-sha-extractor"
+    fi
+    cwd=$(pwd)
+    cd $PATH_TO_LOCAL_SHA_EXTRACTOR || exit
    pip install --no-cache-dir -U pip setuptools wheel
    pip install --no-cache-dir -r requirements.txt
    pip install -e .
-    cd ..
+    cd "$cwd" || exit 1

    echo "Environment ready"
}
@@ -56,16 +63,33 @@ function run_kafka() {
    done

    export kafka_cid
+    add_trap "docker kill ${kafka_cid}"
}

-function run_mock_s3(){
+function run_mock_s3() {
    uvicorn mocks.s3.s3:app &
    s3_pid=$!
+    add_trap "kill -9 $s3_pid"
}

-run_kafka
+function cleanup {
+    eval "$exit_trap_command"
+}

-run_mock_s3
+function add_trap() {
+    local to_add=$1
+    if [ -z $exit_trap_command ] ; then
+        exit_trap_command="$to_add"
+    else
+        exit_trap_command="$exit_trap_command; $to_add"
+    fi
+    trap cleanup EXIT
+}

+if ! [ "$ENV_DOCKER" ] ; then
+    run_kafka
+    run_mock_s3
+fi

prepare_venv

@@ -74,7 +98,3 @@ PYTHONDONTWRITEBYTECODE=1 python3 -m behave --no-capture \
    --format=progress2 \
    --tags=-skip --tags=-managed \
    -D dump_errors=true @test_list/insights_sha_extractor.txt "$@"

-docker kill "$kafka_cid"
-kill -9 $s3_pid
-rm -rf ./ccx-sha-extractor
7 changes: 2 additions & 5 deletions run_in_docker.sh
@@ -71,10 +71,7 @@ copy_files() {
            copy_python_project "$cid" "$path_to_service"
            ;;
        "insights-sha-extractor-tests")
-            echo -e "\033[0;31mThese tests are not ready to be run. Aborting!\033[0m"
-            echo "This option will be enabled after https://issues.redhat.com/browse/CCXDEV-11895 is done."
-            exit 0
-            # copy_python_project $cid $path_to_service
+            copy_python_project "$cid" "$path_to_service"
            ;;
        "notification-service-tests")
            copy_go_executable "$cid" "$path_to_service" "ccx-notification-service"
@@ -103,7 +100,7 @@ docker_compose_profiles["exporter-tests"]="test-exporter"
docker_compose_profiles["inference-service-tests"]=""
docker_compose_profiles["insights-content-service-tests"]=""
docker_compose_profiles["insights-content-template-renderer-tests"]=""
-docker_compose_profiles["insights-sha-extractor-tests"]="text-sha-extractor"
+docker_compose_profiles["insights-sha-extractor-tests"]="test-sha-extractor"
docker_compose_profiles["notification-service-tests"]="test-notification-services"
docker_compose_profiles["notification-writer-tests"]="test-notification-services"
docker_compose_profiles["smart-proxy-tests"]=""
