diff --git a/docker-compose.yml b/docker-compose.yml index 20fa43d8..1b562a5a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,6 +2,7 @@ version: "3.9" services: # "pod" with the tests bddtests: + build: . image: quay.io/cloudservices/insights-behavioral-spec:latest entrypoint: - /bin/sh @@ -26,6 +27,7 @@ services: profiles: - test-notification-services - test-aggregator + - test-sha-extractor image: quay.io/ccxdev/kafka-no-zk:latest ports: - 9092:9092 @@ -36,6 +38,7 @@ services: minio: profiles: - test-exporter + - test-sha-extractor image: minio/minio command: - server diff --git a/features/SHA_Extractor/sha_extractor.feature b/features/SHA_Extractor/sha_extractor.feature index c43d97aa..1236f25d 100644 --- a/features/SHA_Extractor/sha_extractor.feature +++ b/features/SHA_Extractor/sha_extractor.feature @@ -19,7 +19,7 @@ Feature: SHA Extractor Scenario: Check if SHA extractor is able to consume messages from Kafka - When an archive without workload info is announced in "incoming_topic" topic + When S3 and Kafka are populated with an archive without workload_info And SHA extractor service is started in group "check" Then SHA extractor should consume message about this event And this message should contain following attributes @@ -35,7 +35,7 @@ Feature: SHA Extractor Scenario: Check if SHA extractor is able to consume messages from Kafka and then download tarball Given SHA extractor service is started - When an archive without workload info is announced in "incoming_topic" topic + When S3 and Kafka are populated with an archive without workload_info Then SHA extractor should consume message about this event And this message should contain following attributes | Attribute | Description | Type | @@ -51,7 +51,7 @@ Feature: SHA Extractor Scenario: Check if SHA extractor is able to consume messages from Kafka, download tarball, and take SHA images Given SHA extractor service is started - When an archive without workload info is announced in 
"incoming_topic" topic + When S3 and Kafka are populated with an archive without workload_info Then SHA extractor should consume message about this event And this message should contain following attributes | Attribute | Description | Type | @@ -69,7 +69,7 @@ Feature: SHA Extractor Scenario: Check if SHA extractor is able to finish the processing of SHA images Given SHA extractor service is started - When an archive with workload info is announced in "incoming_topic" topic + When S3 and Kafka are populated with an archive with workload_info Then SHA extractor should consume message about this event And this message should contain following attributes | Attribute | Description | Type | diff --git a/features/steps/insights_sha_extractor.py b/features/steps/insights_sha_extractor.py index a0de4906..24b6bce3 100644 --- a/features/steps/insights_sha_extractor.py +++ b/features/steps/insights_sha_extractor.py @@ -87,21 +87,6 @@ def start_sha_extractor(context, group_id=None): context.sha_extractor = sha_extractor -@when('an archive {with_or_without} workload info is announced in "{topic_var}" topic') -def produce_event(context, with_or_without, topic_var): - """Produce an event into specified topic.""" - topic_name = context.__dict__["_stack"][0][topic_var] - if with_or_without == "with": - msg_path = "test_data/upload.json" - else: - msg_path = "test_data/upload_no_workloadinfo.json" - - with open(msg_path, "r") as f: - event_data = f.read().encode("utf-8") - headers = [("service", b"testareno")] - kafka_util.send_event(context.hostname, topic_name, event_data, headers) - - @when('the file "config/workload_info.json" is not found') def check_workload_info_not_present(context): """Step when workload_info.json is not in the archive.""" @@ -184,7 +169,7 @@ def check_url(context): @then("SHA extractor should download tarball from given URL attribute") def check_start_download(context): """Check that sha extractor is able to start download.""" - expected_msg = "Downloading 
http://localhost:8000/" + expected_msg = "Downloading" assert message_in_buffer( expected_msg, context.sha_extractor.stdout ), "download not started" diff --git a/features/steps/insights_sha_extractor_s3.py b/features/steps/insights_sha_extractor_s3.py new file mode 100644 index 00000000..3c5ae0dc --- /dev/null +++ b/features/steps/insights_sha_extractor_s3.py @@ -0,0 +1,97 @@ +# Copyright © 2023, Jiří Papoušek, Red Hat, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Implementation of logic behind usage of S3 and Kafka in SHA extractor tests.""" + +import boto3 +import json +import logging +import os +from behave import when +from botocore.exceptions import ClientError +from src import kafka_util + + +def create_presigned_url(s3_client, bucket_name, object_name, expiration=3600): + """Generate a presigned URL to share an S3 object.""" + try: + response = s3_client.generate_presigned_url('get_object', + Params={'Bucket': bucket_name, + 'Key': object_name}, + ExpiresIn=expiration) + except ClientError as e: + logging.error(e) + return None + + # The response contains the presigned URL + return response + + +def use_real_storage(context, archive_key, msg_path): + """Load data to real S3 storage and publish the JSON message to Kafka.""" + s3_host = os.getenv("S3_HOST", default="localhost") + s3_port = os.getenv("S3_PORT", default="9000") + s3_access_key = os.getenv("S3_ACCESS_KEY") + s3_secret_access_key = os.getenv("S3_SECRET_ACCESS_KEY") + + s3_client = 
boto3.client('s3', + endpoint_url=f'http://{s3_host}:{s3_port}', + aws_access_key_id=s3_access_key, + aws_secret_access_key=s3_secret_access_key) + + try: + s3_client.head_bucket(Bucket='test') + except ClientError: + s3_client.create_bucket(Bucket='test') + + with open(f"test_data/{archive_key}.tar.gz", 'rb') as archive: + s3_client.put_object(Body=archive, Bucket="test", Key=archive_key) + + topic_name = context.__dict__["_stack"][0]["incoming_topic"] + presigned_url = create_presigned_url(s3_client, "test", archive_key) + + with open(msg_path, "r") as f: + msg = f.read().encode("utf-8") + event_data = json.loads(msg) + event_data["url"] = presigned_url + event_data = json.dumps(event_data).encode('utf-8') + headers = [("service", b"testareno")] + kafka_util.send_event(context.hostname, topic_name, event_data, headers) + + +def use_mock_storage(context, archive_key, msg_path): + """Publish JSON messages to Kafka with URLs for mock storage.""" + topic_name = context.__dict__["_stack"][0]["incoming_topic"] + + with open(msg_path, "r") as f: + event_data = f.read().encode("utf-8") + headers = [("service", b"testareno")] + kafka_util.send_event(context.hostname, topic_name, event_data, headers) + + +@when('S3 and Kafka are populated with an archive {with_or_without} workload_info') +def populate_s3(context, with_or_without): + """Try to load archive to real S3 storage and publish JSON message to Kafka.""" + if with_or_without == "with": + archive_key = "archive" + msg_path = "test_data/upload.json" + else: + archive_key = "archive_no_workloadinfo" + msg_path = "test_data/upload_no_workloadinfo.json" + + # use real storage and in case of failure, use mock s3 instead + try: + use_real_storage(context, archive_key, msg_path) + except Exception: + use_mock_storage(context, archive_key, msg_path) diff --git a/insights_sha_extractor_test.sh b/insights_sha_extractor_test.sh index ea40cd4e..ab9cd4c8 100755 --- a/insights_sha_extractor_test.sh +++ 
b/insights_sha_extractor_test.sh @@ -14,6 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +export PATH_TO_LOCAL_SHA_EXTRACTOR=${PATH_TO_LOCAL_SHA_EXTRACTOR:="../ccx-sha-extractor"} +exit_trap_command="" + function prepare_venv() { echo "Preparing environment" # shellcheck disable=SC1091 @@ -23,12 +26,16 @@ function prepare_venv() { python3 "$(which pip3)" install --no-cache -r requirements.in || exit 1 python3 "$(which pip3)" install --no-cache -r requirements/insights_sha_extractor.txt || exit 1 - git clone --depth=1 git@gitlab.cee.redhat.com:ccx/ccx-sha-extractor.git - cd ccx-sha-extractor || exit + if [[ ! -d $PATH_TO_LOCAL_SHA_EXTRACTOR ]] ; then + git clone --depth=1 git@gitlab.cee.redhat.com:ccx/ccx-sha-extractor.git "$PATH_TO_LOCAL_SHA_EXTRACTOR" + add_trap "rm -rf $PATH_TO_LOCAL_SHA_EXTRACTOR" + fi + cwd=$(pwd) + cd "$PATH_TO_LOCAL_SHA_EXTRACTOR" || exit pip install --no-cache-dir -U pip setuptools wheel pip install --no-cache-dir -r requirements.txt pip install -e . - cd .. + cd "$cwd" || exit 1 echo "Environment ready" } @@ -56,16 +63,33 @@ function run_kafka() { done export kafka_cid + add_trap "docker kill ${kafka_cid}" } -function run_mock_s3(){ +function run_mock_s3() { uvicorn mocks.s3.s3:app & s3_pid=$! + add_trap "kill -9 $s3_pid" } -run_kafka +function cleanup { + eval "$exit_trap_command" } -run_mock_s3 +function add_trap() { + local to_add=$1 + if [ -z "$exit_trap_command" ] ; then + exit_trap_command="$to_add" + else + exit_trap_command="$exit_trap_command; $to_add" + fi + trap cleanup EXIT +} + +if ! 
[ "$ENV_DOCKER" ] ; then + run_kafka + run_mock_s3 +fi prepare_venv @@ -74,7 +98,3 @@ PYTHONDONTWRITEBYTECODE=1 python3 -m behave --no-capture \ --format=progress2 \ --tags=-skip --tags=-managed \ -D dump_errors=true @test_list/insights_sha_extractor.txt "$@" - -docker kill "$kafka_cid" -kill -9 $s3_pid -rm -rf ./ccx-sha-extractor diff --git a/run_in_docker.sh b/run_in_docker.sh index 82bdc7e6..a7214685 100755 --- a/run_in_docker.sh +++ b/run_in_docker.sh @@ -71,10 +71,7 @@ copy_files() { copy_python_project "$cid" "$path_to_service" ;; "insights-sha-extractor-tests") - echo -e "\033[0;31mThese tests are not ready to be run. Aborting!\033[0m" - echo "This option will be enabled after https://issues.redhat.com/browse/CCXDEV-11895 is done." - exit 0 - # copy_python_project $cid $path_to_service + copy_python_project "$cid" "$path_to_service" ;; "notification-service-tests") copy_go_executable "$cid" "$path_to_service" "ccx-notification-service" @@ -103,7 +100,7 @@ docker_compose_profiles["exporter-tests"]="test-exporter" docker_compose_profiles["inference-service-tests"]="" docker_compose_profiles["insights-content-service-tests"]="" docker_compose_profiles["insights-content-template-renderer-tests"]="" -docker_compose_profiles["insights-sha-extractor-tests"]="text-sha-extractor" +docker_compose_profiles["insights-sha-extractor-tests"]="test-sha-extractor" docker_compose_profiles["notification-service-tests"]="test-notification-services" docker_compose_profiles["notification-writer-tests"]="test-notification-services" docker_compose_profiles["smart-proxy-tests"]=""