diff --git a/api/host.py b/api/host.py index 5c6bbda610..a7b2a9d96e 100644 --- a/api/host.py +++ b/api/host.py @@ -23,9 +23,7 @@ # given priority in the host deduplication process. # NOTE: The order of this tuple is important. The order defines # the priority. -ELEVATED_CANONICAL_FACT_FIELDS = ("insights_id", - "subscription_manager_id", - ) +ELEVATED_CANONICAL_FACT_FIELDS = ("insights_id", "subscription_manager_id") logger = get_logger(__name__) @@ -46,8 +44,7 @@ def add_host_list(host_list): response_host_list.append({**e.to_json(), "host": host}) except ValidationError as e: number_of_errors += 1 - logger.exception("Input validation error while adding host", - extra={"host": host}) + logger.exception("Input validation error while adding host", extra={"host": host}) response_host_list.append({"status": 400, "title": "Bad Request", "detail": str(e.messages), @@ -62,9 +59,7 @@ def add_host_list(host_list): "detail": "Could not complete operation", "host": host}) - response = {'total': len(response_host_list), - 'errors': number_of_errors, - 'data': response_host_list} + response = {'total': len(response_host_list), 'errors': number_of_errors, 'data': response_host_list} return _build_json_response(response, status=207) @@ -80,18 +75,14 @@ def _add_host(host): input_host = Host.from_json(validated_input_host_dict.data) - if ( - not current_identity.is_trusted_system and - current_identity.account_number != input_host.account - ): + if not current_identity.is_trusted_system and current_identity.account_number != input_host.account: raise InventoryException( title="Invalid request", detail="The account number associated with the user does not " "match the account number associated with the host" ) - existing_host = find_existing_host(input_host.account, - input_host.canonical_facts) + existing_host = find_existing_host(input_host.account, input_host.canonical_facts) if existing_host: return update_existing_host(existing_host, input_host) @@ -104,8 +95,7 @@ def find_existing_host(account_number, canonical_facts): existing_host = _find_host_by_elevated_ids(account_number, canonical_facts) if not existing_host: - existing_host = find_host_by_canonical_facts(account_number, - canonical_facts) + existing_host = find_host_by_canonical_facts(account_number, canonical_facts) return existing_host @@ -115,8 +105,7 @@ def _find_host_by_elevated_ids(account_number, canonical_facts): for elevated_cf_name in ELEVATED_CANONICAL_FACT_FIELDS: cf_value = canonical_facts.get(elevated_cf_name) if cf_value: - existing_host = find_host_by_canonical_facts(account_number, - {elevated_cf_name: cf_value}) + existing_host = find_host_by_canonical_facts(account_number, {elevated_cf_name: cf_value}) if existing_host: return existing_host @@ -180,23 +169,15 @@ def get_host_list( order_how=None ): if fqdn: - query = find_hosts_by_canonical_facts( - current_identity.account_number, {"fqdn": fqdn} - ) + query = find_hosts_by_canonical_facts(current_identity.account_number, {"fqdn": fqdn}) elif display_name: - query = find_hosts_by_display_name( - current_identity.account_number, display_name - ) + query = find_hosts_by_display_name(current_identity.account_number, display_name) elif hostname_or_id: - query = find_hosts_by_hostname_or_id( - current_identity.account_number, hostname_or_id) + query = find_hosts_by_hostname_or_id(current_identity.account_number, hostname_or_id) elif insights_id: - query = find_hosts_by_canonical_facts( - current_identity.account_number, {"insights_id": insights_id}) + query = 
find_hosts_by_canonical_facts(current_identity.account_number, {"insights_id": insights_id}) else: - query = Host.query.filter( - Host.account == current_identity.account_number - ) + query = Host.query.filter(Host.account == current_identity.account_number) try: order_by = _params_to_order_by(order_by, order_how) @@ -208,9 +189,7 @@ def get_host_list( query_results = query.paginate(page, per_page, True) logger.debug(f"Found hosts: {query_results.items}") - return _build_paginated_host_list_response( - query_results.total, page, per_page, query_results.items - ) + return _build_paginated_host_list_response(query_results.total, page, per_page, query_results.items) def _order_how(column, order_how): @@ -235,9 +214,7 @@ def _params_to_order_by(order_by=None, order_how=None): else: ordering = (Host.display_name.asc(),) elif order_by: - raise ValueError( - "Unsupported ordering column, use \"updated\" or \"display_name\"." - ) + raise ValueError("Unsupported ordering column, use \"updated\" or \"display_name\".") elif order_how: raise ValueError( "Providing ordering direction without a column is not supported. " @@ -259,17 +236,12 @@ def _build_paginated_host_list_response(total, page, per_page, host_list): def _build_json_response(json_data, status=200): - return flask.Response(ujson.dumps(json_data), - status=status, - mimetype="application/json") + return flask.Response(ujson.dumps(json_data), status=status, mimetype="application/json") def find_hosts_by_display_name(account, display_name): logger.debug("find_hosts_by_display_name(%s)" % display_name) - return Host.query.filter( - (Host.account == account) - & Host.display_name.comparator.contains(display_name) - ) + return Host.query.filter((Host.account == account) & Host.display_name.comparator.contains(display_name)) def find_hosts_by_canonical_facts(account_number, canonical_facts): @@ -292,12 +264,9 @@ def find_hosts_by_hostname_or_id(account_number, hostname): logger.debug("Adding id (uuid) to the filter list") except Exception: # Do not filter using the id - logger.debug("The hostname (%s) could not be converted into a UUID", - hostname, - exc_info=True) + logger.debug("The hostname (%s) could not be converted into a UUID", hostname, exc_info=True) - return Host.query.filter(sqlalchemy.and_(*[Host.account == account_number, - sqlalchemy.or_(*filter_list)])) + return Host.query.filter(sqlalchemy.and_(*[Host.account == account_number, sqlalchemy.or_(*filter_list)])) @api_operation @@ -332,8 +301,7 @@ def delete_by_id(host_id_list): @api_operation @metrics.api_request_time.time() def get_host_by_id(host_id_list, page=1, per_page=100, order_by=None, order_how=None): - query = _get_host_list_by_id_list(current_identity.account_number, - host_id_list) + query = _get_host_list_by_id_list(current_identity.account_number, host_id_list) try: order_by = _params_to_order_by(order_by, order_how) @@ -345,25 +313,17 @@ def get_host_by_id(host_id_list, page=1, per_page=100, order_by=None, order_how= logger.debug(f"Found hosts: {query_results.items}") - return _build_paginated_host_list_response( - query_results.total, page, per_page, query_results.items - ) + return _build_paginated_host_list_response(query_results.total, page, per_page, query_results.items) def _get_host_list_by_id_list(account_number, host_id_list): - return Host.query.filter( - (Host.account == account_number) - & Host.id.in_(host_id_list) - ) + return Host.query.filter((Host.account == account_number) & Host.id.in_(host_id_list)) @api_operation @metrics.api_request_time.time() 
-def get_host_system_profile_by_id( - host_id_list, page=1, per_page=100, order_by=None, order_how=None -): - query = _get_host_list_by_id_list(current_identity.account_number, - host_id_list) +def get_host_system_profile_by_id(host_id_list, page=1, per_page=100, order_by=None, order_how=None): + query = _get_host_list_by_id_list(current_identity.account_number, host_id_list) try: order_by = _params_to_order_by(order_by, order_how) @@ -373,8 +333,7 @@ def get_host_system_profile_by_id( query = query.order_by(*order_by) query_results = query.paginate(page, per_page, True) - response_list = [host.to_system_profile_json() - for host in query_results.items] + response_list = [host.to_system_profile_json() for host in query_results.items] json_output = {"total": query_results.total, "count": len(response_list), @@ -392,17 +351,10 @@ def patch_by_id(host_id_list, host_data): try: validated_patch_host_data = PatchHostSchema(strict=True).load(host_data).data except ValidationError as e: - logger.exception("Input validation error while patching host: %s - %s" - % (host_id_list, host_data)) - return ({"status": 400, - "title": "Bad Request", - "detail": str(e.messages), - "type": "unknown", - }, - 400) - - query = _get_host_list_by_id_list(current_identity.account_number, - host_id_list) + logger.exception("Input validation error while patching host: %s - %s" % (host_id_list, host_data)) + return ({"status": 400, "title": "Bad Request", "detail": str(e.messages), "type": "unknown"}, 400) + + query = _get_host_list_by_id_list(current_identity.account_number, host_id_list) hosts_to_update = query.all() @@ -421,8 +373,7 @@ def patch_by_id(host_id_list, host_data): @api_operation @metrics.api_request_time.time() def replace_facts(host_id_list, namespace, fact_dict): - return update_facts_by_namespace(FactOperations.replace, host_id_list, - namespace, fact_dict) + return update_facts_by_namespace(FactOperations.replace, host_id_list, namespace, fact_dict) @api_operation @@ -433,8 +384,7 @@ def merge_facts(host_id_list, namespace, fact_dict): logger.debug(error_msg) return error_msg, 400 - return update_facts_by_namespace(FactOperations.merge, host_id_list, - namespace, fact_dict) + return update_facts_by_namespace(FactOperations.merge, host_id_list, namespace, fact_dict) def update_facts_by_namespace(operation, host_id_list, namespace, fact_dict): diff --git a/api/metrics.py b/api/metrics.py index 512c9c826b..1530512868 100644 --- a/api/metrics.py +++ b/api/metrics.py @@ -1,7 +1,6 @@ from prometheus_client import Counter, Summary -api_request_time = Summary("inventory_request_processing_seconds", - "Time spent processing request") +api_request_time = Summary("inventory_request_processing_seconds", "Time spent processing request") host_dedup_processing_time = Summary("inventory_dedup_processing_seconds", "Time spent looking for existing host (dedup logic)") find_host_using_elevated_ids = Summary("inventory_find_host_using_elevated_ids_processing_seconds", @@ -10,18 +9,13 @@ "Time spent committing a new host to the database") update_host_commit_processing_time = Summary("inventory_update_host_commit_seconds", "Time spent committing a update host to the database") -api_request_count = Counter("inventory_request_count", - "The total amount of API requests") -create_host_count = Counter("inventory_create_host_count", - "The total amount of hosts created") -update_host_count = Counter("inventory_update_host_count", - "The total amount of hosts updated") -delete_host_count = 
Counter("inventory_delete_host_count", - "The total amount of hosts deleted") +api_request_count = Counter("inventory_request_count", "The total amount of API requests") +create_host_count = Counter("inventory_create_host_count", "The total amount of hosts created") +update_host_count = Counter("inventory_update_host_count", "The total amount of hosts updated") +delete_host_count = Counter("inventory_delete_host_count", "The total amount of hosts deleted") delete_host_processing_time = Summary("inventory_delete_host_commit_seconds", "Time spent deleting hosts from the database") -login_failure_count = Counter("inventory_login_failure_count", - "The total amount of failed login attempts") +login_failure_count = Counter("inventory_login_failure_count", "The total amount of failed login attempts") system_profile_deserialization_time = Summary("inventory_system_profile_deserialization_time", "Time spent deserializing system profile documents") system_profile_commit_processing_time = Summary("inventory_system_profile_commit_processing_time", diff --git a/api/mgmt.py b/api/mgmt.py index fd04ddc27e..608ba51635 100644 --- a/api/mgmt.py +++ b/api/mgmt.py @@ -1,8 +1,5 @@ from flask import Blueprint, jsonify -from prometheus_client import (CollectorRegistry, - multiprocess, - generate_latest, - CONTENT_TYPE_LATEST,) +from prometheus_client import CollectorRegistry, multiprocess, generate_latest, CONTENT_TYPE_LATEST from app.common import get_build_version diff --git a/app/__init__.py b/app/__init__.py index 120b5531c7..69a18aaa62 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -32,9 +32,7 @@ def create_app(config_name): app_config = Config() app_config.log_configuration(config_name) - connexion_app = connexion.App( - "inventory", specification_dir="./swagger/", options=connexion_options - ) + connexion_app = connexion.App("inventory", specification_dir="./swagger/", options=connexion_options) # Read the swagger.yml file to configure the endpoints with open("swagger/api.spec.yaml", "rb") as fp: @@ -66,14 +64,11 @@ def create_app(config_name): db.init_app(flask_app) - flask_app.register_blueprint(monitoring_blueprint, - url_prefix=app_config.mgmt_url_path_prefix) + flask_app.register_blueprint(monitoring_blueprint, url_prefix=app_config.mgmt_url_path_prefix) @flask_app.before_request def set_request_id(): - threadctx.request_id = request.headers.get( - REQUEST_ID_HEADER, - UNKNOWN_REQUEST_ID_VALUE) + threadctx.request_id = request.headers.get(REQUEST_ID_HEADER, UNKNOWN_REQUEST_ID_VALUE) init_tasks(app_config, flask_app) diff --git a/app/auth/__init__.py b/app/auth/__init__.py index b713c5d55c..173ed924c1 100644 --- a/app/auth/__init__.py +++ b/app/auth/__init__.py @@ -5,9 +5,7 @@ from app.logging import get_logger from werkzeug.local import LocalProxy -__all__ = ["current_identity", - "bearer_token_handler", - "authentication_header_handler"] +__all__ = ["current_identity", "bearer_token_handler", "authentication_header_handler"] logger = get_logger(__name__) @@ -18,8 +16,7 @@ def authentication_header_handler(apikey, required_scopes=None): validate(identity) except Exception: login_failure_count.inc() - logger.debug("Failed to validate identity header value", - exc_info=True) + logger.debug("Failed to validate identity header value", exc_info=True) return None return {"uid": identity} @@ -31,8 +28,7 @@ def bearer_token_handler(token): validate(identity) except Exception: login_failure_count.inc() - logger.debug("Failed to validate bearer token value", - exc_info=True) + logger.debug("Failed to 
validate bearer token value", exc_info=True) return None return {"uid": identity} diff --git a/app/events.py b/app/events.py index 13f3de9964..8726f32040 100644 --- a/app/events.py +++ b/app/events.py @@ -12,6 +12,4 @@ class HostEvent(Schema): def delete(id): - return HostEvent(strict=True).dumps( - {"id": id, "timestamp": datetime.utcnow(), "type": "delete"} - ).data + return HostEvent(strict=True).dumps({"id": id, "timestamp": datetime.utcnow(), "type": "delete"}).data diff --git a/app/exceptions.py b/app/exceptions.py index 268aa3cb3c..0ad3bae713 100644 --- a/app/exceptions.py +++ b/app/exceptions.py @@ -8,8 +8,7 @@ def __init__(self, status=400, title=None, detail=None, type="about:blank"): self.type = type def to_json(self): - return {'detail': self.detail, 'status': self.status, - 'title': self.title, 'type': self.type} + return {'detail': self.detail, 'status': self.status, 'title': self.title, 'type': self.type} class InputFormatException(InventoryException): diff --git a/app/logging.py b/app/logging.py index 84f8f60359..f37db7199a 100644 --- a/app/logging.py +++ b/app/logging.py @@ -44,8 +44,7 @@ def _configure_watchtower_logging_handler(): log_group = os.getenv("AWS_LOG_GROUP", "platform") stream_name = _get_aws_logging_stream_name(OPENSHIFT_ENVIRONMENT_NAME_FILE) - if all([aws_access_key_id, aws_secret_access_key, - aws_region_name, stream_name]): + if all([aws_access_key_id, aws_secret_access_key, aws_region_name, stream_name]): print(f"Configuring watchtower logging (log_group={log_group}, stream_name={stream_name})") boto3_session = Session(aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, @@ -58,8 +57,7 @@ def _configure_watchtower_logging_handler(): handler.setFormatter(logstash_formatter.LogstashFormatterV1()) root.addHandler(handler) else: - print("Unable to configure watchtower logging. Please " - "verify watchtower logging configuration!") + print("Unable to configure watchtower logging. Please verify watchtower logging configuration!") def _get_aws_logging_stream_name(namespace_filename): @@ -68,8 +66,7 @@ def _get_aws_logging_stream_name(namespace_filename): return namespace_fh.read() except FileNotFoundError: namespace = DEFAULT_AWS_LOGGING_NAMESPACE - print(f"Error reading the OpenShift namepsace file. " - f"Using {namespace} as aws logging stream name") + print(f"Error reading the OpenShift namepsace file. 
Using {namespace} as aws logging stream name") return namespace @@ -117,9 +114,7 @@ class InventoryGunicornLogger(glogging.Logger): def setup(self, cfg): super().setup(cfg) - self._set_handler(self.error_log, - cfg.errorlog, - logstash_formatter.LogstashFormatterV1()) + self._set_handler(self.error_log, cfg.errorlog, logstash_formatter.LogstashFormatterV1()) def get_logger(name): diff --git a/app/models.py b/app/models.py index a06403327e..8f0ff64eef 100644 --- a/app/models.py +++ b/app/models.py @@ -43,9 +43,7 @@ class Host(db.Model): display_name = db.Column(db.String(200), default=_set_display_name_on_save) ansible_host = db.Column(db.String(255)) created_on = db.Column(db.DateTime, default=datetime.utcnow) - modified_on = db.Column( - db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow - ) + modified_on = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) facts = db.Column(JSONB) tags = db.Column(JSONB) canonical_facts = db.Column(JSONB) @@ -62,9 +60,9 @@ def __init__( ): if not canonical_facts: - raise InventoryException(title="Invalid request", - detail="At least one of the canonical " - "fact fields must be present.") + raise InventoryException( + title="Invalid request", detail="At least one of the canonical " "fact fields must be present." + ) self.canonical_facts = canonical_facts @@ -103,9 +101,7 @@ def to_json(self): return json_dict def to_system_profile_json(self): - json_dict = {"id": str(self.id), - "system_profile": self.system_profile_facts or {} - } + json_dict = {"id": str(self.id), "system_profile": self.system_profile_facts or {}} return json_dict def save(self): @@ -121,12 +117,10 @@ def update(self, input_host): self.update_facts(input_host.facts) def patch(self, patch_data): - logger.debug("patching host (id=%s) with data: %s" % - (self.id, patch_data)) + logger.debug("patching host (id=%s) with data: %s" % (self.id, patch_data)) if not patch_data: - raise InventoryException(title="Bad Request", - detail="Patch json document cannot be empty.") + raise InventoryException(title="Bad Request", detail="Patch json document cannot be empty.") self._update_ansible_host(patch_data.get("ansible_host")) @@ -153,8 +147,7 @@ def update_canonical_facts(self, canonical_facts): " with input canonical_facts=%s") % (self.id, self.canonical_facts, canonical_facts)) self.canonical_facts.update(canonical_facts) - logger.debug("Host (id=%s) has updated canonical_facts (%s)" - % (self.id, self.canonical_facts)) + logger.debug("Host (id=%s) has updated canonical_facts (%s)" % (self.id, self.canonical_facts)) orm.attributes.flag_modified(self, "canonical_facts") def update_facts(self, facts_dict): @@ -187,18 +180,12 @@ def _update_system_profile(self, input_system_profile): self.system_profile_facts = input_system_profile else: # Update the fields that were passed in - self.system_profile_facts = {**self.system_profile_facts, - **input_system_profile} + self.system_profile_facts = {**self.system_profile_facts, **input_system_profile} orm.attributes.flag_modified(self, "system_profile_facts") def __repr__(self): tmpl = "" - return tmpl % ( - self.id, - self.account, - self.display_name, - self.canonical_facts, - ) + return tmpl % (self.id, self.account, self.display_name, self.canonical_facts) class CanonicalFacts: @@ -272,8 +259,7 @@ def from_json(fact_list): @staticmethod def to_json(fact_dict): fact_list = [ - {"namespace": namespace, "facts": facts if facts else {}} - for namespace, facts in fact_dict.items() + {"namespace": namespace, "facts": facts 
if facts else {}} for namespace, facts in fact_dict.items() ] return fact_list @@ -350,18 +336,15 @@ class FactsSchema(Schema): class HostSchema(Schema): display_name = fields.Str(validate=validate.Length(min=1, max=200)) ansible_host = fields.Str(validate=validate.Length(min=0, max=255)) - account = fields.Str(required=True, - validate=validate.Length(min=1, max=10)) + account = fields.Str(required=True, validate=validate.Length(min=1, max=10)) insights_id = fields.Str(validate=verify_uuid_format) rhel_machine_id = fields.Str(validate=verify_uuid_format) subscription_manager_id = fields.Str(validate=verify_uuid_format) satellite_id = fields.Str(validate=verify_uuid_format) fqdn = fields.Str(validate=validate.Length(min=1, max=255)) bios_uuid = fields.Str(validate=verify_uuid_format) - ip_addresses = fields.List( - fields.Str(validate=validate.Length(min=1, max=255))) - mac_addresses = fields.List( - fields.Str(validate=validate.Length(min=1, max=255))) + ip_addresses = fields.List(fields.Str(validate=validate.Length(min=1, max=255))) + mac_addresses = fields.List(fields.Str(validate=validate.Length(min=1, max=255))) external_id = fields.Str(validate=validate.Length(min=1, max=500)) facts = fields.List(fields.Nested(FactsSchema)) system_profile = fields.Nested(SystemProfileSchema) diff --git a/host_dumper.py b/host_dumper.py index 2e4d611fe9..a6496ed7e2 100644 --- a/host_dumper.py +++ b/host_dumper.py @@ -14,17 +14,11 @@ " This util is expected to be used within the image/pod." ) group = parser.add_mutually_exclusive_group(required=True) -group.add_argument("--id", - help="search for a host using id") -group.add_argument("--hostname", - help="search for a host using display_name, fqdn") -group.add_argument("--insights_id", - help="search for a host using insights_id") -group.add_argument("--account_number", - help="dump all hosts associated with account") -parser.add_argument("--no-pp", - help="enable pretty printing", - action="store_true") +group.add_argument("--id", help="search for a host using id") +group.add_argument("--hostname", help="search for a host using display_name, fqdn") +group.add_argument("--insights_id", help="search for a host using insights_id") +group.add_argument("--account_number", help="dump all hosts associated with account") +parser.add_argument("--no-pp", help="enable pretty printing", action="store_true") args = parser.parse_args() with application.app_context(): @@ -33,9 +27,7 @@ if args.id: host_id_list = [args.id] print("looking up host using id") - query_results = Host.query.filter( - Host.id.in_(host_id_list) - ).all() + query_results = Host.query.filter(Host.id.in_(host_id_list)).all() elif args.hostname: print("looking up host using display_name, fqdn") query_results = Host.query.filter( @@ -48,9 +40,7 @@ Host.canonical_facts.comparator.contains({'insights_id': args.insights_id}) ).all() elif args.account_number: - query_results = Host.query.filter( - Host.account == args.account_number - ).all() + query_results = Host.query.filter(Host.account == args.account_number).all() json_host_list = [host.to_json() for host in query_results] diff --git a/migrations/env.py b/migrations/env.py index 521cb5a927..699df3aab4 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -19,9 +19,7 @@ # from myapp import mymodel # target_metadata = mymodel.Base.metadata -config.set_main_option( - 'sqlalchemy.url', current_app.config.get('SQLALCHEMY_DATABASE_URI') -) +config.set_main_option('sqlalchemy.url', current_app.config.get('SQLALCHEMY_DATABASE_URI')) target_metadata 
= current_app.extensions['migrate'].db.metadata # other values from the config, defined by the needs of env.py, @@ -69,9 +67,7 @@ def process_revision_directives(context, revision, directives): logger.info('No changes in schema detected.') engine = engine_from_config( - config.get_section(config.config_ini_section), - prefix='sqlalchemy.', - poolclass=pool.NullPool, + config.get_section(config.config_ini_section), prefix='sqlalchemy.', poolclass=pool.NullPool ) connection = engine.connect() diff --git a/migrations/versions/2d951983fa89_.py b/migrations/versions/2d951983fa89_.py index f4be7838c3..6745e1fe54 100644 --- a/migrations/versions/2d951983fa89_.py +++ b/migrations/versions/2d951983fa89_.py @@ -27,18 +27,13 @@ def upgrade(): sa.Column('modified_on', sa.DateTime(), nullable=True), sa.Column('facts', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('tags', postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column( - 'canonical_facts', postgresql.JSONB(astext_type=sa.Text()), nullable=True - ), + sa.Column('canonical_facts', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.PrimaryKeyConstraint('id'), ) op.create_index('idxaccount', 'hosts', ['account'], unique=False) - op.create_index('idxinsightsid', - 'hosts', - [sa.text("(canonical_facts ->> 'insights_id')")] - ) + op.create_index('idxinsightsid', 'hosts', [sa.text("(canonical_facts ->> 'insights_id')")]) op.create_index('idxgincanonicalfacts', 'hosts', diff --git a/tasks/__init__.py b/tasks/__init__.py index 7dd274530b..766f6f75e3 100644 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -14,8 +14,7 @@ class NullProducer: def send(self, topic, value=None): - logger.debug("NullProducer - logging message: topic (%s) - message: %s" % - (topic, value)) + logger.debug("NullProducer - logging message: topic (%s) - message: %s" % (topic, value)) producer = None @@ -72,9 +71,7 @@ def _init_system_profile_consumer(config, flask_app, handler=msg_handler, consum if consumer is None: consumer = KafkaConsumer( - config.system_profile_topic, - group_id=config.consumer_group, - bootstrap_servers=config.bootstrap_servers) + config.system_profile_topic, group_id=config.consumer_group, bootstrap_servers=config.bootstrap_servers) def _f(): with flask_app.app_context(): @@ -89,7 +86,5 @@ def _f(): logger.exception("uncaught exception in handler, moving on.") metrics.system_profile_failure_count.inc() - t = Thread( - target=_f, - daemon=True) + t = Thread(target=_f, daemon=True) t.start() diff --git a/test_api.py b/test_api.py index 802e07f9f1..c6c36d39e9 100755 --- a/test_api.py +++ b/test_api.py @@ -86,31 +86,21 @@ def setUp(self): def get(self, path, status=200, return_response_as_json=True): return self._response_check( - self.client().get(path, headers=self._get_valid_auth_header()), - status, - return_response_as_json, + self.client().get(path, headers=self._get_valid_auth_header()), status, return_response_as_json ) def post(self, path, data, status=200, return_response_as_json=True): - return self._make_http_call( - self.client().post, path, data, status, return_response_as_json - ) + return self._make_http_call(self.client().post, path, data, status, return_response_as_json) def patch(self, path, data, status=200, return_response_as_json=True): - return self._make_http_call( - self.client().patch, path, data, status, return_response_as_json - ) + return self._make_http_call(self.client().patch, path, data, status, return_response_as_json) def put(self, path, data, status=200, 
return_response_as_json=True): - return self._make_http_call( - self.client().put, path, data, status, return_response_as_json - ) + return self._make_http_call(self.client().put, path, data, status, return_response_as_json) def delete(self, path, status=200, return_response_as_json=True): return self._response_check( - self.client().delete(path, headers=self._get_valid_auth_header()), - status, - return_response_as_json, + self.client().delete(path, headers=self._get_valid_auth_header()), status, return_response_as_json ) def verify_error_response(self, response, expected_title=None, @@ -127,16 +117,12 @@ def _verify_value(field_name, expected_value): _verify_value("detail", expected_detail) _verify_value("type", expected_type) - def _make_http_call( - self, http_method, path, data, status, return_response_as_json=True - ): + def _make_http_call(self, http_method, path, data, status, return_response_as_json=True): json_data = json.dumps(data) headers = self._get_valid_auth_header() headers["content-type"] = "application/json" return self._response_check( - http_method(path, data=json_data, headers=headers), - status, - return_response_as_json, + http_method(path, data=json_data, headers=headers), status, return_response_as_json ) def _response_check(self, response, status, return_response_as_json): @@ -183,36 +169,26 @@ def _build_host_id_list_for_url(self, host_list): return ",".join(host_id_list) def _verify_host_status(self, response, host_index, expected_status): - self.assertEqual(response["data"][host_index]["status"], - expected_status) + self.assertEqual(response["data"][host_index]["status"], expected_status) def _pluck_host_from_response(self, response, host_index): return response["data"][host_index]["host"] - def _validate_host(self, received_host, expected_host, - expected_id=id): + def _validate_host(self, received_host, expected_host, expected_id=id): self.assertIsNotNone(received_host["id"]) self.assertEqual(received_host["id"], expected_id) self.assertEqual(received_host["account"], expected_host.account) - self.assertEqual(received_host["insights_id"], - expected_host.insights_id) - self.assertEqual(received_host["rhel_machine_id"], - expected_host.rhel_machine_id) - self.assertEqual(received_host["subscription_manager_id"], - expected_host.subscription_manager_id) - self.assertEqual(received_host["satellite_id"], - expected_host.satellite_id) + self.assertEqual(received_host["insights_id"], expected_host.insights_id) + self.assertEqual(received_host["rhel_machine_id"], expected_host.rhel_machine_id) + self.assertEqual(received_host["subscription_manager_id"], expected_host.subscription_manager_id) + self.assertEqual(received_host["satellite_id"], expected_host.satellite_id) self.assertEqual(received_host["bios_uuid"], expected_host.bios_uuid) self.assertEqual(received_host["fqdn"], expected_host.fqdn) - self.assertEqual(received_host["mac_addresses"], - expected_host.mac_addresses) - self.assertEqual(received_host["ip_addresses"], - expected_host.ip_addresses) - self.assertEqual(received_host["display_name"], - expected_host.display_name) + self.assertEqual(received_host["mac_addresses"], expected_host.mac_addresses) + self.assertEqual(received_host["ip_addresses"], expected_host.ip_addresses) + self.assertEqual(received_host["display_name"], expected_host.display_name) self.assertEqual(received_host["facts"], expected_host.facts) - self.assertEqual(received_host["ansible_host"], - expected_host.ansible_host) + self.assertEqual(received_host["ansible_host"], 
expected_host.ansible_host) self.assertIsNotNone(received_host["created"]) self.assertIsNotNone(received_host["updated"]) @@ -272,9 +248,7 @@ def test_create_and_update(self): # sanity check # host_lookup_results["results"][0]["facts"][0]["facts"]["key2"] = "blah" # host_lookup_results["results"][0]["insights_id"] = "1.2.3.4" - self._validate_host(host_lookup_results["results"][0], - host_data, - expected_id=original_id) + self._validate_host(host_lookup_results["results"][0], host_data, expected_id=original_id) def test_create_host_update_with_same_insights_id_and_different_canonical_facts(self): original_insights_id = generate_uuid() @@ -302,15 +276,14 @@ def test_create_host_update_with_same_insights_id_and_different_canonical_facts( # Change the canonical facts except for the insights_id host_data.rhel_machine_id = generate_uuid() - host_data.ip_addresses = ["192.168.1.44", "10.0.0.2", ] + host_data.ip_addresses = ["192.168.1.44", "10.0.0.2"] host_data.subscription_manager_id = generate_uuid() host_data.satellite_id = generate_uuid() host_data.bios_uuid = generate_uuid() host_data.fqdn = "expected_fqdn" host_data.mac_addresses = ["ff:ee:dd:cc:bb:aa"] host_data.external_id = "fedcba" - host_data.facts = [{"namespace": "ns1", - "facts": {"newkey": "newvalue"}}] + host_data.facts = [{"namespace": "ns1", "facts": {"newkey": "newvalue"}}] # Update the host response = self.post(HOST_URL, [host_data.data()], 207) @@ -325,9 +298,7 @@ def test_create_host_update_with_same_insights_id_and_different_canonical_facts( # Retrieve the host using the id that we first received data = self.get("{}/{}".format(HOST_URL, original_id), 200) - self._validate_host(data["results"][0], - host_data, - expected_id=original_id) + self._validate_host(data["results"][0], host_data, expected_id=original_id) def test_create_host_with_empty_facts_display_name_then_update(self): # Create a host with empty facts, and display_name @@ -356,9 +327,7 @@ def test_create_host_with_empty_facts_display_name_then_update(self): host_lookup_results = self.get("{}/{}".format(HOST_URL, original_id), 200) - self._validate_host(host_lookup_results["results"][0], - host_data, - expected_id=original_id) + self._validate_host(host_lookup_results["results"][0], host_data, expected_id=original_id) def test_create_and_update_multiple_hosts_with_account_mismatch(self): """ @@ -407,9 +376,7 @@ def test_create_host_without_canonical_facts(self): response_data = response_data["data"][0] - self.verify_error_response(response_data, - expected_title="Invalid request", - expected_status=400) + self.verify_error_response(response_data, expected_title="Invalid request", expected_status=400) def test_create_host_without_account(self): host_data = HostWrapper(test_data(facts=None)) @@ -429,8 +396,7 @@ def test_create_host_with_mismatched_account_numbers(self): response_data = response_data["data"][0] - self.verify_error_response(response_data, - expected_title="Invalid request") + self.verify_error_response(response_data, expected_title="Invalid request") def test_create_host_with_invalid_facts(self): facts_with_no_namespace = copy.deepcopy(FACTS) @@ -442,10 +408,7 @@ def test_create_host_with_invalid_facts(self): facts_with_empty_str_namespace = copy.deepcopy(FACTS) facts_with_empty_str_namespace[0]["namespace"] = "" - invalid_facts = [facts_with_no_namespace, - facts_with_no_facts, - facts_with_empty_str_namespace, - ] + invalid_facts = [facts_with_no_namespace, facts_with_no_facts, facts_with_empty_str_namespace] for invalid_fact in invalid_facts: 
with self.subTest(invalid_fact=invalid_fact): @@ -453,17 +416,10 @@ def test_create_host_with_invalid_facts(self): response_data = self.post(HOST_URL, [host_data.data()], 400) - self.verify_error_response(response_data, - expected_title="Bad Request") + self.verify_error_response(response_data, expected_title="Bad Request") def test_create_host_with_invalid_uuid_field_values(self): - uuid_field_names = ( - "insights_id", - "rhel_machine_id", - "subscription_manager_id", - "satellite_id", - "bios_uuid", - ) + uuid_field_names = ("insights_id", "rhel_machine_id", "subscription_manager_id", "satellite_id", "bios_uuid") for field_name in uuid_field_names: with self.subTest(uuid_field=field_name): @@ -477,8 +433,7 @@ def test_create_host_with_invalid_uuid_field_values(self): self.assertEqual(error_host["status"], 400) - self.verify_error_response(error_host, - expected_title="Bad Request") + self.verify_error_response(error_host, expected_title="Bad Request") def test_create_host_with_non_nullable_fields_as_None(self): non_nullable_field_names = ("display_name", @@ -510,13 +465,10 @@ def test_create_host_with_non_nullable_fields_as_None(self): response_data = self.post(HOST_URL, [invalid_host_dict], 400) - self.verify_error_response(response_data, - expected_title="Bad Request") + self.verify_error_response(response_data, expected_title="Bad Request") def test_create_host_with_valid_ip_address(self): - valid_ip_arrays = [["blah"], - ["1.1.1.1", "sigh"], - ] + valid_ip_arrays = [["blah"], ["1.1.1.1", "sigh"]] for ip_array in valid_ip_arrays: with self.subTest(ip_array=ip_array): @@ -531,10 +483,7 @@ def test_create_host_with_valid_ip_address(self): self.assertEqual(error_host["status"], 201) def test_create_host_with_invalid_ip_address(self): - invalid_ip_arrays = [[], - [""], - ["a"*256, ], - ] + invalid_ip_arrays = [[], [""], ["a" * 256]] for ip_array in invalid_ip_arrays: with self.subTest(ip_array=ip_array): @@ -548,13 +497,10 @@ def test_create_host_with_invalid_ip_address(self): self.assertEqual(error_host["status"], 400) - self.verify_error_response(error_host, - expected_title="Bad Request") + self.verify_error_response(error_host, expected_title="Bad Request") def test_create_host_with_valid_mac_address(self): - valid_mac_arrays = [["blah"], - ["11:22:33:44:55:66", "blah"], - ] + valid_mac_arrays = [["blah"], ["11:22:33:44:55:66", "blah"]] for mac_array in valid_mac_arrays: with self.subTest(mac_array=mac_array): @@ -569,10 +515,7 @@ def test_create_host_with_valid_mac_address(self): self.assertEqual(error_host["status"], 201) def test_create_host_with_invalid_mac_address(self): - invalid_mac_arrays = [[], - [""], - ["11:22:33:44:55:66", "a"*256], - ] + invalid_mac_arrays = [[], [""], ["11:22:33:44:55:66", "a" * 256]] for mac_array in invalid_mac_arrays: with self.subTest(mac_array=mac_array): @@ -586,13 +529,12 @@ def test_create_host_with_invalid_mac_address(self): self.assertEqual(error_host["status"], 400) - self.verify_error_response(error_host, - expected_title="Bad Request") + self.verify_error_response(error_host, expected_title="Bad Request") def test_create_host_with_invalid_display_name(self): host_data = HostWrapper(test_data(facts=None)) - invalid_display_names = ["", "a"*201] + invalid_display_names = ["", "a" * 201] for display_name in invalid_display_names: with self.subTest(display_name=display_name): @@ -604,13 +546,12 @@ def test_create_host_with_invalid_display_name(self): self.assertEqual(error_host["status"], 400) - self.verify_error_response(error_host, - 
expected_title="Bad Request") + self.verify_error_response(error_host, expected_title="Bad Request") def test_create_host_with_invalid_fqdn(self): host_data = HostWrapper(test_data(facts=None)) - invalid_fqdns = ["", "a"*256] + invalid_fqdns = ["", "a" * 256] for fqdn in invalid_fqdns: with self.subTest(fqdn=fqdn): @@ -622,13 +563,12 @@ def test_create_host_with_invalid_fqdn(self): self.assertEqual(error_host["status"], 400) - self.verify_error_response(error_host, - expected_title="Bad Request") + self.verify_error_response(error_host, expected_title="Bad Request") def test_create_host_with_invalid_external_id(self): host_data = HostWrapper(test_data(facts=None)) - invalid_external_ids = ["", "a"*501] + invalid_external_ids = ["", "a" * 501] for external_id in invalid_external_ids: with self.subTest(external_id=external_id): @@ -640,8 +580,7 @@ def test_create_host_with_invalid_external_id(self): self.assertEqual(error_host["status"], 400) - self.verify_error_response(error_host, - expected_title="Bad Request") + self.verify_error_response(error_host, expected_title="Bad Request") def test_create_host_with_ansible_host(self): # Create a host with ansible_host field @@ -659,9 +598,7 @@ def test_create_host_with_ansible_host(self): host_lookup_results = self.get("{}/{}".format(HOST_URL, original_id), 200) - self._validate_host(host_lookup_results["results"][0], - host_data, - expected_id=original_id) + self._validate_host(host_lookup_results["results"][0], host_data, expected_id=original_id) def test_create_host_without_ansible_host_then_update(self): # Create a host without ansible_host field @@ -678,9 +615,7 @@ def test_create_host_without_ansible_host_then_update(self): original_id = created_host["id"] - ansible_hosts = ["ima_ansible_host_"+generate_uuid(), - "", - ] + ansible_hosts = ["ima_ansible_host_" + generate_uuid(), ""] # Update the ansible_host for ansible_host in ansible_hosts: @@ -693,14 +628,12 @@ def test_create_host_without_ansible_host_then_update(self): host_lookup_results = self.get(f"{HOST_URL}/{original_id}", 200) - self._validate_host(host_lookup_results["results"][0], - host_data, - expected_id=original_id) + self._validate_host(host_lookup_results["results"][0], host_data, expected_id=original_id) def test_create_host_with_invalid_ansible_host(self): host_data = HostWrapper(test_data(facts=None)) - invalid_ansible_host = ["a"*256] + invalid_ansible_host = ["a" * 256] for ansible_host in invalid_ansible_host: with self.subTest(ansible_host=ansible_host): @@ -712,8 +645,7 @@ def test_create_host_with_invalid_ansible_host(self): self.assertEqual(error_host["status"], 400) - self.verify_error_response(error_host, - expected_title="Bad Request") + self.verify_error_response(error_host, expected_title="Bad Request") class ResolveDisplayNameOnCreationTestCase(DBAPITestCase): @@ -741,9 +673,7 @@ def test_create_host_without_display_name_and_without_fqdn(self): # Explicitly set the display_name to the be id...this is expected here host_data.display_name = created_host["id"] - self._validate_host(host_lookup_results["results"][0], - host_data, - expected_id=original_id) + self._validate_host(host_lookup_results["results"][0], host_data, expected_id=original_id) def test_create_host_without_display_name_and_with_fqdn(self): """ @@ -770,9 +700,7 @@ def test_create_host_without_display_name_and_with_fqdn(self): # Explicitly set the display_name ...this is expected here host_data.display_name = expected_display_name - self._validate_host(host_lookup_results["results"][0], - 
host_data, - expected_id=original_id) + self._validate_host(host_lookup_results["results"][0], host_data, expected_id=original_id) class BulkCreateHostsTestCase(DBAPITestCase): @@ -826,9 +754,7 @@ def test_create_and_update_multiple_hosts_with_different_accounts(self): expected_host = HostWrapper(host_list[i]) - self._validate_host(host["host"], - expected_host, - expected_id=expected_host.id) + self._validate_host(host["host"], expected_host, expected_id=expected_host.id) i += 1 @@ -962,10 +888,8 @@ def test_create_host_without_system_profile_then_update_with_system_profile(self system_profiles = [({}, {})] # Only set the enabled_services to start out with - enabled_services_only_system_profile = {"enabled_services": - ["firewalld"]} - system_profiles.append((enabled_services_only_system_profile, - enabled_services_only_system_profile)) + enabled_services_only_system_profile = {"enabled_services": ["firewalld"]} + system_profiles.append((enabled_services_only_system_profile, enabled_services_only_system_profile)) # Set the entire system profile...overwriting the enabled_service # set from before @@ -973,22 +897,15 @@ def test_create_host_without_system_profile_then_update_with_system_profile(self system_profiles.append((full_system_profile, full_system_profile)) # Change the enabled_services - full_system_profile = {**full_system_profile, - **enabled_services_only_system_profile} - system_profiles.append((enabled_services_only_system_profile, - full_system_profile)) + full_system_profile = {**full_system_profile, **enabled_services_only_system_profile} + system_profiles.append((enabled_services_only_system_profile, full_system_profile)) # Make sure an empty system profile doesn't overwrite the data - system_profiles.append(({}, - full_system_profile)) + system_profiles.append(({}, full_system_profile)) for i, (system_profile, expected_system_profile) in enumerate(system_profiles): with self.subTest(system_profile=i): - mq_message = { - "id": original_id, - "request_id": None, - "system_profile": system_profile - } + mq_message = {"id": original_id, "request_id": None, "system_profile": system_profile} with self.app.app_context(): msg_handler(mq_message) @@ -997,8 +914,7 @@ def test_create_host_without_system_profile_then_update_with_system_profile(self self.assertEqual(original_id, actual_host["id"]) - self.assertEqual(actual_host["system_profile"], - expected_system_profile) + self.assertEqual(actual_host["system_profile"], expected_system_profile) def test_create_host_with_null_system_profile(self): facts = None @@ -1011,9 +927,7 @@ def test_create_host_with_null_system_profile(self): # Create the host without a system profile response = self.post(HOST_URL, [host], 400) - self.verify_error_response(response, - expected_title="Bad Request", - expected_status=400) + self.verify_error_response(response, expected_title="Bad Request", expected_status=400) def test_create_host_with_system_profile_with_invalid_data(self): facts = None @@ -1073,8 +987,7 @@ def test_create_host_with_system_profile_with_different_yum_urls(self): self.assertEqual(original_id, actual_host["id"]) - self.assertEqual(actual_host["system_profile"], - host["system_profile"]) + self.assertEqual(actual_host["system_profile"], host["system_profile"]) def test_create_host_with_system_profile_with_different_cloud_providers(self): facts = None @@ -1102,8 +1015,7 @@ def test_create_host_with_system_profile_with_different_cloud_providers(self): self.assertEqual(original_id, actual_host["id"]) - 
self.assertEqual(actual_host["system_profile"], - host["system_profile"]) + self.assertEqual(actual_host["system_profile"], host["system_profile"]) def test_get_system_profile_of_host_that_does_not_have_system_profile(self): facts = None @@ -1127,8 +1039,7 @@ def test_get_system_profile_of_host_that_does_not_have_system_profile(self): self.assertEqual(original_id, actual_host["id"]) - self.assertEqual(actual_host["system_profile"], - expected_system_profile) + self.assertEqual(actual_host["system_profile"], expected_system_profile) def test_get_system_profile_of_multiple_hosts(self): facts = None @@ -1149,18 +1060,13 @@ def test_get_system_profile_of_multiple_hosts(self): original_id = created_host["id"] host_id_list.append(original_id) - expected_system_profiles.append({ - "id": original_id, - "system_profile": host["system_profile"] - }) + expected_system_profiles.append({"id": original_id, "system_profile": host["system_profile"]}) url_host_id_list = ",".join(host_id_list) test_url = "{}/{}/system_profile".format(HOST_URL, url_host_id_list) host_lookup_results = self.get(test_url, 200) - self.assertEqual( - len(expected_system_profiles), len(host_lookup_results["results"]) - ) + self.assertEqual(len(expected_system_profiles), len(host_lookup_results["results"])) for expected_system_profile in expected_system_profiles: self.assertIn(expected_system_profile, host_lookup_results["results"]) @@ -1179,9 +1085,7 @@ def test_get_system_profile_with_invalid_host_id(self): for host_id in invalid_host_ids: with self.subTest(invalid_host_id=host_id): response = self.get("{}/{}/system_profile".format(HOST_URL, host_id), 400) - self.verify_error_response(response, - expected_title="Bad Request", - expected_status=400) + self.verify_error_response(response, expected_title="Bad Request", expected_status=400) class PreCreatedHostsBaseTestCase(DBAPITestCase, PaginationTestCase): @@ -1225,9 +1129,7 @@ def test_update_fields(self): for patch_doc in patch_docs: with self.subTest(valid_patch_doc=patch_doc): - response_data = self.patch(f"{HOST_URL}/{original_id}", - patch_doc, - 200) + response_data = self.patch(f"{HOST_URL}/{original_id}", patch_doc, 200) response_data = self.get(f"{HOST_URL}/{original_id}", 200) @@ -1248,8 +1150,7 @@ def test_patch_with_branch_id_parameter(self): self.patch(test_url, patch_doc, 200) def test_update_fields_on_multiple_hosts(self): - patch_doc = {"display_name": "fred_flintstone", - "ansible_host": "barney_rubble"} + patch_doc = {"display_name": "fred_flintstone", "ansible_host": "barney_rubble"} url_host_id_list = self._build_host_id_list_for_url(self.added_hosts) @@ -1290,13 +1191,9 @@ def test_invalid_data(self): for patch_doc in invalid_data_list: with self.subTest(invalid_patch_doc=patch_doc): - response = self.patch(f"{HOST_URL}/{original_id}", - patch_doc, - status=400) + response = self.patch(f"{HOST_URL}/{original_id}", patch_doc, status=400) - self.verify_error_response(response, - expected_title="Bad Request", - expected_status=400) + self.verify_error_response(response, expected_title="Bad Request", expected_status=400) def test_invalid_host_id(self): patch_doc = {"display_name": "branch_id_test"} @@ -1437,11 +1334,7 @@ def _base_query_test(self, host_id_list, expected_host_list): self._base_paging_test(url, len(expected_host_list)) def test_query_existent_hosts(self): - host_lists = [ - self.added_hosts[0:1], - self.added_hosts[1:3], - self.added_hosts, - ] + host_lists = [self.added_hosts[0:1], self.added_hosts[1:3], self.added_hosts] for host_list in 
host_lists: with self.subTest(host_list=host_list): host_id_list = self._build_host_id_list_for_url(host_list) @@ -1498,9 +1391,7 @@ def test_query_invalid_paging_parameters(self): invalid_values = ["-1", "0", "notanumber"] for paging_parameter in paging_parameters: for invalid_value in invalid_values: - with self.subTest( - paging_parameter=paging_parameter, invalid_value=invalid_value - ): + with self.subTest(paging_parameter=paging_parameter, invalid_value=invalid_value): self.get(f"{base_url}?{paging_parameter}={invalid_value}", 400) @@ -1577,11 +1468,7 @@ class QueryOrderTestCase(PreCreatedHostsBaseTestCase): def _queries_subtests_with_added_hosts(self): host_id_list = [host.id for host in self.added_hosts] url_host_id_list = ",".join(host_id_list) - urls = ( - HOST_URL, - f"{HOST_URL}/{url_host_id_list}", - f"{HOST_URL}/{url_host_id_list}/system_profile", - ) + urls = (HOST_URL, f"{HOST_URL}/{url_host_id_list}", f"{HOST_URL}/{url_host_id_list}/system_profile") for url in urls: with self.subTest(url=url): yield url @@ -1776,9 +1663,7 @@ def test_replace_and_add_facts_to_multiple_hosts_including_nonexistent_host(self url_host_id_list = self._build_host_id_list_for_url(host_list) # Add a couple of host ids that should not exist in the database - url_host_id_list = ( - url_host_id_list + "," + generate_uuid() + "," + generate_uuid() - ) + url_host_id_list = url_host_id_list + "," + generate_uuid() + "," + generate_uuid() patch_url = HOST_URL + "/" + url_host_id_list + "/facts/" + target_namespace diff --git a/test_db_model.py b/test_db_model.py index 5f75901c44..6a8863d2f1 100644 --- a/test_db_model.py +++ b/test_db_model.py @@ -23,8 +23,7 @@ def _create_host(insights_id=None, fqdn=None, display_name=None): def test_create_host_with_canonical_facts_as_None(flask_app_fixture): # Test to make sure canonical facts that are None or '' do # not get inserted into the db - invalid_canonical_facts = {"fqdn": None, - "insights_id": '', } + invalid_canonical_facts = {"fqdn": None, "insights_id": ""} valid_canonical_facts = {"bios_uuid": "1234"} host_dict = {**invalid_canonical_facts, **valid_canonical_facts} @@ -50,9 +49,7 @@ def test_create_host_with_display_name_and_fqdn_as_empty_str(flask_app_fixture): def test_update_existing_host_fix_display_name_using_existing_fqdn(flask_app_fixture): expected_fqdn = 'host1.domain1.com' insights_id = str(uuid.uuid4()) - existing_host = _create_host(insights_id=insights_id, - fqdn=expected_fqdn, - display_name=None) + existing_host = _create_host(insights_id=insights_id, fqdn=expected_fqdn, display_name=None) # Clear the display_name existing_host.display_name = None @@ -94,9 +91,7 @@ def test_update_existing_host_fix_display_name_using_id(flask_app_fixture): assert existing_host.display_name is None # Update the host - input_host = Host( - {"insights_id": existing_host.canonical_facts["insights_id"]}, display_name='' - ) + input_host = Host({"insights_id": existing_host.canonical_facts["insights_id"]}, display_name='') existing_host.update(input_host) assert existing_host.display_name == existing_host.id @@ -105,8 +100,7 @@ def test_update_existing_host_fix_display_name_using_id(flask_app_fixture): def test_create_host_without_system_profile(flask_app_fixture): # Test the situation where the db/sqlalchemy sets the # system_profile_facts to None - created_host = _create_host(fqdn="fred.flintstone.com", - display_name="fred") + created_host = _create_host(fqdn="fred.flintstone.com", display_name="fred") assert created_host.system_profile_facts == {} diff --git 
a/test_host_dedup_logic.py b/test_host_dedup_logic.py index 93a0fca021..0fdf2466bc 100644 --- a/test_host_dedup_logic.py +++ b/test_host_dedup_logic.py @@ -31,10 +31,7 @@ def basic_host_dedup_test(initial_canonical_facts, search_canonical_facts): def test_find_host_using_subset_canonical_fact_match(flask_app_fixture): fqdn = "fred.flintstone.com" - canonical_facts = {"fqdn": fqdn, - "bios_uuid": generate_uuid(), - "rhel_machine_id": generate_uuid(), - } + canonical_facts = {"fqdn": fqdn, "bios_uuid": generate_uuid(), "rhel_machine_id": generate_uuid()} # Create the subset of canonical facts to search by subset_canonical_facts = {"fqdn": fqdn} @@ -43,8 +40,7 @@ def test_find_host_using_subset_canonical_fact_match(flask_app_fixture): def test_find_host_using_superset_canonical_fact_match(flask_app_fixture): - canonical_facts = {"fqdn": "fred", - "bios_uuid": generate_uuid()} + canonical_facts = {"fqdn": "fred", "bios_uuid": generate_uuid()} # Create the superset of canonical facts to search by superset_canonical_facts = canonical_facts.copy() @@ -55,10 +51,7 @@ def test_find_host_using_superset_canonical_fact_match(flask_app_fixture): def test_find_host_using_insights_id_match(flask_app_fixture): - canonical_facts = {"fqdn": "fred", - "bios_uuid": generate_uuid(), - "insights_id": generate_uuid(), - } + canonical_facts = {"fqdn": "fred", "bios_uuid": generate_uuid(), "insights_id": generate_uuid()} # Change the canonical facts except the insights_id...match on insights_id search_canonical_facts = {"fqdn": "barney", @@ -70,10 +63,7 @@ def test_find_host_using_insights_id_match(flask_app_fixture): def test_find_host_using_subscription_manager_id_match(flask_app_fixture): - canonical_facts = {"fqdn": "fred", - "bios_uuid": generate_uuid(), - "subscription_manager_id": generate_uuid(), - } + canonical_facts = {"fqdn": "fred", "bios_uuid": generate_uuid(), "subscription_manager_id": generate_uuid()} # Change the bios_uuid so that falling back to subset match will fail search_canonical_facts = { @@ -85,13 +75,8 @@ def test_find_host_using_subscription_manager_id_match(flask_app_fixture): @mark.parametrize(("host_create_order", "expected_host"), (((0, 1), 1), ((1, 0), 0))) -def test_find_host_using_elevated_ids_match( - flask_app_fixture, host_create_order, expected_host -): - hosts_canonical_facts = ( - {"subscription_manager_id": generate_uuid()}, - {"insights_id": generate_uuid()}, - ) +def test_find_host_using_elevated_ids_match(flask_app_fixture, host_create_order, expected_host): + hosts_canonical_facts = ({"subscription_manager_id": generate_uuid()}, {"insights_id": generate_uuid()}) created_hosts = [] for host_canonical_facts in host_create_order: @@ -99,9 +84,7 @@ def test_find_host_using_elevated_ids_match( created_hosts.append(created_host) search_canonical_facts = { - key: value - for host_canonical_facts in hosts_canonical_facts - for key, value in host_canonical_facts.items() + key: value for host_canonical_facts in hosts_canonical_facts for key, value in host_canonical_facts.items() } found_host = find_existing_host(ACCOUNT_NUMBER, search_canonical_facts) diff --git a/test_unit.py b/test_unit.py index d2c567c8f4..83c77d684e 100755 --- a/test_unit.py +++ b/test_unit.py @@ -3,11 +3,7 @@ from api import api_operation from api.host import _params_to_order_by, _order_how from app.config import Config -from app.auth.identity import (Identity, - validate, - from_auth_header, - from_bearer_token, - SHARED_SECRET_ENV_VAR) +from app.auth.identity import Identity, validate, from_auth_header, 
from_bearer_token, SHARED_SECRET_ENV_VAR from base64 import b64encode from json import dumps from unittest import main, TestCase diff --git a/test_validators.py b/test_validators.py index df414bdab7..d9481ee1e7 100644 --- a/test_validators.py +++ b/test_validators.py @@ -1,13 +1,9 @@ import pytest -from app.validators import (verify_uuid_format, - verify_ip_address_format, - verify_mac_address_format) +from app.validators import verify_uuid_format, verify_ip_address_format, verify_mac_address_format -@pytest.mark.parametrize("uuid", ["4a8fb994-57fe-4dbb-ad2a-9e922560b6c1", - "4a8fb99457fe4dbbad2a9e922560b6c1", - ]) +@pytest.mark.parametrize("uuid", ["4a8fb994-57fe-4dbb-ad2a-9e922560b6c1", "4a8fb99457fe4dbbad2a9e922560b6c1"]) def test_valid_uuid(uuid): assert verify_uuid_format(uuid) is True