diff --git a/api/host.py b/api/host.py index 4cbc1fd96..4e5b720f8 100644 --- a/api/host.py +++ b/api/host.py @@ -27,9 +27,7 @@ # given priority in the host deduplication process. # NOTE: The order of this tuple is important. The order defines # the priority. -ELEVATED_CANONICAL_FACT_FIELDS = ("insights_id", - "subscription_manager_id", - ) +ELEVATED_CANONICAL_FACT_FIELDS = ("insights_id", "subscription_manager_id") logger = get_logger(__name__) @@ -50,8 +48,7 @@ def add_host_list(host_list): response_host_list.append({**e.to_json(), "host": host}) except ValidationError as e: number_of_errors += 1 - logger.exception("Input validation error while adding host", - extra={"host": host}) + logger.exception("Input validation error while adding host", extra={"host": host}) response_host_list.append({"status": 400, "title": "Bad Request", "detail": str(e.messages), @@ -66,9 +63,7 @@ def add_host_list(host_list): "detail": "Could not complete operation", "host": host}) - response = {"total": len(response_host_list), - "errors": number_of_errors, - "data": response_host_list} + response = {"total": len(response_host_list), "errors": number_of_errors, "data": response_host_list} return _build_json_response(response, status=207) @@ -84,18 +79,14 @@ def _add_host(host): input_host = Host.from_json(validated_input_host_dict.data) - if ( - not current_identity.is_trusted_system and - current_identity.account_number != input_host.account - ): + if not current_identity.is_trusted_system and current_identity.account_number != input_host.account: raise InventoryException( title="Invalid request", detail="The account number associated with the user does not " "match the account number associated with the host", ) - existing_host = find_existing_host(input_host.account, - input_host.canonical_facts) + existing_host = find_existing_host(input_host.account, input_host.canonical_facts) if existing_host: return update_existing_host(existing_host, input_host) @@ -108,8 +99,7 @@ def find_existing_host(account_number, canonical_facts): existing_host = _find_host_by_elevated_ids(account_number, canonical_facts) if not existing_host: - existing_host = find_host_by_canonical_facts(account_number, - canonical_facts) + existing_host = find_host_by_canonical_facts(account_number, canonical_facts) return existing_host @@ -119,8 +109,7 @@ def _find_host_by_elevated_ids(account_number, canonical_facts): for elevated_cf_name in ELEVATED_CANONICAL_FACT_FIELDS: cf_value = canonical_facts.get(elevated_cf_name) if cf_value: - existing_host = find_host_by_canonical_facts(account_number, - {elevated_cf_name: cf_value}) + existing_host = find_host_by_canonical_facts(account_number, {elevated_cf_name: cf_value}) if existing_host: return existing_host @@ -184,23 +173,15 @@ def get_host_list( order_how=None ): if fqdn: - query = find_hosts_by_canonical_facts( - current_identity.account_number, {"fqdn": fqdn} - ) + query = find_hosts_by_canonical_facts(current_identity.account_number, {"fqdn": fqdn}) elif display_name: - query = find_hosts_by_display_name( - current_identity.account_number, display_name - ) + query = find_hosts_by_display_name(current_identity.account_number, display_name) elif hostname_or_id: - query = find_hosts_by_hostname_or_id( - current_identity.account_number, hostname_or_id) + query = find_hosts_by_hostname_or_id(current_identity.account_number, hostname_or_id) elif insights_id: - query = find_hosts_by_canonical_facts( - current_identity.account_number, {"insights_id": insights_id}) + query = 
find_hosts_by_canonical_facts(current_identity.account_number, {"insights_id": insights_id}) else: - query = Host.query.filter( - Host.account == current_identity.account_number - ) + query = Host.query.filter(Host.account == current_identity.account_number) try: order_by = _params_to_order_by(order_by, order_how) @@ -212,9 +193,7 @@ def get_host_list( query_results = query.paginate(page, per_page, True) logger.debug("Found hosts: %s", query_results.items) - return _build_paginated_host_list_response( - query_results.total, page, per_page, query_results.items - ) + return _build_paginated_host_list_response(query_results.total, page, per_page, query_results.items) def _order_how(column, order_how): @@ -239,9 +218,7 @@ def _params_to_order_by(order_by=None, order_how=None): else: ordering = (Host.display_name.asc(),) elif order_by: - raise ValueError( - 'Unsupported ordering column, use "updated" or "display_name".' - ) + raise ValueError('Unsupported ordering column, use "updated" or "display_name".') elif order_how: raise ValueError( "Providing ordering direction without a column is not supported. " @@ -263,17 +240,12 @@ def _build_paginated_host_list_response(total, page, per_page, host_list): def _build_json_response(json_data, status=200): - return flask.Response(ujson.dumps(json_data), - status=status, - mimetype="application/json") + return flask.Response(ujson.dumps(json_data), status=status, mimetype="application/json") def find_hosts_by_display_name(account, display_name): logger.debug("find_hosts_by_display_name(%s)", display_name) - return Host.query.filter( - (Host.account == account) - & Host.display_name.comparator.contains(display_name) - ) + return Host.query.filter((Host.account == account) & Host.display_name.comparator.contains(display_name)) def find_hosts_by_canonical_facts(account_number, canonical_facts): @@ -296,12 +268,9 @@ def find_hosts_by_hostname_or_id(account_number, hostname): logger.debug("Adding id (uuid) to the filter list") except Exception: # Do not filter using the id - logger.debug("The hostname (%s) could not be converted into a UUID", - hostname, - exc_info=True) + logger.debug("The hostname (%s) could not be converted into a UUID", hostname, exc_info=True) - return Host.query.filter(sqlalchemy.and_(*[Host.account == account_number, - sqlalchemy.or_(*filter_list)])) + return Host.query.filter(sqlalchemy.and_(*[Host.account == account_number, sqlalchemy.or_(*filter_list)])) @api_operation @@ -336,8 +305,7 @@ def delete_by_id(host_id_list): @api_operation @metrics.api_request_time.time() def get_host_by_id(host_id_list, page=1, per_page=100, order_by=None, order_how=None): - query = _get_host_list_by_id_list(current_identity.account_number, - host_id_list) + query = _get_host_list_by_id_list(current_identity.account_number, host_id_list) try: order_by = _params_to_order_by(order_by, order_how) @@ -349,25 +317,17 @@ def get_host_by_id(host_id_list, page=1, per_page=100, order_by=None, order_how= logger.debug("Found hosts: %s", query_results.items) - return _build_paginated_host_list_response( - query_results.total, page, per_page, query_results.items - ) + return _build_paginated_host_list_response(query_results.total, page, per_page, query_results.items) def _get_host_list_by_id_list(account_number, host_id_list): - return Host.query.filter( - (Host.account == account_number) - & Host.id.in_(host_id_list) - ) + return Host.query.filter((Host.account == account_number) & Host.id.in_(host_id_list)) @api_operation @metrics.api_request_time.time() -def 
get_host_system_profile_by_id( - host_id_list, page=1, per_page=100, order_by=None, order_how=None -): - query = _get_host_list_by_id_list(current_identity.account_number, - host_id_list) +def get_host_system_profile_by_id(host_id_list, page=1, per_page=100, order_by=None, order_how=None): + query = _get_host_list_by_id_list(current_identity.account_number, host_id_list) try: order_by = _params_to_order_by(order_by, order_how) @@ -377,8 +337,7 @@ def get_host_system_profile_by_id( query = query.order_by(*order_by) query_results = query.paginate(page, per_page, True) - response_list = [host.to_system_profile_json() - for host in query_results.items] + response_list = [host.to_system_profile_json() for host in query_results.items] json_output = {"total": query_results.total, "count": len(response_list), @@ -397,15 +356,9 @@ def patch_by_id(host_id_list, host_data): validated_patch_host_data = PatchHostSchema(strict=True).load(host_data).data except ValidationError as e: logger.exception(f"Input validation error while patching host: {host_id_list} - {host_data}") - return ({"status": 400, - "title": "Bad Request", - "detail": str(e.messages), - "type": "unknown", - }, - 400) + return ({"status": 400, "title": "Bad Request", "detail": str(e.messages), "type": "unknown"}, 400) - query = _get_host_list_by_id_list(current_identity.account_number, - host_id_list) + query = _get_host_list_by_id_list(current_identity.account_number, host_id_list) hosts_to_update = query.all() @@ -424,8 +377,7 @@ def patch_by_id(host_id_list, host_data): @api_operation @metrics.api_request_time.time() def replace_facts(host_id_list, namespace, fact_dict): - return update_facts_by_namespace(FactOperations.replace, host_id_list, - namespace, fact_dict) + return update_facts_by_namespace(FactOperations.replace, host_id_list, namespace, fact_dict) @api_operation @@ -436,8 +388,7 @@ def merge_facts(host_id_list, namespace, fact_dict): logger.debug(error_msg) return error_msg, 400 - return update_facts_by_namespace(FactOperations.merge, host_id_list, - namespace, fact_dict) + return update_facts_by_namespace(FactOperations.merge, host_id_list, namespace, fact_dict) def update_facts_by_namespace(operation, host_id_list, namespace, fact_dict): @@ -451,10 +402,9 @@ def update_facts_by_namespace(operation, host_id_list, namespace, fact_dict): if len(hosts_to_update) != len(host_id_list): error_msg = ( - "ERROR: The number of hosts requested does not match the number of hosts found in the " - "host database. This could happen if the namespace does not exist or the account " - "number associated with the call does not match the account number associated with one " - "or more the hosts. Rejecting the fact change request." + "ERROR: The number of hosts requested does not match the number of hosts found in the host database. " + "This could happen if the namespace does not exist or the account number associated with the call does " + "not match the account number associated with one or more the hosts. Rejecting the fact change request." 
) logger.debug(error_msg) return error_msg, 400 diff --git a/api/metrics.py b/api/metrics.py index a6d37bc07..98ac4ba53 100644 --- a/api/metrics.py +++ b/api/metrics.py @@ -1,8 +1,7 @@ from prometheus_client import Counter from prometheus_client import Summary -api_request_time = Summary("inventory_request_processing_seconds", - "Time spent processing request") +api_request_time = Summary("inventory_request_processing_seconds", "Time spent processing request") host_dedup_processing_time = Summary("inventory_dedup_processing_seconds", "Time spent looking for existing host (dedup logic)") find_host_using_elevated_ids = Summary("inventory_find_host_using_elevated_ids_processing_seconds", @@ -11,18 +10,13 @@ "Time spent committing a new host to the database") update_host_commit_processing_time = Summary("inventory_update_host_commit_seconds", "Time spent committing a update host to the database") -api_request_count = Counter("inventory_request_count", - "The total amount of API requests") -create_host_count = Counter("inventory_create_host_count", - "The total amount of hosts created") -update_host_count = Counter("inventory_update_host_count", - "The total amount of hosts updated") -delete_host_count = Counter("inventory_delete_host_count", - "The total amount of hosts deleted") +api_request_count = Counter("inventory_request_count", "The total amount of API requests") +create_host_count = Counter("inventory_create_host_count", "The total amount of hosts created") +update_host_count = Counter("inventory_update_host_count", "The total amount of hosts updated") +delete_host_count = Counter("inventory_delete_host_count", "The total amount of hosts deleted") delete_host_processing_time = Summary("inventory_delete_host_commit_seconds", "Time spent deleting hosts from the database") -login_failure_count = Counter("inventory_login_failure_count", - "The total amount of failed login attempts") +login_failure_count = Counter("inventory_login_failure_count", "The total amount of failed login attempts") system_profile_deserialization_time = Summary("inventory_system_profile_deserialization_time", "Time spent deserializing system profile documents") system_profile_commit_processing_time = Summary("inventory_system_profile_commit_processing_time", diff --git a/app/__init__.py b/app/__init__.py index e2659ba2d..625f79a99 100644 --- a/app/__init__.py +++ b/app/__init__.py @@ -33,9 +33,7 @@ def create_app(config_name): app_config = Config() app_config.log_configuration(config_name) - connexion_app = connexion.App( - "inventory", specification_dir="./swagger/", options=connexion_options - ) + connexion_app = connexion.App("inventory", specification_dir="./swagger/", options=connexion_options) # Read the swagger.yml file to configure the endpoints with open("swagger/api.spec.yaml", "rb") as fp: @@ -67,14 +65,11 @@ def create_app(config_name): db.init_app(flask_app) - flask_app.register_blueprint(monitoring_blueprint, - url_prefix=app_config.mgmt_url_path_prefix) + flask_app.register_blueprint(monitoring_blueprint, url_prefix=app_config.mgmt_url_path_prefix) @flask_app.before_request def set_request_id(): - threadctx.request_id = request.headers.get( - REQUEST_ID_HEADER, - UNKNOWN_REQUEST_ID_VALUE) + threadctx.request_id = request.headers.get(REQUEST_ID_HEADER, UNKNOWN_REQUEST_ID_VALUE) init_tasks(app_config, flask_app) diff --git a/app/auth/__init__.py b/app/auth/__init__.py index 1f51908c9..174e2be0b 100644 --- a/app/auth/__init__.py +++ b/app/auth/__init__.py @@ -7,9 +7,7 @@ from app.auth.identity import 
validate from app.logging import get_logger -__all__ = ["current_identity", - "bearer_token_handler", - "authentication_header_handler"] +__all__ = ["current_identity", "bearer_token_handler", "authentication_header_handler"] logger = get_logger(__name__) @@ -20,8 +18,7 @@ def authentication_header_handler(apikey, required_scopes=None): validate(identity) except Exception: login_failure_count.inc() - logger.debug("Failed to validate identity header value", - exc_info=True) + logger.debug("Failed to validate identity header value", exc_info=True) return None return {"uid": identity} @@ -33,8 +30,7 @@ def bearer_token_handler(token): validate(identity) except Exception: login_failure_count.inc() - logger.debug("Failed to validate bearer token value", - exc_info=True) + logger.debug("Failed to validate bearer token value", exc_info=True) return None return {"uid": identity} diff --git a/app/events.py b/app/events.py index 86961c5ad..7279b6b9b 100644 --- a/app/events.py +++ b/app/events.py @@ -14,6 +14,4 @@ class HostEvent(Schema): def delete(id): - return HostEvent(strict=True).dumps( - {"id": id, "timestamp": datetime.utcnow(), "type": "delete"} - ).data + return HostEvent(strict=True).dumps({"id": id, "timestamp": datetime.utcnow(), "type": "delete"}).data diff --git a/app/exceptions.py b/app/exceptions.py index ede87312b..5db5b9c92 100644 --- a/app/exceptions.py +++ b/app/exceptions.py @@ -6,8 +6,7 @@ def __init__(self, status=400, title=None, detail=None, type="about:blank"): self.type = type def to_json(self): - return {"detail": self.detail, "status": self.status, - "title": self.title, "type": self.type} + return {"detail": self.detail, "status": self.status, "title": self.title, "type": self.type} class InputFormatException(InventoryException): diff --git a/app/logging.py b/app/logging.py index cd5d79c73..fc0a8e921 100644 --- a/app/logging.py +++ b/app/logging.py @@ -43,8 +43,7 @@ def _configure_watchtower_logging_handler(): log_group = os.getenv("AWS_LOG_GROUP", "platform") stream_name = _get_aws_logging_stream_name(OPENSHIFT_ENVIRONMENT_NAME_FILE) - if all([aws_access_key_id, aws_secret_access_key, - aws_region_name, stream_name]): + if all([aws_access_key_id, aws_secret_access_key, aws_region_name, stream_name]): print(f"Configuring watchtower logging (log_group={log_group}, stream_name={stream_name})") boto3_session = Session(aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, @@ -57,8 +56,7 @@ def _configure_watchtower_logging_handler(): handler.setFormatter(logstash_formatter.LogstashFormatterV1()) root.addHandler(handler) else: - print("Unable to configure watchtower logging. Please " - "verify watchtower logging configuration!") + print("Unable to configure watchtower logging. Please verify watchtower logging configuration!") def _get_aws_logging_stream_name(namespace_filename): @@ -67,8 +65,7 @@ def _get_aws_logging_stream_name(namespace_filename): return namespace_fh.read() except FileNotFoundError: namespace = DEFAULT_AWS_LOGGING_NAMESPACE - print(f"Error reading the OpenShift namepsace file. " - f"Using {namespace} as aws logging stream name") + print(f"Error reading the OpenShift namepsace file. 
Using {namespace} as aws logging stream name")      return namespace   @@ -117,9 +114,7 @@ class InventoryGunicornLogger(glogging.Logger):      def setup(self, cfg):          super().setup(cfg)  -        self._set_handler(self.error_log, -                          cfg.errorlog, -                          logstash_formatter.LogstashFormatterV1()) +        self._set_handler(self.error_log, cfg.errorlog, logstash_formatter.LogstashFormatterV1())   def get_logger(name): diff --git a/app/models.py b/app/models.py index 656b4270a..48e0bc024 100644 --- a/app/models.py +++ b/app/models.py @@ -51,9 +51,7 @@ class Host(db.Model): display_name = db.Column(db.String(200), default=_set_display_name_on_save) ansible_host = db.Column(db.String(255)) created_on = db.Column(db.DateTime, default=datetime.utcnow) -    modified_on = db.Column( -        db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow -    ) +    modified_on = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow) facts = db.Column(JSONB) tags = db.Column(JSONB) canonical_facts = db.Column(JSONB) @@ -70,9 +68,9 @@ def __init__( ):  if not canonical_facts: -            raise InventoryException(title="Invalid request", -                                     detail="At least one of the canonical " -                                     "fact fields must be present.") +            raise InventoryException( +                title="Invalid request", detail="At least one of the canonical " "fact fields must be present." +            )  self.canonical_facts = canonical_facts  @@ -111,9 +109,7 @@ def to_json(self): return json_dict  def to_system_profile_json(self): -        json_dict = {"id": str(self.id), -                     "system_profile": self.system_profile_facts or {} -                     } +        json_dict = {"id": str(self.id), "system_profile": self.system_profile_facts or {}} return json_dict  def save(self): @@ -132,8 +128,7 @@ def patch(self, patch_data): logger.debug("patching host (id=%s) with data: %s", self.id, patch_data)  if not patch_data: -            raise InventoryException(title="Bad Request", -                                     detail="Patch json document cannot be empty.") +            raise InventoryException(title="Bad Request", detail="Patch json document cannot be empty.")  self._update_ansible_host(patch_data.get("ansible_host"))  @@ -163,7 +158,7 @@ def update_canonical_facts(self, canonical_facts): canonical_facts ) self.canonical_facts.update(canonical_facts) -        logger.debug("Host (id=%s) has updated canonical_facts (%s)", self.id, self.canonical_facts) +        logger.debug("Host (id=%s) has updated canonical_facts (%s)", self.id, self.canonical_facts) orm.attributes.flag_modified(self, "canonical_facts") def update_facts(self, facts_dict): @@ -196,8 +191,7 @@ def _update_system_profile(self, input_system_profile): self.system_profile_facts = input_system_profile else: # Update the fields that were passed in -            self.system_profile_facts = {**self.system_profile_facts, -                                         **input_system_profile} +            self.system_profile_facts = {**self.system_profile_facts, **input_system_profile} orm.attributes.flag_modified(self, "system_profile_facts") def __repr__(self): @@ -278,8 +272,7 @@ def from_json(fact_list): @staticmethod def to_json(fact_dict): fact_list = [ -            {"namespace": namespace, "facts": facts if facts else {}} -            for namespace, facts in fact_dict.items() +            {"namespace": namespace, "facts": facts if facts else {}} for namespace, facts in fact_dict.items() ] return fact_list @@ -356,18 +349,15 @@ class FactsSchema(Schema): class HostSchema(Schema): display_name = fields.Str(validate=validate.Length(min=1, max=200)) ansible_host = fields.Str(validate=validate.Length(min=0, max=255)) -    account = fields.Str(required=True, -                         validate=validate.Length(min=1, max=10)) +    account = fields.Str(required=True, validate=validate.Length(min=1, 
max=10)) insights_id = fields.Str(validate=verify_uuid_format) rhel_machine_id = fields.Str(validate=verify_uuid_format) subscription_manager_id = fields.Str(validate=verify_uuid_format) satellite_id = fields.Str(validate=verify_uuid_format) fqdn = fields.Str(validate=validate.Length(min=1, max=255)) bios_uuid = fields.Str(validate=verify_uuid_format) - ip_addresses = fields.List( - fields.Str(validate=validate.Length(min=1, max=255))) - mac_addresses = fields.List( - fields.Str(validate=validate.Length(min=1, max=255))) + ip_addresses = fields.List(fields.Str(validate=validate.Length(min=1, max=255))) + mac_addresses = fields.List(fields.Str(validate=validate.Length(min=1, max=255))) external_id = fields.Str(validate=validate.Length(min=1, max=500)) facts = fields.List(fields.Nested(FactsSchema)) system_profile = fields.Nested(SystemProfileSchema) diff --git a/host_dumper.py b/host_dumper.py index 369186578..02394f614 100644 --- a/host_dumper.py +++ b/host_dumper.py @@ -13,17 +13,11 @@ " This util is expected to be used within the image/pod." ) group = parser.add_mutually_exclusive_group(required=True) -group.add_argument("--id", - help="search for a host using id") -group.add_argument("--hostname", - help="search for a host using display_name, fqdn") -group.add_argument("--insights_id", - help="search for a host using insights_id") -group.add_argument("--account_number", - help="dump all hosts associated with account") -parser.add_argument("--no-pp", - help="enable pretty printing", - action="store_true") +group.add_argument("--id", help="search for a host using id") +group.add_argument("--hostname", help="search for a host using display_name, fqdn") +group.add_argument("--insights_id", help="search for a host using insights_id") +group.add_argument("--account_number", help="dump all hosts associated with account") +parser.add_argument("--no-pp", help="enable pretty printing", action="store_true") args = parser.parse_args() with application.app_context(): @@ -32,9 +26,7 @@ if args.id: host_id_list = [args.id] print("looking up host using id") - query_results = Host.query.filter( - Host.id.in_(host_id_list) - ).all() + query_results = Host.query.filter(Host.id.in_(host_id_list)).all() elif args.hostname: print("looking up host using display_name, fqdn") query_results = Host.query.filter( @@ -47,9 +39,7 @@ Host.canonical_facts.comparator.contains({'insights_id': args.insights_id}) ).all() elif args.account_number: - query_results = Host.query.filter( - Host.account == args.account_number - ).all() + query_results = Host.query.filter(Host.account == args.account_number).all() json_host_list = [host.to_json() for host in query_results] diff --git a/migrations/env.py b/migrations/env.py index 17849bf81..fde6e1899 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -22,9 +22,7 @@ # from myapp import mymodel # target_metadata = mymodel.Base.metadata -config.set_main_option( - "sqlalchemy.url", current_app.config.get("SQLALCHEMY_DATABASE_URI") -) +config.set_main_option("sqlalchemy.url", current_app.config.get("SQLALCHEMY_DATABASE_URI")) target_metadata = current_app.extensions["migrate"].db.metadata # other values from the config, defined by the needs of env.py, @@ -72,9 +70,7 @@ def process_revision_directives(context, revision, directives): logger.info("No changes in schema detected.") engine = engine_from_config( - config.get_section(config.config_ini_section), - prefix="sqlalchemy.", - poolclass=pool.NullPool, + config.get_section(config.config_ini_section), prefix="sqlalchemy.", 
poolclass=pool.NullPool ) connection = engine.connect() diff --git a/migrations/versions/2d951983fa89_.py b/migrations/versions/2d951983fa89_.py index cc0aa78fe..ec42decee 100644 --- a/migrations/versions/2d951983fa89_.py +++ b/migrations/versions/2d951983fa89_.py @@ -27,18 +27,13 @@ def upgrade(): sa.Column("modified_on", sa.DateTime(), nullable=True), sa.Column("facts", postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column("tags", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column( - "canonical_facts", postgresql.JSONB(astext_type=sa.Text()), nullable=True - ), + sa.Column("canonical_facts", postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.PrimaryKeyConstraint("id"), ) op.create_index("idxaccount", "hosts", ["account"], unique=False) - op.create_index("idxinsightsid", - "hosts", - [sa.text("(canonical_facts ->> 'insights_id')")] - ) + op.create_index("idxinsightsid", "hosts", [sa.text("(canonical_facts ->> 'insights_id')")]) op.create_index("idxgincanonicalfacts", "hosts", diff --git a/tasks/__init__.py b/tasks/__init__.py index b8b5d2504..ee998e7f9 100644 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -73,9 +73,7 @@ def _init_system_profile_consumer(config, flask_app, handler=msg_handler, consum if consumer is None: consumer = KafkaConsumer( - config.system_profile_topic, - group_id=config.consumer_group, - bootstrap_servers=config.bootstrap_servers) + config.system_profile_topic, group_id=config.consumer_group, bootstrap_servers=config.bootstrap_servers) def _f(): with flask_app.app_context(): @@ -90,7 +88,5 @@ def _f(): logger.exception("uncaught exception in handler, moving on.") metrics.system_profile_failure_count.inc() - t = Thread( - target=_f, - daemon=True) + t = Thread(target=_f, daemon=True) t.start() diff --git a/test_api.py b/test_api.py index feeafbb28..3160f3a39 100755 --- a/test_api.py +++ b/test_api.py @@ -90,31 +90,21 @@ def setUp(self): def get(self, path, status=200, return_response_as_json=True): return self._response_check( - self.client().get(path, headers=self._get_valid_auth_header()), - status, - return_response_as_json, + self.client().get(path, headers=self._get_valid_auth_header()), status, return_response_as_json ) def post(self, path, data, status=200, return_response_as_json=True): - return self._make_http_call( - self.client().post, path, data, status, return_response_as_json - ) + return self._make_http_call(self.client().post, path, data, status, return_response_as_json) def patch(self, path, data, status=200, return_response_as_json=True): - return self._make_http_call( - self.client().patch, path, data, status, return_response_as_json - ) + return self._make_http_call(self.client().patch, path, data, status, return_response_as_json) def put(self, path, data, status=200, return_response_as_json=True): - return self._make_http_call( - self.client().put, path, data, status, return_response_as_json - ) + return self._make_http_call(self.client().put, path, data, status, return_response_as_json) def delete(self, path, status=200, return_response_as_json=True): return self._response_check( - self.client().delete(path, headers=self._get_valid_auth_header()), - status, - return_response_as_json, + self.client().delete(path, headers=self._get_valid_auth_header()), status, return_response_as_json ) def verify_error_response(self, response, expected_title=None, @@ -131,16 +121,12 @@ def _verify_value(field_name, expected_value): _verify_value("detail", expected_detail) _verify_value("type", expected_type) - def 
_make_http_call( - self, http_method, path, data, status, return_response_as_json=True - ): + def _make_http_call(self, http_method, path, data, status, return_response_as_json=True): json_data = json.dumps(data) headers = self._get_valid_auth_header() headers["content-type"] = "application/json" return self._response_check( - http_method(path, data=json_data, headers=headers), - status, - return_response_as_json, + http_method(path, data=json_data, headers=headers), status, return_response_as_json ) def _response_check(self, response, status, return_response_as_json): @@ -186,36 +172,26 @@ def _build_host_id_list_for_url(self, host_list): return ",".join(host_id_list) def _verify_host_status(self, response, host_index, expected_status): - self.assertEqual(response["data"][host_index]["status"], - expected_status) + self.assertEqual(response["data"][host_index]["status"], expected_status) def _pluck_host_from_response(self, response, host_index): return response["data"][host_index]["host"] - def _validate_host(self, received_host, expected_host, - expected_id=id): + def _validate_host(self, received_host, expected_host, expected_id=id): self.assertIsNotNone(received_host["id"]) self.assertEqual(received_host["id"], expected_id) self.assertEqual(received_host["account"], expected_host.account) - self.assertEqual(received_host["insights_id"], - expected_host.insights_id) - self.assertEqual(received_host["rhel_machine_id"], - expected_host.rhel_machine_id) - self.assertEqual(received_host["subscription_manager_id"], - expected_host.subscription_manager_id) - self.assertEqual(received_host["satellite_id"], - expected_host.satellite_id) + self.assertEqual(received_host["insights_id"], expected_host.insights_id) + self.assertEqual(received_host["rhel_machine_id"], expected_host.rhel_machine_id) + self.assertEqual(received_host["subscription_manager_id"], expected_host.subscription_manager_id) + self.assertEqual(received_host["satellite_id"], expected_host.satellite_id) self.assertEqual(received_host["bios_uuid"], expected_host.bios_uuid) self.assertEqual(received_host["fqdn"], expected_host.fqdn) - self.assertEqual(received_host["mac_addresses"], - expected_host.mac_addresses) - self.assertEqual(received_host["ip_addresses"], - expected_host.ip_addresses) - self.assertEqual(received_host["display_name"], - expected_host.display_name) + self.assertEqual(received_host["mac_addresses"], expected_host.mac_addresses) + self.assertEqual(received_host["ip_addresses"], expected_host.ip_addresses) + self.assertEqual(received_host["display_name"], expected_host.display_name) self.assertEqual(received_host["facts"], expected_host.facts) - self.assertEqual(received_host["ansible_host"], - expected_host.ansible_host) + self.assertEqual(received_host["ansible_host"], expected_host.ansible_host) self.assertIsNotNone(received_host["created"]) self.assertIsNotNone(received_host["updated"]) @@ -275,9 +251,7 @@ def test_create_and_update(self): # sanity check # host_lookup_results["results"][0]["facts"][0]["facts"]["key2"] = "blah" # host_lookup_results["results"][0]["insights_id"] = "1.2.3.4" - self._validate_host(host_lookup_results["results"][0], - host_data, - expected_id=original_id) + self._validate_host(host_lookup_results["results"][0], host_data, expected_id=original_id) def test_create_host_update_with_same_insights_id_and_different_canonical_facts(self): original_insights_id = generate_uuid() @@ -305,15 +279,14 @@ def test_create_host_update_with_same_insights_id_and_different_canonical_facts( # Change 
the canonical facts except for the insights_id host_data.rhel_machine_id = generate_uuid() - host_data.ip_addresses = ["192.168.1.44", "10.0.0.2", ] + host_data.ip_addresses = ["192.168.1.44", "10.0.0.2"] host_data.subscription_manager_id = generate_uuid() host_data.satellite_id = generate_uuid() host_data.bios_uuid = generate_uuid() host_data.fqdn = "expected_fqdn" host_data.mac_addresses = ["ff:ee:dd:cc:bb:aa"] host_data.external_id = "fedcba" - host_data.facts = [{"namespace": "ns1", - "facts": {"newkey": "newvalue"}}] + host_data.facts = [{"namespace": "ns1", "facts": {"newkey": "newvalue"}}] # Update the host response = self.post(HOST_URL, [host_data.data()], 207) @@ -328,9 +301,7 @@ def test_create_host_update_with_same_insights_id_and_different_canonical_facts( # Retrieve the host using the id that we first received data = self.get(f"{HOST_URL}/{original_id}", 200) - self._validate_host(data["results"][0], - host_data, - expected_id=original_id) + self._validate_host(data["results"][0], host_data, expected_id=original_id) def test_create_host_with_empty_facts_display_name_then_update(self): # Create a host with empty facts, and display_name @@ -359,9 +330,7 @@ def test_create_host_with_empty_facts_display_name_then_update(self): host_lookup_results = self.get(f"{HOST_URL}/{original_id}", 200) - self._validate_host(host_lookup_results["results"][0], - host_data, - expected_id=original_id) + self._validate_host(host_lookup_results["results"][0], host_data, expected_id=original_id) def test_create_and_update_multiple_hosts_with_account_mismatch(self): """ @@ -410,9 +379,7 @@ def test_create_host_without_canonical_facts(self): response_data = response_data["data"][0] - self.verify_error_response(response_data, - expected_title="Invalid request", - expected_status=400) + self.verify_error_response(response_data, expected_title="Invalid request", expected_status=400) def test_create_host_without_account(self): host_data = HostWrapper(test_data(facts=None)) @@ -432,8 +399,7 @@ def test_create_host_with_mismatched_account_numbers(self): response_data = response_data["data"][0] - self.verify_error_response(response_data, - expected_title="Invalid request") + self.verify_error_response(response_data, expected_title="Invalid request") def test_create_host_with_invalid_facts(self): facts_with_no_namespace = copy.deepcopy(FACTS) @@ -445,10 +411,7 @@ def test_create_host_with_invalid_facts(self): facts_with_empty_str_namespace = copy.deepcopy(FACTS) facts_with_empty_str_namespace[0]["namespace"] = "" - invalid_facts = [facts_with_no_namespace, - facts_with_no_facts, - facts_with_empty_str_namespace, - ] + invalid_facts = [facts_with_no_namespace, facts_with_no_facts, facts_with_empty_str_namespace] for invalid_fact in invalid_facts: with self.subTest(invalid_fact=invalid_fact): @@ -456,17 +419,10 @@ def test_create_host_with_invalid_facts(self): response_data = self.post(HOST_URL, [host_data.data()], 400) - self.verify_error_response(response_data, - expected_title="Bad Request") + self.verify_error_response(response_data, expected_title="Bad Request") def test_create_host_with_invalid_uuid_field_values(self): - uuid_field_names = ( - "insights_id", - "rhel_machine_id", - "subscription_manager_id", - "satellite_id", - "bios_uuid", - ) + uuid_field_names = ("insights_id", "rhel_machine_id", "subscription_manager_id", "satellite_id", "bios_uuid") for field_name in uuid_field_names: with self.subTest(uuid_field=field_name): @@ -480,8 +436,7 @@ def 
test_create_host_with_invalid_uuid_field_values(self): self.assertEqual(error_host["status"], 400) - self.verify_error_response(error_host, - expected_title="Bad Request") + self.verify_error_response(error_host, expected_title="Bad Request") def test_create_host_with_non_nullable_fields_as_None(self): non_nullable_field_names = ("display_name", @@ -513,13 +468,10 @@ def test_create_host_with_non_nullable_fields_as_None(self): response_data = self.post(HOST_URL, [invalid_host_dict], 400) - self.verify_error_response(response_data, - expected_title="Bad Request") + self.verify_error_response(response_data, expected_title="Bad Request") def test_create_host_with_valid_ip_address(self): - valid_ip_arrays = [["blah"], - ["1.1.1.1", "sigh"], - ] + valid_ip_arrays = [["blah"], ["1.1.1.1", "sigh"]] for ip_array in valid_ip_arrays: with self.subTest(ip_array=ip_array): @@ -534,10 +486,7 @@ def test_create_host_with_valid_ip_address(self): self.assertEqual(error_host["status"], 201) def test_create_host_with_invalid_ip_address(self): - invalid_ip_arrays = [[], - [""], - ["a"*256, ], - ] + invalid_ip_arrays = [[], [""], ["a" * 256]] for ip_array in invalid_ip_arrays: with self.subTest(ip_array=ip_array): @@ -551,13 +500,10 @@ def test_create_host_with_invalid_ip_address(self): self.assertEqual(error_host["status"], 400) - self.verify_error_response(error_host, - expected_title="Bad Request") + self.verify_error_response(error_host, expected_title="Bad Request") def test_create_host_with_valid_mac_address(self): - valid_mac_arrays = [["blah"], - ["11:22:33:44:55:66", "blah"], - ] + valid_mac_arrays = [["blah"], ["11:22:33:44:55:66", "blah"]] for mac_array in valid_mac_arrays: with self.subTest(mac_array=mac_array): @@ -572,10 +518,7 @@ def test_create_host_with_valid_mac_address(self): self.assertEqual(error_host["status"], 201) def test_create_host_with_invalid_mac_address(self): - invalid_mac_arrays = [[], - [""], - ["11:22:33:44:55:66", "a"*256], - ] + invalid_mac_arrays = [[], [""], ["11:22:33:44:55:66", "a" * 256]] for mac_array in invalid_mac_arrays: with self.subTest(mac_array=mac_array): @@ -589,13 +532,12 @@ def test_create_host_with_invalid_mac_address(self): self.assertEqual(error_host["status"], 400) - self.verify_error_response(error_host, - expected_title="Bad Request") + self.verify_error_response(error_host, expected_title="Bad Request") def test_create_host_with_invalid_display_name(self): host_data = HostWrapper(test_data(facts=None)) - invalid_display_names = ["", "a"*201] + invalid_display_names = ["", "a" * 201] for display_name in invalid_display_names: with self.subTest(display_name=display_name): @@ -607,13 +549,12 @@ def test_create_host_with_invalid_display_name(self): self.assertEqual(error_host["status"], 400) - self.verify_error_response(error_host, - expected_title="Bad Request") + self.verify_error_response(error_host, expected_title="Bad Request") def test_create_host_with_invalid_fqdn(self): host_data = HostWrapper(test_data(facts=None)) - invalid_fqdns = ["", "a"*256] + invalid_fqdns = ["", "a" * 256] for fqdn in invalid_fqdns: with self.subTest(fqdn=fqdn): @@ -625,13 +566,12 @@ def test_create_host_with_invalid_fqdn(self): self.assertEqual(error_host["status"], 400) - self.verify_error_response(error_host, - expected_title="Bad Request") + self.verify_error_response(error_host, expected_title="Bad Request") def test_create_host_with_invalid_external_id(self): host_data = HostWrapper(test_data(facts=None)) - invalid_external_ids = ["", "a"*501] + invalid_external_ids 
= ["", "a" * 501] for external_id in invalid_external_ids: with self.subTest(external_id=external_id): @@ -643,8 +583,7 @@ def test_create_host_with_invalid_external_id(self): self.assertEqual(error_host["status"], 400) - self.verify_error_response(error_host, - expected_title="Bad Request") + self.verify_error_response(error_host, expected_title="Bad Request") def test_create_host_with_ansible_host(self): # Create a host with ansible_host field @@ -662,9 +601,7 @@ def test_create_host_with_ansible_host(self): host_lookup_results = self.get(f"{HOST_URL}/{original_id}", 200) - self._validate_host(host_lookup_results["results"][0], - host_data, - expected_id=original_id) + self._validate_host(host_lookup_results["results"][0], host_data, expected_id=original_id) def test_create_host_without_ansible_host_then_update(self): # Create a host without ansible_host field @@ -681,9 +618,7 @@ def test_create_host_without_ansible_host_then_update(self): original_id = created_host["id"] - ansible_hosts = ["ima_ansible_host_"+generate_uuid(), - "", - ] + ansible_hosts = ["ima_ansible_host_" + generate_uuid(), ""] # Update the ansible_host for ansible_host in ansible_hosts: @@ -696,14 +631,12 @@ def test_create_host_without_ansible_host_then_update(self): host_lookup_results = self.get(f"{HOST_URL}/{original_id}", 200) - self._validate_host(host_lookup_results["results"][0], - host_data, - expected_id=original_id) + self._validate_host(host_lookup_results["results"][0], host_data, expected_id=original_id) def test_create_host_with_invalid_ansible_host(self): host_data = HostWrapper(test_data(facts=None)) - invalid_ansible_host = ["a"*256] + invalid_ansible_host = ["a" * 256] for ansible_host in invalid_ansible_host: with self.subTest(ansible_host=ansible_host): @@ -715,8 +648,7 @@ def test_create_host_with_invalid_ansible_host(self): self.assertEqual(error_host["status"], 400) - self.verify_error_response(error_host, - expected_title="Bad Request") + self.verify_error_response(error_host, expected_title="Bad Request") class ResolveDisplayNameOnCreationTestCase(DBAPITestCase): @@ -743,9 +675,7 @@ def test_create_host_without_display_name_and_without_fqdn(self): # Explicitly set the display_name to the be id...this is expected here host_data.display_name = created_host["id"] - self._validate_host(host_lookup_results["results"][0], - host_data, - expected_id=original_id) + self._validate_host(host_lookup_results["results"][0], host_data, expected_id=original_id) def test_create_host_without_display_name_and_with_fqdn(self): """ @@ -772,9 +702,7 @@ def test_create_host_without_display_name_and_with_fqdn(self): # Explicitly set the display_name ...this is expected here host_data.display_name = expected_display_name - self._validate_host(host_lookup_results["results"][0], - host_data, - expected_id=original_id) + self._validate_host(host_lookup_results["results"][0], host_data, expected_id=original_id) class BulkCreateHostsTestCase(DBAPITestCase): @@ -827,9 +755,7 @@ def test_create_and_update_multiple_hosts_with_different_accounts(self): expected_host = HostWrapper(host_list[i]) - self._validate_host(host["host"], - expected_host, - expected_id=expected_host.id) + self._validate_host(host["host"], expected_host, expected_id=expected_host.id) i += 1 @@ -962,10 +888,8 @@ def test_create_host_without_system_profile_then_update_with_system_profile(self system_profiles = [({}, {})] # Only set the enabled_services to start out with - enabled_services_only_system_profile = {"enabled_services": - ["firewalld"]} - 
system_profiles.append((enabled_services_only_system_profile, - enabled_services_only_system_profile)) + enabled_services_only_system_profile = {"enabled_services": ["firewalld"]} + system_profiles.append((enabled_services_only_system_profile, enabled_services_only_system_profile)) # Set the entire system profile...overwriting the enabled_service # set from before @@ -973,22 +897,15 @@ def test_create_host_without_system_profile_then_update_with_system_profile(self system_profiles.append((full_system_profile, full_system_profile)) # Change the enabled_services - full_system_profile = {**full_system_profile, - **enabled_services_only_system_profile} - system_profiles.append((enabled_services_only_system_profile, - full_system_profile)) + full_system_profile = {**full_system_profile, **enabled_services_only_system_profile} + system_profiles.append((enabled_services_only_system_profile, full_system_profile)) # Make sure an empty system profile doesn't overwrite the data - system_profiles.append(({}, - full_system_profile)) + system_profiles.append(({}, full_system_profile)) for i, (system_profile, expected_system_profile) in enumerate(system_profiles): with self.subTest(system_profile=i): - mq_message = { - "id": original_id, - "request_id": None, - "system_profile": system_profile - } + mq_message = {"id": original_id, "request_id": None, "system_profile": system_profile} with self.app.app_context(): msg_handler(mq_message) @@ -997,8 +914,7 @@ def test_create_host_without_system_profile_then_update_with_system_profile(self self.assertEqual(original_id, actual_host["id"]) - self.assertEqual(actual_host["system_profile"], - expected_system_profile) + self.assertEqual(actual_host["system_profile"], expected_system_profile) def test_create_host_with_null_system_profile(self): facts = None @@ -1011,9 +927,7 @@ def test_create_host_with_null_system_profile(self): # Create the host without a system profile response = self.post(HOST_URL, [host], 400) - self.verify_error_response(response, - expected_title="Bad Request", - expected_status=400) + self.verify_error_response(response, expected_title="Bad Request", expected_status=400) def test_create_host_with_system_profile_with_invalid_data(self): facts = None @@ -1073,8 +987,7 @@ def test_create_host_with_system_profile_with_different_yum_urls(self): self.assertEqual(original_id, actual_host["id"]) - self.assertEqual(actual_host["system_profile"], - host["system_profile"]) + self.assertEqual(actual_host["system_profile"], host["system_profile"]) def test_create_host_with_system_profile_with_different_cloud_providers(self): facts = None @@ -1102,8 +1015,7 @@ def test_create_host_with_system_profile_with_different_cloud_providers(self): self.assertEqual(original_id, actual_host["id"]) - self.assertEqual(actual_host["system_profile"], - host["system_profile"]) + self.assertEqual(actual_host["system_profile"], host["system_profile"]) def test_get_system_profile_of_host_that_does_not_have_system_profile(self): facts = None @@ -1127,8 +1039,7 @@ def test_get_system_profile_of_host_that_does_not_have_system_profile(self): self.assertEqual(original_id, actual_host["id"]) - self.assertEqual(actual_host["system_profile"], - expected_system_profile) + self.assertEqual(actual_host["system_profile"], expected_system_profile) def test_get_system_profile_of_multiple_hosts(self): facts = None @@ -1149,18 +1060,13 @@ def test_get_system_profile_of_multiple_hosts(self): original_id = created_host["id"] host_id_list.append(original_id) - 
expected_system_profiles.append({ - "id": original_id, - "system_profile": host["system_profile"] - }) + expected_system_profiles.append({"id": original_id, "system_profile": host["system_profile"]}) url_host_id_list = ",".join(host_id_list) test_url = f"{HOST_URL}/{url_host_id_list}/system_profile" host_lookup_results = self.get(test_url, 200) - self.assertEqual( - len(expected_system_profiles), len(host_lookup_results["results"]) - ) + self.assertEqual(len(expected_system_profiles), len(host_lookup_results["results"])) for expected_system_profile in expected_system_profiles: self.assertIn(expected_system_profile, host_lookup_results["results"]) @@ -1179,9 +1085,7 @@ def test_get_system_profile_with_invalid_host_id(self): for host_id in invalid_host_ids: with self.subTest(invalid_host_id=host_id): response = self.get(f"{HOST_URL}/{host_id}/system_profile", 400) - self.verify_error_response(response, - expected_title="Bad Request", - expected_status=400) + self.verify_error_response(response, expected_title="Bad Request", expected_status=400) class PreCreatedHostsBaseTestCase(DBAPITestCase, PaginationTestCase): @@ -1224,9 +1128,7 @@ def test_update_fields(self): for patch_doc in patch_docs: with self.subTest(valid_patch_doc=patch_doc): - response_data = self.patch(f"{HOST_URL}/{original_id}", - patch_doc, - 200) + response_data = self.patch(f"{HOST_URL}/{original_id}", patch_doc, 200) response_data = self.get(f"{HOST_URL}/{original_id}", 200) @@ -1247,8 +1149,7 @@ def test_patch_with_branch_id_parameter(self): self.patch(test_url, patch_doc, 200) def test_update_fields_on_multiple_hosts(self): - patch_doc = {"display_name": "fred_flintstone", - "ansible_host": "barney_rubble"} + patch_doc = {"display_name": "fred_flintstone", "ansible_host": "barney_rubble"} url_host_id_list = self._build_host_id_list_for_url(self.added_hosts) @@ -1289,13 +1190,9 @@ def test_invalid_data(self): for patch_doc in invalid_data_list: with self.subTest(invalid_patch_doc=patch_doc): - response = self.patch(f"{HOST_URL}/{original_id}", - patch_doc, - status=400) + response = self.patch(f"{HOST_URL}/{original_id}", patch_doc, status=400) - self.verify_error_response(response, - expected_title="Bad Request", - expected_status=400) + self.verify_error_response(response, expected_title="Bad Request", expected_status=400) def test_invalid_host_id(self): patch_doc = {"display_name": "branch_id_test"} @@ -1433,11 +1330,7 @@ def _base_query_test(self, host_id_list, expected_host_list): self._base_paging_test(url, len(expected_host_list)) def test_query_existent_hosts(self): - host_lists = [ - self.added_hosts[0:1], - self.added_hosts[1:3], - self.added_hosts, - ] + host_lists = [self.added_hosts[0:1], self.added_hosts[1:3], self.added_hosts] for host_list in host_lists: with self.subTest(host_list=host_list): host_id_list = self._build_host_id_list_for_url(host_list) @@ -1494,9 +1387,7 @@ def test_query_invalid_paging_parameters(self): invalid_values = ["-1", "0", "notanumber"] for paging_parameter in paging_parameters: for invalid_value in invalid_values: - with self.subTest( - paging_parameter=paging_parameter, invalid_value=invalid_value - ): + with self.subTest(paging_parameter=paging_parameter, invalid_value=invalid_value): self.get(f"{base_url}?{paging_parameter}={invalid_value}", 400) @@ -1570,11 +1461,7 @@ class QueryOrderTestCase(PreCreatedHostsBaseTestCase): def _queries_subtests_with_added_hosts(self): host_id_list = [host.id for host in self.added_hosts] url_host_id_list = ",".join(host_id_list) - urls = ( - 
HOST_URL, - f"{HOST_URL}/{url_host_id_list}", - f"{HOST_URL}/{url_host_id_list}/system_profile", - ) + urls = (HOST_URL, f"{HOST_URL}/{url_host_id_list}", f"{HOST_URL}/{url_host_id_list}/system_profile") for url in urls: with self.subTest(url=url): yield url @@ -1765,9 +1652,7 @@ def test_replace_and_add_facts_to_multiple_hosts_including_nonexistent_host(self url_host_id_list = self._build_host_id_list_for_url(host_list) # Add a couple of host ids that should not exist in the database - url_host_id_list = ( - url_host_id_list + "," + generate_uuid() + "," + generate_uuid() - ) + url_host_id_list = url_host_id_list + "," + generate_uuid() + "," + generate_uuid() patch_url = HOST_URL + "/" + url_host_id_list + "/facts/" + target_namespace diff --git a/test_db_model.py b/test_db_model.py index 9749721a5..ab16ddb15 100644 --- a/test_db_model.py +++ b/test_db_model.py @@ -23,8 +23,7 @@ def _create_host(insights_id=None, fqdn=None, display_name=None): def test_create_host_with_canonical_facts_as_None(flask_app_fixture): # Test to make sure canonical facts that are None or '' do # not get inserted into the db - invalid_canonical_facts = {"fqdn": None, - "insights_id": '', } + invalid_canonical_facts = {"fqdn": None, "insights_id": ""} valid_canonical_facts = {"bios_uuid": "1234"} host_dict = {**invalid_canonical_facts, **valid_canonical_facts} @@ -50,9 +49,7 @@ def test_create_host_with_display_name_and_fqdn_as_empty_str(flask_app_fixture): def test_update_existing_host_fix_display_name_using_existing_fqdn(flask_app_fixture): expected_fqdn = "host1.domain1.com" insights_id = str(uuid.uuid4()) - existing_host = _create_host(insights_id=insights_id, - fqdn=expected_fqdn, - display_name=None) + existing_host = _create_host(insights_id=insights_id, fqdn=expected_fqdn, display_name=None) # Clear the display_name existing_host.display_name = None @@ -94,9 +91,7 @@ def test_update_existing_host_fix_display_name_using_id(flask_app_fixture): assert existing_host.display_name is None # Update the host - input_host = Host( - {"insights_id": existing_host.canonical_facts["insights_id"]}, display_name="" - ) + input_host = Host({"insights_id": existing_host.canonical_facts["insights_id"]}, display_name="") existing_host.update(input_host) assert existing_host.display_name == existing_host.id @@ -105,8 +100,7 @@ def test_update_existing_host_fix_display_name_using_id(flask_app_fixture): def test_create_host_without_system_profile(flask_app_fixture): # Test the situation where the db/sqlalchemy sets the # system_profile_facts to None - created_host = _create_host(fqdn="fred.flintstone.com", - display_name="fred") + created_host = _create_host(fqdn="fred.flintstone.com", display_name="fred") assert created_host.system_profile_facts == {} diff --git a/test_host_dedup_logic.py b/test_host_dedup_logic.py index 8de09e83c..d8e3d137e 100644 --- a/test_host_dedup_logic.py +++ b/test_host_dedup_logic.py @@ -32,10 +32,7 @@ def basic_host_dedup_test(initial_canonical_facts, search_canonical_facts): def test_find_host_using_subset_canonical_fact_match(flask_app_fixture): fqdn = "fred.flintstone.com" - canonical_facts = {"fqdn": fqdn, - "bios_uuid": generate_uuid(), - "rhel_machine_id": generate_uuid(), - } + canonical_facts = {"fqdn": fqdn, "bios_uuid": generate_uuid(), "rhel_machine_id": generate_uuid()} # Create the subset of canonical facts to search by subset_canonical_facts = {"fqdn": fqdn} @@ -44,8 +41,7 @@ def test_find_host_using_subset_canonical_fact_match(flask_app_fixture): def 
test_find_host_using_superset_canonical_fact_match(flask_app_fixture): - canonical_facts = {"fqdn": "fred", - "bios_uuid": generate_uuid()} + canonical_facts = {"fqdn": "fred", "bios_uuid": generate_uuid()} # Create the superset of canonical facts to search by superset_canonical_facts = canonical_facts.copy() @@ -56,10 +52,7 @@ def test_find_host_using_superset_canonical_fact_match(flask_app_fixture): def test_find_host_using_insights_id_match(flask_app_fixture): - canonical_facts = {"fqdn": "fred", - "bios_uuid": generate_uuid(), - "insights_id": generate_uuid(), - } + canonical_facts = {"fqdn": "fred", "bios_uuid": generate_uuid(), "insights_id": generate_uuid()} # Change the canonical facts except the insights_id...match on insights_id search_canonical_facts = {"fqdn": "barney", @@ -71,10 +64,7 @@ def test_find_host_using_insights_id_match(flask_app_fixture): def test_find_host_using_subscription_manager_id_match(flask_app_fixture): - canonical_facts = {"fqdn": "fred", - "bios_uuid": generate_uuid(), - "subscription_manager_id": generate_uuid(), - } + canonical_facts = {"fqdn": "fred", "bios_uuid": generate_uuid(), "subscription_manager_id": generate_uuid()} # Change the bios_uuid so that falling back to subset match will fail search_canonical_facts = { @@ -86,13 +76,8 @@ def test_find_host_using_subscription_manager_id_match(flask_app_fixture): @mark.parametrize(("host_create_order", "expected_host"), (((0, 1), 1), ((1, 0), 0))) -def test_find_host_using_elevated_ids_match( - flask_app_fixture, host_create_order, expected_host -): - hosts_canonical_facts = ( - {"subscription_manager_id": generate_uuid()}, - {"insights_id": generate_uuid()}, - ) +def test_find_host_using_elevated_ids_match(flask_app_fixture, host_create_order, expected_host): + hosts_canonical_facts = ({"subscription_manager_id": generate_uuid()}, {"insights_id": generate_uuid()}) created_hosts = [] for host_canonical_facts in host_create_order: @@ -100,9 +85,7 @@ def test_find_host_using_elevated_ids_match( created_hosts.append(created_host) search_canonical_facts = { - key: value - for host_canonical_facts in hosts_canonical_facts - for key, value in host_canonical_facts.items() + key: value for host_canonical_facts in hosts_canonical_facts for key, value in host_canonical_facts.items() } found_host = find_existing_host(ACCOUNT_NUMBER, search_canonical_facts) diff --git a/test_validators.py b/test_validators.py index 459f467de..66e1d7c73 100644 --- a/test_validators.py +++ b/test_validators.py @@ -5,9 +5,7 @@ from app.validators import verify_uuid_format -@pytest.mark.parametrize("uuid", ["4a8fb994-57fe-4dbb-ad2a-9e922560b6c1", - "4a8fb99457fe4dbbad2a9e922560b6c1", - ]) +@pytest.mark.parametrize("uuid", ["4a8fb994-57fe-4dbb-ad2a-9e922560b6c1", "4a8fb99457fe4dbbad2a9e922560b6c1"]) def test_valid_uuid(uuid): assert verify_uuid_format(uuid) is True
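
Note: the diff ends with parametrized tests for `verify_uuid_format`, but the validator in `app/validators.py` is not part of this patch. For reference only, a minimal sketch consistent with those tests (both the hyphenated and bare-hex UUID strings must return True) could look like the following; the parameter name and the `marshmallow.ValidationError` failure mode are assumptions, not something shown in this diff.

```python
# Hypothetical sketch of app/validators.verify_uuid_format -- not part of this patch.
from uuid import UUID

from marshmallow import ValidationError


def verify_uuid_format(uuid_str):
    """Return True when uuid_str parses as a UUID, with or without hyphens."""
    try:
        UUID(uuid_str)
    except (TypeError, ValueError):
        # Assumed failure mode: raise the error type marshmallow field validators expect.
        raise ValidationError("Invalid UUID format")
    return True
```

Under these assumptions, both test cases in test_validators.py above pass, since `uuid.UUID` accepts 32-character hex strings with or without hyphens.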