Fix E501 rule
Fixed violations of the E501 Flake8 rule, which targets overly long lines.
In most places the code has only been reformatted. Sometimes a variable
name has been shortened or another code-aware change has been made.
Glutexo committed Jul 11, 2019
1 parent c91e5e0 commit 5c4ab70
Showing 13 changed files with 244 additions and 135 deletions.
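
Most of the hunks below follow the same pattern: a long string literal is split into adjacent literals inside parentheses, which Python joins at compile time (implicit string concatenation), so each physical line stays under the E501 limit. A minimal sketch of the pattern, illustrative only and not taken from the repository:

# Before: one physical line longer than the configured limit.
# error_msg = "ERROR: This example message is long enough that keeping it on one physical line would violate the configured line-length limit."

# After: adjacent string literals inside parentheses are concatenated
# by the compiler into a single string, keeping each line short.
error_msg = (
    "ERROR: This example message is long enough that keeping it on "
    "one physical line would violate the configured line-length limit."
)

The same wrapping inside parentheses is applied to long call expressions, such as the logger.debug(...) calls and the prometheus Summary(...) declarations further down.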
17 changes: 14 additions & 3 deletions api/host.py
@@ -396,7 +396,9 @@ def patch_by_id(host_id_list, host_data):
hosts_to_update = query.all()

if not hosts_to_update:
logger.debug("Failed to find hosts during patch operation - hosts: %s" % host_id_list)
logger.debug(
"Failed to find hosts during patch operation - hosts: %s" % host_id_list
)
return flask.abort(status.HTTP_404_NOT_FOUND)

for host in hosts_to_update:
@@ -418,7 +420,10 @@ def replace_facts(host_id_list, namespace, fact_dict):
@metrics.api_request_time.time()
def merge_facts(host_id_list, namespace, fact_dict):
if not fact_dict:
error_msg = "ERROR: Invalid request. Merging empty facts into existing facts is a no-op."
error_msg = (
"ERROR: Invalid request. "
"Merging empty facts into existing facts is a no-op."
)
logger.debug(error_msg)
return error_msg, 400

@@ -436,7 +441,13 @@ def update_facts_by_namespace(operation, host_id_list, namespace, fact_dict):
logger.debug("hosts_to_update:%s" % hosts_to_update)

if len(hosts_to_update) != len(host_id_list):
error_msg = "ERROR: The number of hosts requested does not match the " "number of hosts found in the host database. This could " " happen if the namespace " "does not exist or the account number associated with the " "call does not match the account number associated with " "one or more the hosts. Rejecting the fact change request."
error_msg = (
"ERROR: The number of hosts requested does not match the number of hosts "
"found in the host database. This could happen if the namespace does not "
"exist or the account number associated with the call does not match the "
"account number associated with one or more the hosts. Rejecting the fact "
"change request."
)
logger.debug(error_msg)
return error_msg, 400

47 changes: 31 additions & 16 deletions api/metrics.py
@@ -2,14 +2,21 @@

api_request_time = Summary("inventory_request_processing_seconds",
"Time spent processing request")
host_dedup_processing_time = Summary("inventory_dedup_processing_seconds",
"Time spent looking for existing host (dedup logic)")
find_host_using_elevated_ids = Summary("inventory_find_host_using_elevated_ids_processing_seconds",
"Time spent looking for existing host using the elevated ids")
new_host_commit_processing_time = Summary("inventory_new_host_commit_seconds",
"Time spent committing a new host to the database")
update_host_commit_processing_time = Summary("inventory_update_host_commit_seconds",
"Time spent committing a update host to the database")
host_dedup_processing_time = Summary(
"inventory_dedup_processing_seconds",
"Time spent looking for existing host (dedup logic)")
find_host_using_elevated_ids = Summary(
"inventory_find_host_using_elevated_ids_processing_seconds",
"Time spent looking for existing host using the elevated ids"
)
new_host_commit_processing_time = Summary(
"inventory_new_host_commit_seconds",
"Time spent committing a new host to the database"
)
update_host_commit_processing_time = Summary(
"inventory_update_host_commit_seconds",
"Time spent committing a update host to the database"
)
api_request_count = Counter("inventory_request_count",
"The total amount of API requests")
create_host_count = Counter("inventory_create_host_count",
@@ -22,11 +29,19 @@
"Time spent deleting hosts from the database")
login_failure_count = Counter("inventory_login_failure_count",
"The total amount of failed login attempts")
system_profile_deserialization_time = Summary("inventory_system_profile_deserialization_time",
"Time spent deserializing system profile documents")
system_profile_commit_processing_time = Summary("inventory_system_profile_commit_processing_time",
"Time spent committing an update to a system profile to the database")
system_profile_commit_count = Counter("inventory_system_profile_commit_count",
"Count of successful system profile commits to the database")
system_profile_failure_count = Counter("inventory_system_profile_failure_count",
"Count of failures to commit the system profile to the database")
system_profile_deserialization_time = Summary(
"inventory_system_profile_deserialization_time",
"Time spent deserializing system profile documents"
)
system_profile_commit_processing_time = Summary(
"inventory_system_profile_commit_processing_time",
"Time spent committing an update to a system profile to the database"
)
system_profile_commit_count = Counter(
"inventory_system_profile_commit_count",
"Count of successful system profile commits to the database"
)
system_profile_failure_count = Counter(
"inventory_system_profile_failure_count",
"Count of failures to commit the system profile to the database"
)
52 changes: 31 additions & 21 deletions app/config.py
@@ -1,4 +1,4 @@
import os
from os import getenv

from app.common import get_build_version
from app.logging import get_logger
@@ -10,34 +10,36 @@ class Config:
def __init__(self):
self.logger = get_logger(__name__)

self._db_user = os.getenv("INVENTORY_DB_USER", "insights")
self._db_password = os.getenv("INVENTORY_DB_PASS", "insights")
self._db_host = os.getenv("INVENTORY_DB_HOST", "localhost")
self._db_name = os.getenv("INVENTORY_DB_NAME", "insights")
self._db_ssl_mode = os.getenv("INVENTORY_DB_SSL_MODE", "")
self._db_ssl_cert = os.getenv("INVENTORY_DB_SSL_CERT", "")
self._db_user = getenv("INVENTORY_DB_USER", "insights")
self._db_password = getenv("INVENTORY_DB_PASS", "insights")
self._db_host = getenv("INVENTORY_DB_HOST", "localhost")
self._db_name = getenv("INVENTORY_DB_NAME", "insights")
self._db_ssl_mode = getenv("INVENTORY_DB_SSL_MODE", "")
self._db_ssl_cert = getenv("INVENTORY_DB_SSL_CERT", "")

self.db_pool_timeout = int(os.getenv("INVENTORY_DB_POOL_TIMEOUT", "5"))
self.db_pool_size = int(os.getenv("INVENTORY_DB_POOL_SIZE", "5"))
self.db_pool_timeout = int(getenv("INVENTORY_DB_POOL_TIMEOUT", "5"))
self.db_pool_size = int(getenv("INVENTORY_DB_POOL_SIZE", "5"))

self.db_uri = self._build_db_uri(self._db_ssl_mode)

self.base_url_path = self._build_base_url_path()
self.api_url_path_prefix = self._build_api_path()
self.legacy_api_url_path_prefix = os.getenv("INVENTORY_LEGACY_API_URL", "")
self.mgmt_url_path_prefix = os.getenv("INVENTORY_MANAGEMENT_URL_PATH_PREFIX", "/")
self.legacy_api_url_path_prefix = getenv("INVENTORY_LEGACY_API_URL", "")
self.mgmt_url_path_prefix = getenv("INVENTORY_MANAGEMENT_URL_PATH_PREFIX", "/")

self.api_urls = [self.api_url_path_prefix, self.legacy_api_url_path_prefix]

self.system_profile_topic = os.environ.get("KAFKA_TOPIC", "platform.system-profile")
self.consumer_group = os.environ.get("KAFKA_GROUP", "inventory")
self.bootstrap_servers = os.environ.get("KAFKA_BOOTSTRAP_SERVERS", "kafka:29092")
self.event_topic = os.environ.get("KAFKA_EVENT_TOPIC", "platform.inventory.events")
self.kafka_enabled = all(map(os.environ.get, ["KAFKA_TOPIC", "KAFKA_GROUP", "KAFKA_BOOTSTRAP_SERVERS"]))
self.system_profile_topic = getenv("KAFKA_TOPIC", "platform.system-profile")
self.consumer_group = getenv("KAFKA_GROUP", "inventory")
self.bootstrap_servers = getenv("KAFKA_BOOTSTRAP_SERVERS", "kafka:29092")
self.event_topic = getenv("KAFKA_EVENT_TOPIC", "platform.inventory.events")
self.kafka_enabled = all(
map(getenv, ["KAFKA_TOPIC", "KAFKA_GROUP", "KAFKA_BOOTSTRAP_SERVERS"])
)

def _build_base_url_path(self):
app_name = os.getenv("APP_NAME", "inventory")
path_prefix = os.getenv("PATH_PREFIX", "api")
app_name = getenv("APP_NAME", "inventory")
path_prefix = getenv("PATH_PREFIX", "api")
base_url_path = f"/{path_prefix}/{app_name}"
return base_url_path

@@ -65,11 +67,19 @@ def log_configuration(self, config_name):
self.logger.info("Insights Host Inventory Configuration:")
self.logger.info("Build Version: %s" % get_build_version())
self.logger.info("API URL Path: %s" % self.api_url_path_prefix)
self.logger.info("Management URL Path Prefix: %s" % self.mgmt_url_path_prefix)
self.logger.info(
"Management URL Path Prefix: %s" % self.mgmt_url_path_prefix
)
self.logger.info("DB Host: %s" % self._db_host)
self.logger.info("DB Name: %s" % self._db_name)
self.logger.info("DB Connection URI: %s" % self._build_db_uri(self._db_ssl_mode, hide_password=True))
self.logger.info(
"DB Connection URI: %s" % self._build_db_uri(
self._db_ssl_mode, hide_password=True
)
)
if self._db_ssl_mode == self.SSL_VERIFY_FULL:
self.logger.info("Using SSL for DB connection:")
self.logger.info("Postgresql SSL verification type: %s" % self._db_ssl_mode)
self.logger.info(
"Postgresql SSL verification type: %s" % self._db_ssl_mode
)
self.logger.info("Path to certificate: %s" % self._db_ssl_cert)
9 changes: 6 additions & 3 deletions app/logging.py
@@ -8,7 +8,7 @@
from boto3.session import Session
from gunicorn import glogging

OPENSHIFT_ENVIRONMENT_NAME_FILE = "/var/run/secrets/kubernetes.io/serviceaccount/namespace"
OPENSHIFT_ENV_NAME_FILE = "/var/run/secrets/kubernetes.io/serviceaccount/namespace"
DEFAULT_AWS_LOGGING_NAMESPACE = "inventory-dev"
LOGGER_PREFIX = "inventory."

@@ -42,11 +42,14 @@ def _configure_watchtower_logging_handler():
aws_secret_access_key = os.getenv("AWS_SECRET_ACCESS_KEY", None)
aws_region_name = os.getenv("AWS_REGION_NAME", None)
log_group = os.getenv("AWS_LOG_GROUP", "platform")
stream_name = _get_aws_logging_stream_name(OPENSHIFT_ENVIRONMENT_NAME_FILE)
stream_name = _get_aws_logging_stream_name(OPENSHIFT_ENV_NAME_FILE)

if all([aws_access_key_id, aws_secret_access_key,
aws_region_name, stream_name]):
print(f"Configuring watchtower logging (log_group={log_group}, stream_name={stream_name})")
print(
"Configuring watchtower logging "
f"(log_group={log_group}, stream_name={stream_name})"
)
boto3_session = Session(aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
region_name=aws_region_name)
15 changes: 9 additions & 6 deletions app/models.py
@@ -31,12 +31,15 @@ class Host(db.Model):
__tablename__ = "hosts"
# These Index entries are essentially place holders so that the
# alembic autogenerate functionality does not try to remove the indexes
__table_args__ = (Index("idxinsightsid", text("(canonical_facts ->> 'insights_id')")),
Index("idxgincanonicalfacts", "canonical_facts"),
Index("idxaccount", "account"),
Index("hosts_subscription_manager_id_index",
text("(canonical_facts ->> 'subscription_manager_id')")),
)
__table_args__ = (
Index("idxinsightsid", text("(canonical_facts ->> 'insights_id')")),
Index("idxgincanonicalfacts", "canonical_facts"),
Index("idxaccount", "account"),
Index(
"hosts_subscription_manager_id_index",
text("(canonical_facts ->> 'subscription_manager_id')")
),
)

id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
account = db.Column(db.String(10))
11 changes: 6 additions & 5 deletions host_dumper.py
@@ -9,9 +9,10 @@

application = create_app("cli")

parser = argparse.ArgumentParser(description="Util that dumps a host from the hosts table."
" The db configuration is read from the environment. This util is expected to be"
" used within the image/pod")
parser = argparse.ArgumentParser(
description="Util that dumps a host from the hosts table. The db configuration is "
"read from the environment. This util is expected to be used within the image/pod"
)
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("--id",
help="search for a host using id")
@@ -44,8 +45,8 @@
elif args.insights_id:
print("looking up host using insights_id")
query_results = Host.query.filter(
Host.canonical_facts.comparator.contains({'insights_id':args.insights_id})
).all()
Host.canonical_facts.comparator.contains({"insights_id":args.insights_id})
).all()
elif args.account_number:
query_results = Host.query.filter(
Host.account == args.account_number
@@ -18,7 +18,14 @@

def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('hosts', sa.Column('system_profile_facts', postgresql.JSONB(astext_type=sa.Text()), nullable=True))
op.add_column(
'hosts',
sa.Column(
'system_profile_facts',
postgresql.JSONB(astext_type=sa.Text()),
nullable=True
)
)
# ### end Alembic commands ###


4 changes: 3 additions & 1 deletion migrations/versions/a9f0e674cf52_add_ansible_host_column.py
@@ -18,7 +18,9 @@

def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('hosts', sa.Column('ansible_host', sa.String(length=255), nullable=True))
op.add_column(
'hosts', sa.Column('ansible_host', sa.String(length=255), nullable=True)
)
# ### end Alembic commands ###


10 changes: 9 additions & 1 deletion run_gunicorn.py
@@ -30,7 +30,15 @@ def run_server():
variables.
"""
bind = f"0.0.0.0:{LISTEN_PORT}"
run(("gunicorn", "--log-config=logconfig.ini", "--log-level=debug", f"--bind={bind}", "run"))
run(
(
"gunicorn",
"--log-config=logconfig.ini",
"--log-level=debug",
f"--bind={bind}",
"run",
)
)


if __name__ == "__main__":
8 changes: 6 additions & 2 deletions tasks/__init__.py
@@ -56,13 +56,17 @@ def msg_handler(parsed):
if host is None:
logger.error("Host with id [%s] not found!", id_)
return
logger.info("Processing message id=%s request_id=%s", parsed["id"], parsed["request_id"])
logger.info(
"Processing message id=%s request_id=%s", parsed["id"], parsed["request_id"]
)
profile = SystemProfileSchema(strict=True).load(parsed["system_profile"]).data
host._update_system_profile(profile)
db.session.commit()


def _init_system_profile_consumer(config, flask_app, handler=msg_handler, consumer=None):
def _init_system_profile_consumer(
config, flask_app, handler=msg_handler, consumer=None
):

if not config.kafka_enabled:
logger.info("System profile consumer has been disabled")