From 864120acb4c7c227bb526ae64be6edd571e435a2 Mon Sep 17 00:00:00 2001 From: Xiangce Liu Date: Wed, 25 Dec 2024 14:23:02 +0800 Subject: [PATCH] feat: enhance add_filter to specify the max matched lines - added a new keyword argument "max_match=MAX_MATCH" to `add_filter`. By declaring the max_match, at most `max_match` number of lines that contain the patterns will be filtered out in the collection. For redundant declarations the maximum `max_match` will be kept as the final scanning number. If no `max_match` is declared when `add_filter`, the filters.MAX_MATCH (=10000) will be taken as the default value. - added a new keyword argument "with_matches=False" to the `get_filters`. When "with_matches=True" is specified, the return value of the `get_filters` will be dict in which the max scanning numbers for each filter pattern are included as the dict value. - update the existing tests - RHINENG-14669 Signed-off-by: Xiangce Liu --- insights/core/filters.py | 70 ++++++++------ insights/core/spec_cleaner.py | 6 +- insights/core/spec_factory.py | 15 +-- insights/tests/__init__.py | 73 ++++++++------ insights/tests/core/test_filters.py | 28 +++++- .../container/test_containers_inspect.py | 95 ++++++++++++++++--- insights/tests/datasources/test_awx_manage.py | 24 +++-- insights/tests/datasources/test_cloud_init.py | 2 +- insights/tests/datasources/test_dir_list.py | 2 +- .../datasources/test_kernel_module_list.py | 2 +- insights/tests/datasources/test_ls.py | 2 +- insights/tests/datasources/test_lsattr.py | 6 +- .../tests/datasources/test_machine_ids.py | 94 ++++++++++-------- .../datasources/test_package_provides.py | 74 +++++++++++---- insights/tests/datasources/test_rpm_pkgs.py | 29 +++++- insights/tests/datasources/test_semanage.py | 14 +-- insights/tests/datasources/test_user_group.py | 2 +- .../tests/parsers/test_dse_ldif_simple.py | 11 ++- insights/tests/parsers/test_ls_usr_bin.py | 16 +++- insights/tests/parsers/test_ls_usr_sbin.py | 17 +++- 
insights/tests/parsers/test_messages.py | 21 ++-- insights/tests/parsers/test_rhsm_log.py | 21 ++-- insights/tests/specs/test_specs.py | 2 +- insights/tests/specs/test_specs_filters.py | 36 +++++-- insights/tests/specs/test_specs_save_as.py | 2 +- insights/tests/test_context_wrap.py | 2 +- .../tests/tools/test_apply_spec_filters.py | 2 +- 27 files changed, 448 insertions(+), 220 deletions(-) diff --git a/insights/core/filters.py b/insights/core/filters.py index b4a7ff636e..79a32d8e77 100644 --- a/insights/core/filters.py +++ b/insights/core/filters.py @@ -48,16 +48,12 @@ from insights.util import parse_bool _CACHE = {} -FILTERS = defaultdict(set) +FILTERS = defaultdict(dict) ENABLED = parse_bool(os.environ.get("INSIGHTS_FILTERS_ENABLED"), default=True) -MATCH_COUNT = 10000 +MAX_MATCH = 10000 -# TODO: -# - support specifying the max match number of filtered lines -# add_filter(Messages, "Such an Error", 10) -# def add_filter(component, patterns, match_count=MATCH_COUNT): -def add_filter(component, patterns): +def add_filter(component, patterns, max_match=MAX_MATCH): """ Add a filter or list of filters to a component. When the component is a datasource, the filter will be directly added to that datasouce. @@ -71,8 +67,26 @@ def add_filter(component, patterns): parser or combiner. patterns (str, [str]): A string, list of strings, or set of strings to add to the datasource's filters. + max_match (int): A int, the maximum matched lines to filter out. + MAX_MATCH by default. 
""" + def get_dependency_datasources(comp): + """Get (all) the first depended datasource""" + dep_ds = set() + if plugins.is_datasource(comp): + dep_ds.add(comp) + return dep_ds + for dep in dr.get_dependencies(comp): + dep_ds.update(get_dependency_datasources(dep)) + return dep_ds + + def none_max(a, b): + return a if b is None else b if a is None else max(a, b) + + def max_matchs(da, db): + return dict((k, none_max(da.get(k), db.get(k))) for k in set(da.keys()).union(db.keys())) + def inner(comp, patterns): if comp in _CACHE: del _CACHE[comp] @@ -82,25 +96,16 @@ def inner(comp, patterns): raise TypeError("Filter patterns must be of type string, list, or set.") if isinstance(patterns, six.string_types): - patterns = set([patterns]) + patterns = {patterns: max_match} elif isinstance(patterns, list): - patterns = set(patterns) + patterns = dict((pt, max_match) for pt in patterns) + # here patterns is a dict for pat in patterns: if not pat: raise Exception("Filter patterns must not be empty.") - FILTERS[comp] |= patterns - - def get_dependency_datasources(comp): - """Get (all) the first depended datasource""" - dep_ds = set() - if plugins.is_datasource(comp): - dep_ds.add(comp) - return dep_ds - for dep in dr.get_dependencies(comp): - dep_ds.update(get_dependency_datasources(dep)) - return dep_ds + FILTERS[comp].update(max_matchs(FILTERS[comp], patterns)) if not plugins.is_datasource(component): deps = get_dependency_datasources(component) @@ -127,7 +132,7 @@ def get_dependency_datasources(comp): _add_filter = add_filter -def get_filters(component): +def get_filters(component, with_matches=False): """ Get the set of filters for the given datasource. @@ -143,13 +148,19 @@ def get_filters(component): Args: component (a datasource): The target datasource + with_matches (boolean): Needs the max matches being returned? False by + default. 
Returns: - set: The set of filters defined for the datasource + (set or dict): when `with_matches=False`, returns the set of filters + defined for the datasource only. + when `with_matches=True`, returns filters defined for + the datasource with the max match count specified by + `add_filter`. """ def inner(c, filters=None): - filters = filters or set() + filters = filters or dict() if hasattr(c, 'filterable') and c.filterable is False: return filters @@ -161,20 +172,21 @@ def inner(c, filters=None): return filters if c in FILTERS: - filters |= FILTERS[c] + filters.update(FILTERS[c]) for d in dr.get_dependents(c): - filters |= inner(d, filters) + filters.update(inner(d, filters)) + return filters if not component: # No filters for nothing - return set() + return dict() if with_matches else set() if component not in _CACHE: _CACHE[component] = inner(component) - return _CACHE[component] + return _CACHE[component] if with_matches else set(_CACHE[component].keys()) def apply_filters(target, lines): @@ -202,7 +214,7 @@ def loads(string): """Loads the filters dictionary given a string.""" d = _loads(string) for k, v in d.items(): - FILTERS[dr.get_component(k) or k] = set(v) + FILTERS[dr.get_component(k) or k] = v def load(stream=None): @@ -222,7 +234,7 @@ def dumps(): """Returns a string representation of the sorted FILTERS dictionary.""" d = {} for k, v in FILTERS.items(): - d[dr.get_name(k)] = sorted(v) + d[dr.get_name(k)] = dict(sorted(v.items())) return _dumps(d) diff --git a/insights/core/spec_cleaner.py b/insights/core/spec_cleaner.py index 5e807a803e..b1e210d178 100644 --- a/insights/core/spec_cleaner.py +++ b/insights/core/spec_cleaner.py @@ -11,6 +11,8 @@ Obfuscate the IP or Hostname appears in the spec content according to the specs native requirement and user configuration. 
+- Filtering + Filter line as per the allow list got from the "filters.yaml" """ import logging @@ -360,13 +362,13 @@ def _filter_line_per_allowlist(self, line, allow_info): for a_key in list(allow_info.keys()): # keep line when any filter match # FIXME: - # Considering performance, din't handle multiple filters in one same line + # Considering performance, didn't handle multiple filters in one same line if a_key in line: allow_info[a_key] -= 1 # stop checking it when enough lines contain the key were found allow_info.pop(a_key) if allow_info[a_key] == 0 else None return line - # discard the line when all filters are enough matched + # discard line when none filters found def get_obfuscate_functions(self, filename='', no_obfuscate=None): """ diff --git a/insights/core/spec_factory.py b/insights/core/spec_factory.py index 9413ecc5e4..7b4835c0b5 100644 --- a/insights/core/spec_factory.py +++ b/insights/core/spec_factory.py @@ -62,7 +62,7 @@ def __init__(self): self._content = None self._exception = None self._filterable = False - self._filters = set() + self._filters = dict() def load(self): raise NotImplementedError() @@ -96,11 +96,10 @@ def _clean_content(self): allowlist = None if self._filterable: cleans.append("Filter") - allowlist = dict((f, filters.MATCH_COUNT) for f in self._filters) + allowlist = self._filters # Cleaning - Entry if cleans: log.debug("Cleaning (%s) %s", "/".join(cleans), self.relative_path) - allowlist = None content = self.cleaner.clean_content( content[::-1], # Scan from bottom allowlist=allowlist, @@ -210,7 +209,7 @@ def __init__(self, relative_path, root="/", save_as=None, ds=None, ctx=None, cle if self.ds and filters.ENABLED else False ) - self._filters = filters.get_filters(self.ds) if self.ds else set() + self._filters = filters.get_filters(self.ds, True) if self.ds else set() self.validate() @@ -314,10 +313,7 @@ def create_args(self): """ args = [] if self._filters: -<<<<<<< HEAD log.debug("Pre-filtering %s", self.relative_path) 
-======= ->>>>>>> 19ecd79e (feat: support line filter in spec Cleaner) args.append(["grep", "-F", "\n".join(self._filters), self.path]) return args @@ -412,7 +408,7 @@ def __init__( if self.ds and filters.ENABLED else False ) - self._filters = filters.get_filters(self.ds) if self.ds else set() + self._filters = filters.get_filters(self.ds, True) if self.ds else set() self.validate() @@ -439,10 +435,7 @@ def create_args(self): command = [shlex.split(self.cmd)] if self.split and self._filters: -<<<<<<< HEAD log.debug("Pre-filtering %s", self.relative_path) -======= ->>>>>>> 19ecd79e (feat: support line filter in spec Cleaner) command.append(["grep", "-F", "\n".join(self._filters)]) return command diff --git a/insights/tests/__init__.py b/insights/tests/__init__.py index a9f65b965a..f8e7b0ad60 100644 --- a/insights/tests/__init__.py +++ b/insights/tests/__init__.py @@ -36,11 +36,14 @@ def _intercept_add_filter(func): @wraps(func) - def inner(component, pattern): - ret = add_filter(component, pattern) + def inner(component, pattern, max_match=filters.MAX_MATCH): + ret = add_filter(component, pattern, max_match=max_match) calling_module = inspect.stack()[1][0].f_globals.get("__name__") - ADDED_FILTERS[calling_module] |= set(r for r in dr.get_registry_points(component) if r.filterable) + ADDED_FILTERS[calling_module] |= set( + r for r in dr.get_registry_points(component) if r.filterable + ) return ret + return inner @@ -51,6 +54,7 @@ def inner(ds, pattern): calling_module = inspect.stack()[1][0].f_globals.get("__name__") ADDED_FILTERS[calling_module].add(ds) return ret + return inner @@ -92,8 +96,11 @@ def _beautify_deep_compare_diff(result, expected): diff.append('\tkey "{0}" not in Result;'.format(k)) for k in common_keys: if not eq(result[k], expected[k]): - diff.append('\tkey "{0}" unequal values:\n\t\tExpected: {1}\n\t\tResult : {2}'.format( - k, expected[k], result[k])) + diff.append( + '\tkey "{0}" unequal values:\n\t\tExpected: {1}\n\t\tResult : {2}'.format( + k, 
expected[k], result[k] + ) + ) if not diff: diff.append('\tUnrecognized unequal values in result layer one;') @@ -118,7 +125,7 @@ def deep_compare(result, expected): # This case ensures that when rules return a make_none() response, all of the older # CI tests that are looking for None instead of make_none() will still pass if result is None or (isinstance(result, dict) and result.get("type") == "none"): - assert (expected is None or expected == MAKE_NONE_RESULT), result + assert expected is None or expected == MAKE_NONE_RESULT, result return if isinstance(result, dict) and expected is None: @@ -150,12 +157,11 @@ def run_input_data(component, input_data, store_skips=False): 'CloudInstance': ['insights.parsers.subscription_manager.SubscriptionManagerFacts'], 'CloudProvider': ['insights.parsers.rhsm_conf.RHSMConf'], 'OSRelease': ['insights.parsers.dmesg.DmesgLineList'], - 'Sap': ['insights.parsers.saphostctrl.SAPHostCtrlInstances'] + 'Sap': ['insights.parsers.saphostctrl.SAPHostCtrlInstances'], } -def run_test(component, input_data, - expected=_UNDEFINED, return_make_none=False, do_filter=True): +def run_test(component, input_data, expected=_UNDEFINED, return_make_none=False, do_filter=True): """ Arguments: component: The insights component need to test. @@ -165,6 +171,7 @@ def run_test(component, input_data, do_filter: Does need to check dependency spec filter warning? 
- it's not required to check the filters for sosreport """ + def get_filtered_specs(module): filtered = set() mods = dir(importlib.import_module(module)) @@ -183,7 +190,9 @@ def get_filtered_specs(module): rps = dr.get_registry_points(component) filtered = get_filtered_specs(mod) filterable = set(d for d in rps if dr.get_delegate(d).filterable) - filtered - missing_filters = filterable - ADDED_FILTERS.get(mod, set()) - ADDED_FILTERS.get(sup_mod, set()) + missing_filters = ( + filterable - ADDED_FILTERS.get(mod, set()) - ADDED_FILTERS.get(sup_mod, set()) + ) if missing_filters: names = [dr.get_name(m) for m in missing_filters] msg = "%s must add filters to %s" @@ -204,16 +213,18 @@ def integrate(input_data, component): return run_test(component, input_data) -def context_wrap(lines, - path="path", - hostname=DEFAULT_HOSTNAME, - release=DEFAULT_RELEASE, - version="-1.-1", - machine_id="machine_id", - strip=True, - split=True, - filtered_spec=None, - **kwargs): +def context_wrap( + lines, + path="path", + hostname=DEFAULT_HOSTNAME, + release=DEFAULT_RELEASE, + version="-1.-1", + machine_id="machine_id", + strip=True, + split=True, + filtered_spec=None, + **kwargs +): if isinstance(lines, six.string_types): if strip: lines = lines.strip() @@ -223,10 +234,16 @@ def context_wrap(lines, if filtered_spec is not None and filtered_spec in filters.FILTERS: lines = [l for l in lines if any([f in l for f in filters.FILTERS[filtered_spec]])] - return Context(content=lines, - path=path, hostname=hostname, - release=release, version=version.split("."), - machine_id=machine_id, relative_path=path, **kwargs) + return Context( + content=lines, + path=path, + hostname=hostname, + release=release, + version=version.split("."), + machine_id=machine_id, + relative_path=path, + **kwargs + ) input_data_cache = {} @@ -236,10 +253,7 @@ def context_wrap(lines, def create_metadata(system_id, product): - ctx_metadata = { - "system_id": system_id, - "links": [] - } + ctx_metadata = {"system_id": 
system_id, "links": []} ctx_metadata["type"] = product.role ctx_metadata["product"] = product.__class__.__name__ return json.dumps(ctx_metadata) @@ -265,6 +279,7 @@ class InputData(object): contain the specified value in the context.path field. This is useful for testing pattern-like file parsers. """ + def __init__(self, name=None, hostname=None): cnt = input_data_cache.get(name, 0) self.name = "{0}-{1:0>5}".format(name, cnt) @@ -421,6 +436,7 @@ def archive_provider(component, test_func=deep_compare, stride=1): [1] insights.tests.deep_compare() """ + def _wrap(func): @six.wraps(func) def __wrap(stride=stride): @@ -430,4 +446,5 @@ def __wrap(stride=stride): __wrap.stride = stride ARCHIVE_GENERATORS.append(__wrap) return __wrap + return _wrap diff --git a/insights/tests/core/test_filters.py b/insights/tests/core/test_filters.py index c8d86d9c62..57d3d8a27d 100644 --- a/insights/tests/core/test_filters.py +++ b/insights/tests/core/test_filters.py @@ -43,19 +43,22 @@ class LocalSpecsNoFilters(object): def setup_function(func): if func is test_get_filter: - filters.add_filter(Specs.ps_aux, "COMMAND") + filters.add_filter(Specs.ps_aux, "COMMAND", 99999) + filters.add_filter(Specs.ps_aux, ["COMMAND", "PID"], 99) if func is test_get_filter_registry_point: filters.add_filter(Specs.ps_aux, "COMMAND") filters.add_filter(DefaultSpecs.ps_aux, "MEM") if func is test_filter_dumps_loads: - filters.add_filter(Specs.ps_aux, ["PID", "COMMAND", "TEST"]) + filters.add_filter(Specs.ps_aux, ["PID", "COMMAND"]) + filters.add_filter(Specs.ps_aux, "TEST_10", 10) + filters.add_filter(Specs.ps_aux, ["PID", "TEST_5"], 5) def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def test_filter_dumps_loads(): @@ -65,7 +68,12 @@ def test_filter_dumps_loads(): filters.loads(r) assert Specs.ps_aux in filters.FILTERS - assert filters.FILTERS[Specs.ps_aux] == set(["PID", "COMMAND", "TEST"]) + assert filters.FILTERS[Specs.ps_aux] == { 
+ 'COMMAND': filters.MAX_MATCH, + 'PID': filters.MAX_MATCH, # max match + 'TEST_10': 10, + 'TEST_5': 5, + } r2 = filters.dumps() assert r2 == r # 'filters' are in the same order in every dumps() @@ -75,8 +83,10 @@ def test_get_filter(): f = filters.get_filters(Specs.ps_aux) assert "COMMAND" in f - f = filters.get_filters(DefaultSpecs.ps_aux) + f = filters.get_filters(DefaultSpecs.ps_aux, True) assert "COMMAND" in f + assert f["COMMAND"] == 99999 # max match + assert f["PID"] == 99 def test_get_filter_registry_point(): @@ -185,6 +195,11 @@ def test_add_filter_exception_empty(): filters.add_filter(Specs.ps_aux, "") +def test_add_filter_exception_None(): + with pytest.raises(Exception): + filters.add_filter(Specs.ps_aux, None) + + def test_get_filters(): _filter = 'A filter' filters.add_filter(MySpecs.has_filters, _filter) @@ -193,3 +208,6 @@ def test_get_filters(): assert _filter in ret_has ret_no = filters.get_filters(MySpecs.no_filters) assert len(ret_no) == 0 + + assert filters.get_filters(None) == set() + assert filters.get_filters(None, True) == dict() diff --git a/insights/tests/datasources/container/test_containers_inspect.py b/insights/tests/datasources/container/test_containers_inspect.py index d27727b463..a5a425555d 100644 --- a/insights/tests/datasources/container/test_containers_inspect.py +++ b/insights/tests/datasources/container/test_containers_inspect.py @@ -9,8 +9,11 @@ from insights.core.spec_factory import DatasourceProvider from insights.specs import Specs from insights.specs.datasources.container import running_rhel_containers -from insights.specs.datasources.container.containers_inspect import (LocalSpecs, containers_inspect_data_datasource, - running_rhel_containers_id) +from insights.specs.datasources.container.containers_inspect import ( + LocalSpecs, + containers_inspect_data_datasource, + running_rhel_containers_id, +) INSPECT_1 = """ @@ -908,34 +911,83 @@ ('podman', '28fb57be8bb204e652c472a406e0d99956c8d35d6e88abfc13253d101a00911e'), 
('podman', '528890e93bf71736e00a87c7a1fa33e5bb03a9a196e5b10faaa9e545e749aa54'), ('docker', '38fb57be8bb204e652c472a406e0d99956c8d35d6e88abfc13253d101a00911e'), - ('docker', '538890e93bf71736e00a87c7a1fa33e5bb03a9a196e5b10faaa9e545e749aa54') + ('docker', '538890e93bf71736e00a87c7a1fa33e5bb03a9a196e5b10faaa9e545e749aa54'), ] -EXPECTED_RESULT = [{'Id': 'aeaea3ead527', 'engine': 'podman', 'Image': '538460c14d75dee1504e56ad8ddb7fe039093b1530ef8f90442a454b9aa3dc8b', 'Config|Cmd': ["sleep", "1000000"], 'HostConfig|Privileged': False}, {'Id': '28fb57be8bb2', 'engine': 'podman', 'Image': '538460c14d75dee1504e56ad8ddb7fe039093b1530ef8f90442a454b9aa3dc8b', 'Config|Cmd': ["sleep", "1000000"], 'HostConfig|Privileged': True}, {'Id': 'c7efee959ea8', 'engine': 'docker', 'Image': 'acf3e09a39c95d354539b6591298be0b0814f5d74e95e722863241192b9a079b', 'Config|Cmd': ["sleep", "1000000"], 'HostConfig|Privileged': True}] +EXPECTED_RESULT = [ + { + 'Id': 'aeaea3ead527', + 'engine': 'podman', + 'Image': '538460c14d75dee1504e56ad8ddb7fe039093b1530ef8f90442a454b9aa3dc8b', + 'Config|Cmd': ["sleep", "1000000"], + 'HostConfig|Privileged': False, + }, + { + 'Id': '28fb57be8bb2', + 'engine': 'podman', + 'Image': '538460c14d75dee1504e56ad8ddb7fe039093b1530ef8f90442a454b9aa3dc8b', + 'Config|Cmd': ["sleep", "1000000"], + 'HostConfig|Privileged': True, + }, + { + 'Id': 'c7efee959ea8', + 'engine': 'docker', + 'Image': 'acf3e09a39c95d354539b6591298be0b0814f5d74e95e722863241192b9a079b', + 'Config|Cmd': ["sleep", "1000000"], + 'HostConfig|Privileged': True, + }, +] EXPECTED_RESULT_NG = [{'Id': '28fb57be8bb2', 'engine': 'podman'}] def setup_function(func): - if func is test_containers_inspect_datasource or func is test_containers_inspect_datasource_NG_output_1 or func is test_containers_inspect_datasource_NG_output_2: - filters.add_filter(Specs.container_inspect_keys, ["HostConfig|Privileged", "NoSuchKey|Privileged", "Config|Cmd", "Id", "Image"]) + if ( + func is test_containers_inspect_datasource + or func 
is test_containers_inspect_datasource_NG_output_1 + or func is test_containers_inspect_datasource_NG_output_2 + ): + filters.add_filter( + Specs.container_inspect_keys, + ["HostConfig|Privileged", "NoSuchKey|Privileged", "Config|Cmd", "Id", "Image"], + ) if func is test_containers_inspect_datasource_no_filter: filters.add_filter(Specs.container_inspect_keys, []) def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def test_running_rhel_containers_id(): broker = dr.Broker() containers_info = [ - ("registry.access.redhat.com/rhel", "podman", "aeaea3ead52724bb525bb2b5c619d67836250756920f0cb9884431ba53b476d8"), - ("registry.access.redhat.com/rhel", "podman", "28fb57be8bb204e652c472a406e0d99956c8d35d6e88abfc13253d101a00911e"), - ("registry.access.redhat.com/rhel", "podman", "528890e93bf71736e00a87c7a1fa33e5bb03a9a196e5b10faaa9e545e749aa54"), - ("registry.access.redhat.com/rhel", "docker", "38fb57be8bb204e652c472a406e0d99956c8d35d6e88abfc13253d101a00911e"), - ("registry.access.redhat.com/rhel", "docker", "538890e93bf71736e00a87c7a1fa33e5bb03a9a196e5b10faaa9e545e749aa54") + ( + "registry.access.redhat.com/rhel", + "podman", + "aeaea3ead52724bb525bb2b5c619d67836250756920f0cb9884431ba53b476d8", + ), + ( + "registry.access.redhat.com/rhel", + "podman", + "28fb57be8bb204e652c472a406e0d99956c8d35d6e88abfc13253d101a00911e", + ), + ( + "registry.access.redhat.com/rhel", + "podman", + "528890e93bf71736e00a87c7a1fa33e5bb03a9a196e5b10faaa9e545e749aa54", + ), + ( + "registry.access.redhat.com/rhel", + "docker", + "38fb57be8bb204e652c472a406e0d99956c8d35d6e88abfc13253d101a00911e", + ), + ( + "registry.access.redhat.com/rhel", + "docker", + "538890e93bf71736e00a87c7a1fa33e5bb03a9a196e5b10faaa9e545e749aa54", + ), ] broker[running_rhel_containers] = containers_info result = running_rhel_containers_id(broker) @@ -952,7 +1004,13 @@ def test_containers_inspect_datasource(): containers_inspect_data_2.cmd = 
"/usr/bin/podman inspect 28fb57be8bb2" containers_inspect_data_3.content = INSPECT_3.splitlines() containers_inspect_data_3.cmd = "/usr/bin/docker inspect c7efee959ea8" - broker = {LocalSpecs.containers_inspect_data_raw: [containers_inspect_data_1, containers_inspect_data_2, containers_inspect_data_3]} + broker = { + LocalSpecs.containers_inspect_data_raw: [ + containers_inspect_data_1, + containers_inspect_data_2, + containers_inspect_data_3, + ] + } result = containers_inspect_data_datasource(broker) assert result is not None assert isinstance(result, DatasourceProvider) @@ -968,7 +1026,12 @@ def test_containers_inspect_datasource_no_filter(): containers_inspect_data_1.cmd = "/usr/bin/docker inspect aeaea3ead527" containers_inspect_data_2.content = INSPECT_2.splitlines() containers_inspect_data_2.cmd = "/usr/bin/podman inspect 28fb57be8bb2" - broker = {LocalSpecs.containers_inspect_data_raw: [containers_inspect_data_1, containers_inspect_data_2]} + broker = { + LocalSpecs.containers_inspect_data_raw: [ + containers_inspect_data_1, + containers_inspect_data_2, + ] + } with pytest.raises(SkipComponent) as e: containers_inspect_data_datasource(broker) assert 'SkipComponent' in str(e) @@ -982,7 +1045,9 @@ def test_containers_inspect_datasource_NG_output_1(): result = containers_inspect_data_datasource(broker) assert result is not None assert isinstance(result, DatasourceProvider) - expected = DatasourceProvider(content=json.dumps(EXPECTED_RESULT_NG), relative_path=RELATIVE_PATH) + expected = DatasourceProvider( + content=json.dumps(EXPECTED_RESULT_NG), relative_path=RELATIVE_PATH + ) assert result.content[0] == expected.content[0] assert result.relative_path == expected.relative_path diff --git a/insights/tests/datasources/test_awx_manage.py b/insights/tests/datasources/test_awx_manage.py index a513401220..f1ea839cdf 100644 --- a/insights/tests/datasources/test_awx_manage.py +++ b/insights/tests/datasources/test_awx_manage.py @@ -8,7 +8,10 @@ from 
insights.core.exceptions import SkipComponent from insights.core.spec_factory import DatasourceProvider from insights.specs import Specs -from insights.specs.datasources.awx_manage import LocalSpecs, awx_manage_check_license_data_datasource +from insights.specs.datasources.awx_manage import ( + LocalSpecs, + awx_manage_check_license_data_datasource, +) AWX_MANAGE_LICENSE = """ @@ -23,22 +26,28 @@ "license_type": "enterprise", "time_remaining": 29885220, "instance_count": 100, - "support_level": "Standard" + "support_level": "Standard", } RELATIVE_PATH = 'insights_commands/awx-manage_check_license_--data' def setup_function(func): - if func is test_ansible_tower_license_datasource or func is test_ansible_tower_license_datasource_NG_output: - filters.add_filter(Specs.awx_manage_check_license_data, ["license_type", "support_level", "instance_count", "time_remaining"]) + if ( + func is test_ansible_tower_license_datasource + or func is test_ansible_tower_license_datasource_NG_output + ): + filters.add_filter( + Specs.awx_manage_check_license_data, + ["license_type", "support_level", "instance_count", "time_remaining"], + ) if func is test_ansible_tower_license_datasource_no_filter: filters.add_filter(Specs.awx_manage_check_license_data, []) def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def test_ansible_tower_license_datasource(): @@ -48,7 +57,10 @@ def test_ansible_tower_license_datasource(): result = awx_manage_check_license_data_datasource(broker) assert result is not None assert isinstance(result, DatasourceProvider) - expected = DatasourceProvider(content=json.dumps(OrderedDict(sorted(AWX_MANAGE_FILTER_JSON.items()))), relative_path=RELATIVE_PATH) + expected = DatasourceProvider( + content=json.dumps(OrderedDict(sorted(AWX_MANAGE_FILTER_JSON.items()))), + relative_path=RELATIVE_PATH, + ) assert result.content == expected.content assert result.relative_path == expected.relative_path diff 
--git a/insights/tests/datasources/test_cloud_init.py b/insights/tests/datasources/test_cloud_init.py index e39517d2c0..903e922a49 100644 --- a/insights/tests/datasources/test_cloud_init.py +++ b/insights/tests/datasources/test_cloud_init.py @@ -87,7 +87,7 @@ def setup_function(func): def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) @pytest.mark.parametrize("ssh_deletekeys", [0, 1]) diff --git a/insights/tests/datasources/test_dir_list.py b/insights/tests/datasources/test_dir_list.py index 1c0013fdea..f8e250fef8 100644 --- a/insights/tests/datasources/test_dir_list.py +++ b/insights/tests/datasources/test_dir_list.py @@ -17,7 +17,7 @@ def setup_function(func): def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def test_du_dirs_list(): diff --git a/insights/tests/datasources/test_kernel_module_list.py b/insights/tests/datasources/test_kernel_module_list.py index 622e1051cf..b2a1d2121e 100644 --- a/insights/tests/datasources/test_kernel_module_list.py +++ b/insights/tests/datasources/test_kernel_module_list.py @@ -43,7 +43,7 @@ def setup_function(func): def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def test_module_filters(): diff --git a/insights/tests/datasources/test_ls.py b/insights/tests/datasources/test_ls.py index b7680d4d13..435ce1e120 100644 --- a/insights/tests/datasources/test_ls.py +++ b/insights/tests/datasources/test_ls.py @@ -57,7 +57,7 @@ def setup_function(func): def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def test_la_empty(): diff --git a/insights/tests/datasources/test_lsattr.py b/insights/tests/datasources/test_lsattr.py index 13a1391c1f..0fb78cc14c 100644 --- a/insights/tests/datasources/test_lsattr.py +++ 
b/insights/tests/datasources/test_lsattr.py @@ -14,12 +14,14 @@ def setup_function(func): if func is test_lsattr_one_file: filters.add_filter(Specs.lsattr_files_or_dirs, ["/etc/default/grub"]) if func is test_lsattr_more_files: - filters.add_filter(Specs.lsattr_files_or_dirs, ["/etc/default/grub", "/etc/httpd/httpd.conf"]) + filters.add_filter( + Specs.lsattr_files_or_dirs, ["/etc/default/grub", "/etc/httpd/httpd.conf"] + ) def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def test_lsattr_empty(): diff --git a/insights/tests/datasources/test_machine_ids.py b/insights/tests/datasources/test_machine_ids.py index 71e939ecef..ffee10a13d 100644 --- a/insights/tests/datasources/test_machine_ids.py +++ b/insights/tests/datasources/test_machine_ids.py @@ -32,13 +32,10 @@ def get(self, url): "insights_id": "95160ae3-1ee1-40e1-9666-316dbb9270dd", "subscription_manager_id": "a5a71082-1d09-4fc8-bffc-ad326e24df6a", "satellite_id": None, - "ip_addresses": [ - "10.0.222.82" - ], + "ip_addresses": ["10.0.222.82"], "fqdn": "hostname1.compute.internal", - } - ] + ], } ) elif url.endswith('deff'): @@ -53,23 +50,17 @@ def get(self, url): "insights_id": "95160ae3-1ee1-40e1-9666-316dbb927ff1", "subscription_manager_id": "sfwerwfsf-1d09-4fc8-bffc-sfxcsfsf", "satellite_id": None, - "ip_addresses": [ - "10.0.222.82" - ], + "ip_addresses": ["10.0.222.82"], "fqdn": "hostname1.compute.internal", - }, { "insights_id": "95160ae3-1ee1-40e1-9666-316dbb927ff1", "subscription_manager_id": "sfweswersf-1d09-4fc8-bffc-sfxcssdf", "satellite_id": None, - "ip_addresses": [ - "10.0.222.83" - ], + "ip_addresses": ["10.0.222.83"], "fqdn": "hostname2.compute.internal", - - } - ] + }, + ], } ) elif url.endswith('wrong'): @@ -83,7 +74,13 @@ def __init__(self, content): def setup_function(func): - if func in [test_duplicate, test_non_duplicate, test_wrong_machine_id_content, test_machine_id_not_in_filters, 
test_api_result_not_in_json_format]: + if func in [ + test_duplicate, + test_non_duplicate, + test_wrong_machine_id_content, + test_machine_id_not_in_filters, + test_api_result_not_in_json_format, + ]: filters.add_filter(Specs.duplicate_machine_id, ["dc194312-8cdd-4e75-8cf1-2094bfsfsdeff"]) filters.add_filter(Specs.duplicate_machine_id, ["dc194312-8cdd-4e75-8cf1-2094bf45678"]) filters.add_filter(Specs.duplicate_machine_id, ["dc194312-8cdd-4e75-8cf1-2094bfwrong"]) @@ -93,81 +90,100 @@ def setup_function(func): def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) -@patch('insights.specs.datasources.machine_ids.get_connection', - return_value=MockInsightsConnection()) +@patch( + 'insights.specs.datasources.machine_ids.get_connection', return_value=MockInsightsConnection() +) def test_duplicate(conn): broker = { Specs.machine_id: context_wrap(lines=['dc194312-8cdd-4e75-8cf1-2094bfsfsdeff']), - 'client_config': InsightsConfig(legacy_upload=False) + 'client_config': InsightsConfig(legacy_upload=False), } result = dup_machine_id_info(broker) - expected = DatasourceProvider(content=["dc194312-8cdd-4e75-8cf1-2094bfsfsdeff hostname1.compute.internal,hostname2.compute.internal"], relative_path='insights_commands/duplicate_machine_id_info') + expected = DatasourceProvider( + content=[ + "dc194312-8cdd-4e75-8cf1-2094bfsfsdeff hostname1.compute.internal,hostname2.compute.internal" + ], + relative_path='insights_commands/duplicate_machine_id_info', + ) assert expected.content == result.content assert expected.relative_path == result.relative_path broker = { Specs.machine_id: context_wrap(lines=['dc194312-8cdd-4e75-8cf1-2094bfsfsdeff']), - 'client_config': InsightsConfig(legacy_upload=True) + 'client_config': InsightsConfig(legacy_upload=True), } result = dup_machine_id_info(broker) - expected = DatasourceProvider(content=["dc194312-8cdd-4e75-8cf1-2094bfsfsdeff 
hostname1.compute.internal,hostname2.compute.internal"], relative_path='insights_commands/duplicate_machine_id_info') + expected = DatasourceProvider( + content=[ + "dc194312-8cdd-4e75-8cf1-2094bfsfsdeff hostname1.compute.internal,hostname2.compute.internal" + ], + relative_path='insights_commands/duplicate_machine_id_info', + ) assert expected.content == result.content assert expected.relative_path == result.relative_path -@patch('insights.specs.datasources.machine_ids.get_connection', - return_value=MockInsightsConnection()) +@patch( + 'insights.specs.datasources.machine_ids.get_connection', return_value=MockInsightsConnection() +) def test_non_duplicate(conn): broker = { Specs.machine_id: context_wrap(lines=['dc194312-8cdd-4e75-8cf1-2094bf45678']), - 'client_config': InsightsConfig(legacy_upload=False) + 'client_config': InsightsConfig(legacy_upload=False), } with pytest.raises(SkipComponent): dup_machine_id_info(broker) -@patch('insights.specs.datasources.machine_ids.get_connection', - return_value=MockInsightsConnection()) +@patch( + 'insights.specs.datasources.machine_ids.get_connection', return_value=MockInsightsConnection() +) def test_module_filters_empty(conn): broker = { Specs.machine_id: context_wrap(lines=['dc194312-8cdd-4e75-8cf1-2094bfsfsdeff']), - 'client_config': InsightsConfig(legacy_upload=False) + 'client_config': InsightsConfig(legacy_upload=False), } with pytest.raises(SkipComponent): dup_machine_id_info(broker) -@patch('insights.specs.datasources.machine_ids.get_connection', - return_value=MockInsightsConnection()) +@patch( + 'insights.specs.datasources.machine_ids.get_connection', return_value=MockInsightsConnection() +) def test_wrong_machine_id_content(conn): broker = { - Specs.machine_id: context_wrap(lines=['dc194312-8cdd-4e75-8cf1-2094bfsfsdeff', 'dc194312-8cdd-4e75-8cf1-2094bfsf45678']), - 'client_config': InsightsConfig(legacy_upload=False) + Specs.machine_id: context_wrap( + lines=['dc194312-8cdd-4e75-8cf1-2094bfsfsdeff', 
'dc194312-8cdd-4e75-8cf1-2094bfsf45678'] + ), + 'client_config': InsightsConfig(legacy_upload=False), } with pytest.raises(SkipComponent): dup_machine_id_info(broker) -@patch('insights.specs.datasources.machine_ids.get_connection', - return_value=MockInsightsConnection()) +@patch( + 'insights.specs.datasources.machine_ids.get_connection', return_value=MockInsightsConnection() +) def test_machine_id_not_in_filters(conn): broker = { Specs.machine_id: context_wrap(lines=['dc194312-8cdd-4e75-8cf1-2094bfsfsabc']), - 'client_config': InsightsConfig(legacy_upload=False) + 'client_config': InsightsConfig(legacy_upload=False), } with pytest.raises(SkipComponent): dup_machine_id_info(broker) -@patch('insights.specs.datasources.machine_ids.get_connection', - return_value=MockInsightsConnection("wrong")) +@patch( + 'insights.specs.datasources.machine_ids.get_connection', + return_value=MockInsightsConnection("wrong"), +) def test_api_result_not_in_json_format(conn): broker = { Specs.machine_id: context_wrap(lines=['dc194312-8cdd-4e75-8cf1-2094bfwrong']), - 'client_config': InsightsConfig(legacy_upload=False) + 'client_config': InsightsConfig(legacy_upload=False), } with pytest.raises(SkipComponent): dup_machine_id_info(broker) diff --git a/insights/tests/datasources/test_package_provides.py b/insights/tests/datasources/test_package_provides.py index d89453e53d..14cf1fe879 100644 --- a/insights/tests/datasources/test_package_provides.py +++ b/insights/tests/datasources/test_package_provides.py @@ -28,27 +28,63 @@ def shell_out(self, cmd, split=True, timeout=None, keep_rc=False, env=None, sign arg = tmp_cmd[-1] if 'readlink' in shell_cmd: if arg == JAVA_PATH_1: - return (0, [JAVA_PATH_2, ]) + return ( + 0, + [ + JAVA_PATH_2, + ], + ) elif arg == JAVA_PATH_ERR: - return (1, ['file not found', ]) + return ( + 1, + [ + 'file not found', + ], + ) elif arg.startswith('/'): - return (0, [arg, ]) + return ( + 0, + [ + arg, + ], + ) elif 'rpm' in shell_cmd: if arg == JAVA_PATH_2: - 
return (0, [JAVA_PKG_2, ]) + return ( + 0, + [ + JAVA_PKG_2, + ], + ) elif arg == HTTPD_PATH: - return (0, [HTTPD_PKG, ]) + return ( + 0, + [ + HTTPD_PKG, + ], + ) else: - return (1, ['file {0} is not owned by any package'.format(arg), ]) + return ( + 1, + [ + 'file {0} is not owned by any package'.format(arg), + ], + ) elif 'which' in shell_cmd: if 'exception' in arg: raise Exception() elif arg.startswith('/'): - return [tmp_cmd[-1], ] + return [ + tmp_cmd[-1], + ] elif arg.endswith('java'): - return ['/usr/bin/java', ] + return [ + '/usr/bin/java', + ] elif arg.endswith('httpd'): - return ['/usr/sbin/httpd', ] + return [ + '/usr/sbin/httpd', + ] raise Exception() @@ -62,7 +98,7 @@ def setup_function(func): def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def test_get_package(): @@ -71,7 +107,9 @@ def test_get_package(): result = get_package(ctx, '/usr/bin/java') assert result == 'java-1.8.0-openjdk-headless-1.8.0.292.b10-1.el7_9.x86_64' - result = get_package(ctx, '/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.292.b10-1.el7_9.x86_64/jre/bin/java') + result = get_package( + ctx, '/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.292.b10-1.el7_9.x86_64/jre/bin/java' + ) assert result == 'java-1.8.0-openjdk-headless-1.8.0.292.b10-1.el7_9.x86_64' @@ -106,12 +144,14 @@ def test_get_package_err(): """ EXPECTED = DatasourceProvider( - "\n".join([ - "{0} {1}".format(HTTPD_PATH, HTTPD_PKG), - "{0} {1}".format(JAVA_PATH_1, JAVA_PKG_2), - "{0} {1}".format(JAVA_PATH_2, JAVA_PKG_2) - ]), - relative_path='insights_commands/package_provides_command' + "\n".join( + [ + "{0} {1}".format(HTTPD_PATH, HTTPD_PKG), + "{0} {1}".format(JAVA_PATH_1, JAVA_PKG_2), + "{0} {1}".format(JAVA_PATH_2, JAVA_PKG_2), + ] + ), + relative_path='insights_commands/package_provides_command', ) diff --git a/insights/tests/datasources/test_rpm_pkgs.py b/insights/tests/datasources/test_rpm_pkgs.py index 47e7a1f028..26c3d53c14 100644 --- 
a/insights/tests/datasources/test_rpm_pkgs.py +++ b/insights/tests/datasources/test_rpm_pkgs.py @@ -29,12 +29,25 @@ def setup_function(func): if func is test_rpm_v_pkgs: - filters.add_filter(Specs.rpm_V_package_list, ['coreutils', 'procps', 'procps-ng', 'shadow-utils', 'passwd', 'sudo', 'chrony', 'findutils', 'glibc']) + filters.add_filter( + Specs.rpm_V_package_list, + [ + 'coreutils', + 'procps', + 'procps-ng', + 'shadow-utils', + 'passwd', + 'sudo', + 'chrony', + 'findutils', + 'glibc', + ], + ) def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def get_users(): @@ -77,4 +90,14 @@ def test_pkgs_list_empty(): def test_rpm_v_pkgs(): ret = rpm_v_pkg_list({}) - assert ret == ['chrony', 'coreutils', 'findutils', 'glibc', 'passwd', 'procps', 'procps-ng', 'shadow-utils', 'sudo'] + assert ret == [ + 'chrony', + 'coreutils', + 'findutils', + 'glibc', + 'passwd', + 'procps', + 'procps-ng', + 'shadow-utils', + 'sudo', + ] diff --git a/insights/tests/datasources/test_semanage.py b/insights/tests/datasources/test_semanage.py index 0c730b4786..bdf2ea4a18 100644 --- a/insights/tests/datasources/test_semanage.py +++ b/insights/tests/datasources/test_semanage.py @@ -43,15 +43,13 @@ def setup_function(func): def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def test_linux_users_count_map_staff_u(): selinux_list = Mock() selinux_list.content = SEMANGE_LOGIN_LIST_OUTPUT1.splitlines() - broker = { - LocalSpecs.selinux_user_mapping: selinux_list - } + broker = {LocalSpecs.selinux_user_mapping: selinux_list} result = users_count_map_selinux_user(broker) assert result is not None assert isinstance(result, DatasourceProvider) @@ -64,9 +62,7 @@ def test_linux_users_count_map_staff_u(): def test_linux_users_count_map_more_selinux_users(): selinux_list = Mock() selinux_list.content = SEMANGE_LOGIN_LIST_OUTPUT1.splitlines() - broker = { - 
LocalSpecs.selinux_user_mapping: selinux_list - } + broker = {LocalSpecs.selinux_user_mapping: selinux_list} result = users_count_map_selinux_user(broker) assert result is not None assert isinstance(result, DatasourceProvider) @@ -79,8 +75,6 @@ def test_linux_users_count_map_more_selinux_users(): def test_linux_users_count_map_staff_u_except(): selinux_list = Mock() selinux_list.content = SEMANGE_LOGIN_LIST_OUTPUT2.splitlines() - broker = { - LocalSpecs.selinux_user_mapping: selinux_list - } + broker = {LocalSpecs.selinux_user_mapping: selinux_list} with pytest.raises(SkipComponent): users_count_map_selinux_user(broker) diff --git a/insights/tests/datasources/test_user_group.py b/insights/tests/datasources/test_user_group.py index c47c889a1e..c7795a4081 100644 --- a/insights/tests/datasources/test_user_group.py +++ b/insights/tests/datasources/test_user_group.py @@ -17,7 +17,7 @@ def setup_function(func): def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def test_group_filters(): diff --git a/insights/tests/parsers/test_dse_ldif_simple.py b/insights/tests/parsers/test_dse_ldif_simple.py index 9c133a7573..d251658eae 100644 --- a/insights/tests/parsers/test_dse_ldif_simple.py +++ b/insights/tests/parsers/test_dse_ldif_simple.py @@ -135,19 +135,20 @@ def setup_function(func): if func in [test_dse_ldif_filtered]: filters.add_filter( - Specs.dse_ldif, [ + Specs.dse_ldif, + [ "nsslapd-security", "sslVersionMin", "sslVersionMax", "nsSSL3", "cn: config", # Note that this can serve as a canary for knowing whether the spec is collected. 
- ] + ], ) def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def test_dse_ldif_smoke(): @@ -185,7 +186,9 @@ def test_dse_ldif_coverage(): def test_dse_ldif_filtered(): - dse_ldif_simple = DseLdifSimple(context_wrap(DSE_LDIF_REAL_EXAMPLE, filtered_spec=Specs.dse_ldif)) + dse_ldif_simple = DseLdifSimple( + context_wrap(DSE_LDIF_REAL_EXAMPLE, filtered_spec=Specs.dse_ldif) + ) assert dse_ldif_simple["nsslapd-security"] == "on" assert len(dse_ldif_simple) == 6 expected = { diff --git a/insights/tests/parsers/test_ls_usr_bin.py b/insights/tests/parsers/test_ls_usr_bin.py index 565bf733be..5945bef3e1 100644 --- a/insights/tests/parsers/test_ls_usr_bin.py +++ b/insights/tests/parsers/test_ls_usr_bin.py @@ -20,15 +20,22 @@ def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def test_ls_usr_bin(): ls_usr_bin = LsUsrBin(context_wrap(LS_USR_BIN, path='insights_commands/ls_-ln_.usr.bin')) - assert ls_usr_bin.files_of('/usr/bin') == ['python', 'python-argcomplete-check-easy-install-script', 'python-argcomplete-tcsh', 'python-config', 'python2'] + assert ls_usr_bin.files_of('/usr/bin') == [ + 'python', + 'python-argcomplete-check-easy-install-script', + 'python-argcomplete-tcsh', + 'python-config', + 'python2', + ] python = ls_usr_bin.dir_entry('/usr/bin', 'python') assert python is not None - assert python == {'date': 'Oct 22 2019', + assert python == { + 'date': 'Oct 22 2019', 'dir': '/usr/bin', 'group': '0', 'link': 'python2', @@ -38,7 +45,8 @@ def test_ls_usr_bin(): 'perms': 'rwxrwxrwx.', 'raw_entry': 'lrwxrwxrwx. 
1 0 0 7 Oct 22 2019 python -> python2', 'size': 7, - 'type': 'l'} + 'type': 'l', + } def test_ls_usr_bin_doc_examples(): diff --git a/insights/tests/parsers/test_ls_usr_sbin.py b/insights/tests/parsers/test_ls_usr_sbin.py index 4aa2923fb3..0c1a5ca43b 100644 --- a/insights/tests/parsers/test_ls_usr_sbin.py +++ b/insights/tests/parsers/test_ls_usr_sbin.py @@ -20,12 +20,18 @@ def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def test_ls_usr_sbin(): ls_usr_sbin = LsUsrSbin(context_wrap(LS_USR_SBIN, path='insights_commands/ls_-ln_.usr.sbin')) - assert ls_usr_sbin.files_of('/usr/sbin') == ['accessdb', 'addgnupghome', 'addpart', 'postconf', 'postdrop'] + assert ls_usr_sbin.files_of('/usr/sbin') == [ + 'accessdb', + 'addgnupghome', + 'addpart', + 'postconf', + 'postdrop', + ] postdrop = ls_usr_sbin.dir_entry('/usr/sbin', 'postdrop') assert postdrop is not None assert postdrop == { @@ -38,7 +44,8 @@ def test_ls_usr_sbin(): 'date': 'Jan 27 2014', 'type': '-', 'size': 218552, - 'dir': '/usr/sbin'} + 'dir': '/usr/sbin', + } def test_ls_usr_sbin_doc_examples(): @@ -46,7 +53,9 @@ def test_ls_usr_sbin_doc_examples(): 'Specs': Specs, 'add_filter': filters.add_filter, 'LsUsrSbin': LsUsrSbin, - 'ls_usr_sbin': LsUsrSbin(context_wrap(LS_USR_SBIN, path='insights_commands/ls_-ln_.usr.sbin')), + 'ls_usr_sbin': LsUsrSbin( + context_wrap(LS_USR_SBIN, path='insights_commands/ls_-ln_.usr.sbin') + ), } failed, total = doctest.testmod(ls_usr_sbin, globs=env) assert failed == 0 diff --git a/insights/tests/parsers/test_messages.py b/insights/tests/parsers/test_messages.py index 1ff790d95d..d90cc00c8f 100644 --- a/insights/tests/parsers/test_messages.py +++ b/insights/tests/parsers/test_messages.py @@ -19,26 +19,16 @@ Apr 22 10:41:13 boy-bona crontab[32515]: (root) LIST (root) """.strip() -filters.add_filter(Specs.messages, [ - "LIST", - "CROND", - "jabberd", - "Wrapper", - "Launching", - "yum" -]) 
+filters.add_filter(Specs.messages, ["LIST", "CROND", "jabberd", "Wrapper", "Launching", "yum"]) def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def test_doc_examples(): - env = { - 'msgs': messages.Messages(context_wrap(MSGINFO)), - 'Messages': messages.Messages - } + env = {'msgs': messages.Messages(context_wrap(MSGINFO)), 'Messages': messages.Messages} failed, total = doctest.testmod(messages, globs=env) assert failed == 0 @@ -54,5 +44,8 @@ def test_messages(): assert crond[0].get('procname') == "CROND[27921]" assert msg_info.get('jabberd/sm[11057]')[0].get('hostname') == "lxc-rhel68-sat56" assert msg_info.get('Wrapper')[0].get('message') == "--> Wrapper Started as Daemon" - assert msg_info.get('Launching')[0].get('raw_message') == "May 18 15:13:36 lxc-rhel68-sat56 wrapper[11375]: Launching a JVM..." + assert ( + msg_info.get('Launching')[0].get('raw_message') + == "May 18 15:13:36 lxc-rhel68-sat56 wrapper[11375]: Launching a JVM..." 
+ ) assert 2 == len(msg_info.get('yum')) diff --git a/insights/tests/parsers/test_rhsm_log.py b/insights/tests/parsers/test_rhsm_log.py index 15d2f8c690..ba96345b17 100644 --- a/insights/tests/parsers/test_rhsm_log.py +++ b/insights/tests/parsers/test_rhsm_log.py @@ -30,24 +30,27 @@ 2011-12-27-08:41:13,104 [ERROR] @managercli.py:66 - certificate verify failed """ -filters.add_filter(Specs.rhsm_log, [ - "[ERROR]", - "[Errno" -]) +filters.add_filter(Specs.rhsm_log, ["[ERROR]", "[Errno"]) def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def test_rhsm_log(): rlog = RhsmLog(context_wrap(LOG1)) ern_list = rlog.get('[Errno -2]') assert 1 == len(ern_list) - assert ern_list[0]['raw_message'] == "2016-07-31 04:07:21,245 [ERROR] rhsmcertd-worker:24440 @entcertlib.py:121 - [Errno -2] Name or service not known" + assert ( + ern_list[0]['raw_message'] + == "2016-07-31 04:07:21,245 [ERROR] rhsmcertd-worker:24440 @entcertlib.py:121 - [Errno -2] Name or service not known" + ) assert ern_list[0]['timestamp'] == datetime(2016, 7, 31, 4, 7, 21, 245000) - assert ern_list[0]['message'] == "[ERROR] rhsmcertd-worker:24440 @entcertlib.py:121 - [Errno -2] Name or service not known" + assert ( + ern_list[0]['message'] + == "[ERROR] rhsmcertd-worker:24440 @entcertlib.py:121 - [Errno -2] Name or service not known" + ) rlog = RhsmLog(context_wrap(LOG2)) ern_list = rlog.get('[Errno -2]') @@ -62,7 +65,5 @@ def test_rhsm_log(): def test_doc(): - failed_count, tests = doctest.testmod( - rhsm_log, globs={'rhsm_log': RhsmLog(context_wrap(LOG1))} - ) + failed_count, tests = doctest.testmod(rhsm_log, globs={'rhsm_log': RhsmLog(context_wrap(LOG1))}) assert failed_count == 0 diff --git a/insights/tests/specs/test_specs.py b/insights/tests/specs/test_specs.py index 825cfab22e..8e1ec3f5bf 100644 --- a/insights/tests/specs/test_specs.py +++ b/insights/tests/specs/test_specs.py @@ -243,7 +243,7 @@ def setup_function(func): def 
teardown_function(func): # Reset Test ENV filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) dr.COMPONENTS = defaultdict(lambda: defaultdict(set)) dr.TYPE_OBSERVERS = defaultdict(set) dr.ENABLED = defaultdict(lambda: True) diff --git a/insights/tests/specs/test_specs_filters.py b/insights/tests/specs/test_specs_filters.py index 4a70014b4c..7674e7b463 100644 --- a/insights/tests/specs/test_specs_filters.py +++ b/insights/tests/specs/test_specs_filters.py @@ -30,10 +30,10 @@ here = os.path.abspath(os.path.dirname(__file__)) FILTER_DATA = "Some test data" -MAX_GLOBS = 1001 this_file = os.path.abspath(__file__).rstrip("c") test_large_file = '/tmp/_insights_test.large_file_filter' +test_one_line_left = '/tmp/_insights_test.one_line_left' test_empty_after_filter = '/tmp/_insights_test.empty_after_filter' specs_manifest = """ @@ -91,6 +91,7 @@ class Specs(SpecSet): empty_after_filter = RegistryPoint(filterable=True) cmd_w_args_filter = RegistryPoint(filterable=True) large_filter = RegistryPoint(filterable=True) + one_line_left = RegistryPoint(filterable=True) class Stuff(Specs): @@ -121,6 +122,7 @@ def files2(broker): empty_after_filter = simple_file(test_empty_after_filter) cmd_w_args_filter = command_with_args('ls -lt %s', files2) large_filter = simple_file(test_large_file) + one_line_left = simple_file(test_one_line_left) class stage(dr.ComponentType): @@ -138,6 +140,7 @@ def invoke(self, broker): Stuff.first_of_spec_w_filter, Stuff.cmd_w_args_filter, Stuff.large_filter, + Stuff.one_line_left, optional=[Stuff.empty_after_filter], ) def dostuff(broker): @@ -149,6 +152,7 @@ def dostuff(broker): assert Stuff.first_file_spec_w_filter in broker assert Stuff.first_of_spec_w_filter in broker assert Stuff.cmd_w_args_filter in broker + assert Stuff.one_line_left in broker assert Stuff.empty_after_filter not in broker @@ -172,14 +176,17 @@ def setup_function(func): with open(test_empty_after_filter, 'w') as t: t.write('no-filter') with 
open(test_large_file, 'w') as fd: - for i in range(filters.MATCH_COUNT + 1): + for i in range(filters.MAX_MATCH + 1): + fd.write(str(i) + FILTER_DATA + '\n') + with open(test_one_line_left, 'w') as fd: + for i in range(filters.MAX_MATCH + 1): fd.write(str(i) + FILTER_DATA + '\n') def teardown_function(func): # Reset Test ENV filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) dr.COMPONENTS = defaultdict(lambda: defaultdict(set)) dr.TYPE_OBSERVERS = defaultdict(set) dr.ENABLED = defaultdict(lambda: True) @@ -188,6 +195,8 @@ def teardown_function(func): os.remove(test_empty_after_filter) if os.path.exists(test_large_file): os.remove(test_large_file) + if os.path.exists(test_one_line_left): + os.remove(test_one_line_left) def test_specs_filters_spec_factory(): @@ -201,6 +210,7 @@ def test_specs_filters_spec_factory(): add_filter(Stuff.empty_after_filter, " hello ") add_filter(Stuff.cmd_w_args_filter, [" ", ":"]) add_filter(Stuff.large_filter, ["Some"]) + add_filter(Stuff.one_line_left, ["test data"], 1) broker = dr.Broker() broker[HostContext] = HostContext() broker['cleaner'] = Cleaner(None, None) @@ -228,6 +238,7 @@ def test_line_terminators(): add_filter(Stuff.empty_after_filter, " hello ") add_filter(Stuff.cmd_w_args_filter, [" ", ":"]) add_filter(Stuff.large_filter, ["Some"]) + add_filter(Stuff.one_line_left, ["test data"], 1) broker = dr.Broker() broker[HostContext] = HostContext() broker['cleaner'] = Cleaner(None, None) @@ -272,7 +283,9 @@ def test_exp_no_filters(): exception_cnt += 10000000 elif "large_filter" in str(spec): exception_cnt += 100000000 - assert exception_cnt == 111111111 + elif "one_line_left" in str(spec): + exception_cnt += 1000000000 + assert exception_cnt == 1111111111 @pytest.mark.parametrize("obfuscate", [True, False]) @@ -288,6 +301,7 @@ def test_specs_filters_collect(gen, obfuscate): add_filter(Stuff.empty_after_filter, " hello ") add_filter(Stuff.cmd_w_args_filter, [" ", ":"]) 
add_filter(Stuff.large_filter, ["Some"]) + add_filter(Stuff.one_line_left, ["test data"], 1) # Preparation manifest = collect.load_manifest(specs_manifest) for pkg in manifest.get("plugins", {}).get("packages", []): @@ -327,11 +341,17 @@ def test_specs_filters_collect(gen, obfuscate): org_content = fp.readlines() assert len(org_content) > len(new_content) if "large_filter" in spec: - # if matched lines exceed the MATCH_COUNT - # collect the last MATCH_COUNT lines only - assert len(new_content) == filters.MATCH_COUNT + # if matched lines exceed the MAX_MATCH + # collect the last MAX_MATCH lines only + assert len(new_content) == filters.MAX_MATCH + assert new_content[0] != org_content[0] + assert new_content[-1].strip() == org_content[-1].strip() + elif "one_line_left" in spec: + # only one line is required + assert len(new_content) == 1 assert new_content[0] != org_content[0] + # the last line is kept assert new_content[-1].strip() == org_content[-1].strip() - assert count == 10 # Number of Specs + assert count == 11 # Number of Specs arch.delete_archive_dir() diff --git a/insights/tests/specs/test_specs_save_as.py b/insights/tests/specs/test_specs_save_as.py index bf4d39d24e..1226104334 100644 --- a/insights/tests/specs/test_specs_save_as.py +++ b/insights/tests/specs/test_specs_save_as.py @@ -145,7 +145,7 @@ def invoke(self, broker): def teardown_function(func): # Reset Test ENV filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) dr.COMPONENTS = defaultdict(lambda: defaultdict(set)) dr.TYPE_OBSERVERS = defaultdict(set) dr.ENABLED = defaultdict(lambda: True) diff --git a/insights/tests/test_context_wrap.py b/insights/tests/test_context_wrap.py index 5feddbddbc..9b0057817f 100644 --- a/insights/tests/test_context_wrap.py +++ b/insights/tests/test_context_wrap.py @@ -18,7 +18,7 @@ def teardown_function(func): filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) def 
test_context_wrap_unfiltered(): diff --git a/insights/tests/tools/test_apply_spec_filters.py b/insights/tests/tools/test_apply_spec_filters.py index 54efdf4c26..7a325d3423 100644 --- a/insights/tests/tools/test_apply_spec_filters.py +++ b/insights/tests/tools/test_apply_spec_filters.py @@ -82,7 +82,7 @@ def teardown_function(): os.remove(yaml_file) filters._CACHE = {} - filters.FILTERS = defaultdict(set) + filters.FILTERS = defaultdict(dict) @pytest.mark.skipif(sys.version_info < (2, 7), reason='Skip py26')