Merge pull request cylc#6161 from oliver-sanders/lint--
Lint
hjoliver authored Jul 18, 2024
2 parents f7e0f80 + b188a44 commit c24ff05
Showing 55 changed files with 197 additions and 148 deletions.
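
Almost every hunk below applies the same pattern: re-raises inside except blocks gain an explicit "from None" (suppress the implicit exception context) or "from exc" (record the original exception as the cause), the kind of change flagged by checks such as flake8-bugbear's B904. The following sketch is illustrative only, with hypothetical names, and is not taken from the Cylc codebase:

class ConfigError(Exception):
    """Hypothetical user-facing error for this sketch."""


def parse_port(value: str) -> int:
    try:
        return int(value)
    except ValueError:
        # "raise ... from None" suppresses the implicit context, so the
        # traceback shows only the user-facing error, not the ValueError.
        raise ConfigError(f"invalid port: {value!r}") from None


def read_text(path: str) -> str:
    try:
        with open(path) as handle:
            return handle.read()
    except OSError as exc:
        # "raise ... from exc" keeps the original exception as the explicit
        # cause ("The above exception was the direct cause of ...").
        raise ConfigError(f"cannot read {path}") from exc

Without either suffix, Python prints "During handling of the above exception, another exception occurred", which is usually noise when the original exception has been fully handled.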
2 changes: 1 addition & 1 deletion cylc/flow/broadcast_report.py
@@ -72,7 +72,7 @@ def get_broadcast_change_iter(modified_settings, is_cancel=False):
        value = setting
        keys_str = ""
        while isinstance(value, dict):
-            key, value = list(value.items())[0]
+            key, value = next(iter(value.items()))
            if isinstance(value, dict):
                keys_str += "[" + key + "]"
            else:
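
The broadcast_report.py change above also swaps list(value.items())[0] for next(iter(value.items())), which fetches the first key/value pair without copying every item into a throwaway list. A standalone comparison with made-up data (not Cylc code); note that an empty dict would raise StopIteration here rather than IndexError:

setting = {"environment": {"FOO": "1"}}

# Old form: builds a full list of items just to index the first one.
key, value = list(setting.items())[0]

# New form: stops after the first item, no intermediate list.
key, value = next(iter(setting.items()))

assert (key, value) == ("environment", {"FOO": "1"})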
9 changes: 5 additions & 4 deletions cylc/flow/clean.py
@@ -129,7 +129,7 @@ def _clean_check(opts: 'Values', id_: str, run_dir: Path) -> None:
    except ContactFileExists as exc:
        raise ServiceFileError(
            f"Cannot clean running workflow {id_}.\n\n{exc}"
-        )
+        ) from None


def init_clean(id_: str, opts: 'Values') -> None:
@@ -173,7 +173,7 @@ def init_clean(id_: str, opts: 'Values') -> None:
            try:
                platform_names = get_platforms_from_db(local_run_dir)
            except ServiceFileError as exc:
-                raise ServiceFileError(f"Cannot clean {id_} - {exc}")
+                raise ServiceFileError(f"Cannot clean {id_} - {exc}") from None
            except sqlite3.OperationalError as exc:
                # something went wrong with the query
                # e.g. the table/field we need isn't there
@@ -186,7 +186,7 @@ def init_clean(id_: str, opts: 'Values') -> None:
                    ' local files (you may need to remove files on other'
                    ' platforms manually).'
                )
-                raise ServiceFileError(f"Cannot clean {id_} - {exc}")
+                raise ServiceFileError(f"Cannot clean {id_} - {exc}") from exc

        if platform_names and platform_names != {'localhost'}:
            remote_clean(
@@ -361,7 +361,7 @@ def remote_clean(
    except PlatformLookupError as exc:
        raise PlatformLookupError(
            f"Cannot clean {id_} on remote platforms as the workflow database "
-            f"is out of date/inconsistent with the global config - {exc}")
+            f"is out of date/inconsistent with the global config - {exc}"
+        ) from None

    queue: Deque[RemoteCleanQueueTuple] = deque()
    remote_clean_cmd = partial(
2 changes: 1 addition & 1 deletion cylc/flow/command_validation.py
@@ -70,7 +70,7 @@ def flow_opts(flows: List[str], flow_wait: bool) -> None:
            try:
                int(val)
            except ValueError:
-                raise InputError(ERR_OPT_FLOW_VAL.format(val))
+                raise InputError(ERR_OPT_FLOW_VAL.format(val)) from None

    if flow_wait and flows[0] in [FLOW_NEW, FLOW_NONE]:
        raise InputError(ERR_OPT_FLOW_WAIT)
4 changes: 2 additions & 2 deletions cylc/flow/commands.py
@@ -211,7 +211,7 @@ async def stop(
    try:
        mode = StopMode(mode)
    except ValueError:
-        raise CommandFailedError(f"Invalid stop mode: '{mode}'")
+        raise CommandFailedError(f"Invalid stop mode: '{mode}'") from None
    schd._set_stop(mode)
    if mode is StopMode.REQUEST_KILL:
        schd.time_next_kill = time()
@@ -309,7 +309,7 @@ async def set_verbosity(schd: 'Scheduler', level: Union[int, str]):
        lvl = int(level)
        LOG.setLevel(lvl)
    except (TypeError, ValueError) as exc:
-        raise CommandFailedError(exc)
+        raise CommandFailedError(exc) from None
    cylc.flow.flags.verbosity = log_level_to_verbosity(lvl)

55 changes: 32 additions & 23 deletions cylc/flow/config.py
@@ -199,11 +199,11 @@ def interpolate_template(tmpl, params_dict):
    try:
        return tmpl % params_dict
    except KeyError:
-        raise ParamExpandError('bad parameter')
+        raise ParamExpandError('bad parameter') from None
    except TypeError:
-        raise ParamExpandError('wrong data type for parameter')
+        raise ParamExpandError('wrong data type for parameter') from None
    except ValueError:
-        raise ParamExpandError('bad template syntax')
+        raise ParamExpandError('bad template syntax') from None


class WorkflowConfig:
@@ -480,8 +480,8 @@ def __init__(
                        get_interval(offset_string).standardise())
                except IntervalParsingError:
                    raise WorkflowConfigError(
-                        "Illegal %s spec: %s" % (
-                            s_type, offset_string))
+                        "Illegal %s spec: %s" % (s_type, offset_string)
+                    ) from None
                extn = "(" + offset_string + ")"

        # Replace family names with members.
@@ -709,7 +709,7 @@ def process_initial_cycle_point(self) -> None:
        try:
            icp = ingest_time(orig_icp, get_current_time_string())
        except IsodatetimeError as exc:
-            raise WorkflowConfigError(str(exc))
+            raise WorkflowConfigError(str(exc)) from None
        if orig_icp != icp:
            # now/next()/previous() was used, need to store
            # evaluated point in DB
@@ -761,7 +761,7 @@ def process_start_cycle_point(self) -> None:
                    for taskid in self.options.starttask
                ]
            except ValueError as exc:
-                raise InputError(str(exc))
+                raise InputError(str(exc)) from None
            self.start_point = min(
                get_point(cycle).standardise()
                for cycle in cycle_points if cycle
@@ -1114,7 +1114,7 @@ def _check_completion_expression(self, task_name: str, expr: str) -> None:
                    f'\n {expr}'
                    '\nThe "finished" output cannot be used in completion'
                    ' expressions, use "succeeded or failed".'
-                )
+                ) from None

            for alt_qualifier, qualifier in ALT_QUALIFIERS.items():
                _alt_compvar = trigger_to_completion_variable(alt_qualifier)
@@ -1125,21 +1125,21 @@ def _check_completion_expression(self, task_name: str, expr: str) -> None:
                        f'\n {expr}'
                        f'\nUse "{_compvar}" not "{_alt_compvar}" '
                        'in completion expressions.'
-                    )
+                    ) from None

            raise WorkflowConfigError(
                # NOTE: str(exc) == "name 'x' is not defined" tested in
                # tests/integration/test_optional_outputs.py
                f'Error in [runtime][{task_name}]completion:'
                f'\n{error}'
-            )
+            ) from None
        except Exception as exc:  # includes InvalidCompletionExpression
            # expression contains non-whitelisted syntax or any other error in
            # the expression e.g. SyntaxError
            raise WorkflowConfigError(
                f'Error in [runtime][{task_name}]completion:'
                f'\n{str(exc)}'
-            )
+            ) from None

        # ensure consistency between the graph and the completion expression
        for compvar in (
@@ -1415,11 +1415,12 @@ def compute_family_tree(self):
                    c3_single.mro(name))
            except RecursionError:
                raise WorkflowConfigError(
-                    "circular [runtime] inheritance?")
+                    "circular [runtime] inheritance?"
+                ) from None
            except Exception as exc:
                # catch inheritance errors
                # TODO - specialise MRO exceptions
-                raise WorkflowConfigError(str(exc))
+                raise WorkflowConfigError(str(exc)) from None

        for name in self.cfg['runtime']:
            ancestors = self.runtime['linearized ancestors'][name]
@@ -1758,7 +1759,7 @@ def _check_task_event_handlers(self):
                            f' {taskdef.name}:'
                            f' {handler_template}:'
                            f' {repr(exc)}'
-                        )
+                        ) from None

    def _check_special_tasks(self):
        """Check declared special tasks are valid, and detect special
@@ -1865,7 +1866,9 @@ def generate_triggers(self, lexpression, left_nodes, right, seq,
        try:
            expr_list = listify(lexpression)
        except SyntaxError:
-            raise WorkflowConfigError('Error in expression "%s"' % lexpression)
+            raise WorkflowConfigError(
+                'Error in expression "%s"' % lexpression
+            ) from None

        triggers = {}
        xtrig_labels = set()
@@ -1942,7 +1945,9 @@ def generate_triggers(self, lexpression, left_nodes, right, seq,
                xtrig = xtrigs[label]
            except KeyError:
                if label != 'wall_clock':
-                    raise WorkflowConfigError(f"xtrigger not defined: {label}")
+                    raise WorkflowConfigError(
+                        f"xtrigger not defined: {label}"
+                    ) from None
                else:
                    # Allow "@wall_clock" in graph as implicit zero-offset.
                    xtrig = SubFuncContext('wall_clock', 'wall_clock', [], {})
@@ -2276,7 +2281,7 @@ def load_graph(self):
                msg += ' (final cycle point=%s)' % fcp
                if isinstance(exc, CylcError):
                    msg += ' %s' % exc.args[0]
-                raise WorkflowConfigError(msg)
+                raise WorkflowConfigError(msg) from None
            self.sequences.append(seq)
        parser = GraphParser(
            family_map,
@@ -2431,7 +2436,7 @@ def get_taskdef(
            except TaskDefError as exc:
                if orig_expr:
                    LOG.error(orig_expr)
-                raise WorkflowConfigError(str(exc))
+                raise WorkflowConfigError(str(exc)) from None
            else:
                # Record custom message outputs from [runtime].
                for output, message in (
@@ -2443,14 +2448,14 @@ def get_taskdef(
                            f'Invalid task output "'
                            f'[runtime][{name}][outputs]'
                            f'{output} = {message}" - {msg}'
-                        )
+                        ) from None
                    valid, msg = TaskMessageValidator.validate(message)
                    if not valid:
                        raise WorkflowConfigError(
                            f'Invalid task message "'
                            f'[runtime][{name}][outputs]'
                            f'{output} = {message}" - {msg}'
-                        )
+                        ) from None
                    self.taskdefs[name].add_output(output, message)

        return self.taskdefs[name]
@@ -2462,7 +2467,7 @@ def _get_taskdef(self, name: str) -> TaskDef:
        try:
            rtcfg = self.cfg['runtime'][name]
        except KeyError:
-            raise WorkflowConfigError("Task not defined: %s" % name)
+            raise WorkflowConfigError("Task not defined: %s" % name) from None
        # We may want to put in some handling for cases of changing the
        # initial cycle via restart (accidentally or otherwise).

@@ -2554,7 +2559,9 @@ def process_metadata_urls(self):
                    'workflow': self.workflow,
                }
            except (KeyError, ValueError):
-                raise InputError(f'Invalid template [meta]URL: {url}')
+                raise InputError(
+                    f'Invalid template [meta]URL: {url}'
+                ) from None
            else:
                LOG.warning(
                    'Detected deprecated template variables in [meta]URL.'
@@ -2590,7 +2597,9 @@ def process_metadata_urls(self):
                    'task': name,
                }
            except (KeyError, ValueError):
-                raise InputError(f'Invalid template [meta]URL: {url}')
+                raise InputError(
+                    f'Invalid template [meta]URL: {url}'
+                ) from None
            else:
                LOG.warning(
                    'Detected deprecated template variables in'
2 changes: 1 addition & 1 deletion cylc/flow/cycling/integer.py
@@ -150,7 +150,7 @@ def standardise(self):
        try:
            self.value = str(int(self))
        except (TypeError, ValueError) as exc:
-            raise PointParsingError(type(self), self.value, exc)
+            raise PointParsingError(type(self), self.value, exc) from None
        return self

    def __int__(self):
6 changes: 3 additions & 3 deletions cylc/flow/cycling/iso8601.py
@@ -102,7 +102,7 @@ def standardise(self):
                    WorkflowSpecifics.NUM_EXPANDED_YEAR_DIGITS)
            else:
                message = str(exc)
-            raise PointParsingError(type(self), self.value, message)
+            raise PointParsingError(type(self), self.value, message) from None
        return self

    def sub(self, other):
@@ -176,7 +176,7 @@ def standardise(self):
        try:
            self.value = str(interval_parse(self.value))
        except IsodatetimeError:
-            raise IntervalParsingError(type(self), self.value)
+            raise IntervalParsingError(type(self), self.value) from None
        return self

    def add(self, other):
@@ -782,7 +782,7 @@ def prev_next(
            raise WorkflowConfigError(
                f'Invalid offset: {my_time}:'
                f' Offset lists are semicolon separated, try {suggest}'
-            )
+            ) from None

        timepoints.append(parsed_point + now)

6 changes: 3 additions & 3 deletions cylc/flow/dbstatecheck.py
@@ -78,15 +78,15 @@ def __init__(self, rund, workflow, db_path=None):
        try:
            self.db_point_fmt = self._get_db_point_format()
            self.c7_back_compat_mode = False
-        except sqlite3.OperationalError as exc:
+        except sqlite3.OperationalError:
            # BACK COMPAT: Cylc 7 DB (see method below).
            try:
                self.db_point_fmt = self._get_db_point_format_compat()
                self.c7_back_compat_mode = True
            except sqlite3.OperationalError:
                with suppress(Exception):
                    self.conn.close()
-                raise exc  # original error
+                raise

    def __enter__(self):
        return self
@@ -137,7 +137,7 @@ def adjust_point_to_db(self, cycle, offset):
                raise InputError(
                    f'Cycle point "{cycle}" is not compatible'
                    f' with DB point format "{self.db_point_fmt}"'
-                )
+                ) from None
        return cycle

    @staticmethod
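
Besides the "from None" in the second hunk, the first dbstatecheck.py hunk above replaces "raise exc" (re-raising a saved reference) with a bare "raise": inside an except block, raise with no argument re-raises the exception currently being handled, traceback intact, so the handler no longer needs to bind it to a name. A minimal sketch with hypothetical table names, not the actual Cylc schema:

import sqlite3


def db_point_format(conn: sqlite3.Connection) -> str:
    """Return a value from the DB, falling back to an older schema."""
    try:
        return conn.execute("SELECT fmt FROM params").fetchone()[0]
    except sqlite3.OperationalError:
        try:
            return conn.execute("SELECT fmt FROM old_params").fetchone()[0]
        except sqlite3.OperationalError:
            conn.close()
            # A bare "raise" re-raises the exception currently being
            # handled, keeping its original traceback.
            raise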
8 changes: 5 additions & 3 deletions cylc/flow/graph_parser.py
@@ -346,7 +346,7 @@ def parse_graph(self, graph_string: str) -> None:
                    raise GraphParseError(
                        f"Dangling {seq}:"
                        f"{this_line}"
-                    )
+                    ) from None
                part_lines.append(this_line)

            # Check that a continuation sequence doesn't end this line and
@@ -638,7 +638,8 @@ def _proc_dep_pair(
                except KeyError:
                    # "FAM:bad => foo" in LHS (includes "FAM => bar" too).
                    raise GraphParseError(
-                        f"Illegal family trigger in {expr}")
+                        f"Illegal family trigger in {expr}"
+                    ) from None
            else:
                # Not a family.
                if trig in self.__class__.fam_to_mem_trigger_map:
@@ -911,7 +912,8 @@ def _compute_triggers(
            except KeyError:
                # Illegal family trigger on RHS of a pair.
                raise GraphParseError(
-                    f"Illegal family trigger: {name}:{output}")
+                    f"Illegal family trigger: {name}:{output}"
+                ) from None
        else:
            fam = False
            if not output:
2 changes: 1 addition & 1 deletion cylc/flow/host_select.py
@@ -373,7 +373,7 @@ def _filter_by_ranking(hosts, rankings, results, data=None):
                f'\n Expression: {item}'
                f'\n Configuration: {GLBL_CFG_STR}'
                f'\n Error: {exc}'
-            )
+            ) from None
        if isinstance(result, bool):
            host_rankings[item] = result
            data[host][item] = result
2 changes: 1 addition & 1 deletion cylc/flow/id.py
@@ -128,7 +128,7 @@ def __getitem__(self, key):
            return dict.__getitem__(self, key)
        except KeyError:
            if key not in self._KEYS:
-                raise ValueError(f'Invalid token: {key}')
+                raise ValueError(f'Invalid token: {key}') from None
            return None

    def __str__(self):
