Flynt updates
PalNilsson committed Nov 8, 2023
1 parent 8ca7176 commit 937494b
Showing 2 changed files with 35 additions and 35 deletions.
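The changes below replace old-style % string interpolation with f-strings (and rename caught exceptions from e to exc), the kind of rewrite the flynt conversion tool produces. A minimal sketch of that transformation, using made-up values that do not appear in either file:

# Hypothetical before/after illustrating the flynt-style rewrite (not code from this repository)
name = "pilot"
trials = 3

# Before: old-style %-formatting
message = "executing %s a maximum of %d times" % (name, trials)

# After: the equivalent f-string
message = f"executing {name} a maximum of {trials} times"

print(message)  # executing pilot a maximum of 3 times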
16 changes: 8 additions & 8 deletions pilot/user/rubin/setup.py
@@ -59,8 +59,8 @@ def get_analysis_trf(transform, workdir):
logger.debug(f"jobopt_file = {jobopt_file} workdir = {workdir}")
try:
copy(jobopt_file, workdir)
- except Exception as e:
- logger.error(f"could not copy file {jobopt_file} to {workdir} : {e}")
+ except Exception as exc:
+ logger.error(f"could not copy file {jobopt_file} to {workdir} : {exc}")

if '/' in transform:
transform_name = transform.split('/')[-1]
@@ -101,9 +101,9 @@ def get_analysis_trf(transform, workdir):
path = os.path.join(workdir, transform_name)
logger.debug(f"changing permission of {path} to 0o755")
try:
- os.chmod(path, 0o755) # Python 2/3
- except Exception as e:
- diagnostics = f"failed to chmod {transform_name}: {e}"
+ os.chmod(path, 0o755)
+ except Exception as exc:
+ diagnostics = f"failed to chmod {transform_name}: {exc}"
return errors.CHMODTRF, diagnostics, ""

return ec, diagnostics, transform_name
@@ -149,7 +149,7 @@ def download_transform(url, transform_name, workdir):
path = os.path.join(workdir, transform_name)
ip_version = os.environ.get('PILOT_IP_VERSION', 'IPv6')
command = 'curl' if ip_version == 'IPv6' else 'curl -4'
- cmd = f'{command} -sS \"%s\" > %s' % (url, path)
+ cmd = f'{command} -sS \"{url}\" > {path}'
trial = 1
max_trials = 3

@@ -169,14 +169,14 @@

# try to download the trf a maximum of 3 times
while trial <= max_trials:
logger.info("executing command [trial %d/%d]: %s" % (trial, max_trials, cmd))
logger.info(f"executing command [trial {trial}/{max_trials}]: {cmd}")

exit_code, stdout, stderr = execute(cmd, mute=True)
if not stdout:
stdout = "(None)"
if exit_code != 0:
# Analyze exit code / output
diagnostics = "curl command failed: %d, %s, %s" % (exit_code, stdout, stderr)
diagnostics = f"curl command failed: {exit_code}, {stdout}, {stderr}"
logger.warning(diagnostics)
if trial == max_trials:
logger.fatal(f'could not download transform: {stdout}')
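The last two setup.py hunks sit in download_transform, which builds a curl command (forcing IPv4 unless PILOT_IP_VERSION is set to IPv6) and retries the download up to three times. A minimal, self-contained sketch of that retry pattern, using subprocess.run in place of the pilot's own execute() helper; the function name, return convention and print-based logging are illustrative assumptions:

import os
import subprocess

def download_with_retries(url: str, path: str, max_trials: int = 3) -> bool:
    # Sketch of the retry pattern used in download_transform; not the pilot's implementation.
    ip_version = os.environ.get('PILOT_IP_VERSION', 'IPv6')
    command = 'curl' if ip_version == 'IPv6' else 'curl -4'  # force IPv4 unless IPv6 is requested
    cmd = f'{command} -sS "{url}" > {path}'

    for trial in range(1, max_trials + 1):
        print(f'executing command [trial {trial}/{max_trials}]: {cmd}')
        result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
        if result.returncode == 0:
            return True
        print(f'curl command failed: {result.returncode}, {result.stdout or "(None)"}, {result.stderr}')
    return False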
54 changes: 27 additions & 27 deletions pilot/user/rubin/utilities.py
@@ -50,7 +50,7 @@ def get_memory_monitor_output_filename(suffix='txt'):
:return: File name (string).
"""

return "memory_monitor_output.%s" % suffix
return f"memory_monitor_output.{suffix}"


def get_memory_monitor_info_path(workdir, allowtxtfile=False):
@@ -74,13 +74,13 @@ def get_memory_monitor_info_path(workdir, allowtxtfile=False):
if os.path.exists(init_path):
path = init_path
else:
logger.info("neither %s, nor %s exist" % (path, init_path))
logger.info(f"neither {path}, nor {init_path} exist")
path = ""

if path == "" and allowtxtfile:
path = os.path.join(workdir, get_memory_monitor_output_filename())
if not os.path.exists(path):
logger.warning("file does not exist either: %s" % (path))
logger.warning(f"file does not exist either: {path}")

return path

@@ -101,11 +101,11 @@ def get_memory_monitor_info(workdir, allowtxtfile=False, name=""): # noqa: C901
# Note that only the final json file will contain the totRBYTES, etc
try:
summary_dictionary = get_memory_values(workdir, name=name)
- except Exception as e:
- logger.warning('failed to get memory values from memory monitor tool: %s' % e)
+ except Exception as exc:
+ logger.warning(f'failed to get memory values from memory monitor tool: {exc}')
summary_dictionary = {}
else:
logger.debug("summary_dictionary=%s" % str(summary_dictionary))
logger.debug(f"summary_dictionary={str(summary_dictionary)}")

# Fill the node dictionary
if summary_dictionary and summary_dictionary != {}:
@@ -126,8 +126,8 @@ def get_memory_monitor_info(workdir, allowtxtfile=False, name=""): # noqa: C901
node['avgVMEM'] = summary_dictionary['Avg']['avgVMEM']
node['avgSWAP'] = summary_dictionary['Avg']['avgSwap']
node['avgPSS'] = summary_dictionary['Avg']['avgPSS']
- except Exception as e:
- logger.warning("exception caught while parsing memory monitor file: %s" % e)
+ except Exception as exc:
+ logger.warning(f"exception caught while parsing memory monitor file: {exc}")
logger.warning("will add -1 values for the memory info")
node['maxRSS'] = -1
node['maxVMEM'] = -1
@@ -162,8 +162,8 @@ def get_memory_monitor_info(workdir, allowtxtfile=False, name=""): # noqa: C901
node['avgVMEM'] = summary_dictionary['Avg']['vmem']
node['avgSWAP'] = summary_dictionary['Avg']['swap']
node['avgPSS'] = summary_dictionary['Avg']['pss']
- except Exception as e:
- logger.warning("exception caught while parsing prmon file: %s" % e)
+ except Exception as exc:
+ logger.warning(f"exception caught while parsing prmon file: {exc}")
logger.warning("will add -1 values for the memory info")
node['maxRSS'] = -1
node['maxVMEM'] = -1
@@ -210,8 +210,8 @@ def get_max_memory_monitor_value(value, maxvalue, totalvalue): # noqa: C90
ec = 0
try:
value_int = int(value)
- except Exception as e:
- logger.warning("exception caught: %s" % e)
+ except Exception as exc:
+ logger.warning(f"exception caught: {exc}")
ec = 1
else:
totalvalue += value_int
@@ -270,7 +270,7 @@ def filter_value(value):
keys = ['vmem', 'pss', 'rss', 'swap']
values = {}
for key in keys:
- value_list = list(filter(filter_value, dictionary.get(key, 0))) # Python 2/3
+ value_list = list(filter(filter_value, dictionary.get(key, 0)))
n = len(value_list)
average = int(float(sum(value_list)) / float(n)) if n > 0 else 0
maximum = max(value_list)
@@ -316,7 +316,7 @@ def get_metadata_dict_from_txt(path, storejson=False, jobid=None):
dictionary['pandaid'] = jobid

path = os.path.join(os.path.dirname(path), get_memory_monitor_output_filename(suffix='json'))
- logger.debug('writing prmon dictionary to: %s' % path)
+ logger.debug(f'writing prmon dictionary to: {path}')
write_json(path, dictionary)
else:
logger.debug('nothing to write (no prmon dictionary)')
@@ -366,7 +366,7 @@ def convert_text_file_to_dictionary(path):
value = convert_to_int(key)
dictionary[key_entry].append(value)
except Exception:
logger.warning("unexpected format of utility output: %s" % line)
logger.warning(f"unexpected format of utility output: {line}")

return dictionary

@@ -435,16 +435,16 @@ def get_average_summary_dictionary(path):
rbytes = None
wbytes = None
except Exception:
logger.warning("unexpected format of utility output: %s (expected format: Time, VMEM,"
" PSS, RSS, Swap [, RCHAR, WCHAR, RBYTES, WBYTES])" % (line))
logger.warning(f"unexpected format of utility output: {line} (expected format: Time, VMEM, PSS, "
f"RSS, Swap [, RCHAR, WCHAR, RBYTES, WBYTES])")
else:
# Convert to int
ec1, maxvmem, totalvmem = get_max_memory_monitor_value(vmem, maxvmem, totalvmem)
ec2, maxpss, totalpss = get_max_memory_monitor_value(pss, maxpss, totalpss)
ec3, maxrss, totalrss = get_max_memory_monitor_value(rss, maxrss, totalrss)
ec4, maxswap, totalswap = get_max_memory_monitor_value(swap, maxswap, totalswap)
if ec1 or ec2 or ec3 or ec4:
logger.warning("will skip this row of numbers due to value exception: %s" % (line))
logger.warning(f"will skip this row of numbers due to value exception: {line}")
else:
n += 1

@@ -491,7 +491,7 @@ def get_memory_values(workdir, name=""):
# Get the path to the proper memory info file (priority ordered)
path = get_memory_monitor_info_path(workdir, allowtxtfile=True)
if os.path.exists(path):
logger.info("using path: %s (trf name=%s)" % (path, name))
logger.info(f"using path: {path} (trf name={name})")

# Does a JSON summary file exist? If so, there's no need to calculate maximums and averages in the pilot
if path.lower().endswith('json'):
@@ -503,7 +503,7 @@
summary_dictionary = get_average_summary_dictionary_prmon(path)
else:
summary_dictionary = get_average_summary_dictionary(path)
- logger.debug('summary_dictionary=%s (trf name=%s)' % (str(summary_dictionary), name))
+ logger.debug(f'summary_dictionary={str(summary_dictionary)} (trf name={name})')
else:
if path == "":
logger.warning("filename not set for memory monitor output")
@@ -525,20 +525,20 @@ def post_memory_monitor_action(job):
nap = 3
path1 = os.path.join(job.workdir, get_memory_monitor_summary_filename())
path2 = os.environ.get('PILOT_HOME')
- i = 0
+ counter = 0
maxretry = 20
- while i <= maxretry:
+ while counter <= maxretry:
if os.path.exists(path1):
break
logger.info("taking a short nap (%d s) to allow the memory monitor to finish writing to the summary file (#%d/#%d)"
% (nap, i, maxretry))
logger.info(f"taking a short nap ({nap} s) to allow the memory monitor to finish writing to the "
f"summary file (#{counter}/#{maxretry})")
time.sleep(nap)
- i += 1
+ counter += 1

try:
copy(path1, path2)
- except Exception as e:
- logger.warning('failed to copy memory monitor output: %s' % e)
+ except Exception as exc:
+ logger.warning(f'failed to copy memory monitor output: {exc}')


def precleanup():
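The utilities.py changes are confined to the memory-monitor helpers, which parse the monitor's text output, keep a running maximum and total per quantity, and report averages at the end. A small self-contained sketch of that accumulate-then-average pattern; the helper name, the sample values and the narrower exception handling are illustrative assumptions, not the pilot's code:

def accumulate(value, maxvalue, totalvalue):
    # Mirrors the get_max_memory_monitor_value pattern: return (error_code, new_max, new_total).
    try:
        value_int = int(value)
    except (TypeError, ValueError) as exc:
        print(f"exception caught: {exc}")
        return 1, maxvalue, totalvalue
    totalvalue += value_int
    if value_int > maxvalue:
        maxvalue = value_int
    return 0, maxvalue, totalvalue

# Made-up RSS samples (kB); the malformed entry is skipped, as in the summary parser.
samples = ["1024", "2048", "bad", "4096"]
maxrss, totalrss, n = 0, 0, 0
for sample in samples:
    ec, maxrss, totalrss = accumulate(sample, maxrss, totalrss)
    if ec == 0:
        n += 1
avgrss = int(totalrss / n) if n else 0
print(f"maxRSS={maxrss} avgRSS={avgrss}")  # maxRSS=4096 avgRSS=2389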
