Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/aurora/2.6' into mjean/aurora/2.6/DAOS-16167-09252024-pr14817-pr14917-pr15234-pr15283
Browse files Browse the repository at this point in the history

Required-githooks: true
Skipped-githooks: flake,pylint

Signed-off-by: Maureen Jean <[email protected]>
  • Loading branch information
mjean308 committed Oct 10, 2024
2 parents d20ada1 + 1117c41 commit 86e22a3
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 23 deletions.
12 changes: 1 addition & 11 deletions src/tests/ftest/util/soak_test_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ def pre_tear_down(self):
"""
self.log.info("<<preTearDown Started>> at %s", time.ctime())
errors = []
# clear out any jobs in squeue or jobscripts still in progress;
# clear out any jobs in squeue;
if self.failed_job_id_list and self.job_scheduler == "slurm":
job_id = " ".join([str(job) for job in self.failed_job_id_list])
self.log.info("<<Cancel jobs in queue with ids %s >>", job_id)
Expand Down Expand Up @@ -174,16 +174,6 @@ def pre_tear_down(self):
run_metrics_check(self, prefix="final")
# Gather logs
get_job_logs(self)
try:
get_daos_server_logs(self)
except SoakTestError as error:
errors.append(f"<<FAILED: Failed to gather server logs {error}>>")
# Gather journalctl logs
hosts = list(set(self.hostlist_servers))
since = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.start_time))
until = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self.end_time))
for journalctl_type in ["kernel", "daos_server"]:
get_journalctl_logs(self, hosts, since, until, journalctl_type)

if self.all_failed_harassers:
errors.extend(self.all_failed_harassers)
Expand Down
15 changes: 3 additions & 12 deletions src/tests/ftest/util/soak_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -254,7 +254,7 @@ def get_daos_server_logs(self):
self (obj): soak obj
"""
daos_dir = self.outputsoak_dir + "/daos_server_logs"
logs_dir = self.test_env.log_dir
logs_dir = self.test_env.log_dir + "/*log*"
hosts = self.hostlist_servers
if not os.path.exists(daos_dir):
os.mkdir(daos_dir)
Expand Down Expand Up @@ -439,8 +439,6 @@ def launch_jobscript(
results = {"handle": job_id, "state": "CANCELLED", "host_list": host_list}
debug_logging(log, test.enable_debug_msg, f"DBG: JOB {job_id} EXITED launch_jobscript")
job_queue.put(results)
# give time to update the queue before exiting
time.sleep(0.5)
return
if isinstance(host_list, str):
# assume one host in list
Expand All @@ -454,12 +452,6 @@ def launch_jobscript(
joblog = job_log1.replace("RHOST", str(rhost))
errorlog = error_log1.replace("RHOST", str(rhost))
cmd = ";".join([env, f"{script} {hosts} {job_id} {joblog} {errorlog}"])
# if "_fio_" in job_log:
# job_results = run_remote(
# log, rhost, cmd, verbose=False, timeout=timeout * 60, task_debug=False, stderr=False)
# else:
# job_results = run_local(
# log, cmd, verbose=False, timeout=timeout * 60, capture_output=False, stderr=False)
job_results = run_remote(
log, rhost, cmd, verbose=False, timeout=timeout * 60, task_debug=False, stderr=False)
if job_results:
Expand Down Expand Up @@ -1718,8 +1710,8 @@ def build_job_script(self, commands, job, nodesperjob, ppn):
script_file.write("JOB_ID=$2 \n")
script_file.write("JOB_LOG=$3 \n")
script_file.write("JOB_ERROR_LOG=$4 \n")
script_file.write("echo JOB NODES: $HOSTLIST \n")
script_file.write("echo JOB ID: $JOB_ID \n")
script_file.write("echo \"JOB NODES: $HOSTLIST \" \n")
script_file.write("echo \"JOB ID: $JOB_ID \" \n")
script_file.write("if [ -z \"$VIRTUAL_ENV\" ]; then \n")
script_file.write(" echo \"VIRTUAL_ENV not defined\" \n")
script_file.write("else \n")
Expand All @@ -1730,7 +1722,6 @@ def build_job_script(self, commands, job, nodesperjob, ppn):

for cmd in list(job_cmds):
script_file.write(cmd + "\n")
script_file.close()
os.chmod(scriptfile, stat.S_IXUSR | stat.S_IRUSR)
script_list.append([scriptfile, output, error])
return script_list
Expand Down

0 comments on commit 86e22a3

Please sign in to comment.