From f4551b3696d1d265cd74bf4d369deccf2006a4b4 Mon Sep 17 00:00:00 2001
From: Helena Nandi Formentin
Date: Fri, 26 Jun 2020 16:23:41 +0200
Subject: [PATCH] Add job index number to jobs.json

---
 lib/include/ert/job_queue/ext_job.hpp         |  2 +-
 lib/job_queue/ext_job.cpp                     | 16 +++++--
 lib/job_queue/forward_model.cpp               | 10 ++---
 lib/job_queue/tests/ext_job_test.cpp          |  2 +-
 python/job_runner/job.py                      |  4 +-
 python/tests/job_runner/test_file_reporter.py |  6 +--
 python/tests/job_runner/test_jobmanager.py    | 24 +++++------
 .../test_forward_model_formatted_print.py     | 42 +++++++++----------
 8 files changed, 56 insertions(+), 50 deletions(-)

diff --git a/lib/include/ert/job_queue/ext_job.hpp b/lib/include/ert/job_queue/ext_job.hpp
index 88c883d080..3b9827892e 100644
--- a/lib/include/ert/job_queue/ext_job.hpp
+++ b/lib/include/ert/job_queue/ext_job.hpp
@@ -51,7 +51,7 @@ void ext_job_set_private_arg(ext_job_type * , const char * ,
 void ext_job_set_argc(ext_job_type * , const char ** , int);
 void ext_job_python_fprintf(const ext_job_type * , FILE * , const subst_list_type *);
-void ext_job_json_fprintf(const ext_job_type*, FILE*, const subst_list_type*);
+void ext_job_json_fprintf(const ext_job_type*, int job_index, FILE*, const subst_list_type*);
 ext_job_type * ext_job_fscanf_alloc(const char * , const char * , bool private_job , const char *, bool search_path);
 const stringlist_type * ext_job_get_arglist( const ext_job_type * ext_job );
 bool ext_job_is_shared( const ext_job_type * ext_job );
diff --git a/lib/job_queue/ext_job.cpp b/lib/job_queue/ext_job.cpp
index 66259a3b79..cca362b2a3 100644
--- a/lib/job_queue/ext_job.cpp
+++ b/lib/job_queue/ext_job.cpp
@@ -866,8 +866,15 @@ static void __fprintf_python_arg_types(FILE * stream,
 }
 
 
-void ext_job_json_fprintf(const ext_job_type * ext_job, FILE * stream, const subst_list_type * global_args) {
+void ext_job_json_fprintf(const ext_job_type * ext_job, int job_index, FILE * stream, const subst_list_type * global_args) {
   const char * null_value = "null";
+
+  char * file_stdout_index = NULL;
+  char * file_stderr_index = NULL;
+
+  file_stdout_index = util_alloc_sprintf("%s.%d",ext_job->stdout_file, job_index);
+  file_stderr_index = util_alloc_sprintf("%s.%d",ext_job->stderr_file, job_index);
+
   fprintf(stream," {");
   {
     __fprintf_python_string( stream, "", "name", ext_job->name, ",\n", ext_job->private_args, NULL, null_value);
@@ -875,8 +882,8 @@ void ext_job_json_fprintf(const ext_job_type * ext_job, FILE * stream, const sub
     __fprintf_python_string( stream, " ", "target_file", ext_job->target_file, ",\n", ext_job->private_args, global_args, null_value);
     __fprintf_python_string( stream, " ", "error_file", ext_job->error_file, ",\n", ext_job->private_args, global_args, null_value);
     __fprintf_python_string( stream, " ", "start_file", ext_job->start_file, ",\n", ext_job->private_args, global_args, null_value);
-    __fprintf_python_string( stream, " ", "stdout", ext_job->stdout_file, ",\n", ext_job->private_args, global_args, null_value);
-    __fprintf_python_string( stream, " ", "stderr", ext_job->stderr_file, ",\n", ext_job->private_args, global_args, null_value);
+    __fprintf_python_string( stream, " ", "stdout", file_stdout_index, ",\n", ext_job->private_args, global_args, null_value);
+    __fprintf_python_string( stream, " ", "stderr", file_stderr_index, ",\n", ext_job->private_args, global_args, null_value);
     __fprintf_python_string( stream, " ", "stdin", ext_job->stdin_file, ",\n", ext_job->private_args, global_args, null_value);
     __fprintf_python_argList( stream, " ", ext_job, ",\n", global_args );
     __fprintf_python_hash( stream, " ", "environment", ext_job->environment, ",\n", ext_job->private_args, global_args, null_value);
@@ -892,6 +899,9 @@ void ext_job_json_fprintf(const ext_job_type * ext_job, FILE * stream, const sub
 
   }
   fprintf(stream,"}");
+
+  free( file_stdout_index );
+  free( file_stderr_index );
 }
 
 
diff --git a/lib/job_queue/forward_model.cpp b/lib/job_queue/forward_model.cpp
index 43369b9cbf..07295ffd2b 100644
--- a/lib/job_queue/forward_model.cpp
+++ b/lib/job_queue/forward_model.cpp
@@ -183,7 +183,7 @@ static void forward_model_json_fprintf(const forward_model_type * forward_model,
                                        const env_varlist_type * varlist) {
   char * json_file = (char*)util_alloc_filename(path , DEFAULT_JOB_JSON, NULL);
   FILE * stream = util_fopen(json_file, "w");
-  int i;
+  int job_index;
 
   fprintf(stream, "{\n");
 
@@ -191,10 +191,10 @@ static void forward_model_json_fprintf(const forward_model_type * forward_model,
   fprintf(stream, "\"DATA_ROOT\": \"%s\",\n", data_root);
   env_varlist_json_fprintf(varlist, stream); fprintf(stream, ",\n");
   fprintf(stream, "\"jobList\" : [");
-  for (i=0; i < vector_get_size(forward_model->jobs); i++) {
-    const ext_job_type * job = (const ext_job_type*)vector_iget_const(forward_model->jobs , i);
-    ext_job_json_fprintf(job , stream , global_args);
-    if (i < (vector_get_size( forward_model->jobs ) - 1))
+  for (job_index=0; job_index < vector_get_size(forward_model->jobs); job_index++) {
+    const ext_job_type * job = (const ext_job_type*)vector_iget_const(forward_model->jobs , job_index);
+    ext_job_json_fprintf(job , job_index, stream , global_args);
+    if (job_index < (vector_get_size( forward_model->jobs ) - 1))
       fprintf(stream,",\n");
   }
   fprintf(stream, "],\n");
diff --git a/lib/job_queue/tests/ext_job_test.cpp b/lib/job_queue/tests/ext_job_test.cpp
index 9bce6fcbfc..1cbaac8931 100644
--- a/lib/job_queue/tests/ext_job_test.cpp
+++ b/lib/job_queue/tests/ext_job_test.cpp
@@ -44,7 +44,7 @@ void test_angular() {
   ext_job_type * ext_job = ext_job_fscanf_alloc("ANGULAR", NULL, false, "ANGULAR", false);
   {
     FILE * stream = util_fopen("angular.json", "w");
-    ext_job_json_fprintf(ext_job, stream, subst_list);
+    ext_job_json_fprintf(ext_job, 0, stream, subst_list);
     fclose(stream);
   }
   cJSON *json;
diff --git a/python/job_runner/job.py b/python/job_runner/job.py
index f1fc5d76d0..4c2eebae03 100644
--- a/python/job_runner/job.py
+++ b/python/job_runner/job.py
@@ -21,9 +21,9 @@ def __init__(self, job_data, index, sleep_interval=1):
         self.std_err = None
         self.std_out = None
         if "stderr" in job_data and job_data["stderr"]:
-            self.std_err = "%s.%d" % (job_data["stderr"], index)
+            self.std_err = job_data["stderr"]
         if "stdout" in job_data and job_data["stdout"]:
-            self.std_out = "%s.%d" % (job_data["stdout"], index)
+            self.std_out = job_data["stdout"]
 
     def run(self):
         start_message = Start(self)
diff --git a/python/tests/job_runner/test_file_reporter.py b/python/tests/job_runner/test_file_reporter.py
index 5f254b4c1c..60b66a7fac 100644
--- a/python/tests/job_runner/test_file_reporter.py
+++ b/python/tests/job_runner/test_file_reporter.py
@@ -36,8 +36,8 @@ def test_report_with_successful_start_message_argument(self):
             Job(
                 {
                     "name": "job1",
-                    "stdout": "/stdout",
-                    "stderr": "/stderr",
+                    "stdout": "/stdout.0",
+                    "stderr": "/stderr.0",
                     "argList": ["--foo", "1", "--bar", "2"],
                     "executable": "/bin/bash",
                 },
@@ -196,7 +196,7 @@ def test_dump_error_file_with_stderr(self):
             stderr.write("E_MASSIVE_FAILURE\n")
 
         self.reporter._dump_error_file(
-            Job({"name": "job1", "stderr": "stderr.out"}, 0), "massive_failure"
+            Job({"name": "job1", "stderr": "stderr.out.0"}, 0), "massive_failure"
         )
 
         with open(self.reporter.ERROR_file, "r") as f:
diff --git a/python/tests/job_runner/test_jobmanager.py b/python/tests/job_runner/test_jobmanager.py
index 2ef19a31d8..57d43ff112 100644
--- a/python/tests/job_runner/test_jobmanager.py
+++ b/python/tests/job_runner/test_jobmanager.py
@@ -153,13 +153,13 @@ def test_run_output_rename(self):
 
     def test_run_multiple_ok(self):
         joblist = []
         dir_list = ["1", "2", "3", "4", "5"]
-        for d in dir_list:
+        for job_index in dir_list:
             job = {
                 "name": "MKDIR",
                 "executable": "/bin/mkdir",
-                "stdout": "mkdir_out",
-                "stderr": "mkdir_err",
-                "argList": ["-p", "-v", d],
+                "stdout": "mkdir_out.{}".format(job_index),
+                "stderr": "mkdir_err.{}".format(job_index),
+                "argList": ["-p", "-v", job_index],
             }
             joblist.append(job)
@@ -171,11 +171,11 @@ def test_run_multiple_ok(self):
         for status in statuses:
             self.assertEqual(status.exit_code, 0)
 
-        for index, dir_number in enumerate(dir_list):
-            self.assertTrue(os.path.isdir(dir_list[index]))
-            self.assertTrue(os.path.isfile("mkdir_out.%d" % index))
-            self.assertTrue(os.path.isfile("mkdir_err.%d" % index))
-            self.assertEqual(0, os.path.getsize("mkdir_err.%d" % index))
+        for dir_number in dir_list:
+            self.assertTrue(os.path.isdir(dir_number))
+            self.assertTrue(os.path.isfile("mkdir_out.{}".format(dir_number)))
+            self.assertTrue(os.path.isfile("mkdir_err.{}".format(dir_number)))
+            self.assertEqual(0, os.path.getsize("mkdir_err.{}".format(dir_number)))
 
     @tmpdir(None)
     def test_run_multiple_fail_only_runs_one(self):
@@ -206,7 +206,7 @@ def test_run_multiple_fail_only_runs_one(self):
     @tmpdir(None)
     def test_given_global_env_and_update_path_executable_env_is_updated(self):
         executable = "./x.py"
-        outfile = "outfile.stdout"
+        outfile = "outfile.stdout.0"
 
         with open(executable, "w") as f:
             f.write("#!/usr/bin/env python\n")
@@ -222,7 +222,7 @@ def test_given_global_env_and_update_path_executable_env_is_updated(self):
             "name": "TEST_GET_ENV1",
             "executable": executable,
             "stdout": outfile,
-            "stderr": "outfile.stderr",
+            "stderr": "outfile.stderr.0",
             "argList": [],
         }
 
@@ -256,7 +256,7 @@ def test_given_global_env_and_update_path_executable_env_is_updated(self):
             "guard check, script must finish successfully",
         )
 
-        with open(outfile + ".0", "r") as out0:
+        with open(outfile, "r") as out0:
             content = list(out0.read().splitlines())
             self.assertEqual(content[0], "FirstValue")
             self.assertEqual(content[1], "SecondValue")
diff --git a/python/tests/res/job_queue/test_forward_model_formatted_print.py b/python/tests/res/job_queue/test_forward_model_formatted_print.py
index 795c477628..fc03049a24 100644
--- a/python/tests/res/job_queue/test_forward_model_formatted_print.py
+++ b/python/tests/res/job_queue/test_forward_model_formatted_print.py
@@ -195,18 +195,17 @@ def load_configs(config_file):
     return jobs
 
 
-def create_stdout_file(config):
-    if config["stdout"]:
-        return config["stdout"]
-    else:
-        return (config["name"] + ".stdout")
-
-
-def create_stderr_file(config):
-    if config["stderr"]:
-        return config["stderr"]
+def create_std_file(config, std="stdout", job_index=None):
+    if job_index is None:
+        if config[std]:
+            return "{}".format(config[std])
+        else:
+            return "{}.{}".format(config["name"], std)
     else:
-        return (config["name"] + ".stderr")
+        if config[std]:
+            return "{}.{}".format(config[std], job_index)
+        else:
+            return "{}.{}.{}".format(config["name"], std, job_index)
 
 
 class ForwardModelFormattedPrintTest(ResTest):
@@ -248,11 +247,11 @@ def validate_ext_job(self, ext_job, ext_job_config):
         )
         self.assertEqual(
             ext_job.get_stdout_file(),
-            create_stdout_file(ext_job_config)
+            create_std_file(ext_job_config, std="stdout")
         )
         self.assertEqual(
             ext_job.get_stderr_file(),
-            create_stderr_file(ext_job_config)
+            create_std_file(ext_job_config, std="stderr")
         )
         self.assertEqual(
             ext_job.get_stdin_file(),
@@ -304,7 +303,7 @@ def generate_job_from_dict(self, ext_job_config, private = True):
             ext_job_config["max_running"],
             get_license_root_path(ext_job_config["license_path"]),
             private
-        );
+        )
         self.validate_ext_job(ext_job, ext_job_config)
         return ext_job
 
@@ -332,9 +331,9 @@ def verify_json_dump(self, selected_jobs, global_args, umask, run_id):
         self.assertEqual(umask, int(config["umask"], 8))
         self.assertEqual(len(selected_jobs), len(config["jobList"]))
 
-        for i in range(len(selected_jobs)):
-            job = joblist[selected_jobs[i]]
-            loaded_job = config["jobList"][i]
+        for job_index in range(len(selected_jobs)):
+            job = joblist[selected_jobs[job_index]]
+            loaded_job = config["jobList"][job_index]
 
             # Since no argList is loaded as an empty list by ext_job
             arg_list_back_up = job["argList"]
@@ -345,13 +344,10 @@ def verify_json_dump(self, selected_jobs, global_args, umask, run_id):
             job["name"] = default_name_if_none(job["name"])
 
             for key in json_keywords:
-
-                if (key == "stdout"):
-                    self.assertEqual(create_stdout_file(job), loaded_job[key])
-                elif (key == "stderr"):
-                    self.assertEqual(create_stderr_file(job), loaded_job[key])
+                if (key in ["stdout", "stderr"]):
+                    self.assertEqual(create_std_file(job, std=key, job_index=job_index), loaded_job[key])
                 else:
-                    self.assertEqual(job[key], loaded_job[key])
+                    self.assertEqual(job[key], loaded_job[key])
 
             job["argList"] = arg_list_back_up
             job["name"] = name_back_up
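
Effect of the change, as a minimal sketch (not part of the patch): the ".<job_index>" suffix is now appended when ext_job_json_fprintf() writes the "stdout"/"stderr" entries of jobs.json, and python/job_runner/job.py uses those values verbatim instead of appending the index itself. The snippet below assumes a jobs.json written by forward_model_json_fprintf() in the current directory; the file name and keys come from the patch, everything else is illustrative.

    import json

    # Read jobs.json as written after this change; the stdout/stderr values
    # already carry the job index, e.g. "mkdir_out.0", "mkdir_err.0".
    with open("jobs.json") as fh:
        config = json.load(fh)

    for job_index, job in enumerate(config["jobList"]):
        # Before this patch the job runner appended ".%d" % job_index itself;
        # now the paths stored in jobs.json are used as-is.
        print(job_index, job["name"], job["stdout"], job["stderr"])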