
Add job index number to jobs.json
hnformentin committed Jul 28, 2020
1 parent a99ad36 commit f4551b3
Showing 8 changed files with 56 additions and 50 deletions.
2 changes: 1 addition & 1 deletion lib/include/ert/job_queue/ext_job.hpp
@@ -51,7 +51,7 @@ void ext_job_set_private_arg(ext_job_type * , const char * ,

void ext_job_set_argc(ext_job_type * , const char ** , int);
void ext_job_python_fprintf(const ext_job_type * , FILE * , const subst_list_type *);
void ext_job_json_fprintf(const ext_job_type*, FILE*, const subst_list_type*);
void ext_job_json_fprintf(const ext_job_type*, int job_index, FILE*, const subst_list_type*);
ext_job_type * ext_job_fscanf_alloc(const char * , const char * , bool private_job , const char *, bool search_path);
const stringlist_type * ext_job_get_arglist( const ext_job_type * ext_job );
bool ext_job_is_shared( const ext_job_type * ext_job );
16 changes: 13 additions & 3 deletions lib/job_queue/ext_job.cpp
@@ -866,17 +866,24 @@ static void __fprintf_python_arg_types(FILE * stream,
}


void ext_job_json_fprintf(const ext_job_type * ext_job, FILE * stream, const subst_list_type * global_args) {
void ext_job_json_fprintf(const ext_job_type * ext_job, int job_index, FILE * stream, const subst_list_type * global_args) {
const char * null_value = "null";

char * file_stdout_index = NULL;
char * file_stderr_index = NULL;

file_stdout_index = util_alloc_sprintf("%s.%d",ext_job->stdout_file, job_index);
file_stderr_index = util_alloc_sprintf("%s.%d",ext_job->stderr_file, job_index);

fprintf(stream," {");
{
__fprintf_python_string( stream, "", "name", ext_job->name, ",\n", ext_job->private_args, NULL, null_value);
__fprintf_python_string( stream, " ", "executable", ext_job->executable, ",\n", ext_job->private_args, global_args, null_value);
__fprintf_python_string( stream, " ", "target_file", ext_job->target_file, ",\n", ext_job->private_args, global_args, null_value);
__fprintf_python_string( stream, " ", "error_file", ext_job->error_file, ",\n", ext_job->private_args, global_args, null_value);
__fprintf_python_string( stream, " ", "start_file", ext_job->start_file, ",\n", ext_job->private_args, global_args, null_value);
__fprintf_python_string( stream, " ", "stdout", ext_job->stdout_file, ",\n", ext_job->private_args, global_args, null_value);
__fprintf_python_string( stream, " ", "stderr", ext_job->stderr_file, ",\n", ext_job->private_args, global_args, null_value);
__fprintf_python_string( stream, " ", "stdout", file_stdout_index, ",\n", ext_job->private_args, global_args, null_value);
__fprintf_python_string( stream, " ", "stderr", file_stderr_index, ",\n", ext_job->private_args, global_args, null_value);
__fprintf_python_string( stream, " ", "stdin", ext_job->stdin_file, ",\n", ext_job->private_args, global_args, null_value);
__fprintf_python_argList( stream, " ", ext_job, ",\n", global_args );
__fprintf_python_hash( stream, " ", "environment", ext_job->environment, ",\n", ext_job->private_args, global_args, null_value);
@@ -892,6 +899,9 @@ void ext_job_json_fprintf(const ext_job_type * ext_job, FILE * stream, const sub

}
fprintf(stream,"}");

free( file_stdout_index );
free( file_stderr_index );
}


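With this commit the job-index suffix on stdout/stderr is added when jobs.json is written (in ext_job_json_fprintf) rather than later by the Python runner. A minimal sketch of one resulting jobList entry, written as a Python dict for illustration; the MKDIR job and file names are borrowed from the tests further down, and the entry is trimmed to the keys relevant here:

# Sketch of one "jobList" entry as it would now appear in jobs.json
# (trimmed; only the keys touched by this commit are shown):
job_entry = {
    "name": "MKDIR",
    "executable": "/bin/mkdir",
    "stdout": "mkdir_out.0",   # <stdout_file>.<job_index>, suffix written by ext_job_json_fprintf
    "stderr": "mkdir_err.0",   # <stderr_file>.<job_index>
    "argList": ["-p", "-v", "1"],
}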
10 changes: 5 additions & 5 deletions lib/job_queue/forward_model.cpp
@@ -183,18 +183,18 @@ static void forward_model_json_fprintf(const forward_model_type * forward_model,
const env_varlist_type * varlist) {
char * json_file = (char*)util_alloc_filename(path , DEFAULT_JOB_JSON, NULL);
FILE * stream = util_fopen(json_file, "w");
int i;
int job_index;

fprintf(stream, "{\n");

fprintf(stream, "\"umask\" : \"%04o\",\n", umask);
fprintf(stream, "\"DATA_ROOT\": \"%s\",\n", data_root);
env_varlist_json_fprintf(varlist, stream); fprintf(stream, ",\n");
fprintf(stream, "\"jobList\" : [");
for (i=0; i < vector_get_size(forward_model->jobs); i++) {
const ext_job_type * job = (const ext_job_type*)vector_iget_const(forward_model->jobs , i);
ext_job_json_fprintf(job , stream , global_args);
if (i < (vector_get_size( forward_model->jobs ) - 1))
for (job_index=0; job_index < vector_get_size(forward_model->jobs); job_index++) {
const ext_job_type * job = (const ext_job_type*)vector_iget_const(forward_model->jobs , job_index);
ext_job_json_fprintf(job , job_index, stream , global_args);
if (job_index < (vector_get_size( forward_model->jobs ) - 1))
fprintf(stream,",\n");
}
fprintf(stream, "],\n");
2 changes: 1 addition & 1 deletion lib/job_queue/tests/ext_job_test.cpp
@@ -44,7 +44,7 @@ void test_angular() {
ext_job_type * ext_job = ext_job_fscanf_alloc("ANGULAR", NULL, false, "ANGULAR", false);
{
FILE * stream = util_fopen("angular.json", "w");
ext_job_json_fprintf(ext_job, stream, subst_list);
ext_job_json_fprintf(ext_job, 0, stream, subst_list);
fclose(stream);
}
cJSON *json;
4 changes: 2 additions & 2 deletions python/job_runner/job.py
@@ -21,9 +21,9 @@ def __init__(self, job_data, index, sleep_interval=1):
self.std_err = None
self.std_out = None
if "stderr" in job_data and job_data["stderr"]:
self.std_err = "%s.%d" % (job_data["stderr"], index)
self.std_err = job_data["stderr"]
if "stdout" in job_data and job_data["stdout"]:
self.std_out = "%s.%d" % (job_data["stdout"], index)
self.std_out = job_data["stdout"]

def run(self):
start_message = Start(self)
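Because the index is now baked into the names in jobs.json, the runner-side Job stops appending it. A small sketch of the new behaviour, assuming a job_data dict shaped like the test data in this commit:

job_data = {"name": "job1", "stdout": "/stdout.0", "stderr": "/stderr.0"}

# Previously: self.std_out = "%s.%d" % (job_data["stdout"], index)
# Now the name from jobs.json is used verbatim:
std_out = job_data["stdout"] if ("stdout" in job_data and job_data["stdout"]) else None
std_err = job_data["stderr"] if ("stderr" in job_data and job_data["stderr"]) else None

assert std_out == "/stdout.0"
assert std_err == "/stderr.0"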
6 changes: 3 additions & 3 deletions python/tests/job_runner/test_file_reporter.py
@@ -36,8 +36,8 @@ def test_report_with_successful_start_message_argument(self):
Job(
{
"name": "job1",
"stdout": "/stdout",
"stderr": "/stderr",
"stdout": "/stdout.0",
"stderr": "/stderr.0",
"argList": ["--foo", "1", "--bar", "2"],
"executable": "/bin/bash",
},
@@ -196,7 +196,7 @@ def test_dump_error_file_with_stderr(self):
stderr.write("E_MASSIVE_FAILURE\n")

self.reporter._dump_error_file(
Job({"name": "job1", "stderr": "stderr.out"}, 0), "massive_failure"
Job({"name": "job1", "stderr": "stderr.out.0"}, 0), "massive_failure"
)

with open(self.reporter.ERROR_file, "r") as f:
24 changes: 12 additions & 12 deletions python/tests/job_runner/test_jobmanager.py
@@ -153,13 +153,13 @@ def test_run_output_rename(self):
def test_run_multiple_ok(self):
joblist = []
dir_list = ["1", "2", "3", "4", "5"]
for d in dir_list:
for job_index in dir_list:
job = {
"name": "MKDIR",
"executable": "/bin/mkdir",
"stdout": "mkdir_out",
"stderr": "mkdir_err",
"argList": ["-p", "-v", d],
"stdout": "mkdir_out.{}".format(job_index),
"stderr": "mkdir_err.{}".format(job_index),
"argList": ["-p", "-v", job_index],
}
joblist.append(job)
create_jobs_json(joblist)
@@ -171,11 +171,11 @@ def test_run_multiple_ok(self):
for status in statuses:
self.assertEqual(status.exit_code, 0)

for index, dir_number in enumerate(dir_list):
self.assertTrue(os.path.isdir(dir_list[index]))
self.assertTrue(os.path.isfile("mkdir_out.%d" % index))
self.assertTrue(os.path.isfile("mkdir_err.%d" % index))
self.assertEqual(0, os.path.getsize("mkdir_err.%d" % index))
for dir_number in dir_list:
self.assertTrue(os.path.isdir(dir_number))
self.assertTrue(os.path.isfile("mkdir_out.{}".format(dir_number)))
self.assertTrue(os.path.isfile("mkdir_err.{}".format(dir_number)))
self.assertEqual(0, os.path.getsize("mkdir_err.{}".format(dir_number)))

@tmpdir(None)
def test_run_multiple_fail_only_runs_one(self):
@@ -206,7 +206,7 @@ def test_run_multiple_fail_only_runs_one(self):
@tmpdir(None)
def test_given_global_env_and_update_path_executable_env_is_updated(self):
executable = "./x.py"
outfile = "outfile.stdout"
outfile = "outfile.stdout.0"

with open(executable, "w") as f:
f.write("#!/usr/bin/env python\n")
@@ -222,7 +222,7 @@ def test_given_global_env_and_update_path_executable_env_is_updated(self):
"name": "TEST_GET_ENV1",
"executable": executable,
"stdout": outfile,
"stderr": "outfile.stderr",
"stderr": "outfile.stderr.0",
"argList": [],
}

@@ -256,7 +256,7 @@ def test_given_global_env_and_update_path_executable_env_is_updated(self):
"guard check, script must finish successfully",
)

with open(outfile + ".0", "r") as out0:
with open(outfile, "r") as out0:
content = list(out0.read().splitlines())
self.assertEqual(content[0], "FirstValue")
self.assertEqual(content[1], "SecondValue")
42 changes: 19 additions & 23 deletions python/tests/res/job_queue/test_forward_model_formatted_print.py
@@ -195,18 +195,17 @@ def load_configs(config_file):
return jobs


def create_stdout_file(config):
if config["stdout"]:
return config["stdout"]
else:
return (config["name"] + ".stdout")


def create_stderr_file(config):
if config["stderr"]:
return config["stderr"]
def create_std_file(config, std="stdout", job_index=None):
if job_index is None:
if config[std]:
return "{}".format(config[std])
else:
return "{}.{}".format(config["name"], std)
else:
return (config["name"] + ".stderr")
if config[std]:
return "{}.{}".format(config[std], job_index)
else:
return "{}.{}.{}".format(config["name"], std, job_index)


class ForwardModelFormattedPrintTest(ResTest):
@@ -248,11 +247,11 @@ def validate_ext_job(self, ext_job, ext_job_config):
)
self.assertEqual(
ext_job.get_stdout_file(),
create_stdout_file(ext_job_config)
create_std_file(ext_job_config, std="stdout")
)
self.assertEqual(
ext_job.get_stderr_file(),
create_stderr_file(ext_job_config)
create_std_file(ext_job_config, std="stderr")
)
self.assertEqual(
ext_job.get_stdin_file(),
@@ -304,7 +303,7 @@ def generate_job_from_dict(self, ext_job_config, private = True):
ext_job_config["max_running"],
get_license_root_path(ext_job_config["license_path"]),
private
);
)

self.validate_ext_job(ext_job, ext_job_config)
return ext_job
@@ -332,9 +331,9 @@ def verify_json_dump(self, selected_jobs, global_args, umask, run_id):
self.assertEqual(umask, int(config["umask"], 8))
self.assertEqual(len(selected_jobs), len(config["jobList"]))

for i in range(len(selected_jobs)):
job = joblist[selected_jobs[i]]
loaded_job = config["jobList"][i]
for job_index in range(len(selected_jobs)):
job = joblist[selected_jobs[job_index]]
loaded_job = config["jobList"][job_index]

# Since no argList is loaded as an empty list by ext_job
arg_list_back_up = job["argList"]
@@ -345,13 +344,10 @@ def verify_json_dump(self, selected_jobs, global_args, umask, run_id):
job["name"] = default_name_if_none(job["name"])

for key in json_keywords:

if (key == "stdout"):
self.assertEqual(create_stdout_file(job), loaded_job[key])
elif (key == "stderr"):
self.assertEqual(create_stderr_file(job), loaded_job[key])
if (key in ["stdout", "stderr"]):
self.assertEqual(create_std_file(job, std=key, job_index=job_index), loaded_job[key])
else:
self.assertEqual(job[key], loaded_job[key])
self.assertEqual(job[key], loaded_job[key])

job["argList"] = arg_list_back_up
job["name"] = name_back_up
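For reference, a short usage sketch of the merged create_std_file helper above; the config values are illustrative (the ANGULAR name appears in ext_job_test.cpp, the rest is made up):

config = {"name": "ANGULAR", "stdout": None, "stderr": "angular_err"}

create_std_file(config, std="stdout")               # -> "ANGULAR.stdout"
create_std_file(config, std="stdout", job_index=2)  # -> "ANGULAR.stdout.2"
create_std_file(config, std="stderr", job_index=2)  # -> "angular_err.2"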
