Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add a table for completed jobs #460

Open
wants to merge 7 commits into
base: development
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 5 additions & 3 deletions manager.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import argparse

from plotmanager.library.utilities.exceptions import InvalidArgumentException
from plotmanager.library.utilities.commands import start_manager, stop_manager, view, json_output, analyze_logs
from plotmanager.library.utilities.commands import start_manager, stop_manager, view, json_output, analyze_logs, view_history


parser = argparse.ArgumentParser(description='This is the central manager for Swar\'s Chia Plot Manager.')
Expand Down Expand Up @@ -41,7 +41,9 @@
view(loop=False)
elif args.action == 'analyze_logs':
analyze_logs()
elif args.action == 'history':
view_history()
else:
error_message = 'Invalid action provided. The valid options are "start", "restart", "stop", "view", "status", "json" and ' \
'"analyze_logs".'
error_message = 'Invalid action provided. The valid options are "start", "restart", "stop", "view", "status", "json", ' \
'"analyze_logs", and "history".'
raise InvalidArgumentException(error_message)
13 changes: 11 additions & 2 deletions plotmanager/library/utilities/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,9 @@
from plotmanager.library.utilities.configuration import test_configuration
from plotmanager.library.utilities.exceptions import ManagerError, TerminationException
from plotmanager.library.utilities.jobs import load_jobs
from plotmanager.library.utilities.log import analyze_log_dates, check_log_progress, analyze_log_times
from plotmanager.library.utilities.log import analyze_log_dates, analyze_log_date_history, check_log_progress, analyze_log_times
from plotmanager.library.utilities.notifications import send_notifications
from plotmanager.library.utilities.print import print_view, print_json
from plotmanager.library.utilities.print import print_view, print_json, get_job_history
from plotmanager.library.utilities.processes import is_windows, get_manager_processes, get_running_plots, \
start_process, identify_drive, get_system_drives

Expand Down Expand Up @@ -187,6 +187,15 @@ def view(loop=True):
exit()


def view_history():
    """Print a table of every completed plot job found in the log directory."""
    # get_config_info returns a fixed 12-tuple; only log_directory and
    # view_settings are needed here, but the full unpack validates its shape.
    chia_location, log_directory, config_jobs, manager_check_interval, max_concurrent, max_for_phase_1, \
        minimum_minutes_between_jobs, progress_settings, notification_settings, debug_level, view_settings, \
        instrumentation_settings = get_config_info()
    history = analyze_log_date_history(log_directory=log_directory, analysis={'files': {}})
    print(get_job_history(history, view_settings))


def analyze_logs():
chia_location, log_directory, config_jobs, manager_check_interval, max_concurrent, max_for_phase_1, \
minimum_minutes_between_jobs, progress_settings, notification_settings, debug_level, view_settings, \
Expand Down
130 changes: 130 additions & 0 deletions plotmanager/library/utilities/log.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,125 @@ def _analyze_log_end_date(contents):
)


def _parse_phase_end(phase_number, contents):
    """Parse the 'Time for phase N = ...' summary line of a plotter log.

    :param phase_number: phase index, 1 through 4.
    :param contents: full text of the log file.
    :return: ``(elapsed_seconds, end_datetime)`` tuple, or ``None`` when the
        line is absent (the plot never finished that phase).
    """
    match = re.search(
        rf'Time for phase {phase_number} = ([\d\.]+) seconds\. CPU \([\d\.]+%\) [A-Za-z]+\s([^\n]+)\n',
        contents,
        flags=re.I,
    )
    if not match:
        return None
    return int(float(match.groups()[0])), dateparser.parse(match.groups()[1])


def _analyze_log_file(contents):
    """Extract a completed plot's summary data from one plotter log file.

    :param contents: full text of the log file.
    :return: dict of id, plot size, buffer, threads, temp dirs, working space,
        per-phase timings and end date; ``None`` when any expected line is
        missing (incomplete or foreign log).

        NOTE: the top-level ``total_seconds`` is the pretty-printed *string*
        from ``pretty_print_time`` (a display value), whereas each phase's
        ``total_seconds`` is a raw int — ``_get_history_row_info`` relies on
        exactly this mix.
    """
    # Returning None (not False) on failure so callers can use either an
    # ``is None`` check or plain falsiness to skip unparseable logs.

    # ID
    match = re.search(r'ID: (\w+)\n', contents, flags=re.I)
    if not match:
        return None
    id_val = match.groups()[0]

    # Plot size (the "k" value)
    match = re.search(r'Plot size is: (\d+)\n', contents, flags=re.I)
    if not match:
        return None
    plot_size = int(match.groups()[0])

    # Buffer size in MiB
    match = re.search(r'Buffer size is: (\d+)MiB\n', contents, flags=re.I)
    if not match:
        return None
    buffer_size = int(match.groups()[0])

    # Thread count
    match = re.search(r'Using (\d+) threads of stripe size \d+\n', contents, flags=re.I)
    if not match:
        return None
    threads = int(match.groups()[0])

    # Temporary directories
    match = re.search(r'Starting plotting progress into temporary dirs: (.*) and (.*)\n', contents, flags=re.I)
    if not match:
        return None
    temp_dir_1, temp_dir_2 = match.groups()

    # Approximate working space used
    match = re.search(r'Approximate working space used \(without final file\): ([\d\.]+) GiB\n', contents, flags=re.I)
    if not match:
        return None
    working_space = float(match.groups()[0])

    # Phases 1 and 2 announce their own start line; phases 3 and 4 start the
    # moment the previous phase ends, so their starts are derived below.
    match = re.search(r'Starting phase 1/4: Forward Propagation into tmp files\.\.\. [A-Za-z]+\s([^\n]+)\n',
                      contents, flags=re.I)
    if not match:
        return None
    phase_1_start = dateparser.parse(match.groups()[0])

    match = re.search(r'Starting phase 2/4: Backpropagation into tmp files\.\.\. [A-Za-z]+\s([^\n]+)\n',
                      contents, flags=re.I)
    if not match:
        return None
    phase_2_start = dateparser.parse(match.groups()[0])

    # End time and duration for every phase; bail out if any is incomplete.
    phase_ends = {}
    for phase in (1, 2, 3, 4):
        parsed = _parse_phase_end(phase, contents)
        if parsed is None:
            return None
        phase_ends[phase] = parsed

    # Total time (pretty-printed for display) and plot completion date.
    match = re.search(r'Total time = ([\d\.]+) seconds\. CPU \([\d\.]+%\) [A-Za-z]+\s([^\n]+)\n',
                      contents, flags=re.I)
    if not match:
        return None
    total_seconds = pretty_print_time(int(float(match.groups()[0])))
    end_date = dateparser.parse(match.groups()[1])

    return dict(
        id=id_val,
        plot_size=plot_size,
        buffer_size=buffer_size,
        threads=threads,
        working_space=working_space,
        temp_dirs=[temp_dir_1, temp_dir_2],
        phase1=dict(
            start=phase_1_start,
            total_seconds=phase_ends[1][0],
            end_date=phase_ends[1][1],
        ),
        phase2=dict(
            start=phase_2_start,
            total_seconds=phase_ends[2][0],
            end_date=phase_ends[2][1],
        ),
        phase3=dict(
            start=phase_ends[2][1],
            total_seconds=phase_ends[3][0],
            end_date=phase_ends[3][1],
        ),
        phase4=dict(
            start=phase_ends[3][1],
            total_seconds=phase_ends[4][0],
            end_date=phase_ends[4][1],
        ),
        total_seconds=total_seconds,
        date=end_date,
    )


def _get_date_summary(analysis):
summary = analysis.get('summary', {})
for file_path in analysis['files'].keys():
Expand Down Expand Up @@ -78,6 +197,17 @@ def analyze_log_dates(log_directory, analysis):
return analysis


def analyze_log_date_history(log_directory, analysis):
    """Parse every completed log file not already present in ``analysis``.

    :param log_directory: directory containing the plotter log files.
    :param analysis: dict with a 'files' mapping of already-processed paths;
        updated in place and returned with a refreshed date summary.
    :return: the updated analysis dict.
    """
    files = get_completed_log_files(log_directory, skip=list(analysis['files'].keys()))
    for file_path, contents in files.items():
        data = _analyze_log_file(contents)
        # _analyze_log_file signals an unparseable or incomplete log with a
        # falsy value (historically False, now None); a plain truthiness test
        # skips both instead of recording a bogus entry for the file.
        if not data:
            continue
        analysis['files'][file_path] = {'data': data, 'checked': False}
    analysis = _get_date_summary(analysis)
    return analysis


def analyze_log_times(log_directory):
total_times = {1: 0, 2: 0, 3: 0, 4: 0}
line_numbers = {1: [], 2: [], 3: [], 4: []}
Expand Down
32 changes: 32 additions & 0 deletions plotmanager/library/utilities/print.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,38 @@ def pretty_print_job_data(job_data):
return pretty_print_table(rows)


def _get_history_row_info(analysis_data, view_settings):
    """Build one display row (list of strings) for a completed plot job."""
    date_format = view_settings['datetime_format']
    # Phase durations joined into a single "a / b / c / d" cell.
    per_phase = ' / '.join(
        pretty_print_time(analysis_data[f'phase{n}']['total_seconds']) for n in range(1, 5)
    )
    cells = (
        analysis_data['plot_size'],
        analysis_data['phase1']['start'].strftime(date_format),
        analysis_data['date'].strftime(date_format),
        analysis_data['total_seconds'],
        per_phase,
        f'{int(analysis_data["buffer_size"])} MiB',
        analysis_data['threads'],
        f'{int(analysis_data["working_space"])} GiB',
        analysis_data['temp_dirs'][0],
    )
    return [str(cell) for cell in cells]


def get_job_history(analysis, view_settings):
    """Render the completed-jobs table, sorted by each job's start time."""
    headers = ['k', 'start', 'end', 'total_time', 'phase_times', 'buffer', 'threads', 'working_space', 'temp_dir']
    # Column 1 is the formatted start timestamp; sorting on it keeps the
    # table in chronological order (for lexicographically sortable formats).
    body = sorted(
        (_get_history_row_info(entry['data'], view_settings) for entry in analysis['files'].values()),
        key=lambda row: row[1],
    )
    return pretty_print_table([headers] + body)


def get_drive_data(drives, running_work, job_data):
headers = ['type', 'drive', 'used', 'total', '%', '#', 'temp', 'dest']
rows = []
Expand Down