fixed flake8 problem in .ci dir
JHofman728 committed Jun 12, 2024
1 parent abb025b commit 4b2a4c7
Showing 7 changed files with 185 additions and 164 deletions.
11 changes: 9 additions & 2 deletions .ci/scripts/disp_s1/disp_s1_compare.py
@@ -1,13 +1,16 @@
#!/usr/bin/env python
"""Compare DISP-S1 products"""
import argparse
import logging
import sys
from pathlib import Path

import h5py
import numpy as np
from dolphin import io
from dolphin._types import Filename

import h5py

import numpy as np
from numpy.typing import ArrayLike

logging.basicConfig(level=logging.INFO)
@@ -29,21 +32,25 @@ class ComparisonError(ValidationError):


def validation_failed():
    """Set flag to indicate validation failure"""
    global validation_match
    validation_match = False


def ValidationError(msg):
    """Handler function for validation failure"""
    logger.error(msg)
    validation_failed()


def ComparisonError(msg):
    """Handler function for comparison failure"""
    logger.error(msg)
    validation_failed()


def ValueError(msg):
    """Handler function for value error"""
    logger.error(msg)
    validation_failed()
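For context, a minimal, self-contained sketch of how the handler pattern above is intended to be used: log the failure, flip the module-level validation_match flag, and let the caller act on it. The array values and tolerance below are made up for illustration; a real run reads the layers from the two DISP-S1 HDF5 products.

import logging
import sys

import numpy as np

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
validation_match = True


def validation_failed():
    """Set flag to indicate validation failure"""
    global validation_match
    validation_match = False


def ComparisonError(msg):
    """Handler function for comparison failure"""
    logger.error(msg)
    validation_failed()


# Hypothetical layer values standing in for data read from the two products.
expected = np.array([1.0, 2.0, 3.0])
actual = np.array([1.0, 2.0, 3.5])
if not np.allclose(expected, actual, atol=1e-6):
    ComparisonError('DISP-S1 layer values differ beyond tolerance')
sys.exit(0 if validation_match else 1)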
9 changes: 5 additions & 4 deletions .ci/scripts/dswx_hls/dswx_hls_compare.py
@@ -1,8 +1,10 @@
#!/usr/bin/env python3

"""Compare DSWX-HLS products"""
import argparse
import numpy as np
import os

import numpy as np

from osgeo import gdal

COMPARE_DSWX_HLS_PRODUCTS_ERROR_TOLERANCE_ATOL = 1e-6
@@ -16,7 +18,7 @@


def _get_prefix_str(current_flag, flag_all_ok):
""" Return an updated cumulative flag status and an OK/FAIL string for the current flag
"""Return an updated cumulative flag status and an OK/FAIL string for the current flag
Parameters
----------
@@ -56,7 +58,6 @@ def compare_dswx_hls_products(file_1, file_2, metadata_exclude_list):
flag_all_ok: bool
Overall comparison status
"""

if not os.path.isfile(file_1):
    print(f'ERROR file not found: {file_1}')
    return False
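A hedged usage sketch of compare_dswx_hls_products based on the signature and return value shown above; the import path, file names, and the excluded metadata key are assumptions, not taken from the repository.

# All names below are placeholders.
from dswx_hls_compare import compare_dswx_hls_products

file_1 = 'expected/OPERA_L3_DSWx-HLS_expected.tif'
file_2 = 'output/OPERA_L3_DSWx-HLS_output.tif'
flag_all_ok = compare_dswx_hls_products(
    file_1, file_2, metadata_exclude_list=['PROCESSING_DATETIME'])
print('[OK]' if flag_all_ok else '[FAIL]')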
9 changes: 5 additions & 4 deletions .ci/scripts/dswx_s1/diff_dswx_files.py
@@ -25,7 +25,7 @@ def _parse_args():
if only one argument is given, it gives a warning message and aborts.
Returns
--------
-------
result : <-1 if FAIL>
<0 if HELP>
<list - string if PASS>
@@ -61,16 +61,16 @@ def get_files(options):
compares them file by file.
Notes
------
-----
Calls external python script, dswx_comparison.py, to perform the file comparison.
Parameters
------------
----------
options : <list - string>
Directory names of expected_dir and output
Returns
--------
-------
result : <-1 if FAIL>
FAILS if number of files in 2 directories are 0, or unequal.
@@ -126,6 +126,7 @@ def get_files(options):


def main():
"""Get options and start"""
options = _parse_args()
get_files(options)

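A minimal sketch of the directory check described in the get_files docstring above: fail when either directory is empty or the file counts differ, otherwise pair files for comparison. The glob pattern and helper name are assumptions; the real script hands each pair to dswx_comparison.py.

import glob
import os


def pair_product_files(expected_dir, output_dir, pattern='*.tif'):
    """Return paired (expected, output) paths, or None on count mismatch."""
    expected = sorted(glob.glob(os.path.join(expected_dir, pattern)))
    output = sorted(glob.glob(os.path.join(output_dir, pattern)))
    if not expected or len(expected) != len(output):
        return None  # the FAIL case described in the docstring
    return list(zip(expected, output))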
36 changes: 23 additions & 13 deletions .ci/scripts/dswx_s1/dswx_comparison.py
@@ -1,10 +1,10 @@
#!/usr/bin/env python3
#

"""Compare DSWX products"""
import argparse
import os
import sys

import numpy as np

from osgeo import gdal

COMPARE_DSWX_SAR_PRODUCTS_ERROR_TOLERANCE = 1e-6
@@ -44,14 +44,14 @@ def _print_first_value_diff(image_1, image_2, prefix):
"""
Print first value difference between two images.
Parameters
----------
Parameters
----------
image_1 : numpy.ndarray
First input image
First input image
image_2: numpy.ndarray
Second input image
Second input image
prefix: str
Prefix to the message printed to the user
Prefix to the message printed to the user
"""
flag_error_found = False
for i in range(image_1.shape[0]):
@@ -74,8 +74,8 @@ def _compare_dswx_sar_metadata(metadata_1, metadata_2):
"""
Compare DSWx-SAR products' metadata
Parameters
----------
Parameters
----------
metadata_1 : dict
Metadata of the first DSWx-SAR product
metadata_2: dict
@@ -122,7 +122,16 @@ def _compare_dswx_sar_metadata(metadata_1, metadata_2):


def compare_dswx_sar_products(file_1, file_2):

"""
Compare DSWx-SAR products
Parameters
----------
file_1 : dict
First DSWx-SAR product
file_2: dict
Second DSWx-SAR product
"""
if not os.path.isfile(file_1):
    print(f'ERROR file not found: {file_1}')
    return False
@@ -150,7 +159,7 @@ def compare_dswx_sar_products(file_1, file_2):
nbands_2 = layer_gdal_dataset_2.RasterCount

# compare number of bands
flag_same_nbands = nbands_1 == nbands_2
flag_same_nbands = nbands_1 == nbands_2
flag_same_nbands_str = _get_prefix_str(flag_same_nbands, flag_all_ok)
prefix = ' ' * 7
print(f'{flag_same_nbands_str}Comparing number of bands')
@@ -167,7 +176,7 @@ def compare_dswx_sar_products(file_1, file_2):
image_1 = gdal_band_1.ReadAsArray()
image_2 = gdal_band_2.ReadAsArray()
flag_bands_are_equal = np.allclose(
image_1, image_2, atol = COMPARE_DSWX_SAR_PRODUCTS_ERROR_TOLERANCE,
image_1, image_2, atol=COMPARE_DSWX_SAR_PRODUCTS_ERROR_TOLERANCE,
equal_nan=True)
flag_bands_are_equal_str = _get_prefix_str(flag_bands_are_equal,
flag_all_ok)
@@ -211,6 +220,7 @@ def compare_dswx_sar_products(file_1, file_2):


def main():
"""Parse arguments and compare 2 files"""
parser = _get_parser()

args = parser.parse_args()
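The band-by-band check above rests on numpy.allclose with an absolute tolerance and NaN-aware equality; a standalone sketch of that pattern with made-up arrays:

import numpy as np

COMPARE_DSWX_SAR_PRODUCTS_ERROR_TOLERANCE = 1e-6

image_1 = np.array([[0.0, 1.0], [np.nan, 2.0]])
image_2 = np.array([[0.0, 1.0 + 5e-7], [np.nan, 2.0]])

flag_bands_are_equal = np.allclose(
    image_1, image_2, atol=COMPARE_DSWX_SAR_PRODUCTS_ERROR_TOLERANCE,
    equal_nan=True)
print(flag_bands_are_equal)  # True: within tolerance and NaNs treated as equal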
126 changes: 64 additions & 62 deletions .ci/scripts/metrics/plot_metric_data.py
@@ -1,10 +1,13 @@
#!/usr/bin/env python3
"""Plat functions for metrics generation"""
# import datetime
import os
import sys

import datetime
import matplotlib.pyplot as plt
import os

import pandas
import sys


def generate_plots_from_csv_file(metrics_csv_file, metrics_plot_file):
"""Generate plots of the metrics collected in the .csv file
@@ -16,14 +19,13 @@ def generate_plots_from_csv_file(metrics_csv_file, metrics_plot_file):
metrics_plot_file: str
Path to output plot file. Matplotlib will use the extension to determine saved format.
"""

columns = "SECONDS,Name,PIDs,CPU,Memory,MemoryP,NetSend,NetRecv,DiskRead,DiskWrite,Disk,Swap,Threads"
convert = {'SECONDS':int(),'Name':str(),'PIDs':int(),'CPU':float(),'Memory':float(),
'MemoryP':float(),'NetSend':float(),'NetRecv':float(),'DiskRead':float(),
'DiskWrite':float(),'Disk':int(),'Swap':int(),'Threads':int()}
convert = {'SECONDS': int(), 'Name': str(), 'PIDs': int(), 'CPU': float(), 'Memory': float(),
'MemoryP': float(), 'NetSend': float(), 'NetRecv': float(), 'DiskRead': float(),
'DiskWrite': float(), 'Disk': int(), 'Swap': int(), 'Threads': int()}

# read in the new data and make lists out of the columns for analysis
colnames = columns.split(',')
colnames: list[str] = columns.split(',')

data = pandas.read_csv(metrics_csv_file, header=1, names=colnames, converters=convert)

@@ -46,104 +48,104 @@ def generate_plots_from_csv_file(metrics_csv_file, metrics_plot_file):
max_pids = max(pids)
max_cpu = max(cpu)
max_mem = max(mem)
max_mem_p = max(mem_p)
max_net_s = max(net_s)
max_net_r = max(net_r)
max_disk_r = max(disk_r)
max_disk_w = max(disk_w)
# max_mem_p = max(mem_p)
# max_net_s = max(net_s)
# max_net_r = max(net_r)
# max_disk_r = max(disk_r)
# max_disk_w = max(disk_w)
max_disk = round(max(disk), 2)
min_disk = min(disk)
max_swap = max(swap)
max_threads = max(threads)

duration_s = secs[-1]
duration_hms = str(datetime.timedelta(seconds=duration_s))
# duration_s = secs[-1]
# duration_hms = str(datetime.timedelta(seconds=duration_s))

disk_used = round(max_disk - min_disk, 2)

# create list of plots to create
pl = [
{
'y' : pids,
'title' : 'Container Process IDs (max {})'.format(max_pids),
'xlabel' : 'Seconds',
'ylabel' : '# Processes'
'y': pids,
'title': 'Container Process IDs (max {})'.format(max_pids),
'xlabel': 'Seconds',
'ylabel': '# Processes'
},
{
'y' : threads,
'title' : 'Host System Threads (max {})'.format(max_threads),
'xlabel' : 'Seconds',
'ylabel' : '# Threads'
'y': threads,
'title': 'Host System Threads (max {})'.format(max_threads),
'xlabel': 'Seconds',
'ylabel': '# Threads'
},
{
'y' : cpu,
'title' : 'Container CPU % (max {})'.format(max_cpu),
'xlabel' : 'Seconds',
'ylabel' : 'CPU % Usage'
'y': cpu,
'title': 'Container CPU % (max {})'.format(max_cpu),
'xlabel': 'Seconds',
'ylabel': 'CPU % Usage'
},
{
'y' : mem,
'title' : 'Container Memory (max {:.2f} GB)'.format(max_mem),
'xlabel' : 'Seconds',
'ylabel' : 'Memory GB'
'y': mem,
'title': 'Container Memory (max {:.2f} GB)'.format(max_mem),
'xlabel': 'Seconds',
'ylabel': 'Memory GB'
},
{
'y' : mem_p,
'title' : 'Container Memory %',
'xlabel' : 'Seconds',
'ylabel' : 'Memory %'
'y': mem_p,
'title': 'Container Memory %',
'xlabel': 'Seconds',
'ylabel': 'Memory %'
},
{
'y' : swap,
'title' : 'Host System Swap Used (max {} GB)'.format(max_swap),
'xlabel' : 'Seconds',
'ylabel' : 'Swap Used GB'
'y': swap,
'title': 'Host System Swap Used (max {} GB)'.format(max_swap),
'xlabel': 'Seconds',
'ylabel': 'Swap Used GB'
},
{
'y' : disk,
'title' : 'Host System Disk, max {} GB (Container start/end delta {} GB)'.format(max_disk, disk_used),
'xlabel' : 'Seconds',
'ylabel' : 'Disk GB'
'y': disk,
'title': 'Host System Disk, max {} GB (Container start/end delta {} GB)'.format(max_disk, disk_used),
'xlabel': 'Seconds',
'ylabel': 'Disk GB'
},
{
'y' : disk_r,
'title' : 'Container Disk Read',
'xlabel' : 'Seconds',
'ylabel' : 'Disk Read GB'
'y': disk_r,
'title': 'Container Disk Read',
'xlabel': 'Seconds',
'ylabel': 'Disk Read GB'
},
{
'y' : disk_w,
'title' : 'Container Disk Write',
'xlabel' : 'Seconds',
'ylabel' : 'Disk Write GB'
'y': disk_w,
'title': 'Container Disk Write',
'xlabel': 'Seconds',
'ylabel': 'Disk Write GB'
},
{
'y' : net_r,
'title' : 'Container Net Recv',
'xlabel' : 'Seconds',
'ylabel' : 'Net Recv GB'
'y': net_r,
'title': 'Container Net Recv',
'xlabel': 'Seconds',
'ylabel': 'Net Recv GB'
},
{
'y' : net_s,
'title' : 'Container Net Send',
'xlabel' : 'Seconds',
'ylabel' : 'Net Send GB'
'y': net_s,
'title': 'Container Net Send',
'xlabel': 'Seconds',
'ylabel': 'Net Send GB'
}
]

# create figure with plots of data
plot_width = 12
plot_height = 5
fig, axs = plt.subplots(len(pl), figsize=(plot_width, plot_height*(len(pl))))
fig, axs = plt.subplots(len(pl), figsize=(plot_width, plot_height * (len(pl))))
fig.suptitle(os.path.basename(metrics_csv_file))
x = secs

for i in range(len(pl)):
y = pl[i]['y']
axs[i].set_title(pl[i]['title'])
axs[i].grid(True)
axs[i].plot(x,y,'.-')
axs[i].set(xlabel=pl[i]['xlabel'],ylabel=pl[i]['ylabel'])
axs[i].plot(x, y, '.-')
axs[i].set(xlabel=pl[i]['xlabel'], ylabel=pl[i]['ylabel'])

plt.tight_layout(rect=[0, 0.03, 1, 0.95])
plt.savefig(metrics_plot_file)
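A hedged usage sketch of generate_plots_from_csv_file; the file names below are placeholders, and the real CSV is produced by the metrics collection step of the CI run.

# Hypothetical paths; Matplotlib infers the output format from the extension.
from plot_metric_data import generate_plots_from_csv_file

generate_plots_from_csv_file('opera_pge_metrics.csv', 'opera_pge_metrics.png')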
