
Commit

refactor: apply fixes for flake8 comprehensions (C4) (#2376)
This PR applies the automated "unsafe fix" suggestions from ruff check for the C4 (flake8-comprehensions) rules. All of the changes look like good simplifications.
mwtoews authored Nov 18, 2024
1 parent b8d3707 commit 22b5992
Showing 40 changed files with 236 additions and 328 deletions.
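For orientation, the diffs below all follow a handful of recurring comprehension patterns that the C4 (flake8-comprehensions) rules target. The sketch below is illustrative only: the variable names are made up, the C4xx codes in the comments are an approximate mapping to the individual rules, and the exact ruff invocation is not recorded in the PR (something along the lines of "ruff check --select C4 --fix --unsafe-fixes" produces these rewrites).

xs = [0.0, 1.0, 2.0]
ys = [3.0, 4.0, 5.0]

# C416: a comprehension that only repackages an iterable
pairs = [(x, y) for x, y in zip(xs, ys)]   # before
pairs = list(zip(xs, ys))                  # after

arrays = ["a", "b", "c"]

# C416: the same idea for dict comprehensions over enumerate()
per_data = {i: arr for i, arr in enumerate(arrays)}   # before
per_data = dict(enumerate(arrays))                    # after

lines = ["wel package", "riv package"]

# C419: any()/all() accept generator expressions, so the inner list is unnecessary
found = any(["wel" in line for line in lines])   # before
found = any("wel" in line for line in lines)     # after

# C408: empty-collection calls become literals
cache = dict()   # before
cache = {}       # after

table = {"k": 1, "i": 2, "j": 3}

# C414: sorted() already returns a list, so the inner list() call is redundant
keys = sorted(list(table.keys()))   # before
keys = sorted(table.keys())         # after

pkgs = ["dis", "npf", "ic"]

# C403: set() wrapped around a list comprehension becomes a set comprehension
upper = set([p.upper() for p in pkgs])   # before
upper = {p.upper() for p in pkgs}        # after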
4 changes: 2 additions & 2 deletions .docs/Notebooks/dis_triangle_example.py
@@ -53,7 +53,7 @@
radius = 100.0
x = radius * np.cos(theta)
y = radius * np.sin(theta)
circle_poly = [(x, y) for x, y in zip(x, y)]
circle_poly = list(zip(x, y))
fig = plt.figure(figsize=(10, 10))
ax = plt.subplot(1, 1, 1, aspect="equal")
ax.plot(x, y, "bo-")
@@ -94,7 +94,7 @@
radius = 30.0
x = radius * np.cos(theta) + 25.0
y = radius * np.sin(theta) + 25.0
inner_circle_poly = [(x, y) for x, y in zip(x, y)]
inner_circle_poly = list(zip(x, y))

# The hole is created by passing in another polygon and
# then passing a point inside the hole polygon with the
8 changes: 4 additions & 4 deletions .docs/Notebooks/dis_voronoi_example.py
@@ -314,7 +314,7 @@
radius = 100.0
x = radius * np.cos(theta)
y = radius * np.sin(theta)
circle_poly = [(x, y) for x, y in zip(x, y)]
circle_poly = list(zip(x, y))
tri = Triangle(maximum_area=5, angle=30, model_ws=workspace)
tri.add_polygon(circle_poly)
tri.build(verbose=False)
@@ -336,7 +336,7 @@
radius = 30.0
x = radius * np.cos(theta) + 25.0
y = radius * np.sin(theta) + 25.0
inner_circle_poly = [(x, y) for x, y in zip(x, y)]
inner_circle_poly = list(zip(x, y))

tri = Triangle(maximum_area=10, angle=30, model_ws=workspace)
tri.add_polygon(circle_poly)
@@ -396,7 +396,7 @@
radius = 10.0
x = radius * np.cos(theta) + 50.0
y = radius * np.sin(theta) + 70.0
circle_poly0 = [(x, y) for x, y in zip(x, y)]
circle_poly0 = list(zip(x, y))
tri.add_polygon(circle_poly0)
tri.add_hole((50, 70))

@@ -405,7 +405,7 @@
radius = 10.0
x = radius * np.cos(theta) + 70.0
y = radius * np.sin(theta) + 20.0
circle_poly1 = [(x, y) for x, y in zip(x, y)]
circle_poly1 = list(zip(x, y))
tri.add_polygon(circle_poly1)
# tri.add_hole((70, 20))

2 changes: 1 addition & 1 deletion .docs/Notebooks/groundwater2023_watershed_example.py
@@ -94,7 +94,7 @@ def densify_geometry(line, step, keep_internal_nodes=True):
def set_idomain(grid, boundary):
ix = GridIntersect(grid, method="vertex", rtree=True)
result = ix.intersect(Polygon(boundary))
idx = [coords for coords in result.cellids]
idx = list(result.cellids)
idx = np.array(idx, dtype=int)
nr = idx.shape[0]
if idx.ndim == 1:
2 changes: 1 addition & 1 deletion .docs/Notebooks/vtk_pathlines_example.py
@@ -221,7 +221,7 @@ def fill_zone_1():
# +
tracks = {}
particle_ids = set()
release_locs = list()
release_locs = []

for i, t in enumerate(pathlines["time"]):
pid = str(round(float(pathlines["particleid"][i])))
2 changes: 1 addition & 1 deletion .docs/groundwater_paper/scripts/uspb_capture.py
@@ -151,7 +151,7 @@ def cf_model(model, k, i, j, base, Q=-100):
fs.close()

# clean up working directory
filelist = [f for f in os.listdir(cf_pth)]
filelist = list(os.listdir(cf_pth))
for f in filelist:
os.remove(os.path.join(cf_pth, f))

4 changes: 2 additions & 2 deletions .docs/groundwater_paper/scripts/uspb_capture_par.py
@@ -98,7 +98,7 @@ def copy_files(ml, nproc):
else:
if not os.path.exists(cf_pths[idx]):
os.makedirs(cf_pths[idx])
filelist = [f for f in os.listdir(cf_pths[0])]
filelist = list(os.listdir(cf_pths[0]))
sys.stdout.write(f"copying files from {cf_pths[0]} to {cf_pths[idx]}\n")
for f in filelist:
if os.path.splitext(f)[1].lower() in exclude:
@@ -314,7 +314,7 @@ def doit():

# clean up working directories
for idx in range(nproc):
filelist = [f for f in os.listdir(cf_pths[idx])]
filelist = list(os.listdir(cf_pths[idx]))
for f in filelist:
os.remove(os.path.join(cf_pths[idx], f))

4 changes: 2 additions & 2 deletions autotest/test_copy.py
@@ -169,8 +169,8 @@ def list_is_copy(mflist1, mflist2):
if mflist2 is mflist1:
return False
if isinstance(mflist1, MFTransientList):
data1 = {per: ra for per, ra in enumerate(mflist1.array)}
data2 = {per: ra for per, ra in enumerate(mflist2.array)}
data1 = dict(enumerate(mflist1.array))
data2 = dict(enumerate(mflist2.array))
elif isinstance(mflist1, MFList):
data1 = {0: mflist1.array}
data2 = {0: mflist2.array}
2 changes: 1 addition & 1 deletion autotest/test_datautil.py
@@ -10,4 +10,4 @@ def test_split_data_line():
assert len(spl) == len(exp)
# whitespace is not removed, todo: can it be?
# or is it needed to support Modflow input file format?
assert all(any([e in s for s in spl]) for e in exp)
assert all(any(e in s for s in spl) for e in exp)
4 changes: 2 additions & 2 deletions autotest/test_grid.py
@@ -308,7 +308,7 @@ def test_intersection(dis_model, disv_model):
x, y, local=local, forgive=forgive
)
except Exception as e:
if not forgive and any(["outside of the model area" in k for k in e.args]):
if not forgive and any("outside of the model area" in k for k in e.args):
pass
else: # should be forgiving x,y out of grid
raise e
@@ -447,7 +447,7 @@ def test_structured_from_gridspec(example_data_path, spc_file):
) # ymax
errmsg = f"extents {extents} of {fn} does not equal {rotated_extents}"
assert all(
[np.isclose(x, x0) for x, x0 in zip(modelgrid.extent, rotated_extents)]
np.isclose(x, x0) for x, x0 in zip(modelgrid.extent, rotated_extents)
), errmsg

ncpl = modelgrid.ncol * modelgrid.nrow
10 changes: 5 additions & 5 deletions autotest/test_grid_cases.py
@@ -287,7 +287,7 @@ def voronoi_circle():
radius = 100.0
x = radius * np.cos(theta)
y = radius * np.sin(theta)
poly = [(x, y) for x, y in zip(x, y)]
poly = list(zip(x, y))
max_area = 50
angle = 30

@@ -311,13 +311,13 @@ def voronoi_nested_circles():
radius = 100.0
x = radius * np.cos(theta)
y = radius * np.sin(theta)
circle_poly = [(x, y) for x, y in zip(x, y)]
circle_poly = list(zip(x, y))

theta = np.arange(0.0, 2 * np.pi, 0.2)
radius = 30.0
x = radius * np.cos(theta) + 25.0
y = radius * np.sin(theta) + 25.0
inner_circle_poly = [(x, y) for x, y in zip(x, y)]
inner_circle_poly = list(zip(x, y))

polys = [circle_poly, inner_circle_poly]
max_area = 100
@@ -377,7 +377,7 @@ def voronoi_many_polygons():
radius = 10.0
x = radius * np.cos(theta) + 50.0
y = radius * np.sin(theta) + 70.0
circle_poly0 = [(x, y) for x, y in zip(x, y)]
circle_poly0 = list(zip(x, y))
tri.add_polygon(circle_poly0)
tri.add_hole((50, 70))

@@ -386,7 +386,7 @@ def voronoi_many_polygons():
radius = 10.0
x = radius * np.cos(theta) + 70.0
y = radius * np.sin(theta) + 20.0
circle_poly1 = [(x, y) for x, y in zip(x, y)]
circle_poly1 = list(zip(x, y))
tri.add_polygon(circle_poly1)

# add line through domain to force conforming cells
2 changes: 1 addition & 1 deletion autotest/test_gridgen.py
@@ -753,7 +753,7 @@ def test_gridgen(function_tmpdir):
# test the gridgen line intersection
line = [[(Lx, Ly), (Lx, 0.0)]]
cells = g.intersect(line, "line", 0)
nlist = [n for n in cells["nodenumber"]]
nlist = list(cells["nodenumber"])
nlist2 = [
19,
650,
4 changes: 2 additions & 2 deletions autotest/test_headufile.py
@@ -143,9 +143,9 @@ def test_get_lni(mfusg_model):
head = head_file.get_data()

def get_expected():
exp = dict()
exp = {}
for l, ncpl in enumerate(list(grid.ncpl)):
exp[l] = dict()
exp[l] = {}
for nn in range(ncpl):
exp[l][nn] = head[l][nn]
return exp
2 changes: 1 addition & 1 deletion autotest/test_mnw.py
@@ -472,7 +472,7 @@ def test_blank_lines(function_tmpdir):
wellids = ["eb-33", "eb-35", "eb-36"]
rates = [np.float32(-11229.2), np.float32(-534.72), np.float32(-534.72)]

wellids2 = sorted(list(mnw2.mnw.keys()))
wellids2 = sorted(mnw2.mnw.keys())
emsg = "incorrect keys returned from load mnw2"
assert wellids2 == wellids, emsg

2 changes: 1 addition & 1 deletion autotest/test_model_splitter.py
@@ -1264,7 +1264,7 @@ def build_gwt_model(sim, gwtname, rch_package):
new_sim.run_simulation()

# compare results for each of the models
splits = [i for i in range(nparts)]
splits = list(range(nparts))
for name in sim.model_names:
gwm = sim.get_model(name)
if "concentration()" in gwm.output.methods():
2 changes: 1 addition & 1 deletion autotest/test_mp7_cases.py
@@ -244,7 +244,7 @@ def mf2005(function_tmpdir):
# recharge
ModflowRch(m, ipakcb=iu_cbc, rech=Mp7Cases.rch, nrchop=1)
# wel
wd = [i for i in Mp7Cases.wel_loc] + [Mp7Cases.wel_q]
wd = list(Mp7Cases.wel_loc) + [Mp7Cases.wel_q]
ModflowWel(m, ipakcb=iu_cbc, stress_period_data={0: wd})
# river
rd = []
2 changes: 1 addition & 1 deletion autotest/test_obs.py
@@ -233,7 +233,7 @@ def test_obs_load_and_write(function_tmpdir, example_data_path):
s = f"nqtfb loaded from {m.drob.fn_path} read incorrectly"
assert drob.nqtfb == m.drob.nqtfb, s
s = f"obsnam loaded from {m.drob.fn_path} read incorrectly"
assert list([n for n in drob.obsnam]) == list([n for n in m.drob.obsnam]), s
assert list(drob.obsnam) == list(m.drob.obsnam), s
s = f"flwobs loaded from {m.drob.fn_path} read incorrectly"
assert np.array_equal(drob.flwobs, m.drob.flwobs), s
s = f"layer loaded from {m.drob.fn_path} read incorrectly"
2 changes: 1 addition & 1 deletion autotest/test_seawat.py
@@ -243,7 +243,7 @@ def test_seawat_load_only(function_tmpdir):
m = Seawat.load(
model_name, model_ws=function_tmpdir, load_only=load_only, verbose=True
)
assert set([pkg.upper() for pkg in load_only]) == set(m.get_package_list())
assert {pkg.upper() for pkg in load_only} == set(m.get_package_list())


def test_vdf_vsc(function_tmpdir):
4 changes: 2 additions & 2 deletions autotest/test_zonbud_utility.py
@@ -115,8 +115,8 @@ def test_compare2zonebudget(cbc_f, zon_f, zbud_f, rtol):
continue
if r1[0].shape[0] != r2[0].shape[0]:
continue
a1 = np.array([v for v in zb_arr[zonenames][r1[0]][0]])
a2 = np.array([v for v in fp_arr[zonenames][r2[0]][0]])
a1 = np.array(list(zb_arr[zonenames][r1[0]][0]))
a2 = np.array(list(fp_arr[zonenames][r2[0]][0]))
allclose = np.allclose(a1, a2, rtol)

mxdiff = np.abs(a1 - a2).max()
4 changes: 2 additions & 2 deletions flopy/discretization/grid.py
@@ -642,8 +642,8 @@ def _set_neighbors(self, reset=False, method="rook"):
"""
if self._neighbors is None or reset:
node_num = 0
neighbors = {i: list() for i in range(len(self.iverts))}
edge_set = {i: list() for i in range(len(self.iverts))}
neighbors = {i: [] for i in range(len(self.iverts))}
edge_set = {i: [] for i in range(len(self.iverts))}
geoms = []
node_nums = []
if method == "rook":
2 changes: 1 addition & 1 deletion flopy/export/netcdf.py
@@ -1253,7 +1253,7 @@ def add_sciencebase_metadata(self, id, check=True):
"get_sciencebase_xml_metadata",
"get_sciencebase_metadata",
}
towrite = sorted(list(attr.difference(skip)))
towrite = sorted(attr.difference(skip))
for k in towrite:
v = md.__getattribute__(k)
if v is not None:
4 changes: 2 additions & 2 deletions flopy/export/utils.py
@@ -399,7 +399,7 @@ def output_helper(
logger.warn(msg)
elif verbose:
print(msg)
times = [t for t in common_times[::stride]]
times = list(common_times[::stride])
if (isinstance(f, str) or isinstance(f, Path)) and Path(f).suffix.lower() == ".nc":
f = NetCdf(f, ml, time_values=times, logger=logger, forgive=forgive, **kwargs)
elif isinstance(f, NetCdf):
@@ -1112,7 +1112,7 @@ def transient2d_export(f: Union[str, os.PathLike], t2d, fmt=None, **kwargs):
if hasattr(t2d, "transient_2ds"):
d = t2d.transient_2ds
else:
d = {ix: i for ix, i in enumerate(t2d.array)}
d = dict(enumerate(t2d.array))
else:
raise AssertionError("No data available to export")

16 changes: 8 additions & 8 deletions flopy/export/vtk.py
@@ -425,7 +425,7 @@ def _build_grid_geometry(self):
v1 += 1

cell_faces = [
[v for v in range(v0, v1)],
list(range(v0, v1)),
[v + self.nvpl for v in range(v0, v1)],
]

@@ -788,7 +788,7 @@ def add_transient_array(self, d, name=None, masked_values=None):
self._set_vtk_grid_geometry()

k = list(d.keys())[0]
transient = dict()
transient = {}
if isinstance(d[k], DataInterface):
if d[k].data_type in (DataType.array2d, DataType.array3d):
if name is None:
@@ -842,7 +842,7 @@ def add_transient_list(self, mflist, masked_values=None):
mfl = mflist.array
if isinstance(mfl, dict):
for arr_name, arr4d in mflist.array.items():
d = {kper: array for kper, array in enumerate(arr4d)}
d = dict(enumerate(arr4d))
name = f"{pkg_name}_{arr_name}"
self.add_transient_array(d, name)
else:
@@ -1021,7 +1021,7 @@ def add_package(self, pkg, masked_values=None):
value.transient_2ds, item, masked_values
)
else:
d = {ix: i for ix, i in enumerate(value.array)}
d = dict(enumerate(value.array))
self.add_transient_array(d, item, masked_values)

elif value.data_type == DataType.transient3d:
@@ -1201,11 +1201,11 @@ def add_heads(self, hds, kstpkper=None, masked_values=None):
# reset totim based on values read from head file
times = hds.get_times()
kstpkpers = hds.get_kstpkper()
self._totim = {ki: time for (ki, time) in zip(kstpkpers, times)}
self._totim = dict(zip(kstpkpers, times))

text = hds.text.decode()

d = dict()
d = {}
for ki in kstpkper:
d[ki] = hds.get_data(ki)

@@ -1239,7 +1239,7 @@ def add_cell_budget(self, cbc, text=None, kstpkper=None, masked_values=None):
)

records = cbc.get_unique_record_names(decode=True)
imeth_dict = {record: imeth for (record, imeth) in zip(records, cbc.imethlist)}
imeth_dict = dict(zip(records, cbc.imethlist))
if text is None:
keylist = records
else:
@@ -1259,7 +1259,7 @@ def add_cell_budget(self, cbc, text=None, kstpkper=None, masked_values=None):
# reset totim based on values read from budget file
times = cbc.get_times()
kstpkpers = cbc.get_kstpkper()
self._totim = {ki: time for (ki, time) in zip(kstpkpers, times)}
self._totim = dict(zip(kstpkpers, times))

for name in keylist:
d = {}
(diffs for the remaining changed files were not loaded and are not shown)

