chore(deps): update pre-commit hook astral-sh/ruff-pre-commit to v0.7.1 #252

Merged (2 commits) on Oct 25, 2024
Changes from all commits

.pre-commit-config.yaml (2 changes: 1 addition & 1 deletion)

@@ -33,7 +33,7 @@ repos:
# - id: yamlfmt

- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.7.0
+ rev: v0.7.1
hooks:
# Run the linter.
- id: ruff

artistools/deposition.py (4 changes: 2 additions & 2 deletions)

@@ -72,7 +72,7 @@ def main_analytical(args: argparse.Namespace | None = None, argsraw: list[str] |
# define T52MN (0.0211395*DAY)

t_now = args.timedays * u.day
- print(f't_now = {t_now.to("d")}')
+ print(f"t_now = {t_now.to('d')}")
print("The following assumes that all 56Ni has decayed to 56Co and all energy comes from emitted positrons")

# adata = at.atomic.get_levels(args.modelpath, get_photoionisations=True)
@@ -118,7 +118,7 @@ def main_analytical(args: argparse.Namespace | None = None, argsraw: list[str] |
# width = ((v_outer - v_inner) * t_now).to('cm').value
# tau = width * phixs * nnlevel
# print(f'width: {width:.3e} cm, phixs: {phixs:.3e} cm^2, nnlevel: {nnlevel:.3e} cm^-3, tau: {tau:.3e}')
- print(f'Global posdep: {global_posdep.to("solLum"):.3e}')
+ print(f"Global posdep: {global_posdep.to('solLum'):.3e}")


def main(args: argparse.Namespace | None = None, argsraw: list[str] | None = None, **kwargs: t.Any) -> None:
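
Most of the Python changes in this diff appear to come from re-running the updated ruff format hook: f-strings are rewritten so the outer quotes follow the project's double-quote preference, with quotes nested inside replacement fields flipped to single quotes. A minimal sketch of the pattern (illustrative names and values only, not code from the repository):

# Illustrative sketch of the quote-normalization pattern seen throughout this diff.
quantity = {"d": 2.5}

# Old style: single outer quotes so the nested key lookup could use double quotes.
print(f'quantity = {quantity["d"]}')

# New style after the hook update: double outer quotes, nested quotes flipped to single.
print(f"quantity = {quantity['d']}")

Both forms are equivalent at runtime; only the quoting style changes.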

artistools/estimators/plot3destimators_classic.py (2 changes: 1 addition & 1 deletion)

@@ -54,7 +54,7 @@ def plot_Te_vs_time_lineofsight_3d_model(modelpath, modeldata, estimators, reado
associated_modeldata_row_for_mgi = modeldata.loc[modeldata["inputcellid"] == assoc_cells[mgi][0]]

Te = [estimators[timestep, mgi]["Te"] for timestep, _ in enumerate(times)]
- plt.scatter(times, Te, label=f'vel={associated_modeldata_row_for_mgi["vel_y_mid"].to_numpy()[0] / CLIGHT}')
+ plt.scatter(times, Te, label=f"vel={associated_modeldata_row_for_mgi['vel_y_mid'].to_numpy()[0] / CLIGHT}")

plt.xlabel("time [days]")
plt.ylabel("Te [K]")

artistools/inputmodel/describeinputmodel.py (26 changes: 13 additions & 13 deletions)

@@ -146,17 +146,17 @@ def main(args: argparse.Namespace | None = None, argsraw: t.Sequence[str] | None
if "cellYe" in dfmodel.collect_schema().names():
electronfrac = dfmodel.select(pl.col("cellYe").dot(pl.col("mass_g")) / pl.col("mass_g").sum()).collect().item()
assert electronfrac is not None
- print(f' {"electron frac Ye":19s} {electronfrac:.3f}')
+ print(f" {'electron frac Ye':19s} {electronfrac:.3f}")
if args.isotopes:
# currently assumes that all isotopes are specified (i.e. not for Type Ia models)
calcelectronfrac = calculate_model_electron_frac(dfmodel)
assert calcelectronfrac is not None
- print(f' {"snapshot Ye":19s} {calcelectronfrac:.3f}')
+ print(f" {'snapshot Ye':19s} {calcelectronfrac:.3f}")

if "q" in dfmodel.collect_schema().names():
initial_energy = dfmodel.select(pl.col("q").dot(pl.col("mass_g"))).collect().item()
assert initial_energy is not None
- print(f' {"initial energy":19s} {initial_energy:.3e} erg')
+ print(f" {'initial energy':19s} {initial_energy:.3e} erg")
else:
initial_energy = 0.0

@@ -176,7 +176,7 @@ def main(args: argparse.Namespace | None = None, argsraw: t.Sequence[str] | None
.item()
) * 1e7

- print(f' {"kinetic energy":19s} {ejecta_ke_erg:.2e} [erg]')
+ print(f" {'kinetic energy':19s} {ejecta_ke_erg:.2e} [erg]")

mass_msun_rho = dfmodel.select(pl.col("mass_g").sum() / msun_g).collect().item()

@@ -203,18 +203,18 @@ def main(args: argparse.Namespace | None = None, argsraw: t.Sequence[str] | None
)

print(
- f' {"initial energy":19s} {initial_energy_mapped:.3e} erg (when mapped to'
+ f" {'initial energy':19s} {initial_energy_mapped:.3e} erg (when mapped to"
f" {ncoordgridx}^3 cubic grid, error"
f" {100 * (initial_energy_mapped / initial_energy - 1):.2f}%)"
)

mtot_mapped_msun = sum(cellmass_mapped) / msun_g
print(
- f' {"M_tot_rho_map":19s} {mtot_mapped_msun:7.5f} MSun (density * volume when mapped to {ncoordgridx}^3'
+ f" {'M_tot_rho_map':19s} {mtot_mapped_msun:7.5f} MSun (density * volume when mapped to {ncoordgridx}^3"
f" cubic grid, error {100 * (mtot_mapped_msun / mass_msun_rho - 1):.2f}%)"
)

- print(f' {"M_tot_rho":19s} {mass_msun_rho:7.5f} MSun (density * volume)')
+ print(f" {'M_tot_rho':19s} {mass_msun_rho:7.5f} MSun (density * volume)")

if modelmeta["dimensions"] > 1:
corner_mass = (
@@ -225,7 +225,7 @@ def main(args: argparse.Namespace | None = None, argsraw: t.Sequence[str] | None
.item()
) / msun_g
print(
- f' {"M_corners":19s} {corner_mass:7.5f} MSun ('
+ f" {'M_corners':19s} {corner_mass:7.5f} MSun ("
f" {100 * corner_mass / mass_msun_rho:.2f}% of M_tot in cells with v_r_mid > vmax)"
)

@@ -273,28 +273,28 @@ def main(args: argparse.Namespace | None = None, argsraw: t.Sequence[str] | None
mass_msun_actinides += species_mass_msun

print(
- f' {"M_tot_elem":19s} {mass_msun_elem:7.5f} MSun ({mass_msun_elem / mass_msun_rho * 100:6.2f}% of M_tot_rho)'
+ f" {'M_tot_elem':19s} {mass_msun_elem:7.5f} MSun ({mass_msun_elem / mass_msun_rho * 100:6.2f}% of M_tot_rho)"
)

if args.isotopes:
print(
- f' {"M_tot_iso":19s} {mass_msun_isotopes:7.5f} MSun ({mass_msun_isotopes / mass_msun_rho * 100:6.2f}% '
+ f" {'M_tot_iso':19s} {mass_msun_isotopes:7.5f} MSun ({mass_msun_isotopes / mass_msun_rho * 100:6.2f}% "
"of M_tot_rho, but can be < 100% if stable isotopes not tracked)"
)

mass_msun_fegroup = dfmodel.select(pl.col("X_Fegroup").dot(pl.col("mass_g"))).collect().item() / msun_g
print(
- f' {"M_Fegroup":19s} {mass_msun_fegroup:7.5f} MSun'
+ f" {'M_Fegroup':19s} {mass_msun_fegroup:7.5f} MSun"
f" ({mass_msun_fegroup / mass_msun_rho * 100:6.2f}% of M_tot_rho)"
)

print(
- f' {"M_lanthanide_isosum":19s} {mass_msun_lanthanides:7.5f} MSun'
+ f" {'M_lanthanide_isosum':19s} {mass_msun_lanthanides:7.5f} MSun"
f" ({mass_msun_lanthanides / mass_msun_rho * 100:6.2f}% of M_tot_rho)"
)

print(
- f' {"M_actinide_isosum":19s} {mass_msun_actinides:7.5f} MSun'
+ f" {'M_actinide_isosum':19s} {mass_msun_actinides:7.5f} MSun"
f" ({mass_msun_actinides / mass_msun_rho * 100:6.2f}% of M_tot_rho)"
)


artistools/inputmodel/energyinputfiles.py (4 changes: 2 additions & 2 deletions)

@@ -16,14 +16,14 @@
def write_energydistribution_file(energydistdata, outputfilepath="."):
print("Writing energydistribution.txt")
with Path(outputfilepath, "energydistribution.txt").open("w", encoding="utf-8") as fmodel:
- fmodel.write(f'{len(energydistdata["cell_energy"])}\n') # write number of points
+ fmodel.write(f"{len(energydistdata['cell_energy'])}\n") # write number of points
energydistdata.to_csv(fmodel, header=False, sep="\t", index=False, float_format="%g")


def write_energyrate_file(energy_rate_data, outputfilepath="."):
print("Writing energyrate.txt")
with Path(outputfilepath, "energyrate.txt").open("w", encoding="utf-8") as fmodel:
- fmodel.write(f'{len(energy_rate_data["times"])}\n') # write number of points
+ fmodel.write(f"{len(energy_rate_data['times'])}\n") # write number of points
energy_rate_data.to_csv(fmodel, sep="\t", index=False, header=False, float_format="%.10f")



artistools/inputmodel/inputmodel_misc.py (8 changes: 4 additions & 4 deletions)

@@ -311,9 +311,9 @@ def vectormatch(vec1: list[float], vec2: list[float]) -> bool:
if not vectormatch(pos3_in, [pos_z_mid, pos_y_mid, pos_x_mid]):
matched_pos_zyx_mid = False

- assert (
-     sum((matched_pos_xyz_min, matched_pos_zyx_min, matched_pos_xyz_mid, matched_pos_zyx_mid)) == 1
- ), "one option must match uniquely"
+ assert sum((matched_pos_xyz_min, matched_pos_zyx_min, matched_pos_xyz_mid, matched_pos_zyx_mid)) == 1, (
+     "one option must match uniquely"
+ )

colrenames = {}
if matched_pos_xyz_min:
Expand Down Expand Up @@ -934,7 +934,7 @@ def save_modeldata(
fmodel.write(f"{vmax:.4e}\n")

if customcols:
- fmodel.write(f'#{" ".join(standardcols)} {" ".join(customcols)}\n')
+ fmodel.write(f"#{' '.join(standardcols)} {' '.join(customcols)}\n")

abundandcustomcols = [*[col for col in standardcols if col.startswith("X_")], *customcols]


artistools/inputmodel/maptogrid.py (2 changes: 1 addition & 1 deletion)

@@ -212,7 +212,7 @@ def maptogrid(
assert isinstance(rmax, float)
with Path(outputfolderpath, "ejectapartanalysis.dat").open(mode="w", encoding="utf-8") as fpartanalysis:
fpartanalysis.writelines(
- f'{part["dis"]} {part["h"]} {part["h"] / part["dis"]} {part["vrad"]} {part["vperp"]} {part["vtot"]}\n'
+ f"{part['dis']} {part['h']} {part['h'] / part['dis']} {part['vrad']} {part['vperp']} {part['vtot']}\n"
for part in dfsnapshot.select(["dis", "h", "vrad", "vperp", "vtot"]).iter_rows(named=True)
)


artistools/inputmodel/modelfromhydro.py (4 changes: 2 additions & 2 deletions)

@@ -401,7 +401,7 @@ def makemodelfromgriddata(
)

if dfelabundances is not None:
- print(f'Writing to {Path(outputpath) / "abundances.txt"}...')
+ print(f"Writing to {Path(outputpath) / 'abundances.txt'}...")
at.inputmodel.save_initelemabundances(
dfelabundances=dfelabundances, outpath=outputpath, headercommentlines=modelmeta["headercommentlines"]
)
Expand All @@ -411,7 +411,7 @@ def makemodelfromgriddata(
if "tracercount" in dfmodel:
dfmodel = dfmodel.with_columns(pl.col("tracercount").cast(pl.Int32))

- print(f'Writing to {Path(outputpath) / "model.txt"}...')
+ print(f"Writing to {Path(outputpath) / 'model.txt'}...")
at.inputmodel.save_modeldata(outpath=outputpath, dfmodel=dfmodel, modelmeta=modelmeta)



artistools/inputmodel/opacityinputfile.py (4 changes: 2 additions & 2 deletions)

@@ -43,7 +43,7 @@ def opacity_by_Ye(outputfilepath, griddata):
griddata["opacity"] = cell_opacities

with Path(outputfilepath, "opacity.txt").open("w", encoding="utf-8") as fopacity:
- fopacity.write(f'{len(griddata["inputcellid"])}\n')
+ fopacity.write(f"{len(griddata['inputcellid'])}\n")
griddata[["inputcellid", "opacity"]].to_csv(fopacity, sep="\t", index=False, header=False, float_format="%.10f")


Expand All @@ -59,7 +59,7 @@ def write_Ye_file(outputfilepath: Path | str, griddata: pd.DataFrame | pl.DataFr
assert griddata.schema["inputcellid"].is_integer()

with Path(outputfilepath, "Ye.txt").open("w", encoding="utf-8") as fYe:
- fYe.write(f'{len(griddata["inputcellid"])}\n')
+ fYe.write(f"{len(griddata['inputcellid'])}\n")
griddata.to_pandas(use_pyarrow_extension_array=True)[["inputcellid", "cellYe"]].to_csv(
fYe, sep="\t", index=False, header=False, float_format="%.10f", na_rep="0.0"
)

artistools/inputmodel/shen2018.py (4 changes: 2 additions & 2 deletions)

@@ -90,8 +90,8 @@ def main(args: argparse.Namespace | None = None, argsraw: t.Sequence[str] | None
v_inner = v_outer
m_enc_inner = m_enc_outer

- print(f'M_tot = {m_enc_outer / u.solMass.to("g"):.3f} solMass')
- print(f'M_Ni56 = {tot_ni56mass / u.solMass.to("g"):.3f} solMass')
+ print(f"M_tot = {m_enc_outer / u.solMass.to('g'):.3f} solMass")
+ print(f"M_Ni56 = {tot_ni56mass / u.solMass.to('g'):.3f} solMass")

at.save_modeldata(dfmodel=dfmodel, t_model_init_days=t_model_init_days, outpath=args.outputpath)
at.inputmodel.save_initelemabundances(dfelabundances, outpath=args.outputpath)

artistools/inputmodel/test_inputmodel.py (6 changes: 3 additions & 3 deletions)

@@ -99,9 +99,9 @@ def verify_file_checksums(checksums_expected: dict, digest: str = "sha256", fold

for filename, checksum_expected in checksums_expected.items():
fullpath = Path(folder) / filename
- assert (
-     checksums_actual[fullpath] == checksum_expected
- ), f"{folder}/{filename} checksum mismatch. Expecting {checksum_expected} but calculated {checksums_actual[fullpath]}"
+ assert checksums_actual[fullpath] == checksum_expected, (
+     f"{folder}/{filename} checksum mismatch. Expecting {checksum_expected} but calculated {checksums_actual[fullpath]}"
+ )


def test_makeartismodelfrom_sph_particles() -> None:

artistools/inputmodel/to_tardis.py (4 changes: 2 additions & 2 deletions)

@@ -96,9 +96,9 @@ def main(args: argparse.Namespace | None = None, argsraw: t.Sequence[str] | None
# fileout.write(f'{0.},{0.:.4e},{0.},{0.},{",".join([f"{0.:.4e}" for _ in listspecies])}\n')

for cell in dfmodel.itertuples(index=False):
- abundlist = [f'{getattr(cell, f"X_{strnuc}"):.4e}' for strnuc in listspecies]
+ abundlist = [f"{getattr(cell, f'X_{strnuc}'):.4e}" for strnuc in listspecies]
fileout.write(
- f'{cell.vel_r_max_kmps},{cell.rho:.4e},{temperature},{dilution_factor},{",".join(abundlist)}\n'
+ f"{cell.vel_r_max_kmps},{cell.rho:.4e},{temperature},{dilution_factor},{','.join(abundlist)}\n"
)

print(f"Saved {outputfilepath}")

artistools/linefluxes.py (2 changes: 1 addition & 1 deletion)

@@ -766,7 +766,7 @@ def make_emitting_regions_plot(args: argparse.Namespace) -> None:
emdata = emdata_all[modelindex][tmid, feature.colname]

if not bars:
- print(f' {len(emdata["em_log10nne"])} points plotted for {feature.featurelabel}')
+ print(f" {len(emdata['em_log10nne'])} points plotted for {feature.featurelabel}")

serieslabel = (
(modellabel + " " + feature.featurelabel)

artistools/misc.py (2 changes: 1 addition & 1 deletion)

@@ -1012,7 +1012,7 @@ def merge_pdf_files(pdf_files: list[str]) -> None:
merger.append(pdffile) # type: ignore[attr-defined]
Path(pdfpath).unlink()

- resultfilename = f'{pdf_files[0].replace(".pdf", "")}-{pdf_files[-1].replace(".pdf", "")}'
+ resultfilename = f"{pdf_files[0].replace('.pdf', '')}-{pdf_files[-1].replace('.pdf', '')}"
with Path(f"{resultfilename}.pdf").open("wb") as resultfile:
merger.write(resultfile) # type: ignore[attr-defined]


artistools/nltepops/nltepops.py (2 changes: 1 addition & 1 deletion)

@@ -53,7 +53,7 @@ def texifyconfiguration(levelname: str) -> str:
strout = strout.replace(strorbitalocc, strorbitalocctex)

for parentterm in re.findall(r"\([0-9][A-Z][^)]?\)", strout):
- parentermtex = f'({texifyterm(parentterm.strip("()"))})'
+ parentermtex = f"({texifyterm(parentterm.strip('()'))})"
strout = strout.replace(parentterm, parentermtex)
strterm = levelname.split("_")[-1]
strout += " " + texifyterm(strterm)

artistools/packets/packets.py (2 changes: 1 addition & 1 deletion)

@@ -819,7 +819,7 @@ def make_3d_histogram_from_packets(
# print(dfpackets[['emission_velocity', 'em_velx', 'em_vely', 'em_velz']])
# select only type escape and type r-pkt (don't include gamma-rays)
dfpackets = dfpackets.query(
- f'type_id == {type_ids["TYPE_ESCAPE"]} and escape_type_id == {type_ids["TYPE_RPKT"]}'
+ f"type_id == {type_ids['TYPE_ESCAPE']} and escape_type_id == {type_ids['TYPE_RPKT']}"
)
if em_time:
dfpackets = dfpackets.query("@timeminarray[@timestep_min] < em_time/@DAY < @timemaxarray[@timestep_max]")

artistools/plotspherical.py (2 changes: 1 addition & 1 deletion)

@@ -176,7 +176,7 @@ def plot_spherical(
.sort(["costhetabin", "phibin"])
).collect()

- print(f'packets plotted: {alldirbins.select("count").sum().item(0, 0):.1e}')
+ print(f"packets plotted: {alldirbins.select('count').sum().item(0, 0):.1e}")

# these phi and theta angle ranges are defined differently to artis
phigrid = np.linspace(-np.pi, np.pi, nphibins + 1, endpoint=True, dtype=np.float64)

artistools/spectra/sampleblackbodyfrompacket_tr.py (2 changes: 1 addition & 1 deletion)

@@ -94,7 +94,7 @@ def planck(nu, temperature):
for npacketfile in range(nprocs):
dfpackets = at.packets.readfile(packetsfiles[npacketfile]) # , type='TYPE_ESCAPE', escape_type='TYPE_RPKT')
dfpackets = at.packets.bin_packet_directions(modelpath, dfpackets)
- dfpackets = dfpackets.query(f'type_id == {type_ids["TYPE_ESCAPE"]} and escape_type_id == {type_ids["TYPE_RPKT"]}')
+ dfpackets = dfpackets.query(f"type_id == {type_ids['TYPE_ESCAPE']} and escape_type_id == {type_ids['TYPE_RPKT']}")

# print(max(dfpackets['t_arrive_d']))
# print(dfpackets)

artistools/spectra/spectra.py (2 changes: 1 addition & 1 deletion)

@@ -1258,7 +1258,7 @@ def print_integrated_flux(
lambda_max = arr_lambda_angstroms.max()
assert isinstance(lambda_min, int | float)
assert isinstance(lambda_max, int | float)
- print(f" integrated flux ({lambda_min:.1f} to " f"{lambda_max:.1f} A): {integrated_flux:.3e} erg/s/cm2")
+ print(f" integrated flux ({lambda_min:.1f} to {lambda_max:.1f} A): {integrated_flux:.3e} erg/s/cm2")
assert isinstance(integrated_flux, float)
return integrated_flux
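
This hunk shows a third, smaller cleanup: two implicitly concatenated f-string literals, presumably left over from an earlier line wrap, are merged into a single f-string once the call fits on one line. A sketch of the pattern (illustrative values only, not code from the repository):

# Illustrative sketch of the implicit-concatenation cleanup.
lambda_min, lambda_max, flux = 3500.0, 9000.0, 1.234e40

# Old: two adjacent f-string literals produced by a previous line break.
print(f"integrated flux ({lambda_min:.1f} to " f"{lambda_max:.1f} A): {flux:.3e} erg/s/cm2")

# New: one f-string, since the whole call now fits within the line limit.
print(f"integrated flux ({lambda_min:.1f} to {lambda_max:.1f} A): {flux:.3e} erg/s/cm2")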


artistools/writecomparisondata.py (10 changes: 5 additions & 5 deletions)

@@ -37,7 +37,7 @@ def write_spectra(modelpath: str | Path, model_id: str, selected_timesteps: t.Se
with outfilepath.open("w", encoding="utf-8") as outfile:
outfile.write(f"#NTIMES: {len(selected_timesteps)}\n")
outfile.write(f"#NWAVE: {len(lambdas)}\n")
- outfile.write(f'#TIMES[d]: {" ".join([f"{times[ts]:.2f}" for ts in selected_timesteps])}\n')
+ outfile.write(f"#TIMES[d]: {' '.join([f'{times[ts]:.2f}' for ts in selected_timesteps])}\n")
outfile.write("#wavelength[Ang] flux_t0[erg/s/Ang] flux_t1[erg/s/Ang] ... flux_tn[erg/s/Ang]\n")

for n in reversed(range(len(lambdas))):
@@ -53,7 +53,7 @@ def write_ntimes_nvel(outfile: TextIOWrapper, selected_timesteps: t.Sequence[int
_, modelmeta = at.inputmodel.get_modeldata(modelpath, getheadersonly=True)
outfile.write(f"#NTIMES: {len(selected_timesteps)}\n")
outfile.write(f"#NVEL: {modelmeta['npts_model']}\n")
- outfile.write(f'#TIMES[d]: {" ".join([f"{times[ts]:.2f}" for ts in selected_timesteps])}\n')
+ outfile.write(f"#TIMES[d]: {' '.join([f'{times[ts]:.2f}' for ts in selected_timesteps])}\n")


def write_single_estimator(modelpath, selected_timesteps, estimators, allnonemptymgilist, outfile, keyname) -> None:
@@ -103,12 +103,12 @@ def write_ionfracts(
with pathfileout.open("w", encoding="utf-8") as f:
f.write(f"#NTIMES: {len(selected_timesteps)}\n")
f.write(f"#NSTAGES: {nions}\n")
- f.write(f'#TIMES[d]: {" ".join([f"{times[ts]:.2f}" for ts in selected_timesteps])}\n')
+ f.write(f"#TIMES[d]: {' '.join([f'{times[ts]:.2f}' for ts in selected_timesteps])}\n")
f.write("#\n")
for timestep in selected_timesteps:
f.write(f"#TIME: {times[timestep]:.2f}\n")
f.write(f"#NVEL: {len(allnonemptymgilist)}\n")
- f.write(f'#vel_mid[km/s] {" ".join([f"{elsymb.lower()}{ion}" for ion in range(nions)])}\n')
+ f.write(f"#vel_mid[km/s] {' '.join([f'{elsymb.lower()}{ion}' for ion in range(nions)])}\n")
for modelgridindex, cell in modeldata.iterrows():
if modelgridindex not in allnonemptymgilist:
continue
@@ -134,7 +134,7 @@ def write_phys(modelpath, model_id, selected_timesteps, estimators, allnonemptym
modeldata, modelmeta = at.inputmodel.get_modeldata(modelpath, derived_cols=["vel_r_min_kmps"])
with Path(outputpath, f"phys_{model_id}_artisnebular.txt").open("w", encoding="utf-8") as f:
f.write(f"#NTIMES: {len(selected_timesteps)}\n")
- f.write(f'#TIMES[d]: {" ".join([f"{times[ts]:.2f}" for ts in selected_timesteps])}\n')
+ f.write(f"#TIMES[d]: {' '.join([f'{times[ts]:.2f}' for ts in selected_timesteps])}\n")
f.write("#\n")
for timestep in selected_timesteps:
f.write(f"#TIME: {times[timestep]:.2f}\n")

conftest.py (6 changes: 3 additions & 3 deletions)

@@ -12,9 +12,9 @@ def pytest_configure(config):
assert isinstance(repopath, Path)
if outputpath.exists():
is_descendant = repopath.resolve() in outputpath.resolve().parents
- assert (
-     is_descendant
- ), f"Refusing to delete {outputpath.resolve()} as it is not a descendant of the repository {repopath.resolve()}"
+ assert is_descendant, (
+     f"Refusing to delete {outputpath.resolve()} as it is not a descendant of the repository {repopath.resolve()}"
+ )
shutil.rmtree(outputpath, ignore_errors=True)
outputpath.mkdir(exist_ok=True)


requirements.txt (2 changes: 1 addition & 1 deletion)

@@ -21,7 +21,7 @@ pytest-codspeed>=2.2.1
pytest-cov>=5.0.0
pytest-xdist[psutil]>=3.6.1
PyYAML>=6.0.2
- ruff>=0.7.0
+ ruff>=0.7.1
scipy>=1.14.1
setuptools_scm[toml]>=8.1.0
tabulate>=0.9