From 1b5fdbd9b777bf6e866adc0eaba680035ddc23c2 Mon Sep 17 00:00:00 2001
From: UltralyticsAssistant
Date: Thu, 5 Sep 2024 11:03:14 +0000
Subject: [PATCH] Auto-format by https://ultralytics.com/actions

---
 plots.py            | 4 ++--
 utils/MSV.py        | 4 ++--
 utils/NLS.py        | 6 +++---
 utils/vid2images.py | 2 +-
 vidExample.py       | 6 +++---
 5 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/plots.py b/plots.py
index e9a2aee..3f68c04 100644
--- a/plots.py
+++ b/plots.py
@@ -85,7 +85,7 @@ def plotresults(cam, im, P, S, B, bbox):
         plot_height=300,
         x_axis_label="image",
         y_axis_label="distance (m)",
-        title="Distance = %.2fm in %.3fs" % (S[-1, 7], S[-1, 5] - S[0, 5]),
+        title=f"Distance = {S[-1, 7]:.2f}m in {S[-1, 5] - S[0, 5]:.3f}s",
         tools="save,reset,hover",
         active_inspect="hover",
     )
@@ -98,7 +98,7 @@ def plotresults(cam, im, P, S, B, bbox):
         plot_height=300,
         x_axis_label="image",
         y_axis_label="speed (km/h)",
-        title="Speed = %.2f +/- %.2f km/h" % (S[1:, 8].mean(), S[1:, 8].std()),
+        title=f"Speed = {S[1:, 8].mean():.2f} +/- {S[1:, 8].std():.2f} km/h",
         tools="save,reset,hover",
         active_inspect="hover",
     )
diff --git a/utils/MSV.py b/utils/MSV.py
index a6dfe3a..5037ba9 100644
--- a/utils/MSV.py
+++ b/utils/MSV.py
@@ -84,7 +84,7 @@ def fcnMSV2_t(K, P, B, vg, i):  # solves for 1 camera translation
         JT = (JT - zhat) / dx
         JTJ = JT @ JT.T  # J.T @ J
         delta = np.linalg.inv(JTJ + mdm) @ JT @ residual * min(((i + 1) * 0.01) ** 2, 1)
-        print("%g: f=%g, x=%s" % (i, rms(z - zhat), rms(delta)))
+        print(f"{i:g}: f={rms(z - zhat):g}, x={rms(delta)}")
         x = x + delta
         if rms(delta) < 1e-8:
             break
@@ -276,7 +276,7 @@ def grad_func(x, u0, U, K, z):  # calculates the gradient
         v_cap = v / (1 - (beta_2**i))  # calculates the bias-corrected estimates
         delta = (alpha * m_cap) / (v_cap**0.5 + epsilon)
         x = x - delta  # updates the parameters
-        print("Residual %g, Params: %s" % (r[i], x[:]))
+        print(f"Residual {r[i]:g}, Params: {x[:]}")
         xi[i] = x
         if rms(delta) < 1e-5:  # convergence check
             break
diff --git a/utils/NLS.py b/utils/NLS.py
index 8735ecb..9971948 100644
--- a/utils/NLS.py
+++ b/utils/NLS.py
@@ -234,12 +234,12 @@ def fzKautograd_batch(x, K, nc, nt):  # for autograd
         delta = np.linalg.inv(JT @ JT.T + mdm) @ JT @ (z - zhat) * 0.9
         x = x + delta
         # x[nt * 3:nt * 3 + nc * 3] *= range_cal / norm(x[nt * 3:nt * 3 + 3])  # calibrate scale
-        print("%g: %.3fs, f=%g, x=%s" % (i, time.time() - tic, rms(z - zhat), rms(delta)))
+        print(f"{i:g}: {time.time() - tic:.3f}s, f={rms(z - zhat):g}, x={rms(delta)}")
         if rms(delta) < 1e-7:
             break
     else:
         print("WARNING: fcnNLS_batch() reaching max iterations!")
-    print("fcnNLS_batch done in %g steps, %.3fs, f=%g" % (i, time.time() - tic, rms(z - zhat)))
+    print(f"fcnNLS_batch done in {i:g} steps, {time.time() - tic:.3f}s, f={rms(z - zhat):g}")
 
     j = nt * 3
     pw = x[:j].reshape(nt, 3)
@@ -313,7 +313,7 @@ def fzKautograd_batch(x, K, nc, nt):  # for autograd
             break
     else:
         print("WARNING: fcnNLS_batch() reaching max iterations!")
-    print("fcnNLS_batch2 done in %g steps, %.3fs, f=%g" % (i, time.time() - tic, rms(z - zhat)))
+    print(f"fcnNLS_batch2 done in {i:g} steps, {time.time() - tic:.3f}s, f={rms(z - zhat):g}")
 
     j = nt * 3
     sc = np.zeros((nc, 3))
diff --git a/utils/vid2images.py b/utils/vid2images.py
index 81deca8..aef9ffd 100644
--- a/utils/vid2images.py
+++ b/utils/vid2images.py
@@ -18,7 +18,7 @@
     cap.set(1, i)
     success, im = cap.read()  # read frame
     if success:
-        print("image %g/%g ..." % (cap.get(cv2.CAP_PROP_POS_FRAMES), cap.get(cv2.CAP_PROP_FRAME_COUNT)))
+        print(f"image {cap.get(cv2.CAP_PROP_POS_FRAMES):g}/{cap.get(cv2.CAP_PROP_FRAME_COUNT):g} ...")
         cv2.imwrite(newdir + str(i) + ".jpg", im)
     else:
         cap.release()
diff --git a/vidExample.py b/vidExample.py
index cbd1418..b1a81e9 100644
--- a/vidExample.py
+++ b/vidExample.py
@@ -162,7 +162,7 @@ def vidExamplefcn():
         # Print image[i] results
         proc_dt[i] = time.time() - tic
         S[i, :] = (i, proc_dt[i], vg.sum(), residuals, dt, B[i, 12] - t0, dr, r, dr / dt * 3.6)
-        print("%13g%13.3f%13g%13.3f%13.3f%13.3f%13.2f%13.2f%13.1f" % tuple(S[i, :]))
+        print("{:13g}{:13.3f}{:13g}{:13.3f}{:13.3f}{:13.3f}{:13.2f}{:13.2f}{:13.1f}".format(*tuple(S[i, :])))
 
         # imrgb = cv2.cvtColor(imbgr,cv2.COLOR_BGR2RGB)
         # plots.imshow(cv2.cvtColor(imrgb,cv2.COLOR_BGR2HSV_FULL)[:,:,0])
@@ -174,8 +174,8 @@ def vidExamplefcn():
     cap.release()  # Release the video capture object
 
     dta = time.time() - cput0
-    print("\nSpeed = %.2f +/- %.2f km/h\nRes = %.3f pixels" % (S[1:, 8].mean(), S[1:, 8].std(), S[1:, 3].mean()))
-    print("Processed %g images: %s in %.2fs (%.2ffps)\n" % (n, frames[:], dta, n / dta))
+    print(f"\nSpeed = {S[1:, 8].mean():.2f} +/- {S[1:, 8].std():.2f} km/h\nRes = {S[1:, 3].mean():.3f} pixels")
+    print(f"Processed {n:g} images: {frames[:]} in {dta:.2f}s ({n / dta:.2f}fps)\n")
 
     plots.plotresults(cam, im // 2 + imfirst // 2, P, S, B, bbox=boxb)  # // is integer division
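
Note on the conversion pattern (not part of the patch): every hunk swaps printf-style %-formatting for an f-string that carries the same format specs, so the printed output is unchanged. The sketch below is a minimal, self-contained check of that equivalence; the array S is hypothetical stand-in data shaped like the state matrix used in vidExample.py (column 8 = speed in km/h), not real tracker output.

    import numpy as np

    # Hypothetical stand-in for the S state matrix (column 8 = speed, km/h).
    S = np.array([[0.0] * 8 + [0.0],
                  [0.0] * 8 + [52.4],
                  [0.0] * 8 + [53.1]])

    # %-formatting and the f-string with the same specs produce identical strings.
    old = "Speed = %.2f +/- %.2f km/h" % (S[1:, 8].mean(), S[1:, 8].std())
    new = f"Speed = {S[1:, 8].mean():.2f} +/- {S[1:, 8].std():.2f} km/h"
    assert old == new  # both give "Speed = 52.75 +/- 0.35 km/h"

    # %s maps to a bare replacement field: both fall back to str() of the value,
    # which is why "x=%s" becomes "x={rms(delta)}" with no :g in the MSV.py and NLS.py hunks.
    delta = 1.2345678e-06
    assert "x=%s" % delta == f"x={delta}"

The one line converted to str.format() rather than an f-string, the fixed-width table row in vidExample.py, keeps its nine width specs unchanged; unpacking S[i, :] into positional fields is presumably why the formatter chose .format() there instead of embedding nine expressions in an f-string.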