Commit: Merge branch 'main' into insert_hg_2pt34
Showing 3 changed files with 200 additions and 3 deletions.
@@ -0,0 +1,197 @@
#!/usr/bin/env python3

# SPDX-License-Identifier: LGPL-3.0-or-later
# Copyright (C) 2024 Chao Peng
'''
A script to plot raw data output from the script g4MaterialScan_to_csv
'''

import os
import re
import argparse
import pandas as pd
import numpy as np

import matplotlib as mpl
from matplotlib import pyplot as plt
from matplotlib.collections import LineCollection, PatchCollection
from matplotlib.patches import Wedge
from mpl_toolkits.axes_grid1 import make_axes_locatable


'''
A helper function to convert a string (<min>[:<max>[:<step>]]) to an array
'''
def args_array(arg, step=1, include_end=True):
    vals = [float(x.strip()) for x in arg.split(':')]
    # empty or only one value
    if len(vals) < 2:
        return np.array(vals)
    # has step input
    if len(vals) > 2:
        step = vals[2]
    # inclusion of the endpoint (max)
    if include_end:
        vals[1] += step
    return np.arange(vals[0], vals[1], step)
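# Illustrative examples (not executed by the script): args_array('-4.0:4.0:0.1')
# covers -4.0 to 4.0 in steps of 0.1, with the endpoint kept by padding the stop
# value with one extra step; args_array('1.5') simply returns array([1.5]).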


def eta2theta(v_eta):
    return 2.*np.arctan(np.exp(-v_eta))
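# Illustrative note: this is the standard pseudorapidity relation
# theta = 2*arctan(exp(-eta)); e.g. eta = 0 maps to 90 degrees, while
# eta = +4/-4 map to roughly 2.1 and 177.9 degrees.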


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        prog='g4MaterialScan_raw_plot',
        description='A python script to draw a 2D plot of material thickness from raw outputs of g4MaterialScan_to_csv.'
    )
    parser.add_argument(
        '--path-format', default=r'scan_raw_eta={eta:.3f}_phi={phi:.3f}.csv',
        help='modify the path format, default is \"scan_raw_eta={eta:.3f}_phi={phi:.3f}.csv\".'
    )
    parser.add_argument(
        '--eta', default='-4.0:4.0:0.1',
        help='Eta range, in the format of \"<min>[:<max>[:<step>]]\".'
    )
    parser.add_argument(
        '--eta-values', default=None,
        help='A list of eta values, separated by \",\"; this option overrides --eta.'
    )
    parser.add_argument(
        '--phi', default='0', type=float,
        help='A single phi value used to look up the data files.'
    )
    parser.add_argument(
        '--path-lengths', default="0, 180, 600",
        help='Path length points, separated by \",\".'
    )
    parser.add_argument(
        '--sep', default='\t',
        help='Separator for the CSV file.'
    )
    parser.add_argument(
        '--z-max', type=float, default=600,
        help='Maximum z (x-axis) of the plot.'
    )
    parser.add_argument(
        '--z-min', type=float, default=-600,
        help='Minimum z (x-axis) of the plot.'
    )
    parser.add_argument(
        '--r-max', type=float, default=600,
        help='Maximum r (y-axis) of the plot.'
    )
    parser.add_argument(
        '--r-min', type=float, default=0,
        help='Minimum r (y-axis) of the plot.'
    )
    parser.add_argument(
        '--x0-max', type=float, default=None,
        help='Maximum X0 of the plot.'
    )
    parser.add_argument(
        '--x0-min', type=float, default=None,
        help='Minimum X0 of the plot.'
    )
    parser.add_argument(
        '--lambda-max', type=float, default=None,
        help='Maximum lambda of the plot.'
    )
    parser.add_argument(
        '--lambda-min', type=float, default=None,
        help='Minimum lambda of the plot.'
    )
    parser.add_argument(
        '--plot-scale', type=str, default='log',
        help='Plot scale; only \"log\" or \"linear\" is supported.'
    )
    parser.add_argument(
        '--output-path', type=str, default='mat_scan_2D.{output_format}',
        help='Path of the output plot; it supports str.format fields filled from the parsed arguments.'
    )
    parser.add_argument(
        '--output-format', type=str, default='png',
        help='Format of the plots; default is png, eps is also recommended (vector graphics).'
    )
    args = parser.parse_args()
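    # Example invocation (illustrative only; the script file name is assumed from
    # the parser's prog entry and may differ in practice):
    #   ./g4MaterialScan_raw_plot.py --eta='-2.0:2.0:0.2' --phi=0 --z-min=-300 --z-max=300 --r-max=250
    # which reads scan_raw_eta=*_phi=0.000.csv files from the current directory
    # and writes mat_scan_2D.png.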

    # get the path length points
    pls = np.array([float(x.strip()) for x in args.path_lengths.split(',')])
    if len(pls) < 2:
        print('Need at least two points in --path-lengths')
        exit(1)

    if args.eta_values is not None:
        etas = np.array([float(xval.strip()) for xval in args.eta_values.split(',')])
    else:
        etas = args_array(args.eta)

    norm = None
    if args.plot_scale.lower() == 'linear':
        norm = mpl.colors.Normalize
    elif args.plot_scale.lower() == 'log':
        norm = mpl.colors.LogNorm
    else:
        print('Error: unsupported plot scale {}, please choose it from [log, linear].'.format(args.plot_scale))
        exit(1)

    phi = args.phi
    zmin, zmax = args.z_min, args.z_max
    rmin, rmax = args.r_min, args.r_max

    # read raw output data, collect information
    patches, x0_array, lmd_array = [], [], []
    thetas = eta2theta(etas)
    th_diffs = np.hstack([0., -np.diff(thetas)/2., 0.])
    th_mins = thetas - th_diffs[:-1]
    th_maxes = thetas + th_diffs[1:]
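    # Illustrative note: each scan angle is widened by the half-gaps to its
    # neighbouring scan angles (zero padding at both ends), so for a reasonably
    # fine eta scan the wedges approximately tile the (z, r) plane.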

    # iterate every eta (theta) scan data
    for i, (eta, theta, th_min, th_max) in enumerate(zip(etas, thetas, th_mins, th_maxes)):
        # read data file
        dpath = args.path_format.format(eta=eta, phi=phi)
        if not os.path.exists(dpath):
            print('Error: cannot find data file \"{}\", please check the path.'.format(dpath))
            exit(1)
        df = pd.read_csv(args.path_format.format(eta=eta, phi=phi), sep=args.sep, index_col=0)
        pls = df['path_length'].values
        x0s = df['X0'].cumsum().values
        lmds = df['lambda'].cumsum().values
        # a virtual bin size for the scan (fill all the 2D phase space)
        angle_min = th_min/np.pi*180.
        angle_max = th_max/np.pi*180.
        # determine if the lines are in range
        # segments of each scan (assuming start from 0)
        for seg_start, seg_end, x0, lmd in zip(np.hstack([0., pls[:-1]]), pls, x0s, lmds):
            # start point is already out of the plot range
            z0, r0 = np.cos(theta)*seg_start, np.sin(theta)*seg_start
            in_range = (z0 <= zmax) & (z0 >= zmin) & (r0 <= rmax) & (r0 >= rmin)
            if not in_range:
                continue
            x0_array.append(x0)
            lmd_array.append(lmd)
            width = seg_end - seg_start
            patches.append(Wedge((0., 0.), seg_end, angle_min, angle_max, width=width))

    # generate plots
    cmap = mpl.colormaps['viridis']
    plot_meta = [
        ('Cumulative X0', x0_array, dict(cmap=cmap, norm=norm(vmin=args.x0_min, vmax=args.x0_max))),
        (r'Cumulative $\Lambda$', lmd_array, dict(cmap=cmap, norm=norm(vmin=args.lambda_min, vmax=args.lambda_max))),
    ]

    fig, axs = plt.subplots(len(plot_meta), 1, figsize=(10, 4*len(plot_meta)), dpi=600)
    for ax, (zlabel, data, p_kwargs) in zip(axs.flat, plot_meta):
        ax.set_xlim(zmin, zmax)
        ax.set_ylim(rmin, rmax)
        p = PatchCollection(patches, **p_kwargs)
        p.set_array(data)
        ax.add_collection(p)
        ax.set_xlabel('Z [cm]')
        ax.set_ylabel(r'R [cm] ($\phi = {}^{{\circ}}$)'.format(phi))
        # color bar
        divider = make_axes_locatable(ax)
        cax = divider.append_axes('right', size='3%', pad=0.05)
        cbar = fig.colorbar(p, cax=cax, orientation='vertical')
        cbar.ax.set_ylabel(zlabel, rotation=90)
    fig.savefig(args.output_path.format(**vars(args)), format=args.output_format)