Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Roman_Rubin Updates #242

Merged
merged 3 commits into from
Dec 31, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions batch_scripts/slurm_example_parallel.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
#!/bin/bash
# Example Slurm batch script: run a DESCQA test with 8 parallel tasks
# (32 CPUs each) on a single CPU node.
#SBATCH -A m1727
#SBATCH -C cpu
#SBATCH -q regular
#SBATCH -t 8:00:00
#SBATCH --nodes=1
#SBATCH --ntasks=8
#SBATCH --cpus-per-task=32

# One OpenMP thread per allocated CPU of each task (matches --cpus-per-task).
export OMP_NUM_THREADS=32

# -p: catalog base path (presumably the GCRCatalogs root -- verify against
#     run_master.sh), -c: catalog name, -t: validation test to run.
# NOTE(review): assumes run_master.sh is in the submission directory.
./run_master.sh -p /global/cfs/projectdirs/lsst/groups/SRV/gcr-catalogs -c skysim5000_v1.2 -t tpcf_Wang2013_rSDSS_jack
13 changes: 13 additions & 0 deletions batch_scripts/slurm_example_serial.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
#!/bin/bash
# Example Slurm batch script: run a single DESCQA test serially
# (one task) on a full CPU node.
#SBATCH -A m1727
#SBATCH -C cpu
#SBATCH -q regular
#SBATCH -t 8:00:00
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=128

# NOTE(review): 32 OpenMP threads with --cpus-per-task=128 undersubscribes
# the allocation -- confirm whether this is intentional or should be 128.
export OMP_NUM_THREADS=32

# -c: catalog name, -t: validation test to run.
./run_master.sh -c skysim5000_v1.2 -t Nz_r_DEEP2_JAN_fig

12 changes: 12 additions & 0 deletions batch_scripts/slurm_example_serial_DSigma.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
#!/bin/bash
# Example Slurm batch script: serial run of the delta-sigma (galaxy-galaxy
# lensing) DESCQA test on a full CPU node.
#SBATCH -A m1727
#SBATCH -C cpu
#SBATCH -q regular
#SBATCH -t 8:00:00
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=128

# NOTE(review): 32 OpenMP threads with --cpus-per-task=128 undersubscribes
# the allocation -- confirm whether this is intentional or should be 128.
export OMP_NUM_THREADS=32

# -c: catalog name (image variant), -t: validation test to run.
./run_master.sh -c skysim5000_v1.2_image -t delta_sigma_sdss_lowz
12 changes: 12 additions & 0 deletions batch_scripts/slurm_example_serial_shear.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
#!/bin/bash
# Example Slurm batch script: serial run of the shear DESCQA test
# on a full CPU node.
#SBATCH -A m1727
#SBATCH -C cpu
#SBATCH -q regular
#SBATCH -t 8:00:00
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=128

# NOTE(review): 32 OpenMP threads with --cpus-per-task=128 undersubscribes
# the allocation -- confirm whether this is intentional or should be 128.
export OMP_NUM_THREADS=32

# -c: catalog name, -t: validation test to run.
./run_master.sh -c skysim5000_v1.2 -t shear
12 changes: 12 additions & 0 deletions batch_scripts/slurm_example_serial_tpcf_color.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
#!/bin/bash
# Example Slurm batch script: serial run of the color-selected two-point
# correlation function DESCQA test on a full CPU node.
#SBATCH -A m1727
#SBATCH -C cpu
#SBATCH -q regular
#SBATCH -t 8:00:00
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --cpus-per-task=128

# NOTE(review): 32 OpenMP threads with --cpus-per-task=128 undersubscribes
# the allocation -- confirm whether this is intentional or should be 128.
export OMP_NUM_THREADS=32

# -c: catalog name (image variant), -t: validation test to run.
./run_master.sh -c skysim5000_v1.2_image -t tpcf_Zehavi2011_color_fig
2 changes: 1 addition & 1 deletion descqa/CheckColors.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ class kernelCompare:
def __init__(self,D1, D2):
self._D1 = D1
self._D2 = D2
self._XY = np.vstack((D1, D2))
self._XY = np.vstack(list(D1, D2))
self._scale = self._computeScale(self._XY)
self._n1 = len(D1)
self._n2 = len(D2)
Expand Down
2 changes: 1 addition & 1 deletion descqa/DeltaSigmaTest.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,7 @@ def run_on_single_catalog(self, catalog_instance, catalog_name, output_dir):
gt = gt/counts

outfile = os.path.join(output_dir, 'DS_'+str(self.data)+'_'+str(i)+'.dat')
np.savetxt(outfile, np.vstack((rp.value, gt.value)).T)
np.savetxt(outfile, np.vstack(list(rp.value, gt.value)).T)


if self.data == 'sdss_lowz':
Expand Down
54 changes: 34 additions & 20 deletions descqa/EllipticityDistribution.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ class EllipticityDistribution(BaseValidationTest):
'band_Mag': ['V', 'r', 'g'],
'zlo': 0.0,
'zhi': 2.0,
'definition': 'e_squared',
'definition': 'e_distortion',
'morphology': ('LRG', 'early', 'disk', 'late'),
'filename_template': 'ellipticity/COSMOS/joachimi_et_al_2013/{}{}_{}.dat',
'file-info': {
Expand All @@ -46,22 +46,27 @@ class EllipticityDistribution(BaseValidationTest):
'disk':{'B/T_min':0., 'B/T_max':0.2, 'mag_lo':24., 'Mag_hi':-21., 'Mag_lo':-17.},
'late':{'B/T_min':0.4, 'B/T_max':0.7, 'mag_lo':24., 'Mag_hi':-21., 'Mag_lo':-17.},
'irregular':{'B/T_min':0.0, 'B/T_max':1.0},
'ancillary_quantities':['bulge_to_total_ratio_i', 'bulge_to_total_ratio_stellar'],
'ancillary_quantities':['bulge_to_total_ratio_i', 'bulge_to_total_ratio_stellar',
'bulge_to_total_ratio'],
'ancillary_keys':['B/T'],
},
},
}

#define ellipticity functions
@staticmethod
def e_default(e):
def e_shear(e):
return e

@staticmethod
def e_squared(a, b):
def e_distortion(a, b):
q = b/a
return (1-q**2)/(1+q**2)

@staticmethod
def e_shear_to_distortion(e):
return 2*e/(1+e**2)

#plotting constants
lw2 = 2
fsize = 16
Expand All @@ -86,6 +91,8 @@ def __init__(self, z='redshift_true', zlo=0., zhi=2., N_ebins=40, observation=''
self.yfont_size = kwargs.get('yfont_size', 14)
self.legend_size = kwargs.get('legend_size', 6)
self.legend_title_size = kwargs.get('legend_title_size', 8)
self.catalog_ellipticity_definition = kwargs.get('ellipticity_definition', 'e_shear')
self.convert_to_distortion = kwargs.get('convert_to_distortion', False)

possible_mag_fields = ('mag_{}_lsst',
'mag_{}_sdss',
Expand All @@ -103,16 +110,16 @@ def __init__(self, z='redshift_true', zlo=0., zhi=2., N_ebins=40, observation=''
possible_native_luminosities = {'V':'otherLuminosities/totalLuminositiesStellar:V:rest',
}

possible_ellipticity_definitions = {'e_default':{'possible_quantities':[['ellipticity', 'ellipticity_true']],
'function':self.e_default,
'xaxis_label': r'$e = (1-q)/(1+q)$',
'file_label':'e',
},
'e_squared':{'possible_quantities':[['size', 'size_true'], ['size_minor', 'size_minor_true']],
'function':self.e_squared,
'xaxis_label': r'$e = (1-q^2)/(1+q^2)$',
'file_label':'e2',
},
possible_ellipticity_definitions = {'e_shear':{'possible_quantities':[['ellipticity', 'ellipticity_true']],
'function':self.e_shear,
'xaxis_label': r'$e = (1-q)/(1+q)$',
'file_label':'es',
},
'e_distortion':{'possible_quantities':[['size', 'size_true'], ['size_minor', 'size_minor_true']],
'function':self.e_distortion,
'xaxis_label': r'$e = (1-q^2)/(1+q^2)$',
'file_label':'ed',
},
}
#binning
self.N_ebins = N_ebins
Expand Down Expand Up @@ -150,10 +157,16 @@ def __init__(self, z='redshift_true', zlo=0., zhi=2., N_ebins=40, observation=''
self.Mag_hi = dict(zip(self.morphology, [self.validation_data.get('cuts', {}).get(m, {}).get('Mag_hi', Mag_hi) for m in self.morphology]))

#check for ellipticity definitions
self.possible_quantities = possible_ellipticity_definitions[self.validation_data.get('definition', 'e_default')]['possible_quantities']
self.ellipticity_function = possible_ellipticity_definitions[self.validation_data.get('definition', 'e_default')].get('function')
self.xaxis_label = possible_ellipticity_definitions[self.validation_data.get('definition', 'e_default')].get('xaxis_label')
self.file_label = possible_ellipticity_definitions[self.validation_data.get('definition', 'e_default')].get('file_label')
self.possible_quantities = possible_ellipticity_definitions[self.validation_data.get('definition', 'e_shear')]['possible_quantities']
self.ellipticity_function = possible_ellipticity_definitions[self.validation_data.get('definition', 'e_shear')].get('function')
self.xaxis_label = possible_ellipticity_definitions[self.validation_data.get('definition', 'e_shear')].get('xaxis_label')
self.file_label = possible_ellipticity_definitions[self.validation_data.get('definition', 'e_shear')].get('file_label')

#check for ellipticity conversions and overwrite previous definitions
if self.convert_to_distortion and self.catalog_ellipticity_definition == 'e_shear':
self.possible_quantities = possible_ellipticity_definitions[self.catalog_ellipticity_definition]['possible_quantities']
self.ellipticity_function = self.e_shear_to_distortion
print('Converting from e_shear to e_distortion')

#check for native quantities
self.native_luminosities = dict(zip([band for band in possible_native_luminosities if band in self.band_Mag],\
Expand Down Expand Up @@ -235,6 +248,7 @@ def run_on_single_catalog(self, catalog_instance, catalog_name, output_dir):
required_quantities.append(found_quantity)
if not catalog_instance.has_quantities(required_quantities + self.filter_quantities):
return TestResult(skipped=True, summary='Missing some required quantities: {}'.format(', '.join(required_quantities)))
print('Required quantities', required_quantities)
ancillary_quantity = None
if self.possible_ancillary_quantities is not None:
ancillary_quantity = catalog_instance.first_available(*self.possible_ancillary_quantities)
Expand Down Expand Up @@ -262,7 +276,7 @@ def run_on_single_catalog(self, catalog_instance, catalog_name, output_dir):
fontsize=self.yfont_size) #setup a common axis label

#initialize arrays for storing histogram sums
N_array = np.zeros((self.nrows, self.ncolumns, len(self.ebins)-1), dtype=np.int)
N_array = np.zeros((self.nrows, self.ncolumns, len(self.ebins)-1), dtype=int)
sume_array = np.zeros((self.nrows, self.ncolumns, len(self.ebins)-1))
sume2_array = np.zeros((self.nrows, self.ncolumns, len(self.ebins)-1))

Expand Down Expand Up @@ -477,7 +491,7 @@ def save_quantities(keyname, results, filename, comment=''):
else:
fields = ('e_ave', keyname)
header = ', '.join(('Data columns are: <e>', keyname, ' '))
np.savetxt(filename, np.vstack((results[k] for k in fields)).T, fmt='%12.4e', header=header+comment)
np.savetxt(filename, np.vstack([results[k] for k in fields]).T, fmt='%12.4e', header=header+comment)


def conclude_test(self, output_dir):
Expand Down
22 changes: 15 additions & 7 deletions descqa/NumberDensityVersusRedshift.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,14 +106,16 @@ def __init__(self, band='i', N_zbins=10, zlo=0., zhi=1.1,
# pylint: disable=W0231

#catalog quantities
self.truncate_cat_name = kwargs.get('truncate_cat_name', False)
self.truncate_cat_name = kwargs.get('truncate_cat_name', 0)
self.replace_cat_name = kwargs.get('replace_cat_name', {})
self.title_in_legend = kwargs.get('title_in_legend', False)
self.legend_location = kwargs.get('legend_location', 'upper left')
self.font_size = kwargs.get('font_size', 16)
self.legend_size = kwargs.get('legend_size', 10)
self.tick_size = kwargs.get('tick_size', 12)
self.adjust_ylim = kwargs.get('adjust_ylim', 1.3)
self.include_reference = kwargs.get('include_reference', False)
self.insert_line_break = kwargs.get('insert_line_break', True)
self.rest_frame = rest_frame
if self.rest_frame:
possible_mag_fields = ('Mag_true_{}_lsst_z0',
Expand Down Expand Up @@ -250,8 +252,11 @@ def run_on_single_catalog(self, catalog_instance, catalog_name, output_dir):
filelabel = '_'.join((filtername, self.band))

#setup plots
if self.truncate_cat_name:
catalog_name = re.split('_', catalog_name)[0]
if self.truncate_cat_name > 0:
possible_names = re.split('_', catalog_name)
catalog_name = possible_names[0]
for n in range(1, self.truncate_cat_name):
catalog_name = '_'.join((catalog_name, possible_names[n]))
if self.replace_cat_name:
for k, v in self.replace_cat_name.items():
catalog_name = re.sub(k, v, catalog_name)
Expand All @@ -261,7 +266,7 @@ def run_on_single_catalog(self, catalog_instance, catalog_name, output_dir):
catalog_marker = next(self.markers)

#initialize arrays for storing histogram sums
N_array = np.zeros((self.nrows, self.ncolumns, len(self.zbins)-1), dtype=np.int)
N_array = np.zeros((self.nrows, self.ncolumns, len(self.zbins)-1), dtype=int)
sumz_array = np.zeros((self.nrows, self.ncolumns, len(self.zbins)-1))

jackknife_data = {}
Expand Down Expand Up @@ -345,8 +350,11 @@ def run_on_single_catalog(self, catalog_instance, catalog_name, output_dir):
Nerrors = np.sqrt(np.diag(covariance))

#make subplot
catalog_label = ' '.join((catalog_name, cut_label.replace(self.band, filtername + ' ' + self.band)))
validation_label = ' '.join((self.validation_data.get('label', ''), cut_label))
line_break = '\n' if self.insert_line_break else ''
catalog_label = ' '.join((catalog_name, line_break,
cut_label.replace(self.band, filtername + ' ' + self.band)))
val_label = self.validation_data.get('label', '') if self.include_reference else ''
validation_label = ' '.join((val_label, cut_label)) if val_label else cut_label
key = cut_label.replace('$', '').replace('\\leq', '<=')
results[key] = {'meanz': meanz, 'total':total, 'N':N, 'N+-':Nerrors}
self.catalog_subplot(ax_this, meanz, N, Nerrors, catalog_color, catalog_marker, catalog_label)
Expand Down Expand Up @@ -403,7 +411,7 @@ def get_jackknife_errors(self, N_jack, jackknife_data, N):
_, jack_labels, _ = k_means(n_clusters=N_jack, random_state=0, X=nn, n_init='auto')

#make histograms for jackknife regions
Njack_array = np.zeros((N_jack, len(self.zbins)-1), dtype=np.int)
Njack_array = np.zeros((N_jack, len(self.zbins)-1), dtype=int)
for nj in range(N_jack):
Njack_array[nj] = np.histogram(jackknife_data[self.zlabel][jack_labels != nj], self.zbins)[0]

Expand Down
4 changes: 2 additions & 2 deletions descqa/StellarMassFunction.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ def run_on_single_catalog(self, catalog_instance, catalog_name, output_dir):
catalog_name = catalog_name.partition("_")[0]

#initialize arrays for storing histogram sums
N_array = np.zeros((self.nrows, self.ncolumns, len(self.Mbins)-1), dtype=np.int)
N_array = np.zeros((self.nrows, self.ncolumns, len(self.Mbins)-1), dtype=int)
sumM_array = np.zeros((self.nrows, self.ncolumns, len(self.Mbins)-1))
sumM2_array = np.zeros((self.nrows, self.ncolumns, len(self.Mbins)-1))

Expand Down Expand Up @@ -320,7 +320,7 @@ def save_quantities(keyname, results, filename, comment=''):
else:
fields = ('M'+keyname, keyname)
header = ', '.join(('Data columns are: <M>', keyname, ' '))
np.savetxt(filename, np.vstack((results[k] for k in fields)).T, fmt='%12.4e', header=header+comment)
np.savetxt(filename, np.vstack(list(results[k] for k in fields)).T, fmt='%12.4e', header=header+comment)


def conclude_test(self, output_dir):
Expand Down
18 changes: 12 additions & 6 deletions descqa/apparent_mag_func_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def __init__(self, band='r', band_lim=(24.0, 27.5), fractional_tol=0.4, observat
"""
# pylint: disable=super-init-not-called

self.truncate_cat_name = kwargs.get('truncate_cat_name', False)
self.truncate_cat_name = kwargs.get('truncate_cat_name', 0)
self.title_in_legend = kwargs.get('title_in_legend', False)
self.skip_label_detail = kwargs.get('skip_label_detail', False)
self.font_size = kwargs.get('font_size', 16)
Expand All @@ -52,6 +52,7 @@ def __init__(self, band='r', band_lim=(24.0, 27.5), fractional_tol=0.4, observat
self.print_title = kwargs.get('print_title', False)
self.min_mag = kwargs.get('min_mag', 19.)
self.replace_cat_name = kwargs.get('replace_cat_name', {})
self.exclude_sky_area = kwargs.get('exclude_sky_area', True)

# catalog quantities needed
possible_mag_fields = ('mag_{}_cModel',
Expand Down Expand Up @@ -206,14 +207,19 @@ def run_on_single_catalog(self, catalog_instance, catalog_name, output_dir):
upper_ax, lower_ax = fig.add_axes(upper_rect), fig.add_axes(lower_rect)

# plot on both this plot and any summary plots
if self.truncate_cat_name:
catalog_name = re.split('_', catalog_name)[0]
if self.truncate_cat_name > 0:
possible_names = re.split('_', catalog_name)
catalog_name = possible_names[0]
for n in range(1, self.truncate_cat_name):
catalog_name = '_'.join((catalog_name, possible_names[n]))
if self.replace_cat_name:
for k, v in self.replace_cat_name.items():
catalog_name = re.sub(k, v, catalog_name)

upper_ax.plot(mag_bins, sampled_N, '-', label=catalog_name + sky_area_label)
self.summary_upper_ax.plot(mag_bins, sampled_N, '-', label=catalog_name + sky_area_label)

cat_label = catalog_name if self.exclude_sky_area else catalog_name + sky_area_label

upper_ax.plot(mag_bins, sampled_N, '-', label=cat_label)
self.summary_upper_ax.plot(mag_bins, sampled_N, '-', label=cat_label)

# plot validation data
n = self.validation_data['n(<mag)']
Expand Down
4 changes: 2 additions & 2 deletions descqa/clf_redmapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,11 +39,11 @@ def count_galaxies_p_cen(cenmag, lumbins, p_cen):
dlum = lumbins[1] - lumbins[0]
minlum = lumbins[0] - dlum / 2.0
chto_countArray = np.zeros([len(cenmag), nlum])
mybin = np.floor((cenmag[:, :] - minlum) / dlum).astype(np.int)
mybin = np.floor((cenmag[:, :] - minlum) / dlum).astype(int)
p_cen = p_cen.reshape(-1, 1)
ncen = np.zeros(p_cen.shape)
ncen = np.hstack(((np.ones(p_cen.shape[0])).reshape(-1, 1), ncen[:, :-1])).astype(
np.int
int
)
weight = p_cen

Expand Down
4 changes: 2 additions & 2 deletions descqa/configs/ApparentMagFuncTest_HSCr_fig.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,10 @@ observation: 'HSC'
included_by_default: true
print_title: false
x_lower_limit: 19
truncate_cat_name: true
truncate_cat_name: 2
font_size: 18
legend_size: 12
skip_label_detail: true
replace_cat_name: {'dc2':'DC2'}
replace_cat_name: {'roman_rubin':'OpenUniverse2024'}

description: 'Plot N(<mag) distributions for selected magnitude bounds in specified band and compare with extrpolated fits to HSC deep fields data.'
3 changes: 2 additions & 1 deletion descqa/configs/Nz_r_Coil2004_maglim_fig.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@ observation: 'Coil2004_maglim'
jackknife: True
N_jack: 30
use_diagonal_only: True
truncate_cat_name: True
truncate_cat_name: 2
replace_cat_name: {'roman_rubin':'OpenUniverse2024'}
legend_size: 12
font_size: 18

Expand Down
23 changes: 23 additions & 0 deletions descqa/configs/ellipticity_shear_COSMOS_fig.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# DESCQA config: ellipticity distribution test vs. COSMOS (Joachimi et al. 2013).
subclass_name: EllipticityDistribution.EllipticityDistribution
# Redshift selection range for the catalog sample.
zlo: 0.
zhi: 2.
# Number of ellipticity histogram bins.
N_ebins: 40
observation: 'COSMOS_2013'
normed: True
# Convert catalog shear-definition ellipticities, e = (1-q)/(1+q), to
# distortion definition, e = (1-q^2)/(1+q^2), before comparing to the data.
convert_to_distortion: True
# Percentiles at which catalog and validation distributions are compared,
# and the ellipticity ranges searched for each percentile point.
validation_percentile_points:
- 10
- 50
- 90
validation_percentile_ranges:
- [0, 0.4]
- [0.3, 0.7]
- [0.6, 1]
# Plot/label options. NOTE(review): other configs in this PR switched
# truncate_cat_name from a boolean to an integer word count -- confirm
# which form EllipticityDistribution expects.
truncate_cat_name: true
title_in_legend: true
yfont_size: 14
xfont_size: 12
legend_size: 10
legend_title_size: 10

description: 'Plot ellipticity distributions and compare with distributions from Joachimi et al 2013 COSMOS data'
Loading
Loading