Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

change calib into photoCalib and propose a way to maintain a non GCR/… #348

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
56 changes: 34 additions & 22 deletions scripts/merge_source_cat.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,33 +5,36 @@
import pandas as pd

from lsst.geom import radians
from lsst.afw.geom.spherePoint import SpherePoint
from lsst.afw.geom import SpherePoint

from lsst.daf.persistence import Butler
from lsst.daf.persistence.butlerExceptions import NoResults

from astropy.coordinates import SkyCoord, matching
import astropy.units as u

import GCRCatalogs
from GCRCatalogs.dc2_source import DC2SourceCatalog
try:
import GCRCatalogs
from GCRCatalogs.dc2_source import DC2SourceCatalog


class DummyDC2SourceCatalog(GCRCatalogs.BaseGenericCatalog):
"""
A dummy reader class that can be used to generate all native quantities
required for the DPDD columns in DC2 Source Catalog
"""
def __init__(self, schema_version=None):
self._quantity_modifiers = DC2SourceCatalog._generate_modifiers(dm_schema_version=schema_version)

@property
def required_native_quantities(self):
class DummyDC2SourceCatalog(GCRCatalogs.BaseGenericCatalog):
"""
the set of native quantities that are required by the quantity modifiers
A dummy reader class that can be used to generate all native quantities
required for the DPDD columns in DC2 Source Catalog
"""
return set(self._translate_quantities(self.list_all_quantities()))

def __init__(self, schema_version=None):
self._quantity_modifiers = DC2SourceCatalog._generate_modifiers(dm_schema_version=schema_version)

@property
def required_native_quantities(self):
"""
the set of native quantities that are required by the quantity modifiers
"""
return set(self._translate_quantities(self.list_all_quantities()))
NOGCR = False
except ImportError:
print('No GCR Catalog available')
NOGCR = True

def extract_and_save_visit(butler, visit, filename, object_table=None,
dm_schema_version=3,
Expand All @@ -53,20 +56,23 @@ def extract_and_save_visit(butler, visit, filename, object_table=None,
"""
data_refs = butler.subset('src', dataId={'visit': visit})

columns_to_keep = list(DummyDC2SourceCatalog(dm_schema_version).required_native_quantities)
if NOGCR:
columns_to_keep = None
else:
        columns_to_keep = list(DummyDC2SourceCatalog(dm_schema_version).required_native_quantities)

collected_cats = pd.DataFrame()
for dr in data_refs:
if not dr.datasetExists():
if verbose:
print("Skipping non-existent dataset: ", dr.dataId)
continue

if verbose:
print("Processing ", dr.dataId)
src_cat = load_detector(dr, object_table=object_table,
columns_to_keep=columns_to_keep,
verbose=verbose, **kwargs)
print(src_cat)
if len(src_cat) == 0:
if verbose:
print(" No good entries for ", dr.dataId)
Expand All @@ -82,7 +88,10 @@ def extract_and_save_visit(butler, visit, filename, object_table=None,
print("No sources collected from ", data_refs.dataId)
return

collected_cats.to_parquet(filename)
if NOGCR:
collected_cats.to_hdf(filename,"forced_%d"%visit,format='fixed')
    else:
collected_cats.to_parquet(filename)


def load_detector(data_ref, object_table=None, matching_radius=1,
Expand Down Expand Up @@ -129,7 +138,7 @@ def load_detector(data_ref, object_table=None, matching_radius=1,
cat['filter'] = data_ref.dataId['filter']

# Calibrate magnitudes and fluxes
calib = data_ref.get('calexp_calib')
calib = data_ref.get('calexp_photoCalib')
calib.setThrowOnNegativeFlux(False)

mag, mag_err = calib.getMagnitude(cat[flux_names['psf_flux']].values,
Expand Down Expand Up @@ -434,7 +443,10 @@ def unique_in_order(possible_duplicates):
butler = Butler(args.repo)
for visit in args.visits:
filebase = '{:s}_visit_{:d}'.format(args.name, visit)
filename = os.path.join(args.output_dir, filebase + '.parquet')
if NOGCR:
filename = os.path.join(args.output_dir, filebase + '.hdf5')
else:
filename = os.path.join(args.output_dir, filebase + '.parquet')
extract_and_save_visit(butler, visit, filename,
object_table=object_table,
matching_radius=args.radius,
Expand Down
2 changes: 1 addition & 1 deletion scripts/merge_tract_cat.py
Original file line number Diff line number Diff line change
Expand Up @@ -194,7 +194,7 @@ def load_patch(butler_or_repo, tract, patch,
# Then join in memory space.
cat = cat.asAstropy().to_pandas()

calib = butler.get('deepCoadd_calexp_calib', this_data)
calib = butler.get('deepCoadd_calexp_photoCalib', this_data)
calib.setThrowOnNegativeFlux(False)

mag, mag_err = calib.getMagnitude(cat[flux_names['psf_flux']].values, cat[flux_names['psf_flux_err']].values)
Expand Down