Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add corrections #33

Merged
merged 34 commits into from
Sep 7, 2023
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
34 commits
Select commit Hold shift + click to select a range
81e1683
Merge pull request #10 from askap-vast/add-crop
ddobie Jul 6, 2023
3e6dc1d
Pulled in Andrew's code for corrections and re-wrote the entire corre…
AkashA98 Jul 12, 2023
ce61320
Cleaned up minor naming issues with variables
AkashA98 Jul 12, 2023
270ec3e
Removed redundant code
AkashA98 Jul 12, 2023
66919e1
Fixed quantities with units; component files matching made easy
AkashA98 Jul 14, 2023
842267d
Start of reorg
hansenjiang Aug 6, 2023
fcd07cd
created scripts directory
hansenjiang Aug 6, 2023
b417e34
created docker directory
hansenjiang Aug 6, 2023
f696681
scripts directory
hansenjiang Aug 6, 2023
a906540
added READMEs to explain directories
hansenjiang Aug 6, 2023
852ca0e
moved documentation instructions to docs
hansenjiang Aug 6, 2023
d0fb958
separating logic - cleanup module
hansenjiang Aug 9, 2023
66d50f9
moved logic from cli calls into relevant modules
hansenjiang Aug 9, 2023
07ae31c
Corrected the wrong path for the catalog files
AkashA98 Aug 10, 2023
660245f
Merge pull request #34 from askap-vast/dev-reorg
mubdi Aug 10, 2023
b2fccaf
Re-organized code so that this can be passed to cropping, added docst…
AkashA98 Aug 14, 2023
c3c020d
Fixed typos
AkashA98 Aug 14, 2023
cd14713
New log message
AkashA98 Aug 14, 2023
a4d316d
Pulled in Andrew's code for corrections and re-wrote the entire corre…
AkashA98 Jul 12, 2023
77e49b1
Cleaned up minor naming issues with variables
AkashA98 Jul 12, 2023
efaf369
Fixed quantities with units; component files matching made easy
AkashA98 Jul 14, 2023
7e1be37
Corrected the wrong path for the catalog files
AkashA98 Aug 10, 2023
1d3bd39
Re-organized code so that this can be passed to cropping, added docst…
AkashA98 Aug 14, 2023
d4477a8
Fixed typos
AkashA98 Aug 14, 2023
513d2c3
New log message
AkashA98 Aug 14, 2023
bb00cc5
Merge branch 'add_corrections' of github.com:askap-vast/vast-post-pro…
AkashA98 Aug 14, 2023
005f45f
Deleted older corrections cli file
AkashA98 Aug 14, 2023
27395f2
Make new directories only when write_output=True
AkashA98 Aug 15, 2023
ef8698e
Updated the filtering function for catalogs
AkashA98 Aug 16, 2023
b495dba
Deal with all epochs or single epoch the same way
AkashA98 Aug 16, 2023
3450e47
User decides whether to skip entire epoch or a single file
AkashA98 Aug 17, 2023
903b04b
changed variables to function arguments for filtering sources
AkashA98 Aug 21, 2023
ffeae45
Tested it on a couple of epoch, changed code to use f-strings
AkashA98 Aug 21, 2023
f3a5ae8
Added catalog filtering parameters as CLI arguments.
AkashA98 Aug 28, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 28 additions & 0 deletions vast_post_processing/cli/run_corrections.py
mubdi marked this conversation as resolved.
Show resolved Hide resolved
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,29 @@ def main(
"arcsec for `catalog`. First argument is major axis followed by minor axis."
),
),
flux_limit: Optional[float] = typer.Option(
0,
help="Flux limit to select sources (sources with peak flux "
"> this will be selected). Defaults to 0.",
),
snr_limit: Optional[float] = typer.Option(
20,
help="SNR limit to select sources (sources with SNR > this "
"will be selected). Defaults to 20.",
),
nneighbor: Optional[float] = typer.Option(
1,
help="Distance to nearest neighbor (in arcmin). Sources with "
"neighbors < this will be removed. Defaults to 1.",
),
apply_flux_limit: Optional[bool] = typer.Option(
True,
help="Flag to decide to apply flux limit. Defaults to True",
),
select_point_sources: Optional[bool] = typer.Option(
True,
help="Flag to decide to select point sources. Defaults to True",
),
outdir: Optional[str] = typer.Option(
None,
help="Stem of the output directory to store the corrected images and catalogs to. The default"
Expand Down Expand Up @@ -102,6 +125,11 @@ def main(
condon=condon,
psf_ref=psf_ref,
psf=psf,
flux_limit=flux_limit,
snr_limit=snr_limit,
nneighbor=nneighbor,
apply_flux_limit=apply_flux_limit,
select_point_sources=select_point_sources,
outdir=outdir,
overwrite=overwrite,
skip_on_missing=skip_on_missing,
Expand Down
136 changes: 86 additions & 50 deletions vast_post_processing/corrections.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,11 @@ def vast_xmatch_qc(
fix_b: bool = False,
positional_unit: u.Unit = u.Unit("arcsec"),
flux_unit: u.Unit = u.Unit("mJy"),
flux_limit: float = 0,
snr_limit: float = 20,
nneighbor: float = 1,
apply_flux_limit: bool = True,
select_point_sources: bool = True,
crossmatch_output: Optional[str] = None,
csv_output: Optional[str] = None,
):
Expand All @@ -56,6 +61,15 @@ def vast_xmatch_qc(
Defaults to u.Unit("arcsec").
flux_unit (u.Unit, optional): output unit in which the flux scale is given.
Defaults to u.Unit("mJy").
flux_limit (float, optional): Flux limit to select sources (sources with peak flux
> this will be selected). Defaults to 0.
snr_limit (float, optional): SNR limit to select sources (sources with SNR > this
will be selected). Defaults to 20.
nneighbor (float, optional): Distance to nearest neighbor (in arcmin). Sources with
neighbors < this will be removed. Defaults to 1.
apply_flux_limit (bool, optional): Flag to decide to apply flux limit. Defaults to True.
select_point_sources (bool, optional): Flag to decide to select point sources.
Defaults to True
crossmatch_output (Optional[str], optional): File path to write the crossmatch output.
Defaults to None, which means no file is written
csv_output (Optional[str], optional): File path to write the flux/astrometric corrections.
Expand All @@ -77,12 +91,22 @@ def vast_xmatch_qc(
psf=psf_reference,
condon=condon,
input_format="selavy",
flux_limit=flux_limit,
snr_limit=snr_limit,
nneighbor=nneighbor,
apply_flux_limit=apply_flux_limit,
select_point_sources=select_point_sources,
)
catalog = Catalog(
catalog_path,
psf=psf,
condon=condon,
input_format="selavy",
flux_limit=flux_limit,
snr_limit=snr_limit,
nneighbor=nneighbor,
apply_flux_limit=apply_flux_limit,
select_point_sources=select_point_sources,
)

# perform the crossmatch
Expand Down Expand Up @@ -125,37 +149,28 @@ def vast_xmatch_qc(

if csv_output is not None:
# output has been requested

if True: # csv_output is not None:
csv_output_path = Path(csv_output) # ensure Path object
sbid = catalog.sbid if catalog.sbid is not None else ""
if not csv_output_path.exists():
f = open(csv_output_path, "w")
print(
"field,release_epoch,sbid,ra_correction,dec_correction,ra_madfm,"
"dec_madfm,flux_peak_correction_multiplicative,flux_peak_correction_additive,"
"flux_peak_correction_multiplicative_err,flux_peak_correction_additive_err,"
"n_sources",
file=f,
)
else:
f = open(csv_output_path, "a")
logger.info(
"Writing corrections CSV. To correct positions, add the corrections to"
" the original source positions i.e. RA' = RA + ra_correction /"
" cos(Dec). To correct fluxes, add the additive correction and multiply"
" the result by the multiplicative correction i.e. S' ="
" flux_peak_correction_multiplicative(S +"
" flux_peak_correction_additive)."
)
print(
f"{catalog.field},{catalog.epoch},{sbid},{dra_median_value * -1},"
f"{ddec_median_value * -1},{dra_madfm_value},{ddec_madfm_value},"
f"{flux_corr_mult.nominal_value},{flux_corr_add.nominal_value},"
f"{flux_corr_mult.std_dev},{flux_corr_add.std_dev},{len(data)}",
file=f,
)
f.close()
csv_output_path = Path(csv_output) # ensure Path object
sbid = catalog.sbid if catalog.sbid is not None else ""
if not csv_output_path.exists():
f = open(csv_output_path, "w")
else:
f = open(csv_output_path, "a")
logger.info(
"Writing corrections CSV. To correct positions, add the corrections to"
" the original source positions i.e. RA' = RA + ra_correction /"
" cos(Dec). To correct fluxes, add the additive correction and multiply"
" the result by the multiplicative correction i.e. S' ="
" flux_peak_correction_multiplicative(S +"
" flux_peak_correction_additive)."
)
print(
f"{catalog.field},{catalog.epoch},{sbid},{dra_median_value * -1},"
f"{ddec_median_value * -1},{dra_madfm_value},{ddec_madfm_value},"
f"{flux_corr_mult.nominal_value},{flux_corr_add.nominal_value},"
f"{flux_corr_mult.std_dev},{flux_corr_add.std_dev},{len(data)}",
file=f,
)
f.close()
hansenjiang marked this conversation as resolved.
Show resolved Hide resolved
return dra_median_value, ddec_median_value, flux_corr_mult, flux_corr_add


Expand Down Expand Up @@ -193,12 +208,13 @@ def shift_and_scale_image(
image_hdu.data = flux_scale * (
image_hdu.data + (flux_offset_mJy * (u.mJy.to(data_unit)))
)
image_hdu.header["FLUXOFF"] = flux_offset_mJy * (u.mJy.to(data_unit))
image_hdu.header["FLUXSCL"] = flux_scale
image_hdu.header["FLUXOFFSET"] = flux_offset_mJy * (u.mJy.to(data_unit))
image_hdu.header["FLUXSCALE"] = flux_scale

image_hdu.header[
"HISTORY"
] = "Image has been corrected for flux by a scaling factor and an offset given by FLUXSCL and FLUXOFF."
image_hdu.header.add_history(
"Image has been corrected for flux by a scaling factor and\
an offset given by FLUXSCALE and FLUXOFFSET."
)
# check for NaN
if replace_nan:
if np.any(np.isnan(image_hdu.data)):
Expand All @@ -225,9 +241,11 @@ def shift_and_scale_image(
image_hdu.header["RAOFF"] = ra_offset_arcsec
image_hdu.header["DECOFF"] = dec_offset_arcsec
AkashA98 marked this conversation as resolved.
Show resolved Hide resolved

image_hdu.header[
"HISTORY"
] = "Image has been corrected for astrometric position by a an offset in both directions given by RAOFF and DECOFF using a model RA=RA+RAOFF/COS(DEC), DEC=DEC+DECOFF"
image_hdu.header.add_history(
"Image has been corrected for astrometric position by an offset\
in both directions given by RAOFF and DECOFF using a model\
RA=RA+RAOFF/COS(DEC), DEC=DEC+DECOFF"
)

return image_hdul

Expand Down Expand Up @@ -311,34 +329,34 @@ def shift_and_scale_catalog(
# Add in the corrections to the votable
flux_scl_param = Param(
votable=votablefile,
ID="flux_scl",
name="flux_scl",
ID="FluxScale",
name="FluxScale",
value=flux_scale,
datatype="float",
unit=None,
)
flux_off_param = Param(
votable=votablefile,
ID="flux_offset",
name="flux_offset",
ID="FluxOffset",
name="FluxOffset",
value=flux_offset_mJy,
datatype="float",
unit=u.mJy,
)

ra_offset_param = Param(
votable=votablefile,
ID="ra_offset",
name="ra_offset",
ID="RAOffset",
name="RAOffset",
value=ra_offset_arcsec,
datatype="float",
unit=u.arcsec,
)

dec_offset_param = Param(
votable=votablefile,
ID="dec_offset",
name="dec_offset",
ID="DECOffset",
name="DECOffset",
value=dec_offset_arcsec,
datatype="float",
unit=u.arcsec,
Expand Down Expand Up @@ -452,6 +470,11 @@ def correct_field(
condon: bool = True,
psf_ref: list[float] = None,
psf: list[float] = None,
flux_limit: float = 0,
snr_limit: float = 20,
nneighbor: float = 1,
apply_flux_limit: bool = True,
select_point_sources: bool = True,
write_output: bool = True,
outdir: str = None,
overwrite: bool = False,
Expand Down Expand Up @@ -536,6 +559,11 @@ def correct_field(
psf=psf_image,
fix_m=False,
fix_b=False,
flux_limit=flux_limit,
snr_limit=snr_limit,
nneighbor=nneighbor,
apply_flux_limit=apply_flux_limit,
select_point_sources=select_point_sources,
crossmatch_output=crossmatch_file,
csv_output=csv_file,
)
Expand Down Expand Up @@ -613,6 +641,11 @@ def correct_files(
condon: bool = True,
psf_ref: list[float] = None,
psf: list[float] = None,
flux_limit: float = 0,
snr_limit: float = 20,
nneighbor: float = 1,
apply_flux_limit: bool = True,
select_point_sources: bool = True,
write_output: bool = True,
outdir: str = None,
overwrite: bool = False,
Expand Down Expand Up @@ -682,16 +715,19 @@ def correct_files(
else:
# get corrections for every image and the correct it
for image_path in image_files:
products = correct_field(
_ = correct_field(
image_path=image_path,
vast_corrections_root=vast_corrections_root,
radius=radius,
condon=condon,
psf_ref=psf_ref,
psf=psf,
flux_limit=flux_limit,
snr_limit=snr_limit,
nneighbor=nneighbor,
apply_flux_limit=apply_flux_limit,
select_point_sources=select_point_sources,
write_output=write_output,
outdir=outdir,
overwrite=overwrite,
)
if products is not None:
hdus, catalogs = products