diff --git a/notebooks_tsqr/ExposureDetail.ipynb b/notebooks_tsqr/ExposureDetail.ipynb index 1f36bae..e28c4ae 100644 --- a/notebooks_tsqr/ExposureDetail.ipynb +++ b/notebooks_tsqr/ExposureDetail.ipynb @@ -14,10 +14,10 @@ "# day_obs values: TODAY, YESTERDAY, YYYY-MM-DD\n", "# Report on observing nights that start upto but not included this day.\n", "#!day_obs = '2024-09-25' # Value to use for local testing (Summit)\n", - "day_obs = \"2024-12-05\" # TODO Change to 'YESTERDAY' to test with default before push\n", + "day_obs = \"2024-09-25\" # TODO Change to 'YESTERDAY' to test with default before push\n", "instrument = \"LSSTComCam\" # LSSTComCam, LATISS\n", "observation_reason = \"ALL\"\n", - "observation_type = \"science\" # TODO: \"science\", \"acq\", default=\"ALL\"\n", + "observation_type = \"flat\" # TODO: \"science\", \"acq\", default=\"ALL\"\n", "science_program = \"ALL\"" ] }, @@ -114,10 +114,9 @@ " min_dayobs=min_day_obs,\n", " max_dayobs=max_day_obs,\n", " verbose=False, # TODO change to False before push\n", + " warning=False, # TODO change to True before push\n", " limit=5000,\n", - ")\n", - "#!allrep = AllReports(allsrc=allsrc)\n", - "#!src_exp = allsrc.exp_src" + ")" ] }, { @@ -126,26 +125,6 @@ "id": "5", "metadata": {}, "outputs": [], - "source": [ - "#! allrep.exp_rep.time_log_as_markdown()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6", - "metadata": {}, - "outputs": [], - "source": [ - "#! 
display(HTML(allsrc.cdb_src.get_exposures(instrument).to_html()))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7", - "metadata": {}, - "outputs": [], "source": [ "if observation_reason:\n", " md(f\"# Observation Reason: {observation_reason}\")\n", @@ -161,13 +140,12 @@ ")\n", "\n", "md(f\"The number of exposures in this filtered result is {len(df.index)}\")\n", - "display(HTML(df.to_html(index=False)))\n", - "#!print(f'{df.columns.to_list()=}')" + "display(HTML(df.to_html(index=False)))" ] }, { "cell_type": "markdown", - "id": "8", + "id": "6", "metadata": {}, "source": [ "| Symbol | Meaning |\n", @@ -179,7 +157,7 @@ }, { "cell_type": "markdown", - "id": "9", + "id": "7", "metadata": {}, "source": [ "-----------------" @@ -187,7 +165,7 @@ }, { "cell_type": "markdown", - "id": "10", + "id": "8", "metadata": {}, "source": [ "# Developer Only Section" @@ -196,7 +174,7 @@ { "cell_type": "code", "execution_count": null, - "id": "11", + "id": "9", "metadata": {}, "outputs": [], "source": [ @@ -208,7 +186,7 @@ { "cell_type": "code", "execution_count": null, - "id": "12", + "id": "10", "metadata": {}, "outputs": [], "source": [ @@ -218,20 +196,23 @@ { "cell_type": "code", "execution_count": null, - "id": "13", + "id": "11", "metadata": {}, "outputs": [], "source": [ - "# src_exp.exposures['LATISS']" + "allsrc" ] }, { "cell_type": "code", "execution_count": null, - "id": "14", + "id": "12", "metadata": {}, "outputs": [], - "source": [] + "source": [ + "print(f\"Elapsed time (excluding code import): {timer.toc:.1f} seconds\")\n", + "print(f\"Finished {str(dt.datetime.now().replace(microsecond=0))} UTC\")" + ] } ], "metadata": { diff --git a/notebooks_tsqr/NightLog.ipynb b/notebooks_tsqr/NightLog.ipynb index 46f386b..1c5a7be 100644 --- a/notebooks_tsqr/NightLog.ipynb +++ b/notebooks_tsqr/NightLog.ipynb @@ -9,6 +9,9 @@ "***What are we missing?*** See the slack *#ts-logging* channel for discussion about this page. 
Use it to report problems, ask questions, and make requests for changes. \n", "\n", "## What is new in this application?(newest change at top of list)\n", + "- Stakeholders decided that supported instruments will be: LSSTComCam, LSSTCam, and LATISS. Of those, LSSTCam is not supported in ConsDB and limitations in LATISS will force some of the requested fields for ExposureDetail to be given as NA.\n", + "- Added optional WARNING behavior to alert when no records are found, instruments are excluded from results, etc.\n", + "- Name change of this page to *Nightly Digest* (but file name remains the same)\n", "- Added Merged time-log with compaction. Merges all sources by time and compacts them into a single time period (currently 4 hour). Rendering of DataFrame is now done via a jinja2 Template which gives much greater (largely unrealized) control over display.\n", "- Added initial Consolidated Database section\n", "- Add section for \"Links to related resources\". Let us know is other links should be added.\n", @@ -47,7 +50,7 @@ "# day_obs values: TODAY, v, YYYY-MM-DD\n", "# Report on observing nights that start upto but not included this day.\n", "# day_obs = '2024-09-25' # 2024-12-05 Value to use for local testing (Summit)\n", - "day_obs = \"2024-12-15\" # TODO Change to 'YESTERDAY' and 'TODAY' to test with default before push\n", + "day_obs = \"2024-09-25\" # TODO Change to 'YESTERDAY' and 'TODAY' to test with default before push\n", "\n", "# Total number of days of data to display (ending on day_obs)\n", "number_of_days = \"1\" # TODO Change to '1' to test with default before push\n", @@ -144,6 +147,7 @@ " min_dayobs=min_day_obs,\n", " max_dayobs=max_day_obs,\n", " verbose=verbose,\n", + " warning=False, # TODO change to True before push\n", " limit=5000,\n", " exclude_instruments=[], # TODO change to empty list before push\n", ")\n", @@ -520,13 +524,25 @@ "metadata": {}, "outputs": [], "source": [ - "display(HTML(tl.sutl(allsrc, delta=\"3h\")))" + "allsrc" ] }, { 
- "cell_type": "markdown", + "cell_type": "code", + "execution_count": null, "id": "34", "metadata": {}, + "outputs": [], + "source": [ + "# from importlib import reload\n", + "# reload(tl)\n", + "display(HTML(tl.sutl(allsrc, delta=\"3h\", verbose=False)))" + ] + }, + { + "cell_type": "markdown", + "id": "35", + "metadata": {}, "source": [ "-----------\n", "------------" @@ -534,7 +550,7 @@ }, { "cell_type": "markdown", - "id": "35", + "id": "36", "metadata": {}, "source": [ "# Developer Only Section REMOVE\n", @@ -545,7 +561,7 @@ }, { "cell_type": "markdown", - "id": "36", + "id": "37", "metadata": {}, "source": [ "## Overview \n" @@ -554,7 +570,7 @@ { "cell_type": "code", "execution_count": null, - "id": "37", + "id": "38", "metadata": {}, "outputs": [], "source": [ @@ -587,7 +603,7 @@ }, { "cell_type": "markdown", - "id": "38", + "id": "39", "metadata": {}, "source": [ "## Data Status\n", @@ -597,7 +613,7 @@ { "cell_type": "code", "execution_count": null, - "id": "39", + "id": "40", "metadata": {}, "outputs": [], "source": [ @@ -608,7 +624,7 @@ }, { "cell_type": "markdown", - "id": "40", + "id": "41", "metadata": {}, "source": [ "## This report uses the following data sources\n", @@ -623,7 +639,7 @@ }, { "cell_type": "markdown", - "id": "41", + "id": "42", "metadata": {}, "source": [ "## Where was this run?\n", @@ -638,7 +654,7 @@ }, { "cell_type": "markdown", - "id": "42", + "id": "43", "metadata": {}, "source": [ "## Available Consolidated Database fields\n", @@ -652,7 +668,7 @@ { "cell_type": "code", "execution_count": null, - "id": "43", + "id": "44", "metadata": {}, "outputs": [], "source": [ @@ -670,7 +686,7 @@ }, { "cell_type": "markdown", - "id": "44", + "id": "45", "metadata": {}, "source": [ "## Section overviews moved here" @@ -679,7 +695,7 @@ { "cell_type": "code", "execution_count": null, - "id": "45", + "id": "46", "metadata": {}, "outputs": [], "source": [ @@ -694,7 +710,7 @@ { "cell_type": "code", "execution_count": null, - "id": "46", + "id": 
"47", "metadata": {}, "outputs": [], "source": [ @@ -708,7 +724,7 @@ { "cell_type": "code", "execution_count": null, - "id": "47", + "id": "48", "metadata": {}, "outputs": [], "source": [ @@ -718,7 +734,7 @@ }, { "cell_type": "markdown", - "id": "48", + "id": "49", "metadata": {}, "source": [ "## Finale" @@ -727,7 +743,7 @@ { "cell_type": "code", "execution_count": null, - "id": "49", + "id": "50", "metadata": {}, "outputs": [], "source": [ diff --git a/notebooks_tsqr/NightLog.yaml b/notebooks_tsqr/NightLog.yaml index e0eed69..50358fb 100644 --- a/notebooks_tsqr/NightLog.yaml +++ b/notebooks_tsqr/NightLog.yaml @@ -1,5 +1,5 @@ # For use with a Times Square notebook -title: Night Summary +title: Nightly Digest description: Combined report from Summit logs authors: - name: Steve Pothier diff --git a/python/lsst/ts/logging_and_reporting/all_sources.py b/python/lsst/ts/logging_and_reporting/all_sources.py index 8f5218b..8d37aaf 100644 --- a/python/lsst/ts/logging_and_reporting/all_sources.py +++ b/python/lsst/ts/logging_and_reporting/all_sources.py @@ -48,9 +48,11 @@ def __init__( min_dayobs=None, # INCLUSIVE: default=(max_dayobs - one_day) limit=None, verbose=False, + warning=True, exclude_instruments=None, ): self.verbose = verbose + self.warning = warning self.exclude_instruments = exclude_instruments or [] ut.tic() # Load data for all needed sources for the selected dayobs range. @@ -76,8 +78,8 @@ def __init__( server_url=server_url, min_dayobs=min_dayobs, max_dayobs=max_dayobs, + warning=warning, ) - self.alm_src = alm.Almanac( min_dayobs=min_dayobs, max_dayobs=max_dayobs, @@ -505,8 +507,10 @@ def exposure_detail( ) if 0 == len(crecs) + len(erecs): - msg = f"No records found for ConsDB or ExposureLog for {instrument=}." - warnings.warn(msg, category=ex.NoRecordsWarning, stacklevel=2) + if self.warning: + msg = "No records found for ConsDB or ExposureLog " + msg += f"for {instrument=}." 
+ warnings.warn(msg, category=ex.NoRecordsWarning, stacklevel=2) return pd.DataFrame() # empty # Join records by c.exposure_id = e.id (using Pandas) diff --git a/python/lsst/ts/logging_and_reporting/consdb.py b/python/lsst/ts/logging_and_reporting/consdb.py index 848d645..082283b 100644 --- a/python/lsst/ts/logging_and_reporting/consdb.py +++ b/python/lsst/ts/logging_and_reporting/consdb.py @@ -43,6 +43,7 @@ def __init__( max_dayobs=None, # EXCLUSIVE: default=Today other=YYYY-MM-DD limit=None, verbose=False, + warning=True, ): super().__init__( server_url=server_url, @@ -50,6 +51,7 @@ def __init__( min_dayobs=min_dayobs, limit=limit, verbose=verbose, + warning=warning, ) try: import lsst.rsp @@ -104,7 +106,7 @@ def get_instruments(self, include=None): if include is None: # use the default list include = include_default exclude = available_instruments - include - if exclude: + if exclude and self.warning: elist = ", ".join(sorted(exclude)) warnings.warn(f"Excluding these instruments from results: {elist}") @@ -200,7 +202,7 @@ def query(self, sql): records = [ {c: v for c, v in zip(result["columns"], row)} for row in result["data"] ] - if len(records) == 0: + if len(records) == 0 and self.warning: msg = f"No results returned from {self.abbrev}.query(). " msg += f"{sql=!r} {url=}" warnings.warn(msg, category=ex.ConsdbQueryWarning, stacklevel=2) @@ -258,9 +260,9 @@ def get_exposures(self, instrument): AND e.day_obs < {ut.dayobs_int(self.max_dayobs)} """ sql = " ".join(ssql.split()) # remove redundant whitespace - print(f"DBG cdb.get_exposures {instrument=} {sql=}") records = self.query(sql) if self.verbose and len(records) > 0: + print(f"DBG cdb.get_exposures {instrument=} {sql=}") print(f"DBG cdb.get_exposures: {records[0]=}") self.exposures[instrument] = records @@ -269,8 +271,9 @@ def get_exposures(self, instrument): df = pd.DataFrame(records) return ut.wrap_dataframe_columns(df) else: - msg = f"No records found for ConsDB for {instrument=}." 
- warnings.warn(msg, category=ex.NoRecordsWarning, stacklevel=2) + if self.warning: + msg = f"No records found for ConsDB for {instrument=}." + warnings.warn(msg, category=ex.NoRecordsWarning, stacklevel=2) return pd.DataFrame() # empty # TODO Remove if this is still here after Feb 2025 diff --git a/python/lsst/ts/logging_and_reporting/exceptions.py b/python/lsst/ts/logging_and_reporting/exceptions.py index 8d671ee..e4b3e9f 100644 --- a/python/lsst/ts/logging_and_reporting/exceptions.py +++ b/python/lsst/ts/logging_and_reporting/exceptions.py @@ -108,10 +108,10 @@ class ConsdbQueryError(BaseLogrepError): class ConsdbQueryWarning(BaseLogrepError): # noqa: N818 """Got no results from 'consdb/query' endpoint. This might be ok, but is often an indication of a bad query or of an unimplemented part of - consdb. + consdb. """ - error_code = "IFYQUERY" + error_code = "NULQUERY" class NoRecordsWarning(BaseLogrepError): # noqa: N818 diff --git a/python/lsst/ts/logging_and_reporting/source_adapters.py b/python/lsst/ts/logging_and_reporting/source_adapters.py index 88d4401..aec972b 100644 --- a/python/lsst/ts/logging_and_reporting/source_adapters.py +++ b/python/lsst/ts/logging_and_reporting/source_adapters.py @@ -124,6 +124,7 @@ def __init__( connect_timeout=5.05, # seconds read_timeout=20, # seconds verbose=False, + warning=True, ): """Load the relevant data for the Source. 
@@ -135,6 +136,7 @@ def __init__( """ self.server = server_url or default_server self.verbose = verbose + self.warning = warning self.offset = offset if limit is None: limit = self.__class__.default_record_limit diff --git a/python/lsst/ts/logging_and_reporting/time_logs.py b/python/lsst/ts/logging_and_reporting/time_logs.py index c1fcd20..00a9432 100644 --- a/python/lsst/ts/logging_and_reporting/time_logs.py +++ b/python/lsst/ts/logging_and_reporting/time_logs.py @@ -181,8 +181,8 @@ def sutl(allsrc, delta="2h", allow_data_loss=False, verbose=False): fdf = merge_sources(allsrc) cdf = compact(fdf, delta=delta, allow_data_loss=allow_data_loss) if verbose: - print(f"DBG sutl: {fdf.shape=}") - print(f"DBG sutl: {cdf.shape=}") + print(f"DBG sutl: {fdf.shape=} {fdf.columns.to_list()=}") + print(f"DBG sutl: {cdf.shape=} {cdf.columns.to_list()=}") rdf = reduce_period(cdf) if verbose: print(f"DBG sutl: {rdf.shape=}") @@ -333,7 +333,7 @@ def compact(full_df, delta="4h", allow_data_loss=False, verbose=False): # + In Period: Replace multi-values in a column with a conctenation # of the unique values. # TODO General aggregation using dtypes assigned in allsrc. -def reduce_period(df, verbose=False): +def reduce_period(df, verbose=True): """Group and aggregate by Period. Drops some columns. Reduces Rows.""" def multi_string(group): @@ -421,7 +421,10 @@ def multi_label(group): print(f"DBG {use_agg=}") print(f"DBG {drop_agg=}") print(f"DBG final agg_keys={set(group_aggregator.keys())}") - df = df.groupby(level="Period").agg(group_aggregator) + if group_aggregator: + df = df.groupby(level="Period").agg(group_aggregator) + else: + df = df.groupby(level="Period").last() if verbose: print(f"DBG reduce_period: Output {df.shape=}") return df @@ -439,8 +442,7 @@ def field_distribution(df, available=None): return facets -def foo(df): - +def foo(df): # TODO remove # Create a dictionary to store the aggregated results result = {} dtypes = df.dtypes