From cd6cc3473561c5b9d6a3839415266c46f7d06a3f Mon Sep 17 00:00:00 2001
From: Maxime Beauchemin
Date: Sun, 15 Dec 2024 22:55:41 -0800
Subject: [PATCH 1/2] chore: set dev env logging level to INFO (from DEBUG)

Use a higher default logging level in dev mode to clean up output. Also
demote some TMI messages to DEBUG so that not-so-useful information no
longer shows up on startup. Devs can easily switch the logging level back
to DEBUG if/when needed.
---
 docker/pythonpath_dev/superset_config.py | 1 +
 superset/models/helpers.py               | 4 ++--
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/docker/pythonpath_dev/superset_config.py b/docker/pythonpath_dev/superset_config.py
index e8223e53584bc..3243897587858 100644
--- a/docker/pythonpath_dev/superset_config.py
+++ b/docker/pythonpath_dev/superset_config.py
@@ -103,6 +103,7 @@ class CeleryConfig:
 # The base URL for the email report hyperlinks.
 WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL
 SQLLAB_CTAS_NO_LIMIT = True
+LOG_LEVEL = logging.INFO
 
 #
 # Optionally import superset_config_docker.py (which will have been included on
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index feb05a401e39c..868c1870028d9 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -329,13 +329,13 @@ def import_from_dict(
             is_new_obj = True
             # Create new DB object
             obj = cls(**dict_rep)
-            logger.info("Importing new %s %s", obj.__tablename__, str(obj))
+            logger.debug("Importing new %s %s", obj.__tablename__, str(obj))
             if cls.export_parent and parent:
                 setattr(obj, cls.export_parent, parent)
             db.session.add(obj)
         else:
             is_new_obj = False
-            logger.info("Updating %s %s", obj.__tablename__, str(obj))
+            logger.debug("Updating %s %s", obj.__tablename__, str(obj))
         # Update columns
         for k, v in dict_rep.items():
             setattr(obj, k, v)

From 29fd4a3094aa4d9fc9f68114865d4a93b51ebbcd Mon Sep 17 00:00:00 2001
From: Maxime Beauchemin
Date: Sun, 15 Dec 2024 23:17:33 -0800
Subject: [PATCH 2/2] chore: clarify messages, remove some useless output

---
 docker/docker-bootstrap.sh                      |  9 ++++-----
 docker/docker-init.sh                           | 12 +-----------
 superset/commands/dataset/importers/v1/utils.py |  4 ++--
 superset/examples/birth_names.py                | 12 +++++++-----
 4 files changed, 14 insertions(+), 23 deletions(-)

diff --git a/docker/docker-bootstrap.sh b/docker/docker-bootstrap.sh
index 1a4e04be94e7a..78bb24fd6b749 100755
--- a/docker/docker-bootstrap.sh
+++ b/docker/docker-bootstrap.sh
@@ -20,7 +20,8 @@ set -eo pipefail
 # Make python interactive
 if [ "$DEV_MODE" == "true" ]; then
-  echo "Reinstalling the app in editable mode"
+  echo "[DEV_MODE detected] Setting the superset package to be in editable mode"
+  echo "RUN: uv pip install -e ."
   uv pip install -e .
 fi
 
 REQUIREMENTS_LOCAL="/app/docker/requirements-local.txt"
@@ -34,10 +35,8 @@ fi
 # Make sure we have dev requirements installed
 #
 if [ -f "${REQUIREMENTS_LOCAL}" ]; then
-  echo "Installing local overrides at ${REQUIREMENTS_LOCAL}"
-  pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
-else
-  echo "Skipping local overrides"
+  echo "Installing python packages specified at ${REQUIREMENTS_LOCAL}"
+  uv pip install --no-cache-dir -r "${REQUIREMENTS_LOCAL}"
 fi
 
 case "${1}" in
diff --git a/docker/docker-init.sh b/docker/docker-init.sh
index 4016fd898a038..b4650f3803126 100755
--- a/docker/docker-init.sh
+++ b/docker/docker-init.sh
@@ -30,15 +30,9 @@ fi
 echo_step() {
 cat <
[...]
diff --git a/superset/commands/dataset/importers/v1/utils.py b/superset/commands/dataset/importers/v1/utils.py
@@ ... @@ def load_data(data_uri: str, dataset: SqlaTable, database: Database) -> None:
     # reuse session when loading data if possible, to make import atomic
     if database.sqlalchemy_uri == current_app.config.get("SQLALCHEMY_DATABASE_URI"):
-        logger.info("Loading data inside the import transaction")
+        logger.debug("Loading data inside the import transaction")
         connection = db.session.connection()
         df.to_sql(
             dataset.table_name,
@@ -219,7 +219,7 @@ def load_data(data_uri: str, dataset: SqlaTable, database: Database) -> None:
             method="multi",
         )
     else:
-        logger.warning("Loading data outside the import transaction")
+        logger.debug("Loading data outside the import transaction")
         with database.get_sqla_engine(
             catalog=dataset.catalog,
             schema=dataset.schema,
diff --git a/superset/examples/birth_names.py b/superset/examples/birth_names.py
index b69cf0f4a4a10..bd8653b664442 100644
--- a/superset/examples/birth_names.py
+++ b/superset/examples/birth_names.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
+import logging
 import textwrap
 from typing import Union
 
@@ -40,6 +41,8 @@
     update_slice_ids,
 )
 
+logger = logging.getLogger(__name__)
+
 
 def gen_filter(
     subject: str, comparator: str, operator: str = "=="
@@ -83,8 +86,7 @@ def load_data(tbl_name: str, database: Database, sample: bool = False) -> None:
         method="multi",
         index=False,
     )
-    print("Done loading table!")
-    print("-" * 80)
+    logger.debug("Done loading table!")
 
 
 def load_birth_names(
@@ -104,7 +106,7 @@ def load_birth_names(
     table = get_table_connector_registry()
     obj = db.session.query(table).filter_by(table_name=tbl_name, schema=schema).first()
     if not obj:
-        print(f"Creating table [{tbl_name}] reference")
+        logger.debug("Creating table [%s] reference", tbl_name)
         obj = table(table_name=tbl_name, schema=schema)
         db.session.add(obj)
 
@@ -196,7 +198,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]:
         "datasource_type": DatasourceType.TABLE,
     }
 
-    print("Creating some slices")
+    logger.debug("Creating some slices")
     slices = [
         Slice(
             **slice_kwargs,
@@ -561,7 +563,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]:
 
 
 def create_dashboard(slices: list[Slice]) -> Dashboard:
-    print("Creating a dashboard")
+    logger.debug("Creating a dashboard")
     dash = db.session.query(Dashboard).filter_by(slug="births").first()
     if not dash:
         dash = Dashboard()
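
With LOG_LEVEL raised to INFO in the dev image, a developer who wants the old
verbosity back can override it locally. A minimal sketch, assuming you create
the optional docker/pythonpath_dev/superset_config_docker.py override module
that superset_config.py already imports when present (the exact contents below
are illustrative, not part of this patch):

    # docker/pythonpath_dev/superset_config_docker.py
    # Local-only override module, picked up by the optional import at the
    # bottom of docker/pythonpath_dev/superset_config.py.
    import logging

    # Flip the dev logging level back to DEBUG when the verbose output is useful.
    LOG_LEVEL = logging.DEBUG

This keeps the quieter INFO default for everyone while leaving a one-line,
local escape hatch for debugging sessions.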