Skip to content

Commit

Permalink
clean up code
Browse files Browse the repository at this point in the history
  • Loading branch information
sfc-gh-kschmaus committed Jan 22, 2025
1 parent bf84c7a commit 245585c
Show file tree
Hide file tree
Showing 6 changed files with 24 additions and 86 deletions.
2 changes: 0 additions & 2 deletions app_utils/shared_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,6 @@
import os
import tempfile
import time
import tempfile
import time
from dataclasses import dataclass
from enum import Enum
from io import StringIO
Expand Down
44 changes: 19 additions & 25 deletions journeys/builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,36 +2,32 @@

import streamlit as st
from loguru import logger

from app_utils.shared_utils import create_cortex_search_service
from semantic_model_generator.data_processing import proto_utils
from semantic_model_generator.data_processing.data_types import Table
from semantic_model_generator.generate_model import append_comment_to_placeholders
from semantic_model_generator.generate_model import comment_out_section
from semantic_model_generator.generate_model import context_to_yaml
from semantic_model_generator.generate_model import get_table_representations
from semantic_model_generator.generate_model import translate_data_class_tables_to_model_protobuf
from semantic_model_generator.protos import semantic_model_pb2
from snowflake.connector import ProgrammingError
from streamlit_extras.tags import tagger_component

from app_utils.shared_utils import (
GeneratorAppScreen,
create_cortex_search_service,
format_snowflake_context,
get_available_databases,
get_available_schemas,
get_available_tables,
get_available_warehouses,
get_snowflake_connection,
input_semantic_file_name,
)
from app_utils.shared_utils import get_available_warehouses
from app_utils.shared_utils import get_snowflake_connection
from semantic_model_generator.data_processing.data_types import CortexSearchService
from semantic_model_generator.generate_model import _get_placeholder_joins
from semantic_model_generator.generate_model import _raw_table_to_semantic_context_table
from semantic_model_generator.snowflake_utils.snowflake_connector import DIMENSION_DATATYPES
from semantic_model_generator.snowflake_utils.snowflake_connector import get_table_representation
from semantic_model_generator.snowflake_utils.snowflake_connector import get_valid_schemas_tables_columns_df
from semantic_model_generator.validate.context_length import validate_context_length
from semantic_model_generator.data_processing.data_types import (
CortexSearchService,
Table,
)
from semantic_model_generator.generate_model import (
context_to_yaml,
get_table_representations,
translate_data_class_tables_to_model_protobuf,
)
from semantic_model_generator.snowflake_utils.snowflake_connector import (
DIMENSION_DATATYPES,
)


@dataclass(frozen=True)
Expand All @@ -53,7 +49,7 @@ def init_session_state() -> None:
"selected_tables": list(),
"semantic_model_name": "",
"table_selector_submitted": False,
"tables": list()
"tables": list(),
}
for key, value in default_state.items():
if key not in st.session_state:
Expand Down Expand Up @@ -181,9 +177,7 @@ def table_selector_fragment() -> None:
@st.cache_data(show_spinner=False)
def call_get_table_representations(base_tables: list[str]) -> list[Table]:
conn = get_snowflake_connection()
tables = get_table_representations(
conn=conn, base_tables=base_tables
)
tables = get_table_representations(conn=conn, base_tables=base_tables)
return tables


Expand Down Expand Up @@ -220,7 +214,7 @@ def _add_search_form(column: str) -> None:
with st.form(key=f"add_cortex_search_form_{column}"):
st.write("Add Cortex Search Integration")
service_name = st.text_input(
label=f"Cortex Search Service Name",
label="Cortex Search Service Name",
value=f"CORTEX_SEARCH.{column}",
key=f"cortex_search_service_name_{column}",
)
Expand Down Expand Up @@ -317,7 +311,7 @@ def create_cortex_search_services() -> None:
column_name=config.column_name,
table_fqn=config.table_fqn,
warehouse_name=config.warehouse_name,
target_lag=config.target_lag
target_lag=config.target_lag,
)


Expand Down
2 changes: 1 addition & 1 deletion journeys/evaluation.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,8 @@
validate_table_schema,
)
from semantic_model_generator.data_processing.proto_utils import proto_to_yaml
from semantic_model_generator.snowflake_utils.snowflake_connector import batch_cortex_complete
from semantic_model_generator.snowflake_utils.snowflake_connector import (
batch_cortex_complete,
create_table_in_schema,
execute_query,
fetch_table,
Expand Down
7 changes: 4 additions & 3 deletions semantic_model_generator/generate_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@

from loguru import logger
from snowflake.connector import SnowflakeConnection
from snowflake.snowpark import Session

from semantic_model_generator.data_processing import data_types, proto_utils
from semantic_model_generator.data_processing.data_types import Table
Expand Down Expand Up @@ -172,7 +171,9 @@ def _raw_table_to_semantic_context_table(
)


def get_table_representations(conn: SnowflakeConnection, base_tables: List[str]) -> List[Table]:
def get_table_representations(
conn: SnowflakeConnection, base_tables: List[str]
) -> List[Table]:
raw_tables = []
for table_fqn in base_tables:
columns_df = get_valid_schemas_tables_columns_df(conn=conn, table_fqn=table_fqn)
Expand Down Expand Up @@ -243,7 +244,7 @@ def raw_schema_to_semantic_context(
context = translate_data_class_tables_to_model_protobuf(
raw_tables=raw_tables,
semantic_model_name=semantic_model_name,
allow_joins=allow_joins
allow_joins=allow_joins,
)
return context

Expand Down
54 changes: 0 additions & 54 deletions semantic_model_generator/snowflake_utils/snowflake_connector.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
from snowflake.connector.connection import SnowflakeConnection
from snowflake.connector.errors import ProgrammingError
from snowflake.connector.pandas_tools import write_pandas
from snowflake.snowpark import Session

from semantic_model_generator.data_processing.data_types import Column, Table
from semantic_model_generator.snowflake_utils import env_vars
Expand Down Expand Up @@ -383,21 +382,6 @@ def fetch_stages_in_schema(conn: SnowflakeConnection, schema_name: str) -> list[
return [f"{result[2]}.{result[3]}.{result[1]}" for result in stages]


def fetch_table_schema(conn: "SnowflakeConnection", table_fqn: str) -> dict[str, str]:
    """
    Fetches the schema of a table the current user has access to.

    Args:
        conn: SnowflakeConnection to run the query.
        table_fqn: The fully qualified name of the table to describe.

    Returns:
        A mapping of column name to its Snowflake data type, built from the
        first two columns of each ``DESCRIBE TABLE`` result row.
    """
    query = f"DESCRIBE TABLE {table_fqn};"
    cursor = conn.cursor()
    try:
        cursor.execute(query)
        result = cursor.fetchall()
    finally:
        # Release the cursor even when DESCRIBE fails (e.g. missing table).
        cursor.close()
    return dict([x[:2] for x in result])

def fetch_table_schema(conn: SnowflakeConnection, table_fqn: str) -> dict[str, str]:
"""
Fetches the table schema the current user has access
Expand Down Expand Up @@ -478,44 +462,6 @@ def create_table_in_schema(
return False


def fetch_table(conn: "SnowflakeConnection", table_fqn: str) -> pd.DataFrame:
    """
    Fetches the full contents of a table into a pandas DataFrame.

    Args:
        conn: SnowflakeConnection to run the query.
        table_fqn: The fully qualified name of the table to read.

    Returns:
        A DataFrame containing every row of the table.
    """
    query = f"SELECT * FROM {table_fqn};"
    cursor = conn.cursor()
    try:
        cursor.execute(query)
        query_result = cursor.fetch_pandas_all()
    finally:
        # Release the cursor even if the SELECT fails.
        cursor.close()
    return query_result


def create_table_in_schema(
    conn: "SnowflakeConnection", table_fqn: str, columns_schema: Dict[str, str]
) -> bool:
    """
    Creates a table in the specified schema with the specified columns.

    Args:
        conn: SnowflakeConnection to run the query.
        table_fqn: The fully qualified name of the table to create.
        columns_schema: A mapping of column name to Snowflake data type.

    Returns:
        True if the table was created successfully, False otherwise.
    """
    field_type_list = [f"{k} {v}" for k, v in columns_schema.items()]
    # Construct the create table query
    create_table_query = f"""
    CREATE TABLE IF NOT EXISTS {table_fqn} (
        {', '.join(field_type_list)}
    )
    """

    # Execute the query; swallow ProgrammingError deliberately so callers
    # can treat creation as best-effort and branch on the boolean result.
    cursor = conn.cursor()
    try:
        cursor.execute(create_table_query)
        return True
    except ProgrammingError as e:
        logger.error(f"Error creating table: {e}")
        return False
    finally:
        # Release the cursor on both the success and the failure path.
        cursor.close()


def get_valid_schemas_tables_columns_df(
conn: SnowflakeConnection, table_fqn: str
) -> pd.DataFrame:
Expand Down
1 change: 0 additions & 1 deletion semantic_model_generator/tests/snowflake_connector_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@

import pandas as pd
import pytest
from pandas.testing import assert_frame_equal

from semantic_model_generator.data_processing.data_types import Column, Table
from semantic_model_generator.snowflake_utils import snowflake_connector
Expand Down

0 comments on commit 245585c

Please sign in to comment.