diff --git a/my_lib/my_funcs.py b/my_lib/my_funcs.py
index f1742f4..925dbf3 100644
--- a/my_lib/my_funcs.py
+++ b/my_lib/my_funcs.py
@@ -1,3 +1,5 @@
+# Databricks notebook source
+
 def convertFtoC(unitCol, tempCol):
     from pyspark.sql.functions import when, col
     return when(col(unitCol) == "F", (col(tempCol) - 32) * (5/9)).otherwise(col(tempCol)).alias("temp_celcius")
diff --git a/temperatures.py b/temperatures.py
index b5b8778..cfe2cd7 100644
--- a/temperatures.py
+++ b/temperatures.py
@@ -1,15 +1,4 @@
 # Databricks notebook source
-# MAGIC %md
-# MAGIC **NOTE**: the following cell _**should**_ fail.
-# MAGIC
-# MAGIC Relative imports of Python libraries are currently not supported. ([Custom libraries can be uploaded to the workspace or installed from PyPi](https://docs.databricks.com/libraries/workspace-libraries.html).)
-
-# COMMAND ----------
-
-from my_lib.my_funcs import *
-
-# COMMAND ----------
-
 # MAGIC %md
 # MAGIC Using `%run` allows you to execute a Databricks notebook in the current SparkSession, bringing any imported modules, declared variables, or defined functions into the current scope.
 # MAGIC
@@ -52,3 +41,8 @@
 # COMMAND ----------
 
 display(df.select(convertFtoC("unit", "temp")))
+
+# COMMAND ----------
+
+# This is a CI/CD demo notebook for getting workloads into production
+
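
For reference, below is a minimal sketch of how the change might be exercised end to end in a Databricks notebook: my_funcs is pulled in with %run (the diff promotes my_lib/my_funcs.py to notebook source), a small DataFrame is built, and convertFtoC is applied. The notebook path ./my_lib/my_funcs, the sample unit/temp rows, and the extra selected columns are illustrative assumptions, not part of the diff.

# Databricks notebook source
# Sketch only: assumes my_lib/my_funcs is a sibling notebook defining convertFtoC.
# MAGIC %run ./my_lib/my_funcs

# COMMAND ----------

# Build a small illustrative DataFrame; the unit/temp values are made up for the example.
df = spark.createDataFrame(
    [("F", 98.6), ("C", 37.0), ("F", 212.0)],
    ["unit", "temp"],
)

# COMMAND ----------

# convertFtoC returns a Column that converts Fahrenheit rows to Celsius and passes
# other rows through unchanged, aliased as temp_celcius (spelling as in the source).
display(df.select("unit", "temp", convertFtoC("unit", "temp")))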