From 6fd4977a6d962f2a13d0ba5509fbe7ef701a322f Mon Sep 17 00:00:00 2001
From: Akash-Jain <15995028+akash-jain-10@users.noreply.github.com>
Date: Mon, 29 Jan 2024 14:37:50 +0530
Subject: [PATCH 01/13] chore(beta-release): Version Bump to `1.3.0-beta`
---
.github/workflows/docker-openmetadata-db.yml | 2 +-
.github/workflows/docker-openmetadata-ingestion-base.yml | 2 +-
.github/workflows/docker-openmetadata-ingestion.yml | 2 +-
.github/workflows/docker-openmetadata-postgres.yml | 2 +-
.github/workflows/docker-openmetadata-server.yml | 4 ++--
common/pom.xml | 2 +-
.../docker-compose-ingestion/docker-compose-ingestion.yml | 2 +-
.../docker-compose-openmetadata.yml | 4 ++--
.../docker-compose-quickstart/docker-compose-postgres.yml | 8 ++++----
docker/docker-compose-quickstart/docker-compose.yml | 8 ++++----
openmetadata-clients/openmetadata-java-client/pom.xml | 2 +-
openmetadata-clients/pom.xml | 2 +-
openmetadata-dist/pom.xml | 2 +-
openmetadata-service/pom.xml | 2 +-
openmetadata-shaded-deps/elasticsearch-dep/pom.xml | 2 +-
openmetadata-shaded-deps/opensearch-dep/pom.xml | 2 +-
openmetadata-shaded-deps/pom.xml | 2 +-
openmetadata-spec/pom.xml | 2 +-
openmetadata-ui/pom.xml | 2 +-
pom.xml | 2 +-
20 files changed, 28 insertions(+), 28 deletions(-)
diff --git a/.github/workflows/docker-openmetadata-db.yml b/.github/workflows/docker-openmetadata-db.yml
index 2089d9d6e9c7..36e6a2295edf 100644
--- a/.github/workflows/docker-openmetadata-db.yml
+++ b/.github/workflows/docker-openmetadata-db.yml
@@ -31,7 +31,7 @@ jobs:
steps:
- name: Check trigger type
if: ${{ env.input == '' }}
- run: echo "input=1.3.0-SNAPSHOT" >> $GITHUB_ENV
+ run: echo "input=1.3.0-beta" >> $GITHUB_ENV
- name: Check out the Repo
uses: actions/checkout@v3
diff --git a/.github/workflows/docker-openmetadata-ingestion-base.yml b/.github/workflows/docker-openmetadata-ingestion-base.yml
index 22817a35bd26..9b30f19e68a8 100644
--- a/.github/workflows/docker-openmetadata-ingestion-base.yml
+++ b/.github/workflows/docker-openmetadata-ingestion-base.yml
@@ -31,7 +31,7 @@ jobs:
steps:
- name: Check trigger type
if: ${{ env.input == '' }}
- run: echo "input=1.3.0-SNAPSHOT" >> $GITHUB_ENV
+ run: echo "input=1.3.0-beta" >> $GITHUB_ENV
- name: Check out the Repo
uses: actions/checkout@v3
diff --git a/.github/workflows/docker-openmetadata-ingestion.yml b/.github/workflows/docker-openmetadata-ingestion.yml
index add92478b860..1e8815ab1f1e 100644
--- a/.github/workflows/docker-openmetadata-ingestion.yml
+++ b/.github/workflows/docker-openmetadata-ingestion.yml
@@ -31,7 +31,7 @@ jobs:
steps:
- name: Check trigger type
if: ${{ env.input == '' }}
- run: echo "input=1.3.0-SNAPSHOT" >> $GITHUB_ENV
+ run: echo "input=1.3.0-beta" >> $GITHUB_ENV
- name: Check out the Repo
uses: actions/checkout@v3
diff --git a/.github/workflows/docker-openmetadata-postgres.yml b/.github/workflows/docker-openmetadata-postgres.yml
index b53195ff2e0d..49af4f85d0c1 100644
--- a/.github/workflows/docker-openmetadata-postgres.yml
+++ b/.github/workflows/docker-openmetadata-postgres.yml
@@ -31,7 +31,7 @@ jobs:
steps:
- name: Check trigger type
if: ${{ env.input == '' }}
- run: echo "input=1.3.0-SNAPSHOT" >> $GITHUB_ENV
+ run: echo "input=1.3.0-beta" >> $GITHUB_ENV
- name: Check out the Repo
uses: actions/checkout@v3
diff --git a/.github/workflows/docker-openmetadata-server.yml b/.github/workflows/docker-openmetadata-server.yml
index 922033b82a74..7931ce542464 100644
--- a/.github/workflows/docker-openmetadata-server.yml
+++ b/.github/workflows/docker-openmetadata-server.yml
@@ -64,7 +64,7 @@ jobs:
steps:
- name: Check trigger type
id: check_trigger
- run: echo "DOCKER_RELEASE_TAG=1.3.0-SNAPSHOT" >> $GITHUB_OUTPUT
+ run: echo "DOCKER_RELEASE_TAG=1.3.0-beta" >> $GITHUB_OUTPUT
- name: Download application from Artifiact
uses: actions/download-artifact@v2
@@ -129,7 +129,7 @@ jobs:
- name: Check trigger type
id: check_trigger
if: ${{ env.DOCKER_RELEASE_TAG == '' }}
- run: echo "DOCKER_RELEASE_TAG=1.3.0-SNAPSHOT" >> $GITHUB_ENV
+ run: echo "DOCKER_RELEASE_TAG=1.3.0-beta" >> $GITHUB_ENV
- name: Check out the Repo
uses: actions/checkout@v3
diff --git a/common/pom.xml b/common/pom.xml
index 34a4d9fb4064..eda420eac020 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -18,7 +18,7 @@
platform
org.open-metadata
- 1.3.0-SNAPSHOT
+ 1.3.0-beta
4.0.0
diff --git a/docker/docker-compose-ingestion/docker-compose-ingestion.yml b/docker/docker-compose-ingestion/docker-compose-ingestion.yml
index 02cacb954c28..9863edc573a2 100644
--- a/docker/docker-compose-ingestion/docker-compose-ingestion.yml
+++ b/docker/docker-compose-ingestion/docker-compose-ingestion.yml
@@ -18,7 +18,7 @@ volumes:
services:
ingestion:
container_name: openmetadata_ingestion
- image: docker.getcollate.io/openmetadata/ingestion:1.3.0-SNAPSHOT
+ image: docker.getcollate.io/openmetadata/ingestion:1.3.0-beta
environment:
AIRFLOW__API__AUTH_BACKENDS: "airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session"
AIRFLOW__CORE__EXECUTOR: LocalExecutor
diff --git a/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml b/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml
index 066037fbee18..0a77cd466be3 100644
--- a/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml
+++ b/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml
@@ -14,7 +14,7 @@ services:
execute-migrate-all:
container_name: execute_migrate_all
command: "./bootstrap/openmetadata-ops.sh migrate"
- image: docker.getcollate.io/openmetadata/server:1.3.0-SNAPSHOT
+ image: docker.getcollate.io/openmetadata/server:1.3.0-beta
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
SERVER_PORT: ${SERVER_PORT:-8585}
@@ -202,7 +202,7 @@ services:
openmetadata-server:
container_name: openmetadata_server
restart: always
- image: docker.getcollate.io/openmetadata/server:1.3.0-SNAPSHOT
+ image: docker.getcollate.io/openmetadata/server:1.3.0-beta-SNAPSHOT
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
SERVER_PORT: ${SERVER_PORT:-8585}
diff --git a/docker/docker-compose-quickstart/docker-compose-postgres.yml b/docker/docker-compose-quickstart/docker-compose-postgres.yml
index 3b6c99d2b1d1..afb302ef1dc7 100644
--- a/docker/docker-compose-quickstart/docker-compose-postgres.yml
+++ b/docker/docker-compose-quickstart/docker-compose-postgres.yml
@@ -18,7 +18,7 @@ volumes:
services:
postgresql:
container_name: openmetadata_postgresql
- image: docker.getcollate.io/openmetadata/postgresql:1.3.0-SNAPSHOT
+ image: docker.getcollate.io/openmetadata/postgresql:1.3.0-beta
restart: always
command: "--work_mem=10MB"
environment:
@@ -61,7 +61,7 @@ services:
execute-migrate-all:
container_name: execute_migrate_all
- image: docker.getcollate.io/openmetadata/server:1.3.0-SNAPSHOT
+ image: docker.getcollate.io/openmetadata/server:1.3.0-beta
command: "./bootstrap/openmetadata-ops.sh migrate"
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
@@ -250,7 +250,7 @@ services:
openmetadata-server:
container_name: openmetadata_server
restart: always
- image: docker.getcollate.io/openmetadata/server:1.3.0-SNAPSHOT
+ image: docker.getcollate.io/openmetadata/server:1.3.0-beta
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
SERVER_PORT: ${SERVER_PORT:-8585}
@@ -439,7 +439,7 @@ services:
ingestion:
container_name: openmetadata_ingestion
- image: docker.getcollate.io/openmetadata/ingestion:1.3.0-SNAPSHOT
+ image: docker.getcollate.io/openmetadata/ingestion:1.3.0-beta
depends_on:
elasticsearch:
condition: service_started
diff --git a/docker/docker-compose-quickstart/docker-compose.yml b/docker/docker-compose-quickstart/docker-compose.yml
index 8c000bf4e5d0..8eec1690cf02 100644
--- a/docker/docker-compose-quickstart/docker-compose.yml
+++ b/docker/docker-compose-quickstart/docker-compose.yml
@@ -18,7 +18,7 @@ volumes:
services:
mysql:
container_name: openmetadata_mysql
- image: docker.getcollate.io/openmetadata/db:1.3.0-SNAPSHOT
+ image: docker.getcollate.io/openmetadata/db:1.3.0-beta
command: "--sort_buffer_size=10M"
restart: always
environment:
@@ -59,7 +59,7 @@ services:
execute-migrate-all:
container_name: execute_migrate_all
- image: docker.getcollate.io/openmetadata/server:1.3.0-SNAPSHOT
+ image: docker.getcollate.io/openmetadata/server:1.3.0-beta
command: "./bootstrap/openmetadata-ops.sh migrate"
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
@@ -248,7 +248,7 @@ services:
openmetadata-server:
container_name: openmetadata_server
restart: always
- image: docker.getcollate.io/openmetadata/server:1.3.0-SNAPSHOT
+ image: docker.getcollate.io/openmetadata/server:1.3.0-beta
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
SERVER_PORT: ${SERVER_PORT:-8585}
@@ -437,7 +437,7 @@ services:
ingestion:
container_name: openmetadata_ingestion
- image: docker.getcollate.io/openmetadata/ingestion:1.3.0-SNAPSHOT
+ image: docker.getcollate.io/openmetadata/ingestion:1.3.0-beta
depends_on:
elasticsearch:
condition: service_started
diff --git a/openmetadata-clients/openmetadata-java-client/pom.xml b/openmetadata-clients/openmetadata-java-client/pom.xml
index cafef6a0c08b..980f6c5ea634 100644
--- a/openmetadata-clients/openmetadata-java-client/pom.xml
+++ b/openmetadata-clients/openmetadata-java-client/pom.xml
@@ -5,7 +5,7 @@
openmetadata-clients
org.open-metadata
- 1.3.0-SNAPSHOT
+ 1.3.0-beta
4.0.0
diff --git a/openmetadata-clients/pom.xml b/openmetadata-clients/pom.xml
index 3785c3788318..a83d56f01146 100644
--- a/openmetadata-clients/pom.xml
+++ b/openmetadata-clients/pom.xml
@@ -5,7 +5,7 @@
platform
org.open-metadata
- 1.3.0-SNAPSHOT
+ 1.3.0-beta
4.0.0
diff --git a/openmetadata-dist/pom.xml b/openmetadata-dist/pom.xml
index 91edfdd76834..85ada0c5c03b 100644
--- a/openmetadata-dist/pom.xml
+++ b/openmetadata-dist/pom.xml
@@ -20,7 +20,7 @@
platform
org.open-metadata
- 1.3.0-SNAPSHOT
+ 1.3.0-beta
openmetadata-dist
diff --git a/openmetadata-service/pom.xml b/openmetadata-service/pom.xml
index 34a457208695..4c7b077da816 100644
--- a/openmetadata-service/pom.xml
+++ b/openmetadata-service/pom.xml
@@ -5,7 +5,7 @@
platform
org.open-metadata
- 1.3.0-SNAPSHOT
+ 1.3.0-beta
4.0.0
openmetadata-service
diff --git a/openmetadata-shaded-deps/elasticsearch-dep/pom.xml b/openmetadata-shaded-deps/elasticsearch-dep/pom.xml
index 69e75472f47b..dc8a8c5ef98d 100644
--- a/openmetadata-shaded-deps/elasticsearch-dep/pom.xml
+++ b/openmetadata-shaded-deps/elasticsearch-dep/pom.xml
@@ -5,7 +5,7 @@
openmetadata-shaded-deps
org.open-metadata
- 1.3.0-SNAPSHOT
+ 1.3.0-beta
4.0.0
elasticsearch-deps
diff --git a/openmetadata-shaded-deps/opensearch-dep/pom.xml b/openmetadata-shaded-deps/opensearch-dep/pom.xml
index 0b6eb7caaf32..3283a15f4885 100644
--- a/openmetadata-shaded-deps/opensearch-dep/pom.xml
+++ b/openmetadata-shaded-deps/opensearch-dep/pom.xml
@@ -5,7 +5,7 @@
openmetadata-shaded-deps
org.open-metadata
- 1.3.0-SNAPSHOT
+ 1.3.0-beta
4.0.0
opensearch-deps
diff --git a/openmetadata-shaded-deps/pom.xml b/openmetadata-shaded-deps/pom.xml
index 6eed28660700..6fb7f5e03608 100644
--- a/openmetadata-shaded-deps/pom.xml
+++ b/openmetadata-shaded-deps/pom.xml
@@ -5,7 +5,7 @@
platform
org.open-metadata
- 1.3.0-SNAPSHOT
+ 1.3.0-beta
4.0.0
openmetadata-shaded-deps
diff --git a/openmetadata-spec/pom.xml b/openmetadata-spec/pom.xml
index 4cf31c4a873a..149cc662a263 100644
--- a/openmetadata-spec/pom.xml
+++ b/openmetadata-spec/pom.xml
@@ -5,7 +5,7 @@
platform
org.open-metadata
- 1.3.0-SNAPSHOT
+ 1.3.0-beta
4.0.0
diff --git a/openmetadata-ui/pom.xml b/openmetadata-ui/pom.xml
index 66e7d9439e51..2e2098cf0c30 100644
--- a/openmetadata-ui/pom.xml
+++ b/openmetadata-ui/pom.xml
@@ -5,7 +5,7 @@
platform
org.open-metadata
- 1.3.0-SNAPSHOT
+ 1.3.0-beta
4.0.0
diff --git a/pom.xml b/pom.xml
index 76d21dcf1fe0..a4e97d1ded0f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -26,7 +26,7 @@
based on Open Metadata Standards/APIs, supporting connectors to a wide range of data services,
OpenMetadata enables end-to-end metadata management, giving you the freedom to unlock the value of your data assets.
- 1.3.0-SNAPSHOT
+ 1.3.0-beta
https://github.com/open-metadata/OpenMetadata
openmetadata-spec
From f08d36d4d40638f2e5c715ad4ee13b6b70e25baa Mon Sep 17 00:00:00 2001
From: Akash-Jain <15995028+akash-jain-10@users.noreply.github.com>
Date: Mon, 29 Jan 2024 16:31:52 +0530
Subject: [PATCH 02/13] chore(version-bump): Update Dockerfile Release version
---
docker/docker-compose-quickstart/Dockerfile | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/docker/docker-compose-quickstart/Dockerfile b/docker/docker-compose-quickstart/Dockerfile
index 72b2064227e7..23780dbb226e 100644
--- a/docker/docker-compose-quickstart/Dockerfile
+++ b/docker/docker-compose-quickstart/Dockerfile
@@ -11,7 +11,7 @@
# Build stage
FROM alpine:3.19 AS build
-ARG RI_VERSION="1.3.0-SNAPSHOT"
+ARG RI_VERSION="1.3.0-beta"
ENV RELEASE_URL="https://github.com/open-metadata/OpenMetadata/releases/download/${RI_VERSION}-release/openmetadata-${RI_VERSION}.tar.gz"
RUN mkdir -p /opt/openmetadata && \
@@ -21,7 +21,7 @@ RUN mkdir -p /opt/openmetadata && \
# Final stage
FROM alpine:3.19
-ARG RI_VERSION="1.3.0-SNAPSHOT"
+ARG RI_VERSION="1.3.0-beta"
ARG BUILD_DATE
ARG COMMIT_ID
LABEL maintainer="OpenMetadata"
From 8272a4cf7eebbf2def3afd3ea0614d44d6c00a88 Mon Sep 17 00:00:00 2001
From: Akash-Jain <15995028+akash-jain-10@users.noreply.github.com>
Date: Mon, 29 Jan 2024 16:35:59 +0530
Subject: [PATCH 03/13] chore(version-bump): Update GitHub Actions Ingestion
 Slim Release version
---
.github/workflows/docker-openmetadata-ingestion-base-slim.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/docker-openmetadata-ingestion-base-slim.yml b/.github/workflows/docker-openmetadata-ingestion-base-slim.yml
index 39f912bebdd6..05036b217c6b 100644
--- a/.github/workflows/docker-openmetadata-ingestion-base-slim.yml
+++ b/.github/workflows/docker-openmetadata-ingestion-base-slim.yml
@@ -31,7 +31,7 @@ jobs:
steps:
- name: Check trigger type
if: ${{ env.input == '' }}
- run: echo "input=1.3.0-SNAPSHOT" >> $GITHUB_ENV
+ run: echo "input=1.3.0-beta" >> $GITHUB_ENV
- name: Check out the Repo
uses: actions/checkout@v3
From ce5938a78382c20491008fab2933c2bc8ca536f8 Mon Sep 17 00:00:00 2001
From: Akash-Jain <15995028+akash-jain-10@users.noreply.github.com>
Date: Tue, 6 Feb 2024 10:04:20 +0530
Subject: [PATCH 04/13] chore: Bump Version to `1.3.0`
---
.github/workflows/docker-openmetadata-db.yml | 2 +-
.../workflows/docker-openmetadata-ingestion-base-slim.yml | 2 +-
.github/workflows/docker-openmetadata-ingestion-base.yml | 2 +-
.github/workflows/docker-openmetadata-ingestion.yml | 2 +-
.github/workflows/docker-openmetadata-postgres.yml | 2 +-
.github/workflows/docker-openmetadata-server.yml | 4 ++--
common/pom.xml | 2 +-
.../docker-compose-ingestion/docker-compose-ingestion.yml | 2 +-
.../docker-compose-openmetadata.yml | 4 ++--
docker/docker-compose-quickstart/Dockerfile | 4 ++--
.../docker-compose-quickstart/docker-compose-postgres.yml | 8 ++++----
docker/docker-compose-quickstart/docker-compose.yml | 8 ++++----
ingestion/Dockerfile | 2 +-
ingestion/operators/docker/Dockerfile | 2 +-
ingestion/pyproject.toml | 2 +-
openmetadata-airflow-apis/pyproject.toml | 2 +-
openmetadata-clients/openmetadata-java-client/pom.xml | 2 +-
openmetadata-clients/pom.xml | 2 +-
openmetadata-dist/pom.xml | 2 +-
openmetadata-service/pom.xml | 2 +-
openmetadata-shaded-deps/elasticsearch-dep/pom.xml | 2 +-
openmetadata-shaded-deps/opensearch-dep/pom.xml | 2 +-
openmetadata-shaded-deps/pom.xml | 2 +-
openmetadata-spec/pom.xml | 2 +-
openmetadata-ui/pom.xml | 2 +-
pom.xml | 2 +-
26 files changed, 35 insertions(+), 35 deletions(-)
diff --git a/.github/workflows/docker-openmetadata-db.yml b/.github/workflows/docker-openmetadata-db.yml
index 36e6a2295edf..149caff196cd 100644
--- a/.github/workflows/docker-openmetadata-db.yml
+++ b/.github/workflows/docker-openmetadata-db.yml
@@ -31,7 +31,7 @@ jobs:
steps:
- name: Check trigger type
if: ${{ env.input == '' }}
- run: echo "input=1.3.0-beta" >> $GITHUB_ENV
+ run: echo "input=1.3.0" >> $GITHUB_ENV
- name: Check out the Repo
uses: actions/checkout@v3
diff --git a/.github/workflows/docker-openmetadata-ingestion-base-slim.yml b/.github/workflows/docker-openmetadata-ingestion-base-slim.yml
index 05036b217c6b..5db0a9076c43 100644
--- a/.github/workflows/docker-openmetadata-ingestion-base-slim.yml
+++ b/.github/workflows/docker-openmetadata-ingestion-base-slim.yml
@@ -31,7 +31,7 @@ jobs:
steps:
- name: Check trigger type
if: ${{ env.input == '' }}
- run: echo "input=1.3.0-beta" >> $GITHUB_ENV
+ run: echo "input=1.3.0" >> $GITHUB_ENV
- name: Check out the Repo
uses: actions/checkout@v3
diff --git a/.github/workflows/docker-openmetadata-ingestion-base.yml b/.github/workflows/docker-openmetadata-ingestion-base.yml
index 9b30f19e68a8..e91586bff752 100644
--- a/.github/workflows/docker-openmetadata-ingestion-base.yml
+++ b/.github/workflows/docker-openmetadata-ingestion-base.yml
@@ -31,7 +31,7 @@ jobs:
steps:
- name: Check trigger type
if: ${{ env.input == '' }}
- run: echo "input=1.3.0-beta" >> $GITHUB_ENV
+ run: echo "input=1.3.0" >> $GITHUB_ENV
- name: Check out the Repo
uses: actions/checkout@v3
diff --git a/.github/workflows/docker-openmetadata-ingestion.yml b/.github/workflows/docker-openmetadata-ingestion.yml
index 1e8815ab1f1e..fdc02a84cac6 100644
--- a/.github/workflows/docker-openmetadata-ingestion.yml
+++ b/.github/workflows/docker-openmetadata-ingestion.yml
@@ -31,7 +31,7 @@ jobs:
steps:
- name: Check trigger type
if: ${{ env.input == '' }}
- run: echo "input=1.3.0-beta" >> $GITHUB_ENV
+ run: echo "input=1.3.0" >> $GITHUB_ENV
- name: Check out the Repo
uses: actions/checkout@v3
diff --git a/.github/workflows/docker-openmetadata-postgres.yml b/.github/workflows/docker-openmetadata-postgres.yml
index 49af4f85d0c1..f90fbb4043d2 100644
--- a/.github/workflows/docker-openmetadata-postgres.yml
+++ b/.github/workflows/docker-openmetadata-postgres.yml
@@ -31,7 +31,7 @@ jobs:
steps:
- name: Check trigger type
if: ${{ env.input == '' }}
- run: echo "input=1.3.0-beta" >> $GITHUB_ENV
+ run: echo "input=1.3.0" >> $GITHUB_ENV
- name: Check out the Repo
uses: actions/checkout@v3
diff --git a/.github/workflows/docker-openmetadata-server.yml b/.github/workflows/docker-openmetadata-server.yml
index 7931ce542464..bbd6c1d8ae67 100644
--- a/.github/workflows/docker-openmetadata-server.yml
+++ b/.github/workflows/docker-openmetadata-server.yml
@@ -64,7 +64,7 @@ jobs:
steps:
- name: Check trigger type
id: check_trigger
- run: echo "DOCKER_RELEASE_TAG=1.3.0-beta" >> $GITHUB_OUTPUT
+ run: echo "DOCKER_RELEASE_TAG=1.3.0" >> $GITHUB_OUTPUT
- name: Download application from Artifiact
uses: actions/download-artifact@v2
@@ -129,7 +129,7 @@ jobs:
- name: Check trigger type
id: check_trigger
if: ${{ env.DOCKER_RELEASE_TAG == '' }}
- run: echo "DOCKER_RELEASE_TAG=1.3.0-beta" >> $GITHUB_ENV
+ run: echo "DOCKER_RELEASE_TAG=1.3.0" >> $GITHUB_ENV
- name: Check out the Repo
uses: actions/checkout@v3
diff --git a/common/pom.xml b/common/pom.xml
index eda420eac020..91bb0922dd3a 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -18,7 +18,7 @@
platform
org.open-metadata
- 1.3.0-beta
+ 1.3.0
4.0.0
diff --git a/docker/docker-compose-ingestion/docker-compose-ingestion.yml b/docker/docker-compose-ingestion/docker-compose-ingestion.yml
index 9863edc573a2..919f33ee953f 100644
--- a/docker/docker-compose-ingestion/docker-compose-ingestion.yml
+++ b/docker/docker-compose-ingestion/docker-compose-ingestion.yml
@@ -18,7 +18,7 @@ volumes:
services:
ingestion:
container_name: openmetadata_ingestion
- image: docker.getcollate.io/openmetadata/ingestion:1.3.0-beta
+ image: docker.getcollate.io/openmetadata/ingestion:1.3.0
environment:
AIRFLOW__API__AUTH_BACKENDS: "airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session"
AIRFLOW__CORE__EXECUTOR: LocalExecutor
diff --git a/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml b/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml
index 0a77cd466be3..1778de7a1503 100644
--- a/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml
+++ b/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml
@@ -14,7 +14,7 @@ services:
execute-migrate-all:
container_name: execute_migrate_all
command: "./bootstrap/openmetadata-ops.sh migrate"
- image: docker.getcollate.io/openmetadata/server:1.3.0-beta
+ image: docker.getcollate.io/openmetadata/server:1.3.0
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
SERVER_PORT: ${SERVER_PORT:-8585}
@@ -202,7 +202,7 @@ services:
openmetadata-server:
container_name: openmetadata_server
restart: always
- image: docker.getcollate.io/openmetadata/server:1.3.0-beta-SNAPSHOT
+ image: docker.getcollate.io/openmetadata/server:1.3.0
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
SERVER_PORT: ${SERVER_PORT:-8585}
diff --git a/docker/docker-compose-quickstart/Dockerfile b/docker/docker-compose-quickstart/Dockerfile
index 23780dbb226e..802fecce2124 100644
--- a/docker/docker-compose-quickstart/Dockerfile
+++ b/docker/docker-compose-quickstart/Dockerfile
@@ -11,7 +11,7 @@
# Build stage
FROM alpine:3.19 AS build
-ARG RI_VERSION="1.3.0-beta"
+ARG RI_VERSION="1.3.0"
ENV RELEASE_URL="https://github.com/open-metadata/OpenMetadata/releases/download/${RI_VERSION}-release/openmetadata-${RI_VERSION}.tar.gz"
RUN mkdir -p /opt/openmetadata && \
@@ -21,7 +21,7 @@ RUN mkdir -p /opt/openmetadata && \
# Final stage
FROM alpine:3.19
-ARG RI_VERSION="1.3.0-beta"
+ARG RI_VERSION="1.3.0"
ARG BUILD_DATE
ARG COMMIT_ID
LABEL maintainer="OpenMetadata"
diff --git a/docker/docker-compose-quickstart/docker-compose-postgres.yml b/docker/docker-compose-quickstart/docker-compose-postgres.yml
index afb302ef1dc7..dbdf394c194a 100644
--- a/docker/docker-compose-quickstart/docker-compose-postgres.yml
+++ b/docker/docker-compose-quickstart/docker-compose-postgres.yml
@@ -18,7 +18,7 @@ volumes:
services:
postgresql:
container_name: openmetadata_postgresql
- image: docker.getcollate.io/openmetadata/postgresql:1.3.0-beta
+ image: docker.getcollate.io/openmetadata/postgresql:1.3.0
restart: always
command: "--work_mem=10MB"
environment:
@@ -61,7 +61,7 @@ services:
execute-migrate-all:
container_name: execute_migrate_all
- image: docker.getcollate.io/openmetadata/server:1.3.0-beta
+ image: docker.getcollate.io/openmetadata/server:1.3.0
command: "./bootstrap/openmetadata-ops.sh migrate"
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
@@ -250,7 +250,7 @@ services:
openmetadata-server:
container_name: openmetadata_server
restart: always
- image: docker.getcollate.io/openmetadata/server:1.3.0-beta
+ image: docker.getcollate.io/openmetadata/server:1.3.0
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
SERVER_PORT: ${SERVER_PORT:-8585}
@@ -439,7 +439,7 @@ services:
ingestion:
container_name: openmetadata_ingestion
- image: docker.getcollate.io/openmetadata/ingestion:1.3.0-beta
+ image: docker.getcollate.io/openmetadata/ingestion:1.3.0
depends_on:
elasticsearch:
condition: service_started
diff --git a/docker/docker-compose-quickstart/docker-compose.yml b/docker/docker-compose-quickstart/docker-compose.yml
index 8eec1690cf02..faef04c6ae71 100644
--- a/docker/docker-compose-quickstart/docker-compose.yml
+++ b/docker/docker-compose-quickstart/docker-compose.yml
@@ -18,7 +18,7 @@ volumes:
services:
mysql:
container_name: openmetadata_mysql
- image: docker.getcollate.io/openmetadata/db:1.3.0-beta
+ image: docker.getcollate.io/openmetadata/db:1.3.0
command: "--sort_buffer_size=10M"
restart: always
environment:
@@ -59,7 +59,7 @@ services:
execute-migrate-all:
container_name: execute_migrate_all
- image: docker.getcollate.io/openmetadata/server:1.3.0-beta
+ image: docker.getcollate.io/openmetadata/server:1.3.0
command: "./bootstrap/openmetadata-ops.sh migrate"
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
@@ -248,7 +248,7 @@ services:
openmetadata-server:
container_name: openmetadata_server
restart: always
- image: docker.getcollate.io/openmetadata/server:1.3.0-beta
+ image: docker.getcollate.io/openmetadata/server:1.3.0
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
SERVER_PORT: ${SERVER_PORT:-8585}
@@ -437,7 +437,7 @@ services:
ingestion:
container_name: openmetadata_ingestion
- image: docker.getcollate.io/openmetadata/ingestion:1.3.0-beta
+ image: docker.getcollate.io/openmetadata/ingestion:1.3.0
depends_on:
elasticsearch:
condition: service_started
diff --git a/ingestion/Dockerfile b/ingestion/Dockerfile
index a895e68bafc9..437e6fb340ca 100644
--- a/ingestion/Dockerfile
+++ b/ingestion/Dockerfile
@@ -81,7 +81,7 @@ ARG INGESTION_DEPENDENCY="all"
ENV PIP_NO_CACHE_DIR=1
# Make pip silent
ENV PIP_QUIET=1
-ARG RI_VERSION="1.3.0.0.dev0"
+ARG RI_VERSION="1.3.0.0"
RUN pip install --upgrade pip
RUN pip install "openmetadata-managed-apis~=${RI_VERSION}" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.7.3/constraints-3.10.txt"
RUN pip install "openmetadata-ingestion[${INGESTION_DEPENDENCY}]~=${RI_VERSION}"
diff --git a/ingestion/operators/docker/Dockerfile b/ingestion/operators/docker/Dockerfile
index 2221394cecf3..eb7372b0a9d3 100644
--- a/ingestion/operators/docker/Dockerfile
+++ b/ingestion/operators/docker/Dockerfile
@@ -87,7 +87,7 @@ ENV PIP_QUIET=1
RUN pip install --upgrade pip
ARG INGESTION_DEPENDENCY="all"
-ARG RI_VERSION="1.3.0.0.dev0"
+ARG RI_VERSION="1.3.0.0"
RUN pip install --upgrade pip
RUN pip install "openmetadata-ingestion[airflow]~=${RI_VERSION}"
RUN pip install "openmetadata-ingestion[${INGESTION_DEPENDENCY}]~=${RI_VERSION}"
diff --git a/ingestion/pyproject.toml b/ingestion/pyproject.toml
index 37f603ea256b..c28de4dade83 100644
--- a/ingestion/pyproject.toml
+++ b/ingestion/pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"
# since it helps us organize and isolate version management
[project]
name = "openmetadata-ingestion"
-version = "1.3.0.0.dev0"
+version = "1.3.0.0"
dynamic = ["readme", "dependencies", "optional-dependencies"]
authors = [
{name = "OpenMetadata Committers"}
diff --git a/openmetadata-airflow-apis/pyproject.toml b/openmetadata-airflow-apis/pyproject.toml
index 3106d997c2b9..1b012693bcf8 100644
--- a/openmetadata-airflow-apis/pyproject.toml
+++ b/openmetadata-airflow-apis/pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"
# since it helps us organize and isolate version management
[project]
name = "openmetadata_managed_apis"
-version = "1.3.0.0.dev0"
+version = "1.3.0.0"
readme = "README.md"
authors = [
{name = "OpenMetadata Committers"}
diff --git a/openmetadata-clients/openmetadata-java-client/pom.xml b/openmetadata-clients/openmetadata-java-client/pom.xml
index 980f6c5ea634..0f57a6c53502 100644
--- a/openmetadata-clients/openmetadata-java-client/pom.xml
+++ b/openmetadata-clients/openmetadata-java-client/pom.xml
@@ -5,7 +5,7 @@
openmetadata-clients
org.open-metadata
- 1.3.0-beta
+ 1.3.0
4.0.0
diff --git a/openmetadata-clients/pom.xml b/openmetadata-clients/pom.xml
index a83d56f01146..3b4dd1574b26 100644
--- a/openmetadata-clients/pom.xml
+++ b/openmetadata-clients/pom.xml
@@ -5,7 +5,7 @@
platform
org.open-metadata
- 1.3.0-beta
+ 1.3.0
4.0.0
diff --git a/openmetadata-dist/pom.xml b/openmetadata-dist/pom.xml
index 85ada0c5c03b..686ff6b08bae 100644
--- a/openmetadata-dist/pom.xml
+++ b/openmetadata-dist/pom.xml
@@ -20,7 +20,7 @@
platform
org.open-metadata
- 1.3.0-beta
+ 1.3.0
openmetadata-dist
diff --git a/openmetadata-service/pom.xml b/openmetadata-service/pom.xml
index 4c7b077da816..4197bce82017 100644
--- a/openmetadata-service/pom.xml
+++ b/openmetadata-service/pom.xml
@@ -5,7 +5,7 @@
platform
org.open-metadata
- 1.3.0-beta
+ 1.3.0
4.0.0
openmetadata-service
diff --git a/openmetadata-shaded-deps/elasticsearch-dep/pom.xml b/openmetadata-shaded-deps/elasticsearch-dep/pom.xml
index dc8a8c5ef98d..eceee4fd8f12 100644
--- a/openmetadata-shaded-deps/elasticsearch-dep/pom.xml
+++ b/openmetadata-shaded-deps/elasticsearch-dep/pom.xml
@@ -5,7 +5,7 @@
openmetadata-shaded-deps
org.open-metadata
- 1.3.0-beta
+ 1.3.0
4.0.0
elasticsearch-deps
diff --git a/openmetadata-shaded-deps/opensearch-dep/pom.xml b/openmetadata-shaded-deps/opensearch-dep/pom.xml
index 3283a15f4885..a8dac56e7372 100644
--- a/openmetadata-shaded-deps/opensearch-dep/pom.xml
+++ b/openmetadata-shaded-deps/opensearch-dep/pom.xml
@@ -5,7 +5,7 @@
openmetadata-shaded-deps
org.open-metadata
- 1.3.0-beta
+ 1.3.0
4.0.0
opensearch-deps
diff --git a/openmetadata-shaded-deps/pom.xml b/openmetadata-shaded-deps/pom.xml
index 6fb7f5e03608..10d274485ea6 100644
--- a/openmetadata-shaded-deps/pom.xml
+++ b/openmetadata-shaded-deps/pom.xml
@@ -5,7 +5,7 @@
platform
org.open-metadata
- 1.3.0-beta
+ 1.3.0
4.0.0
openmetadata-shaded-deps
diff --git a/openmetadata-spec/pom.xml b/openmetadata-spec/pom.xml
index 149cc662a263..1b42efe0820b 100644
--- a/openmetadata-spec/pom.xml
+++ b/openmetadata-spec/pom.xml
@@ -5,7 +5,7 @@
platform
org.open-metadata
- 1.3.0-beta
+ 1.3.0
4.0.0
diff --git a/openmetadata-ui/pom.xml b/openmetadata-ui/pom.xml
index 2e2098cf0c30..e73a5e61bbe0 100644
--- a/openmetadata-ui/pom.xml
+++ b/openmetadata-ui/pom.xml
@@ -5,7 +5,7 @@
platform
org.open-metadata
- 1.3.0-beta
+ 1.3.0
4.0.0
diff --git a/pom.xml b/pom.xml
index a4e97d1ded0f..93190615a462 100644
--- a/pom.xml
+++ b/pom.xml
@@ -26,7 +26,7 @@
based on Open Metadata Standards/APIs, supporting connectors to a wide range of data services,
OpenMetadata enables end-to-end metadata management, giving you the freedom to unlock the value of your data assets.
- 1.3.0-beta
+ 1.3.0
https://github.com/open-metadata/OpenMetadata
openmetadata-spec
From 2bd3cadb441876034a23c3021d9d41e103ee0af0 Mon Sep 17 00:00:00 2001
From: Mayur Singal <39544459+ulixius9@users.noreply.github.com>
Date: Tue, 6 Feb 2024 10:42:53 +0530
Subject: [PATCH 05/13] MINOR: Fix Databricks SDK Breaking Change (#15037)
(cherry picked from commit d76809801d8ee7ac519a76c528e9dcaf5a8998cc)
---
ingestion/setup.py | 2 +-
.../source/database/unitycatalog/metadata.py | 47 +++++++------------
2 files changed, 17 insertions(+), 32 deletions(-)
diff --git a/ingestion/setup.py b/ingestion/setup.py
index 2447b7b6b6b5..804f864e4e7e 100644
--- a/ingestion/setup.py
+++ b/ingestion/setup.py
@@ -41,7 +41,7 @@
"azure-storage-blob": "azure-storage-blob~=12.14",
"azure-identity": "azure-identity~=1.12",
"sqlalchemy-databricks": "sqlalchemy-databricks~=0.1",
- "databricks-sdk": "databricks-sdk~=0.1",
+ "databricks-sdk": "databricks-sdk>=0.18.0,<0.20.0",
"google": "google>=3.0.0",
"trino": "trino[sqlalchemy]",
"spacy": "spacy==3.5.0",
diff --git a/ingestion/src/metadata/ingestion/source/database/unitycatalog/metadata.py b/ingestion/src/metadata/ingestion/source/database/unitycatalog/metadata.py
index 5a1a0db5228b..d2c404c82524 100644
--- a/ingestion/src/metadata/ingestion/source/database/unitycatalog/metadata.py
+++ b/ingestion/src/metadata/ingestion/source/database/unitycatalog/metadata.py
@@ -13,11 +13,10 @@
"""
import json
import traceback
-from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
+from typing import Any, Iterable, List, Optional, Tuple, Union
from databricks.sdk.service.catalog import ColumnInfo
from databricks.sdk.service.catalog import TableConstraint as DBTableConstraint
-from databricks.sdk.service.catalog import TableConstraintList
from metadata.generated.schema.api.data.createDatabase import CreateDatabaseRequest
from metadata.generated.schema.api.data.createDatabaseSchema import (
@@ -75,19 +74,6 @@
logger = ingestion_logger()
-# pylint: disable=not-callable
-@classmethod
-def from_dict(cls, dct: Dict[str, Any]) -> "TableConstraintList":
- return cls(
- table_constraints=[
- DBTableConstraint.from_dict(constraint) for constraint in dct
- ]
- )
-
-
-TableConstraintList.from_dict = from_dict
-
-
class UnitycatalogSource(DatabaseServiceSource, MultiDBSource):
"""
Implements the necessary methods to extract
@@ -351,7 +337,7 @@ def yield_table(
)
def get_table_constraints(
- self, constraints: TableConstraintList
+ self, constraints: List[DBTableConstraint]
) -> Tuple[List[TableConstraint], List[ForeignConstrains]]:
"""
Function to handle table constraint for the current table and add it to context
@@ -359,23 +345,22 @@ def get_table_constraints(
primary_constraints = []
foreign_constraints = []
- if constraints and constraints.table_constraints:
- for constraint in constraints.table_constraints:
- if constraint.primary_key_constraint:
- primary_constraints.append(
- TableConstraint(
- constraintType=ConstraintType.PRIMARY_KEY,
- columns=constraint.primary_key_constraint.child_columns,
- )
+ for constraint in constraints:
+ if constraint.primary_key_constraint:
+ primary_constraints.append(
+ TableConstraint(
+ constraintType=ConstraintType.PRIMARY_KEY,
+ columns=constraint.primary_key_constraint.child_columns,
)
- if constraint.foreign_key_constraint:
- foreign_constraints.append(
- ForeignConstrains(
- child_columns=constraint.foreign_key_constraint.child_columns,
- parent_columns=constraint.foreign_key_constraint.parent_columns,
- parent_table=constraint.foreign_key_constraint.parent_table,
- )
+ )
+ if constraint.foreign_key_constraint:
+ foreign_constraints.append(
+ ForeignConstrains(
+ child_columns=constraint.foreign_key_constraint.child_columns,
+ parent_columns=constraint.foreign_key_constraint.parent_columns,
+ parent_table=constraint.foreign_key_constraint.parent_table,
)
+ )
return primary_constraints, foreign_constraints
def _get_foreign_constraints(self, foreign_columns) -> List[TableConstraint]:
From 9144a650ee37cfa01bce1a18946c3524ce268063 Mon Sep 17 00:00:00 2001
From: Akash-Jain <15995028+akash-jain-10@users.noreply.github.com>
Date: Tue, 6 Feb 2024 15:42:59 +0530
Subject: [PATCH 06/13] fix: Revert Docker Compose to use
`bootstrap_storage.sh` script for migrations
---
docker/development/docker-compose-postgres.yml | 2 +-
docker/development/docker-compose.yml | 2 +-
.../docker-compose-openmetadata/docker-compose-openmetadata.yml | 2 +-
docker/docker-compose-quickstart/docker-compose-postgres.yml | 2 +-
docker/docker-compose-quickstart/docker-compose.yml | 2 +-
5 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/docker/development/docker-compose-postgres.yml b/docker/development/docker-compose-postgres.yml
index b6f0e05362cb..cb15ae4b9d52 100644
--- a/docker/development/docker-compose-postgres.yml
+++ b/docker/development/docker-compose-postgres.yml
@@ -70,7 +70,7 @@ services:
context: ../../.
dockerfile: docker/development/Dockerfile
container_name: execute_migrate_all
- command: "./bootstrap/openmetadata-ops.sh -d migrate --force"
+ command: "./bootstrap/bootstrap_storage.sh migrate-all debug"
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
SERVER_PORT: ${SERVER_PORT:-8585}
diff --git a/docker/development/docker-compose.yml b/docker/development/docker-compose.yml
index 6cfa9b0372d2..d852cc45f2bd 100644
--- a/docker/development/docker-compose.yml
+++ b/docker/development/docker-compose.yml
@@ -70,7 +70,7 @@ services:
context: ../../.
dockerfile: docker/development/Dockerfile
container_name: execute_migrate_all
- command: "./bootstrap/openmetadata-ops.sh -d migrate --force"
+ command: "./bootstrap/bootstrap_storage.sh migrate-all debug"
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
SERVER_PORT: ${SERVER_PORT:-8585}
diff --git a/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml b/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml
index 1778de7a1503..07331c58799f 100644
--- a/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml
+++ b/docker/docker-compose-openmetadata/docker-compose-openmetadata.yml
@@ -13,7 +13,7 @@ version: "3.9"
services:
execute-migrate-all:
container_name: execute_migrate_all
- command: "./bootstrap/openmetadata-ops.sh migrate"
+ command: "./bootstrap/bootstrap_storage.sh migrate-all"
image: docker.getcollate.io/openmetadata/server:1.3.0
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
diff --git a/docker/docker-compose-quickstart/docker-compose-postgres.yml b/docker/docker-compose-quickstart/docker-compose-postgres.yml
index dbdf394c194a..b98c76bce0be 100644
--- a/docker/docker-compose-quickstart/docker-compose-postgres.yml
+++ b/docker/docker-compose-quickstart/docker-compose-postgres.yml
@@ -62,7 +62,7 @@ services:
execute-migrate-all:
container_name: execute_migrate_all
image: docker.getcollate.io/openmetadata/server:1.3.0
- command: "./bootstrap/openmetadata-ops.sh migrate"
+ command: "./bootstrap/bootstrap_storage.sh migrate-all"
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
SERVER_PORT: ${SERVER_PORT:-8585}
diff --git a/docker/docker-compose-quickstart/docker-compose.yml b/docker/docker-compose-quickstart/docker-compose.yml
index faef04c6ae71..5549910cf194 100644
--- a/docker/docker-compose-quickstart/docker-compose.yml
+++ b/docker/docker-compose-quickstart/docker-compose.yml
@@ -60,7 +60,7 @@ services:
execute-migrate-all:
container_name: execute_migrate_all
image: docker.getcollate.io/openmetadata/server:1.3.0
- command: "./bootstrap/openmetadata-ops.sh migrate"
+ command: "./bootstrap/bootstrap_storage.sh migrate-all"
environment:
OPENMETADATA_CLUSTER_NAME: ${OPENMETADATA_CLUSTER_NAME:-openmetadata}
SERVER_PORT: ${SERVER_PORT:-8585}
From 1b1bc49e4c27136c30163c30cfa10b04ab69f0e6 Mon Sep 17 00:00:00 2001
From: Antoine Balliet
Date: Mon, 4 Mar 2024 11:31:08 +0100
Subject: [PATCH 07/13] feat: implement Periscope dashboard connector (client, connection, metadata source, models)
---
.../source/dashboard/periscope/client.py | 134 +++++++
.../source/dashboard/periscope/connection.py | 32 ++
.../source/dashboard/periscope/metadata.py | 349 ++++++++++++++++++
.../source/dashboard/periscope/models.py | 137 +++++++
.../dashboard/periscope/ometa_client.py | 333 +++++++++++++++++
.../dashboard/periscopeConnection.json | 42 +++
.../entity/services/dashboardService.json | 9 +-
7 files changed, 1035 insertions(+), 1 deletion(-)
create mode 100644 ingestion/src/metadata/ingestion/source/dashboard/periscope/client.py
create mode 100644 ingestion/src/metadata/ingestion/source/dashboard/periscope/connection.py
create mode 100644 ingestion/src/metadata/ingestion/source/dashboard/periscope/metadata.py
create mode 100644 ingestion/src/metadata/ingestion/source/dashboard/periscope/models.py
create mode 100644 ingestion/src/metadata/ingestion/source/dashboard/periscope/ometa_client.py
create mode 100644 openmetadata-spec/src/main/resources/json/schema/entity/services/connections/dashboard/periscopeConnection.json
diff --git a/ingestion/src/metadata/ingestion/source/dashboard/periscope/client.py b/ingestion/src/metadata/ingestion/source/dashboard/periscope/client.py
new file mode 100644
index 000000000000..a5d11ff57d55
--- /dev/null
+++ b/ingestion/src/metadata/ingestion/source/dashboard/periscope/client.py
@@ -0,0 +1,134 @@
+"""
+REST Auth & Client for Periscope
+"""
+import json
+import traceback
+from typing import List, Optional, Dict
+from copy import deepcopy
+
+import requests
+
+from metadata.ingestion.source.dashboard.periscope.ometa_client import REST, ClientConfig
+from metadata.ingestion.source.dashboard.periscope.connection import PeriscopeConnection
+from metadata.ingestion.source.dashboard.periscope.models import (
+ PeriscopeDashboard,
+ PeriscopeDashboardList,
+ PeriscopeDashboardDetails,
+ PeriscopeViewList,
+ PeriscopeView
+)
+
+from metadata.ingestion.connections.test_connections import SourceConnectionException
+from metadata.utils.constants import AUTHORIZATION_HEADER, NO_ACCESS_TOKEN
+from metadata.utils.logger import ingestion_logger
+
+
+logger = ingestion_logger()
+
+SESSION_HEADERS = {"Content-Type": "application/json", "Accept": "*/*"}
+DEFAULT_TIMEOUT = 30
+
+class PeriscopeClient:
+ """
+ Client Handling API communication with Periscope
+ """
+
+ def _check_connection(self) -> dict:
+ try:
+ headers = deepcopy(SESSION_HEADERS)
+ headers["cookie"] = self.config.cookies
+ self.resp = requests.get(
+ f"https://app.periscopedata.com/login_state/dashboards",
+ headers=headers,
+ timeout=DEFAULT_TIMEOUT,
+ params={"client_site_id": self.config.client_site_id},
+ )
+
+ if not self.resp.ok:
+ msg = "Failed to fetch Periscope, please validate credentials"
+ raise SourceConnectionException(msg)
+
+ except Exception as exc:
+ msg = f"Unknown error in connection: {exc}."
+ raise SourceConnectionException(msg) from exc
+
+ logger.debug(f"Periscope connection status: {self.resp.status_code}")
+ return headers
+
+ def __init__(
+ self,
+ config: PeriscopeConnection,
+ ):
+ self.config = config
+ headers = self._check_connection()
+
+ client_config: ClientConfig = ClientConfig(
+ base_url="https://app.periscopedata.com",
+ api_version="",
+ auth_header="no-auth",
+ auth_token=lambda: (NO_ACCESS_TOKEN, 0),
+ extra_headers=headers
+ )
+ self.client = REST(client_config)
+ self.cached_dashboard_dict = self.get_cached_dashboard_per_id()
+
+ def get_dashboards_list(self) -> List[PeriscopeDashboard]:
+ """
+ Get List of all dashboards
+ """
+ try:
+ resp_dashboards = self.client.get(f"login_state/dashboards",
+ data={
+ "client_site_id": self.config.client_site_id
+ }
+ )
+ if resp_dashboards:
+ dashboard_list = PeriscopeDashboardList.parse_obj(resp_dashboards)
+ return dashboard_list.Dashboard
+ except Exception:
+ logger.debug(traceback.format_exc())
+ logger.warning("Failed to fetch the dashboard list")
+ return []
+
+ def get_cached_dashboard_per_id(self) -> Dict[str, PeriscopeDashboard]:
+ dashboards = self.get_dashboards_list()
+ return {dashboard.id: dashboard for dashboard in dashboards}
+
+ def get_views_list(self) -> List[PeriscopeView]:
+ """
+ Get List of all views
+ """
+ try:
+ resp_collections = self.client.get("login_state/sql_views", data={"client_site_id": self.config.client_site_id})
+ if resp_collections:
+ collection_list = PeriscopeViewList.parse_obj(resp_collections)
+ return collection_list.SqlView
+ except Exception:
+ logger.debug(traceback.format_exc())
+ logger.warning("Failed to fetch the collections list")
+ return []
+
+ def get_dashboard_details(
+ self, dashboard_id: str
+ ) -> Optional[PeriscopeDashboardDetails]:
+ """
+ Get Dashboard Details
+ """
+ if not dashboard_id:
+ return None # don't call api if dashboard_id is None
+ try:
+ resp_dashboard = self.client.get(f"welcome/remaining_widgets",
+ data={
+ "current_dashboard": dashboard_id,
+ "client_site_id": self.config.client_site_id
+ }
+ )
+ if resp_dashboard:
+ return PeriscopeDashboardDetails(
+ charts=resp_dashboard.get("Widget"),
+ dashboard=self.cached_dashboard_dict[dashboard_id]
+ )
+ except Exception:
+ logger.debug(traceback.format_exc())
+ logger.warning(f"Failed to fetch the dashboard with id: {dashboard_id}")
+ return None
diff --git a/ingestion/src/metadata/ingestion/source/dashboard/periscope/connection.py b/ingestion/src/metadata/ingestion/source/dashboard/periscope/connection.py
new file mode 100644
index 000000000000..9eb37e1232da
--- /dev/null
+++ b/ingestion/src/metadata/ingestion/source/dashboard/periscope/connection.py
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, Extra, Field
+
+from metadata.ingestion.models.custom_pydantic import CustomSecretStr
+
+
+class PeriscopeType(Enum):
+ Periscope = 'Periscope'
+
+
+class PeriscopeConnection(BaseModel):
+ class Config:
+ extra = Extra.forbid
+
+ type: Optional[PeriscopeType] = Field(
+ PeriscopeType.Periscope, description='Service Type', title='Service Type'
+ )
+ cookies: str = Field(
+ ...,
+ description='Cookies to connect to Periscope. This user should have privileges to read all the metadata in Periscope.',
+ title='Cookies',
+ )
+ client_site_id: str = Field(
+ None,
+ description='Client Site ID. This is the unique identifier for the Periscope instance.',
+ title='Client Site ID',
+ )
+ supportsMetadataExtraction: Optional[bool] = Field(None, title='Supports Metadata Extraction')
diff --git a/ingestion/src/metadata/ingestion/source/dashboard/periscope/metadata.py b/ingestion/src/metadata/ingestion/source/dashboard/periscope/metadata.py
new file mode 100644
index 000000000000..5fbefd891e80
--- /dev/null
+++ b/ingestion/src/metadata/ingestion/source/dashboard/periscope/metadata.py
@@ -0,0 +1,349 @@
+import csv
+import json
+import traceback
+from metadata.generated.schema.api.data.createChart import CreateChartRequest
+from metadata.generated.schema.api.data.createDashboard import CreateDashboardRequest
+from metadata.generated.schema.api.lineage.addLineage import AddLineageRequest
+from metadata.ingestion.lineage.models import ConnectionTypeDialectMapper
+from metadata.ingestion.lineage.parser import LineageParser
+from metadata.generated.schema.type.entityReference import EntityReference
+from metadata.generated.schema.entity.data.dashboard import (
+ Dashboard as LineageDashboard,
+)
+
+from pydantic import BaseModel, ValidationError, validator
+from pathlib import Path
+from typing import Iterable, Optional, List, Dict, Any
+
+from metadata.ingestion.api.common import Entity
+from metadata.ingestion.api.models import Either, StackTraceError
+from metadata.ingestion.api.steps import InvalidSourceException
+from metadata.ingestion.source.dashboard.dashboard_service import DashboardServiceSource
+from metadata.generated.schema.entity.services.databaseService import DatabaseService
+from metadata.ingestion.lineage.sql_lineage import search_table_entities
+from metadata.generated.schema.entity.data.chart import Chart
+from metadata.generated.schema.entity.services.connections.metadata.openMetadataConnection import (
+ OpenMetadataConnection,
+)
+from metadata.generated.schema.entity.data.dashboardDataModel import DashboardDataModel, DataModelType
+from metadata.generated.schema.api.data.createDashboardDataModel import CreateDashboardDataModelRequest
+from metadata.generated.schema.entity.services.dashboardService import DashboardService
+from metadata.generated.schema.entity.data.table import Column, DataType
+from metadata.generated.schema.metadataIngestion.workflow import (
+ Source as WorkflowSource,
+)
+from metadata.ingestion.ometa.ometa_api import OpenMetadata
+from metadata.utils.logger import ingestion_logger
+from metadata.utils.filters import filter_by_chart
+from metadata.utils.helpers import (
+ clean_uri,
+ get_standard_chart_type,
+ replace_special_with,
+)
+from metadata.utils import fqn
+from metadata.generated.schema.entity.services.dashboardService import DashboardServiceType
+from metadata.generated.schema.entity.services.connections.dashboard.customDashboardConnection import (
+ CustomDashboardConnection,
+)
+from metadata.ingestion.source.dashboard.periscope.connection import PeriscopeConnection
+from metadata.ingestion.source.dashboard.periscope.models import PeriscopeDashboard, PeriscopeChart, PeriscopeView, PeriscopeDashboardDetails
+
+logger = ingestion_logger()
+
+
+class PeriscopeSource(DashboardServiceSource):
+ """
+ Periscope Source Class
+ """
+
+ config: WorkflowSource
+ metadata_config: OpenMetadataConnection
+
+ @classmethod
+ def create(cls, config_dict, metadata: OpenMetadata):
+ config = WorkflowSource.parse_obj(config_dict)
+ connection: CustomDashboardConnection = config.serviceConnection.__root__.config
+ if not isinstance(connection, CustomDashboardConnection):
+ raise InvalidSourceException(
+ f"Expected CustomDashboardConnection, but got {connection}"
+ )
+ return cls(config, metadata)
+
+ def __init__(
+ self,
+ config: WorkflowSource,
+ metadata: OpenMetadata,
+ ):
+ super().__init__(config, metadata)
+ self.views: List[PeriscopeView] = []
+
+ def prepare(self):
+ self.collections = self.client.get()
+ return super().prepare()
+
+ def get_dashboards_list(self) -> Optional[List[PeriscopeDashboard]]:
+ """
+ Get List of all dashboards
+ """
+ return self.client.get_dashboards_list()
+
+ def get_dashboard_name(self, dashboard: PeriscopeDashboard) -> str:
+ """
+ Get Dashboard Name
+ """
+ return dashboard.name
+
+ def get_dashboard_details(self, dashboard: PeriscopeDashboard) -> dict:
+ """
+ Get Dashboard Details
+ """
+ return self.client.get_dashboard_details(dashboard.id)
+
+ def get_project_name(self, dashboard_details: Any) -> Optional[str]:
+ """
+ Method to get the project name by searching the dataset using id in the workspace dict
+ """
+ try:
+ if dashboard_details.collection_id:
+ collection_name = next(
+ (
+ collection.name
+ for collection in self.collections
+ if collection.id == dashboard_details.collection_id
+ ),
+ None,
+ )
+ return collection_name
+ except Exception as exc: # pylint: disable=broad-except
+ logger.debug(traceback.format_exc())
+ logger.warning(
+ f"Error fetching the collection details for [{dashboard_details.collection_id}]: {exc}"
+ )
+ return None
+
+ def yield_dashboard(
+ self, dashboard_details: PeriscopeDashboardDetails
+ ) -> Iterable[Either[CreateDashboardRequest]]:
+ """
+ Method to Get Dashboard Entity
+ """
+ try:
+ dashboard_url = (
+ f"https://app.periscopedata.com/app/gorgias/{dashboard_details.dashboard.id}/"
+ f"{replace_special_with(raw=dashboard_details.dashboard.name.lower(), replacement='-')}"
+ )
+ dashboard_request = CreateDashboardRequest(
+ name=dashboard_details.dashboard.name,
+ sourceUrl=dashboard_url,
+ displayName=dashboard_details.dashboard.name,
+ description=dashboard_details.dashboard.description,
+ project=self.context.project_name,
+ charts=[
+ fqn.build(
+ self.metadata,
+ entity_type=Chart,
+ service_name=self.context.dashboard_service,
+ chart_name=chart,
+ )
+ for chart in self.context.charts or []
+ ],
+ service=self.context.dashboard_service,
+ owner=self.get_owner_ref(dashboard_details=dashboard_details),
+ )
+ yield Either(right=dashboard_request)
+ self.register_record(dashboard_request=dashboard_request)
+ except Exception as exc: # pylint: disable=broad-except
+ yield Either(
+ left=StackTraceError(
+ name=dashboard_details.name,
+ error=f"Error creating dashboard [{dashboard_details.name}]: {exc}",
+ stackTrace=traceback.format_exc(),
+ )
+ )
+
+ def yield_dashboard_chart(
+ self, dashboard_details: PeriscopeDashboardDetails
+ ) -> Iterable[Either[CreateChartRequest]]:
+ """Get chart method
+
+ Args:
+ dashboard_details:
+ Returns:
+ Iterable[CreateChartRequest]
+ """
+ charts = dashboard_details.ordered_cards
+ for chart in charts:
+ try:
+ chart_details = chart.card
+ if not chart_details.id or not chart_details.name:
+ continue
+ chart_url = (
+ f"{clean_uri(self.service_connection.hostPort)}/question/{chart_details.id}-"
+ f"{replace_special_with(raw=chart_details.name.lower(), replacement='-')}"
+ )
+ if filter_by_chart(
+ self.source_config.chartFilterPattern, chart_details.name
+ ):
+ self.status.filter(chart_details.name, "Chart Pattern not allowed")
+ continue
+ yield Either(
+ right=CreateChartRequest(
+ name=chart_details.id,
+ displayName=chart_details.name,
+ description=chart_details.description,
+ chartType=get_standard_chart_type(chart_details.display).value,
+ sourceUrl=chart_url,
+ service=self.context.dashboard_service,
+ )
+ )
+ except Exception as exc: # pylint: disable=broad-except
+ yield Either(
+ left=StackTraceError(
+ name="Chart",
+ error=f"Error creating chart [{chart}]: {exc}",
+ stackTrace=traceback.format_exc(),
+ )
+ )
+
+ def yield_dashboard_lineage_details(
+ self,
+ dashboard_details: PeriscopeDashboardDetails,
+ db_service_name: Optional[str],
+ ) -> Iterable[Either[AddLineageRequest]]:
+ """Get lineage method
+
+ Args:
+ dashboard_details
+ """
+ if not db_service_name:
+ return
+ chart_list, dashboard_name = (
+ dashboard_details.ordered_cards,
+ str(dashboard_details.id),
+ )
+ for chart in chart_list:
+ try:
+ chart_details = chart.card
+ if (
+ chart_details.dataset_query is None
+ or chart_details.dataset_query.type is None
+ ):
+ continue
+ if chart_details.dataset_query.type == "native":
+ yield from self._yield_lineage_from_query(
+ chart_details=chart_details,
+ db_service_name=db_service_name,
+ dashboard_name=dashboard_name,
+ ) or []
+
+ # TODO: this method below only gets a single table, but if the chart of type query has a join the other
+ # table_ids will be ignored within a nested object
+ elif chart_details.dataset_query.type == "query":
+ if not chart_details.table_id:
+ continue
+ yield from self._yield_lineage_from_api(
+ chart_details=chart_details,
+ db_service_name=db_service_name,
+ dashboard_name=dashboard_name,
+ ) or []
+
+ except Exception as exc: # pylint: disable=broad-except
+ yield Either(
+ left=StackTraceError(
+ name="Lineage",
+ error=f"Error adding lineage: {exc}",
+ stackTrace=traceback.format_exc(),
+ )
+ )
+
+ def _get_database_service(self, db_service_name: str):
+ return self.metadata.get_by_name(DatabaseService, db_service_name)
+
+ def _yield_lineage_from_query(
+ self, chart_details: PeriscopeChart, db_service_name: str, dashboard_name: str
+ ) -> Iterable[Either[AddLineageRequest]]:
+ database = self.client.get_database(chart_details.database_id)
+
+ query = None
+ if (
+ chart_details.dataset_query
+ and chart_details.dataset_query.native
+ and chart_details.dataset_query.native.query
+ ):
+ query = chart_details.dataset_query.native.query
+
+ if query is None:
+ return
+
+ database_name = database.details.db if database and database.details else None
+
+ db_service = self._get_database_service(db_service_name)
+
+ lineage_parser = LineageParser(
+ query,
+ ConnectionTypeDialectMapper.dialect_of(db_service.serviceType.value)
+ if db_service
+ else None,
+ )
+
+ for table in lineage_parser.source_tables:
+ database_schema_name, table = fqn.split(str(table))[-2:]
+ database_schema_name = self.check_database_schema_name(database_schema_name)
+ from_entities = search_table_entities(
+ metadata=self.metadata,
+ database=database_name,
+ service_name=db_service_name,
+ database_schema=database_schema_name,
+ table=table,
+ )
+
+ to_fqn = fqn.build(
+ self.metadata,
+ entity_type=LineageDashboard,
+ service_name=self.config.serviceName,
+ dashboard_name=dashboard_name,
+ )
+ to_entity = self.metadata.get_by_name(
+ entity=LineageDashboard,
+ fqn=to_fqn,
+ )
+
+ for from_entity in from_entities:
+ yield self._get_add_lineage_request(
+ to_entity=to_entity, from_entity=from_entity
+ )
+
+ def _yield_lineage_from_api(
+ self, chart_details: PeriscopeChart, db_service_name: str, dashboard_name: str
+ ) -> Iterable[Either[AddLineageRequest]]:
+ table = self.client.get_table(chart_details.table_id)
+ table_name = table.name or table.display_name
+
+ if table is None or table_name is None:
+ return
+
+ database_name = table.db.details.db if table.db and table.db.details else None
+ from_entities = search_table_entities(
+ metadata=self.metadata,
+ database=database_name,
+ service_name=db_service_name,
+ database_schema=table.table_schema,
+ table=table_name,
+ )
+
+ to_fqn = fqn.build(
+ self.metadata,
+ entity_type=LineageDashboard,
+ service_name=self.config.serviceName,
+ dashboard_name=dashboard_name,
+ )
+
+ to_entity = self.metadata.get_by_name(
+ entity=LineageDashboard,
+ fqn=to_fqn,
+ )
+
+ for from_entity in from_entities:
+ yield self._get_add_lineage_request(
+ to_entity=to_entity, from_entity=from_entity
+ )
diff --git a/ingestion/src/metadata/ingestion/source/dashboard/periscope/models.py b/ingestion/src/metadata/ingestion/source/dashboard/periscope/models.py
new file mode 100644
index 000000000000..8e209c74d226
--- /dev/null
+++ b/ingestion/src/metadata/ingestion/source/dashboard/periscope/models.py
@@ -0,0 +1,137 @@
+"""
+Periscope Models
+"""
+from typing import List, Optional
+from pydantic import BaseModel, validator
+
+from metadata.generated.schema.entity.data.table import DataType
+
+PERISCOPE_TYPE_MAPPER = {
+ "INTEGER": DataType.INT,
+ "STRING": DataType.STRING,
+ "FLOAT": DataType.FLOAT,
+ "DATE": DataType.DATE
+}
+
+class PeriscopeColumnSchema(BaseModel):
+ column_name: str
+ column_type: DataType
+ is_datetime: bool
+
+ @validator("column_type", pre=True)
+ def str_to_enum(cls, value: str):
+ return PERISCOPE_TYPE_MAPPER.get(value, DataType.UNKNOWN)
+
+class PeriscopeViewsModel(BaseModel):
+ name: str
+ description: Optional[str]
+ view_schema: List[PeriscopeColumnSchema]
+ sql: str
+
+
+class PeriscopeDashboard(BaseModel):
+ """
+ Periscope dashboard model
+ """
+ id: int
+ name: Optional[str]
+ description: Optional[str]
+ aggregation: Optional[str]
+ user_name: Optional[str]
+ user_id: Optional[int]
+ collection_id: Optional[str]
+ popularity: Optional[int]
+ last_7_day_usage_hours: Optional[int]
+ visibility: str
+ last_update: str
+ create_time: str
+
+class PeriscopeChart(BaseModel):
+ id: int
+ title: Optional[str]
+ chart_type: str
+ dashboard_id: int
+ sync_version: int
+ user_id: int
+ shared_dashboard_id: None
+ api_enabled: Optional[bool]
+ chart_description: Optional[str]
+ sql: str
+ database_id: int
+ formula_source_hash_key: str
+ api_token: None
+ fluid_row: int
+ fluid_column: int
+ fluid_width: int
+ fluid_height: int
+ default_color_theme_id: None
+ use_periscope_theme: None
+ csv_download_disabled: Optional[bool]
+ sql_limit: Optional[int]
+ code: None
+ code_language: None
+ code_version: None
+ drag_drop_config: None
+ archived_at: None
+ content_type: Optional[str]
+ content_id: Optional[int]
+ code_hash_key: None
+ last_update: float
+ is_shell: bool
+
+class PeriscopeView(BaseModel):
+ id: int
+ archived_at: None
+ archived_by_user_id: None
+ code: Optional[str]
+ code_language: Optional[str]
+ code_preview_source_hash_key: Optional[str]
+ code_version: Optional[str]
+ database_id: int
+ description: Optional[str]
+ error_count: int
+ excluded_from_auto_archive: Optional[bool]
+ guid: str
+ icon_color: str
+ last_materializer_error: Optional[str]
+ last_successful_materialize_at: Optional[str]
+ last_successful_materialize_time_ms: int
+ last_updated_by_user_at: str
+ name: str
+ next_materialize_at: Optional[str]
+ no_cache_results: Optional[bool]
+ owner_user_id: int
+ popularity: Optional[int]
+ preview_source_hash_key: str
+ published_at: None
+ site_id: int
+ sql: str
+ sql_limit: Optional[int]
+ started_materialization_at: None
+ used_by_view_count: int
+ used_by_widget_count: int
+ user_id: int
+ view_schema: List[PeriscopeColumnSchema]
+ columns: List[str]
+ complete_sql_for_redshift: str
+ tag_names: List
+ last_update: float
+ materialize_pending: bool
+ code_hash_key: str
+ user_name: str
+
+ @validator("view_schema", pre=True)
+ def parse_view_schema(cls, value: dict):
+ return [PeriscopeColumnSchema.parse_obj(value[key]) for key in value.keys()]
+
+
+class PeriscopeViewList(BaseModel):
+ SqlView: List[PeriscopeView]
+ CacheSchedule: List
+
+class PeriscopeDashboardList(BaseModel):
+ Dashboard: Optional[List[PeriscopeDashboard]]
+
+class PeriscopeDashboardDetails(BaseModel):
+ charts: Optional[List[PeriscopeChart]]
+ dashboard: PeriscopeDashboard
diff --git a/ingestion/src/metadata/ingestion/source/dashboard/periscope/ometa_client.py b/ingestion/src/metadata/ingestion/source/dashboard/periscope/ometa_client.py
new file mode 100644
index 000000000000..855a874c991e
--- /dev/null
+++ b/ingestion/src/metadata/ingestion/source/dashboard/periscope/ometa_client.py
@@ -0,0 +1,333 @@
+# Copyright 2021 Collate
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Python API REST wrapper and helpers
+"""
+import datetime
+import time
+import traceback
+from typing import Callable, Dict, List, Optional, Union
+
+import requests
+from requests.exceptions import HTTPError
+
+from metadata.config.common import ConfigModel
+from metadata.ingestion.ometa.credentials import URL, get_api_version
+from metadata.utils.logger import ometa_logger
+
+logger = ometa_logger()
+
+
class RetryException(Exception):
    """Raised internally when a request should be attempted again."""
+
+
class APIError(Exception):
    """
    Error raised for API-level failures.

    The exception text is the ``message`` field of the API error
    payload; ``status_code`` exposes the HTTP status of the failed
    response when one is attached.
    """

    def __init__(self, error, http_error=None):
        # The payload's "message" becomes the exception text.
        super().__init__(error["message"])
        self._error = error
        self._http_error = http_error

    @property
    def code(self):
        """Return the API error code from the error payload."""
        return self._error["code"]

    @property
    def status_code(self):
        """
        Return the HTTP status code of the failed response.

        Returns:
            int, or None when no HTTP error with a response is attached
        """
        if self._http_error is None or not hasattr(self._http_error, "response"):
            return None
        return self._http_error.response.status_code

    @property
    def request(self):
        """Return the originating request object, or None."""
        if self._http_error is None:
            return None
        return self._http_error.request

    @property
    def response(self):
        """Return the failed response object, or None."""
        if self._http_error is None:
            return None
        return self._http_error.response
+
+
class ClientConfig(ConfigModel):
    """
    Configuration for the REST client.

    :param raw_data: should we return api response raw or wrap it with
    Entity objects.
    """

    base_url: str  # service root URL, without the API version suffix
    api_version: Optional[str] = "v1"
    retry: Optional[int] = 3  # number of retries after the first attempt
    retry_wait: Optional[int] = 30  # base wait (seconds) between retries
    retry_codes: List[int] = [429, 504]  # HTTP statuses that trigger a retry
    auth_token: Optional[Callable] = None  # callable returning (token, expiry)
    access_token: Optional[str] = None  # cached token; "no_token" disables auth
    expires_in: Optional[int] = None  # epoch timestamp after which to refresh
    auth_header: Optional[str] = None  # header name that carries the token
    extra_headers: Optional[dict] = None  # merged over the default headers
    raw_data: Optional[bool] = False
    allow_redirects: Optional[bool] = False
    auth_token_mode: Optional[str] = "Bearer"  # scheme prefixed to the token
    verify: Optional[Union[bool, str]] = None  # TLS verify flag or CA bundle path
+
+
class REST:
    """
    REST client wrapper to manage requests with
    retries, auth and error handling.
    """

    def __init__(self, config: ClientConfig):
        # Cache config values locally; the session is reused across calls.
        self.config = config
        self._base_url: URL = URL(self.config.base_url)
        self._api_version = get_api_version(self.config.api_version)
        self._session = requests.Session()
        self._use_raw_data = self.config.raw_data
        self._retry = self.config.retry
        self._retry_wait = self.config.retry_wait
        self._retry_codes = self.config.retry_codes
        self._auth_token = self.config.auth_token
        self._auth_token_mode = self.config.auth_token_mode
        self._verify = self.config.verify

    def _request(
        self,
        method,
        path,
        data: Optional[dict] = None,
        base_url: Optional[URL] = None,
        api_version: Optional[str] = None,
        headers: Optional[dict] = None,
    ):
        """
        Send one request with retry, auth-token refresh and backoff.

        Returns the decoded JSON body, or None when every retry was
        exhausted without a successful response.
        """
        # pylint: disable=too-many-locals
        if not headers:
            headers = {"Content-type": "application/json"}
        base_url = base_url or self._base_url
        version = api_version if api_version else ""
        url: URL = URL(base_url + "/" + version + path)
        # Refresh the token when expired or missing. Precedence note:
        # this reads as (expiry set AND now past expiry) OR (no token).
        if (
            self.config.expires_in
            and datetime.datetime.utcnow().timestamp() >= self.config.expires_in
            or not self.config.access_token
        ):
            self.config.access_token, expiry = self._auth_token()
            # "no_token" is a sentinel meaning the service needs no auth,
            # so no expiry bookkeeping is done for it.
            if not self.config.access_token == "no_token":
                if isinstance(expiry, datetime.datetime):
                    # Refresh 120s before the advertised expiry instant.
                    self.config.expires_in = expiry.timestamp() - 120
                else:
                    # expiry is a lifetime in seconds relative to now.
                    self.config.expires_in = (
                        datetime.datetime.utcnow().timestamp() + expiry - 120
                    )

        headers[self.config.auth_header] = (
            f"{self._auth_token_mode} {self.config.access_token}"
            if self._auth_token_mode
            else self.config.access_token
        )

        # Merge extra headers if provided.
        # If a header value is provided in modulo string format and matches an existing header,
        # the value will be set to that value.
        # Example: "Proxy-Authorization": "%(Authorization)s"
        # This will result in the Authorization value being set for the Proxy-Authorization Extra Header
        if self.config.extra_headers:
            extra_headers: Dict[str, str] = self.config.extra_headers
            headers = {**headers, **extra_headers}

        opts = {
            "headers": headers,
            # Since we allow users to set endpoint URL via env var,
            # human error to put non-SSL endpoint could exploit
            # uncanny issues in non-GET request redirecting http->https.
            # It's better to fail early if the URL isn't right.
            "allow_redirects": self.config.allow_redirects,
            "verify": self._verify,
        }

        # GET payloads travel as query params; everything else as a body.
        method_key = "params" if method.upper() == "GET" else "data"
        opts[method_key] = data

        total_retries = self._retry if self._retry > 0 else 0
        retry = total_retries
        while retry >= 0:
            try:
                return self._one_request(method, url, opts, retry)
            except RetryException:
                # Linear backoff: the wait grows with each attempt.
                retry_wait = self._retry_wait * (total_retries - retry + 1)
                logger.warning(
                    "sleep %s seconds and retrying %s %s more time(s)...",
                    retry_wait,
                    url,
                    retry,
                )
                time.sleep(retry_wait)
                retry -= 1
        # All retries exhausted without a response.
        return None

    def _one_request(self, method: str, url: URL, opts: dict, retry: int):
        """
        Perform one request, possibly raising RetryException in the case
        the response is 429. Otherwise, if error text contain "code" string,
        then it decodes to json object and returns APIError.
        Returns the body json in the 200 status.
        """
        retry_codes = self._retry_codes
        try:
            resp = self._session.request(method, url, **opts)
            resp.raise_for_status()

            if resp.text != "":
                try:
                    return resp.json()
                except Exception as exc:
                    # 2xx body that is not JSON: log and fall through to None.
                    logger.debug(traceback.format_exc())
                    logger.warning(
                        f"Unexpected error while returning response {resp} in json format - {exc}"
                    )

        except HTTPError as http_error:
            # retry if we hit Rate Limit
            if resp.status_code in retry_codes and retry > 0:
                raise RetryException() from http_error
            if "code" in resp.text:
                error = resp.json()
                if "code" in error:
                    raise APIError(error, http_error) from http_error
            else:
                raise
        except requests.ConnectionError as conn:
            # Trying to solve https://github.com/psf/requests/issues/4664
            try:
                return self._session.request(method, url, **opts).json()
            except Exception as exc:
                logger.debug(traceback.format_exc())
                logger.warning(
                    f"Unexpected error while retrying after a connection error - {exc}"
                )
            # The one-shot retry failed too: surface the original error.
            raise conn
        except Exception as exc:
            # Any other failure is logged and swallowed; caller gets None.
            logger.debug(traceback.format_exc())
            logger.warning(
                f"Unexpected error calling [{url}] with method [{method}]: {exc}"
            )

        return None

    def get(self, path, data=None):
        """
        GET method

        Parameters:
            path (str):
            data (): sent as query parameters

        Returns:
            Response
        """
        return self._request("GET", path, data)

    def post(self, path, data=None):
        """
        POST method

        Parameters:
            path (str):
            data (): sent as the request body

        Returns:
            Response
        """
        return self._request("POST", path, data)

    def put(self, path, data=None):
        """
        PUT method

        Parameters:
            path (str):
            data (): sent as the request body

        Returns:
            Response
        """
        return self._request("PUT", path, data)

    def patch(self, path, data=None):
        """
        PATCH method

        Parameters:
            path (str):
            data (): sent as the request body (JSON Patch document)

        Returns:
            Response
        """
        return self._request(
            method="PATCH",
            path=path,
            data=data,
            headers={"Content-type": "application/json-patch+json"},
        )

    def delete(self, path, data=None):
        """
        DELETE method

        Parameters:
            path (str):
            data (): sent as the request body

        Returns:
            Response
        """
        return self._request("DELETE", path, data)

    def __enter__(self):
        # Support `with REST(config) as client:` usage.
        return self

    def close(self):
        """
        Close requests session
        """
        self._session.close()

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/dashboard/periscopeConnection.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/dashboard/periscopeConnection.json
new file mode 100644
index 000000000000..711ae2198d1a
--- /dev/null
+++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/dashboard/periscopeConnection.json
@@ -0,0 +1,42 @@
+{
+ "$id": "https://open-metadata.org/schema/entity/services/connections/dashboard/periscopeConnection.json",
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "PeriscopeConnection",
+ "description": "Periscope Connection Config",
+ "type": "object",
+ "javaType": "org.openmetadata.schema.services.connections.dashboard.PeriscopeConnection",
+ "definitions": {
+ "periscopeType": {
+ "description": "Periscope service type",
+ "type": "string",
+ "enum": ["Periscope"],
+ "default": "Periscope"
+ }
+ },
+ "properties": {
+ "type": {
+ "title": "Service Type",
+ "description": "Service Type",
+ "$ref": "#/definitions/periscopeType",
+ "default": "Periscope"
+ },
+ "cookies": {
+ "title": "Cookies",
+ "description": "Cookies to connect to Periscope. This user should have privileges to read all the metadata in Periscope.",
+ "type": "string"
+ },
+ "client_id": {
+ "title": "Client ID",
+ "description": "Client ID",
+ "type": "string"
+ },
+ "supportsMetadataExtraction": {
+ "title": "Supports Metadata Extraction",
+ "description": "Supports Metadata Extraction.",
+ "type": "boolean",
+ "default": true
+ }
+ },
+ "additionalProperties": false,
  "required": ["client_site_id", "cookies"]
+}
diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/dashboardService.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/dashboardService.json
index 09d06fc9bf1f..5ecbc15d3b93 100644
--- a/openmetadata-spec/src/main/resources/json/schema/entity/services/dashboardService.json
+++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/dashboardService.json
@@ -11,7 +11,7 @@
],
"definitions": {
"dashboardServiceType": {
- "description": "Type of Dashboard service - Superset, Looker, Redash, Tableau, Metabase, PowerBi, Mode, or Lightdash",
+ "description": "Type of Dashboard service - Superset, Looker, Redash, Tableau, Metabase, PowerBi, Mode, Periscope or Lightdash",
"type": "string",
"javaInterfaces": ["org.openmetadata.schema.EnumInterface"],
"enum": [
@@ -22,6 +22,7 @@
"Metabase",
"PowerBI",
"Mode",
+ "Periscope",
"CustomDashboard",
"DomoDashboard",
"QuickSight",
@@ -48,6 +49,9 @@
{
"name": "PowerBI"
},
+ {
+ "name": "Periscope"
+ },
{
"name": "Mode"
},
@@ -103,6 +107,9 @@
{
"$ref": "./connections/dashboard/modeConnection.json"
},
+ {
+ "$ref": "./connections/dashboard/periscopeConnection.json"
+ },
{
"$ref": "./connections/dashboard/customDashboardConnection.json"
},
From 8516b96373978ee68b6f40bafe06170eb95b768e Mon Sep 17 00:00:00 2001
From: Antoine Balliet
Date: Mon, 4 Mar 2024 11:35:50 +0100
Subject: [PATCH 08/13] fix: connection type
---
.../ingestion/source/dashboard/periscope/metadata.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/ingestion/src/metadata/ingestion/source/dashboard/periscope/metadata.py b/ingestion/src/metadata/ingestion/source/dashboard/periscope/metadata.py
index 5fbefd891e80..df41771dc16e 100644
--- a/ingestion/src/metadata/ingestion/source/dashboard/periscope/metadata.py
+++ b/ingestion/src/metadata/ingestion/source/dashboard/periscope/metadata.py
@@ -62,10 +62,10 @@ class PeriscopeSource(DashboardServiceSource):
@classmethod
def create(cls, config_dict, metadata: OpenMetadata):
config = WorkflowSource.parse_obj(config_dict)
- connection: CustomDashboardConnection = config.serviceConnection.__root__.config
- if not isinstance(connection, CustomDashboardConnection):
+ connection: PeriscopeConnection = config.serviceConnection.__root__.config
+ if not isinstance(connection, PeriscopeConnection):
raise InvalidSourceException(
- f"Expected CustomDashboardConnection, but got {connection}"
+ f"Expected PeriscopeConnection, but got {connection}"
)
return cls(config, metadata)
From c10a1b216a1484f8e4cbe0e1e0fe2574441ec9ef Mon Sep 17 00:00:00 2001
From: Antoine Balliet
Date: Mon, 4 Mar 2024 11:46:54 +0100
Subject: [PATCH 09/13] Update periscopeConnection.json
---
.../services/connections/dashboard/periscopeConnection.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/dashboard/periscopeConnection.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/dashboard/periscopeConnection.json
index 711ae2198d1a..b10d42389cf1 100644
--- a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/dashboard/periscopeConnection.json
+++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/dashboard/periscopeConnection.json
@@ -25,7 +25,7 @@
"description": "Cookies to connect to Periscope. This user should have privileges to read all the metadata in Periscope.",
"type": "string"
},
- "client_id": {
+ "client_site_id": {
"title": "Client ID",
"description": "Client ID",
"type": "string"
From 68d801f71f911e6d3cb537793453f3908face1c4 Mon Sep 17 00:00:00 2001
From: Antoine Balliet
Date: Mon, 4 Mar 2024 12:19:39 +0100
Subject: [PATCH 10/13] fix circular dependency
---
.../metadata/ingestion/source/dashboard/periscope/client.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/ingestion/src/metadata/ingestion/source/dashboard/periscope/client.py b/ingestion/src/metadata/ingestion/source/dashboard/periscope/client.py
index a5d11ff57d55..c2144161eb7f 100644
--- a/ingestion/src/metadata/ingestion/source/dashboard/periscope/client.py
+++ b/ingestion/src/metadata/ingestion/source/dashboard/periscope/client.py
@@ -9,7 +9,6 @@
import requests
from metadata.ingestion.source.dashboard.periscope.ometa_client import REST, ClientConfig
-from metadata.ingestion.source.dashboard.periscope.connection import PeriscopeConnection
from metadata.ingestion.source.dashboard.periscope.models import (
PeriscopeDashboard,
PeriscopeDashboardList,
@@ -57,7 +56,7 @@ def _check_connection(self) -> dict:
def __init__(
self,
- config: PeriscopeConnection,
+ config,
):
self.config = config
headers = self._check_connection()
From ce69b2d28a52560a339a5ccbd278b091eb565683 Mon Sep 17 00:00:00 2001
From: Antoine Balliet
Date: Mon, 4 Mar 2024 12:47:01 +0100
Subject: [PATCH 11/13] wip test periscope connector
---
.../source/dashboard/periscope/connection.py | 38 ++++-
.../unit/topology/dashboard/test_periscope.py | 136 ++++++++++++++++++
2 files changed, 173 insertions(+), 1 deletion(-)
create mode 100644 ingestion/tests/unit/topology/dashboard/test_periscope.py
diff --git a/ingestion/src/metadata/ingestion/source/dashboard/periscope/connection.py b/ingestion/src/metadata/ingestion/source/dashboard/periscope/connection.py
index 9eb37e1232da..649cfd9883dc 100644
--- a/ingestion/src/metadata/ingestion/source/dashboard/periscope/connection.py
+++ b/ingestion/src/metadata/ingestion/source/dashboard/periscope/connection.py
@@ -6,12 +6,48 @@
from pydantic import BaseModel, Extra, Field
from metadata.ingestion.models.custom_pydantic import CustomSecretStr
+from metadata.generated.schema.entity.automations.workflow import (
+ Workflow as AutomationWorkflow,
+)
+from metadata.ingestion.source.dashboard.periscope.client import PeriscopeClient
+from metadata.ingestion.connections.test_connections import test_connection_steps
+from metadata.ingestion.ometa.ometa_api import OpenMetadata
+
+
def get_connection(connection: "PeriscopeConnection") -> PeriscopeClient:
    """
    Create connection

    :param connection: Periscope connection configuration
    :return: a PeriscopeClient bound to that configuration

    The PeriscopeConnection annotation is quoted because the class is
    declared further down this module; an unquoted reference would
    raise NameError when the module is imported.
    """
    return PeriscopeClient(connection)
+
+
def test_connection(
    metadata: OpenMetadata,
    client: PeriscopeClient,
    service_connection: "PeriscopeConnection",
    automation_workflow: Optional[AutomationWorkflow] = None,
) -> None:
    """
    Test connection. This can be executed either as part
    of a metadata workflow or during an Automation Workflow

    The PeriscopeConnection annotation is quoted because the class is
    declared further down this module; an unquoted reference would
    raise NameError when the module is imported.
    """

    def custom_executor():
        # Listing dashboards exercises both the cookies and the site id.
        return client.get_dashboards_list()

    test_fn = {"GetDashboards": custom_executor}

    test_connection_steps(
        metadata=metadata,
        test_fn=test_fn,
        service_type=service_connection.type.value,
        automation_workflow=automation_workflow,
    )
class PeriscopeType(Enum):
    """Fixed service-type discriminator for Periscope connections."""

    Periscope = "Periscope"
-
class PeriscopeConnection(BaseModel):
class Config:
extra = Extra.forbid
diff --git a/ingestion/tests/unit/topology/dashboard/test_periscope.py b/ingestion/tests/unit/topology/dashboard/test_periscope.py
new file mode 100644
index 000000000000..eacf30d6dde6
--- /dev/null
+++ b/ingestion/tests/unit/topology/dashboard/test_periscope.py
@@ -0,0 +1,136 @@
+# Copyright 2021 Collate
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Test Periscope Dashboard using the topology
+"""
+
+from copy import deepcopy
+from types import SimpleNamespace
+from unittest import TestCase
+from unittest.mock import patch
+
from metadata.generated.schema.entity.data.dashboard import (
    Dashboard as LineageDashboard,
)
from metadata.generated.schema.entity.data.table import Table
from metadata.generated.schema.entity.services.dashboardService import (
    DashboardConnection,
    DashboardService,
    DashboardServiceType,
)
from metadata.generated.schema.entity.services.databaseService import (
    DatabaseConnection,
    DatabaseService,
    DatabaseServiceType,
)
from metadata.generated.schema.metadataIngestion.workflow import (
    OpenMetadataWorkflowConfig,
)
from metadata.generated.schema.type.basic import FullyQualifiedEntityName
from metadata.generated.schema.type.entityReference import EntityReference
from metadata.ingestion.ometa.ometa_api import OpenMetadata
from metadata.ingestion.source.dashboard.periscope.metadata import PeriscopeSource
from metadata.utils import fqn
+
# Dashboard service entity the source is expected to register against.
MOCK_DASHBOARD_SERVICE = DashboardService(
    id="c3eb265f-5445-4ad3-ba5e-797d3a3071bb",
    fullyQualifiedName=FullyQualifiedEntityName(__root__="mock_Periscope"),
    name="mock_Periscope",
    connection=DashboardConnection(),
    serviceType=DashboardServiceType.Periscope,
)

# Database service used as the lineage target in lineage tests.
MOCK_DATABASE_SERVICE = DatabaseService(
    id="c3eb265f-5445-4ad3-ba5e-797d3a3071bb",
    fullyQualifiedName=FullyQualifiedEntityName(__root__="mock_mysql"),
    name="mock_mysql",
    connection=DatabaseConnection(),
    serviceType=DatabaseServiceType.Mysql,
)

Mock_DATABASE_SCHEMA = "my_schema"

Mock_DATABASE_SCHEMA_DEFAULT = ""

# Pre-existing dashboard entity used to exercise lineage building.
EXAMPLE_DASHBOARD = LineageDashboard(
    id="7b3766b1-7eb4-4ad4-b7c8-15a8b16edfdd",
    name="lineage_dashboard",
    service=EntityReference(
        id="c3eb265f-5445-4ad3-ba5e-797d3a3071bb", type="dashboardService"
    ),
)

EXAMPLE_TABLE = [
    Table(
        id="0bd6bd6f-7fea-4a98-98c7-3b37073629c7",
        name="lineage_table",
        columns=[],
    )
]
# Minimal workflow config parsed by the tests. The JWT below is a static
# test-only token: it is only ever parsed, never sent to a live server.
mock_config = {
    "source": {
        "type": "Periscope",
        "serviceName": "mock_Periscope",
        "serviceConnection": {
            "config": {
                "type": "Periscope",
                "client_site_id": "999999",
                "cookies": "abcdefg",
            }
        },
        "sourceConfig": {
            "config": {"dashboardFilterPattern": {}, "chartFilterPattern": {}}
        },
    },
    "sink": {"type": "metadata-rest", "config": {}},
    "workflowConfig": {
        "loggerLevel": "DEBUG",
        "openMetadataServerConfig": {
            "hostPort": "http://localhost:8585/api",
            "authProvider": "openmetadata",
            "securityConfig": {
                "jwtToken": "eyJraWQiOiJHYjM4OWEtOWY3Ni1nZGpzLWE5MmotMDI0MmJrOTQzNTYiLCJ0eXAiOiJKV1QiLCJhbGc"
                "iOiJSUzI1NiJ9.eyJzdWIiOiJhZG1pbiIsImlzQm90IjpmYWxzZSwiaXNzIjoib3Blbi1tZXRhZGF0YS5vcmciLCJpYXQiOjE"
                "2NjM5Mzg0NjIsImVtYWlsIjoiYWRtaW5Ab3Blbm1ldGFkYXRhLm9yZyJ9.tS8um_5DKu7HgzGBzS1VTA5uUjKWOCU0B_j08WXB"
                "iEC0mr0zNREkqVfwFDD-d24HlNEbrqioLsBuFRiwIWKc1m_ZlVQbG7P36RUxhuv2vbSp80FKyNM-Tj93FDzq91jsyNmsQhyNv_fN"
                "r3TXfzzSPjHt8Go0FMMP66weoKMgW2PbXlhVKwEuXUHyakLLzewm9UMeQaEiRzhiTMU3UkLXcKbYEJJvfNFcLwSl9W8JCO_l0Yj3u"
                "d-qt_nQYEZwqW6u5nfdQllN133iikV4fM5QZsMCnm8Rq1mvLR0y9bmJiD7fwM1tmJ791TUWqmKaTnP49U493VanKpUAfzIiOiIbhg"
            },
        },
    },
}
+
+
class PeriscopeUnitTest(TestCase):
    """
    Implements the necessary methods to extract
    Domo Dashboard Unit Test
    """

    @patch(
        "metadata.ingestion.source.dashboard.dashboard_service.DashboardServiceSource.test_connection"
    )
    @patch("metadata.ingestion.source.dashboard.periscope.connection.get_connection")
    def __init__(self, methodName, get_connection, test_connection) -> None:
        # Patch the connection helpers so building the source performs
        # no real Periscope calls.
        super().__init__(methodName)
        get_connection.return_value = False
        test_connection.return_value = False
        self.config = OpenMetadataWorkflowConfig.parse_obj(mock_config)
        self.Periscope = PeriscopeSource.create(
            mock_config["source"],
            OpenMetadata(self.config.workflowConfig.openMetadataServerConfig),
        )
        # Replace the HTTP client with a bare namespace; individual tests
        # attach just the attributes they need.
        self.Periscope.client = SimpleNamespace()
        # Seed topology context values normally produced by earlier
        # topology stages.
        self.Periscope.context.__dict__[
            "dashboard_service"
        ] = MOCK_DASHBOARD_SERVICE.fullyQualifiedName.__root__
        self.Periscope.context.__dict__["project_name"] = "Test Collection"
From e2a4b54985bda62546c1d9e695ad4264f784c50c Mon Sep 17 00:00:00 2001
From: Antoine Balliet
Date: Mon, 4 Mar 2024 12:48:22 +0100
Subject: [PATCH 12/13] Update test_periscope.py
---
ingestion/tests/unit/topology/dashboard/test_periscope.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/ingestion/tests/unit/topology/dashboard/test_periscope.py b/ingestion/tests/unit/topology/dashboard/test_periscope.py
index eacf30d6dde6..f385a4f77078 100644
--- a/ingestion/tests/unit/topology/dashboard/test_periscope.py
+++ b/ingestion/tests/unit/topology/dashboard/test_periscope.py
@@ -113,7 +113,7 @@
class PeriscopeUnitTest(TestCase):
"""
Implements the necessary methods to extract
- Domo Dashboard Unit Test
+ Periscope Dashboard Unit Test
"""
@patch(
From 09901b05f3d19291ce24db82f4e90a467c086fe3 Mon Sep 17 00:00:00 2001
From: Antoine Balliet
Date: Mon, 4 Mar 2024 12:58:46 +0100
Subject: [PATCH 13/13] basic test
---
ingestion/tests/unit/test_source_parsing.py | 20 ++++++++++++++++++++
1 file changed, 20 insertions(+)
diff --git a/ingestion/tests/unit/test_source_parsing.py b/ingestion/tests/unit/test_source_parsing.py
index 1886b061bb86..c52fbaa7d8d8 100644
--- a/ingestion/tests/unit/test_source_parsing.py
+++ b/ingestion/tests/unit/test_source_parsing.py
@@ -29,6 +29,9 @@
from metadata.generated.schema.entity.services.connections.dashboard.tableauConnection import (
TableauConnection,
)
+from metadata.generated.schema.entity.services.connections.dashboard.periscopeConnection import (
+ PeriscopeConnection
+)
from metadata.generated.schema.entity.services.connections.database import (
customDatabaseConnection,
)
@@ -824,6 +827,23 @@ def test_tableau():
config: WorkflowSource = WorkflowSource.parse_obj(source)
assert isinstance(config.serviceConnection.__root__.config, TableauConnection)
def test_periscope():
    """Parse a Periscope workflow source into a PeriscopeConnection."""
    source = {
        "type": "periscope",
        "serviceName": "local_periscope",
        "serviceConnection": {
            "config": {
                "type": "Periscope",
                "cookies": "jkjkjkjkj",
                "client_site_id": "999999"
            }
        },
        "sourceConfig": {
            "config": {"dashboardFilterPattern": {}, "chartFilterPattern": {}}
        },
    }
    config: WorkflowSource = WorkflowSource.parse_obj(source)
    assert isinstance(config.serviceConnection.__root__.config, PeriscopeConnection)
def test_trino():
source = {