From 337a53f3ee7b1d0cd459db26a1a2a7075befc212 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Tue, 10 Dec 2024 14:49:56 -0800 Subject: [PATCH 01/26] feat(gha): check that docker-compose up superset-init runs --- .github/workflows/superset-docs-verify.yml | 2 +- .github/workflows/superset-frontend.yml | 122 +++++++++++++++------ Dockerfile | 9 +- 3 files changed, 95 insertions(+), 38 deletions(-) diff --git a/.github/workflows/superset-docs-verify.yml b/.github/workflows/superset-docs-verify.yml index 7fcc7309a50bb..f3d04a33c06fc 100644 --- a/.github/workflows/superset-docs-verify.yml +++ b/.github/workflows/superset-docs-verify.yml @@ -24,7 +24,7 @@ jobs: - uses: JustinBeckwith/linkinator-action@v1.11.0 continue-on-error: true # This will make the job advisory (non-blocking, no red X) with: - paths: "**/*.md, **/*.mdx" + paths: "**/*.md, **/*.mdx, !superset-frontend/CHANGELOG.md" linksToSkip: >- ^https://github.com/apache/(superset|incubator-superset)/(pull|issue)/\d+, http://localhost:8088/, diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 9451692f5f7a5..6c4bad6c1dc15 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -22,59 +22,111 @@ jobs: with: persist-credentials: false submodules: recursive - - name: Check npm lock file version - run: ./scripts/ci_check_npm_lock_version.sh ./superset-frontend/package-lock.json + - name: Check for file changes id: check uses: ./.github/actions/change-detector/ with: token: ${{ secrets.GITHUB_TOKEN }} - - name: Setup Node.js + + - name: Setup Docker Environment if: steps.check.outputs.frontend - uses: actions/setup-node@v4 + uses: ./.github/actions/setup-docker with: - node-version: "20" - - name: Install dependencies + dockerhub-user: ${{ secrets.DOCKERHUB_USER }} + dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Setup supersetbot if: steps.check.outputs.frontend - uses: ./.github/actions/cached-dependencies - with: - run: npm-install - - name: eslint + uses: ./.github/actions/setup-supersetbot/ + + - name: Build Docker Image if: steps.check.outputs.frontend - working-directory: ./superset-frontend + shell: bash + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - npm run eslint -- . --quiet - - name: tsc - if: steps.check.outputs.frontend - working-directory: ./superset-frontend + TAG="superset-node-${{ github.sha }}" + docker buildx build --target superset-node-ci -t $TAG . 
+ docker save $TAG | gzip > superset-node.tar.gz + + - name: Upload Docker Image Artifact + uses: actions/upload-artifact@v3 + with: + name: docker-image + path: superset-node.tar.gz + + unit-tests: + needs: frontend-docker-build + runs-on: ubuntu-latest + steps: + - name: Download Docker Image Artifact + uses: actions/download-artifact@v3 + with: + name: docker-image + + - name: Load Docker Image run: | - npm run type - - name: Build plugins packages - if: steps.check.outputs.frontend - working-directory: ./superset-frontend - run: npm run plugins:build - - name: Build plugins Storybook - if: steps.check.outputs.frontend - working-directory: ./superset-frontend - run: npm run plugins:build-storybook - - name: superset-ui/core coverage - if: steps.check.outputs.frontend + docker load -i superset-node.tar.gz + + - name: npm run test with coverage working-directory: ./superset-frontend run: | - npm run core:cover - - name: unit tests - if: steps.check.outputs.frontend + docker run --rm superset-node-${{ github.sha }} bash -c \ + "npm run test -- --coverage --silent" + - name: superset-ui/core coverage working-directory: ./superset-frontend run: | - npm run test -- --coverage --silent - # todo: remove this step when fix generator as a project in root jest.config.js - - name: generator-superset unit tests - if: steps.check.outputs.frontend - working-directory: ./superset-frontend/packages/generator-superset - run: npm run test + docker run --rm superset-node-${{ github.sha }} bash -c \ + "npm run core:cover" - name: Upload code coverage uses: codecov/codecov-action@v5 with: flags: javascript token: ${{ secrets.CODECOV_TOKEN }} verbose: true + + lint-frontend: + needs: frontend-docker-build + runs-on: ubuntu-latest + steps: + - name: Download Docker Image Artifact + uses: actions/download-artifact@v3 + with: + name: docker-image + + - name: Load Docker Image + run: | + docker load -i superset-node.tar.gz + + - name: eslint + run: | + docker run --rm superset-node-${{ github.sha }} bash -c \ + "npm run eslint -- . 
--quiet" + - name: tsc + run: | + docker run --rm superset-node-${{ github.sha }} bash -c \ + "npm run type" + + validate-frontend: + needs: frontend-docker-build + runs-on: ubuntu-latest + steps: + - name: Download Docker Image Artifact + uses: actions/download-artifact@v3 + with: + name: docker-image + + - name: Load Docker Image + run: | + docker load -i superset-node.tar.gz + + - name: Build plugins packages + run: | + docker run --rm superset-node-${{ github.sha }} bash -c \ + "npm run plugins:build" + - name: Build plugins Storybook + working-directory: ./superset-frontend + run: | + docker run --rm superset-node-${{ github.sha }} bash -c \ + "npm run plugins:build-storybook" diff --git a/Dockerfile b/Dockerfile index 4f24360988101..91af4312c60a8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -24,9 +24,9 @@ ARG PY_VER=3.10-slim-bookworm ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64} ###################################################################### -# superset-node used for building frontend assets +# superset-node-ci used as a base for building frontend assets and CI ###################################################################### -FROM --platform=${BUILDPLATFORM} node:20-bullseye-slim AS superset-node +FROM --platform=${BUILDPLATFORM} node:20-bullseye-slim AS superset-node-ci ARG BUILD_TRANSLATIONS="false" # Include translations in the final build ENV BUILD_TRANSLATIONS=${BUILD_TRANSLATIONS} ARG DEV_MODE="false" # Skip frontend build in dev mode @@ -66,6 +66,11 @@ RUN --mount=type=bind,source=./superset-frontend/package.json,target=./package.j # Runs the webpack build process COPY superset-frontend /app/superset-frontend +###################################################################### +# superset-node used for compile frontend assets +###################################################################### +FROM superset-node-ci AS superset-node + # Build the frontend if not in dev mode RUN --mount=type=cache,target=/app/superset-frontend/.temp_cache \ --mount=type=cache,target=/root/.npm \ From 9e78bc386e7783e8ff6c57a8d378398c5c1b9a96 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Tue, 17 Dec 2024 00:27:34 -0800 Subject: [PATCH 02/26] allow .storybook (12k) --- .dockerignore | 1 - .github/workflows/superset-frontend.yml | 103 +++- Dockerfile | 4 + .../databases/DatabaseModal/index.1.test.tsx | 549 ++++++++++++++++++ .../{index.test.tsx => index.2.test.tsx} | 209 ------- .../databases/DatabaseModal/index.3.test.tsx | 549 ++++++++++++++++++ 6 files changed, 1181 insertions(+), 234 deletions(-) create mode 100644 superset-frontend/src/features/databases/DatabaseModal/index.1.test.tsx rename superset-frontend/src/features/databases/DatabaseModal/{index.test.tsx => index.2.test.tsx} (87%) create mode 100644 superset-frontend/src/features/databases/DatabaseModal/index.3.test.tsx diff --git a/.dockerignore b/.dockerignore index 31c873f0073f9..b650f22c10806 100644 --- a/.dockerignore +++ b/.dockerignore @@ -34,7 +34,6 @@ **/*.sqllite **/*.swp **/.terser-plugin-cache/ -**/.storybook/ **/node_modules/ tests/ diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 6c4bad6c1dc15..94030a35e5e43 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -1,4 +1,4 @@ -name: Frontend +name: "Frontend Build CI (unit tests, linting & sanity checks)" on: push: @@ -21,7 +21,6 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false - submodules: recursive - name: Check for file changes 
id: check @@ -35,6 +34,7 @@ jobs: with: dockerhub-user: ${{ secrets.DOCKERHUB_USER }} dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }} + build: "true" - name: Setup supersetbot if: steps.check.outputs.frontend @@ -47,21 +47,32 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | TAG="superset-node-${{ github.sha }}" - docker buildx build --target superset-node-ci -t $TAG . + + supersetbot docker \ + --load \ + --preset superset-node-ci \ + --platform "linux/amd64" \ + --extra-flags "--tag $TAG" + docker save $TAG | gzip > superset-node.tar.gz - name: Upload Docker Image Artifact - uses: actions/upload-artifact@v3 + if: steps.check.outputs.frontend + uses: actions/upload-artifact@v4 with: name: docker-image path: superset-node.tar.gz - unit-tests: - needs: frontend-docker-build - runs-on: ubuntu-latest + sharded-jest-tests: + needs: frontend-build + if: needs.frontend-build.result == 'success' + strategy: + matrix: + shard: [1, 2, 3, 4, 5, 6, 7, 8] + runs-on: ubuntu-22.04 steps: - name: Download Docker Image Artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: docker-image @@ -70,15 +81,39 @@ jobs: docker load -i superset-node.tar.gz - name: npm run test with coverage - working-directory: ./superset-frontend - run: | - docker run --rm superset-node-${{ github.sha }} bash -c \ - "npm run test -- --coverage --silent" - - name: superset-ui/core coverage - working-directory: ./superset-frontend run: | - docker run --rm superset-node-${{ github.sha }} bash -c \ - "npm run core:cover" + mkdir -p ${{ github.workspace }}/coverage + docker run \ + --rm superset-node-${{ github.sha }} \ + -v ${{ github.workspace }}/coverage:/app/superset-frontend/coverage \ + bash -c \ + 'npm run test -- --coverage --silent --shard=${{ matrix.shard }}/8 --coverageReporters="json-summary" && find ./coverage' + find ${{ github.workspace }}/coverage + + - name: Upload coverage artifact + uses: actions/upload-artifact@v4 + with: + name: coverage-artifacts-${{ matrix.shard }} + path: coverage/ + + report-coverage: + needs: [sharded-jest-tests] + if: needs.frontend-build.result == 'success' + runs-on: ubuntu-22.04 + steps: + - name: Download Docker Image Artifact + uses: actions/download-artifact@v4 + with: + name: coverage-artifacts + merge-multiple: true + pattern: coverage/* + + - name: Show files + run: find . + + - name: Merge Code Coverage + run: npx nyc merge coverage/ merged-output/merged-coverage.json + - name: Upload code coverage uses: codecov/codecov-action@v5 with: @@ -86,12 +121,32 @@ jobs: token: ${{ secrets.CODECOV_TOKEN }} verbose: true + core-cover: + needs: frontend-build + if: needs.frontend-build.result == 'success' + runs-on: ubuntu-22.04 + steps: + - name: Download Docker Image Artifact + uses: actions/download-artifact@v4 + with: + name: docker-image + + - name: Load Docker Image + run: | + docker load -i superset-node.tar.gz + + - name: superset-ui/core coverage + run: | + docker run --rm superset-node-${{ github.sha }} bash -c \ + "npm run core:cover" + lint-frontend: - needs: frontend-docker-build - runs-on: ubuntu-latest + needs: frontend-build + if: needs.frontend-build.result == 'success' + runs-on: ubuntu-22.04 steps: - name: Download Docker Image Artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: docker-image @@ -102,18 +157,19 @@ jobs: - name: eslint run: | docker run --rm superset-node-${{ github.sha }} bash -c \ - "npm run eslint -- . --quiet" + "npm i && npm run eslint -- . 
--quiet" - name: tsc run: | docker run --rm superset-node-${{ github.sha }} bash -c \ "npm run type" validate-frontend: - needs: frontend-docker-build - runs-on: ubuntu-latest + needs: frontend-build + if: needs.frontend-build.result == 'success' + runs-on: ubuntu-22.04 steps: - name: Download Docker Image Artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: docker-image @@ -126,7 +182,6 @@ jobs: docker run --rm superset-node-${{ github.sha }} bash -c \ "npm run plugins:build" - name: Build plugins Storybook - working-directory: ./superset-frontend run: | docker run --rm superset-node-${{ github.sha }} bash -c \ "npm run plugins:build-storybook" diff --git a/Dockerfile b/Dockerfile index 91af4312c60a8..7297ad139337b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -53,6 +53,10 @@ RUN mkdir -p /app/superset/static/assets \ /app/superset/translations # Mount package files and install dependencies if not in dev mode +# NOTE: we mount packages and plugins as they are referenced in package.json as workspaces +# ideally we'd COPY only their package.json. Here npm ci will be cached as long +# as the full content of these folders don't change, yielding a decent cache reuse rate. +# Note that's it's not possible selectively COPY of mount using blobs. RUN --mount=type=bind,source=./superset-frontend/package.json,target=./package.json \ --mount=type=bind,source=./superset-frontend/package-lock.json,target=./package-lock.json \ --mount=type=cache,target=/root/.cache \ diff --git a/superset-frontend/src/features/databases/DatabaseModal/index.1.test.tsx b/superset-frontend/src/features/databases/DatabaseModal/index.1.test.tsx new file mode 100644 index 0000000000000..025bbf0227b25 --- /dev/null +++ b/superset-frontend/src/features/databases/DatabaseModal/index.1.test.tsx @@ -0,0 +1,549 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +// TODO: These tests should be made atomic in separate files + +import fetchMock from 'fetch-mock'; +import userEvent from '@testing-library/user-event'; +import { render, screen, cleanup, act } from 'spec/helpers/testing-library'; +import DatabaseModal from './index'; + +jest.mock('@superset-ui/core', () => ({ + ...jest.requireActual('@superset-ui/core'), + isFeatureEnabled: () => true, +})); + +const mockHistoryPush = jest.fn(); +jest.mock('react-router-dom', () => ({ + ...jest.requireActual('react-router-dom'), + useHistory: () => ({ + push: mockHistoryPush, + }), +})); + +const dbProps = { + show: true, + database_name: 'my database', + sqlalchemy_uri: 'postgres://superset:superset@something:1234/superset', + onHide: () => {}, +}; + +const DATABASE_FETCH_ENDPOINT = 'glob:*/api/v1/database/10'; +const AVAILABLE_DB_ENDPOINT = 'glob:*/api/v1/database/available*'; +const VALIDATE_PARAMS_ENDPOINT = 'glob:*/api/v1/database/validate_parameters*'; +const DATABASE_CONNECT_ENDPOINT = 'glob:*/api/v1/database/'; + +fetchMock.post(DATABASE_CONNECT_ENDPOINT, { + id: 10, + result: { + configuration_method: 'sqlalchemy_form', + database_name: 'Other2', + driver: 'apsw', + expose_in_sqllab: true, + extra: '{"allows_virtual_table_explore":true}', + sqlalchemy_uri: 'gsheets://', + }, + json: 'foo', +}); + +fetchMock.config.overwriteRoutes = true; +fetchMock.get(DATABASE_FETCH_ENDPOINT, { + result: { + id: 10, + database_name: 'my database', + expose_in_sqllab: false, + allow_ctas: false, + allow_cvas: false, + configuration_method: 'sqlalchemy_form', + }, +}); +fetchMock.mock(AVAILABLE_DB_ENDPOINT, { + databases: [ + { + available_drivers: ['psycopg2'], + default_driver: 'psycopg2', + engine: 'postgresql', + name: 'PostgreSQL', + parameters: { + properties: { + database: { + description: 'Database name', + type: 'string', + }, + encryption: { + description: 'Use an encrypted connection to the database', + type: 'boolean', + }, + host: { + description: 'Hostname or IP address', + type: 'string', + }, + password: { + description: 'Password', + nullable: true, + type: 'string', + }, + port: { + description: 'Database port', + format: 'int32', + maximum: 65536, + minimum: 0, + type: 'integer', + }, + query: { + additionalProperties: {}, + description: 'Additional parameters', + type: 'object', + }, + ssh: { + description: 'Create SSH Tunnel', + type: 'boolean', + }, + username: { + description: 'Username', + nullable: true, + type: 'string', + }, + }, + required: ['database', 'host', 'port', 'username'], + type: 'object', + }, + preferred: true, + sqlalchemy_uri_placeholder: + 'postgresql://user:password@host:port/dbname[?key=value&key=value...]', + engine_information: { + supports_file_upload: true, + disable_ssh_tunneling: false, + }, + }, + { + available_drivers: ['rest'], + engine: 'presto', + name: 'Presto', + preferred: true, + engine_information: { + supports_file_upload: true, + disable_ssh_tunneling: false, + }, + }, + { + available_drivers: ['mysqldb'], + default_driver: 'mysqldb', + engine: 'mysql', + name: 'MySQL', + parameters: { + properties: { + database: { + description: 'Database name', + type: 'string', + }, + encryption: { + description: 'Use an encrypted connection to the database', + type: 'boolean', + }, + host: { + description: 'Hostname or IP address', + type: 'string', + }, + password: { + description: 'Password', + nullable: true, + type: 'string', + }, + port: { + description: 'Database port', + format: 'int32', + maximum: 65536, + minimum: 0, + type: 'integer', + }, + 
query: { + additionalProperties: {}, + description: 'Additional parameters', + type: 'object', + }, + username: { + description: 'Username', + nullable: true, + type: 'string', + }, + }, + required: ['database', 'host', 'port', 'username'], + type: 'object', + }, + preferred: true, + sqlalchemy_uri_placeholder: + 'mysql://user:password@host:port/dbname[?key=value&key=value...]', + engine_information: { + supports_file_upload: true, + disable_ssh_tunneling: false, + }, + }, + { + available_drivers: ['pysqlite'], + engine: 'sqlite', + name: 'SQLite', + preferred: true, + engine_information: { + supports_file_upload: true, + disable_ssh_tunneling: false, + }, + }, + { + available_drivers: ['rest'], + engine: 'druid', + name: 'Apache Druid', + preferred: false, + engine_information: { + supports_file_upload: true, + disable_ssh_tunneling: false, + }, + }, + { + available_drivers: ['bigquery'], + default_driver: 'bigquery', + engine: 'bigquery', + name: 'Google BigQuery', + parameters: { + properties: { + credentials_info: { + description: 'Contents of BigQuery JSON credentials.', + type: 'string', + 'x-encrypted-extra': true, + }, + query: { + type: 'object', + }, + }, + type: 'object', + }, + preferred: false, + sqlalchemy_uri_placeholder: 'bigquery://{project_id}', + engine_information: { + supports_file_upload: true, + disable_ssh_tunneling: true, + }, + }, + { + available_drivers: ['rest'], + default_driver: 'apsw', + engine: 'gsheets', + name: 'Google Sheets', + preferred: false, + engine_information: { + supports_file_upload: false, + disable_ssh_tunneling: true, + }, + }, + { + available_drivers: ['connector'], + default_driver: 'connector', + engine: 'databricks', + name: 'Databricks', + parameters: { + properties: { + access_token: { + type: 'string', + }, + database: { + type: 'string', + }, + host: { + type: 'string', + }, + http_path: { + type: 'string', + }, + port: { + format: 'int32', + type: 'integer', + }, + }, + required: ['access_token', 'database', 'host', 'http_path', 'port'], + type: 'object', + }, + preferred: true, + sqlalchemy_uri_placeholder: + 'databricks+connector://token:{access_token}@{host}:{port}/{database_name}', + }, + ], +}); +fetchMock.post(VALIDATE_PARAMS_ENDPOINT, { + message: 'OK', +}); + +describe('DatabaseModal', () => { + const renderAndWait = async () => { + const mounted = act(async () => { + render(, { + useRedux: true, + }); + }); + + return mounted; + }; + + beforeEach(async () => { + await renderAndWait(); + }); + + afterEach(cleanup); + + describe('Functional: Create new database', () => { + test('directs databases to the appropriate form (dynamic vs. 
SQL Alchemy)', async () => { + // ---------- Dynamic example (3-step form) + // Click the PostgreSQL button to enter the dynamic form + const postgreSQLButton = screen.getByRole('button', { + name: /postgresql/i, + }); + userEvent.click(postgreSQLButton); + + // Dynamic form has 3 steps, seeing this text means the dynamic form is present + const dynamicFormStepText = screen.getByText(/step 2 of 3/i); + + expect(dynamicFormStepText).toBeVisible(); + + // ---------- SQL Alchemy example (2-step form) + // Click the back button to go back to step 1, + // then click the SQLite button to enter the SQL Alchemy form + const backButton = screen.getByRole('button', { name: /back/i }); + userEvent.click(backButton); + + const sqliteButton = screen.getByRole('button', { + name: /sqlite/i, + }); + userEvent.click(sqliteButton); + + // SQL Alchemy form has 2 steps, seeing this text means the SQL Alchemy form is present + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + const sqlAlchemyFormStepText = screen.getByText(/step 2 of 2/i); + + expect(sqlAlchemyFormStepText).toBeVisible(); + }); + + describe('SQL Alchemy form flow', () => { + test('enters step 2 of 2 when proper database is selected', async () => { + userEvent.click( + screen.getByRole('button', { + name: /sqlite/i, + }), + ); + + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + }); + + test('runs fetchResource when "Connect" is clicked', () => { + /* ---------- 🐞 TODO (lyndsiWilliams): function mock is not currently working 🐞 ---------- + + // Mock useSingleViewResource + const mockUseSingleViewResource = jest.fn(); + mockUseSingleViewResource.mockImplementation(useSingleViewResource); + + const { fetchResource } = mockUseSingleViewResource('database'); + + // Invalid hook call? 
+ userEvent.click(screen.getByRole('button', { name: 'Connect' })); + expect(fetchResource).toHaveBeenCalled(); + + The line below makes the linter happy */ + expect.anything(); + }); + + describe('step 2 component interaction', () => { + test('properly interacts with textboxes', async () => { + userEvent.click( + screen.getByRole('button', { + name: /sqlite/i, + }), + ); + + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + const dbNametextBox = screen.getByTestId('database-name-input'); + expect(dbNametextBox).toHaveValue('SQLite'); + + userEvent.type(dbNametextBox, 'Different text'); + expect(dbNametextBox).toHaveValue('SQLiteDifferent text'); + + const sqlAlchemyURItextBox = screen.getByTestId( + 'sqlalchemy-uri-input', + ); + expect(sqlAlchemyURItextBox).toHaveValue(''); + + userEvent.type(sqlAlchemyURItextBox, 'Different text'); + expect(sqlAlchemyURItextBox).toHaveValue('Different text'); + }); + + test('runs testDatabaseConnection when "TEST CONNECTION" is clicked', () => { + /* ---------- 🐞 TODO (lyndsiWilliams): function mock is not currently working 🐞 ---------- + + // Mock testDatabaseConnection + const mockTestDatabaseConnection = jest.fn(); + mockTestDatabaseConnection.mockImplementation(testDatabaseConnection); + + userEvent.click( + screen.getByRole('button', { + name: /test connection/i, + }), + ); + + expect(mockTestDatabaseConnection).toHaveBeenCalled(); + + The line below makes the linter happy */ + expect.anything(); + }); + }); + + describe('SSH Tunnel Form interaction', () => { + test('properly interacts with SSH Tunnel form textboxes for dynamic form', async () => { + userEvent.click( + screen.getByRole('button', { + name: /postgresql/i, + }), + ); + expect(await screen.findByText(/step 2 of 3/i)).toBeInTheDocument(); + const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); + userEvent.click(SSHTunnelingToggle); + const SSHTunnelServerAddressInput = screen.getByTestId( + 'ssh-tunnel-server_address-input', + ); + expect(SSHTunnelServerAddressInput).toHaveValue(''); + userEvent.type(SSHTunnelServerAddressInput, 'localhost'); + expect(SSHTunnelServerAddressInput).toHaveValue('localhost'); + const SSHTunnelServerPortInput = screen.getByTestId( + 'ssh-tunnel-server_port-input', + ); + expect(SSHTunnelServerPortInput).toHaveValue(null); + userEvent.type(SSHTunnelServerPortInput, '22'); + expect(SSHTunnelServerPortInput).toHaveValue(22); + const SSHTunnelUsernameInput = screen.getByTestId( + 'ssh-tunnel-username-input', + ); + expect(SSHTunnelUsernameInput).toHaveValue(''); + userEvent.type(SSHTunnelUsernameInput, 'test'); + expect(SSHTunnelUsernameInput).toHaveValue('test'); + const SSHTunnelPasswordInput = screen.getByTestId( + 'ssh-tunnel-password-input', + ); + expect(SSHTunnelPasswordInput).toHaveValue(''); + userEvent.type(SSHTunnelPasswordInput, 'pass'); + expect(SSHTunnelPasswordInput).toHaveValue('pass'); + }); + + test('properly interacts with SSH Tunnel form textboxes', async () => { + userEvent.click( + screen.getByRole('button', { + name: /sqlite/i, + }), + ); + + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); + userEvent.click(SSHTunnelingToggle); + const SSHTunnelServerAddressInput = screen.getByTestId( + 'ssh-tunnel-server_address-input', + ); + expect(SSHTunnelServerAddressInput).toHaveValue(''); + userEvent.type(SSHTunnelServerAddressInput, 'localhost'); + expect(SSHTunnelServerAddressInput).toHaveValue('localhost'); + const 
SSHTunnelServerPortInput = screen.getByTestId( + 'ssh-tunnel-server_port-input', + ); + expect(SSHTunnelServerPortInput).toHaveValue(null); + userEvent.type(SSHTunnelServerPortInput, '22'); + expect(SSHTunnelServerPortInput).toHaveValue(22); + const SSHTunnelUsernameInput = screen.getByTestId( + 'ssh-tunnel-username-input', + ); + expect(SSHTunnelUsernameInput).toHaveValue(''); + userEvent.type(SSHTunnelUsernameInput, 'test'); + expect(SSHTunnelUsernameInput).toHaveValue('test'); + const SSHTunnelPasswordInput = screen.getByTestId( + 'ssh-tunnel-password-input', + ); + expect(SSHTunnelPasswordInput).toHaveValue(''); + userEvent.type(SSHTunnelPasswordInput, 'pass'); + expect(SSHTunnelPasswordInput).toHaveValue('pass'); + }); + + test('if the SSH Tunneling toggle is not true, no inputs are displayed', async () => { + userEvent.click( + screen.getByRole('button', { + name: /sqlite/i, + }), + ); + + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); + expect(SSHTunnelingToggle).toBeVisible(); + const SSHTunnelServerAddressInput = screen.queryByTestId( + 'ssh-tunnel-server_address-input', + ); + expect(SSHTunnelServerAddressInput).not.toBeInTheDocument(); + const SSHTunnelServerPortInput = screen.queryByTestId( + 'ssh-tunnel-server_port-input', + ); + expect(SSHTunnelServerPortInput).not.toBeInTheDocument(); + const SSHTunnelUsernameInput = screen.queryByTestId( + 'ssh-tunnel-username-input', + ); + expect(SSHTunnelUsernameInput).not.toBeInTheDocument(); + const SSHTunnelPasswordInput = screen.queryByTestId( + 'ssh-tunnel-password-input', + ); + expect(SSHTunnelPasswordInput).not.toBeInTheDocument(); + }); + + test('If user changes the login method, the inputs change', async () => { + userEvent.click( + screen.getByRole('button', { + name: /sqlite/i, + }), + ); + + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); + userEvent.click(SSHTunnelingToggle); + const SSHTunnelUsePasswordInput = screen.getByTestId( + 'ssh-tunnel-use_password-radio', + ); + expect(SSHTunnelUsePasswordInput).toBeVisible(); + const SSHTunnelUsePrivateKeyInput = screen.getByTestId( + 'ssh-tunnel-use_private_key-radio', + ); + expect(SSHTunnelUsePrivateKeyInput).toBeVisible(); + const SSHTunnelPasswordInput = screen.getByTestId( + 'ssh-tunnel-password-input', + ); + // By default, we use Password as login method + expect(SSHTunnelPasswordInput).toBeVisible(); + // Change the login method to use private key + userEvent.click(SSHTunnelUsePrivateKeyInput); + const SSHTunnelPrivateKeyInput = screen.getByTestId( + 'ssh-tunnel-private_key-input', + ); + expect(SSHTunnelPrivateKeyInput).toBeVisible(); + const SSHTunnelPrivateKeyPasswordInput = screen.getByTestId( + 'ssh-tunnel-private_key_password-input', + ); + expect(SSHTunnelPrivateKeyPasswordInput).toBeVisible(); + }); + }); + }); + }); +}); diff --git a/superset-frontend/src/features/databases/DatabaseModal/index.test.tsx b/superset-frontend/src/features/databases/DatabaseModal/index.2.test.tsx similarity index 87% rename from superset-frontend/src/features/databases/DatabaseModal/index.test.tsx rename to superset-frontend/src/features/databases/DatabaseModal/index.2.test.tsx index bd3eb3bec9796..7fee254d6e2cc 100644 --- a/superset-frontend/src/features/databases/DatabaseModal/index.test.tsx +++ b/superset-frontend/src/features/databases/DatabaseModal/index.2.test.tsx @@ -1140,215 +1140,6 @@ 
describe('DatabaseModal', () => { expect(sqlAlchemyFormStepText).toBeVisible(); }); - describe('SQL Alchemy form flow', () => { - test('enters step 2 of 2 when proper database is selected', async () => { - userEvent.click( - screen.getByRole('button', { - name: /sqlite/i, - }), - ); - - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - }); - - test('runs fetchResource when "Connect" is clicked', () => { - /* ---------- 🐞 TODO (lyndsiWilliams): function mock is not currently working 🐞 ---------- - - // Mock useSingleViewResource - const mockUseSingleViewResource = jest.fn(); - mockUseSingleViewResource.mockImplementation(useSingleViewResource); - - const { fetchResource } = mockUseSingleViewResource('database'); - - // Invalid hook call? - userEvent.click(screen.getByRole('button', { name: 'Connect' })); - expect(fetchResource).toHaveBeenCalled(); - - The line below makes the linter happy */ - expect.anything(); - }); - - describe('step 2 component interaction', () => { - test('properly interacts with textboxes', async () => { - userEvent.click( - screen.getByRole('button', { - name: /sqlite/i, - }), - ); - - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - const dbNametextBox = screen.getByTestId('database-name-input'); - expect(dbNametextBox).toHaveValue('SQLite'); - - userEvent.type(dbNametextBox, 'Different text'); - expect(dbNametextBox).toHaveValue('SQLiteDifferent text'); - - const sqlAlchemyURItextBox = screen.getByTestId( - 'sqlalchemy-uri-input', - ); - expect(sqlAlchemyURItextBox).toHaveValue(''); - - userEvent.type(sqlAlchemyURItextBox, 'Different text'); - expect(sqlAlchemyURItextBox).toHaveValue('Different text'); - }); - - test('runs testDatabaseConnection when "TEST CONNECTION" is clicked', () => { - /* ---------- 🐞 TODO (lyndsiWilliams): function mock is not currently working 🐞 ---------- - - // Mock testDatabaseConnection - const mockTestDatabaseConnection = jest.fn(); - mockTestDatabaseConnection.mockImplementation(testDatabaseConnection); - - userEvent.click( - screen.getByRole('button', { - name: /test connection/i, - }), - ); - - expect(mockTestDatabaseConnection).toHaveBeenCalled(); - - The line below makes the linter happy */ - expect.anything(); - }); - }); - - describe('SSH Tunnel Form interaction', () => { - test('properly interacts with SSH Tunnel form textboxes for dynamic form', async () => { - userEvent.click( - screen.getByRole('button', { - name: /postgresql/i, - }), - ); - expect(await screen.findByText(/step 2 of 3/i)).toBeInTheDocument(); - const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); - userEvent.click(SSHTunnelingToggle); - const SSHTunnelServerAddressInput = screen.getByTestId( - 'ssh-tunnel-server_address-input', - ); - expect(SSHTunnelServerAddressInput).toHaveValue(''); - userEvent.type(SSHTunnelServerAddressInput, 'localhost'); - expect(SSHTunnelServerAddressInput).toHaveValue('localhost'); - const SSHTunnelServerPortInput = screen.getByTestId( - 'ssh-tunnel-server_port-input', - ); - expect(SSHTunnelServerPortInput).toHaveValue(null); - userEvent.type(SSHTunnelServerPortInput, '22'); - expect(SSHTunnelServerPortInput).toHaveValue(22); - const SSHTunnelUsernameInput = screen.getByTestId( - 'ssh-tunnel-username-input', - ); - expect(SSHTunnelUsernameInput).toHaveValue(''); - userEvent.type(SSHTunnelUsernameInput, 'test'); - expect(SSHTunnelUsernameInput).toHaveValue('test'); - const SSHTunnelPasswordInput = screen.getByTestId( - 'ssh-tunnel-password-input', - ); - 
expect(SSHTunnelPasswordInput).toHaveValue(''); - userEvent.type(SSHTunnelPasswordInput, 'pass'); - expect(SSHTunnelPasswordInput).toHaveValue('pass'); - }); - - test('properly interacts with SSH Tunnel form textboxes', async () => { - userEvent.click( - screen.getByRole('button', { - name: /sqlite/i, - }), - ); - - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); - userEvent.click(SSHTunnelingToggle); - const SSHTunnelServerAddressInput = screen.getByTestId( - 'ssh-tunnel-server_address-input', - ); - expect(SSHTunnelServerAddressInput).toHaveValue(''); - userEvent.type(SSHTunnelServerAddressInput, 'localhost'); - expect(SSHTunnelServerAddressInput).toHaveValue('localhost'); - const SSHTunnelServerPortInput = screen.getByTestId( - 'ssh-tunnel-server_port-input', - ); - expect(SSHTunnelServerPortInput).toHaveValue(null); - userEvent.type(SSHTunnelServerPortInput, '22'); - expect(SSHTunnelServerPortInput).toHaveValue(22); - const SSHTunnelUsernameInput = screen.getByTestId( - 'ssh-tunnel-username-input', - ); - expect(SSHTunnelUsernameInput).toHaveValue(''); - userEvent.type(SSHTunnelUsernameInput, 'test'); - expect(SSHTunnelUsernameInput).toHaveValue('test'); - const SSHTunnelPasswordInput = screen.getByTestId( - 'ssh-tunnel-password-input', - ); - expect(SSHTunnelPasswordInput).toHaveValue(''); - userEvent.type(SSHTunnelPasswordInput, 'pass'); - expect(SSHTunnelPasswordInput).toHaveValue('pass'); - }); - - test('if the SSH Tunneling toggle is not true, no inputs are displayed', async () => { - userEvent.click( - screen.getByRole('button', { - name: /sqlite/i, - }), - ); - - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); - expect(SSHTunnelingToggle).toBeVisible(); - const SSHTunnelServerAddressInput = screen.queryByTestId( - 'ssh-tunnel-server_address-input', - ); - expect(SSHTunnelServerAddressInput).not.toBeInTheDocument(); - const SSHTunnelServerPortInput = screen.queryByTestId( - 'ssh-tunnel-server_port-input', - ); - expect(SSHTunnelServerPortInput).not.toBeInTheDocument(); - const SSHTunnelUsernameInput = screen.queryByTestId( - 'ssh-tunnel-username-input', - ); - expect(SSHTunnelUsernameInput).not.toBeInTheDocument(); - const SSHTunnelPasswordInput = screen.queryByTestId( - 'ssh-tunnel-password-input', - ); - expect(SSHTunnelPasswordInput).not.toBeInTheDocument(); - }); - - test('If user changes the login method, the inputs change', async () => { - userEvent.click( - screen.getByRole('button', { - name: /sqlite/i, - }), - ); - - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); - userEvent.click(SSHTunnelingToggle); - const SSHTunnelUsePasswordInput = screen.getByTestId( - 'ssh-tunnel-use_password-radio', - ); - expect(SSHTunnelUsePasswordInput).toBeVisible(); - const SSHTunnelUsePrivateKeyInput = screen.getByTestId( - 'ssh-tunnel-use_private_key-radio', - ); - expect(SSHTunnelUsePrivateKeyInput).toBeVisible(); - const SSHTunnelPasswordInput = screen.getByTestId( - 'ssh-tunnel-password-input', - ); - // By default, we use Password as login method - expect(SSHTunnelPasswordInput).toBeVisible(); - // Change the login method to use private key - userEvent.click(SSHTunnelUsePrivateKeyInput); - const SSHTunnelPrivateKeyInput = screen.getByTestId( - 'ssh-tunnel-private_key-input', - ); - 
expect(SSHTunnelPrivateKeyInput).toBeVisible(); - const SSHTunnelPrivateKeyPasswordInput = screen.getByTestId( - 'ssh-tunnel-private_key_password-input', - ); - expect(SSHTunnelPrivateKeyPasswordInput).toBeVisible(); - }); - }); - }); - describe('Dynamic form flow', () => { test('enters step 2 of 3 when proper database is selected', async () => { expect(await screen.findByText(/step 1 of 3/i)).toBeInTheDocument(); diff --git a/superset-frontend/src/features/databases/DatabaseModal/index.3.test.tsx b/superset-frontend/src/features/databases/DatabaseModal/index.3.test.tsx new file mode 100644 index 0000000000000..abb711238d686 --- /dev/null +++ b/superset-frontend/src/features/databases/DatabaseModal/index.3.test.tsx @@ -0,0 +1,549 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// TODO: These tests should be made atomic in separate files + +import fetchMock from 'fetch-mock'; +import userEvent from '@testing-library/user-event'; +import { act, cleanup, render, screen } from 'spec/helpers/testing-library'; +import DatabaseModal from './index'; + +jest.mock('@superset-ui/core', () => ({ + ...jest.requireActual('@superset-ui/core'), + isFeatureEnabled: () => true, +})); + +const mockHistoryPush = jest.fn(); +jest.mock('react-router-dom', () => ({ + ...jest.requireActual('react-router-dom'), + useHistory: () => ({ + push: mockHistoryPush, + }), +})); + +const dbProps = { + show: true, + database_name: 'my database', + sqlalchemy_uri: 'postgres://superset:superset@something:1234/superset', + onHide: () => {}, +}; + +const DATABASE_FETCH_ENDPOINT = 'glob:*/api/v1/database/10'; +const AVAILABLE_DB_ENDPOINT = 'glob:*/api/v1/database/available*'; +const VALIDATE_PARAMS_ENDPOINT = 'glob:*/api/v1/database/validate_parameters*'; +const DATABASE_CONNECT_ENDPOINT = 'glob:*/api/v1/database/'; + +fetchMock.post(DATABASE_CONNECT_ENDPOINT, { + id: 10, + result: { + configuration_method: 'sqlalchemy_form', + database_name: 'Other2', + driver: 'apsw', + expose_in_sqllab: true, + extra: '{"allows_virtual_table_explore":true}', + sqlalchemy_uri: 'gsheets://', + }, + json: 'foo', +}); + +fetchMock.config.overwriteRoutes = true; +fetchMock.get(DATABASE_FETCH_ENDPOINT, { + result: { + id: 10, + database_name: 'my database', + expose_in_sqllab: false, + allow_ctas: false, + allow_cvas: false, + configuration_method: 'sqlalchemy_form', + }, +}); +fetchMock.mock(AVAILABLE_DB_ENDPOINT, { + databases: [ + { + available_drivers: ['psycopg2'], + default_driver: 'psycopg2', + engine: 'postgresql', + name: 'PostgreSQL', + parameters: { + properties: { + database: { + description: 'Database name', + type: 'string', + }, + encryption: { + description: 'Use an encrypted connection to the database', + type: 'boolean', + }, + host: { + description: 'Hostname or IP 
address', + type: 'string', + }, + password: { + description: 'Password', + nullable: true, + type: 'string', + }, + port: { + description: 'Database port', + format: 'int32', + maximum: 65536, + minimum: 0, + type: 'integer', + }, + query: { + additionalProperties: {}, + description: 'Additional parameters', + type: 'object', + }, + ssh: { + description: 'Create SSH Tunnel', + type: 'boolean', + }, + username: { + description: 'Username', + nullable: true, + type: 'string', + }, + }, + required: ['database', 'host', 'port', 'username'], + type: 'object', + }, + preferred: true, + sqlalchemy_uri_placeholder: + 'postgresql://user:password@host:port/dbname[?key=value&key=value...]', + engine_information: { + supports_file_upload: true, + disable_ssh_tunneling: false, + }, + }, + { + available_drivers: ['rest'], + engine: 'presto', + name: 'Presto', + preferred: true, + engine_information: { + supports_file_upload: true, + disable_ssh_tunneling: false, + }, + }, + { + available_drivers: ['mysqldb'], + default_driver: 'mysqldb', + engine: 'mysql', + name: 'MySQL', + parameters: { + properties: { + database: { + description: 'Database name', + type: 'string', + }, + encryption: { + description: 'Use an encrypted connection to the database', + type: 'boolean', + }, + host: { + description: 'Hostname or IP address', + type: 'string', + }, + password: { + description: 'Password', + nullable: true, + type: 'string', + }, + port: { + description: 'Database port', + format: 'int32', + maximum: 65536, + minimum: 0, + type: 'integer', + }, + query: { + additionalProperties: {}, + description: 'Additional parameters', + type: 'object', + }, + username: { + description: 'Username', + nullable: true, + type: 'string', + }, + }, + required: ['database', 'host', 'port', 'username'], + type: 'object', + }, + preferred: true, + sqlalchemy_uri_placeholder: + 'mysql://user:password@host:port/dbname[?key=value&key=value...]', + engine_information: { + supports_file_upload: true, + disable_ssh_tunneling: false, + }, + }, + { + available_drivers: ['pysqlite'], + engine: 'sqlite', + name: 'SQLite', + preferred: true, + engine_information: { + supports_file_upload: true, + disable_ssh_tunneling: false, + }, + }, + { + available_drivers: ['rest'], + engine: 'druid', + name: 'Apache Druid', + preferred: false, + engine_information: { + supports_file_upload: true, + disable_ssh_tunneling: false, + }, + }, + { + available_drivers: ['bigquery'], + default_driver: 'bigquery', + engine: 'bigquery', + name: 'Google BigQuery', + parameters: { + properties: { + credentials_info: { + description: 'Contents of BigQuery JSON credentials.', + type: 'string', + 'x-encrypted-extra': true, + }, + query: { + type: 'object', + }, + }, + type: 'object', + }, + preferred: false, + sqlalchemy_uri_placeholder: 'bigquery://{project_id}', + engine_information: { + supports_file_upload: true, + disable_ssh_tunneling: true, + }, + }, + { + available_drivers: ['rest'], + default_driver: 'apsw', + engine: 'gsheets', + name: 'Google Sheets', + preferred: false, + engine_information: { + supports_file_upload: false, + disable_ssh_tunneling: true, + }, + }, + { + available_drivers: ['connector'], + default_driver: 'connector', + engine: 'databricks', + name: 'Databricks', + parameters: { + properties: { + access_token: { + type: 'string', + }, + database: { + type: 'string', + }, + host: { + type: 'string', + }, + http_path: { + type: 'string', + }, + port: { + format: 'int32', + type: 'integer', + }, + }, + required: ['access_token', 'database', 
'host', 'http_path', 'port'], + type: 'object', + }, + preferred: true, + sqlalchemy_uri_placeholder: + 'databricks+connector://token:{access_token}@{host}:{port}/{database_name}', + }, + ], +}); +fetchMock.post(VALIDATE_PARAMS_ENDPOINT, { + message: 'OK', +}); + +describe('DatabaseModal', () => { + const renderAndWait = async () => { + const mounted = act(async () => { + render(, { + useRedux: true, + }); + }); + + return mounted; + }; + + beforeEach(async () => { + await renderAndWait(); + }); + + afterEach(cleanup); + + describe('Functional: Create new database', () => { + test('directs databases to the appropriate form (dynamic vs. SQL Alchemy)', async () => { + // ---------- Dynamic example (3-step form) + // Click the PostgreSQL button to enter the dynamic form + const postgreSQLButton = screen.getByRole('button', { + name: /postgresql/i, + }); + userEvent.click(postgreSQLButton); + + // Dynamic form has 3 steps, seeing this text means the dynamic form is present + const dynamicFormStepText = screen.getByText(/step 2 of 3/i); + + expect(dynamicFormStepText).toBeVisible(); + + // ---------- SQL Alchemy example (2-step form) + // Click the back button to go back to step 1, + // then click the SQLite button to enter the SQL Alchemy form + const backButton = screen.getByRole('button', { name: /back/i }); + userEvent.click(backButton); + + const sqliteButton = screen.getByRole('button', { + name: /sqlite/i, + }); + userEvent.click(sqliteButton); + + // SQL Alchemy form has 2 steps, seeing this text means the SQL Alchemy form is present + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + const sqlAlchemyFormStepText = screen.getByText(/step 2 of 2/i); + + expect(sqlAlchemyFormStepText).toBeVisible(); + }); + + describe('SQL Alchemy form flow', () => { + test('enters step 2 of 2 when proper database is selected', async () => { + userEvent.click( + screen.getByRole('button', { + name: /sqlite/i, + }), + ); + + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + }); + + test('runs fetchResource when "Connect" is clicked', () => { + /* ---------- 🐞 TODO (lyndsiWilliams): function mock is not currently working 🐞 ---------- + + // Mock useSingleViewResource + const mockUseSingleViewResource = jest.fn(); + mockUseSingleViewResource.mockImplementation(useSingleViewResource); + + const { fetchResource } = mockUseSingleViewResource('database'); + + // Invalid hook call? 
+ userEvent.click(screen.getByRole('button', { name: 'Connect' })); + expect(fetchResource).toHaveBeenCalled(); + + The line below makes the linter happy */ + expect.anything(); + }); + + describe('step 2 component interaction', () => { + test('properly interacts with textboxes', async () => { + userEvent.click( + screen.getByRole('button', { + name: /sqlite/i, + }), + ); + + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + const dbNametextBox = screen.getByTestId('database-name-input'); + expect(dbNametextBox).toHaveValue('SQLite'); + + userEvent.type(dbNametextBox, 'Different text'); + expect(dbNametextBox).toHaveValue('SQLiteDifferent text'); + + const sqlAlchemyURItextBox = screen.getByTestId( + 'sqlalchemy-uri-input', + ); + expect(sqlAlchemyURItextBox).toHaveValue(''); + + userEvent.type(sqlAlchemyURItextBox, 'Different text'); + expect(sqlAlchemyURItextBox).toHaveValue('Different text'); + }); + + test('runs testDatabaseConnection when "TEST CONNECTION" is clicked', () => { + /* ---------- 🐞 TODO (lyndsiWilliams): function mock is not currently working 🐞 ---------- + + // Mock testDatabaseConnection + const mockTestDatabaseConnection = jest.fn(); + mockTestDatabaseConnection.mockImplementation(testDatabaseConnection); + + userEvent.click( + screen.getByRole('button', { + name: /test connection/i, + }), + ); + + expect(mockTestDatabaseConnection).toHaveBeenCalled(); + + The line below makes the linter happy */ + expect.anything(); + }); + }); + + describe('SSH Tunnel Form interaction', () => { + test('properly interacts with SSH Tunnel form textboxes for dynamic form', async () => { + userEvent.click( + screen.getByRole('button', { + name: /postgresql/i, + }), + ); + expect(await screen.findByText(/step 2 of 3/i)).toBeInTheDocument(); + const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); + userEvent.click(SSHTunnelingToggle); + const SSHTunnelServerAddressInput = screen.getByTestId( + 'ssh-tunnel-server_address-input', + ); + expect(SSHTunnelServerAddressInput).toHaveValue(''); + userEvent.type(SSHTunnelServerAddressInput, 'localhost'); + expect(SSHTunnelServerAddressInput).toHaveValue('localhost'); + const SSHTunnelServerPortInput = screen.getByTestId( + 'ssh-tunnel-server_port-input', + ); + expect(SSHTunnelServerPortInput).toHaveValue(null); + userEvent.type(SSHTunnelServerPortInput, '22'); + expect(SSHTunnelServerPortInput).toHaveValue(22); + const SSHTunnelUsernameInput = screen.getByTestId( + 'ssh-tunnel-username-input', + ); + expect(SSHTunnelUsernameInput).toHaveValue(''); + userEvent.type(SSHTunnelUsernameInput, 'test'); + expect(SSHTunnelUsernameInput).toHaveValue('test'); + const SSHTunnelPasswordInput = screen.getByTestId( + 'ssh-tunnel-password-input', + ); + expect(SSHTunnelPasswordInput).toHaveValue(''); + userEvent.type(SSHTunnelPasswordInput, 'pass'); + expect(SSHTunnelPasswordInput).toHaveValue('pass'); + }); + + test('properly interacts with SSH Tunnel form textboxes', async () => { + userEvent.click( + screen.getByRole('button', { + name: /sqlite/i, + }), + ); + + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); + userEvent.click(SSHTunnelingToggle); + const SSHTunnelServerAddressInput = screen.getByTestId( + 'ssh-tunnel-server_address-input', + ); + expect(SSHTunnelServerAddressInput).toHaveValue(''); + userEvent.type(SSHTunnelServerAddressInput, 'localhost'); + expect(SSHTunnelServerAddressInput).toHaveValue('localhost'); + const 
SSHTunnelServerPortInput = screen.getByTestId( + 'ssh-tunnel-server_port-input', + ); + expect(SSHTunnelServerPortInput).toHaveValue(null); + userEvent.type(SSHTunnelServerPortInput, '22'); + expect(SSHTunnelServerPortInput).toHaveValue(22); + const SSHTunnelUsernameInput = screen.getByTestId( + 'ssh-tunnel-username-input', + ); + expect(SSHTunnelUsernameInput).toHaveValue(''); + userEvent.type(SSHTunnelUsernameInput, 'test'); + expect(SSHTunnelUsernameInput).toHaveValue('test'); + const SSHTunnelPasswordInput = screen.getByTestId( + 'ssh-tunnel-password-input', + ); + expect(SSHTunnelPasswordInput).toHaveValue(''); + userEvent.type(SSHTunnelPasswordInput, 'pass'); + expect(SSHTunnelPasswordInput).toHaveValue('pass'); + }); + + test('if the SSH Tunneling toggle is not true, no inputs are displayed', async () => { + userEvent.click( + screen.getByRole('button', { + name: /sqlite/i, + }), + ); + + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); + expect(SSHTunnelingToggle).toBeVisible(); + const SSHTunnelServerAddressInput = screen.queryByTestId( + 'ssh-tunnel-server_address-input', + ); + expect(SSHTunnelServerAddressInput).not.toBeInTheDocument(); + const SSHTunnelServerPortInput = screen.queryByTestId( + 'ssh-tunnel-server_port-input', + ); + expect(SSHTunnelServerPortInput).not.toBeInTheDocument(); + const SSHTunnelUsernameInput = screen.queryByTestId( + 'ssh-tunnel-username-input', + ); + expect(SSHTunnelUsernameInput).not.toBeInTheDocument(); + const SSHTunnelPasswordInput = screen.queryByTestId( + 'ssh-tunnel-password-input', + ); + expect(SSHTunnelPasswordInput).not.toBeInTheDocument(); + }); + + test('If user changes the login method, the inputs change', async () => { + userEvent.click( + screen.getByRole('button', { + name: /sqlite/i, + }), + ); + + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); + userEvent.click(SSHTunnelingToggle); + const SSHTunnelUsePasswordInput = screen.getByTestId( + 'ssh-tunnel-use_password-radio', + ); + expect(SSHTunnelUsePasswordInput).toBeVisible(); + const SSHTunnelUsePrivateKeyInput = screen.getByTestId( + 'ssh-tunnel-use_private_key-radio', + ); + expect(SSHTunnelUsePrivateKeyInput).toBeVisible(); + const SSHTunnelPasswordInput = screen.getByTestId( + 'ssh-tunnel-password-input', + ); + // By default, we use Password as login method + expect(SSHTunnelPasswordInput).toBeVisible(); + // Change the login method to use private key + userEvent.click(SSHTunnelUsePrivateKeyInput); + const SSHTunnelPrivateKeyInput = screen.getByTestId( + 'ssh-tunnel-private_key-input', + ); + expect(SSHTunnelPrivateKeyInput).toBeVisible(); + const SSHTunnelPrivateKeyPasswordInput = screen.getByTestId( + 'ssh-tunnel-private_key_password-input', + ); + expect(SSHTunnelPrivateKeyPasswordInput).toBeVisible(); + }); + }); + }); + }); +}); From 125e84ef7036d3d559a739172f02e32ca2dd1970 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 21 Dec 2024 16:02:56 -0800 Subject: [PATCH 03/26] another run --- .github/workflows/superset-frontend.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 94030a35e5e43..5a532e8122a79 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -83,11 +83,12 @@ jobs: - name: npm run test with coverage run: | mkdir -p 
${{ github.workspace }}/coverage + chmod -R 777 ${{ github.workspace }}/coverage docker run \ --rm superset-node-${{ github.sha }} \ -v ${{ github.workspace }}/coverage:/app/superset-frontend/coverage \ bash -c \ - 'npm run test -- --coverage --silent --shard=${{ matrix.shard }}/8 --coverageReporters="json-summary" && find ./coverage' + 'npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters="json-summary" && find ./coverage' find ${{ github.workspace }}/coverage - name: Upload coverage artifact From 4ec6f039785e0c28be5cb45cf5644aea612571f9 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Mon, 23 Dec 2024 11:14:07 -0800 Subject: [PATCH 04/26] try without switch --- .github/workflows/superset-frontend.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 5a532e8122a79..53aa1b825f94a 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -88,7 +88,7 @@ jobs: --rm superset-node-${{ github.sha }} \ -v ${{ github.workspace }}/coverage:/app/superset-frontend/coverage \ bash -c \ - 'npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters="json-summary" && find ./coverage' + 'npm run test -- --coverage --shard=${{ matrix.shard }}/8 && find ./coverage' find ${{ github.workspace }}/coverage - name: Upload coverage artifact From eeb0f9ef058657a7f39f4593083f318704481ad0 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Thu, 2 Jan 2025 14:21:44 -0800 Subject: [PATCH 05/26] testing --- .github/workflows/superset-frontend.yml | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 53aa1b825f94a..55dc4ece2ac37 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -82,14 +82,12 @@ jobs: - name: npm run test with coverage run: | - mkdir -p ${{ github.workspace }}/coverage - chmod -R 777 ${{ github.workspace }}/coverage docker run \ --rm superset-node-${{ github.sha }} \ - -v ${{ github.workspace }}/coverage:/app/superset-frontend/coverage \ + -v ${{ github.workspace }}/superset-frontend:/app/superset-frontend \ bash -c \ - 'npm run test -- --coverage --shard=${{ matrix.shard }}/8 && find ./coverage' - find ${{ github.workspace }}/coverage + 'npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json-summary' + find ${{ github.workspace }}/superset-frontend/coverage - name: Upload coverage artifact uses: actions/upload-artifact@v4 From af5a69eddfe8264d43fd988fc6a54a9de66ab276 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 4 Jan 2025 12:19:26 -0800 Subject: [PATCH 06/26] use -v --- .github/workflows/superset-frontend.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 55dc4ece2ac37..156b43c953f43 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -82,9 +82,11 @@ jobs: - name: npm run test with coverage run: | + mkdir -p ${{ github.workspace }}/superset-frontend/coverage docker run \ --rm superset-node-${{ github.sha }} \ - -v ${{ github.workspace }}/superset-frontend:/app/superset-frontend \ + -v ${{ github.workspace }}/superset-frontend/coverage \ + -w /app/superset-frontend/coverage \ bash -c \ 'npm run test -- --coverage --shard=${{ matrix.shard }}/8 
--coverageReporters=json-summary' find ${{ github.workspace }}/superset-frontend/coverage From 4e1f7a31a95140ee215b7a8bc1b9b2e765b0c600 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 4 Jan 2025 12:46:24 -0800 Subject: [PATCH 07/26] cache and simplify --- .github/workflows/superset-frontend.yml | 28 ++++++------------------- 1 file changed, 6 insertions(+), 22 deletions(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 156b43c953f43..108a7d4ed3cb3 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -46,22 +46,10 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - TAG="superset-node-${{ github.sha }}" - supersetbot docker \ - --load \ + --push \ --preset superset-node-ci \ --platform "linux/amd64" \ - --extra-flags "--tag $TAG" - - docker save $TAG | gzip > superset-node.tar.gz - - - name: Upload Docker Image Artifact - if: steps.check.outputs.frontend - uses: actions/upload-artifact@v4 - with: - name: docker-image - path: superset-node.tar.gz sharded-jest-tests: needs: frontend-build @@ -70,26 +58,22 @@ jobs: matrix: shard: [1, 2, 3, 4, 5, 6, 7, 8] runs-on: ubuntu-22.04 + env: + TAG: apache/superset:GHA-${GITHUB_RUN_ID} steps: - - name: Download Docker Image Artifact - uses: actions/download-artifact@v4 - with: - name: docker-image - - name: Load Docker Image run: | - docker load -i superset-node.tar.gz + docker pull $TAG - name: npm run test with coverage run: | mkdir -p ${{ github.workspace }}/superset-frontend/coverage docker run \ - --rm superset-node-${{ github.sha }} \ + --rm $TAG \ -v ${{ github.workspace }}/superset-frontend/coverage \ -w /app/superset-frontend/coverage \ bash -c \ - 'npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json-summary' - find ${{ github.workspace }}/superset-frontend/coverage + "npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json-summary" - name: Upload coverage artifact uses: actions/upload-artifact@v4 From f46bf0da3d19145b3fc4993a5c68977e526439c9 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 4 Jan 2025 12:50:00 -0800 Subject: [PATCH 08/26] fixing env var --- .github/workflows/superset-frontend.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 108a7d4ed3cb3..f5cfc4a5eec0a 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -59,7 +59,7 @@ jobs: shard: [1, 2, 3, 4, 5, 6, 7, 8] runs-on: ubuntu-22.04 env: - TAG: apache/superset:GHA-${GITHUB_RUN_ID} + TAG: apache/superset:GHA-${{ GITHUB_RUN_ID }} steps: - name: Load Docker Image run: | From 823f5286b06046a412ff5dc3fb05b617e9413cd0 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 4 Jan 2025 12:52:36 -0800 Subject: [PATCH 09/26] another try --- .github/workflows/superset-frontend.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index f5cfc4a5eec0a..1c5bf42e6091d 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -35,6 +35,7 @@ jobs: dockerhub-user: ${{ secrets.DOCKERHUB_USER }} dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }} build: "true" + install-docker-compose: "false" - name: Setup supersetbot if: steps.check.outputs.frontend @@ -59,7 +60,7 @@ jobs: shard: [1, 2, 3, 4, 5, 6, 7, 8] runs-on: 
ubuntu-22.04 env: - TAG: apache/superset:GHA-${{ GITHUB_RUN_ID }} + TAG: apache/superset:GHA-${{ vars.GITHUB_RUN_ID }} steps: - name: Load Docker Image run: | From 1ef9c7db46233bf83f3d8fe8e00dfb25387e9ba4 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 4 Jan 2025 12:54:34 -0800 Subject: [PATCH 10/26] one more try --- .github/workflows/superset-frontend.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 1c5bf42e6091d..0979a5640d8ae 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -60,7 +60,7 @@ jobs: shard: [1, 2, 3, 4, 5, 6, 7, 8] runs-on: ubuntu-22.04 env: - TAG: apache/superset:GHA-${{ vars.GITHUB_RUN_ID }} + TAG: apache/superset:GHA-${{ github.run_id }} steps: - name: Load Docker Image run: | From 85c22269a765c1a84ea058d7b8609fabda667f4d Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 4 Jan 2025 13:01:44 -0800 Subject: [PATCH 11/26] simpler --- .github/workflows/superset-frontend.yml | 33 +++++++------------------ 1 file changed, 9 insertions(+), 24 deletions(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 0979a5640d8ae..1cf1a09898d28 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -13,6 +13,8 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} cancel-in-progress: true +env: + TAG: apache/superset:GHA-${{ github.run_id }} jobs: frontend-build: runs-on: ubuntu-24.04 @@ -59,8 +61,6 @@ jobs: matrix: shard: [1, 2, 3, 4, 5, 6, 7, 8] runs-on: ubuntu-22.04 - env: - TAG: apache/superset:GHA-${{ github.run_id }} steps: - name: Load Docker Image run: | @@ -71,8 +71,8 @@ jobs: mkdir -p ${{ github.workspace }}/superset-frontend/coverage docker run \ --rm $TAG \ - -v ${{ github.workspace }}/superset-frontend/coverage \ - -w /app/superset-frontend/coverage \ + -v ${{ github.workspace }}/superset-frontend/coverage:/app/superset-frontend/coverage \ + -w /app/superset-frontend \ bash -c \ "npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json-summary" @@ -112,18 +112,13 @@ jobs: if: needs.frontend-build.result == 'success' runs-on: ubuntu-22.04 steps: - - name: Download Docker Image Artifact - uses: actions/download-artifact@v4 - with: - name: docker-image - - name: Load Docker Image run: | - docker load -i superset-node.tar.gz + docker pull $TAG - name: superset-ui/core coverage run: | - docker run --rm superset-node-${{ github.sha }} bash -c \ + docker run --rm $TAG bash -c \ "npm run core:cover" lint-frontend: @@ -131,18 +126,13 @@ jobs: if: needs.frontend-build.result == 'success' runs-on: ubuntu-22.04 steps: - - name: Download Docker Image Artifact - uses: actions/download-artifact@v4 - with: - name: docker-image - - name: Load Docker Image run: | - docker load -i superset-node.tar.gz + docker pull $TAG - name: eslint run: | - docker run --rm superset-node-${{ github.sha }} bash -c \ + docker run --rm $TAG bash -c \ "npm i && npm run eslint -- . 
--quiet" - name: tsc run: | @@ -154,14 +144,9 @@ jobs: if: needs.frontend-build.result == 'success' runs-on: ubuntu-22.04 steps: - - name: Download Docker Image Artifact - uses: actions/download-artifact@v4 - with: - name: docker-image - - name: Load Docker Image run: | - docker load -i superset-node.tar.gz + docker pull $TAG - name: Build plugins packages run: | From a4ab0290810ba6bf223a4c2e49ff5eb2db1fafa4 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 4 Jan 2025 13:17:48 -0800 Subject: [PATCH 12/26] fixing run command --- .github/workflows/superset-frontend.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 1cf1a09898d28..554c5aea4bd6e 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -70,9 +70,8 @@ jobs: run: | mkdir -p ${{ github.workspace }}/superset-frontend/coverage docker run \ + -v ${{ github.workspace }}/superset-frontend/coverage:/app/superset-frontend/coverage \ --rm $TAG \ - -v ${{ github.workspace }}/superset-frontend/coverage:/app/superset-frontend/coverage \ - -w /app/superset-frontend \ bash -c \ "npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json-summary" From 54cf7d91fb09eb2bd3b50f1945b4c25f2b342b0b Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 4 Jan 2025 13:24:59 -0800 Subject: [PATCH 13/26] final-ish --- .github/workflows/superset-frontend.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 554c5aea4bd6e..fa5efa974af1e 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -60,6 +60,7 @@ jobs: strategy: matrix: shard: [1, 2, 3, 4, 5, 6, 7, 8] + fail-fast: false runs-on: ubuntu-22.04 steps: - name: Load Docker Image @@ -135,7 +136,7 @@ jobs: "npm i && npm run eslint -- . 
--quiet" - name: tsc run: | - docker run --rm superset-node-${{ github.sha }} bash -c \ + docker run --rm $TAG bash -c \ "npm run type" validate-frontend: @@ -149,9 +150,9 @@ jobs: - name: Build plugins packages run: | - docker run --rm superset-node-${{ github.sha }} bash -c \ + docker run --rm $TAG bash -c \ "npm run plugins:build" - name: Build plugins Storybook run: | - docker run --rm superset-node-${{ github.sha }} bash -c \ + docker run --rm $TAG bash -c \ "npm run plugins:build-storybook" From df9d317bcd4509f078a3b77b2defca45af6ec1c4 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 4 Jan 2025 13:33:39 -0800 Subject: [PATCH 14/26] collapse file --- .../databases/DatabaseModal/index.1.test.tsx | 549 ------------------ .../databases/DatabaseModal/index.3.test.tsx | 549 ------------------ .../{index.2.test.tsx => index.test.tsx} | 209 +++++++ 3 files changed, 209 insertions(+), 1098 deletions(-) delete mode 100644 superset-frontend/src/features/databases/DatabaseModal/index.1.test.tsx delete mode 100644 superset-frontend/src/features/databases/DatabaseModal/index.3.test.tsx rename superset-frontend/src/features/databases/DatabaseModal/{index.2.test.tsx => index.test.tsx} (87%) diff --git a/superset-frontend/src/features/databases/DatabaseModal/index.1.test.tsx b/superset-frontend/src/features/databases/DatabaseModal/index.1.test.tsx deleted file mode 100644 index 025bbf0227b25..0000000000000 --- a/superset-frontend/src/features/databases/DatabaseModal/index.1.test.tsx +++ /dev/null @@ -1,549 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -// TODO: These tests should be made atomic in separate files - -import fetchMock from 'fetch-mock'; -import userEvent from '@testing-library/user-event'; -import { render, screen, cleanup, act } from 'spec/helpers/testing-library'; -import DatabaseModal from './index'; - -jest.mock('@superset-ui/core', () => ({ - ...jest.requireActual('@superset-ui/core'), - isFeatureEnabled: () => true, -})); - -const mockHistoryPush = jest.fn(); -jest.mock('react-router-dom', () => ({ - ...jest.requireActual('react-router-dom'), - useHistory: () => ({ - push: mockHistoryPush, - }), -})); - -const dbProps = { - show: true, - database_name: 'my database', - sqlalchemy_uri: 'postgres://superset:superset@something:1234/superset', - onHide: () => {}, -}; - -const DATABASE_FETCH_ENDPOINT = 'glob:*/api/v1/database/10'; -const AVAILABLE_DB_ENDPOINT = 'glob:*/api/v1/database/available*'; -const VALIDATE_PARAMS_ENDPOINT = 'glob:*/api/v1/database/validate_parameters*'; -const DATABASE_CONNECT_ENDPOINT = 'glob:*/api/v1/database/'; - -fetchMock.post(DATABASE_CONNECT_ENDPOINT, { - id: 10, - result: { - configuration_method: 'sqlalchemy_form', - database_name: 'Other2', - driver: 'apsw', - expose_in_sqllab: true, - extra: '{"allows_virtual_table_explore":true}', - sqlalchemy_uri: 'gsheets://', - }, - json: 'foo', -}); - -fetchMock.config.overwriteRoutes = true; -fetchMock.get(DATABASE_FETCH_ENDPOINT, { - result: { - id: 10, - database_name: 'my database', - expose_in_sqllab: false, - allow_ctas: false, - allow_cvas: false, - configuration_method: 'sqlalchemy_form', - }, -}); -fetchMock.mock(AVAILABLE_DB_ENDPOINT, { - databases: [ - { - available_drivers: ['psycopg2'], - default_driver: 'psycopg2', - engine: 'postgresql', - name: 'PostgreSQL', - parameters: { - properties: { - database: { - description: 'Database name', - type: 'string', - }, - encryption: { - description: 'Use an encrypted connection to the database', - type: 'boolean', - }, - host: { - description: 'Hostname or IP address', - type: 'string', - }, - password: { - description: 'Password', - nullable: true, - type: 'string', - }, - port: { - description: 'Database port', - format: 'int32', - maximum: 65536, - minimum: 0, - type: 'integer', - }, - query: { - additionalProperties: {}, - description: 'Additional parameters', - type: 'object', - }, - ssh: { - description: 'Create SSH Tunnel', - type: 'boolean', - }, - username: { - description: 'Username', - nullable: true, - type: 'string', - }, - }, - required: ['database', 'host', 'port', 'username'], - type: 'object', - }, - preferred: true, - sqlalchemy_uri_placeholder: - 'postgresql://user:password@host:port/dbname[?key=value&key=value...]', - engine_information: { - supports_file_upload: true, - disable_ssh_tunneling: false, - }, - }, - { - available_drivers: ['rest'], - engine: 'presto', - name: 'Presto', - preferred: true, - engine_information: { - supports_file_upload: true, - disable_ssh_tunneling: false, - }, - }, - { - available_drivers: ['mysqldb'], - default_driver: 'mysqldb', - engine: 'mysql', - name: 'MySQL', - parameters: { - properties: { - database: { - description: 'Database name', - type: 'string', - }, - encryption: { - description: 'Use an encrypted connection to the database', - type: 'boolean', - }, - host: { - description: 'Hostname or IP address', - type: 'string', - }, - password: { - description: 'Password', - nullable: true, - type: 'string', - }, - port: { - description: 'Database port', - format: 'int32', - maximum: 65536, - minimum: 0, - type: 'integer', - }, - 
query: { - additionalProperties: {}, - description: 'Additional parameters', - type: 'object', - }, - username: { - description: 'Username', - nullable: true, - type: 'string', - }, - }, - required: ['database', 'host', 'port', 'username'], - type: 'object', - }, - preferred: true, - sqlalchemy_uri_placeholder: - 'mysql://user:password@host:port/dbname[?key=value&key=value...]', - engine_information: { - supports_file_upload: true, - disable_ssh_tunneling: false, - }, - }, - { - available_drivers: ['pysqlite'], - engine: 'sqlite', - name: 'SQLite', - preferred: true, - engine_information: { - supports_file_upload: true, - disable_ssh_tunneling: false, - }, - }, - { - available_drivers: ['rest'], - engine: 'druid', - name: 'Apache Druid', - preferred: false, - engine_information: { - supports_file_upload: true, - disable_ssh_tunneling: false, - }, - }, - { - available_drivers: ['bigquery'], - default_driver: 'bigquery', - engine: 'bigquery', - name: 'Google BigQuery', - parameters: { - properties: { - credentials_info: { - description: 'Contents of BigQuery JSON credentials.', - type: 'string', - 'x-encrypted-extra': true, - }, - query: { - type: 'object', - }, - }, - type: 'object', - }, - preferred: false, - sqlalchemy_uri_placeholder: 'bigquery://{project_id}', - engine_information: { - supports_file_upload: true, - disable_ssh_tunneling: true, - }, - }, - { - available_drivers: ['rest'], - default_driver: 'apsw', - engine: 'gsheets', - name: 'Google Sheets', - preferred: false, - engine_information: { - supports_file_upload: false, - disable_ssh_tunneling: true, - }, - }, - { - available_drivers: ['connector'], - default_driver: 'connector', - engine: 'databricks', - name: 'Databricks', - parameters: { - properties: { - access_token: { - type: 'string', - }, - database: { - type: 'string', - }, - host: { - type: 'string', - }, - http_path: { - type: 'string', - }, - port: { - format: 'int32', - type: 'integer', - }, - }, - required: ['access_token', 'database', 'host', 'http_path', 'port'], - type: 'object', - }, - preferred: true, - sqlalchemy_uri_placeholder: - 'databricks+connector://token:{access_token}@{host}:{port}/{database_name}', - }, - ], -}); -fetchMock.post(VALIDATE_PARAMS_ENDPOINT, { - message: 'OK', -}); - -describe('DatabaseModal', () => { - const renderAndWait = async () => { - const mounted = act(async () => { - render(, { - useRedux: true, - }); - }); - - return mounted; - }; - - beforeEach(async () => { - await renderAndWait(); - }); - - afterEach(cleanup); - - describe('Functional: Create new database', () => { - test('directs databases to the appropriate form (dynamic vs. 
SQL Alchemy)', async () => { - // ---------- Dynamic example (3-step form) - // Click the PostgreSQL button to enter the dynamic form - const postgreSQLButton = screen.getByRole('button', { - name: /postgresql/i, - }); - userEvent.click(postgreSQLButton); - - // Dynamic form has 3 steps, seeing this text means the dynamic form is present - const dynamicFormStepText = screen.getByText(/step 2 of 3/i); - - expect(dynamicFormStepText).toBeVisible(); - - // ---------- SQL Alchemy example (2-step form) - // Click the back button to go back to step 1, - // then click the SQLite button to enter the SQL Alchemy form - const backButton = screen.getByRole('button', { name: /back/i }); - userEvent.click(backButton); - - const sqliteButton = screen.getByRole('button', { - name: /sqlite/i, - }); - userEvent.click(sqliteButton); - - // SQL Alchemy form has 2 steps, seeing this text means the SQL Alchemy form is present - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - const sqlAlchemyFormStepText = screen.getByText(/step 2 of 2/i); - - expect(sqlAlchemyFormStepText).toBeVisible(); - }); - - describe('SQL Alchemy form flow', () => { - test('enters step 2 of 2 when proper database is selected', async () => { - userEvent.click( - screen.getByRole('button', { - name: /sqlite/i, - }), - ); - - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - }); - - test('runs fetchResource when "Connect" is clicked', () => { - /* ---------- 🐞 TODO (lyndsiWilliams): function mock is not currently working 🐞 ---------- - - // Mock useSingleViewResource - const mockUseSingleViewResource = jest.fn(); - mockUseSingleViewResource.mockImplementation(useSingleViewResource); - - const { fetchResource } = mockUseSingleViewResource('database'); - - // Invalid hook call? 
- userEvent.click(screen.getByRole('button', { name: 'Connect' })); - expect(fetchResource).toHaveBeenCalled(); - - The line below makes the linter happy */ - expect.anything(); - }); - - describe('step 2 component interaction', () => { - test('properly interacts with textboxes', async () => { - userEvent.click( - screen.getByRole('button', { - name: /sqlite/i, - }), - ); - - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - const dbNametextBox = screen.getByTestId('database-name-input'); - expect(dbNametextBox).toHaveValue('SQLite'); - - userEvent.type(dbNametextBox, 'Different text'); - expect(dbNametextBox).toHaveValue('SQLiteDifferent text'); - - const sqlAlchemyURItextBox = screen.getByTestId( - 'sqlalchemy-uri-input', - ); - expect(sqlAlchemyURItextBox).toHaveValue(''); - - userEvent.type(sqlAlchemyURItextBox, 'Different text'); - expect(sqlAlchemyURItextBox).toHaveValue('Different text'); - }); - - test('runs testDatabaseConnection when "TEST CONNECTION" is clicked', () => { - /* ---------- 🐞 TODO (lyndsiWilliams): function mock is not currently working 🐞 ---------- - - // Mock testDatabaseConnection - const mockTestDatabaseConnection = jest.fn(); - mockTestDatabaseConnection.mockImplementation(testDatabaseConnection); - - userEvent.click( - screen.getByRole('button', { - name: /test connection/i, - }), - ); - - expect(mockTestDatabaseConnection).toHaveBeenCalled(); - - The line below makes the linter happy */ - expect.anything(); - }); - }); - - describe('SSH Tunnel Form interaction', () => { - test('properly interacts with SSH Tunnel form textboxes for dynamic form', async () => { - userEvent.click( - screen.getByRole('button', { - name: /postgresql/i, - }), - ); - expect(await screen.findByText(/step 2 of 3/i)).toBeInTheDocument(); - const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); - userEvent.click(SSHTunnelingToggle); - const SSHTunnelServerAddressInput = screen.getByTestId( - 'ssh-tunnel-server_address-input', - ); - expect(SSHTunnelServerAddressInput).toHaveValue(''); - userEvent.type(SSHTunnelServerAddressInput, 'localhost'); - expect(SSHTunnelServerAddressInput).toHaveValue('localhost'); - const SSHTunnelServerPortInput = screen.getByTestId( - 'ssh-tunnel-server_port-input', - ); - expect(SSHTunnelServerPortInput).toHaveValue(null); - userEvent.type(SSHTunnelServerPortInput, '22'); - expect(SSHTunnelServerPortInput).toHaveValue(22); - const SSHTunnelUsernameInput = screen.getByTestId( - 'ssh-tunnel-username-input', - ); - expect(SSHTunnelUsernameInput).toHaveValue(''); - userEvent.type(SSHTunnelUsernameInput, 'test'); - expect(SSHTunnelUsernameInput).toHaveValue('test'); - const SSHTunnelPasswordInput = screen.getByTestId( - 'ssh-tunnel-password-input', - ); - expect(SSHTunnelPasswordInput).toHaveValue(''); - userEvent.type(SSHTunnelPasswordInput, 'pass'); - expect(SSHTunnelPasswordInput).toHaveValue('pass'); - }); - - test('properly interacts with SSH Tunnel form textboxes', async () => { - userEvent.click( - screen.getByRole('button', { - name: /sqlite/i, - }), - ); - - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); - userEvent.click(SSHTunnelingToggle); - const SSHTunnelServerAddressInput = screen.getByTestId( - 'ssh-tunnel-server_address-input', - ); - expect(SSHTunnelServerAddressInput).toHaveValue(''); - userEvent.type(SSHTunnelServerAddressInput, 'localhost'); - expect(SSHTunnelServerAddressInput).toHaveValue('localhost'); - const 
SSHTunnelServerPortInput = screen.getByTestId( - 'ssh-tunnel-server_port-input', - ); - expect(SSHTunnelServerPortInput).toHaveValue(null); - userEvent.type(SSHTunnelServerPortInput, '22'); - expect(SSHTunnelServerPortInput).toHaveValue(22); - const SSHTunnelUsernameInput = screen.getByTestId( - 'ssh-tunnel-username-input', - ); - expect(SSHTunnelUsernameInput).toHaveValue(''); - userEvent.type(SSHTunnelUsernameInput, 'test'); - expect(SSHTunnelUsernameInput).toHaveValue('test'); - const SSHTunnelPasswordInput = screen.getByTestId( - 'ssh-tunnel-password-input', - ); - expect(SSHTunnelPasswordInput).toHaveValue(''); - userEvent.type(SSHTunnelPasswordInput, 'pass'); - expect(SSHTunnelPasswordInput).toHaveValue('pass'); - }); - - test('if the SSH Tunneling toggle is not true, no inputs are displayed', async () => { - userEvent.click( - screen.getByRole('button', { - name: /sqlite/i, - }), - ); - - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); - expect(SSHTunnelingToggle).toBeVisible(); - const SSHTunnelServerAddressInput = screen.queryByTestId( - 'ssh-tunnel-server_address-input', - ); - expect(SSHTunnelServerAddressInput).not.toBeInTheDocument(); - const SSHTunnelServerPortInput = screen.queryByTestId( - 'ssh-tunnel-server_port-input', - ); - expect(SSHTunnelServerPortInput).not.toBeInTheDocument(); - const SSHTunnelUsernameInput = screen.queryByTestId( - 'ssh-tunnel-username-input', - ); - expect(SSHTunnelUsernameInput).not.toBeInTheDocument(); - const SSHTunnelPasswordInput = screen.queryByTestId( - 'ssh-tunnel-password-input', - ); - expect(SSHTunnelPasswordInput).not.toBeInTheDocument(); - }); - - test('If user changes the login method, the inputs change', async () => { - userEvent.click( - screen.getByRole('button', { - name: /sqlite/i, - }), - ); - - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); - userEvent.click(SSHTunnelingToggle); - const SSHTunnelUsePasswordInput = screen.getByTestId( - 'ssh-tunnel-use_password-radio', - ); - expect(SSHTunnelUsePasswordInput).toBeVisible(); - const SSHTunnelUsePrivateKeyInput = screen.getByTestId( - 'ssh-tunnel-use_private_key-radio', - ); - expect(SSHTunnelUsePrivateKeyInput).toBeVisible(); - const SSHTunnelPasswordInput = screen.getByTestId( - 'ssh-tunnel-password-input', - ); - // By default, we use Password as login method - expect(SSHTunnelPasswordInput).toBeVisible(); - // Change the login method to use private key - userEvent.click(SSHTunnelUsePrivateKeyInput); - const SSHTunnelPrivateKeyInput = screen.getByTestId( - 'ssh-tunnel-private_key-input', - ); - expect(SSHTunnelPrivateKeyInput).toBeVisible(); - const SSHTunnelPrivateKeyPasswordInput = screen.getByTestId( - 'ssh-tunnel-private_key_password-input', - ); - expect(SSHTunnelPrivateKeyPasswordInput).toBeVisible(); - }); - }); - }); - }); -}); diff --git a/superset-frontend/src/features/databases/DatabaseModal/index.3.test.tsx b/superset-frontend/src/features/databases/DatabaseModal/index.3.test.tsx deleted file mode 100644 index abb711238d686..0000000000000 --- a/superset-frontend/src/features/databases/DatabaseModal/index.3.test.tsx +++ /dev/null @@ -1,549 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -// TODO: These tests should be made atomic in separate files - -import fetchMock from 'fetch-mock'; -import userEvent from '@testing-library/user-event'; -import { act, cleanup, render, screen } from 'spec/helpers/testing-library'; -import DatabaseModal from './index'; - -jest.mock('@superset-ui/core', () => ({ - ...jest.requireActual('@superset-ui/core'), - isFeatureEnabled: () => true, -})); - -const mockHistoryPush = jest.fn(); -jest.mock('react-router-dom', () => ({ - ...jest.requireActual('react-router-dom'), - useHistory: () => ({ - push: mockHistoryPush, - }), -})); - -const dbProps = { - show: true, - database_name: 'my database', - sqlalchemy_uri: 'postgres://superset:superset@something:1234/superset', - onHide: () => {}, -}; - -const DATABASE_FETCH_ENDPOINT = 'glob:*/api/v1/database/10'; -const AVAILABLE_DB_ENDPOINT = 'glob:*/api/v1/database/available*'; -const VALIDATE_PARAMS_ENDPOINT = 'glob:*/api/v1/database/validate_parameters*'; -const DATABASE_CONNECT_ENDPOINT = 'glob:*/api/v1/database/'; - -fetchMock.post(DATABASE_CONNECT_ENDPOINT, { - id: 10, - result: { - configuration_method: 'sqlalchemy_form', - database_name: 'Other2', - driver: 'apsw', - expose_in_sqllab: true, - extra: '{"allows_virtual_table_explore":true}', - sqlalchemy_uri: 'gsheets://', - }, - json: 'foo', -}); - -fetchMock.config.overwriteRoutes = true; -fetchMock.get(DATABASE_FETCH_ENDPOINT, { - result: { - id: 10, - database_name: 'my database', - expose_in_sqllab: false, - allow_ctas: false, - allow_cvas: false, - configuration_method: 'sqlalchemy_form', - }, -}); -fetchMock.mock(AVAILABLE_DB_ENDPOINT, { - databases: [ - { - available_drivers: ['psycopg2'], - default_driver: 'psycopg2', - engine: 'postgresql', - name: 'PostgreSQL', - parameters: { - properties: { - database: { - description: 'Database name', - type: 'string', - }, - encryption: { - description: 'Use an encrypted connection to the database', - type: 'boolean', - }, - host: { - description: 'Hostname or IP address', - type: 'string', - }, - password: { - description: 'Password', - nullable: true, - type: 'string', - }, - port: { - description: 'Database port', - format: 'int32', - maximum: 65536, - minimum: 0, - type: 'integer', - }, - query: { - additionalProperties: {}, - description: 'Additional parameters', - type: 'object', - }, - ssh: { - description: 'Create SSH Tunnel', - type: 'boolean', - }, - username: { - description: 'Username', - nullable: true, - type: 'string', - }, - }, - required: ['database', 'host', 'port', 'username'], - type: 'object', - }, - preferred: true, - sqlalchemy_uri_placeholder: - 'postgresql://user:password@host:port/dbname[?key=value&key=value...]', - engine_information: { - supports_file_upload: true, - disable_ssh_tunneling: false, - }, - }, - { - available_drivers: ['rest'], - engine: 'presto', - name: 'Presto', - preferred: true, - engine_information: { - supports_file_upload: true, - disable_ssh_tunneling: false, - }, 
- }, - { - available_drivers: ['mysqldb'], - default_driver: 'mysqldb', - engine: 'mysql', - name: 'MySQL', - parameters: { - properties: { - database: { - description: 'Database name', - type: 'string', - }, - encryption: { - description: 'Use an encrypted connection to the database', - type: 'boolean', - }, - host: { - description: 'Hostname or IP address', - type: 'string', - }, - password: { - description: 'Password', - nullable: true, - type: 'string', - }, - port: { - description: 'Database port', - format: 'int32', - maximum: 65536, - minimum: 0, - type: 'integer', - }, - query: { - additionalProperties: {}, - description: 'Additional parameters', - type: 'object', - }, - username: { - description: 'Username', - nullable: true, - type: 'string', - }, - }, - required: ['database', 'host', 'port', 'username'], - type: 'object', - }, - preferred: true, - sqlalchemy_uri_placeholder: - 'mysql://user:password@host:port/dbname[?key=value&key=value...]', - engine_information: { - supports_file_upload: true, - disable_ssh_tunneling: false, - }, - }, - { - available_drivers: ['pysqlite'], - engine: 'sqlite', - name: 'SQLite', - preferred: true, - engine_information: { - supports_file_upload: true, - disable_ssh_tunneling: false, - }, - }, - { - available_drivers: ['rest'], - engine: 'druid', - name: 'Apache Druid', - preferred: false, - engine_information: { - supports_file_upload: true, - disable_ssh_tunneling: false, - }, - }, - { - available_drivers: ['bigquery'], - default_driver: 'bigquery', - engine: 'bigquery', - name: 'Google BigQuery', - parameters: { - properties: { - credentials_info: { - description: 'Contents of BigQuery JSON credentials.', - type: 'string', - 'x-encrypted-extra': true, - }, - query: { - type: 'object', - }, - }, - type: 'object', - }, - preferred: false, - sqlalchemy_uri_placeholder: 'bigquery://{project_id}', - engine_information: { - supports_file_upload: true, - disable_ssh_tunneling: true, - }, - }, - { - available_drivers: ['rest'], - default_driver: 'apsw', - engine: 'gsheets', - name: 'Google Sheets', - preferred: false, - engine_information: { - supports_file_upload: false, - disable_ssh_tunneling: true, - }, - }, - { - available_drivers: ['connector'], - default_driver: 'connector', - engine: 'databricks', - name: 'Databricks', - parameters: { - properties: { - access_token: { - type: 'string', - }, - database: { - type: 'string', - }, - host: { - type: 'string', - }, - http_path: { - type: 'string', - }, - port: { - format: 'int32', - type: 'integer', - }, - }, - required: ['access_token', 'database', 'host', 'http_path', 'port'], - type: 'object', - }, - preferred: true, - sqlalchemy_uri_placeholder: - 'databricks+connector://token:{access_token}@{host}:{port}/{database_name}', - }, - ], -}); -fetchMock.post(VALIDATE_PARAMS_ENDPOINT, { - message: 'OK', -}); - -describe('DatabaseModal', () => { - const renderAndWait = async () => { - const mounted = act(async () => { - render(, { - useRedux: true, - }); - }); - - return mounted; - }; - - beforeEach(async () => { - await renderAndWait(); - }); - - afterEach(cleanup); - - describe('Functional: Create new database', () => { - test('directs databases to the appropriate form (dynamic vs. 
SQL Alchemy)', async () => { - // ---------- Dynamic example (3-step form) - // Click the PostgreSQL button to enter the dynamic form - const postgreSQLButton = screen.getByRole('button', { - name: /postgresql/i, - }); - userEvent.click(postgreSQLButton); - - // Dynamic form has 3 steps, seeing this text means the dynamic form is present - const dynamicFormStepText = screen.getByText(/step 2 of 3/i); - - expect(dynamicFormStepText).toBeVisible(); - - // ---------- SQL Alchemy example (2-step form) - // Click the back button to go back to step 1, - // then click the SQLite button to enter the SQL Alchemy form - const backButton = screen.getByRole('button', { name: /back/i }); - userEvent.click(backButton); - - const sqliteButton = screen.getByRole('button', { - name: /sqlite/i, - }); - userEvent.click(sqliteButton); - - // SQL Alchemy form has 2 steps, seeing this text means the SQL Alchemy form is present - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - const sqlAlchemyFormStepText = screen.getByText(/step 2 of 2/i); - - expect(sqlAlchemyFormStepText).toBeVisible(); - }); - - describe('SQL Alchemy form flow', () => { - test('enters step 2 of 2 when proper database is selected', async () => { - userEvent.click( - screen.getByRole('button', { - name: /sqlite/i, - }), - ); - - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - }); - - test('runs fetchResource when "Connect" is clicked', () => { - /* ---------- 🐞 TODO (lyndsiWilliams): function mock is not currently working 🐞 ---------- - - // Mock useSingleViewResource - const mockUseSingleViewResource = jest.fn(); - mockUseSingleViewResource.mockImplementation(useSingleViewResource); - - const { fetchResource } = mockUseSingleViewResource('database'); - - // Invalid hook call? 
- userEvent.click(screen.getByRole('button', { name: 'Connect' })); - expect(fetchResource).toHaveBeenCalled(); - - The line below makes the linter happy */ - expect.anything(); - }); - - describe('step 2 component interaction', () => { - test('properly interacts with textboxes', async () => { - userEvent.click( - screen.getByRole('button', { - name: /sqlite/i, - }), - ); - - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - const dbNametextBox = screen.getByTestId('database-name-input'); - expect(dbNametextBox).toHaveValue('SQLite'); - - userEvent.type(dbNametextBox, 'Different text'); - expect(dbNametextBox).toHaveValue('SQLiteDifferent text'); - - const sqlAlchemyURItextBox = screen.getByTestId( - 'sqlalchemy-uri-input', - ); - expect(sqlAlchemyURItextBox).toHaveValue(''); - - userEvent.type(sqlAlchemyURItextBox, 'Different text'); - expect(sqlAlchemyURItextBox).toHaveValue('Different text'); - }); - - test('runs testDatabaseConnection when "TEST CONNECTION" is clicked', () => { - /* ---------- 🐞 TODO (lyndsiWilliams): function mock is not currently working 🐞 ---------- - - // Mock testDatabaseConnection - const mockTestDatabaseConnection = jest.fn(); - mockTestDatabaseConnection.mockImplementation(testDatabaseConnection); - - userEvent.click( - screen.getByRole('button', { - name: /test connection/i, - }), - ); - - expect(mockTestDatabaseConnection).toHaveBeenCalled(); - - The line below makes the linter happy */ - expect.anything(); - }); - }); - - describe('SSH Tunnel Form interaction', () => { - test('properly interacts with SSH Tunnel form textboxes for dynamic form', async () => { - userEvent.click( - screen.getByRole('button', { - name: /postgresql/i, - }), - ); - expect(await screen.findByText(/step 2 of 3/i)).toBeInTheDocument(); - const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); - userEvent.click(SSHTunnelingToggle); - const SSHTunnelServerAddressInput = screen.getByTestId( - 'ssh-tunnel-server_address-input', - ); - expect(SSHTunnelServerAddressInput).toHaveValue(''); - userEvent.type(SSHTunnelServerAddressInput, 'localhost'); - expect(SSHTunnelServerAddressInput).toHaveValue('localhost'); - const SSHTunnelServerPortInput = screen.getByTestId( - 'ssh-tunnel-server_port-input', - ); - expect(SSHTunnelServerPortInput).toHaveValue(null); - userEvent.type(SSHTunnelServerPortInput, '22'); - expect(SSHTunnelServerPortInput).toHaveValue(22); - const SSHTunnelUsernameInput = screen.getByTestId( - 'ssh-tunnel-username-input', - ); - expect(SSHTunnelUsernameInput).toHaveValue(''); - userEvent.type(SSHTunnelUsernameInput, 'test'); - expect(SSHTunnelUsernameInput).toHaveValue('test'); - const SSHTunnelPasswordInput = screen.getByTestId( - 'ssh-tunnel-password-input', - ); - expect(SSHTunnelPasswordInput).toHaveValue(''); - userEvent.type(SSHTunnelPasswordInput, 'pass'); - expect(SSHTunnelPasswordInput).toHaveValue('pass'); - }); - - test('properly interacts with SSH Tunnel form textboxes', async () => { - userEvent.click( - screen.getByRole('button', { - name: /sqlite/i, - }), - ); - - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); - userEvent.click(SSHTunnelingToggle); - const SSHTunnelServerAddressInput = screen.getByTestId( - 'ssh-tunnel-server_address-input', - ); - expect(SSHTunnelServerAddressInput).toHaveValue(''); - userEvent.type(SSHTunnelServerAddressInput, 'localhost'); - expect(SSHTunnelServerAddressInput).toHaveValue('localhost'); - const 
SSHTunnelServerPortInput = screen.getByTestId( - 'ssh-tunnel-server_port-input', - ); - expect(SSHTunnelServerPortInput).toHaveValue(null); - userEvent.type(SSHTunnelServerPortInput, '22'); - expect(SSHTunnelServerPortInput).toHaveValue(22); - const SSHTunnelUsernameInput = screen.getByTestId( - 'ssh-tunnel-username-input', - ); - expect(SSHTunnelUsernameInput).toHaveValue(''); - userEvent.type(SSHTunnelUsernameInput, 'test'); - expect(SSHTunnelUsernameInput).toHaveValue('test'); - const SSHTunnelPasswordInput = screen.getByTestId( - 'ssh-tunnel-password-input', - ); - expect(SSHTunnelPasswordInput).toHaveValue(''); - userEvent.type(SSHTunnelPasswordInput, 'pass'); - expect(SSHTunnelPasswordInput).toHaveValue('pass'); - }); - - test('if the SSH Tunneling toggle is not true, no inputs are displayed', async () => { - userEvent.click( - screen.getByRole('button', { - name: /sqlite/i, - }), - ); - - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); - expect(SSHTunnelingToggle).toBeVisible(); - const SSHTunnelServerAddressInput = screen.queryByTestId( - 'ssh-tunnel-server_address-input', - ); - expect(SSHTunnelServerAddressInput).not.toBeInTheDocument(); - const SSHTunnelServerPortInput = screen.queryByTestId( - 'ssh-tunnel-server_port-input', - ); - expect(SSHTunnelServerPortInput).not.toBeInTheDocument(); - const SSHTunnelUsernameInput = screen.queryByTestId( - 'ssh-tunnel-username-input', - ); - expect(SSHTunnelUsernameInput).not.toBeInTheDocument(); - const SSHTunnelPasswordInput = screen.queryByTestId( - 'ssh-tunnel-password-input', - ); - expect(SSHTunnelPasswordInput).not.toBeInTheDocument(); - }); - - test('If user changes the login method, the inputs change', async () => { - userEvent.click( - screen.getByRole('button', { - name: /sqlite/i, - }), - ); - - expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); - const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); - userEvent.click(SSHTunnelingToggle); - const SSHTunnelUsePasswordInput = screen.getByTestId( - 'ssh-tunnel-use_password-radio', - ); - expect(SSHTunnelUsePasswordInput).toBeVisible(); - const SSHTunnelUsePrivateKeyInput = screen.getByTestId( - 'ssh-tunnel-use_private_key-radio', - ); - expect(SSHTunnelUsePrivateKeyInput).toBeVisible(); - const SSHTunnelPasswordInput = screen.getByTestId( - 'ssh-tunnel-password-input', - ); - // By default, we use Password as login method - expect(SSHTunnelPasswordInput).toBeVisible(); - // Change the login method to use private key - userEvent.click(SSHTunnelUsePrivateKeyInput); - const SSHTunnelPrivateKeyInput = screen.getByTestId( - 'ssh-tunnel-private_key-input', - ); - expect(SSHTunnelPrivateKeyInput).toBeVisible(); - const SSHTunnelPrivateKeyPasswordInput = screen.getByTestId( - 'ssh-tunnel-private_key_password-input', - ); - expect(SSHTunnelPrivateKeyPasswordInput).toBeVisible(); - }); - }); - }); - }); -}); diff --git a/superset-frontend/src/features/databases/DatabaseModal/index.2.test.tsx b/superset-frontend/src/features/databases/DatabaseModal/index.test.tsx similarity index 87% rename from superset-frontend/src/features/databases/DatabaseModal/index.2.test.tsx rename to superset-frontend/src/features/databases/DatabaseModal/index.test.tsx index 7fee254d6e2cc..bd3eb3bec9796 100644 --- a/superset-frontend/src/features/databases/DatabaseModal/index.2.test.tsx +++ b/superset-frontend/src/features/databases/DatabaseModal/index.test.tsx @@ -1140,6 +1140,215 @@ 
describe('DatabaseModal', () => { expect(sqlAlchemyFormStepText).toBeVisible(); }); + describe('SQL Alchemy form flow', () => { + test('enters step 2 of 2 when proper database is selected', async () => { + userEvent.click( + screen.getByRole('button', { + name: /sqlite/i, + }), + ); + + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + }); + + test('runs fetchResource when "Connect" is clicked', () => { + /* ---------- 🐞 TODO (lyndsiWilliams): function mock is not currently working 🐞 ---------- + + // Mock useSingleViewResource + const mockUseSingleViewResource = jest.fn(); + mockUseSingleViewResource.mockImplementation(useSingleViewResource); + + const { fetchResource } = mockUseSingleViewResource('database'); + + // Invalid hook call? + userEvent.click(screen.getByRole('button', { name: 'Connect' })); + expect(fetchResource).toHaveBeenCalled(); + + The line below makes the linter happy */ + expect.anything(); + }); + + describe('step 2 component interaction', () => { + test('properly interacts with textboxes', async () => { + userEvent.click( + screen.getByRole('button', { + name: /sqlite/i, + }), + ); + + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + const dbNametextBox = screen.getByTestId('database-name-input'); + expect(dbNametextBox).toHaveValue('SQLite'); + + userEvent.type(dbNametextBox, 'Different text'); + expect(dbNametextBox).toHaveValue('SQLiteDifferent text'); + + const sqlAlchemyURItextBox = screen.getByTestId( + 'sqlalchemy-uri-input', + ); + expect(sqlAlchemyURItextBox).toHaveValue(''); + + userEvent.type(sqlAlchemyURItextBox, 'Different text'); + expect(sqlAlchemyURItextBox).toHaveValue('Different text'); + }); + + test('runs testDatabaseConnection when "TEST CONNECTION" is clicked', () => { + /* ---------- 🐞 TODO (lyndsiWilliams): function mock is not currently working 🐞 ---------- + + // Mock testDatabaseConnection + const mockTestDatabaseConnection = jest.fn(); + mockTestDatabaseConnection.mockImplementation(testDatabaseConnection); + + userEvent.click( + screen.getByRole('button', { + name: /test connection/i, + }), + ); + + expect(mockTestDatabaseConnection).toHaveBeenCalled(); + + The line below makes the linter happy */ + expect.anything(); + }); + }); + + describe('SSH Tunnel Form interaction', () => { + test('properly interacts with SSH Tunnel form textboxes for dynamic form', async () => { + userEvent.click( + screen.getByRole('button', { + name: /postgresql/i, + }), + ); + expect(await screen.findByText(/step 2 of 3/i)).toBeInTheDocument(); + const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); + userEvent.click(SSHTunnelingToggle); + const SSHTunnelServerAddressInput = screen.getByTestId( + 'ssh-tunnel-server_address-input', + ); + expect(SSHTunnelServerAddressInput).toHaveValue(''); + userEvent.type(SSHTunnelServerAddressInput, 'localhost'); + expect(SSHTunnelServerAddressInput).toHaveValue('localhost'); + const SSHTunnelServerPortInput = screen.getByTestId( + 'ssh-tunnel-server_port-input', + ); + expect(SSHTunnelServerPortInput).toHaveValue(null); + userEvent.type(SSHTunnelServerPortInput, '22'); + expect(SSHTunnelServerPortInput).toHaveValue(22); + const SSHTunnelUsernameInput = screen.getByTestId( + 'ssh-tunnel-username-input', + ); + expect(SSHTunnelUsernameInput).toHaveValue(''); + userEvent.type(SSHTunnelUsernameInput, 'test'); + expect(SSHTunnelUsernameInput).toHaveValue('test'); + const SSHTunnelPasswordInput = screen.getByTestId( + 'ssh-tunnel-password-input', + ); + 
expect(SSHTunnelPasswordInput).toHaveValue(''); + userEvent.type(SSHTunnelPasswordInput, 'pass'); + expect(SSHTunnelPasswordInput).toHaveValue('pass'); + }); + + test('properly interacts with SSH Tunnel form textboxes', async () => { + userEvent.click( + screen.getByRole('button', { + name: /sqlite/i, + }), + ); + + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); + userEvent.click(SSHTunnelingToggle); + const SSHTunnelServerAddressInput = screen.getByTestId( + 'ssh-tunnel-server_address-input', + ); + expect(SSHTunnelServerAddressInput).toHaveValue(''); + userEvent.type(SSHTunnelServerAddressInput, 'localhost'); + expect(SSHTunnelServerAddressInput).toHaveValue('localhost'); + const SSHTunnelServerPortInput = screen.getByTestId( + 'ssh-tunnel-server_port-input', + ); + expect(SSHTunnelServerPortInput).toHaveValue(null); + userEvent.type(SSHTunnelServerPortInput, '22'); + expect(SSHTunnelServerPortInput).toHaveValue(22); + const SSHTunnelUsernameInput = screen.getByTestId( + 'ssh-tunnel-username-input', + ); + expect(SSHTunnelUsernameInput).toHaveValue(''); + userEvent.type(SSHTunnelUsernameInput, 'test'); + expect(SSHTunnelUsernameInput).toHaveValue('test'); + const SSHTunnelPasswordInput = screen.getByTestId( + 'ssh-tunnel-password-input', + ); + expect(SSHTunnelPasswordInput).toHaveValue(''); + userEvent.type(SSHTunnelPasswordInput, 'pass'); + expect(SSHTunnelPasswordInput).toHaveValue('pass'); + }); + + test('if the SSH Tunneling toggle is not true, no inputs are displayed', async () => { + userEvent.click( + screen.getByRole('button', { + name: /sqlite/i, + }), + ); + + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); + expect(SSHTunnelingToggle).toBeVisible(); + const SSHTunnelServerAddressInput = screen.queryByTestId( + 'ssh-tunnel-server_address-input', + ); + expect(SSHTunnelServerAddressInput).not.toBeInTheDocument(); + const SSHTunnelServerPortInput = screen.queryByTestId( + 'ssh-tunnel-server_port-input', + ); + expect(SSHTunnelServerPortInput).not.toBeInTheDocument(); + const SSHTunnelUsernameInput = screen.queryByTestId( + 'ssh-tunnel-username-input', + ); + expect(SSHTunnelUsernameInput).not.toBeInTheDocument(); + const SSHTunnelPasswordInput = screen.queryByTestId( + 'ssh-tunnel-password-input', + ); + expect(SSHTunnelPasswordInput).not.toBeInTheDocument(); + }); + + test('If user changes the login method, the inputs change', async () => { + userEvent.click( + screen.getByRole('button', { + name: /sqlite/i, + }), + ); + + expect(await screen.findByText(/step 2 of 2/i)).toBeInTheDocument(); + const SSHTunnelingToggle = screen.getByTestId('ssh-tunnel-switch'); + userEvent.click(SSHTunnelingToggle); + const SSHTunnelUsePasswordInput = screen.getByTestId( + 'ssh-tunnel-use_password-radio', + ); + expect(SSHTunnelUsePasswordInput).toBeVisible(); + const SSHTunnelUsePrivateKeyInput = screen.getByTestId( + 'ssh-tunnel-use_private_key-radio', + ); + expect(SSHTunnelUsePrivateKeyInput).toBeVisible(); + const SSHTunnelPasswordInput = screen.getByTestId( + 'ssh-tunnel-password-input', + ); + // By default, we use Password as login method + expect(SSHTunnelPasswordInput).toBeVisible(); + // Change the login method to use private key + userEvent.click(SSHTunnelUsePrivateKeyInput); + const SSHTunnelPrivateKeyInput = screen.getByTestId( + 'ssh-tunnel-private_key-input', + ); + 
expect(SSHTunnelPrivateKeyInput).toBeVisible(); + const SSHTunnelPrivateKeyPasswordInput = screen.getByTestId( + 'ssh-tunnel-private_key_password-input', + ); + expect(SSHTunnelPrivateKeyPasswordInput).toBeVisible(); + }); + }); + }); + describe('Dynamic form flow', () => { test('enters step 2 of 3 when proper database is selected', async () => { expect(await screen.findByText(/step 1 of 3/i)).toBeInTheDocument(); From a7e4eddb9fd58f52c452b34e20ae41adf417c466 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 4 Jan 2025 13:48:53 -0800 Subject: [PATCH 15/26] lining up the files for upload/download --- .github/workflows/superset-frontend.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index fa5efa974af1e..92b220f112af4 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -80,7 +80,7 @@ jobs: uses: actions/upload-artifact@v4 with: name: coverage-artifacts-${{ matrix.shard }} - path: coverage/ + path: superset-frontend/coverage report-coverage: needs: [sharded-jest-tests] @@ -92,7 +92,7 @@ jobs: with: name: coverage-artifacts merge-multiple: true - pattern: coverage/* + pattern: superset-frontend/coverage/* - name: Show files run: find . From 9f1f594d10a90c41ad004e43fb095da65776353b Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 4 Jan 2025 14:02:02 -0800 Subject: [PATCH 16/26] downloading --- .github/workflows/superset-frontend.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 92b220f112af4..08ff1b4b35d06 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -90,9 +90,9 @@ jobs: - name: Download Docker Image Artifact uses: actions/download-artifact@v4 with: - name: coverage-artifacts + pattern: coverage-artifacts-* merge-multiple: true - pattern: superset-frontend/coverage/* + path: coverage/ - name: Show files run: find . From ddb87d365f64da21372dae2e4dc3e944c6d1bf10 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 4 Jan 2025 16:54:46 -0800 Subject: [PATCH 17/26] fixing merge --- .github/workflows/superset-frontend.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 08ff1b4b35d06..e768c58ca1c40 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -91,14 +91,14 @@ jobs: uses: actions/download-artifact@v4 with: pattern: coverage-artifacts-* - merge-multiple: true + merge-multiple: false path: coverage/ - name: Show files run: find . 
- name: Merge Code Coverage - run: npx nyc merge coverage/ merged-output/merged-coverage.json + run: npx nyc merge coverage/ merged-output/coverage-summary.json - name: Upload code coverage uses: codecov/codecov-action@v5 @@ -106,6 +106,7 @@ jobs: flags: javascript token: ${{ secrets.CODECOV_TOKEN }} verbose: true + directory: ./merged-output core-cover: needs: frontend-build From 5aab3b6da71f1505647c4cf1dae1d50b6326b8a4 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 4 Jan 2025 17:07:43 -0800 Subject: [PATCH 18/26] specify files --- .github/workflows/superset-frontend.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index e768c58ca1c40..082332bb89307 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -106,7 +106,7 @@ jobs: flags: javascript token: ${{ secrets.CODECOV_TOKEN }} verbose: true - directory: ./merged-output + files: merged-output/coverage-summary.json core-cover: needs: frontend-build From cbf7d1e20ad58218e959cd9d18d7c2ad5beb964b Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 4 Jan 2025 17:24:06 -0800 Subject: [PATCH 19/26] add slug --- .github/workflows/superset-frontend.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 082332bb89307..59914963c02a6 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -107,6 +107,7 @@ jobs: token: ${{ secrets.CODECOV_TOKEN }} verbose: true files: merged-output/coverage-summary.json + slug: apache/superset core-cover: needs: frontend-build From 954788a91d13fb53c910478603a61b6af66fbc29 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Sat, 4 Jan 2025 17:30:16 -0800 Subject: [PATCH 20/26] bump ubuntu --- .github/workflows/superset-frontend.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 59914963c02a6..d546ac76dc4d9 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -61,7 +61,7 @@ jobs: matrix: shard: [1, 2, 3, 4, 5, 6, 7, 8] fail-fast: false - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Load Docker Image run: | @@ -85,7 +85,7 @@ jobs: report-coverage: needs: [sharded-jest-tests] if: needs.frontend-build.result == 'success' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Download Docker Image Artifact uses: actions/download-artifact@v4 @@ -112,7 +112,7 @@ jobs: core-cover: needs: frontend-build if: needs.frontend-build.result == 'success' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Load Docker Image run: | @@ -126,7 +126,7 @@ jobs: lint-frontend: needs: frontend-build if: needs.frontend-build.result == 'success' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Load Docker Image run: | @@ -144,7 +144,7 @@ jobs: validate-frontend: needs: frontend-build if: needs.frontend-build.result == 'success' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Load Docker Image run: | From be196d923dac69ee06753112d1431abe9a236589 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Tue, 7 Jan 2025 12:42:59 -0800 Subject: [PATCH 21/26] trying with artifacts --- .github/workflows/superset-frontend.yml | 79 ++++++++++++++++--------- 1 file changed, 50 insertions(+), 29 deletions(-) diff --git 
a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index d546ac76dc4d9..c9c945ad2deb3 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -15,30 +15,22 @@ concurrency: env: TAG: apache/superset:GHA-${{ github.run_id }} + jobs: frontend-build: runs-on: ubuntu-24.04 steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + - name: Checkout Code uses: actions/checkout@v4 with: persist-credentials: false - - name: Check for file changes + - name: Check for File Changes id: check uses: ./.github/actions/change-detector/ with: token: ${{ secrets.GITHUB_TOKEN }} - - name: Setup Docker Environment - if: steps.check.outputs.frontend - uses: ./.github/actions/setup-docker - with: - dockerhub-user: ${{ secrets.DOCKERHUB_USER }} - dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }} - build: "true" - install-docker-compose: "false" - - name: Setup supersetbot if: steps.check.outputs.frontend uses: ./.github/actions/setup-supersetbot/ @@ -52,7 +44,19 @@ jobs: supersetbot docker \ --push \ --preset superset-node-ci \ - --platform "linux/amd64" \ + --platform "linux/amd64" + + - name: Save Docker Image as Artifact + if: steps.check.outputs.frontend + run: | + docker save $TAG | gzip > docker-image.tar.gz + + - name: Upload Docker Image Artifact + if: steps.check.outputs.frontend + uses: actions/upload-artifact@v4 + with: + name: docker-image + path: docker-image.tar.gz sharded-jest-tests: needs: frontend-build @@ -63,9 +67,13 @@ jobs: fail-fast: false runs-on: ubuntu-24.04 steps: + - name: Download Docker Image Artifact + uses: actions/download-artifact@v4 + with: + name: docker-image + - name: Load Docker Image - run: | - docker pull $TAG + run: docker load < docker-image.tar.gz - name: npm run test with coverage run: | @@ -76,7 +84,7 @@ jobs: bash -c \ "npm run test -- --coverage --shard=${{ matrix.shard }}/8 --coverageReporters=json-summary" - - name: Upload coverage artifact + - name: Upload Coverage Artifact uses: actions/upload-artifact@v4 with: name: coverage-artifacts-${{ matrix.shard }} @@ -87,20 +95,19 @@ jobs: if: needs.frontend-build.result == 'success' runs-on: ubuntu-24.04 steps: - - name: Download Docker Image Artifact + - name: Download Coverage Artifacts uses: actions/download-artifact@v4 with: - pattern: coverage-artifacts-* - merge-multiple: false + name: coverage-artifacts-* path: coverage/ - - name: Show files - run: find . + - name: Show Files + run: find coverage/ - name: Merge Code Coverage run: npx nyc merge coverage/ merged-output/coverage-summary.json - - name: Upload code coverage + - name: Upload Code Coverage uses: codecov/codecov-action@v5 with: flags: javascript @@ -114,9 +121,13 @@ jobs: if: needs.frontend-build.result == 'success' runs-on: ubuntu-24.04 steps: + - name: Download Docker Image Artifact + uses: actions/download-artifact@v4 + with: + name: docker-image + - name: Load Docker Image - run: | - docker pull $TAG + run: docker load < docker-image.tar.gz - name: superset-ui/core coverage run: | @@ -128,14 +139,19 @@ jobs: if: needs.frontend-build.result == 'success' runs-on: ubuntu-24.04 steps: + - name: Download Docker Image Artifact + uses: actions/download-artifact@v4 + with: + name: docker-image + - name: Load Docker Image - run: | - docker pull $TAG + run: docker load < docker-image.tar.gz - name: eslint run: | docker run --rm $TAG bash -c \ "npm i && npm run eslint -- . 
--quiet" + - name: tsc run: | docker run --rm $TAG bash -c \ @@ -146,15 +162,20 @@ jobs: if: needs.frontend-build.result == 'success' runs-on: ubuntu-24.04 steps: + - name: Download Docker Image Artifact + uses: actions/download-artifact@v4 + with: + name: docker-image + - name: Load Docker Image - run: | - docker pull $TAG + run: docker load < docker-image.tar.gz - - name: Build plugins packages + - name: Build Plugins Packages run: | docker run --rm $TAG bash -c \ "npm run plugins:build" - - name: Build plugins Storybook + + - name: Build Plugins Storybook run: | docker run --rm $TAG bash -c \ "npm run plugins:build-storybook" From 87b5bd0a3d042bd3ff99a59bf57a3e2e88ca1868 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Tue, 7 Jan 2025 12:48:08 -0800 Subject: [PATCH 22/26] always load --- .github/workflows/superset-frontend.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index c9c945ad2deb3..bddb2ae873249 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -42,7 +42,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | supersetbot docker \ - --push \ + --load \ --preset superset-node-ci \ --platform "linux/amd64" From c1228d6d20c69290c0c40d3b5e19e2a7b2a81425 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Tue, 7 Jan 2025 12:56:10 -0800 Subject: [PATCH 23/26] no --load --- .github/workflows/superset-frontend.yml | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index bddb2ae873249..6f33bbcf60859 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -31,20 +31,17 @@ jobs: with: token: ${{ secrets.GITHUB_TOKEN }} - - name: Setup supersetbot - if: steps.check.outputs.frontend - uses: ./.github/actions/setup-supersetbot/ - - name: Build Docker Image if: steps.check.outputs.frontend shell: bash env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - supersetbot docker \ - --load \ - --preset superset-node-ci \ - --platform "linux/amd64" + docker buildx build \ + -t $TAG \ + --cache-from=type=registry,ref=apache/superset-cache:3.10-slim-bookworm \ + --target superset-node-ci \ + . 
- name: Save Docker Image as Artifact if: steps.check.outputs.frontend From db4d21635f4809fbe16d60c4ba907bd203877bd9 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Tue, 7 Jan 2025 13:32:23 -0800 Subject: [PATCH 24/26] fix merge --- .github/workflows/superset-frontend.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index 6f33bbcf60859..4a67958663adb 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -95,7 +95,7 @@ jobs: - name: Download Coverage Artifacts uses: actions/download-artifact@v4 with: - name: coverage-artifacts-* + pattern: coverage-artifacts-* path: coverage/ - name: Show Files From b61e54608cbc36ed9cd1c6eca9e072bd987c938c Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Tue, 7 Jan 2025 15:12:41 -0800 Subject: [PATCH 25/26] merge all heads --- superset-frontend/package-lock.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json index b7000a02687f5..693c8ac646658 100644 --- a/superset-frontend/package-lock.json +++ b/superset-frontend/package-lock.json @@ -57127,7 +57127,7 @@ "license": "Apache-2.0", "dependencies": { "@data-ui/event-flow": "^0.0.84", - "@emotion/cache": "^11.4.0", + "@emotion/cache": "^11.14.0", "@emotion/react": "^11.14.0", "@emotion/styled": "^11.14.0", "@mihkeleidast/storybook-addon-source": "^1.0.1", @@ -67792,7 +67792,7 @@ "@babel/preset-react": "^7.26.3", "@babel/preset-typescript": "^7.23.3", "@data-ui/event-flow": "^0.0.84", - "@emotion/cache": "^11.4.0", + "@emotion/cache": "^11.14.0", "@emotion/react": "^11.14.0", "@emotion/styled": "^11.14.0", "@mihkeleidast/storybook-addon-source": "^1.0.1", From 9c6b78bde0826c53422fc36537dbe98c96a75d19 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Tue, 7 Jan 2025 16:03:58 -0800 Subject: [PATCH 26/26] merge heads --- superset/config.py | 2 +- .../2025-01-07_16-03_eb1c288c71c4_.py | 35 +++++++++++++++++++ 2 files changed, 36 insertions(+), 1 deletion(-) create mode 100644 superset/migrations/versions/2025-01-07_16-03_eb1c288c71c4_.py diff --git a/superset/config.py b/superset/config.py index 3fc94cd27c18e..5d03016298aae 100644 --- a/superset/config.py +++ b/superset/config.py @@ -136,7 +136,7 @@ def _try_json_readsha(filepath: str, length: int) -> str | None: # generated on install via setup.py. In the event that we're # actually running Superset, we will have already installed, # therefore it WILL exist. When unit tests are running, however, -# it WILL NOT exist, so we fall back to reading package.json +# it WILL NOT exist, so we fall back on reading package.json VERSION_STRING = _try_json_readversion(VERSION_INFO_FILE) or _try_json_readversion( PACKAGE_JSON_FILE ) diff --git a/superset/migrations/versions/2025-01-07_16-03_eb1c288c71c4_.py b/superset/migrations/versions/2025-01-07_16-03_eb1c288c71c4_.py new file mode 100644 index 0000000000000..86780f7fde5fb --- /dev/null +++ b/superset/migrations/versions/2025-01-07_16-03_eb1c288c71c4_.py @@ -0,0 +1,35 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""empty message + +Revision ID: eb1c288c71c4 +Revises: ('df3d7e2eb9a4', '7b17aa722e30') +Create Date: 2025-01-07 16:03:44.936921 + +""" + +# revision identifiers, used by Alembic. +revision = "eb1c288c71c4" +down_revision = ("df3d7e2eb9a4", "7b17aa722e30") + + +def upgrade(): + pass + + +def downgrade(): + pass
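
Note on the final patch: the new revision eb1c288c71c4 is an empty Alembic "merge" migration. Its only job is to give the two parallel heads ('df3d7e2eb9a4' and '7b17aa722e30') a single common descendant so the migration graph has one head again and `db upgrade` has an unambiguous target. As a hedged illustration (not part of the patch), the sketch below shows one way to confirm that the merge leaves exactly one head; the ini path and script location are assumptions chosen for the example, not values taken from this changeset.

    # Sketch only: verify that a merge revision collapses multiple Alembic
    # heads into one. Assumes a standard Alembic setup; "alembic.ini" and
    # the script_location value are illustrative assumptions.
    from alembic.config import Config
    from alembic.script import ScriptDirectory

    config = Config("alembic.ini")  # hypothetical config file
    config.set_main_option("script_location", "superset/migrations")  # assumed path

    script = ScriptDirectory.from_config(config)
    heads = script.get_heads()  # current head revision ids in the graph

    # After eb1c288c71c4 lands, both former heads become ancestors of the
    # single remaining head, so exactly one id should be reported here.
    assert len(heads) == 1, f"expected one head, found: {heads}"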