diff --git a/.buildkite/backend-benchmark.sh b/.buildkite/backend-benchmark.sh deleted file mode 100644 index 5995e3cad0..0000000000 --- a/.buildkite/backend-benchmark.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash - -set -e -set -x -trap 'date' DEBUG - -pipx install poetry - -pyenv install $PYTHON_VERSION -pyenv local $PYTHON_VERSION - -poetry install - -if ! command -v codspeed-runner &> /dev/null; then - CODSPEED_RUNNER_VERSION=v2.1.0 - curl -fsSL https://github.com/CodSpeedHQ/runner/releases/download/$CODSPEED_RUNNER_VERSION/codspeed-runner-installer.sh | bash -fi -source "$HOME/.cargo/env" - -codspeed-runner --token=$$CODSPEED_TOKEN -- poetry run pytest -v backend/tests/benchmark/ --codspeed diff --git a/.buildkite/e2e-testing.sh b/.buildkite/e2e-testing.sh deleted file mode 100644 index 320fa67ef2..0000000000 --- a/.buildkite/e2e-testing.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash - -set -e -set -x -trap 'date' DEBUG - -(cd frontend && npm install) - -pip install toml invoke - -export INFRAHUB_BUILD_NAME="infrahub-$(hostname)" -export INFRAHUB_SERVER_PORT=$(shuf -n 1 -i 10000-60000) # TODO: use a more deterministic method - -invoke demo.build -invoke demo.pull -invoke demo.destroy -invoke demo.start -invoke demo.load-infra-schema - -invoke demo.status -invoke demo.load-infra-data - -export INFRAHUB_ADDRESS="http://localhost:${INFRAHUB_SERVER_PORT}" - -invoke demo.infra-git-import demo.infra-git-create - -if [ "$E2E_TEST_FRAMEWORK" = "playwright" ]; then - (cd frontend && npx playwright install chromium && npm run ci:test:e2e) -else - export CYPRESS_BASE_URL=$INFRAHUB_ADDRESS - (cd frontend && npm run cypress:run:e2e) -fi - -docker ps -a -docker logs "${INFRAHUB_BUILD_NAME}-infrahub-server-1" -docker logs "${INFRAHUB_BUILD_NAME}-infrahub-git-1" -docker logs "${INFRAHUB_BUILD_NAME}-infrahub-git-2" -docker logs "${INFRAHUB_BUILD_NAME}-database-1" - -invoke demo.status diff --git a/.buildkite/files-changed.py b/.buildkite/files-changed.py deleted file mode 100755 index 8cdff21805..0000000000 --- a/.buildkite/files-changed.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python3 - -import os -import subprocess -import sys -from pathlib import Path -from typing import List - -import jinja2 -import yaml -from wcmatch import glob - -pullrequest = os.getenv("BUILDKITE_PULL_REQUEST") -base_branch = os.getenv("BUILDKITE_PULL_REQUEST_BASE_BRANCH") - -diff_command = "git diff --name-only HEAD^" -if pullrequest is not None and pullrequest != "false": - diff_command = f"git diff --name-only $(git merge-base origin/{base_branch} HEAD).." 
- -changed_files = subprocess.run(diff_command, shell=True, check=True, capture_output=True, text=True).stdout.splitlines() - -print(changed_files, file=sys.stderr) - -basedir = Path(__file__).parent.parent.resolve() -filters = yaml.safe_load(Path(basedir.joinpath(".github/file-filters.yml")).read_text()) - - -def match_filters(files: List[str], filters: List): - def flatten(xss): - new_list = [] - for elem in xss: - if type(elem) is list: - new_list = new_list + elem - else: - new_list.append(elem) - return new_list - - if len(glob.globfilter(files, flatten(filters), flags=glob.GLOBSTAR | glob.BRACE)) > 0: - return True - return False - - -result = dict() -for filter_name, filter_list in filters.items(): - if pullrequest == "false": - # Always enable all steps if not a pull request - result[filter_name] = True - else: - result[filter_name] = match_filters(changed_files, filter_list) - - -template = jinja2.Template(Path(basedir.joinpath(".buildkite/pipeline.yml.j2")).read_text()) -pipeline = template.render(result) -print(pipeline) diff --git a/.buildkite/javascript-lint.sh b/.buildkite/javascript-lint.sh deleted file mode 100644 index 48c2285bc4..0000000000 --- a/.buildkite/javascript-lint.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -set -e -set -x -trap 'date' DEBUG - -cd frontend -npm install -npm run eslint diff --git a/.buildkite/pipeline.yml.j2 b/.buildkite/pipeline.yml.j2 deleted file mode 100644 index d100331197..0000000000 --- a/.buildkite/pipeline.yml.j2 +++ /dev/null @@ -1,110 +0,0 @@ -steps: - ##### yaml-lint -{%- if yaml_all %} - - label: ":github: yaml-lint" - key: yaml-lint - depends_on: - - files-changed - allow_dependency_failure: false - timeout_in_minutes: 5 - agents: - queue: small-runner - command: .buildkite/yaml-lint.sh -{%- endif %} - ##### python-lint -{%- if python_all %} - - label: ":github: python-lint" - key: python-lint - depends_on: - - files-changed - allow_dependency_failure: false - timeout_in_minutes: 5 - agents: - queue: small-runner - command: .buildkite/python-lint.sh -{%- endif %} - ##### javascript-lint -{%- if javascript_all %} - - label: ":github: javascript-lint" - key: javascript-lint - depends_on: - - files-changed - allow_dependency_failure: false - timeout_in_minutes: 5 - agents: - queue: small-runner - plugins: - - sv-oss/node-n#v0.1.2: - node-version: v20 - command: .buildkite/javascript-lint.sh -{%- endif %} - ##### e2e-testing-cypress - - label: ":github: E2E-testing-cypress" - key: E2E-testing-cypress - depends_on: - - files-changed - {%- if javascript_all %} - - javascript-lint - {%- endif %} - {%- if yaml_all %} - - yaml-lint - {%- endif %} - {%- if python_all %} - - python-lint - {%- endif %} - allow_dependency_failure: false - timeout_in_minutes: 40 - env: - INFRAHUB_DB_TYPE: memgraph - agents: - queue: huge-runner - plugins: - - sv-oss/node-n#v0.1.2: - node-version: v20 - command: .buildkite/e2e-testing.sh - ##### e2e-testing-playwright - - label: ":github: E2E-testing-playwright" - key: E2E-testing-playwright - depends_on: - - files-changed - {%- if javascript_all %} - - javascript-lint - {%- endif %} - {%- if yaml_all %} - - yaml-lint - {%- endif %} - {%- if python_all %} - - python-lint - {%- endif %} - allow_dependency_failure: false - timeout_in_minutes: 40 - env: - INFRAHUB_DB_TYPE: memgraph - E2E_TEST_FRAMEWORK: playwright - BUILDKITE_ANALYTICS_TOKEN: $BUILDKITE_E2E_ANALYTICS_TOKEN - agents: - queue: huge-runner - artifact_paths: - - "frontend/playwright-report/**" - plugins: - - sv-oss/node-n#v0.1.2: - node-version: v20 - 
command: .buildkite/e2e-testing.sh - ##### backend benchmark -{%- if backend_files %} - - label: ":github: Backend benchmark" - key: backend-benchmark - depends_on: - - files-changed - {%- if python_all %} - - python-lint - {%- endif %} - allow_dependency_failure: false - timeout_in_minutes: 40 - env: - INFRAHUB_DB_TYPE: memgraph - PYTHON_VERSION: 3.11 - agents: - queue: huge-runner - command: .buildkite/backend-benchmark.sh -{%- endif %} \ No newline at end of file diff --git a/.buildkite/python-lint.sh b/.buildkite/python-lint.sh deleted file mode 100644 index 124aa0cb89..0000000000 --- a/.buildkite/python-lint.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -set -e -set -x -trap 'date' DEBUG - -pip install ruff==0.1.8 -ruff check --diff . -ruff format --check --diff . diff --git a/.buildkite/yaml-lint.sh b/.buildkite/yaml-lint.sh deleted file mode 100644 index 0242699ff6..0000000000 --- a/.buildkite/yaml-lint.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -set -e -set -x -trap 'date' DEBUG - -pip install yamllint==1.33.0 -yamllint . diff --git a/.dockerignore b/.dockerignore index b5c8f3ec50..2be2fd97a3 100644 --- a/.dockerignore +++ b/.dockerignore @@ -6,7 +6,9 @@ node_modules .ruff_cache **/.ruff_cache .mypy_cache +**/.mypy_cache .pytest_cache +**/.pytest_cache *.env .DS_Store .coverage @@ -20,3 +22,7 @@ frontend/playwright-report # Direnv files (https://direnv.net/) .direnv/ .envrc + +# Generated files +generated/ +query_performance_results/ \ No newline at end of file diff --git a/.github/build-docs.sh b/.github/build-docs.sh index eeb9f064b4..bc55137a01 100755 --- a/.github/build-docs.sh +++ b/.github/build-docs.sh @@ -1,8 +1,3 @@ #!/bin/sh -if [ -f docs/docusaurus.config.ts ]; then - (cd docs && npm install && npm run build) -else - npx retypeapp build docs -fi - +cd docs && npm install && npm run build diff --git a/.github/file-filters.yml b/.github/file-filters.yml index 84376b8ea7..d89a44ec82 100644 --- a/.github/file-filters.yml +++ b/.github/file-filters.yml @@ -39,6 +39,9 @@ doc_files: &doc_files - package.json - package-lock.json +helm_files: &helm_files + - "helm/**" + python_all: &python_all - "**/*.py" @@ -64,6 +67,9 @@ documentation_all: - *doc_files - *markdown_all +helm_all: + - *helm_files + sdk_all: - *sdk_files - *backend_files diff --git a/.github/labels.yml b/.github/labels.yml index 293e41ead9..8d44c34baf 100644 --- a/.github/labels.yml +++ b/.github/labels.yml @@ -47,36 +47,6 @@ description: "Maintenance task" color: "fef2c0" -# ---------------------------------- -# TYPE PYTHON SDK -# ---------------------------------- -- name: "type/feature/py-sdk" - description: "New feature or request related to the Python SDK" - color: "a2eeef" - -- name: "type/bug/py-sdk" - description: "Something isn't working as expected related to the Python SDK" - color: "d73a4a" - -- name: "type/housekeeping/py-sdk" - description: "Maintenance task related to the Python SDK" - color: "fef2c0" - -# ---------------------------------- -# TYPE CTL -# ---------------------------------- -- name: "type/feature/ctl" - description: "New feature or request related to the infrahubctl command line" - color: "a2eeef" - -- name: "type/bug/ctl" - description: "Something isn't working as expected related to the infrahubctl command line" - color: "d73a4a" - -- name: "type/housekeeping/ctl" - description: "Maintenance task related to the infrahubctl command line" - color: "fef2c0" - # ---------------------------------- # TYPE ALL # ---------------------------------- diff --git 
a/.github/python_sdk-release-drafter.yml b/.github/python_sdk-release-drafter.yml index 3f6854fa84..b6379800b2 100644 --- a/.github/python_sdk-release-drafter.yml +++ b/.github/python_sdk-release-drafter.yml @@ -3,15 +3,12 @@ categories: - title: '🚀 Features' labels: - 'type/feature' - - 'type/feature/py-sdk' - title: '🐛 Bug Fixes' labels: - 'type/bug' - - 'type/bug/py-sdk' - title: '🧰 Maintenance' labels: - 'type/housekeeping' - - 'type/housekeeping/py-sdk' change-template: '- $TITLE @$AUTHOR (#$NUMBER)' exclude-labels: - 'ci/skip-changelog' diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bf16effa35..dc20b63418 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -24,10 +24,8 @@ env: INFRAHUB_IMAGE_VER: "local" PYTEST_XDIST_WORKER_COUNT: 4 INFRAHUB_TEST_IN_DOCKER: 1 - BUILDKITE_ANALYTICS_BRANCH: ${{ github.ref }} - BUILDKITE_BRANCH: ${{ github.ref }} - BUILDKITE_COMMIT: ${{ github.sha }} VALE_VERSION: "3.0.7" + GITHUB_PR_NUMBER: ${{ github.event.pull_request.number }} jobs: # ------------------------------------------ Check Files Changes ------------------------------------------ @@ -39,6 +37,7 @@ jobs: backend: ${{ steps.changes.outputs.backend_all }} documentation: ${{ steps.changes.outputs.documentation_all }} frontend: ${{ steps.changes.outputs.frontend_all }} + helm: ${{ steps.changes.outputs.helm_all }} sdk: ${{ steps.changes.outputs.sdk_all }} sync: ${{ steps.changes.outputs.sync_all }} e2e: ${{ steps.changes.outputs.e2e_all }} @@ -59,6 +58,19 @@ jobs: filters: .github/file-filters.yml # ------------------------------------------ All Linter ------------------------------------------ + helm-lint: + if: needs.files-changed.outputs.helm == 'true' + needs: ["files-changed"] + runs-on: "ubuntu-latest" + timeout-minutes: 5 + steps: + - name: "Check out repository code" + uses: "actions/checkout@v4" + - name: "Install Helm" + uses: azure/setup-helm@v4.2.0 + - name: "Linting: helm lint" + run: "helm lint helm/" + yaml-lint: if: needs.files-changed.outputs.yaml == 'true' needs: ["files-changed"] @@ -67,19 +79,10 @@ jobs: steps: - name: "Check out repository code" uses: "actions/checkout@v4" - - name: "Identify which files have changed" - uses: dorny/paths-filter@v3 - id: changes - with: - filters: | - src: - - '**.yml' - - '**.yaml' - - 'development/workflows/ci.yml' - name: "Setup environment" - run: "pip install yamllint==1.33.0" + run: "pip install yamllint==1.35.1" - name: "Linting: yamllint" - run: "yamllint ." + run: "yamllint -s ." javascript-lint: if: needs.files-changed.outputs.javascript == 'true' @@ -111,7 +114,7 @@ jobs: - name: "Check out repository code" uses: "actions/checkout@v4" - name: "Setup environment" - run: "pip install ruff==0.1.8" + run: "pip install ruff==0.4.1" - name: "Linting: ruff check" run: "ruff check --diff ." 
- name: "Linting: ruff format" @@ -126,7 +129,7 @@ jobs: - name: "Check out repository code" uses: "actions/checkout@v4" - name: "Linting: markdownlint" - uses: DavidAnson/markdownlint-cli2-action@v15 + uses: DavidAnson/markdownlint-cli2-action@v16 with: config: .markdownlint.yaml globs: '**/*.{md,mdx}' @@ -168,12 +171,11 @@ jobs: strategy: matrix: python-version: - - "3.8" - "3.9" - "3.10" - "3.11" - "3.12" - pydantic-version: ["^1.10", "^2"] + pydantic-version: ["1.10.14", "^2"] if: | always() && !cancelled() && !contains(needs.*.result, 'failure') && @@ -210,13 +212,12 @@ jobs: - name: "Unit Tests" run: "poetry --directory python_sdk run coverage run --source=infrahub_sdk -m pytest python_sdk/tests/unit/" working-directory: ./ - env: - BUILDKITE_ANALYTICS_TOKEN: ${{ secrets.BUILDKITE_SDK_UNIT }} - name: "Create coverage file" run: "poetry --directory python_sdk run coverage xml" working-directory: ./ - name: "Coveralls : Unit Tests" uses: coverallsapp/github-action@v2 + continue-on-error: true env: COVERALLS_SERVICE_NUMBER: ${{ github.sha }} with: @@ -249,15 +250,14 @@ jobs: run: docker compose -p $INFRAHUB_BUILD_NAME down -v --remove-orphans --rmi local - name: "Build Test Image" - run: "invoke test.build" + run: "invoke dev.build" - name: "Pull External Docker Images" - run: "invoke test.pull" + run: "invoke dev.pull" - name: "Integration Tests" run: "invoke sdk.test-integration" - env: - BUILDKITE_ANALYTICS_TOKEN: ${{ secrets.BUILDKITE_SDK_INTEGRATION }} - name: "Coveralls : Integration Tests" uses: coverallsapp/github-action@v2 + continue-on-error: true env: COVERALLS_SERVICE_NUMBER: ${{ github.sha }} with: @@ -268,7 +268,10 @@ jobs: strategy: matrix: python-version: + - "3.9" + - "3.10" - "3.11" + - "3.12" if: | always() && !cancelled() && !contains(needs.*.result, 'failure') && @@ -324,20 +327,28 @@ jobs: - name: "Clear docker environment" run: docker compose -p $INFRAHUB_BUILD_NAME down -v --remove-orphans --rmi local - name: "Build Test Image" - run: "invoke test.build" + run: "invoke dev.build" - name: "Pull External Docker Images" - run: "invoke test.pull" + run: "invoke dev.pull" - name: "Unit Tests" run: "invoke backend.test-unit" - env: - BUILDKITE_ANALYTICS_TOKEN: ${{ secrets.BUILDKITE_BACKEND_UNIT_DEFAULT }} - name: "Coveralls : Unit Tests" uses: coverallsapp/github-action@v2 + continue-on-error: true env: COVERALLS_SERVICE_NUMBER: ${{ github.sha }} with: flag-name: backend-unit parallel: true + - name: Generate tracing spans + if: always() + uses: inception-health/otel-upload-test-artifact-action@v1 + with: + jobName: "backend-tests-unit" + stepName: "Unit Tests" + path: "pytest-junit.xml" + type: "junit" + githubToken: ${{ secrets.GH_TRACING_REPO_TOKEN }} backend-tests-integration: if: | @@ -350,7 +361,7 @@ jobs: group: "huge-runners" timeout-minutes: 30 env: - INFRAHUB_DB_TYPE: memgraph + INFRAHUB_DB_TYPE: neo4j steps: - name: "Check out repository code" uses: "actions/checkout@v4" @@ -363,19 +374,18 @@ jobs: - name: "Clear docker environment" run: docker compose -p $INFRAHUB_BUILD_NAME down -v --remove-orphans --rmi local - name: "Build Test Image" - run: "invoke test.build" + run: "invoke dev.build" - name: "Pull External Docker Images" - run: "invoke test.pull" + run: "invoke dev.pull" - name: "Pylint Tests" run: "invoke backend.pylint --docker" - name: "Mypy Tests" run: "invoke backend.mypy --docker" - name: "Integration Tests" run: "invoke backend.test-integration" - env: - BUILDKITE_ANALYTICS_TOKEN: ${{ secrets.BUILDKITE_BACKEND_INTEGRATION }} - name: "Coveralls 
: Integration Tests" uses: coverallsapp/github-action@v2 + continue-on-error: true env: COVERALLS_SERVICE_NUMBER: ${{ github.sha }} with: @@ -406,13 +416,11 @@ jobs: - name: "Clear docker environment" run: docker compose -p $INFRAHUB_BUILD_NAME down -v --remove-orphans --rmi local - name: "Build Test Image" - run: "invoke test.build" + run: "invoke dev.build" - name: "Pull External Docker Images" - run: "invoke test.pull" + run: "invoke dev.pull" - name: "Unit Tests" run: "invoke backend.test-unit" - env: - BUILDKITE_ANALYTICS_TOKEN: ${{ secrets.BUILDKITE_BACKEND_UNIT_NEO4J }} backend-validate-generated: if: | @@ -480,6 +488,7 @@ jobs: path: frontend/cypress/videos/* - name: "Coveralls : Unit Tests" uses: coverallsapp/github-action@v2 + continue-on-error: true env: COVERALLS_SERVICE_NUMBER: ${{ github.sha }} with: @@ -515,7 +524,7 @@ jobs: - name: "Build website" run: "invoke docs.build" - validate_generated_documentation: + validate-generated-documentation: if: | always() && !cancelled() && !contains(needs.*.result, 'failure') && @@ -534,11 +543,11 @@ jobs: - name: "Setup environment" run: "pip install invoke toml" - name: "Build Test Image" - run: "invoke test.build" + run: "invoke dev.build" - name: "Validate generated documentation" run: "invoke docs.validate --docker" - validate_documentation_style: + validate-documentation-style: if: | always() && !cancelled() && !contains(needs.*.result, 'failure') && @@ -561,87 +570,6 @@ jobs: - name: "Validate documentation style" run: "./vale ." # ------------------------------------------ E2E Tests ------------------------------------------ - E2E-testing-cypress: - needs: - - javascript-lint - - files-changed - - yaml-lint - - python-lint - if: | - always() && !cancelled() && - !contains(needs.*.result, 'failure') && - !contains(needs.*.result, 'cancelled') - runs-on: - group: huge-runners - timeout-minutes: 40 - env: - INFRAHUB_DB_TYPE: memgraph - steps: - - name: "Check out repository code" - uses: "actions/checkout@v4" - - name: Install NodeJS - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: 'npm' - cache-dependency-path: frontend/package-lock.json - - name: Install frontend dependencies - working-directory: ./frontend - run: npm install - - name: "Install Invoke" - run: "pip install toml invoke" - - - name: Select infrahub port - run: echo "INFRAHUB_SERVER_PORT=$(shuf -n 1 -i 10000-60000)" >> $GITHUB_ENV - - - name: "Set environment variables" - run: echo INFRAHUB_BUILD_NAME=infrahub-${{ runner.name }} >> $GITHUB_ENV - - name: "Set environment variables" - run: echo INFRAHUB_IMAGE_VER=local-${{ runner.name }}-${{ github.sha }} >> $GITHUB_ENV - - name: "Clear docker environment" - run: docker compose -p $INFRAHUB_BUILD_NAME down -v --remove-orphans --rmi local - - - name: Build Demo - run: "invoke demo.build" - - name: "Pull External Docker Images" - run: "invoke demo.pull" - - name: Initialize Demo - id: init-demo - run: "invoke demo.start demo.load-infra-schema" - - name: Check Demo Status - run: "invoke demo.status" - - name: Load Data - run: "invoke demo.load-infra-data" - - - name: Set infrahub address - run: echo "INFRAHUB_ADDRESS=http://localhost:${INFRAHUB_SERVER_PORT}" >> $GITHUB_ENV - - name: Set cypress endpoint - run: echo "CYPRESS_BASE_URL=$INFRAHUB_ADDRESS" >> $GITHUB_ENV - - - name: Git Repository - run: "invoke demo.infra-git-import demo.infra-git-create" - - name: Run End to End Tests - working-directory: ./frontend - run: npm run cypress:run:e2e - - name: Containers after tests - run: docker ps -a - - name: 
Upload cypress screenshots - if: failure() - uses: actions/upload-artifact@v4 - with: - name: screenshots - path: docs/media/* - - name: Display server logs - run: docker logs "${INFRAHUB_BUILD_NAME}-infrahub-server-1" - - name: Display git 1 logs - run: docker logs "${INFRAHUB_BUILD_NAME}-infrahub-git-1" - - name: Display git 2 logs - run: docker logs "${INFRAHUB_BUILD_NAME}-infrahub-git-2" - - name: Display database logs - run: docker logs "${INFRAHUB_BUILD_NAME}-database-1" - - name: Display server status - run: invoke demo.status - E2E-testing-playwright: defaults: run: @@ -659,8 +587,8 @@ jobs: group: huge-runners timeout-minutes: 40 env: - INFRAHUB_DB_TYPE: memgraph - INFRAHUB_ADDRESS: "http://localhost:8000" + INFRAHUB_DB_TYPE: neo4j + METRICS_ENDPOINT: ${{ secrets.METRICS_ENDPOINT }} steps: - name: Check out repository code uses: actions/checkout@v4 @@ -676,7 +604,27 @@ jobs: run: pip install toml invoke - name: Select infrahub port - run: echo "INFRAHUB_SERVER_PORT=$(shuf -n 1 -i 10000-60000)" >> $GITHUB_ENV + run: echo "INFRAHUB_SERVER_PORT=$(shuf -n 1 -i 10000-30000)" >> $GITHUB_ENV + - name: Select infrahub db port + run: echo "INFRAHUB_DB_BACKUP_PORT=$(shuf -n 1 -i 10000-30000)" >> $GITHUB_ENV + - name: Select vmagent port + run: echo "VMAGENT_PORT=$(shuf -n 1 -i 10000-30000)" >> $GITHUB_ENV + - name: Set job name + run: echo JOB_NAME="$GITHUB_JOB" >> $GITHUB_ENV + + - name: Enable tracing + run: echo "INFRAHUB_TRACE_ENABLE=true" >> $GITHUB_ENV + - name: Set tracing configuration + run: echo "INFRAHUB_TRACE_INSECURE=false" >> $GITHUB_ENV + - name: Set tracing configuration + run: echo "INFRAHUB_TRACE_EXPORTER_TYPE=otlp" >> $GITHUB_ENV + - name: Set tracing configuration + run: echo "INFRAHUB_TRACE_EXPORTER_ENDPOINT=${{ secrets.TRACING_ENDPOINT }}" >> $GITHUB_ENV + - name: Set tracing configuration + run: echo "OTEL_RESOURCE_ATTRIBUTES=github.run_id=${GITHUB_RUN_ID}" >> $GITHUB_ENV + + - name: "Store start time" + run: echo TEST_START_TIME=$(date +%s)000 >> $GITHUB_ENV - name: "Set environment variables" run: echo INFRAHUB_BUILD_NAME=infrahub-${{ runner.name }} >> $GITHUB_ENV @@ -686,26 +634,26 @@ jobs: run: docker compose -p $INFRAHUB_BUILD_NAME down -v --remove-orphans --rmi local - name: Build Demo - run: invoke demo.build + run: invoke dev.build - name: Pull External Docker Images - run: invoke demo.pull + run: invoke dev.pull - name: Initialize Demo id: init-demo - run: invoke demo.start demo.load-infra-schema + run: invoke dev.start dev.load-infra-schema - name: Check Demo Status - run: invoke demo.status + run: invoke dev.status - name: Load Data - run: invoke demo.load-infra-data + run: invoke dev.load-infra-data - name: Set infrahub address run: echo "INFRAHUB_ADDRESS=http://localhost:${INFRAHUB_SERVER_PORT}" >> $GITHUB_ENV - name: Git Repository - run: invoke demo.infra-git-import demo.infra-git-create + run: invoke dev.infra-git-import dev.infra-git-create - name: Install frontend dependencies run: npm install @@ -713,10 +661,26 @@ jobs: - name: Install Playwright Browsers run: npx playwright install chromium + # Make chromium ignore netlink messages by returning HandleMessage early + - name: Chrome path + run: echo CHROME_BIN_PATH="$(npx playwright install chromium --dry-run | grep Install | awk '{print $3}')/chrome-linux/chrome" >> $GITHUB_ENV + - name: Chrome func offset + run: echo FUNC_OFFSET="$(objdump -C --file-offsets --disassemble='net::internal::AddressTrackerLinux::HandleMessage(char const*, int, bool*, bool*, bool*)' $CHROME_BIN_PATH | grep 'File Offset' 
| sed -n 1p | sed -E 's/.*File Offset. (.*)\).*/\1/')" >> $GITHUB_ENV + - name: Patch chromium + run: printf '\xc3' | dd of=$CHROME_BIN_PATH bs=1 seek=$(($FUNC_OFFSET)) conv=notrunc + - name: Run Playwright tests run: npm run ci:test:e2e - env: - BUILDKITE_ANALYTICS_TOKEN: ${{ secrets.BUILDKITE_E2E_PLAYWRIGHT }} + + - name: Generate tracing spans + if: always() + uses: inception-health/otel-upload-test-artifact-action@v1 + with: + jobName: "E2E-testing-playwright" + stepName: "Run Playwright tests" + path: "frontend/playwright-junit.xml" + type: "junit" + githubToken: ${{ secrets.GH_TRACING_REPO_TOKEN }} - name: playwright-report if: always() @@ -748,6 +712,18 @@ jobs: - name: Display server status if: always() run: invoke demo.status + + - name: "Clear docker environment and force vmagent to stop" + if: always() + run: docker compose -p $INFRAHUB_BUILD_NAME down -v --remove-orphans --rmi local + + - name: "Wait for vmagent to push metrics" + if: always() + run: "sleep 10" + - name: "Show graph URL" + if: always() + run: 'echo "https://grafana-prod.tailc018d.ts.net/d/a4461039-bb27-4f57-9b2a-2c7f4e0a3459/e2e-tests?orgId=1&var-pr=$GITHUB_PR_NUMBER&var-job=$JOB_NAME&var-runner=$INFRAHUB_BUILD_NAME&from=$TEST_START_TIME&to=$(date +%s)000"' + # ------------------------------------------ Benchmarks ------------------------------------------------ backend-benchmark: needs: @@ -755,10 +731,11 @@ jobs: - files-changed - yaml-lint - python-lint - if: | - always() && !cancelled() && - !contains(needs.*.result, 'failure') && - !contains(needs.*.result, 'cancelled') + if: false # | + # always() && !cancelled() && + # !contains(needs.*.result, 'failure') && + # !contains(needs.*.result, 'cancelled') && + # needs.files-changed.outputs.backend == 'true' runs-on: group: huge-runners env: @@ -781,7 +758,7 @@ jobs: run: "poetry install" - name: Select infrahub db port - run: echo "INFRAHUB_DB_PORT=$(shuf -n 1 -i 10000-60000)" >> $GITHUB_ENV + run: echo "INFRAHUB_DB_PORT=$(shuf -n 1 -i 10000-30000)" >> $GITHUB_ENV - name: "Set environment variables" run: echo INFRAHUB_BUILD_NAME=infrahub-${{ runner.name }} >> $GITHUB_ENV @@ -794,7 +771,7 @@ jobs: run: mv development/docker-compose.dev-override-benchmark.yml development/docker-compose.dev-override.yml - name: Start dependencies - run: invoke demo.dev-start + run: invoke dev.deps - name: Update PATH run: "echo ~/.cargo/bin >> $GITHUB_PATH" @@ -840,6 +817,7 @@ jobs: # allowed-conclusions: success,skipped,cancelled,failure - uses: coverallsapp/github-action@v2 + continue-on-error: true env: COVERALLS_SERVICE_NUMBER: ${{ github.sha }} with: diff --git a/.github/workflows/otel-export-trace.yml b/.github/workflows/otel-export-trace.yml new file mode 100644 index 0000000000..68d5a65891 --- /dev/null +++ b/.github/workflows/otel-export-trace.yml @@ -0,0 +1,25 @@ +--- +# yamllint disable rule:truthy rule:truthy rule:line-length +name: OpenTelemetry Export Trace + +on: + workflow_run: + workflows: + - "CI" + types: + - completed + +jobs: + otel-export-trace: + name: OpenTelemetry Export Trace + runs-on: + group: huge-runners + steps: + - name: Export Workflow Trace + uses: inception-health/otel-export-trace-action@v1 + with: + otlpEndpoint: ${{ secrets.TRACING_ENDPOINT }} + otlpHeaders: "" + otelServiceName: CI + githubToken: ${{ secrets.GH_TRACING_REPO_TOKEN }} + runId: ${{ github.event.workflow_run.id }} diff --git a/.github/workflows/publish-helm-chart.yml b/.github/workflows/publish-helm-chart.yml new file mode 100644 index 0000000000..e9b21f49b7 --- /dev/null +++ 
b/.github/workflows/publish-helm-chart.yml @@ -0,0 +1,83 @@ +--- +# yamllint disable rule:truthy +name: Publish Helm Chart + +on: + workflow_dispatch: + inputs: + publish: + type: boolean + description: Wether to publish the Chart to Infrahub Private Registry + required: false + default: false + version: + type: string + required: false + description: The string to extract semver labels from. + default: '' + workflow_call: + inputs: + publish: + type: boolean + description: Wether to publish the Chart to Infrahub Private Registry + required: false + default: false + version: + type: string + required: false + description: The string to extract semver labels from. + default: '' + +jobs: + publish-helm-chart: + runs-on: ubuntu-22.04 + + steps: + - name: Check out the repo + uses: actions/checkout@v4 + + - name: Install Helm + uses: azure/setup-helm@v4.2.0 + + - name: Install Helm Push Plugin + run: helm plugin install https://github.com/chartmuseum/helm-push + + - name: Determine Chart Version + id: version + run: | + if [[ "${{ inputs.version }}" == "" ]]; then + VERSION=$(helm show chart ./helm | grep '^version:' | cut -d ' ' -f2) + echo "No version input provided, using Chart.yaml version: $VERSION" + else + VERSION="${{ inputs.version }}" + echo "Using input version: $VERSION" + fi + echo "VERSION=$VERSION" >> $GITHUB_OUTPUT + + - name: Login to Helm Registry + run: > + helm registry login ${{ vars.HARBOR_HOST }} + --username '${{ secrets.HARBOR_USERNAME }}' + --password '${{ secrets.HARBOR_PASSWORD }}' + + - name: Add Helm repository + run: helm repo add opsmill https://${{ vars.HARBOR_HOST }}/chartrepo/opsmill + + - name: Package Helm Chart + run: | + helm package --dependency-update ./helm --version ${{ steps.version.outputs.version }} + + - name: Push Helm Chart to OCI Registry + if: ${{ inputs.publish }} + run: | + helm push infrahub-${{ steps.version.outputs.version }}.tgz oci://${{ vars.HARBOR_HOST }}/opsmill/chart + + - name: Push Helm Chart to traditional repository + if: ${{ inputs.publish }} + run: > + helm cm-push -f infrahub-${{ steps.version.outputs.version }}.tgz opsmill + --username '${{ secrets.HARBOR_USERNAME }}' + --password '${{ secrets.HARBOR_PASSWORD }}' + + - name: Cleanup + run: rm -f infrahub-${{ steps.version.outputs.version }}.tgz diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3949270d3c..2064a33ccc 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -14,7 +14,7 @@ jobs: tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} steps: - - name: Set docker image meta data + - name: Set docker image metadata id: meta uses: docker/metadata-action@v5 with: @@ -27,6 +27,16 @@ jobs: labels: | org.opencontainers.image.source=${{ github.repository }} + # Commented until we agree to link Infrahub version and chart version + # extract-version: + # runs-on: ubuntu-22.04 + # outputs: + # version: ${{ steps.extract_version.outputs.version }} + # steps: + # - name: Extract version from tag + # id: extract_version + # run: echo "version=${GITHUB_REF_NAME/infrahub-v/}" >> $GITHUB_OUTPUT + publish-docker-image: uses: ./.github/workflows/ci-docker-image.yml secrets: inherit @@ -37,3 +47,11 @@ jobs: ref: ${{ github.sha }} tags: ${{ needs.meta_data.outputs.tags }} labels: ${{ needs.meta_data.outputs.labels }} + + publish-helm-chart: + uses: ./.github/workflows/publish-helm-chart.yml + secrets: inherit + needs: extract-version + with: + publish: true + # version: ${{ 
needs.extract-version.outputs.version }} diff --git a/.github/workflows/scale-tests.yml b/.github/workflows/scale-tests.yml index ab2792b9b7..3a64aa2cc8 100644 --- a/.github/workflows/scale-tests.yml +++ b/.github/workflows/scale-tests.yml @@ -30,20 +30,110 @@ env: jobs: scale-tests: strategy: + fail-fast: false + max-parallel: 4 matrix: include: - - node-amount: 1000 + - name: 1000 nodes + node-amount: 1000 attrs-amount: 0 rels-amount: 0 - - node-amount: 10 + changes-amount: 0 + stager: stage_infranode + - name: 10000 nodes + node-amount: 10000 + attrs-amount: 0 + rels-amount: 0 + changes-amount: 0 + stager: stage_infranode + - name: 100000 nodes + node-amount: 100000 + attrs-amount: 0 + rels-amount: 0 + changes-amount: 0 + stager: stage_infranode + - name: 100 attrs + node-amount: 10 attrs-amount: 100 rels-amount: 0 - - node-amount: 10 + changes-amount: 0 + stager: stage_infranode + - name: 1000 attrs + node-amount: 10 + attrs-amount: 1000 + rels-amount: 0 + changes-amount: 0 + stager: stage_infranode + - name: 100 rels + node-amount: 10 attrs-amount: 0 rels-amount: 100 + changes-amount: 0 + stager: stage_infranode + - name: 1000 rels + node-amount: 10 + attrs-amount: 0 + rels-amount: 1000 + changes-amount: 0 + stager: stage_infranode + - name: 100 branches (with new nodes) + node-amount: 100 + attrs-amount: 0 + rels-amount: 0 + changes-amount: 0 + stager: stage_branch + - name: 100 branches (with single node) + node-amount: 100 + attrs-amount: 0 + rels-amount: 0 + changes-amount: 0 + stager: stage_branch_update + - name: 1000 branches (with new nodes) + node-amount: 1000 + attrs-amount: 0 + rels-amount: 0 + changes-amount: 0 + stager: stage_branch + - name: 1000 branches (with single node) + node-amount: 1000 + attrs-amount: 0 + rels-amount: 0 + changes-amount: 0 + stager: stage_branch_update + - name: 1000 updates of a single node + node-amount: 1 + attrs-amount: 0 + rels-amount: 0 + changes-amount: 1000 + stager: stage_infranode + - name: 100 updates of a single node + node-amount: 1 + attrs-amount: 0 + rels-amount: 0 + changes-amount: 100 + stager: stage_infranode + - name: 100 updates of 100 nodes + node-amount: 100 + attrs-amount: 0 + rels-amount: 0 + changes-amount: 100 + stager: stage_infranode + - name: 1000 changes in diff + node-amount: 1000 + attrs-amount: 0 + rels-amount: 0 + changes-amount: 0 + stager: stage_branch_diff + - name: 10000 changes in diff + node-amount: 10000 + attrs-amount: 0 + rels-amount: 0 + changes-amount: 0 + stager: stage_branch_diff + name: ${{ matrix.name }} runs-on: group: huge-runners - timeout-minutes: 60 + timeout-minutes: 300 env: INFRAHUB_DB_TYPE: neo4j METRICS_ENDPOINT: ${{ secrets.METRICS_ENDPOINT }} @@ -71,28 +161,36 @@ jobs: run: docker compose -p $INFRAHUB_BUILD_NAME down -v --remove-orphans --rmi local - name: Select infrahub port - run: echo "INFRAHUB_SERVER_PORT=$(shuf -n 1 -i 10000-60000)" >> $GITHUB_ENV + run: echo "INFRAHUB_SERVER_PORT=$(shuf -n 1 -i 10000-30000)" >> $GITHUB_ENV - name: Set INFRAHUB_URL run: echo "INFRAHUB_URL=http://localhost:${INFRAHUB_SERVER_PORT}" >> $GITHUB_ENV - name: Select infrahub db port - run: echo "INFRAHUB_DB_PORT=$(shuf -n 1 -i 10000-60000)" >> $GITHUB_ENV + run: echo "INFRAHUB_DB_PORT=$(shuf -n 1 -i 10000-30000)" >> $GITHUB_ENV - name: Select infrahub db port - run: echo "INFRAHUB_DB_BACKUP_PORT=$(shuf -n 1 -i 10000-60000)" >> $GITHUB_ENV + run: echo "INFRAHUB_DB_BACKUP_PORT=$(shuf -n 1 -i 10000-30000)" >> $GITHUB_ENV - name: Select vmagent port - run: echo "VMAGENT_PORT=$(shuf -n 1 -i 10000-60000)" >> 
$GITHUB_ENV + run: echo "VMAGENT_PORT=$(shuf -n 1 -i 10000-30000)" >> $GITHUB_ENV + - name: Set job name + run: echo "JOB_NAME=${{ matrix.name }}" >> $GITHUB_ENV - name: "Start scale environment" run: "poetry run invoke backend.test-scale-env-start -g 1" - name: "Store start time" run: echo TEST_START_TIME=$(date +%s)000 >> $GITHUB_ENV - name: "Run test" - run: "poetry run invoke backend.test-scale --amount ${{ matrix.node-amount }} --attrs ${{ matrix.attrs-amount }} --rels ${{ matrix.rels-amount }} -s backend/tests/scale/schema.yml --stager stage_infranode" + run: "poetry run invoke backend.test-scale --amount ${{ matrix.node-amount }} --attrs ${{ matrix.attrs-amount }} --rels ${{ matrix.rels-amount }} --changes ${{ matrix.changes-amount }} -s backend/tests/scale/schema.yml --stager ${{ matrix.stager }}" - name: "Push metrics" + if: always() run: 'curl -v --data-binary @metrics.csv "http://localhost:${VMAGENT_PORT}/api/v1/import/csv?format=1:metric:scale_test_response_time,2:time:unix_ms,3:label:operation_name,4:label:stage,5:label:node_amount,6:label:attrs_amount,7:label:rels_amount"' - name: "Wait for vmagent to push metrics" + if: always() run: "sleep 10" - name: "Show graph URL" - run: 'echo "https://grafana-prod.tailc018d.ts.net/d/ebf7ec72-db79-4fb7-9b46-4621ca9c407a/scale-tests?orgId=1&var-run_id=$GITHUB_RUN_ID&var-stage=test&var-node_amount=${{ matrix.node-amount }}&var-attrs_amount=${{ matrix.attrs-amount }}&var-rels_amount=${{ matrix.rels-amount }}&var-runner=$INFRAHUB_BUILD_NAME&from=$TEST_START_TIME&to=$(date +%s)000"' + if: always() + run: 'echo "https://grafana-prod.tailc018d.ts.net/d/ebf7ec72-db79-4fb7-9b46-4621ca9c407a/scale-tests?orgId=1&var-run_id=$GITHUB_RUN_ID&var-job=$(echo $JOB_NAME | sed ''s/ /%20/g'')&var-stage=test&var-node_amount=${{ matrix.node-amount }}&var-attrs_amount=${{ matrix.attrs-amount }}&var-rels_amount=${{ matrix.rels-amount }}&var-runner=$INFRAHUB_BUILD_NAME&from=$TEST_START_TIME&to=$(date +%s)000"' + - name: Display server logs + if: always() + run: docker logs "${INFRAHUB_BUILD_NAME}-infrahub-server-1" - name: "Destroy scale environment" if: always() run: "invoke backend.test-scale-env-destroy" diff --git a/.github/workflows/update-compose-file.yml b/.github/workflows/update-compose-file.yml new file mode 100644 index 0000000000..157f3ea24f --- /dev/null +++ b/.github/workflows/update-compose-file.yml @@ -0,0 +1,44 @@ +--- +# yamllint disable rule:truthy +name: Update Docker Compose on Pyproject update in Stable + +# This will bump the infrahub docker image in the docker-compose.yml +# when pyproject.toml is change in the stable branch + +on: + push: + branches: + - stable + paths: + - 'pyproject.toml' + +jobs: + update-docker-compose: + runs-on: ubuntu-latest + + steps: + - name: "Check out repository code" + uses: "actions/checkout@v4" + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.12 + - name: "Setup environment" + run: "pip install invoke toml" + + - name: Update docker-compose.yml + run: | + VERSION=$(python -c "import toml; print(toml.load('pyproject.toml')['tool']['poetry']['version'])") + sed -i'' -e "s|\(image: \)\(.*\)/infrahub:.*\"|\1\2/infrahub:\${VERSION:-$VERSION}\"|g" docker-compose.yml + + - name: Commit docker-compose.yml + uses: github-actions-x/commit@v2.9 + with: + github-token: ${{ secrets.GH_INFRAHUB_BOT_TOKEN }} + push-branch: 'stable' + commit-message: 'chore: update docker-compose' + files: | + docker-compose.yml + name: opsmill-bot + email: github-bot@opsmill.com + rebase: true diff 
--git a/.github/workflows/version-upgrade.yml b/.github/workflows/version-upgrade.yml new file mode 100644 index 0000000000..ef63a7f8a8 --- /dev/null +++ b/.github/workflows/version-upgrade.yml @@ -0,0 +1,148 @@ +--- +# yamllint disable rule:truthy rule:truthy rule:line-length +name: "Infrahub Version Upgrade testing" +on: + # schedule: + # - cron: "0 2 * * *" + workflow_dispatch: + inputs: + commit: + type: string + description: commit sha or branch name + default: '' + required: false + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + INFRAHUB_DB_USERNAME: neo4j + INFRAHUB_DB_PASSWORD: admin + INFRAHUB_DB_ADDRESS: localhost + INFRAHUB_DB_PORT: 7687 + INFRAHUB_DB_PROTOCOL: bolt + INFRAHUB_BROKER_ADDRESS: message-queue + INFRAHUB_LOG_LEVEL: CRITICAL + INFRAHUB_IMAGE_NAME: "opsmill/infrahub" + INFRAHUB_IMAGE_VER: "local" + +jobs: + migration-tests: + strategy: + fail-fast: false + max-parallel: 4 + matrix: + include: + - name: From 0.12 + source_version: 0.12.1 + name: ${{ matrix.name }} + runs-on: + group: huge-runners + timeout-minutes: 120 + env: + INFRAHUB_DB_TYPE: neo4j + steps: + - name: "Check out repository code" + uses: "actions/checkout@v4" + with: + ref: ${{ inputs.commit == '' && 'develop' || inputs.commit }} + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.12 + - name: "Setup python environment" + run: | + pipx install poetry + poetry config virtualenvs.prefer-active-python true + pip install invoke toml + - name: "Install Package" + run: "poetry install" + + - name: Install NodeJS + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: 'npm' + cache-dependency-path: package-lock.json + + - name: "Set environment variables" + run: echo INFRAHUB_BUILD_NAME=infrahub-${{ runner.name }} >> $GITHUB_ENV + - name: "Set environment variables" + run: echo INFRAHUB_IMAGE_VER=${{ matrix.source_version }} >> $GITHUB_ENV + - name: "Clear docker environment" + run: docker compose -p $INFRAHUB_BUILD_NAME down -v --remove-orphans --rmi local + + - name: Select infrahub port + run: echo "INFRAHUB_SERVER_PORT=$(shuf -n 1 -i 10000-30000)" >> $GITHUB_ENV + - name: Set INFRAHUB_URL + run: echo "INFRAHUB_URL=http://localhost:${INFRAHUB_SERVER_PORT}" >> $GITHUB_ENV + - name: "Store start time" + run: echo TEST_START_TIME=$(date +%s)000 >> $GITHUB_ENV + + # Initialize the demo environment with the Source Version + - name: Pull External Docker Images + run: invoke demo.pull + - name: Initialize Demo + id: init-demo + run: invoke demo.start demo.load-infra-schema + - name: Check Demo Status + run: invoke demo.status + - name: Load Data + run: invoke demo.load-infra-data + - name: Git Repository + run: invoke demo.infra-git-import demo.infra-git-create + - name: Stop Demo + run: invoke demo.stop + + # Build the local version and run the migrations + - name: "Set environment variables" + run: echo INFRAHUB_IMAGE_VER=local-${{ runner.name }}-${{ github.sha }} >> $GITHUB_ENV + + - name: Build Demo + run: invoke dev.build + + - name: Run database migration + run: invoke dev.migrate + + - name: Start Demo + run: invoke dev.start + + # Execute the E2E tests with playwright + - name: Install frontend dependencies + run: npm install + + - name: Install Playwright Browsers + run: npx playwright install chromium + + - name: Run Playwright tests + run: npm run ci:test:e2e + + + - name: Containers after tests + if: always() + run: docker ps -a + + - name: Display server logs + if: always() + run: docker logs 
"${INFRAHUB_BUILD_NAME}-infrahub-server-1" + + - name: Display git 1 logs + if: always() + run: docker logs "${INFRAHUB_BUILD_NAME}-infrahub-git-1" + + - name: Display git 2 logs + if: always() + run: docker logs "${INFRAHUB_BUILD_NAME}-infrahub-git-2" + + - name: Display database logs + if: always() + run: docker logs "${INFRAHUB_BUILD_NAME}-database-1" + + - name: Display server status + if: always() + run: invoke demo.status + + - name: "Clear docker environment and force vmagent to stop" + if: always() + run: docker compose -p $INFRAHUB_BUILD_NAME down -v --remove-orphans --rmi local diff --git a/.gitignore b/.gitignore index a34b6e594d..62859ffca4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ .coverage coverage.xml +*junit* *.pyc *.env script.py @@ -18,11 +19,7 @@ development/docker-compose.dev-override.yml .direnv/ .envrc -# Retype App -.retype/* -docs/.retype docs/build -retype.manifest storage/* .coverage.* @@ -31,3 +28,7 @@ python_sdk/dist/* # Test reports **/*.csv + +# Generated files +generated/ +query_performance_results/ diff --git a/.vale/styles/Infrahub/sentence-case.yml b/.vale/styles/Infrahub/sentence-case.yml index aab3cb7bd0..7ff566fefd 100644 --- a/.vale/styles/Infrahub/sentence-case.yml +++ b/.vale/styles/Infrahub/sentence-case.yml @@ -29,6 +29,8 @@ exceptions: - Helm - Infrahub - infrahubctl + - IP + - IPAM - JavaScript - Jinja - Jinja2 diff --git a/.vale/styles/spelling-exceptions.txt b/.vale/styles/spelling-exceptions.txt index a21e42b21e..e58f5e189d 100644 --- a/.vale/styles/spelling-exceptions.txt +++ b/.vale/styles/spelling-exceptions.txt @@ -3,14 +3,19 @@ APIs artifact_definitions artifact_name async -autoflake +boolean check_definitions class_name codespaces config +Config content_type +convert_query_response +CoreRepository cypher +Dagster datastore +default_branch dev devcontainer direnv @@ -25,18 +30,23 @@ eslint excalidraw fanout file_path +generator_definition +generator_definitions github graphene graphiql graphql greymatter +idempotency include_in_menu Infrahub Infrahub's infrahubctl +IP IPAddress IPHost IPNetwork +IPAM isort jinja Jotai @@ -44,22 +54,31 @@ kbps markdownlint memgraph menu_placement +modularization namespace namespaces +Nautobot +Netbox npm o'brian openconfig opentelemetry PyPI rebase +rebased repo REST +ressources +schema_mapping sdk +subnet template_path toml uncheck +validator +upsert validators +Version Control Vitest Yaml yamllint - diff --git a/.yamllint.yml b/.yamllint.yml index b1b4a0acb3..3a57423c9f 100644 --- a/.yamllint.yml +++ b/.yamllint.yml @@ -5,9 +5,8 @@ ignore: | /.venv /examples /repositories - /frontend/node_modules /frontend/playwright-report - /node_modules + **/node_modules # https://github.com/sbaudoin/yamllint/issues/16 /helm/templates diff --git a/backend/infrahub/api/artifact.py b/backend/infrahub/api/artifact.py index d0624441b7..dc7136a29f 100644 --- a/backend/infrahub/api/artifact.py +++ b/backend/infrahub/api/artifact.py @@ -43,7 +43,7 @@ async def get_artifact( content = await registry.storage.retrieve(identifier=artifact.storage_id.value) - return Response(content=content, headers={"Content-Type": artifact.content_type.value}) + return Response(content=content, headers={"Content-Type": artifact.content_type.value.value}) @router.post("/generate/{artifact_definition_id:str}") diff --git a/backend/infrahub/api/dependencies.py b/backend/infrahub/api/dependencies.py index d35d5bfbc5..42609f8ea0 100644 --- a/backend/infrahub/api/dependencies.py +++ b/backend/infrahub/api/dependencies.py @@ -8,8 +8,8 
@@ from infrahub import config from infrahub.auth import AccountSession, authentication_token, validate_jwt_access_token, validate_jwt_refresh_token -from infrahub.core import get_branch from infrahub.core.branch import Branch # noqa: TCH001 +from infrahub.core.registry import registry from infrahub.core.timestamp import Timestamp from infrahub.database import InfrahubDatabase # noqa: TCH001 from infrahub.exceptions import AuthorizationError, PermissionDeniedError @@ -84,7 +84,7 @@ async def get_branch_params( branch_name: Optional[str] = Query(None, alias="branch", description="Name of the branch to use for the query"), at: Optional[str] = Query(None, description="Time to use for the query, in absolute or relative format"), ) -> BranchParams: - branch = await get_branch(db=db, branch=branch_name) + branch = await registry.get_branch(db=db, branch=branch_name) at = Timestamp(at) return BranchParams(branch=branch, at=at) @@ -94,7 +94,7 @@ async def get_branch_dep( db: InfrahubDatabase = Depends(get_db), branch_name: Optional[str] = Query(None, alias="branch", description="Name of the branch to use for the query"), ) -> Branch: - return await get_branch(db=db, branch=branch_name) + return await registry.get_branch(db=db, branch=branch_name) async def get_current_user( diff --git a/backend/infrahub/api/diff/diff.py b/backend/infrahub/api/diff/diff.py index 16b4d35274..00b869a4a5 100644 --- a/backend/infrahub/api/diff/diff.py +++ b/backend/infrahub/api/diff/diff.py @@ -150,7 +150,7 @@ async def get_diff_artifacts( targets = await registry.manager.query( db=db, - schema="CoreArtifactTarget", + schema=InfrahubKind.ARTIFACTTARGET, filters={"artifacts__ids": artifact_ids_branch}, prefetch_relationships=True, branch=branch, @@ -159,7 +159,7 @@ async def get_diff_artifacts( if only_in_main: targets_in_main = await registry.manager.query( db=db, - schema="CoreArtifactTarget", + schema=InfrahubKind.ARTIFACTTARGET, filters={"artifacts__ids": only_in_main}, prefetch_relationships=True, branch=default_branch_name, diff --git a/backend/infrahub/api/exception_handlers.py b/backend/infrahub/api/exception_handlers.py index 4521cd9d64..4d37415d00 100644 --- a/backend/infrahub/api/exception_handlers.py +++ b/backend/infrahub/api/exception_handlers.py @@ -1,10 +1,12 @@ +from typing import Any + from fastapi.responses import JSONResponse from pydantic.v1 import ValidationError from infrahub.exceptions import Error -async def generic_api_exception_handler(_, exc: Exception, http_code: int = 500) -> JSONResponse: +async def generic_api_exception_handler(_: Any, exc: Exception, http_code: int = 500) -> JSONResponse: """Generic API Exception handler.""" if isinstance(exc, Error): if exc.HTTP_CODE: diff --git a/backend/infrahub/api/internal.py b/backend/infrahub/api/internal.py index da8c5857b8..d6e7f4a2d4 100644 --- a/backend/infrahub/api/internal.py +++ b/backend/infrahub/api/internal.py @@ -1,7 +1,7 @@ -import json import re from typing import List, Optional +import ujson from fastapi import APIRouter from lunr.index import Index from pydantic import BaseModel @@ -82,7 +82,7 @@ def _load_json(self) -> None: try: with open(config.SETTINGS.main.docs_index_path, "r", encoding="utf-8") as f: - search_index = json.loads(f.read()) + search_index = ujson.loads(f.read()) self._title_documents = search_index[0]["documents"] heading_json = search_index[1] self._heading_documents = heading_json["documents"] diff --git a/backend/infrahub/api/menu.py b/backend/infrahub/api/menu.py index fc4055d328..6fba769f24 100644 --- 
a/backend/infrahub/api/menu.py +++ b/backend/infrahub/api/menu.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Dict, List, Union +from typing import TYPE_CHECKING, Dict, List from fastapi import APIRouter, Depends from pydantic import BaseModel, Field @@ -9,9 +9,12 @@ from infrahub.core import registry from infrahub.core.branch import Branch # noqa: TCH001 from infrahub.core.constants import InfrahubKind -from infrahub.core.schema import GenericSchema, NodeSchema +from infrahub.core.schema import NodeSchema from infrahub.log import get_logger +if TYPE_CHECKING: + from infrahub.core.schema import MainSchemaTypes + log = get_logger() router = APIRouter(prefix="/menu") @@ -44,7 +47,7 @@ def add_to_menu(structure: Dict[str, List[InterfaceMenu]], menu_item: InterfaceM menu_item.children.insert(0, all_items) -def _extract_node_icon(model: Union[NodeSchema, GenericSchema]) -> str: +def _extract_node_icon(model: MainSchemaTypes) -> str: if not model.icon: return "" return model.icon @@ -64,23 +67,36 @@ async def get_menu( structure: Dict[str, List[InterfaceMenu]] = {} - groups = InterfaceMenu( - title="Groups", + ipam = InterfaceMenu( + title="IPAM", + children=[ + InterfaceMenu( + title="Namespaces", + path=f"/objects/{InfrahubKind.IPNAMESPACE}", + icon=_extract_node_icon(full_schema[InfrahubKind.IPNAMESPACE]), + ), + InterfaceMenu( + title="Prefixes", path="/ipam/prefixes", icon=_extract_node_icon(full_schema[InfrahubKind.IPPREFIX]) + ), + InterfaceMenu( + title="IP Addresses", + path="/ipam/addresses?ipam-tab=ip-details", + icon=_extract_node_icon(full_schema[InfrahubKind.IPADDRESS]), + ), + ], ) + + has_ipam = False + for key in full_schema.keys(): model = full_schema[key] - if not model.include_in_menu: - continue + if isinstance(model, NodeSchema) and ( + InfrahubKind.IPADDRESS in model.inherit_from or InfrahubKind.IPPREFIX in model.inherit_from + ): + has_ipam = True - if isinstance(model, NodeSchema) and InfrahubKind.GENERICGROUP in model.inherit_from: - groups.children.append( - InterfaceMenu( - title=model.menu_title, - path=f"/objects/{model.kind}", - icon=_extract_node_icon(full_schema[InfrahubKind.GENERICGROUP]), - ) - ) + if not model.include_in_menu: continue menu_name = model.menu_placement or "base" @@ -98,14 +114,20 @@ async def get_menu( objects.children.append(menu_item) objects.children.sort() - groups.children.sort() - groups.children.insert( - 0, - InterfaceMenu( - title="All Groups", - path="/objects/CoreGroup", - icon=_extract_node_icon(full_schema[InfrahubKind.GENERICGROUP]), - ), + groups = InterfaceMenu( + title="Groups & Profiles", + children=[ + InterfaceMenu( + title="All Groups", + path=f"/objects/{InfrahubKind.GENERICGROUP}", + icon=_extract_node_icon(full_schema[InfrahubKind.GENERICGROUP]), + ), + InterfaceMenu( + title="All Profiles", + path=f"/objects/{InfrahubKind.PROFILE}", + icon=_extract_node_icon(full_schema[InfrahubKind.PROFILE]), + ), + ], ) unified_storage = InterfaceMenu( title="Unified Storage", @@ -128,7 +150,6 @@ async def get_menu( ), ], ) - change_control = InterfaceMenu( title="Change Control", children=[ @@ -159,6 +180,16 @@ async def get_menu( path=f"/objects/{InfrahubKind.ARTIFACTDEFINITION}", icon=_extract_node_icon(full_schema[InfrahubKind.ARTIFACTDEFINITION]), ), + InterfaceMenu( + title="Generator Definition", + path=f"/objects/{InfrahubKind.GENERATORDEFINITION}", + icon=_extract_node_icon(full_schema[InfrahubKind.GENERATORDEFINITION]), + ), + InterfaceMenu( + title="Generator Instance", + 
path=f"/objects/{InfrahubKind.GENERATORINSTANCE}", + icon=_extract_node_icon(full_schema[InfrahubKind.GENERATORINSTANCE]), + ), InterfaceMenu( title="Transformation", path=f"/objects/{InfrahubKind.TRANSFORM}", @@ -192,4 +223,9 @@ async def get_menu( ], ) - return [objects, groups, unified_storage, change_control, deployment, admin] + menu_items = [objects] + if has_ipam: + menu_items.append(ipam) + menu_items.extend([groups, unified_storage, change_control, deployment, admin]) + + return menu_items diff --git a/backend/infrahub/api/schema.py b/backend/infrahub/api/schema.py index 1acf97dae8..c5fb8a39af 100644 --- a/backend/infrahub/api/schema.py +++ b/backend/infrahub/api/schema.py @@ -1,9 +1,9 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, List, Optional, Tuple, Union from fastapi import APIRouter, BackgroundTasks, Depends, Query, Request -from pydantic import BaseModel, Field, model_validator +from pydantic import BaseModel, Field, computed_field, model_validator from starlette.responses import JSONResponse from infrahub import config, lock @@ -12,12 +12,12 @@ from infrahub.core import registry from infrahub.core.branch import Branch # noqa: TCH001 from infrahub.core.migrations.schema.runner import schema_migrations_runner -from infrahub.core.models import SchemaBranchHash # noqa: TCH001 -from infrahub.core.schema import GenericSchema, NodeSchema, SchemaRoot +from infrahub.core.models import SchemaBranchHash, SchemaDiff # noqa: TCH001 +from infrahub.core.schema import GenericSchema, MainSchemaTypes, NodeSchema, ProfileSchema, SchemaRoot from infrahub.core.schema_manager import SchemaBranch, SchemaNamespace, SchemaUpdateValidationResult # noqa: TCH001 from infrahub.core.validators.checker import schema_validators_checker from infrahub.database import InfrahubDatabase # noqa: TCH001 -from infrahub.exceptions import PermissionDeniedError +from infrahub.exceptions import MigrationError, PermissionDeniedError from infrahub.log import get_logger from infrahub.message_bus import Meta, messages from infrahub.services import services @@ -35,7 +35,7 @@ class APISchemaMixin: @classmethod - def from_schema(cls, schema: Union[NodeSchema, GenericSchema]) -> Self: + def from_schema(cls, schema: MainSchemaTypes) -> Self: data = schema.model_dump() data["relationships"] = [ relationship.model_dump() for relationship in schema.relationships if not relationship.internal_peer @@ -43,33 +43,34 @@ def from_schema(cls, schema: Union[NodeSchema, GenericSchema]) -> Self: data["hash"] = schema.get_hash() return cls(**data) + @model_validator(mode="before") + @classmethod + def set_kind(cls, values: Any) -> Any: + if isinstance(values, dict): + values["kind"] = f'{values["namespace"]}{values["name"]}' + return values + class APINodeSchema(NodeSchema, APISchemaMixin): api_kind: Optional[str] = Field(default=None, alias="kind", validate_default=True) hash: str - @model_validator(mode="before") - @classmethod - def set_kind(cls, values: Dict[str, Any]) -> Dict[str, Any]: - values["kind"] = f'{values["namespace"]}{values["name"]}' - return values - class APIGenericSchema(GenericSchema, APISchemaMixin): api_kind: Optional[str] = Field(default=None, alias="kind", validate_default=True) hash: str - @model_validator(mode="before") - @classmethod - def set_kind(cls, values: Dict[str, Any]) -> Dict[str, Any]: - values["kind"] = f'{values["namespace"]}{values["name"]}' - return values + +class 
APIProfileSchema(ProfileSchema, APISchemaMixin): + api_kind: Optional[str] = Field(default=None, alias="kind", validate_default=True) + hash: str class SchemaReadAPI(BaseModel): main: str = Field(description="Main hash for the entire schema") nodes: List[APINodeSchema] = Field(default_factory=list) generics: List[APIGenericSchema] = Field(default_factory=list) + profiles: List[APIProfileSchema] = Field(default_factory=list) namespaces: List[SchemaNamespace] = Field(default_factory=list) @@ -81,6 +82,17 @@ class SchemasLoadAPI(SchemaRoot): schemas: List[SchemaLoadAPI] +class SchemaUpdate(BaseModel): + hash: str = Field(..., description="The new hash for the entire schema") + previous_hash: str = Field(..., description="The previous hash for the entire schema") + diff: SchemaDiff = Field(..., description="The modifications to the schema") + + @computed_field + def schema_updated(self) -> bool: + """Indicates if the loading of the schema changed the existing schema""" + return self.hash != self.previous_hash + + def evaluate_candidate_schemas( branch_schema: SchemaBranch, schemas_to_evaluate: SchemasLoadAPI ) -> Tuple[SchemaBranch, SchemaUpdateValidationResult]: @@ -122,6 +134,11 @@ async def get_schema( for value in all_schemas if isinstance(value, GenericSchema) and value.namespace != "Internal" ], + profiles=[ + APIProfileSchema.from_schema(value) + for value in all_schemas + if isinstance(value, ProfileSchema) and value.namespace != "Internal" + ], namespaces=schema_branch.get_namespaces(), ) @@ -139,17 +156,20 @@ async def get_schema_summary( async def get_schema_by_kind( schema_kind: str, branch: Branch = Depends(get_branch_dep), -) -> Union[APINodeSchema, APIGenericSchema]: +) -> Union[APIProfileSchema, APINodeSchema, APIGenericSchema]: log.debug("schema_kind_request", branch=branch.name) - schema = registry.schema.get(name=schema_kind, branch=branch) + schema = registry.schema.get(name=schema_kind, branch=branch, duplicate=False) - api_schema: dict[str, type[Union[APINodeSchema, APIGenericSchema]]] = { + api_schema: dict[str, type[Union[APIProfileSchema, APINodeSchema, APIGenericSchema]]] = { + "profile": APIProfileSchema, "node": APINodeSchema, "generic": APIGenericSchema, } key = "" + if isinstance(schema, ProfileSchema): + key = "profile" if isinstance(schema, NodeSchema): key = "node" if isinstance(schema, GenericSchema): @@ -166,7 +186,7 @@ async def load_schema( db: InfrahubDatabase = Depends(get_db), branch: Branch = Depends(get_branch_dep), _: Any = Depends(get_current_user), -) -> JSONResponse: +) -> SchemaUpdate: service: InfrahubServices = request.app.state.service log.info("schema_load_request", branch=branch.name) @@ -179,11 +199,12 @@ async def load_schema( async with lock.registry.global_schema_lock(): branch_schema = registry.schema.get_schema_branch(name=branch.name) + original_hash = branch_schema.get_hash() candidate_schema, result = evaluate_candidate_schemas(branch_schema=branch_schema, schemas_to_evaluate=schemas) if not result.diff.all: - return JSONResponse(status_code=202, content={}) + return SchemaUpdate(hash=original_hash, previous_hash=original_hash, diff=result.diff) # ---------------------------------------------------------- # Validate if the new schema is valid with the content of the database @@ -216,6 +237,8 @@ async def load_schema( log.info("Branch converted to isolated mode because the schema has changed", branch=branch.name) await branch.save(db=dbt) + updated_branch = registry.schema.get_schema_branch(name=branch.name) + updated_hash = 
updated_branch.get_hash() # ---------------------------------------------------------- # Run the migrations @@ -228,7 +251,7 @@ async def load_schema( service=service, ) if error_messages: - return JSONResponse(status_code=500, content={"error": ",\n".join(error_messages)}) + raise MigrationError(message=",\n".join(error_messages)) if config.SETTINGS.broker.enable: message = messages.EventSchemaUpdate( @@ -237,7 +260,9 @@ async def load_schema( ) background_tasks.add_task(services.send, message) - return JSONResponse(status_code=202, content={}) + await service.component.refresh_schema_hash(branches=[branch.name]) + + return SchemaUpdate(hash=updated_hash, previous_hash=original_hash, diff=result.diff) @router.post("/check") diff --git a/backend/infrahub/auth.py b/backend/infrahub/auth.py index b946050227..e23b7abc6b 100644 --- a/backend/infrahub/auth.py +++ b/backend/infrahub/auth.py @@ -10,11 +10,11 @@ from pydantic.v1 import BaseModel from infrahub import config, models -from infrahub.core import get_branch from infrahub.core.account import validate_token from infrahub.core.constants import InfrahubKind from infrahub.core.manager import NodeManager from infrahub.core.node import Node +from infrahub.core.registry import registry from infrahub.exceptions import AuthorizationError, NodeNotFoundError if TYPE_CHECKING: @@ -44,7 +44,7 @@ def read_only(self) -> bool: async def authenticate_with_password( db: InfrahubDatabase, credentials: models.PasswordCredential, branch: Optional[str] = None ) -> models.UserToken: - selected_branch = await get_branch(db=db, branch=branch) + selected_branch = await registry.get_branch(db=db, branch=branch) response = await NodeManager.query( schema=InfrahubKind.ACCOUNT, db=db, @@ -68,7 +68,7 @@ async def authenticate_with_password( refresh_expires = now + timedelta(seconds=config.SETTINGS.security.refresh_token_lifetime) session_id = await create_db_refresh_token(db=db, account_id=account.id, expiration=refresh_expires) - access_token = generate_access_token(account_id=account.id, role=account.role.value, session_id=session_id) + access_token = generate_access_token(account_id=account.id, role=account.role.value.value, session_id=session_id) refresh_token = generate_refresh_token(account_id=account.id, session_id=session_id, expiration=refresh_expires) return models.UserToken(access_token=access_token, refresh_token=refresh_token) @@ -88,7 +88,7 @@ async def create_db_refresh_token(db: InfrahubDatabase, account_id: str, expirat async def create_fresh_access_token( db: InfrahubDatabase, refresh_data: models.RefreshTokenData ) -> models.AccessTokenResponse: - selected_branch = await get_branch(db=db) + selected_branch = await registry.get_branch(db=db) refresh_token = await NodeManager.get_one( id=str(refresh_data.session_id), @@ -110,7 +110,7 @@ async def create_fresh_access_token( ) access_token = generate_access_token( - account_id=account.id, role=account.role.value, session_id=refresh_data.session_id + account_id=account.id, role=account.role.value.value, session_id=refresh_data.session_id ) return models.AccessTokenResponse(access_token=access_token) diff --git a/backend/infrahub/cli/__init__.py b/backend/infrahub/cli/__init__.py index 505e48d63d..b160666e44 100644 --- a/backend/infrahub/cli/__init__.py +++ b/backend/infrahub/cli/__init__.py @@ -5,7 +5,6 @@ from infrahub import config from infrahub.cli.db import app as db_app from infrahub.cli.events import app as events_app -from infrahub.cli.generate_schema import app as generate_schema_app from 
infrahub.cli.git_agent import app as git_app from infrahub.cli.server import app as server_app from infrahub.core.initialization import initialization @@ -19,7 +18,6 @@ app.add_typer(git_app, name="git-agent") app.add_typer(db_app, name="db") app.add_typer(events_app, name="events", help="Interact with the events system.") -app.add_typer(generate_schema_app, name="generate-schema") async def _init_shell(config_file: str) -> None: diff --git a/backend/infrahub/cli/db.py b/backend/infrahub/cli/db.py index d232adbbea..90e5869148 100644 --- a/backend/infrahub/cli/db.py +++ b/backend/infrahub/cli/db.py @@ -1,25 +1,35 @@ import importlib import logging -from asyncio import run as aiorun from enum import Enum from typing import Optional import typer +from infrahub_sdk.async_typer import AsyncTyper +from rich import print as rprint from rich.console import Console from rich.logging import RichHandler from rich.table import Table from infrahub import config +from infrahub.core import registry from infrahub.core.graph import GRAPH_VERSION from infrahub.core.graph.constraints import ConstraintManagerBase, ConstraintManagerMemgraph, ConstraintManagerNeo4j +from infrahub.core.graph.index import node_indexes, rel_indexes from infrahub.core.graph.schema import GRAPH_SCHEMA -from infrahub.core.initialization import first_time_initialization, get_root_node, initialization +from infrahub.core.initialization import first_time_initialization, get_root_node, initialization, initialize_registry from infrahub.core.migrations.graph import get_graph_migrations +from infrahub.core.migrations.schema.runner import schema_migrations_runner +from infrahub.core.schema import SchemaRoot, core_models, internal_schema +from infrahub.core.schema.definitions.deprecated import deprecated_models +from infrahub.core.schema_manager import SchemaManager from infrahub.core.utils import delete_all_nodes +from infrahub.core.validators.checker import schema_validators_checker from infrahub.database import DatabaseType, InfrahubDatabase, get_db from infrahub.log import get_logger +from infrahub.services import InfrahubServices +from infrahub.services.adapters.message_bus.local import BusSimulator -app = typer.Typer() +app = AsyncTyper() PERMISSIONS_AVAILABLE = ["read", "write", "admin"] @@ -30,6 +40,12 @@ class ConstraintAction(str, Enum): DROP = "drop" +class IndexAction(str, Enum): + SHOW = "show" + ADD = "add" + DROP = "drop" + + @app.callback() def callback() -> None: """ @@ -37,7 +53,12 @@ def callback() -> None: """ -async def _init() -> None: +@app.command() +async def init( + config_file: str = typer.Option( + "infrahub.toml", envvar="INFRAHUB_CONFIG", help="Location of the configuration file to use for Infrahub" + ), +) -> None: """Erase the content of the database and initialize it with the core schema.""" log = get_logger() @@ -48,6 +69,9 @@ async def _init() -> None: # TODO, if possible try to implement this in an idempotent way # -------------------------------------------------- + logging.getLogger("neo4j").setLevel(logging.ERROR) + config.load_and_exit(config_file_name=config_file) + dbdriver = InfrahubDatabase(driver=await get_db(retry=1)) async with dbdriver.start_transaction() as db: log.info("Delete All Nodes") @@ -57,8 +81,17 @@ async def _init() -> None: await dbdriver.close() -async def _load_test_data(dataset: str) -> None: - """Load test data into the database from the test_data directory.""" +@app.command() +async def load_test_data( + config_file: str = typer.Option( + "infrahub.toml", 
envvar="INFRAHUB_CONFIG", help="Location of the configuration file to use for Infrahub" + ), + dataset: str = "dataset01", +) -> None: + """Load test data into the database from the `test_data` directory.""" + + logging.getLogger("neo4j").setLevel(logging.ERROR) + config.load_and_exit(config_file_name=config_file) dbdriver = InfrahubDatabase(driver=await get_db(retry=1)) async with dbdriver.start_session() as db: @@ -76,20 +109,28 @@ async def _load_test_data(dataset: str) -> None: await dbdriver.close() -async def _migrate(check: bool) -> None: +@app.command() +async def migrate( + check: bool = typer.Option(False, help="Check the state of the database without applying the migrations."), + config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG"), +) -> None: + """Check the current format of the internal graph and apply the necessary migrations""" log = get_logger() + config.load_and_exit(config_file_name=config_file) + dbdriver = InfrahubDatabase(driver=await get_db(retry=1)) async with dbdriver.start_session() as db: - log.info("Checking current state of the Database") + rprint("Checking current state of the Database") + await initialize_registry(db=db) root_node = await get_root_node(db=db) migrations = await get_graph_migrations(root=root_node) if not migrations: - log.info(f"Database up-to-date (v{root_node.graph_version}), no migration to execute.") + rprint(f"Database up-to-date (v{root_node.graph_version}), no migration to execute.") else: - log.info( + rprint( f"Database needs to be updated (v{root_node.graph_version} -> v{GRAPH_VERSION}), {len(migrations)} migrations pending" ) @@ -102,21 +143,139 @@ async def _migrate(check: bool) -> None: if execution_result.success: validation_result = await migration.validate_migration(db=db) if validation_result.success: - log.info(f"Migration: {migration.name} SUCCESS") + rprint(f"Migration: {migration.name} [green]SUCCESS[/green]") + root_node.graph_version = migration.minimum_version + 1 + await root_node.save(db=db) if not execution_result.success or validation_result and not validation_result.success: - log.info(f"Migration: {migration.name} FAILED") + rprint(f"Migration: {migration.name} [bold red]FAILED[/bold red]") for error in execution_result.errors: - log.warning(f" {error}") + rprint(f" {error}") if validation_result and not validation_result.success: for error in validation_result.errors: - log.warning(f" {error}") + rprint(f" {error}") break await dbdriver.close() -async def _constraint(action: ConstraintAction) -> None: +@app.command() +async def update_core_schema( # pylint: disable=too-many-statements + debug: bool = typer.Option(False, help="Enable advanced logging and troubleshooting"), + config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG"), +) -> None: + """Check the current format of the internal graph and apply the necessary migrations""" + logging.getLogger("infrahub").setLevel(logging.WARNING) + logging.getLogger("neo4j").setLevel(logging.ERROR) + config.load_and_exit(config_file_name=config_file) + + dbdriver = InfrahubDatabase(driver=await get_db(retry=1)) + + error_badge = "[bold red]ERROR[/bold red]" + + async with dbdriver.start_session() as db: + # ---------------------------------------------------------- + # Initialize Schema and Registry + # ---------------------------------------------------------- + service = InfrahubServices(database=db, message_bus=BusSimulator(database=db)) + await initialize_registry(db=db) + + default_branch = 
registry.get_branch_from_registry(branch=registry.default_branch) + + registry.schema = SchemaManager() + schema = SchemaRoot(**internal_schema) + registry.schema.register_schema(schema=schema) + + # ---------------------------------------------------------- + # Load Current Schema from the database + # ---------------------------------------------------------- + schema_default_branch = await registry.schema.load_schema_from_db(db=db, branch=default_branch) + registry.schema.set_schema_branch(name=default_branch.name, schema=schema_default_branch) + branch_schema = registry.schema.get_schema_branch(name=registry.default_branch) + + candidate_schema = branch_schema.duplicate() + candidate_schema.load_schema(schema=SchemaRoot(**internal_schema)) + candidate_schema.load_schema(schema=SchemaRoot(**core_models)) + candidate_schema.load_schema(schema=SchemaRoot(**deprecated_models)) + candidate_schema.process() + + result = branch_schema.validate_update(other=candidate_schema) + if result.errors: + rprint(f"{error_badge} | Unable to update the schema, due to failed validations") + for error in result.errors: + rprint(error.to_string()) + raise typer.Exit(1) + + if not result.diff.all: + rprint("Core Schema Up to date, nothing to update") + raise typer.Exit(0) + + rprint("Core Schema has diff, will need to be updated") + if debug: + result.diff.print() + + # ---------------------------------------------------------- + # Validate if the new schema is valid with the content of the database + # ---------------------------------------------------------- + error_messages, _ = await schema_validators_checker( + branch=default_branch, schema=candidate_schema, constraints=result.constraints, service=service + ) + if error_messages: + rprint(f"{error_badge} | Unable to update the schema, due to failed validations") + for message in error_messages: + rprint(message) + raise typer.Exit(1) + + # ---------------------------------------------------------- + # Update the schema + # ---------------------------------------------------------- + origin_schema = branch_schema.duplicate() + + # Update the internal schema + schema_default_branch.load_schema(schema=SchemaRoot(**internal_schema)) + schema_default_branch.process() + registry.schema.set_schema_branch(name=default_branch.name, schema=schema_default_branch) + + async with db.start_transaction() as dbt: + await registry.schema.update_schema_branch( + schema=candidate_schema, + db=dbt, + branch=default_branch.name, + diff=result.diff, + limit=result.diff.all, + update_db=True, + ) + default_branch.update_schema_hash() + rprint("The Core Schema has been updated") + if debug: + rprint(f"New schema hash: {default_branch.active_schema_hash.main}") + await default_branch.save(db=dbt) + + # ---------------------------------------------------------- + # Run the migrations + # ---------------------------------------------------------- + error_messages = await schema_migrations_runner( + branch=default_branch, + new_schema=candidate_schema, + previous_schema=origin_schema, + migrations=result.migrations, + service=service, + ) + if error_messages: + rprint(f"{error_badge} | Some error(s) happened while running the schema migrations") + for message in error_messages: + rprint(message) + raise typer.Exit(1) + + +@app.command() +async def constraint( + action: ConstraintAction = typer.Argument(ConstraintAction.SHOW), + config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG"), +) -> None: + """Manage Database Constraints""" + 
config.load_and_exit(config_file_name=config_file) + dbdriver = InfrahubDatabase(driver=await get_db(retry=1)) manager: Optional[ConstraintManagerBase] = None @@ -152,55 +311,38 @@ async def _constraint(action: ConstraintAction) -> None: @app.command() -def init( - config_file: str = typer.Option( - "infrahub.toml", envvar="INFRAHUB_CONFIG", help="Location of the configuration file to use for Infrahub" - ), -) -> None: - """Erase the content of the database and initialize it with the core schema.""" - - logging.getLogger("neo4j").setLevel(logging.ERROR) - - config.load_and_exit(config_file_name=config_file) - - aiorun(_init()) - - -@app.command() -def load_test_data( - config_file: str = typer.Option( - "infrahub.toml", envvar="INFRAHUB_CONFIG", help="Location of the configuration file to use for Infrahub" - ), - dataset: str = "dataset01", +async def index( + action: IndexAction = typer.Argument(IndexAction.SHOW), + config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG"), ) -> None: - """Load test data into the database from the `test_data` directory.""" - - logging.getLogger("neo4j").setLevel(logging.ERROR) - + """Manage Database Indexes""" config.load_and_exit(config_file_name=config_file) - aiorun(_load_test_data(dataset=dataset)) + dbdriver = InfrahubDatabase(driver=await get_db(retry=1)) + dbdriver.manager.index.init(nodes=node_indexes, rels=rel_indexes) + if action == IndexAction.ADD: + await dbdriver.manager.index.add() + elif action == IndexAction.DROP: + await dbdriver.manager.index.drop() -@app.command() -def migrate( - check: bool = typer.Option(False, help="Check the state of the database without applying the migrations."), - config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG"), -) -> None: - """Check the current format of the internal graph and apply the necessary migrations""" + indexes = await dbdriver.manager.index.list() - config.load_and_exit(config_file_name=config_file) + console = Console() - aiorun(_migrate(check=check)) + table = Table(title="Database Indexes") + table.add_column("Name", justify="right", style="cyan", no_wrap=True) + table.add_column("Label") + table.add_column("Property") + table.add_column("Type") + table.add_column("Entity Type") -@app.command() -def constraint( - action: ConstraintAction = typer.Argument(ConstraintAction.SHOW), - config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG"), -) -> None: - """Manage Database Constraints""" + for item in indexes: + table.add_row( + item.name, item.label, ", ".join(item.properties), item.type.value.upper(), item.entity_type.value.upper() + ) - config.load_and_exit(config_file_name=config_file) + console.print(table) - aiorun(_constraint(action=action)) + await dbdriver.close() diff --git a/backend/infrahub/cli/events.py b/backend/infrahub/cli/events.py index 1b2d93fb25..0eff2f2a0c 100644 --- a/backend/infrahub/cli/events.py +++ b/backend/infrahub/cli/events.py @@ -1,24 +1,28 @@ import asyncio -import json -from asyncio import run as aiorun import typer +import ujson from aio_pika.abc import AbstractIncomingMessage +from infrahub_sdk.async_typer import AsyncTyper from rich import print as rprint from infrahub import config from infrahub.services import InfrahubServices from infrahub.services.adapters.message_bus.rabbitmq import RabbitMQMessageBus -app = typer.Typer() +app = AsyncTyper() async def print_event(event: AbstractIncomingMessage) -> None: - message = {"routing_key": event.routing_key, "message": json.loads(event.body)} + message = 
{"routing_key": event.routing_key, "message": ujson.loads(event.body)} rprint(message) -async def _listen(topic: str, config_file: str) -> None: +@app.command() +async def listen( + topic: str = "#", config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG") +) -> None: + """Listen to event in the Events bus and print them.""" config.load_and_exit(config_file) broker = RabbitMQMessageBus() service = InfrahubServices() @@ -32,9 +36,3 @@ async def _listen(topic: str, config_file: str) -> None: await queue.bind(exchange, routing_key=topic) print(f" Waiting for events matching the topic `{topic}`. To exit press CTRL+C") await asyncio.Future() - - -@app.command() -def listen(topic: str = "#", config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG")) -> None: - """Listen to event in the Events bus and print them.""" - aiorun(_listen(topic=topic, config_file=config_file)) diff --git a/backend/infrahub/cli/generate_schema.py b/backend/infrahub/cli/generate_schema.py deleted file mode 100644 index eb8c298cff..0000000000 --- a/backend/infrahub/cli/generate_schema.py +++ /dev/null @@ -1,31 +0,0 @@ -import json -from pathlib import Path - -import typer - -app = typer.Typer() - - -@app.callback() -def callback() -> None: - """ - Generate some Schema files used by Infrahub - """ - - -@app.command(name="schema") -def generate_schema( - output_file: Path = typer.Argument("infrahub_schema.schema.json"), -) -> None: - """Generate a the schema expected by infrahub for the schema `infrahubctl schema load`.""" - - from infrahub.api.schema import ( # pylint: disable=import-outside-toplevel - SchemaLoadAPI, - ) - - schema = SchemaLoadAPI.model_json_schema() - - schema["title"] = "InfrahubSchema" - - output_file.write_text(json.dumps(schema, indent=4)) - print(f"JSONSchema file saved in '{output_file}'") diff --git a/backend/infrahub/cli/git_agent.py b/backend/infrahub/cli/git_agent.py index 0b789b18ec..1e1301d7ce 100644 --- a/backend/infrahub/cli/git_agent.py +++ b/backend/infrahub/cli/git_agent.py @@ -1,15 +1,14 @@ +import asyncio import logging import signal -import sys -from asyncio import run as aiorun from typing import Any import typer -from infrahub_sdk import InfrahubClient +from infrahub_sdk import Config, InfrahubClient from prometheus_client import start_http_server from rich.logging import RichHandler -from infrahub import config +from infrahub import __version__, config from infrahub.components import ComponentType from infrahub.core.initialization import initialization from infrahub.database import InfrahubDatabase, get_db @@ -21,15 +20,17 @@ from infrahub.services import InfrahubServices from infrahub.services.adapters.cache.redis import RedisCache from infrahub.services.adapters.message_bus.rabbitmq import RabbitMQMessageBus +from infrahub.trace import configure_trace app = typer.Typer() log = get_logger() +shutdown_event = asyncio.Event() + def signal_handler(*args: Any, **kwargs: Any) -> None: # pylint: disable=unused-argument - print("Git Agent terminated by user.") - sys.exit(0) + shutdown_event.set() signal.signal(signal.SIGINT, signal_handler) @@ -63,9 +64,21 @@ async def _start(debug: bool, port: int) -> None: # initialize the Infrahub Client and query the list of branches to validate that the API is reacheable and the auth is working log.debug(f"Using Infrahub API at {config.SETTINGS.main.internal_address}") - client = await InfrahubClient.init(address=config.SETTINGS.main.internal_address, retry_on_failure=True, log=log) + client = InfrahubClient( + 
config=Config(address=config.SETTINGS.main.internal_address, retry_on_failure=True, log=log) + ) await client.branch.all() + # Initialize trace + if config.SETTINGS.trace.enable: + configure_trace( + service="infrahub-git-agent", + version=__version__, + exporter_type=config.SETTINGS.trace.exporter_type, + exporter_endpoint=config.SETTINGS.trace.exporter_endpoint, + exporter_protocol=config.SETTINGS.trace.exporter_protocol, + ) + # Initialize the lock initialize_lock() @@ -83,11 +96,19 @@ async def _start(debug: bool, port: int) -> None: async with service.database.start_session() as db: await initialization(db=db) + await service.component.refresh_schema_hash() + await initialize_git_agent(service=service) build_component_registry() - await service.message_bus.subscribe() + while not shutdown_event.is_set(): + await asyncio.sleep(1) + + log.info("Shutdown of Git agent requested") + + await service.shutdown() + log.info("All services stopped") @app.command() @@ -110,4 +131,4 @@ def start( config.load_and_exit(config_file_name=config_file) - aiorun(_start(debug=debug, port=port)) + asyncio.run(_start(debug=debug, port=port)) diff --git a/backend/infrahub/config.py b/backend/infrahub/config.py index 9fbd7b57e4..ea0c6d1c17 100644 --- a/backend/infrahub/config.py +++ b/backend/infrahub/config.py @@ -25,6 +25,14 @@ THIRTY_DAYS_IN_SECONDS = 3600 * 24 * 30 +def default_cors_allow_methods() -> List[str]: + return ["DELETE", "GET", "OPTIONS", "PATCH", "POST", "PUT"] + + +def default_cors_allow_headers() -> List[str]: + return ["accept", "authorization", "content-type", "user-agent", "x-csrftoken", "x-requested-with"] + + class StorageDriver(str, Enum): FileSystemStorage = "local" InfrahubS3ObjectStorage = "s3" @@ -150,6 +158,9 @@ class BrokerSettings(BaseSettings): maximum_message_retries: int = Field( default=10, description="The maximum number of retries that are attempted for failed messages" ) + maximum_concurrent_messages: int = Field( + default=2, description="The maximum number of concurrent messages fetched by each worker", ge=1 + ) virtualhost: str = Field(default="/", description="The virtual host to connect to") @property @@ -174,10 +185,21 @@ def service_port(self) -> int: class ApiSettings(BaseSettings): - cors_allow_origins: List[str] = ["*"] - cors_allow_credentials: bool = True - cors_allow_methods: List[str] = ["*"] - cors_allow_headers: List[str] = ["*"] + model_config = SettingsConfigDict(env_prefix="INFRAHUB_API_") + cors_allow_origins: List[str] = Field( + default_factory=list, description="A list of origins that are authorized to make cross-site HTTP requests" + ) + cors_allow_methods: List[str] = Field( + default_factory=default_cors_allow_methods, + description="A list of HTTP verbs that are allowed for the actual request", + ) + cors_allow_headers: List[str] = Field( + default_factory=default_cors_allow_headers, + description="The list of non-standard HTTP headers allowed in requests from the browser", + ) + cors_allow_credentials: bool = Field( + default=True, description="If True, cookies will be allowed to be included in cross-site HTTP requests" + ) class GitSettings(BaseSettings): @@ -196,6 +218,7 @@ class InitialSettings(BaseSettings): class MiscellaneousSettings(BaseSettings): + model_config = SettingsConfigDict(env_prefix="INFRAHUB_MISC_") print_query_details: bool = False start_background_runner: bool = True maximum_validator_execution_time: int = Field( @@ -256,35 +279,6 @@ class TraceSettings(BaseSettings): default=TraceTransportProtocol.GRPC, 
description="Protocol to be used for exporting traces" ) exporter_endpoint: Optional[str] = Field(default=None, description="OTLP endpoint for exporting traces") - exporter_port: Optional[int] = Field( - default=None, ge=1, le=65535, description="Specified if running on a non default port (4317)" - ) - - @property - def service_port(self) -> int: - if self.exporter_protocol == TraceTransportProtocol.GRPC: - default_port = 4317 - elif self.exporter_protocol == TraceTransportProtocol.HTTP_PROTOBUF: - default_port = 4318 - else: - default_port = 4317 - - return self.exporter_port or default_port - - @property - def trace_endpoint(self) -> Optional[str]: - if not self.exporter_endpoint: - return None - if self.insecure: - scheme = "http://" - else: - scheme = "https://" - endpoint = str(self.exporter_endpoint) + ":" + str(self.service_port) - - if self.exporter_protocol == TraceTransportProtocol.HTTP_PROTOBUF: - endpoint += "/v1/traces" - - return scheme + endpoint @dataclass diff --git a/backend/infrahub/core/__init__.py b/backend/infrahub/core/__init__.py index 6562078da5..5bfc612dac 100644 --- a/backend/infrahub/core/__init__.py +++ b/backend/infrahub/core/__init__.py @@ -1,5 +1,5 @@ from __future__ import annotations -from infrahub.core.registry import get_branch, get_branch_from_registry, registry +from infrahub.core.registry import registry -__all__ = ["get_branch_from_registry", "get_branch", "registry"] +__all__ = ["registry"] diff --git a/backend/infrahub/core/account.py b/backend/infrahub/core/account.py index a894a56eb7..fd7997ab37 100644 --- a/backend/infrahub/core/account.py +++ b/backend/infrahub/core/account.py @@ -2,10 +2,10 @@ from typing import TYPE_CHECKING, Optional, Tuple, Union -from infrahub.core import get_branch, registry from infrahub.core.constants import InfrahubKind from infrahub.core.manager import NodeManager from infrahub.core.query import Query +from infrahub.core.registry import registry if TYPE_CHECKING: from infrahub.core.branch import Branch @@ -77,7 +77,7 @@ def get_account_role(self) -> str: async def validate_token( token, db: InfrahubDatabase, branch: Optional[Union[Branch, str]] = None, at=None ) -> Tuple[Optional[str], str]: - branch = await get_branch(db=db, branch=branch) + branch = await registry.get_branch(db=db, branch=branch) query = await AccountTokenValidateQuery.init(db=db, branch=branch, token=token, at=at) await query.execute(db=db) return query.get_account_id(), query.get_account_role() diff --git a/backend/infrahub/core/attribute.py b/backend/infrahub/core/attribute.py index 7d47e37277..ea32ed6145 100644 --- a/backend/infrahub/core/attribute.py +++ b/backend/infrahub/core/attribute.py @@ -3,7 +3,7 @@ import ipaddress import re from enum import Enum -from typing import TYPE_CHECKING, Any, List, Optional, Tuple, Type, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union import ujson from infrahub_sdk import UUIDT @@ -12,7 +12,7 @@ from infrahub import config from infrahub.core import registry -from infrahub.core.constants import BranchSupportType, RelationshipStatus +from infrahub.core.constants import NULL_VALUE, AttributeDBNodeType, BranchSupportType, RelationshipStatus from infrahub.core.property import ( FlagPropertyMixin, NodePropertyData, @@ -25,17 +25,15 @@ AttributeUpdateNodePropertyQuery, AttributeUpdateValueQuery, ) -from infrahub.core.query.node import NodeListGetAttributeQuery +from infrahub.core.query.node import AttributeFromDB, NodeListGetAttributeQuery from infrahub.core.timestamp import 
Timestamp -from infrahub.core.utils import add_relationship, update_relationships_to +from infrahub.core.utils import add_relationship, convert_ip_to_binary_str, update_relationships_to from infrahub.exceptions import ValidationError from infrahub.helpers import hash_password from .constants.relationship_label import RELATIONSHIP_TO_NODE_LABEL, RELATIONSHIP_TO_VALUE_LABEL if TYPE_CHECKING: - from uuid import UUID - from infrahub.core.branch import Branch from infrahub.core.node import Node from infrahub.core.schema import AttributeSchema @@ -52,11 +50,13 @@ class AttributeCreateData(BaseModel): branch_level: int branch_support: str status: str - value: Any = None + content: Dict[str, Any] + is_default: bool is_protected: bool is_visible: bool source_prop: List[ValuePropertyData] = Field(default_factory=list) owner_prop: List[NodePropertyData] = Field(default_factory=list) + node_type: AttributeDBNodeType = AttributeDBNodeType.DEFAULT class BaseAttribute(FlagPropertyMixin, NodePropertyMixin): @@ -65,21 +65,23 @@ class BaseAttribute(FlagPropertyMixin, NodePropertyMixin): _rel_to_node_label: str = RELATIONSHIP_TO_NODE_LABEL _rel_to_value_label: str = RELATIONSHIP_TO_VALUE_LABEL - def __init__( + def __init__( # pylint: disable=too-many-branches self, name: str, schema: AttributeSchema, branch: Branch, at: Timestamp, node: Node, - id: UUID = None, + id: Optional[str] = None, db_id: Optional[str] = None, - data: Union[dict, str] = None, - updated_at: Union[Timestamp, str] = None, + data: Optional[Union[dict, str, AttributeFromDB]] = None, + updated_at: Optional[Union[Timestamp, str]] = None, + is_default: bool = False, + is_from_profile: bool = False, **kwargs, ): - self.id: UUID = id - self.db_id: str = db_id + self.id = id + self.db_id = db_id self.updated_at = updated_at self.name = name @@ -87,40 +89,57 @@ def __init__( self.schema = schema self.branch = branch self.at = at + self.is_default = is_default + self.is_from_profile = is_from_profile self._init_node_property_mixin(kwargs) self._init_flag_property_mixin(kwargs) self.value = None - if data is not None and isinstance(data, dict): - self.value = self.from_db(data.get("value", None)) + if isinstance(data, AttributeFromDB): + self.load_from_db(data=data) + + elif isinstance(data, dict): + self.value = data.get("value") + + if "is_default" in data: + self.is_default = data.get("is_default") + if "is_from_profile" in data: + self.is_from_profile = data.get("is_from_profile") - fields_to_extract_from_data = ["id", "db_id"] + self._flag_properties + self._node_properties + fields_to_extract_from_data = ["id"] + self._flag_properties + self._node_properties for field_name in fields_to_extract_from_data: setattr(self, field_name, data.get(field_name, None)) if not self.updated_at and "updated_at" in data: self.updated_at = Timestamp(data.get("updated_at")) - - elif data is not None: - self.value = self.from_db(data) - - self.value = self.schema.convert_to_attribute_enum(self.value) + elif data is None: + self.is_default = True + else: + self.value = data # Assign default values if self.value is None and self.schema.default_value is not None: self.value = self.schema.default_value + self.is_default = True if self.value is not None: self.validate(value=self.value, name=self.name, schema=self.schema) + if self.is_enum and self.value: + self.value = self.schema.convert_value_to_enum(self.value) + if self.is_protected is None: self.is_protected = False if self.is_visible is None: self.is_visible = True + @property + def is_enum(self) -> bool: + 
return bool(self.schema.enum) + def get_branch_based_on_support_type(self) -> Branch: """If the attribute is branch aware, return the Branch object associated with this attribute If the attribute is branch agnostic return the Global Branch @@ -140,6 +159,16 @@ def __init_subclass__(cls, **kwargs): def get_kind(self) -> str: return self.schema.kind + def get_value(self) -> Any: + if isinstance(self.value, Enum): + return self.value.value + return self.value + + def set_default_value(self) -> None: + self.value = self.schema.default_value + if self.is_enum and self.value: + self.value = self.schema.convert_value_to_enum(self.value) + @classmethod def validate(cls, value: Any, name: str, schema: AttributeSchema) -> bool: if value is None and schema.optional is False: @@ -165,9 +194,8 @@ def validate_format(cls, value: Any, name: str, schema: AttributeSchema) -> None ValidationError: Format of the attribute value is not valid """ value_to_check = value - enum_value = schema.convert_to_attribute_enum(value) - if isinstance(enum_value, Enum): - value_to_check = enum_value.value + if schema.enum and isinstance(value, Enum): + value_to_check = value.value if not isinstance(value_to_check, cls.type): # pylint: disable=isinstance-second-argument-not-valid-type raise ValidationError({name: f"{name} is not of type {schema.kind}"}) @@ -183,7 +211,6 @@ def validate_content(cls, value: Any, name: str, schema: AttributeSchema) -> Non Raises: ValidationError: Content of the attribute value is not valid """ - if schema.regex: try: is_valid = re.match(pattern=schema.regex, string=str(value)) @@ -204,44 +231,61 @@ def validate_content(cls, value: Any, name: str, schema: AttributeSchema) -> Non raise ValidationError({name: f"{value} must have a maximum length of {schema.max_length!r}"}) if schema.enum: - if config.SETTINGS.experimental_features.graphql_enums: - try: - schema.convert_to_attribute_enum(value) - except ValueError as exc: - raise ValidationError({name: f"{value} must be one of {schema.enum!r}"}) from exc - elif value not in schema.enum: - raise ValidationError({name: f"{value} must be one of {schema.enum!r}"}) - - def to_db(self): + try: + schema.convert_value_to_enum(value) + except ValueError as exc: + raise ValidationError({name: f"{value} must be one of {schema.enum!r}"}) from exc + + def to_db(self) -> Dict[str, Any]: + """Return the properties of the AttributeValue node in Dict format.""" + data: Dict[str, Any] = {"is_default": self.is_default} if self.value is None: - return "NULL" + data["value"] = NULL_VALUE + else: + data["value"] = self.serialize_value() - return self.serialize(self.value) + return data - def from_db(self, value: Any): - if value == "NULL": - return None + def load_from_db(self, data: AttributeFromDB) -> None: + self.value = self.value_from_db(data=data) + self.is_default = data.is_default + self.is_from_profile = data.is_from_profile + + self.id = data.attr_uuid + self.db_id = data.attr_id + + for prop_name in self._flag_properties: + if prop_name in data.flag_properties: + setattr(self, prop_name, data.flag_properties[prop_name]) - return self.deserialize(value) + for prop_name in self._node_properties: + if prop_name in data.node_properties: + setattr(self, prop_name, data.node_properties[prop_name].uuid) - def serialize(self, value: Any) -> Any: + if not self.updated_at and data.updated_at: + self.updated_at = Timestamp(data.updated_at) + + def value_from_db(self, data: AttributeFromDB) -> Any: + if data.value == NULL_VALUE: + return None + return 
self.deserialize_value(data=data) + + def serialize_value(self) -> Any: """Serialize the value before storing it in the database.""" - value = self.schema.convert_to_attribute_enum(value) - if isinstance(value, Enum): - return value.value - return value + if isinstance(self.value, Enum): + return self.value.value + return self.value - def deserialize(self, value: Any) -> Any: + def deserialize_value(self, data: AttributeFromDB) -> Any: """Deserialize the value coming from the database.""" - value = self.schema.convert_to_attribute_enum(value) - return value + return data.value async def save(self, db: InfrahubDatabase, at: Optional[Timestamp] = None) -> bool: """Create or Update the Attribute in the database.""" save_at = Timestamp(at) - if not self.id: + if not self.id or self.is_from_profile: return False return await self._update(at=save_at, db=db) @@ -265,12 +309,12 @@ async def delete(self, db: InfrahubDatabase, at: Optional[Timestamp] = None) -> # Check all the relationship and update the one that are in the same branch rel_ids_to_update = set() for result in results: - properties_to_delete.append((result.get("r2").type, result.get("ap").element_id)) + properties_to_delete.append((result.get_rel("r2").type, result.get_node("ap").element_id)) await add_relationship( src_node_id=self.db_id, - dst_node_id=result.get("ap").element_id, - rel_type=result.get("r2").type, + dst_node_id=result.get_node("ap").element_id, + rel_type=result.get_rel("r2").type, branch_name=branch.name, branch_level=branch.hierarchy_level, at=delete_at, @@ -313,6 +357,14 @@ async def _update(self, db: InfrahubDatabase, at: Optional[Timestamp] = None) -> # Validate if the value is still correct, will raise a ValidationError if not self.validate(value=self.value, name=self.name, schema=self.schema) + # Check if the current value is still the default one + if ( + self.is_default + and (self.schema.default_value is not None and self.schema.default_value != self.value) + or (self.schema.default_value is None and self.value is not None) + ): + self.is_default = False + query = await NodeListGetAttributeQuery.init( db=db, ids=[self.node.id], @@ -323,20 +375,18 @@ async def _update(self, db: InfrahubDatabase, at: Optional[Timestamp] = None) -> include_owner=True, ) await query.execute(db=db) - current_attr = query.get_result_by_id_and_name(self.node.id, self.name) + current_attr_data, current_attr_result = query.get_result_by_id_and_name(self.node.id, self.name) branch = self.get_branch_based_on_support_type() # ---------- Update the Value ---------- - current_value = self.from_db(current_attr.get("av").get("value")) - - if current_value != self.value: + if current_attr_data.content != self.to_db(): # Create the new AttributeValue and update the existing relationship query = await AttributeUpdateValueQuery.init(db=db, attr=self, at=update_at) await query.execute(db=db) # TODO check that everything went well - rel = current_attr.get("r2") + rel = current_attr_result.get_rel("r2") if rel.get("branch") == branch.name: await update_relationships_to([rel.element_id], to=update_at, db=db) @@ -346,26 +396,27 @@ async def _update(self, db: InfrahubDatabase, at: Optional[Timestamp] = None) -> ("is_protected", "isp", "rel_isp"), ) - for flag_name, node_name, rel_name in SUPPORTED_FLAGS: - if current_attr.get(node_name).get("value") != getattr(self, flag_name): + for flag_name, _, rel_name in SUPPORTED_FLAGS: + if current_attr_data.flag_properties[flag_name] != getattr(self, flag_name): query = await 
AttributeUpdateFlagQuery.init(db=db, attr=self, at=update_at, flag_name=flag_name) await query.execute(db=db) - rel = current_attr.get(rel_name) + rel = current_attr_result.get(rel_name) if rel.get("branch") == branch.name: await update_relationships_to([rel.element_id], to=update_at, db=db) # ---------- Update the Node Properties ---------- - for prop in self._node_properties: - if getattr(self, f"{prop}_id") and not ( - current_attr.get(prop) and current_attr.get(prop).get("uuid") == getattr(self, f"{prop}_id") + for prop_name in self._node_properties: + if getattr(self, f"{prop_name}_id") and not ( + prop_name in current_attr_data.node_properties + and current_attr_data.node_properties[prop_name].uuid == getattr(self, f"{prop_name}_id") ): query = await AttributeUpdateNodePropertyQuery.init( - db=db, attr=self, at=update_at, prop_name=prop, prop_id=getattr(self, f"{prop}_id") + db=db, attr=self, at=update_at, prop_name=prop_name, prop_id=getattr(self, f"{prop_name}_id") ) await query.execute(db=db) - rel = current_attr.get(f"rel_{prop}") + rel = current_attr_result.get(f"rel_{prop_name}") if rel and rel.get("branch") == branch.name: await update_relationships_to([rel.element_id], to=update_at, db=db) @@ -426,7 +477,10 @@ async def to_graphql( field = getattr(self, field_name) if field_name == "value" and isinstance(field, Enum): - field = field.name + if config.SETTINGS.experimental_features.graphql_enums: + field = field.name + else: + field = field.value if isinstance(field, str): response[field_name] = self._filter_sensitive(value=field, filter_sensitive=filter_sensitive) elif isinstance(field, (int, bool, dict, list)): @@ -446,11 +500,25 @@ async def from_graphql(self, data: dict) -> bool: changed = False if "value" in data: - value_to_set = self.schema.convert_to_attribute_enum(data["value"]) + if self.is_enum: + value_to_set = self.schema.convert_value_to_enum(data["value"]) + else: + value_to_set = data["value"] if value_to_set != self.value: self.value = value_to_set changed = True + if changed and self.is_from_profile: + self.is_from_profile = False + self.clear_source() + + if "is_default" in data and not self.is_default: + self.is_default = True + changed = True + + if "value" not in data: + self.set_default_value() + if "is_protected" in data and data["is_protected"] != self.is_protected: self.is_protected = data["is_protected"] changed = True @@ -467,10 +535,14 @@ async def from_graphql(self, data: dict) -> bool: return changed + def get_db_node_type(self): + return AttributeDBNodeType.DEFAULT + def get_create_data(self) -> AttributeCreateData: # pylint: disable=no-member branch = self.get_branch_based_on_support_type() data = AttributeCreateData( + node_type=self.get_db_node_type(), uuid=str(UUIDT()), name=self.name, type=self.get_kind(), @@ -478,7 +550,8 @@ def get_create_data(self) -> AttributeCreateData: status="active", branch_level=self.branch.hierarchy_level, branch_support=self.schema.branch.value, - value=self.to_db(), + content=self.to_db(), + is_default=self.is_default, is_protected=self.is_protected, is_visible=self.is_visible, ) @@ -506,10 +579,9 @@ class String(BaseAttribute): class HashedPassword(BaseAttribute): type = str - def serialize(self, value: str) -> str: + def serialize_value(self) -> str: """Serialize the value before storing it in the database.""" - - return hash_password(value) + return hash_password(str(self.value)) class Integer(BaseAttribute): @@ -526,20 +598,19 @@ class Dropdown(BaseAttribute): @property def color(self) -> str: """Return the 
color for the current value""" - color = "" if self.schema.choices: selected = [choice for choice in self.schema.choices if choice.name == self.value] - if selected: - color = selected[0].color + if selected and selected[0].color: + return selected[0].color - return color + return "" @property def description(self) -> str: """Return the description for the current value""" if self.schema.choices: selected = [choice for choice in self.schema.choices if choice.name == self.value] - if selected: + if selected and selected[0].description: return selected[0].description return "" @@ -547,13 +618,12 @@ def description(self) -> str: @property def label(self) -> str: """Return the label for the current value""" - label = "" if self.schema.choices: selected = [choice for choice in self.schema.choices if choice.name == self.value] - if selected: - label = selected[0].label + if selected and selected[0].label: + return selected[0].label - return label + return "" @classmethod def validate_content(cls, value: Any, name: str, schema: AttributeSchema) -> None: @@ -578,61 +648,85 @@ def validate_format(cls, value: Any, name: str, schema: AttributeSchema) -> None class IPNetwork(BaseAttribute): type = str + @property + def obj(self) -> Union[ipaddress.IPv4Network, ipaddress.IPv6Network]: + """Return an ipaddress network object.""" + if not self.value: + raise ValueError("value for IPNetwork must be defined") + return ipaddress.ip_network(str(self.value)) + @property def broadcast_address(self) -> Optional[str]: """Return the broadcast address of the ip network.""" if not self.value: return None - return str(ipaddress.ip_network(str(self.value)).broadcast_address) + return str(self.obj.broadcast_address) @property def hostmask(self) -> Optional[str]: """Return the hostmask of the ip network.""" if not self.value: return None - return str(ipaddress.ip_network(str(self.value)).hostmask) + return str(self.obj.hostmask) @property def netmask(self) -> Optional[str]: """Return the netmask of the ip network.""" if not self.value: return None - return str(ipaddress.ip_network(str(self.value)).netmask) + return str(self.obj.netmask) + + @property + def network_address(self) -> Optional[str]: + """Return the network address of the ip network.""" + if not self.value: + return None + return str(self.obj.network_address) @property - def prefixlen(self) -> Optional[str]: + def network_address_integer(self) -> int: + """Return the network address of the ip network in integer format.""" + return int(self.obj.network_address) + + @property + def network_address_binary(self) -> str: + """Return the network address of the ip network in binary format.""" + return convert_ip_to_binary_str(obj=self.obj) + + @property + def prefixlen(self) -> Optional[int]: """Return the prefix length the ip network.""" if not self.value: return None - return str(ipaddress.ip_network(str(self.value)).prefixlen) + return ipaddress.ip_network(str(self.value)).prefixlen @property def num_addresses(self) -> Optional[int]: """Return the number of possible addresses in the ip network.""" if not self.value: return None - return int(ipaddress.ip_network(str(self.value)).num_addresses) + return ipaddress.ip_network(str(self.value)).num_addresses @property def version(self) -> Optional[int]: """Return the IP version of the ip network.""" if not self.value: return None - return int(ipaddress.ip_network(str(self.value)).version) + return ipaddress.ip_network(str(self.value)).version @property def with_hostmask(self) -> Optional[str]: """Return the network ip and 
the associated hostmask of the ip network.""" if not self.value: return None - return str(ipaddress.ip_network(str(self.value)).with_hostmask) + return ipaddress.ip_network(str(self.value)).with_hostmask @property def with_netmask(self) -> Optional[str]: """Return the network ip and the associated netmask of the ip network.""" if not self.value: return None - return str(ipaddress.ip_network(str(self.value)).with_netmask) + return ipaddress.ip_network(str(self.value)).with_netmask @classmethod def validate_format(cls, value: Any, name: str, schema: AttributeSchema) -> None: @@ -653,70 +747,103 @@ def validate_format(cls, value: Any, name: str, schema: AttributeSchema) -> None except ValueError as exc: raise ValidationError({name: f"{value} is not a valid {schema.kind}"}) from exc - def serialize(self, value: Any) -> Any: + def serialize_value(self) -> str: """Serialize the value before storing it in the database.""" - return ipaddress.ip_network(value).with_prefixlen + return ipaddress.ip_network(str(self.value)).with_prefixlen + + def get_db_node_type(self): + if self.value is not None: + return AttributeDBNodeType.IPNETWORK + return AttributeDBNodeType.DEFAULT + + def to_db(self) -> Dict[str, Any]: + data = super().to_db() + + if self.value is not None: + data["version"] = self.version + data["binary_address"] = self.network_address_binary + data["prefixlen"] = self.prefixlen + # data["num_addresses"] = self.num_addresses + + return data class IPHost(BaseAttribute): type = str + @property + def obj(self) -> Union[ipaddress.IPv4Interface, ipaddress.IPv6Interface]: + """Return an ipaddress interface object.""" + if not self.value: + raise ValueError("value for IPHost must be defined") + return ipaddress.ip_interface(str(self.value)) + @property def ip(self) -> Optional[str]: """Return the ip adress without a prefix or subnet mask.""" if not self.value: return None - return str(ipaddress.ip_interface(str(self.value)).ip) + return str(self.obj.ip) @property def hostmask(self) -> Optional[str]: """Return the hostmask of the ip address.""" if not self.value: return None - return str(ipaddress.ip_interface(str(self.value)).hostmask) + return str(self.obj.hostmask) @property def netmask(self) -> Optional[str]: """Return the netmask of the ip address.""" if not self.value: return None - return str(ipaddress.ip_interface(str(self.value)).netmask) + return str(self.obj.netmask) @property def network(self) -> Optional[str]: """Return the network encapsuling the ip address.""" if not self.value: return None - return str(ipaddress.ip_interface(str(self.value)).network) + return str(self.obj.network) @property - def prefixlen(self) -> Optional[str]: + def prefixlen(self) -> Optional[int]: """Return the prefix length of the ip address.""" if not self.value: return None - return str(ipaddress.ip_interface(str(self.value))._prefixlen) + return self.obj.network.prefixlen @property def version(self) -> Optional[int]: """Return the IP version of the ip address.""" if not self.value: return None - return int(ipaddress.ip_interface(str(self.value)).version) + return self.obj.version @property def with_hostmask(self) -> Optional[str]: """Return the ip address and the associated hostmask of the ip address.""" if not self.value: return None - return str(ipaddress.ip_interface(str(self.value)).with_hostmask) + return self.obj.with_hostmask @property def with_netmask(self) -> Optional[str]: """Return the ip address and the associated netmask of the ip address.""" if not self.value: return None - 
return str(ipaddress.ip_interface(str(self.value)).with_netmask) + return self.obj.with_netmask + + @property + def ip_integer(self) -> int: + """Return the ip address in integer format.""" + return int(self.obj) + + @property + def ip_binary(self) -> str: + """Return the ip address in binary format.""" + return convert_ip_to_binary_str(obj=self.obj) @classmethod def validate_format(cls, value: Any, name: str, schema: AttributeSchema) -> None: @@ -737,37 +864,50 @@ def validate_format(cls, value: Any, name: str, schema: AttributeSchema) -> None except ValueError as exc: raise ValidationError({name: f"{value} is not a valid {schema.kind}"}) from exc - def serialize(self, value: Any) -> Any: + def serialize_value(self) -> str: """Serialize the value before storing it in the database.""" - return ipaddress.ip_interface(value).with_prefixlen + return ipaddress.ip_interface(str(self.value)).with_prefixlen + + def get_db_node_type(self): + if self.value is not None: + return AttributeDBNodeType.IPHOST + return AttributeDBNodeType.DEFAULT + + def to_db(self) -> Dict[str, Any]: + data = super().to_db() + + if self.value is not None: + data["version"] = self.version + data["binary_address"] = self.ip_binary + data["prefixlen"] = self.prefixlen + + return data class ListAttribute(BaseAttribute): type = list - def serialize(self, value: Any) -> Any: + def serialize_value(self) -> str: """Serialize the value before storing it in the database.""" + return ujson.dumps(self.value) - return ujson.dumps(value) - - def deserialize(self, value: Any) -> Any: + def deserialize_value(self, data: AttributeFromDB) -> Any: """Deserialize the value (potentially) coming from the database.""" - if isinstance(value, (str, bytes)): - return ujson.loads(value) - return value + if isinstance(data.value, (str, bytes)): + return ujson.loads(data.value) + return data.value class JSONAttribute(BaseAttribute): type = (dict, list) - def serialize(self, value: Any) -> Any: + def serialize_value(self) -> str: """Serialize the value before storing it in the database.""" + return ujson.dumps(self.value) - return ujson.dumps(value) - - def deserialize(self, value: Any) -> Any: + def deserialize_value(self, data: AttributeFromDB) -> Any: """Deserialize the value (potentially) coming from the database.""" - if value and isinstance(value, (str, bytes)): - return ujson.loads(value) - return value + if data.value and isinstance(data.value, (str, bytes)): + return ujson.loads(data.value) + return data.value diff --git a/backend/infrahub/core/branch.py b/backend/infrahub/core/branch.py index 67a0b11e16..a00d2465c2 100644 --- a/backend/infrahub/core/branch.py +++ b/backend/infrahub/core/branch.py @@ -3,8 +3,7 @@ import re from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union -from pydantic import Field as FieldV2 -from pydantic import field_validator +from pydantic import Field, field_validator from infrahub.core.constants import ( GLOBAL_BRANCH_NAME, @@ -17,7 +16,7 @@ RebaseBranchDeleteRelationshipQuery, RebaseBranchUpdateRelationshipQuery, ) -from infrahub.core.registry import get_branch_from_registry, registry +from infrahub.core.registry import registry from infrahub.core.timestamp import Timestamp from infrahub.exceptions import BranchNotFoundError, InitializationError, ValidationError @@ -26,19 +25,19 @@ class Branch(StandardNode): # pylint: disable=too-many-public-methods - name: str = FieldV2( + name: str = Field( max_length=250, min_length=3, description="Name of the branch (git ref standard)", 
validate_default=True ) status: str = "OPEN" # OPEN, CLOSED description: str = "" origin_branch: str = "main" - branched_from: Optional[str] = FieldV2(default=None, validate_default=True) + branched_from: Optional[str] = Field(default=None, validate_default=True) hierarchy_level: int = 2 - created_at: Optional[str] = FieldV2(default=None, validate_default=True) + created_at: Optional[str] = Field(default=None, validate_default=True) is_default: bool = False is_global: bool = False is_protected: bool = False - sync_with_git: bool = FieldV2( + sync_with_git: bool = Field( default=True, description="Indicate if the branch should be extended to Git and if Infrahub should merge the branch in Git as part of a proposed change", ) @@ -146,7 +145,7 @@ def get_origin_branch(self) -> Optional[Branch]: if not self.origin_branch or self.origin_branch == self.name: return None - return get_branch_from_registry(branch=self.origin_branch) + return registry.get_branch_from_registry(branch=self.origin_branch) def get_branches_in_scope(self) -> List[str]: """Return the list of all the branches that are constituing this branch. diff --git a/backend/infrahub/core/constants/__init__.py b/backend/infrahub/core/constants/__init__.py index e79d7fbe1e..fae83e697e 100644 --- a/backend/infrahub/core/constants/__init__.py +++ b/backend/infrahub/core/constants/__init__.py @@ -14,6 +14,8 @@ GLOBAL_BRANCH_NAME = "-global-" +DEFAULT_IP_NAMESPACE = "default" + RESERVED_BRANCH_NAMES = [GLOBAL_BRANCH_NAME] RESERVED_ATTR_REL_NAMES = [ @@ -31,6 +33,8 @@ RESERVED_ATTR_GEN_NAMES = ["type"] +NULL_VALUE = "NULL" + class PermissionLevel(enum.Flag): READ = 1 @@ -70,6 +74,11 @@ class BranchConflictKeep(InfrahubStringEnum): SOURCE = "source" +class AllowOverrideType(InfrahubStringEnum): + NONE = "none" + ANY = "any" + + class ContentType(InfrahubStringEnum): APPLICATION_JSON = "application/json" TEXT_PLAIN = "text/plain" @@ -78,6 +87,7 @@ class ContentType(InfrahubStringEnum): class CheckType(InfrahubStringEnum): ARTIFACT = "artifact" DATA = "data" + GENERATOR = "generator" REPOSITORY = "repository" SCHEMA = "schema" TEST = "test" @@ -92,6 +102,13 @@ class DiffAction(InfrahubStringEnum): UNCHANGED = "unchanged" +class GeneratorInstanceStatus(InfrahubStringEnum): + ERROR = "Error" + PENDING = "Pending" + PROCESSING = "Processing" + READY = "Ready" + + class MutationAction(InfrahubStringEnum): ADDED = "added" REMOVED = "removed" @@ -183,6 +200,7 @@ class RelationshipKind(InfrahubStringEnum): PARENT = "Parent" GROUP = "Group" HIERARCHY = "Hierarchy" + PROFILE = "Profile" class RelationshipStatus(InfrahubStringEnum): @@ -210,6 +228,11 @@ class RelationshipHierarchyDirection(InfrahubStringEnum): DESCENDANTS = "descendants" +class RelationshipDeleteBehavior(InfrahubStringEnum): + NO_ACTION = "no-action" + CASCADE = "cascade" + + class Severity(InfrahubStringEnum): SUCCESS = "success" INFO = "info" @@ -236,6 +259,12 @@ class ValidatorState(InfrahubStringEnum): COMPLETED = "completed" +class AttributeDBNodeType(InfrahubStringEnum): + DEFAULT = "default" + IPHOST = "iphost" + IPNETWORK = "ipnetwork" + + RESTRICTED_NAMESPACES: List[str] = [ "Account", "Branch", @@ -247,6 +276,7 @@ class ValidatorState(InfrahubStringEnum): "Internal", "Lineage", "Schema", + "Profile", ] NODE_NAME_REGEX = r"^[A-Z][a-zA-Z0-9]+$" diff --git a/backend/infrahub/core/constants/infrahubkind.py b/backend/infrahub/core/constants/infrahubkind.py index da0e7c7cd4..23384c2b92 100644 --- a/backend/infrahub/core/constants/infrahubkind.py +++ 
b/backend/infrahub/core/constants/infrahubkind.py @@ -14,12 +14,23 @@ DATAVALIDATOR = "CoreDataValidator" FILECHECK = "CoreFileCheck" FILETHREAD = "CoreFileThread" +GENERATORCHECK = "CoreGeneratorCheck" +GENERATORDEFINITION = "CoreGeneratorDefinition" +GENERATORINSTANCE = "CoreGeneratorInstance" +GENERATORVALIDATOR = "CoreGeneratorValidator" +GENERATORGROUP = "CoreGeneratorGroup" GENERICGROUP = "CoreGroup" GRAPHQLQUERY = "CoreGraphQLQuery" GRAPHQLQUERYGROUP = "CoreGraphQLQueryGroup" +IPNAMESPACE = "BuiltinIPNamespace" +IPADDRESS = "BuiltinIPAddress" +IPPREFIX = "BuiltinIPPrefix" +NAMESPACE = "IpamNamespace" +NODE = "CoreNode" LINEAGEOWNER = "LineageOwner" LINEAGESOURCE = "LineageSource" OBJECTTHREAD = "CoreObjectThread" +PROFILE = "CoreProfile" PROPOSEDCHANGE = "CoreProposedChange" REFRESHTOKEN = "InternalRefreshToken" REPOSITORY = "CoreRepository" diff --git a/backend/infrahub/core/definitions.py b/backend/infrahub/core/definitions.py index 05bfbf1925..26ed664900 100644 --- a/backend/infrahub/core/definitions.py +++ b/backend/infrahub/core/definitions.py @@ -1,21 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable - -if TYPE_CHECKING: - from infrahub.core.branch import Branch - from infrahub.database import InfrahubDatabase - - -@runtime_checkable -class Brancher(Protocol): - @classmethod - async def get_by_name(cls, name: str, db: InfrahubDatabase) -> Branch: - raise NotImplementedError() - - @classmethod - def isinstance(cls, obj: Any) -> bool: - return isinstance(obj, cls) +from typing import Protocol, runtime_checkable @runtime_checkable diff --git a/backend/infrahub/core/diff/branch_differ.py b/backend/infrahub/core/diff/branch_differ.py index 9fe84326db..bdcfa9c1a2 100644 --- a/backend/infrahub/core/diff/branch_differ.py +++ b/backend/infrahub/core/diff/branch_differ.py @@ -120,9 +120,7 @@ def __init__( raise DiffRangeValidationError("diff_to must be later than diff_from") # Results organized by Branch - self._results: Dict[str, dict] = defaultdict( - lambda: {"nodes": {}, "rels": defaultdict(lambda: {}), "files": {}} - ) + self._results: Dict[str, dict] = defaultdict(lambda: {"nodes": {}, "rels": defaultdict(dict), "files": {}}) self._calculated_diff_nodes_at: Optional[Timestamp] = None self._calculated_diff_rels_at: Optional[Timestamp] = None @@ -302,7 +300,7 @@ async def get_modified_paths_graph(self) -> Dict[str, Set[ModifiedPath]]: Path for a relationship : ("relationships", rel_name, rel_id, prop_type Returns: - Dict[str, set]: Returns a dictionnary by branch with a set of paths + Dict[str, set]: Returns a Dictionary by branch with a set of paths """ paths: Dict[str, Set[ModifiedPath]] = {} @@ -660,9 +658,9 @@ async def _calculate_diff_nodes(self) -> None: self._results[branch_name]["nodes"][node_id].attributes[attr_name].origin_rel_id = result.get( "r1" ).element_id - self._results[branch_name]["nodes"][node_id].attributes[attr_name].properties[ - prop_type - ] = PropertyDiffElement(**item) + self._results[branch_name]["nodes"][node_id].attributes[attr_name].properties[prop_type] = ( + PropertyDiffElement(**item) + ) self._calculated_diff_nodes_at = Timestamp() diff --git a/backend/infrahub/core/diff/ipam_diff_parser.py b/backend/infrahub/core/diff/ipam_diff_parser.py new file mode 100644 index 0000000000..94ade9b6aa --- /dev/null +++ b/backend/infrahub/core/diff/ipam_diff_parser.py @@ -0,0 +1,150 @@ +from dataclasses import dataclass +from typing import Optional + +from infrahub.core import registry +from 
infrahub.core.constants import DiffAction, InfrahubKind +from infrahub.core.constants.relationship_label import RELATIONSHIP_TO_VALUE_LABEL +from infrahub.core.diff.branch_differ import BranchDiffer +from infrahub.core.diff.model import NodeDiffElement, RelationshipDiffElement +from infrahub.core.ipam.model import IpamNodeDetails +from infrahub.core.manager import NodeManager +from infrahub.database import InfrahubDatabase + + +@dataclass +class ChangedIpamNodeDetails: + node_uuid: str + is_address: bool + is_delete: bool + namespace_id: Optional[str] + ip_value: Optional[str] + + +class IpamDiffParser: + def __init__( + self, differ: BranchDiffer, source_branch_name: str, target_branch_name: str, db: InfrahubDatabase + ) -> None: + self.source_branch_name = source_branch_name + self.target_branch_name = target_branch_name + self.differ = differ + self.db = db + + async def get_changed_ipam_node_details(self) -> list[IpamNodeDetails]: + prefix_generic_schema_source = registry.schema.get( + InfrahubKind.IPPREFIX, branch=self.source_branch_name, duplicate=False + ) + prefix_generic_schema_target = registry.schema.get( + InfrahubKind.IPPREFIX, branch=self.target_branch_name, duplicate=False + ) + address_generic_schema_source = registry.schema.get( + InfrahubKind.IPADDRESS, branch=self.source_branch_name, duplicate=False + ) + address_generic_schema_target = registry.schema.get( + InfrahubKind.IPADDRESS, branch=self.target_branch_name, duplicate=False + ) + + ip_address_kinds = set( + getattr(address_generic_schema_target, "used_by", []) + + getattr(address_generic_schema_source, "used_by", []) + ) + ip_prefix_kinds = set( + getattr(prefix_generic_schema_target, "used_by", []) + getattr(prefix_generic_schema_source, "used_by", []) + ) + if not ip_address_kinds and not ip_prefix_kinds: + return [] + + node_diffs_by_branch = await self.differ.get_nodes() + rel_diffs_by_branch = await self.differ.get_relationships_per_node() + changed_node_details = [] + for branch in node_diffs_by_branch: + node_diffs_by_id = node_diffs_by_branch[branch] + rel_diffs_by_node_id = rel_diffs_by_branch.get(branch, {}) + for node_id, diff_element in node_diffs_by_id.items(): + if diff_element.kind in ip_address_kinds: + is_address = True + elif diff_element.kind in ip_prefix_kinds: + is_address = False + else: + continue + rel_diffs_for_node = rel_diffs_by_node_id.get(node_id) + ip_value = self._get_ip_value(diff_element) + namespace_id = None + if rel_diffs_for_node: + namespace_id = self._get_namespace_id(rel_diffs_for_node) + changed_node_details.append( + ChangedIpamNodeDetails( + node_uuid=node_id, + is_delete=diff_element.action is DiffAction.REMOVED, + is_address=is_address, + namespace_id=namespace_id, + ip_value=ip_value, + ) + ) + await self._add_missing_values(branch=branch, changed_node_details=changed_node_details) + + return [ + IpamNodeDetails( + node_uuid=cnd.node_uuid, + is_delete=cnd.is_delete, + is_address=cnd.is_address, + namespace_id=cnd.namespace_id, + ip_value=cnd.ip_value, + ) + for cnd in changed_node_details + if cnd.namespace_id and cnd.ip_value + ] + + async def _add_missing_values(self, branch: str, changed_node_details: list[ChangedIpamNodeDetails]) -> None: + uuids_missing_data = [ + cnd.node_uuid for cnd in changed_node_details if cnd.ip_value is None or cnd.namespace_id is None + ] + if not uuids_missing_data: + return + + nodes_on_branch = await NodeManager.get_many( + db=self.db, branch=branch, ids=uuids_missing_data, prefetch_relationships=True + ) + + for cnd in 
changed_node_details: + if cnd.ip_value and cnd.namespace_id: + continue + node_from_db = nodes_on_branch.get(cnd.node_uuid) + if not node_from_db: + continue + if not cnd.ip_value: + if cnd.is_address and hasattr(node_from_db, "address"): + cnd.ip_value = node_from_db.address.value + elif not cnd.is_address and hasattr(node_from_db, "prefix"): + cnd.ip_value = node_from_db.prefix.value + if not cnd.namespace_id: + rels = await node_from_db.ip_namespace.get_relationships(db=self.db) # type: ignore[attr-defined] + if rels: + cnd.namespace_id = rels[0].get_peer_id() + + def _get_ip_value(self, node_diff: NodeDiffElement) -> Optional[str]: + if "prefix" in node_diff.attributes: + attr_element = node_diff.attributes["prefix"] + elif "address" in node_diff.attributes: + attr_element = node_diff.attributes["address"] + else: + return None + if RELATIONSHIP_TO_VALUE_LABEL not in attr_element.properties: + return None + value_element = attr_element.properties[RELATIONSHIP_TO_VALUE_LABEL].value + if not value_element: + return None + return value_element.new or value_element.previous + + def _get_namespace_id(self, rel_diffs_for_node: dict[str, list[RelationshipDiffElement]]) -> Optional[str]: + if "ip_namespace__ip_prefix" in rel_diffs_for_node: + rel_elements = rel_diffs_for_node["ip_namespace__ip_prefix"] + elif "ip_namespace__ip_address" in rel_diffs_for_node: + rel_elements = rel_diffs_for_node["ip_namespace__ip_address"] + else: + return None + if not rel_elements: + return None + for rel in rel_elements[0].nodes.values(): + if InfrahubKind.IPNAMESPACE in rel.labels: + return rel.id + return None diff --git a/backend/infrahub/core/diff/model.py b/backend/infrahub/core/diff/model.py index 46d97b8e92..1673ef55d2 100644 --- a/backend/infrahub/core/diff/model.py +++ b/backend/infrahub/core/diff/model.py @@ -131,7 +131,7 @@ def __hash__(self) -> int: class DiffSummaryElement(BaseModel): - branch: str = Field(..., description="The branch where the change occured") + branch: str = Field(..., description="The branch where the change occurred") node: str = Field(..., description="The unique ID of the node") kind: str = Field(..., description="The kind of the node as defined by its namespace and name") actions: List[DiffAction] = Field(..., description="A list of all actions on this node.") @@ -146,7 +146,7 @@ def to_graphql(self) -> Dict[str, Any]: class EnrichedDiffSummaryElement(BaseModel): - branch: str = Field(..., description="The branch where the change occured") + branch: str = Field(..., description="The branch where the change occurred") node: str = Field(..., description="The unique ID of the node") kind: str = Field(..., description="The kind of the node as defined by its namespace and name") action: DiffAction diff --git a/backend/infrahub/core/diff/payload_builder.py b/backend/infrahub/core/diff/payload_builder.py index 8ede811983..5d7cd54595 100644 --- a/backend/infrahub/core/diff/payload_builder.py +++ b/backend/infrahub/core/diff/payload_builder.py @@ -4,9 +4,9 @@ from collections import defaultdict from typing import TYPE_CHECKING, Dict, List, Optional, Union -from infrahub.core import get_branch, registry from infrahub.core.constants import DiffAction, RelationshipCardinality from infrahub.core.manager import NodeManager +from infrahub.core.registry import registry from infrahub.log import get_logger from .model import ( @@ -45,7 +45,7 @@ async def get_display_labels_per_kind( kind: str, ids: List[str], branch_name: str, db: InfrahubDatabase ) -> Dict[str, str]: """Return the 
display_labels of a list of nodes of a specific kind.""" - branch = await get_branch(branch=branch_name, db=db) + branch = await registry.get_branch(branch=branch_name, db=db) schema = registry.schema.get(name=kind, branch=branch) fields = schema.generate_fields_for_display_label() nodes = await NodeManager.get_many(ids=ids, fields=fields, db=db, branch=branch) diff --git a/backend/infrahub/core/enums.py b/backend/infrahub/core/enums.py index d509b66580..541717c893 100644 --- a/backend/infrahub/core/enums.py +++ b/backend/infrahub/core/enums.py @@ -1,13 +1,13 @@ import enum import re -from typing import Any, List +from typing import Any, List, Type ENUM_NAME_REGEX = re.compile("[_a-zA-Z0-9]+") -def generate_python_enum(name: str, options: List[Any]) -> enum.Enum: +def generate_python_enum(name: str, options: List[Any]) -> Type[enum.Enum]: main_attrs = {} for option in options: enum_name = "_".join(re.findall(ENUM_NAME_REGEX, option)).upper() main_attrs[enum_name] = option - return enum.Enum(name, main_attrs) + return enum.Enum(name, main_attrs) # type: ignore[return-value] diff --git a/backend/infrahub/core/graph/__init__.py b/backend/infrahub/core/graph/__init__.py index 691ee5be12..eea35e9f19 100644 --- a/backend/infrahub/core/graph/__init__.py +++ b/backend/infrahub/core/graph/__init__.py @@ -1 +1 @@ -GRAPH_VERSION = 1 +GRAPH_VERSION = 7 diff --git a/backend/infrahub/core/graph/index.py b/backend/infrahub/core/graph/index.py new file mode 100644 index 0000000000..2f0590f52f --- /dev/null +++ b/backend/infrahub/core/graph/index.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +from typing import List + +from infrahub.database.constants import IndexType +from infrahub.database.index import IndexItem + +node_indexes: List[IndexItem] = [ + IndexItem(name="node_uuid", label="Node", properties=["uuid"], type=IndexType.RANGE), + IndexItem(name="node_kind", label="Node", properties=["kind"], type=IndexType.RANGE), + IndexItem(name="attr_name", label="Attribute", properties=["name"], type=IndexType.RANGE), + IndexItem(name="attr_uuid", label="Attribute", properties=["uuid"], type=IndexType.RANGE), + IndexItem(name="attr_value", label="AttributeValue", properties=["value"], type=IndexType.RANGE), + IndexItem(name="attr_ipnet_bin", label="AttributeIPNetwork", properties=["binary_address"], type=IndexType.RANGE), + IndexItem(name="attr_iphost_bin", label="AttributeIPHost", properties=["binary_address"], type=IndexType.RANGE), + IndexItem(name="rel_uuid", label="Relationship", properties=["uuid"], type=IndexType.RANGE), + IndexItem(name="rel_identifier", label="Relationship", properties=["name"], type=IndexType.RANGE), +] + +rel_indexes: List[IndexItem] = [ + IndexItem( + name="attr_from", + label="HAS_ATTRIBUTE", + properties=["from"], + type=IndexType.RANGE, + ), + IndexItem( + name="attr_branch", + label="HAS_ATTRIBUTE", + properties=["branch"], + type=IndexType.RANGE, + ), + IndexItem( + name="value_from", + label="HAS_VALUE", + properties=["from"], + type=IndexType.RANGE, + ), + IndexItem( + name="value_branch", + label="HAS_VALUE", + properties=["branch"], + type=IndexType.RANGE, + ), +] diff --git a/backend/infrahub/core/graph/schema.py b/backend/infrahub/core/graph/schema.py index 81f54dd32d..66d2f45bce 100644 --- a/backend/infrahub/core/graph/schema.py +++ b/backend/infrahub/core/graph/schema.py @@ -146,6 +146,21 @@ class GraphAttributeValueProperties(BaseModel): value: Any = Field(..., description="value of the attribute") +class GraphAttributeIPNetworkProperties(BaseModel): + 
value: str = Field(..., description="value of the attribute") + is_default: bool = Field(..., description="Flag to indicate if an attribute has the default value") + binary_address: str = Field(..., description="Network address represented in binary format") + version: int = Field(..., description="Version of IP, either 4 or 6") + # num_addresses: int = Field(..., description="Total number of addresses available in this IPNetwork") + + +class GraphAttributeIPHostProperties(BaseModel): + value: str = Field(..., description="value of the attribute") + is_default: bool = Field(..., description="Flag to indicate if an attribute has the default value") + binary_address: str = Field(..., description="Network address represented in binary format") + version: int = Field(..., description="Version of IP, either 4 or 6") + + class GraphAttributeValueRelationships(BaseModel): HAS_VALUE: GraphRelationship = Field( GraphRelationship(peer="Attribute", direction=GraphRelDirection.INBOUND), @@ -159,6 +174,18 @@ class GraphAttributeValueNode(BaseModel): relationships: GraphAttributeValueRelationships +class GraphAttributeIPNetworkNode(BaseModel): + default_label: str = "AttributeIPNetwork" + properties: GraphAttributeIPNetworkProperties + relationships: GraphAttributeValueRelationships + + +class GraphAttributeIPHostNode(BaseModel): + default_label: str = "AttributeIPHost" + properties: GraphAttributeIPHostProperties + relationships: GraphAttributeValueRelationships + + # ----------------------------------------------------- # Boolean # ----------------------------------------------------- @@ -208,6 +235,8 @@ class GraphRelationshipDefault(BaseModel): "Relationship": GraphRelationshipNode, "Attribute": GraphAttributeNode, "AttributeValue": GraphAttributeValueNode, + "AttributeIPNetwork": GraphAttributeIPNetworkNode, + "AttributeIPHost": GraphAttributeIPHostNode, "Boolean": GraphBooleanNode, }, "relationships": { diff --git a/backend/infrahub/core/initialization.py b/backend/infrahub/core/initialization.py index 4bd78ce175..c7e5e0652b 100644 --- a/backend/infrahub/core/initialization.py +++ b/backend/infrahub/core/initialization.py @@ -3,9 +3,10 @@ from infrahub import config, lock from infrahub.core import registry from infrahub.core.branch import Branch -from infrahub.core.constants import GLOBAL_BRANCH_NAME, InfrahubKind +from infrahub.core.constants import DEFAULT_IP_NAMESPACE, GLOBAL_BRANCH_NAME, InfrahubKind from infrahub.core.graph import GRAPH_VERSION from infrahub.core.node import Node +from infrahub.core.node.ipam import BuiltinIPPrefix from infrahub.core.root import Root from infrahub.core.schema import SchemaRoot, core_models, internal_schema from infrahub.core.schema_manager import SchemaManager @@ -34,22 +35,27 @@ async def get_root_node(db: InfrahubDatabase, initialize: bool = False) -> Root: return roots[0] -async def initialization(db: InfrahubDatabase) -> None: - if config.SETTINGS.database.db_type == config.DatabaseType.MEMGRAPH: - session = await db.session() - await session.run(query="SET DATABASE SETTING 'log.level' TO 'INFO'") - await session.run(query="SET DATABASE SETTING 'log.to_stderr' TO 'true'") - await session.run(query="STORAGE MODE IN_MEMORY_ANALYTICAL") +async def get_default_ipnamespace(db: InfrahubDatabase) -> Optional[Node]: + if not registry.schema._branches or not registry.schema.has(name=InfrahubKind.NAMESPACE): + return None + + nodes = await registry.manager.query(db=db, schema=InfrahubKind.NAMESPACE, filters={"default__value": True}) + if len(nodes) == 0: + return 
None + if len(nodes) > 1: + raise DatabaseError("More than 1 default namespace found.") + + return nodes[0] + + +async def initialize_registry(db: InfrahubDatabase, initialize: bool = False) -> None: # --------------------------------------------------- # Initialize the database and Load the Root node # --------------------------------------------------- - async with lock.registry.initialization(): - log.debug("Checking Root Node") - - root = await get_root_node(db=db, initialize=True) - registry.id = str(root.get_uuid()) - registry.default_branch = root.default_branch + root = await get_root_node(db=db, initialize=initialize) + registry.id = str(root.get_uuid()) + registry.default_branch = root.default_branch # --------------------------------------------------- # Initialize the Storage Driver @@ -57,12 +63,40 @@ async def initialization(db: InfrahubDatabase) -> None: registry.storage = await InfrahubObjectStorage.init(settings=config.SETTINGS.storage) # --------------------------------------------------- - # Load all existing branches into the registry + # Load existing branches into the registry # --------------------------------------------------- branches: List[Branch] = await Branch.get_list(db=db) for branch in branches: registry.branch[branch.name] = branch + # --------------------------------------------------- + # Load internal models into the registry + # --------------------------------------------------- + registry.node["Node"] = Node + registry.node["BuiltinIPPrefix"] = BuiltinIPPrefix + + +async def initialization(db: InfrahubDatabase) -> None: + if config.SETTINGS.database.db_type == config.DatabaseType.MEMGRAPH: + session = await db.session() + await session.run(query="SET DATABASE SETTING 'log.level' TO 'INFO'") + await session.run(query="SET DATABASE SETTING 'log.to_stderr' TO 'true'") + await session.run(query="STORAGE MODE IN_MEMORY_ANALYTICAL") + + # --------------------------------------------------- + # Initialize the database and Load the Root node + # --------------------------------------------------- + async with lock.registry.initialization(): + log.debug("Checking Root Node") + await initialize_registry(db=db, initialize=True) + + # Add Indexes to the database + if db.manager.index.initialized: + log.debug("Loading database indexes ..") + await db.manager.index.add() + else: + log.warning("The database index manager hasn't been initialized.") + # --------------------------------------------------- # Load all schema in the database into the registry # ... 
Unless the schema has been initialized already @@ -86,7 +120,7 @@ async def initialization(db: InfrahubDatabase) -> None: branch=default_branch.name, ) - for branch in branches: + for branch in registry.branch.values(): if branch.name in [default_branch.name, GLOBAL_BRANCH_NAME]: continue @@ -102,22 +136,11 @@ async def initialization(db: InfrahubDatabase) -> None: ) # --------------------------------------------------- - # Load internal models into the registry - # --------------------------------------------------- - - registry.node["Node"] = Node - - # --------------------------------------------------- - # Load all existing Groups into the registry + # Load Default Namespace # --------------------------------------------------- - # group_schema = await registry.schema.get(db=db, name="Group") - # groups = await NodeManager.query(group_schema, db=db) - # for group in groups: - # registry.node_group[group.name.value] = group - - # groups = AttrGroup.get_list() - # for group in groups: - # registry.attr_group[group.name.value] = group + ip_namespace = await get_default_ipnamespace(db=db) + if ip_namespace: + registry.default_ipnamespace = ip_namespace.id async def create_root_node(db: InfrahubDatabase) -> Root: @@ -228,6 +251,19 @@ async def create_account( return obj +async def create_ipam_namespace( + db: InfrahubDatabase, + name: str = DEFAULT_IP_NAMESPACE, + description: str = "Used to provide a default space of IP resources", +) -> Node: + obj = await Node.init(db=db, schema=InfrahubKind.NAMESPACE) + await obj.new(db=db, name=name, description=description, default=True) + await obj.save(db=db) + log.info(f"Created IPAM Namespace: {name}") + + return obj + + async def first_time_initialization(db: InfrahubDatabase) -> None: # -------------------------------------------------- # Create the default Branch @@ -253,10 +289,14 @@ async def first_time_initialization(db: InfrahubDatabase) -> None: # -------------------------------------------------- # Create Default Users and Groups # -------------------------------------------------- - await create_account( db=db, name="admin", password=config.SETTINGS.security.initial_admin_password, token_value=config.SETTINGS.security.initial_admin_token, ) + + # -------------------------------------------------- + # Create Default IPAM Namespace + # -------------------------------------------------- + await create_ipam_namespace(db=db) diff --git a/backend/infrahub/core/integrity/object_conflict/conflict_recorder.py b/backend/infrahub/core/integrity/object_conflict/conflict_recorder.py index 1587cde489..36b70e6823 100644 --- a/backend/infrahub/core/integrity/object_conflict/conflict_recorder.py +++ b/backend/infrahub/core/integrity/object_conflict/conflict_recorder.py @@ -64,7 +64,7 @@ async def record_conflicts(self, proposed_change_id: str, conflicts: List[Object await conflict_obj.new( db=self.db, - label="Data Conflict", + label=f"{conflict.name} ({conflict.id})", origin="internal", kind="DataIntegrity", validator=validator.id, diff --git a/backend/infrahub/core/ipam/__init__.py b/backend/infrahub/core/ipam/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/infrahub/core/ipam/constants.py b/backend/infrahub/core/ipam/constants.py new file mode 100644 index 0000000000..824089281c --- /dev/null +++ b/backend/infrahub/core/ipam/constants.py @@ -0,0 +1,6 @@ +import ipaddress +from typing import Union + +IPNetworkType = Union[ipaddress.IPv6Network, ipaddress.IPv4Network] +IPAddressType = Union[ipaddress.IPv6Interface, 
ipaddress.IPv4Interface] +AllIPTypes = Union[IPNetworkType, IPAddressType] diff --git a/backend/infrahub/core/ipam/model.py b/backend/infrahub/core/ipam/model.py new file mode 100644 index 0000000000..9b4920f133 --- /dev/null +++ b/backend/infrahub/core/ipam/model.py @@ -0,0 +1,10 @@ +from dataclasses import dataclass + + +@dataclass +class IpamNodeDetails: + node_uuid: str + is_address: bool + is_delete: bool + namespace_id: str + ip_value: str diff --git a/backend/infrahub/core/ipam/reconciler.py b/backend/infrahub/core/ipam/reconciler.py new file mode 100644 index 0000000000..d11ebc99f2 --- /dev/null +++ b/backend/infrahub/core/ipam/reconciler.py @@ -0,0 +1,210 @@ +import ipaddress +from typing import TYPE_CHECKING, Optional, Union + +from infrahub.core.branch import Branch +from infrahub.core.constants import InfrahubKind +from infrahub.core.manager import NodeManager +from infrahub.core.node import Node +from infrahub.core.query.ipam import IPPrefixReconcileQuery +from infrahub.core.timestamp import Timestamp +from infrahub.database import InfrahubDatabase +from infrahub.exceptions import NodeNotFoundError + +from .constants import AllIPTypes + +if TYPE_CHECKING: + from infrahub.core.relationship.model import RelationshipManager + + +class IPNodesToReconcile: + def __init__( + self, + node_uuid: str, + current_parent_uuid: Optional[str], + calculated_parent_uuid: Optional[str], + current_child_uuids: set[str], + calculated_child_uuids: set[str], + node_map: dict[str, Node], + ) -> None: + self.node_uuid = node_uuid + self.current_parent_uuid = current_parent_uuid + self.calculated_parent_uuid = calculated_parent_uuid + self.current_child_uuids = current_child_uuids + self.calculated_child_uuids = calculated_child_uuids + self.node_map = node_map + self._calculated_child_nodes = set() + self._current_child_nodes = set() + for ccu in self.current_child_uuids: + if ccu in self.node_map: + self._current_child_nodes.add(self.node_map[ccu]) + for ccu in self.calculated_child_uuids: + if ccu in self.node_map: + self._calculated_child_nodes.add(self.node_map[ccu]) + + @property + def node(self) -> Node: + return self.node_map[self.node_uuid] + + @property + def current_parent(self) -> Optional[Node]: + if not self.current_parent_uuid: + return None + return self.node_map.get(self.current_parent_uuid) + + @property + def calculated_parent(self) -> Optional[Node]: + if not self.calculated_parent_uuid: + return None + return self.node_map.get(self.calculated_parent_uuid) + + @property + def current_child_nodes(self) -> set[Node]: + return self._current_child_nodes + + @property + def calculated_child_nodes(self) -> set[Node]: + return self._calculated_child_nodes + + def get_node_by_uuid(self, uuid: str) -> Node: + return self.node_map[uuid] + + async def _get_child_uuids(self, db: InfrahubDatabase, node: Optional[Node]) -> set[str]: + if not node: + return set() + child_uuids = set() + child_prefix_rels = await node.children.get_relationships(db=db) # type: ignore[attr-defined] + child_uuids |= {cpr.get_peer_id() for cpr in child_prefix_rels} + child_address_rels = await node.ip_addresses.get_relationships(db=db) # type: ignore[attr-defined] + child_uuids |= {car.get_peer_id() for car in child_address_rels} + return child_uuids + + +class IpamReconciler: + def __init__(self, db: InfrahubDatabase, branch: Branch) -> None: + self.db = db + self.branch = branch + self.at: Optional[Timestamp] = None + + async def reconcile( + self, + ip_value: AllIPTypes, + namespace: Optional[Union[Node, str]] = 
None, + node_uuid: Optional[str] = None, + is_delete: bool = False, + at: Optional[Timestamp] = None, + ) -> Optional[Node]: + self.at = Timestamp(at) + + query = await IPPrefixReconcileQuery.init( + db=self.db, branch=self.branch, ip_value=ip_value, namespace=namespace, node_uuid=node_uuid, at=self.at + ) + await query.execute(db=self.db) + + ip_node_uuid = query.get_ip_node_uuid() + if not ip_node_uuid: + node_type = InfrahubKind.IPPREFIX + if isinstance(ip_value, (ipaddress.IPv6Interface, ipaddress.IPv4Interface)): + node_type = InfrahubKind.IPADDRESS + raise NodeNotFoundError(node_type=node_type, identifier=str(ip_value)) + current_parent_uuid = query.get_current_parent_uuid() + calculated_parent_uuid = query.get_calculated_parent_uuid() + current_children_uuids = set(query.get_current_children_uuids()) + calculated_children_uuids = set(query.get_calculated_children_uuids()) + + all_uuids: set[str] = set() + all_uuids = (all_uuids | {ip_node_uuid}) if ip_node_uuid else all_uuids + all_uuids = (all_uuids | {current_parent_uuid}) if current_parent_uuid else all_uuids + all_uuids = (all_uuids | {calculated_parent_uuid}) if calculated_parent_uuid else all_uuids + all_uuids |= current_children_uuids + all_uuids |= calculated_children_uuids + all_nodes = await NodeManager.get_many( + db=self.db, + branch=self.branch, + ids=list(all_uuids), + ) + + reconcile_nodes = IPNodesToReconcile( + node_uuid=ip_node_uuid, + current_parent_uuid=current_parent_uuid, + calculated_parent_uuid=calculated_parent_uuid, + current_child_uuids=current_children_uuids, + calculated_child_uuids=calculated_children_uuids, + node_map=all_nodes, + ) + + if is_delete: + updated_uuids = await self.update_children_for_delete(reconcile_nodes) + else: + updated_uuids = await self.update_node(reconcile_nodes) + updated_uuids |= await self.update_current_children(reconcile_nodes) + updated_uuids |= await self.update_calculated_children(reconcile_nodes) + + for updated_uuid in updated_uuids: + node = reconcile_nodes.get_node_by_uuid(updated_uuid) + await node.save(db=self.db, at=self.at) + + if is_delete: + try: + await reconcile_nodes.node.delete(db=self.db, at=self.at) + except KeyError: + return None + + return reconcile_nodes.node + + async def _update_node_parent(self, node: Node, new_parent_uuid: Optional[str]) -> None: + node_kinds = {node.get_kind()} | set(node.get_schema().inherit_from) + is_prefix = False + if InfrahubKind.IPADDRESS in node_kinds: + rel_manager: RelationshipManager = node.ip_prefix # type: ignore[attr-defined] + elif InfrahubKind.IPPREFIX in node_kinds: + rel_manager = node.parent # type: ignore[attr-defined] + is_prefix = True + else: + return + + await rel_manager.update(db=self.db, data=new_parent_uuid) + if not is_prefix: + return + node.is_top_level.value = new_parent_uuid is None # type: ignore[attr-defined] + + async def update_node(self, reconcile_nodes: IPNodesToReconcile) -> set[str]: + await self._update_node_parent( + node=reconcile_nodes.node, new_parent_uuid=reconcile_nodes.calculated_parent_uuid + ) + return {reconcile_nodes.node.get_id()} + + async def update_node_for_delete(self, reconcile_nodes: IPNodesToReconcile) -> set[str]: + await self._update_node_parent(node=reconcile_nodes.node, new_parent_uuid=None) + return {reconcile_nodes.node.get_id()} + + async def update_current_children(self, reconcile_nodes: IPNodesToReconcile) -> set[str]: + updated_uuids = set() + for current_child_node in reconcile_nodes.current_child_nodes: + current_child_uuid = current_child_node.get_id() + if 
current_child_uuid in reconcile_nodes.calculated_child_uuids: + # current child is still a child, no update necessary + continue + # set parent of deleted child to current_parent of the node (might be None) + await self._update_node_parent(node=current_child_node, new_parent_uuid=reconcile_nodes.current_parent_uuid) + updated_uuids.add(current_child_uuid) + return updated_uuids + + async def update_calculated_children(self, reconcile_nodes: IPNodesToReconcile) -> set[str]: + updated_uuids = set() + for calculated_child_node in reconcile_nodes.calculated_child_nodes: + calculated_child_uuid = calculated_child_node.get_id() + if calculated_child_uuid in reconcile_nodes.current_child_uuids: + # calculated child is already a child, no update necessary + continue + # set parent of new child to the node + await self._update_node_parent(node=calculated_child_node, new_parent_uuid=reconcile_nodes.node_uuid) + updated_uuids.add(calculated_child_uuid) + return updated_uuids + + async def update_children_for_delete(self, reconcile_nodes: IPNodesToReconcile) -> set[str]: + updated_uuids = set() + for current_child_node in reconcile_nodes.current_child_nodes: + current_child_uuid = current_child_node.get_id() + await self._update_node_parent(node=current_child_node, new_parent_uuid=reconcile_nodes.current_parent_uuid) + updated_uuids.add(current_child_uuid) + return updated_uuids diff --git a/backend/infrahub/core/manager.py b/backend/infrahub/core/manager.py index e026f3c190..50a1f4880e 100644 --- a/backend/infrahub/core/manager.py +++ b/backend/infrahub/core/manager.py @@ -1,12 +1,16 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Dict, List, Optional, Type, Union +from functools import reduce +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, Union from infrahub_sdk.utils import deep_merge_dict -from infrahub.core import get_branch, registry from infrahub.core.node import Node +from infrahub.core.node.delete_validator import NodeDeleteValidator from infrahub.core.query.node import ( + AttributeFromDB, + AttributeNodePropertyFromDB, + NodeAttributesFromDB, NodeGetHierarchyQuery, NodeGetListQuery, NodeListGetAttributeQuery, @@ -15,9 +19,11 @@ NodeToProcess, ) from infrahub.core.query.relationship import RelationshipGetPeerQuery +from infrahub.core.registry import registry from infrahub.core.relationship import Relationship -from infrahub.core.schema import GenericSchema, NodeSchema, RelationshipSchema +from infrahub.core.schema import GenericSchema, NodeSchema, ProfileSchema, RelationshipSchema from infrahub.core.timestamp import Timestamp +from infrahub.dependencies.registry import get_component_registry from infrahub.exceptions import NodeNotFoundError, SchemaNotFoundError if TYPE_CHECKING: @@ -47,12 +53,66 @@ def identify_node_class(node: NodeToProcess) -> Type[Node]: return Node +class ProfileAttributeIndex: + def __init__( + self, + profile_attributes_id_map: dict[str, NodeAttributesFromDB], + profile_ids_by_node_id: dict[str, list[str]], + ): + self._profile_attributes_id_map = profile_attributes_id_map + self._profile_ids_by_node_id = profile_ids_by_node_id + + def apply_profiles(self, node_data_dict: dict[str, Any]) -> dict[str, Any]: + updated_data: dict[str, Any] = {**node_data_dict} + node_id = node_data_dict.get("id") + profile_ids = self._profile_ids_by_node_id.get(node_id, []) + if not profile_ids: + return updated_data + profiles = [ + self._profile_attributes_id_map[p_id] for p_id in profile_ids if p_id in self._profile_attributes_id_map + ] + 
+ def get_profile_priority(nafd: NodeAttributesFromDB) -> tuple[Union[int, float], str]: + try: + return (int(nafd.attrs.get("profile_priority").value), nafd.node.get("uuid")) + except (TypeError, AttributeError): + return (float("inf"), "") + + profiles.sort(key=get_profile_priority) + + for attr_name, attr_data in updated_data.items(): + if not isinstance(attr_data, AttributeFromDB): + continue + if not attr_data.is_default: + continue + profile_value, profile_uuid = None, None + index = 0 + + while profile_value is None and index <= (len(profiles) - 1): + try: + profile_value = profiles[index].attrs[attr_name].value + if profile_value != "NULL": + profile_uuid = profiles[index].node["uuid"] + break + profile_value = None + except (IndexError, KeyError, AttributeError): + ... + index += 1 + + if profile_value is not None: + attr_data.value = profile_value + attr_data.is_from_profile = True + attr_data.is_default = False + attr_data.node_properties["source"] = AttributeNodePropertyFromDB(uuid=profile_uuid, labels=[]) + return updated_data + + class NodeManager: @classmethod async def query( cls, db: InfrahubDatabase, - schema: Union[NodeSchema, GenericSchema, str], + schema: Union[NodeSchema, GenericSchema, ProfileSchema, str], filters: Optional[dict] = None, fields: Optional[dict] = None, offset: Optional[int] = None, @@ -79,12 +139,12 @@ async def query( List[Node]: List of Node object """ - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) at = Timestamp(at) if isinstance(schema, str): schema = registry.schema.get(name=schema, branch=branch.name) - elif not isinstance(schema, (NodeSchema, GenericSchema)): + elif not isinstance(schema, (NodeSchema, GenericSchema, ProfileSchema)): raise ValueError(f"Invalid schema provided {schema}") # Query the list of nodes matching this Query @@ -143,7 +203,7 @@ async def count( int: The number of responses found """ - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) at = Timestamp(at) query = await NodeGetListQuery.init( @@ -162,7 +222,7 @@ async def count_peers( at: Optional[Union[Timestamp, str]] = None, branch: Optional[Union[Branch, str]] = None, ) -> int: - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) at = Timestamp(at) rel = Relationship(schema=schema, branch=branch, node_id="PLACEHOLDER") @@ -186,7 +246,7 @@ async def query_peers( at: Union[Timestamp, str] = None, branch: Union[Branch, str] = None, ) -> List[Relationship]: - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) at = Timestamp(at) rel = Relationship(schema=schema, branch=branch, node_id="PLACEHOLDER") @@ -237,7 +297,7 @@ async def count_hierarchy( at: Optional[Union[Timestamp, str]] = None, branch: Optional[Union[Branch, str]] = None, ) -> int: - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) at = Timestamp(at) query = await NodeGetHierarchyQuery.init( @@ -266,7 +326,7 @@ async def query_hierarchy( at: Union[Timestamp, str] = None, branch: Union[Branch, str] = None, ) -> Dict[str, Node]: - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) at = Timestamp(at) query = await NodeGetHierarchyQuery.init( @@ -313,7 +373,7 @@ async def get_one_by_default_filter( prefetch_relationships: bool = False, account=None, ) -> Node: - branch = await 
get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) at = Timestamp(at) node_schema = registry.schema.get(name=schema_name, branch=branch) @@ -358,7 +418,7 @@ async def get_one_by_id_or_default_filter( prefetch_relationships: bool = False, account=None, ) -> Node: - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) at = Timestamp(at) node = await cls.get_one( @@ -406,6 +466,8 @@ async def get_one( kind: Optional[str] = None, ) -> Optional[Node]: """Return one node based on its ID.""" + branch = await registry.get_branch(branch=branch, db=db) + result = await cls.get_many( ids=[id], fields=fields, @@ -422,8 +484,9 @@ async def get_one( return None node = result[id] + node_schema = node.get_schema() - if kind and node.get_kind() != kind: + if kind and (node_schema.kind != kind and kind not in node_schema.inherit_from): raise NodeNotFoundError( branch_name=branch.name, node_type=kind, @@ -434,7 +497,7 @@ async def get_one( return node @classmethod - async def get_many( # pylint: disable=too-many-branches + async def get_many( # pylint: disable=too-many-branches,too-many-statements cls, db: InfrahubDatabase, ids: List[str], @@ -448,7 +511,7 @@ async def get_many( # pylint: disable=too-many-branches ) -> Dict[str, Node]: """Return a list of nodes based on their IDs.""" - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) at = Timestamp(at) # Query all nodes @@ -457,11 +520,21 @@ async def get_many( # pylint: disable=too-many-branches nodes_info_by_id: Dict[str, NodeToProcess] = { node.node_uuid: node async for node in query.get_nodes(duplicate=False) } + profile_ids_by_node_id = query.get_profile_ids_by_node_id() + all_profile_ids = reduce( + lambda all_ids, these_ids: all_ids | set(these_ids), profile_ids_by_node_id.values(), set() + ) + + if fields and all_profile_ids: + if "profile_priority" not in fields: + fields["profile_priority"] = {} + if "value" not in fields["profile_priority"]: + fields["profile_priority"]["value"] = None # Query list of all Attributes query = await NodeListGetAttributeQuery.init( db=db, - ids=list(nodes_info_by_id.keys()), + ids=list(nodes_info_by_id.keys()) + list(all_profile_ids), fields=fields, branch=branch, include_source=include_source, @@ -470,7 +543,17 @@ async def get_many( # pylint: disable=too-many-branches at=at, ) await query.execute(db=db) - node_attributes = query.get_attributes_group_by_node() + all_node_attributes = query.get_attributes_group_by_node() + profile_attributes: Dict[str, Dict[str, AttributeFromDB]] = {} + node_attributes: Dict[str, Dict[str, AttributeFromDB]] = {} + for node_id, attribute_dict in all_node_attributes.items(): + if node_id in all_profile_ids: + profile_attributes[node_id] = attribute_dict + else: + node_attributes[node_id] = attribute_dict + profile_index = ProfileAttributeIndex( + profile_attributes_id_map=profile_attributes, profile_ids_by_node_id=profile_ids_by_node_id + ) # if prefetch_relationships is enabled # Query all the peers associated with all nodes at once. 
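For readers following the ProfileAttributeIndex changes above: the sketch below distills the override logic of apply_profiles into a standalone form. It is illustrative only and not part of the patch; the plain-dict shapes, the apply_profiles_sketch name, and the example data are assumptions made for the sketch, while the ordering rule it encodes (lowest profile_priority wins, ties broken by profile UUID, "NULL" values skipped, and only attributes still at their default value are overridden) follows the diff.

# Illustrative sketch only (not part of the diff): simplified profile-override
# resolution using plain dicts instead of the AttributeFromDB objects above.
from typing import Any, Optional

NULL_VALUE = "NULL"  # sentinel meaning "profile does not set this attribute", mirroring the constant added in constants/__init__.py


def apply_profiles_sketch(
    node_attrs: dict[str, dict[str, Any]],
    profiles: list[dict[str, Any]],
) -> dict[str, dict[str, Any]]:
    """Fill attributes still at their default value from the best matching profile.

    node_attrs maps attribute name -> {"value": ..., "is_default": bool}.
    Each profile is {"uuid": str, "profile_priority": int, "attrs": {name: value}}.
    Lower profile_priority wins; ties break on uuid, as in ProfileAttributeIndex.
    """
    ordered = sorted(profiles, key=lambda p: (p["profile_priority"], p["uuid"]))
    for name, attr in node_attrs.items():
        if not attr["is_default"]:
            continue  # an explicitly set value is never overridden by a profile
        for profile in ordered:
            value: Optional[Any] = profile["attrs"].get(name)
            if value is None or value == NULL_VALUE:
                continue  # this profile does not define the attribute
            attr["value"] = value
            attr["is_default"] = False
            attr["source"] = profile["uuid"]  # record which profile supplied the value
            break
    return node_attrs


if __name__ == "__main__":
    attrs = {"mtu": {"value": 1500, "is_default": True}, "enabled": {"value": True, "is_default": False}}
    profiles = [
        {"uuid": "p2", "profile_priority": 2, "attrs": {"mtu": 9000}},
        {"uuid": "p1", "profile_priority": 1, "attrs": {"mtu": NULL_VALUE}},
    ]
    # mtu ends up as 9000 from p2, because p1 (higher priority) declares NULL for it
    print(apply_profiles_sketch(attrs, profiles))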
@@ -503,7 +586,11 @@ async def get_many( # pylint: disable=too-many-branches continue node = nodes_info_by_id[node_id] - attrs = {"db_id": node.node_id, "id": node_id, "updated_at": node.updated_at} + new_node_data: Dict[str, Union[str, AttributeFromDB]] = { + "db_id": node.node_id, + "id": node_id, + "updated_at": node.updated_at, + } if not node.schema: raise SchemaNotFoundError( @@ -515,26 +602,9 @@ async def get_many( # pylint: disable=too-many-branches # -------------------------------------------------------- # Attributes # -------------------------------------------------------- - for attr_name, attr in node_attributes.get(node_id, {}).get("attrs", {}).items(): - attrs[attr_name] = { - "db_id": attr.attr_id, - "id": attr.attr_uuid, - "name": attr_name, - "value": attr.value, - "updated_at": attr.updated_at, - } - - if attr.is_protected is not None: - attrs[attr_name]["is_protected"] = attr.is_protected - - if attr.is_visible is not None: - attrs[attr_name]["is_visible"] = attr.is_visible - - if attr.source_uuid: - attrs[attr_name]["source"] = attr.source_uuid - - if attr.owner_uuid: - attrs[attr_name]["owner"] = attr.owner_uuid + if node_id in node_attributes: + for attr_name, attr in node_attributes[node_id].attrs.items(): + new_node_data[attr_name] = attr # -------------------------------------------------------- # Relationships @@ -545,17 +615,43 @@ async def get_many( # pylint: disable=too-many-branches rel_peers = [peers.get(id) for id in peers_per_node[node_id][rel_schema.identifier]] if rel_schema.cardinality == "one": if len(rel_peers) == 1: - attrs[rel_schema.name] = rel_peers[0] + new_node_data[rel_schema.name] = rel_peers[0] elif rel_schema.cardinality == "many": - attrs[rel_schema.name] = rel_peers + new_node_data[rel_schema.name] = rel_peers + new_node_data_with_profile_overrides = profile_index.apply_profiles(new_node_data) node_class = identify_node_class(node=node) item = await node_class.init(schema=node.schema, branch=branch, at=at, db=db) - await item.load(**attrs, db=db) + await item.load(**new_node_data_with_profile_overrides, db=db) nodes[node_id] = item return nodes + @classmethod + async def delete( + cls, + db: InfrahubDatabase, + nodes: List[Node], + branch: Optional[Union[Branch, str]] = None, + at: Optional[Union[Timestamp, str]] = None, + ) -> list[Node]: + """Returns list of deleted nodes because of cascading deletes""" + branch = await registry.get_branch(branch=branch, db=db) + component_registry = get_component_registry() + node_delete_validator = await component_registry.get_component(NodeDeleteValidator, db=db, branch=branch) + ids_to_delete = await node_delete_validator.get_ids_to_delete(nodes=nodes, at=at) + node_ids = {node.get_id() for node in nodes} + missing_ids_to_delete = ids_to_delete - node_ids + if missing_ids_to_delete: + node_map = await cls.get_many(db=db, ids=list(missing_ids_to_delete), branch=branch, at=at) + nodes += list(node_map.values()) + deleted_nodes = [] + for node in nodes: + await node.delete(db=db, at=at) + deleted_nodes.append(node) + + return deleted_nodes + registry.manager = NodeManager diff --git a/backend/infrahub/core/merge.py b/backend/infrahub/core/merge.py index 6e6fa8216d..6df564fdd9 100644 --- a/backend/infrahub/core/merge.py +++ b/backend/infrahub/core/merge.py @@ -262,7 +262,8 @@ async def merge( # TODO need to find a way to properly communicate back to the user any issue that could come up during the merge # From the Graph or From the repositories await self.merge_graph(at=at, 
conflict_resolution=conflict_resolution) - await self.merge_repositories() + if self.source_branch.sync_with_git: + await self.merge_repositories() async def merge_graph( # pylint: disable=too-many-branches,too-many-statements self, diff --git a/backend/infrahub/core/migrations/graph/__init__.py b/backend/infrahub/core/migrations/graph/__init__.py index af79f4534d..a86be3373c 100644 --- a/backend/infrahub/core/migrations/graph/__init__.py +++ b/backend/infrahub/core/migrations/graph/__init__.py @@ -1,23 +1,35 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Sequence +from typing import TYPE_CHECKING, List, Sequence, Union from .m001_add_version_to_graph import Migration001 +from .m002_attribute_is_default import Migration002 +from .m003_relationship_parent_optional import Migration003 +from .m004_add_attr_documentation import Migration004 +from .m005_add_rel_read_only import Migration005 +from .m006_add_rel_on_delete import Migration006 +from .m007_add_rel_allow_override import Migration007 if TYPE_CHECKING: from infrahub.core.root import Root - from ..shared import GraphMigration + from ..shared import GraphMigration, InternalSchemaMigration -MIGRATIONS = [ +MIGRATIONS: List[type[Union[GraphMigration, InternalSchemaMigration]]] = [ Migration001, + Migration002, + Migration003, + Migration004, + Migration005, + Migration006, + Migration007, ] -async def get_graph_migrations(root: Root) -> Sequence[GraphMigration]: +async def get_graph_migrations(root: Root) -> Sequence[Union[GraphMigration, InternalSchemaMigration]]: applicable_migrations = [] for migration_class in MIGRATIONS: - migration = migration_class() + migration = migration_class.init() if root.graph_version > migration.minimum_version: continue applicable_migrations.append(migration) diff --git a/backend/infrahub/core/migrations/graph/m002_attribute_is_default.py b/backend/infrahub/core/migrations/graph/m002_attribute_is_default.py new file mode 100644 index 0000000000..5d32cf564f --- /dev/null +++ b/backend/infrahub/core/migrations/graph/m002_attribute_is_default.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, Sequence + +from infrahub.core.migrations.shared import MigrationResult +from infrahub.core.query import Query, QueryType + +from ..shared import GraphMigration + +if TYPE_CHECKING: + from infrahub.database import InfrahubDatabase + + +class Migration002Query01(Query): + name = "migration_002_01" + type: QueryType = QueryType.WRITE + + async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, Any]) -> None: + query = """ + MATCH (a:AttributeValue) + WHERE a.is_default IS NULL + SET a.is_default = false + """ + self.add_to_query(query) + self.return_labels = ["a"] + + +class Migration002(GraphMigration): + name: str = "002_attribute_is_default" + queries: Sequence[type[Query]] = [Migration002Query01] + minimum_version: int = 1 + + async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult: + result = MigrationResult() + + return result diff --git a/backend/infrahub/core/migrations/graph/m003_relationship_parent_optional.py b/backend/infrahub/core/migrations/graph/m003_relationship_parent_optional.py new file mode 100644 index 0000000000..019468c9b6 --- /dev/null +++ b/backend/infrahub/core/migrations/graph/m003_relationship_parent_optional.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, Sequence + +from infrahub.core.migrations.shared import 
MigrationResult +from infrahub.core.query import Query, QueryType +from infrahub.core.timestamp import Timestamp + +from ..shared import GraphMigration + +if TYPE_CHECKING: + from infrahub.database import InfrahubDatabase + + +class Migration003Query01(Query): + name = "migration_003_01" + type: QueryType = QueryType.WRITE + + async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, Any]) -> None: + at = Timestamp() + filters, params = at.get_query_filter_path() + + self.params.update(params) + + # ruff: noqa: E501 + query = """ + MATCH path = (av2:AttributeValue)-[:HAS_VALUE]-(:Attribute {name: "optional"})-[:HAS_ATTRIBUTE]-(n:SchemaRelationship)-[:HAS_ATTRIBUTE]-(:Attribute {name: "kind"})-[:HAS_VALUE]-(av1:AttributeValue) + WHERE av1.value = "Parent" AND av2.value = true AND all(r IN relationships(path) WHERE ( %(filters)s )) + CALL { + WITH n + MATCH path = (av2:AttributeValue)-[r22:HAS_VALUE]-(a2:Attribute {name: "optional"})-[:HAS_ATTRIBUTE]-(n:SchemaRelationship)-[:HAS_ATTRIBUTE]-(:Attribute {name:"kind"})-[:HAS_VALUE]-(av1:AttributeValue) + WHERE av1.value = "Parent" AND av2.value = true AND all(r IN relationships(path) WHERE ( %(filters)s )) + RETURN av2 as av2_sub, r22, a2, path as path2 + ORDER BY r22.branch_level DESC, r22.from DESC + LIMIT 1 + } + WITH av2_sub as av2, r22, a2, path2 + WHERE all(r IN relationships(path2) WHERE r.status = "active") + MERGE (new_value: AttributeValue { value: false, is_default: false }) + CREATE (a2)-[:HAS_VALUE { branch: r22.branch, branch_level: r22.branch_level, status: "active", from: $at, to: null } ]->(new_value) + SET r22.to = $at + """ % {"filters": filters} + self.add_to_query(query) + self.return_labels = ["av2"] + + +class Migration003(GraphMigration): + name: str = "003_relationship_parent_mandatory" + queries: Sequence[type[Query]] = [Migration003Query01] + minimum_version: int = 2 + + async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult: + result = MigrationResult() + + return result diff --git a/backend/infrahub/core/migrations/graph/m004_add_attr_documentation.py b/backend/infrahub/core/migrations/graph/m004_add_attr_documentation.py new file mode 100644 index 0000000000..78f89f3979 --- /dev/null +++ b/backend/infrahub/core/migrations/graph/m004_add_attr_documentation.py @@ -0,0 +1,48 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict + +from typing_extensions import Self + +from infrahub.core.constants import SchemaPathType +from infrahub.core.migrations.shared import MigrationResult +from infrahub.core.path import SchemaPath + +from ..schema.node_attribute_add import NodeAttributeAddMigration +from ..shared import InternalSchemaMigration + +if TYPE_CHECKING: + from infrahub.database import InfrahubDatabase + + +class Migration004(InternalSchemaMigration): + name: str = "004_node_add_attr_documentation" + minimum_version: int = 3 + + @classmethod + def init(cls, *args: Any, **kwargs: Dict[str, Any]) -> Self: + internal_schema = cls.get_internal_schema() + schema_node = internal_schema.get_node(name="SchemaNode") + schema_generic = internal_schema.get_node(name="SchemaGeneric") + + migrations = [ + NodeAttributeAddMigration( + new_node_schema=schema_node, + previous_node_schema=schema_node, + schema_path=SchemaPath( + schema_kind="SchemaNode", path_type=SchemaPathType.ATTRIBUTE, field_name="documentation" + ), + ), + NodeAttributeAddMigration( + new_node_schema=schema_generic, + previous_node_schema=schema_generic, + schema_path=SchemaPath( + 
schema_kind="SchemaGeneric", path_type=SchemaPathType.ATTRIBUTE, field_name="documentation" + ), + ), + ] + return cls(*args, migrations=migrations, **kwargs) # type: ignore[arg-type] + + async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult: + result = MigrationResult() + return result diff --git a/backend/infrahub/core/migrations/graph/m005_add_rel_read_only.py b/backend/infrahub/core/migrations/graph/m005_add_rel_read_only.py new file mode 100644 index 0000000000..1c36939bd9 --- /dev/null +++ b/backend/infrahub/core/migrations/graph/m005_add_rel_read_only.py @@ -0,0 +1,40 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict + +from typing_extensions import Self + +from infrahub.core.constants import SchemaPathType +from infrahub.core.migrations.shared import MigrationResult +from infrahub.core.path import SchemaPath + +from ..schema.node_attribute_add import NodeAttributeAddMigration +from ..shared import InternalSchemaMigration + +if TYPE_CHECKING: + from infrahub.database import InfrahubDatabase + + +class Migration005(InternalSchemaMigration): + name: str = "005_add_rel_read_only" + minimum_version: int = 4 + + @classmethod + def init(cls, *args: Any, **kwargs: Dict[str, Any]) -> Self: + internal_schema = cls.get_internal_schema() + schema_rel = internal_schema.get_node(name="SchemaRelationship") + + migrations = [ + NodeAttributeAddMigration( + new_node_schema=schema_rel, + previous_node_schema=schema_rel, + schema_path=SchemaPath( + schema_kind="SchemaRelationship", path_type=SchemaPathType.ATTRIBUTE, field_name="read_only" + ), + ), + ] + return cls(*args, migrations=migrations, **kwargs) # type: ignore[arg-type] + + async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult: + result = MigrationResult() + return result diff --git a/backend/infrahub/core/migrations/graph/m006_add_rel_on_delete.py b/backend/infrahub/core/migrations/graph/m006_add_rel_on_delete.py new file mode 100644 index 0000000000..c179b5060e --- /dev/null +++ b/backend/infrahub/core/migrations/graph/m006_add_rel_on_delete.py @@ -0,0 +1,40 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict + +from typing_extensions import Self + +from infrahub.core.constants import SchemaPathType +from infrahub.core.migrations.shared import MigrationResult +from infrahub.core.path import SchemaPath + +from ..schema.node_attribute_add import NodeAttributeAddMigration +from ..shared import InternalSchemaMigration + +if TYPE_CHECKING: + from infrahub.database import InfrahubDatabase + + +class Migration006(InternalSchemaMigration): + name: str = "006_add_rel_on_delete" + minimum_version: int = 5 + + @classmethod + def init(cls, *args: Any, **kwargs: Dict[str, Any]) -> Self: + internal_schema = cls.get_internal_schema() + schema_rel = internal_schema.get_node(name="SchemaRelationship") + + migrations = [ + NodeAttributeAddMigration( + new_node_schema=schema_rel, + previous_node_schema=schema_rel, + schema_path=SchemaPath( + schema_kind="SchemaRelationship", path_type=SchemaPathType.ATTRIBUTE, field_name="on_delete" + ), + ), + ] + return cls(*args, migrations=migrations, **kwargs) # type: ignore[arg-type] + + async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult: + result = MigrationResult() + return result diff --git a/backend/infrahub/core/migrations/graph/m007_add_rel_allow_override.py b/backend/infrahub/core/migrations/graph/m007_add_rel_allow_override.py new file mode 100644 index 
0000000000..2b7e45c1e1 --- /dev/null +++ b/backend/infrahub/core/migrations/graph/m007_add_rel_allow_override.py @@ -0,0 +1,48 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict + +from typing_extensions import Self + +from infrahub.core.constants import SchemaPathType +from infrahub.core.migrations.shared import MigrationResult +from infrahub.core.path import SchemaPath + +from ..schema.node_attribute_add import NodeAttributeAddMigration +from ..shared import InternalSchemaMigration + +if TYPE_CHECKING: + from infrahub.database import InfrahubDatabase + + +class Migration007(InternalSchemaMigration): + name: str = "007_add_allow_override" + minimum_version: int = 6 + + @classmethod + def init(cls, *args: Any, **kwargs: Dict[str, Any]) -> Self: + internal_schema = cls.get_internal_schema() + schema_rel = internal_schema.get_node(name="SchemaRelationship") + schema_attr = internal_schema.get_node(name="SchemaAttribute") + + migrations = [ + NodeAttributeAddMigration( + new_node_schema=schema_attr, + previous_node_schema=schema_attr, + schema_path=SchemaPath( + schema_kind="SchemaAttribute", path_type=SchemaPathType.ATTRIBUTE, field_name="allow_override" + ), + ), + NodeAttributeAddMigration( + new_node_schema=schema_rel, + previous_node_schema=schema_rel, + schema_path=SchemaPath( + schema_kind="SchemaRelationship", path_type=SchemaPathType.ATTRIBUTE, field_name="allow_override" + ), + ), + ] + return cls(*args, migrations=migrations, **kwargs) # type: ignore[arg-type] + + async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult: + result = MigrationResult() + return result diff --git a/backend/infrahub/core/migrations/schema/attribute_name_update.py b/backend/infrahub/core/migrations/schema/attribute_name_update.py index af5cbcc6c8..5c50441f99 100644 --- a/backend/infrahub/core/migrations/schema/attribute_name_update.py +++ b/backend/infrahub/core/migrations/schema/attribute_name_update.py @@ -54,6 +54,7 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, self.params.update(branch_params) self.params["node_kind"] = self.migration.new_schema.kind + self.params["profile_kind"] = f"Profile{self.migration.new_schema.kind}" self.params["new_attr_name"] = self.migration.new_attribute_schema.name attr_id = self.migration.new_attribute_schema.id @@ -95,7 +96,8 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, query = """ // Find all the active nodes MATCH (node:Node) - WHERE $node_kind IN LABELS(node) AND exists((node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $prev_attr_name })) + WHERE ($node_kind IN LABELS(node) OR $profile_kind IN LABELS(node)) + AND exists((node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $prev_attr_name })) CALL { WITH node MATCH (root:Root)<-[r:IS_PART_OF]-(node) diff --git a/backend/infrahub/core/migrations/schema/node_attribute_add.py b/backend/infrahub/core/migrations/schema/node_attribute_add.py index 23d67ff597..3516ff2023 100644 --- a/backend/infrahub/core/migrations/schema/node_attribute_add.py +++ b/backend/infrahub/core/migrations/schema/node_attribute_add.py @@ -2,7 +2,7 @@ from typing import TYPE_CHECKING, Any, Dict, Sequence -from infrahub.core.constants import RelationshipStatus +from infrahub.core.constants import NULL_VALUE, RelationshipStatus from ..shared import AttributeMigrationQuery, AttributeSchemaMigration @@ -25,7 +25,7 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, if 
self.migration.new_attribute_schema.default_value: self.params["attr_value"] = self.migration.new_attribute_schema.default_value else: - self.params["attr_value"] = "NULL" + self.params["attr_value"] = NULL_VALUE self.params["rel_props"] = { "branch": self.branch.name, @@ -50,7 +50,7 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, } WITH n1 as n, r1 as rb WHERE rb.status = "active" - MERGE (av:AttributeValue { value: $attr_value }) + MERGE (av:AttributeValue { value: $attr_value, is_default: true }) MERGE (is_protected_value:Boolean { value: $is_protected_default }) MERGE (is_visible_value:Boolean { value: $is_visible_default }) WITH n, av, is_protected_value, is_visible_value diff --git a/backend/infrahub/core/migrations/schema/runner.py b/backend/infrahub/core/migrations/schema/runner.py index 0e7306272c..1a229a1100 100644 --- a/backend/infrahub/core/migrations/schema/runner.py +++ b/backend/infrahub/core/migrations/schema/runner.py @@ -1,14 +1,14 @@ from __future__ import annotations import asyncio -from typing import TYPE_CHECKING, List, Optional, Union +from typing import TYPE_CHECKING, List, Optional from infrahub.message_bus.messages.schema_migration_path import SchemaMigrationPath, SchemaMigrationPathResponse if TYPE_CHECKING: from infrahub.core.branch import Branch from infrahub.core.models import SchemaUpdateMigrationInfo - from infrahub.core.schema import GenericSchema, NodeSchema + from infrahub.core.schema import MainSchemaTypes from infrahub.core.schema_manager import SchemaBranch from infrahub.services import InfrahubServices @@ -31,8 +31,8 @@ async def schema_migrations_runner( f"Preparing migration for {migration.migration_name!r} ({migration.routing_key})", branch=branch.name ) - new_node_schema: Optional[Union[NodeSchema, GenericSchema]] = None - previous_node_schema: Optional[Union[NodeSchema, GenericSchema]] = None + new_node_schema: Optional[MainSchemaTypes] = None + previous_node_schema: Optional[MainSchemaTypes] = None if new_schema.has(name=migration.path.schema_kind): new_node_schema = new_schema.get(name=migration.path.schema_kind) diff --git a/backend/infrahub/core/migrations/shared.py b/backend/infrahub/core/migrations/shared.py index b347303ae2..ddba160974 100644 --- a/backend/infrahub/core/migrations/shared.py +++ b/backend/infrahub/core/migrations/shared.py @@ -1,15 +1,25 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, List, Optional, Sequence, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union from pydantic import BaseModel, ConfigDict, Field +from typing_extensions import Self +from infrahub.core import registry from infrahub.core.path import SchemaPath # noqa: TCH001 from infrahub.core.query import Query, QueryType -from infrahub.core.schema import AttributeSchema, GenericSchema, NodeSchema, RelationshipSchema # noqa: TCH001 +from infrahub.core.schema import ( + AttributeSchema, + GenericSchema, + NodeSchema, + RelationshipSchema, + SchemaRoot, + internal_schema, +) if TYPE_CHECKING: from infrahub.core.branch import Branch + from infrahub.core.schema_manager import SchemaBranch from infrahub.core.timestamp import Timestamp from infrahub.database import InfrahubDatabase @@ -99,6 +109,10 @@ class GraphMigration(BaseModel): queries: Sequence[type[Query]] = Field(..., description="List of queries to execute for this migration") minimum_version: int = Field(..., description="Minimum version of the graph to execute this migration") + @classmethod + def init(cls, 
*args: Any, **kwargs: Dict[str, Any]) -> Self: + return cls(*args, **kwargs) # type: ignore[arg-type] + async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult: raise NotImplementedError @@ -117,6 +131,47 @@ async def execute(self, db: InfrahubDatabase) -> MigrationResult: return result +class InternalSchemaMigration(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + name: str = Field(..., description="Name of the migration") + migrations: Sequence[SchemaMigration] = Field(..., description="") + minimum_version: int = Field(..., description="Minimum version of the graph to execute this migration") + + @staticmethod + def get_internal_schema() -> SchemaBranch: + from infrahub.core.schema_manager import SchemaBranch # pylint: disable=import-outside-toplevel + + # load the internal schema from + schema = SchemaRoot(**internal_schema) + schema_branch = SchemaBranch(cache={}, name="default_branch") + schema_branch.load_schema(schema=schema) + schema_branch.process() + + return schema_branch + + @classmethod + def init(cls, *args: Any, **kwargs: Dict[str, Any]) -> Self: + return cls(*args, **kwargs) # type: ignore[arg-type] + + async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult: + raise NotImplementedError + + async def execute(self, db: InfrahubDatabase) -> MigrationResult: + result = MigrationResult() + + default_branch = registry.get_branch_from_registry() + + for migration in self.migrations: + try: + execution_result = await migration.execute(db=db, branch=default_branch) + result.errors.extend(execution_result.errors) + except Exception as exc: # pylint: disable=broad-exception-caught + result.errors.append(str(exc)) + return result + + return result + + class MigrationQuery(Query): type: QueryType = QueryType.WRITE diff --git a/backend/infrahub/core/models.py b/backend/infrahub/core/models.py index bb1ee23e98..1508ce97f8 100644 --- a/backend/infrahub/core/models.py +++ b/backend/infrahub/core/models.py @@ -1,7 +1,7 @@ from __future__ import annotations import hashlib -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple from infrahub_sdk.utils import compare_lists, deep_merge_dict, duplicates, intersection from pydantic import BaseModel, ConfigDict, Field @@ -16,7 +16,7 @@ from infrahub.core.path import SchemaPath if TYPE_CHECKING: - from infrahub.core.schema import GenericSchema, NodeSchema + from infrahub.core.schema import MainSchemaTypes from infrahub.core.schema_manager import SchemaBranch @@ -76,6 +76,27 @@ def __add__(self, other: SchemaDiff) -> SchemaDiff: merged_dict = deep_merge_dict(self.model_dump(), other.model_dump()) return self.__class__(**merged_dict) + def print(self, indentation: int = 4, column_size: int = 32) -> None: + data = self.model_dump() + + indent_str = " " * indentation + + # pylint: disable=too-many-nested-blocks + for node_action, node_info in data.items(): + for node_name, elements in node_info.items(): + print(f"{str(node_name).ljust(column_size)} | {str(node_action).title()}") + for element_action, element_info in elements.items(): + for element_name, element_children in element_info.items(): + print( + f"{indent_str}{str(element_name).ljust(column_size - indentation)} | {str(element_action).title()}" + ) + if element_children and isinstance(element_children, dict): + for sub_action, sub_info in element_children.items(): + for sub_name, _ in sub_info.items(): + print( + f"{indent_str * 
2}{str(sub_name).ljust(column_size - indentation * 2)} | {str(sub_action).title()}" + ) + class SchemaUpdateValidationError(BaseModel): model_config = ConfigDict(extra="forbid") @@ -155,7 +176,7 @@ def process_diff(self, schema: SchemaBranch) -> None: def _process_attrs_rels( self, - schema: Union[NodeSchema, GenericSchema], + schema: MainSchemaTypes, node_field_name: str, node_field_diff: HashableModelDiff, ) -> None: @@ -184,8 +205,8 @@ def _process_attrs_rels( for field_name, sub_field_diff in node_field_diff.changed.items(): field = schema.get_field(name=field_name) - if not sub_field_diff or not field: - raise ValueError("sub_field_diff and field must be defined, unexpected situation") + if not sub_field_diff: + raise ValueError("sub_field_diff must be defined, unexpected situation") for prop_name in sub_field_diff.changed: field_info = field.model_fields[prop_name] @@ -203,7 +224,7 @@ def _process_attrs_rels( field_update=field_update, ) - def _process_node_attributes(self, schema: Union[NodeSchema, GenericSchema], node_field_name: str) -> None: + def _process_node_attributes(self, schema: MainSchemaTypes, node_field_name: str) -> None: field_info = schema.model_fields[node_field_name] field_update = str(field_info.json_schema_extra.get("update")) # type: ignore[union-attr] diff --git a/backend/infrahub/core/node/__init__.py b/backend/infrahub/core/node/__init__.py index 80cebdd94b..9cfc7da040 100644 --- a/backend/infrahub/core/node/__init__.py +++ b/backend/infrahub/core/node/__init__.py @@ -13,7 +13,7 @@ NodeDeleteQuery, NodeGetListQuery, ) -from infrahub.core.schema import AttributeSchema, NodeSchema, RelationshipSchema +from infrahub.core.schema import AttributeSchema, NodeSchema, ProfileSchema, RelationshipSchema from infrahub.core.timestamp import Timestamp from infrahub.exceptions import InitializationError, ValidationError from infrahub.types import ATTRIBUTE_TYPES @@ -56,7 +56,7 @@ def __init_subclass_with_meta__( # pylint: disable=arguments-differ _meta.default_filter = default_filter super(Node, cls).__init_subclass_with_meta__(_meta=_meta, **options) - def get_schema(self) -> NodeSchema: + def get_schema(self) -> Union[NodeSchema, ProfileSchema]: return self._schema def get_kind(self) -> str: @@ -80,7 +80,11 @@ def get_labels(self) -> List[str]: self._schema.namespace not in ["Schema", "Internal"] and InfrahubKind.GENERICGROUP not in self._schema.inherit_from ): - labels.append("CoreNode") + labels.append(InfrahubKind.NODE) + return labels + + if isinstance(self._schema, ProfileSchema): + labels: List[str] = [self.get_kind()] + self._schema.inherit_from return labels return [self.get_kind()] @@ -104,11 +108,11 @@ def __repr__(self): def __init__( self, - schema: NodeSchema, + schema: Union[NodeSchema, ProfileSchema], branch: Branch, at: Timestamp, ): - self._schema: NodeSchema = schema + self._schema: Union[NodeSchema, ProfileSchema] = schema self._branch: Branch = branch self._at: Timestamp = at self._existing: bool = False @@ -128,7 +132,7 @@ def __init__( @classmethod async def init( cls, - schema: Union[NodeSchema, str], + schema: Union[NodeSchema, ProfileSchema, str], db: InfrahubDatabase, branch: Optional[Union[Branch, str]] = None, at: Optional[Union[Timestamp, str]] = None, @@ -137,13 +141,13 @@ async def init( branch = await registry.get_branch(branch=branch, db=db) - if isinstance(schema, NodeSchema): + if isinstance(schema, (NodeSchema, ProfileSchema)): attrs["schema"] = schema elif isinstance(schema, str): # TODO need to raise a proper exception for this, 
right now it will raise a generic ValueError attrs["schema"] = registry.schema.get(name=schema, branch=branch) else: - raise ValueError(f"Invalid schema provided {type(schema)}, expected NodeSchema") + raise ValueError(f"Invalid schema provided {type(schema)}, expected NodeSchema or ProfileSchema") attrs["branch"] = branch attrs["at"] = Timestamp(at) @@ -287,6 +291,7 @@ async def process_label(self, db: Optional[InfrahubDatabase] = None): # pylint: if not self.id and hasattr(self, "label") and hasattr(self, "name"): if self.label.value is None and self.name.value: self.label.value = " ".join([word.title() for word in self.name.value.split("_")]) + self.label.is_default = False async def new(self, db: InfrahubDatabase, id: Optional[str] = None, **kwargs) -> Self: if id and not is_valid_uuid(id): @@ -388,10 +393,6 @@ async def delete(self, db: InfrahubDatabase, at: Optional[Timestamp] = None): delete_at = Timestamp(at) - # Ensure the node can be safely deleted first TODO - # - Check if there is a relationship pointing to it that is mandatory - # - Check if some nodes must be deleted too CASCADE (TODO) - # Go over the list of Attribute and update them one by one for name in self._attributes: attr: BaseAttribute = getattr(self, name) diff --git a/backend/infrahub/core/node/constraints/attribute_uniqueness.py b/backend/infrahub/core/node/constraints/attribute_uniqueness.py index a17cacc87f..ac8ff47f92 100644 --- a/backend/infrahub/core/node/constraints/attribute_uniqueness.py +++ b/backend/infrahub/core/node/constraints/attribute_uniqueness.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, List, Optional, Union +from typing import TYPE_CHECKING, List, Optional from infrahub.core import registry from infrahub.core.branch import Branch @@ -10,7 +10,7 @@ from .interface import NodeConstraintInterface if TYPE_CHECKING: - from infrahub.core.schema import GenericSchema, NodeSchema + from infrahub.core.schema import MainSchemaTypes class NodeAttributeUniquenessConstraint(NodeConstraintInterface): @@ -25,7 +25,7 @@ async def check(self, node: Node, at: Optional[Timestamp] = None, filters: Optio if filters and unique_attr.name not in filters: continue - comparison_schema: Union[NodeSchema, GenericSchema] = node_schema + comparison_schema: MainSchemaTypes = node_schema attr = getattr(node, unique_attr.name) if unique_attr.inherited: for generic_parent_schema_name in node_schema.inherit_from: diff --git a/backend/infrahub/core/node/constraints/grouped_uniqueness.py b/backend/infrahub/core/node/constraints/grouped_uniqueness.py index 3b10ac4e6f..ab059c7427 100644 --- a/backend/infrahub/core/node/constraints/grouped_uniqueness.py +++ b/backend/infrahub/core/node/constraints/grouped_uniqueness.py @@ -1,9 +1,13 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Union +from typing import TYPE_CHECKING, Iterable, List, Optional, Set from infrahub.core import registry -from infrahub.core.schema import GenericSchema, NodeSchema, SchemaAttributePath, SchemaAttributePathValue +from infrahub.core.schema import ( + MainSchemaTypes, + SchemaAttributePath, + SchemaAttributePathValue, +) from infrahub.core.validators.uniqueness.index import UniquenessQueryResultsIndex from infrahub.core.validators.uniqueness.model import ( NodeUniquenessQueryRequest, @@ -33,7 +37,7 @@ def __init__(self, db: InfrahubDatabase, branch: Branch) -> None: def _build_query_request( self, updated_node: Node, - node_schema: Union[NodeSchema, GenericSchema], + node_schema: MainSchemaTypes, 
path_groups: List[List[SchemaAttributePath]], filters: Optional[List[str]] = None, ) -> NodeUniquenessQueryRequest: @@ -134,14 +138,16 @@ async def _check_results( async def _check_one_schema( self, node: Node, - node_schema: Union[NodeSchema, GenericSchema], + node_schema: MainSchemaTypes, at: Optional[Timestamp] = None, filters: Optional[List[str]] = None, ) -> None: - path_groups = node_schema.get_unique_constraint_schema_attribute_paths() + path_groups = node_schema.get_unique_constraint_schema_attribute_paths(branch=self.branch) query_request = self._build_query_request( updated_node=node, node_schema=node_schema, path_groups=path_groups, filters=filters ) + if not query_request: + return query = await NodeUniqueAttributeConstraintQuery.init( db=self.db, branch=self.branch, at=at, query_request=query_request, min_count_required=0 ) @@ -150,7 +156,7 @@ async def _check_one_schema( async def check(self, node: Node, at: Optional[Timestamp] = None, filters: Optional[List[str]] = None) -> None: node_schema = node.get_schema() - schemas_to_check: List[Union[NodeSchema, GenericSchema]] = [node_schema] + schemas_to_check: List[MainSchemaTypes] = [node_schema] if node_schema.inherit_from: for parent_schema_name in node_schema.inherit_from: parent_schema = self.schema_branch.get(name=parent_schema_name, duplicate=False) diff --git a/backend/infrahub/core/node/constraints/interface.py b/backend/infrahub/core/node/constraints/interface.py index 93e14b35ce..09c9ce7b21 100644 --- a/backend/infrahub/core/node/constraints/interface.py +++ b/backend/infrahub/core/node/constraints/interface.py @@ -7,5 +7,4 @@ class NodeConstraintInterface(ABC): @abstractmethod - async def check(self, node: Node, at: Optional[Timestamp] = None, filters: Optional[List[str]] = None) -> None: - ... + async def check(self, node: Node, at: Optional[Timestamp] = None, filters: Optional[List[str]] = None) -> None: ... 
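Editor's note: the new `delete_validator.py` added below expands a delete request into the full set of nodes that must be removed together. Peers reached through relationships whose schema sets `on_delete: cascade` are pulled into the delete set, while nodes that keep a mandatory (non-optional) relationship pointing at a deleted node cause a `ValidationError` unless they are deleted too. The following is a minimal, illustrative sketch of that expansion logic, not the actual implementation; the inputs `cascade_peers` and `mandatory_dependents` are hypothetical stand-ins for what `NodeDeleteIndex` and `RelationshipGetByIdentifierQuery` derive from the schema and the graph.

# Illustrative sketch only -- a simplified stand-in for NodeDeleteValidator._analyze_delete_dependencies.
# `cascade_peers` and `mandatory_dependents` are hypothetical inputs; the real code derives them from
# RelationshipGetByIdentifierQuery results plus the schema's `on_delete` and `optional` flags.
from __future__ import annotations

from typing import Iterable


def expand_delete_set(
    start_ids: Iterable[str],
    cascade_peers: dict[str, set[str]],         # node id -> peers deleted together with it (on_delete: cascade)
    mandatory_dependents: dict[str, set[str]],  # node id -> peers holding a mandatory relationship to it
) -> set[str]:
    to_check = set(start_ids)
    to_delete: set[str] = set()
    blockers: dict[str, set[str]] = {}  # dependent id -> ids it depends on

    while to_check:
        node_id = to_check.pop()
        to_delete.add(node_id)
        # cascade: anything hanging off a deleted node via an on_delete=cascade relationship goes too
        for peer_id in cascade_peers.get(node_id, set()):
            if peer_id not in to_delete:
                to_check.add(peer_id)
        # dependents: these must end up in the delete set themselves, or the delete is rejected
        for peer_id in mandatory_dependents.get(node_id, set()):
            blockers.setdefault(peer_id, set()).add(node_id)

    missing = set(blockers) - to_delete
    if missing:
        raise ValueError(f"cannot delete: {sorted(missing)} still require nodes in the delete set")
    return to_delete


# Deleting site1 cascades to rack1; device1 has a mandatory relationship to rack1,
# so the request only succeeds because device1 is explicitly part of it.
print(expand_delete_set(
    {"site1", "device1"},
    cascade_peers={"site1": {"rack1"}},
    mandatory_dependents={"rack1": {"device1"}},
))

In the validator below, the same two roles are tracked as `DeleteRelationshipType.CASCADE_DELETE` and `DeleteRelationshipType.DEPENDENT_NODE` entries in `NodeDeleteIndex`.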
diff --git a/backend/infrahub/core/node/delete_validator.py b/backend/infrahub/core/node/delete_validator.py new file mode 100644 index 0000000000..034599e1d7 --- /dev/null +++ b/backend/infrahub/core/node/delete_validator.py @@ -0,0 +1,189 @@ +from enum import Enum +from typing import Iterable, Optional, Union + +from infrahub.core import registry +from infrahub.core.branch import Branch +from infrahub.core.constants import RelationshipDeleteBehavior +from infrahub.core.node import Node +from infrahub.core.query.relationship import ( + FullRelationshipIdentifier, + RelationshipGetByIdentifierQuery, + RelationshipPeersData, +) +from infrahub.core.schema import MainSchemaTypes +from infrahub.core.timestamp import Timestamp +from infrahub.database import InfrahubDatabase +from infrahub.exceptions import ValidationError + + +class DeleteRelationshipType(Enum): + CASCADE_DELETE = "cascade" + DEPENDENT_NODE = "dependent" + + +class NodeDeleteIndex: + def __init__(self, all_schemas_map: dict[str, MainSchemaTypes]) -> None: + self._all_schemas_map = all_schemas_map + # {node_schema: {DeleteRelationshipType: {relationship_identifier: peer_node_schema}}} + self._dependency_graph: dict[str, dict[DeleteRelationshipType, dict[str, str]]] = {} + + def index(self, start_schemas: Iterable[MainSchemaTypes]) -> None: + self._index_cascading_deletes(start_schemas=start_schemas) + self._index_dependent_schema(start_schemas=start_schemas) + + def _add_to_dependency_graph( + self, kind: str, relationship_type: DeleteRelationshipType, relationship_identifier: str, peer_kind: str + ) -> None: + if kind not in self._dependency_graph: + self._dependency_graph[kind] = {} + if relationship_type not in self._dependency_graph[kind]: + self._dependency_graph[kind][relationship_type] = {} + self._dependency_graph[kind][relationship_type][relationship_identifier] = peer_kind + + def _index_cascading_deletes(self, start_schemas: Iterable[MainSchemaTypes]) -> None: + kinds_to_check: set[str] = {schema.kind for schema in start_schemas} + while True: + try: + kind_to_check = kinds_to_check.pop() + except KeyError: + break + node_schema = self._all_schemas_map[kind_to_check] + for relationship_schema in node_schema.relationships: + if relationship_schema.on_delete != RelationshipDeleteBehavior.CASCADE: + continue + self._add_to_dependency_graph( + kind=kind_to_check, + relationship_type=DeleteRelationshipType.CASCADE_DELETE, + relationship_identifier=relationship_schema.get_identifier(), + peer_kind=relationship_schema.peer, + ) + if relationship_schema.peer not in self._dependency_graph: + kinds_to_check.add(relationship_schema.peer) + + def _index_dependent_schema(self, start_schemas: Iterable[MainSchemaTypes]) -> None: + start_schema_kinds = {schema.kind for schema in start_schemas} + for node_schema in self._all_schemas_map.values(): + for relationship_schema in node_schema.relationships: + if relationship_schema.optional is True or relationship_schema.peer not in start_schema_kinds: + continue + self._add_to_dependency_graph( + kind=relationship_schema.peer, + relationship_type=DeleteRelationshipType.DEPENDENT_NODE, + relationship_identifier=relationship_schema.get_identifier(), + peer_kind=node_schema.kind, + ) + + def get_relationship_identifiers(self) -> list[FullRelationshipIdentifier]: + full_relationship_identifiers = [] + for node_kind, relationship_type_details in self._dependency_graph.items(): + for relationship_map in relationship_type_details.values(): + for relationship_identifier, peer_kind in 
relationship_map.items(): + full_relationship_identifiers.append( + FullRelationshipIdentifier( + source_kind=node_kind, identifier=relationship_identifier, destination_kind=peer_kind + ) + ) + return full_relationship_identifiers + + def get_relationship_types(self, src_kind: str, relationship_identifier: str) -> set[DeleteRelationshipType]: + relationship_types: set[DeleteRelationshipType] = set() + if src_kind not in self._dependency_graph: + return relationship_types + for relationship_type, relationships_map in self._dependency_graph[src_kind].items(): + if relationship_identifier in relationships_map: + relationship_types.add(relationship_type) + return relationship_types + + +class NodeDeleteValidator: + def __init__(self, db: InfrahubDatabase, branch: Branch): + self.db = db + self.branch = branch + schema_branch = registry.schema.get_schema_branch(name=self.branch.name) + self._all_schemas_map = schema_branch.get_all(duplicate=False) + self.index: NodeDeleteIndex = NodeDeleteIndex(all_schemas_map=self._all_schemas_map) + + async def get_ids_to_delete(self, nodes: Iterable[Node], at: Optional[Union[Timestamp, str]] = None) -> set[str]: + start_schemas = {node.get_schema() for node in nodes} + self.index.index(start_schemas=start_schemas) + at = Timestamp(at) + + return await self._analyze_delete_dependencies(start_nodes=nodes, at=at) + + async def _analyze_delete_dependencies( + self, start_nodes: Iterable[Node], at: Optional[Union[Timestamp, str]] + ) -> set[str]: + full_relationship_identifiers = self.index.get_relationship_identifiers() + if not full_relationship_identifiers: + return {node.get_id() for node in start_nodes} + + query = await RelationshipGetByIdentifierQuery.init( + db=self.db, full_identifiers=full_relationship_identifiers, branch=self.branch, at=at + ) + await query.execute(db=self.db) + + peer_data_by_source_id = self._build_peer_data_map(peers_datas=query.get_peers()) + node_ids_to_check = {node.get_id() for node in start_nodes} + node_ids_to_delete: set[str] = set() + dependent_node_details_map: dict[str, list[RelationshipPeersData]] = {} + + while node_ids_to_check: + node_id = node_ids_to_check.pop() + node_ids_to_delete.add(node_id) + if node_id not in peer_data_by_source_id: + continue + peer_data_list = peer_data_by_source_id[node_id] + for peer_data in peer_data_list: + relationship_types = self.index.get_relationship_types( + src_kind=peer_data.source_kind, relationship_identifier=peer_data.identifier + ) + peer_id = str(peer_data.destination_id) + if DeleteRelationshipType.CASCADE_DELETE in relationship_types: + if peer_id not in node_ids_to_delete: + node_ids_to_check.add(peer_id) + if DeleteRelationshipType.DEPENDENT_NODE in relationship_types: + if peer_id not in dependent_node_details_map: + dependent_node_details_map[peer_id] = [] + dependent_node_details_map[peer_id].append(peer_data) + + missing_delete_ids = set(dependent_node_details_map.keys()) - node_ids_to_delete + if not missing_delete_ids: + return node_ids_to_delete + missing_delete_peers_data = [] + for peers_data_list in dependent_node_details_map.values(): + missing_delete_peers_data.extend(peers_data_list) + validation_error = self._build_validation_error(missing_delete_peers_data=missing_delete_peers_data) + raise validation_error + + def _build_peer_data_map( + self, peers_datas: Iterable[RelationshipPeersData] + ) -> dict[str, list[RelationshipPeersData]]: + peer_data_by_source_id: dict[str, list[RelationshipPeersData]] = {} + for peer_data in peers_datas: + source_id = 
str(peer_data.source_id) + if source_id not in peer_data_by_source_id: + peer_data_by_source_id[source_id] = [] + peer_data_by_source_id[source_id].append(peer_data) + # check if this relationship also needs to be tracked going the other way + if not self.index.get_relationship_types( + src_kind=peer_data.destination_kind, relationship_identifier=peer_data.identifier + ): + continue + dest_id = str(peer_data.destination_id) + if dest_id not in peer_data_by_source_id: + peer_data_by_source_id[dest_id] = [] + peer_data_by_source_id[dest_id].append(peer_data.reversed()) + return peer_data_by_source_id + + def _build_validation_error(self, missing_delete_peers_data: Iterable[RelationshipPeersData]) -> ValidationError: + validation_errors = [] + for peers_data in missing_delete_peers_data: + peer_kind = peers_data.destination_kind + peer_schema = self._all_schemas_map[peer_kind] + peer_rel_name = peer_schema.get_relationship_by_identifier(peers_data.identifier).name + peer_path = f"{peer_kind}.{peer_rel_name}" + err_msg = f"Cannot delete {peers_data.source_kind} '{peers_data.source_id}'." + err_msg += f" It is linked to mandatory relationship {peer_rel_name} on node {peer_kind} '{peers_data.destination_id}'" + validation_errors.append(ValidationError({peer_path: err_msg})) + + return ValidationError(validation_errors) diff --git a/backend/infrahub/core/node/ipam.py b/backend/infrahub/core/node/ipam.py new file mode 100644 index 0000000000..7638731793 --- /dev/null +++ b/backend/infrahub/core/node/ipam.py @@ -0,0 +1,34 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Optional + +from infrahub.core.query.ipam import get_utilization + +from . import Node + +if TYPE_CHECKING: + from infrahub.database import InfrahubDatabase + + +class BuiltinIPPrefix(Node): + async def to_graphql( + self, + db: InfrahubDatabase, + fields: Optional[dict] = None, + related_node_ids: Optional[set] = None, + filter_sensitive: bool = False, + ) -> dict: + response = await super().to_graphql( + db, fields=fields, related_node_ids=related_node_ids, filter_sensitive=filter_sensitive + ) + + if fields: + for read_only_attr in ["netmask", "hostmask", "network_address", "broadcast_address"]: + if read_only_attr in fields: + response[read_only_attr] = {"value": getattr(self.prefix, read_only_attr)} # type: ignore[attr-defined] + + if "utilization" in fields: + utilization = await get_utilization(self, db, branch=self._branch) + response["utilization"] = {"value": int(utilization)} + + return response diff --git a/backend/infrahub/core/node/standard.py b/backend/infrahub/core/node/standard.py index 265be7291d..250eee50fc 100644 --- a/backend/infrahub/core/node/standard.py +++ b/backend/infrahub/core/node/standard.py @@ -8,6 +8,7 @@ from infrahub_sdk import UUIDT from pydantic import BaseModel +from infrahub.core.constants import NULL_VALUE from infrahub.core.query.standard_node import ( StandardNodeCreateQuery, StandardNodeDeleteQuery, @@ -165,7 +166,7 @@ def from_db(cls, node: Neo4jNode, extras: Optional[Dict[str, Any]] = None) -> Se field_type = cls.guess_field_type(cls.model_fields[key]) - if value == "NULL": + if value == NULL_VALUE: attrs[key] = None elif issubclass(field_type, (int, float, bool, str, UUID)): attrs[key] = value @@ -190,7 +191,7 @@ def to_db(self) -> Dict[str, Any]: field_type = self.guess_field_type(field) if attr_value is None: - data[attr_name] = "NULL" + data[attr_name] = NULL_VALUE elif inspect.isclass(field_type) and issubclass(field_type, BaseModel): if 
isinstance(attr_value, list): clean_value = [item.dict() for item in attr_value] diff --git a/backend/infrahub/core/path.py b/backend/infrahub/core/path.py index dc729a83ec..615e8b75f8 100644 --- a/backend/infrahub/core/path.py +++ b/backend/infrahub/core/path.py @@ -144,7 +144,7 @@ def init( path_type = SchemaPathType.NODE if field_name: field = schema.get_field(name=field_name) - path_type = SchemaPathType.ATTRIBUTE if field and field.is_attribute else SchemaPathType.RELATIONSHIP + path_type = SchemaPathType.ATTRIBUTE if field.is_attribute else SchemaPathType.RELATIONSHIP if field_name and property_name and not hasattr(schema.get_field(name=field_name), property_name): raise ValueError(f"Property {property_name} is not valid for {schema.kind}:{field_name}") diff --git a/backend/infrahub/core/property.py b/backend/infrahub/core/property.py index 7d6cbffe0c..3bf991e559 100644 --- a/backend/infrahub/core/property.py +++ b/backend/infrahub/core/property.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, List, Union +from typing import TYPE_CHECKING, List, Optional, Union from uuid import UUID from pydantic.v1 import BaseModel @@ -9,7 +9,9 @@ from infrahub.core.registry import registry if TYPE_CHECKING: + from infrahub.core.branch import Branch from infrahub.core.node import Node + from infrahub.core.timestamp import Timestamp from infrahub.database import InfrahubDatabase @@ -30,7 +32,7 @@ class FlagPropertyMixin: is_visible = True is_protected = False - def _init_flag_property_mixin(self, kwargs: dict = None) -> None: + def _init_flag_property_mixin(self, kwargs: Optional[dict] = None) -> None: if not kwargs: return @@ -42,7 +44,10 @@ def _init_flag_property_mixin(self, kwargs: dict = None) -> None: class NodePropertyMixin: _node_properties: List[str] = [v.value for v in NodeProperty] - def _init_node_property_mixin(self, kwargs: dict = None) -> None: + branch: Branch + at: Timestamp + + def _init_node_property_mixin(self, kwargs: Optional[dict] = None) -> None: for node in self._node_properties: setattr(self, f"_{node}", None) setattr(self, f"{node}_id", None) @@ -57,35 +62,41 @@ def _init_node_property_mixin(self, kwargs: dict = None) -> None: setattr(self, f"{node}_id", kwargs.get(f"{node}_id")) @property - def source(self): + def source(self) -> Node: return self._get_node_property_from_cache(name="source") @source.setter - def source(self, value): + def source(self, value: Union[str, Node, UUID]) -> None: self._set_node_property(name="source", value=value) @property - def owner(self): + def owner(self) -> Node: return self._get_node_property_from_cache(name="owner") @owner.setter - def owner(self, value): + def owner(self, value: Optional[Union[str, Node, UUID]]) -> None: self._set_node_property(name="owner", value=value) - async def get_source(self, db: InfrahubDatabase): + def clear_owner(self) -> None: + self._set_node_property(name="owner", value=None) + + async def get_source(self, db: InfrahubDatabase) -> Optional[Node]: return await self._get_node_property(name="source", db=db) - def set_source(self, value) -> None: + def clear_source(self) -> None: + self._set_node_property(name="source", value=None) + + def set_source(self, value: Union[str, Node, UUID]) -> None: self._set_node_property(name="source", value=value) - async def get_owner(self, db: InfrahubDatabase): + async def get_owner(self, db: InfrahubDatabase) -> Optional[Node]: return await self._get_node_property(name="owner", db=db) - def set_owner(self, value): + def set_owner(self, 
value: Union[str, Node, UUID]) -> None: self._set_node_property(name="owner", value=value) def _get_node_property_from_cache(self, name: str) -> Node: - """Return the node attribute if it's alraedy present locally, + """Return the node attribute if it's already present locally, Otherwise raise an exception """ item = getattr(self, f"_{name}", None) @@ -96,7 +107,7 @@ def _get_node_property_from_cache(self, name: str) -> Node: return item - async def _get_node_property(self, db: InfrahubDatabase, name: str) -> Node: + async def _get_node_property(self, db: InfrahubDatabase, name: str) -> Optional[Node]: """Return the node attribute. If the node is already present in cache, serve from the cache If the node is not present, query it on the fly using the node_id @@ -106,7 +117,7 @@ async def _get_node_property(self, db: InfrahubDatabase, name: str) -> Node: return getattr(self, f"_{name}", None) - def _set_node_property(self, name: str, value: Union[str, Node, UUID]) -> None: + def _set_node_property(self, name: str, value: Optional[Union[str, Node, UUID]]) -> None: """Set the value of the node_property. If the value is a string, we assume it's an ID and we'll save it to query it later (if needed) If the value is a Node, we save the node and we extract the ID @@ -120,7 +131,7 @@ def _set_node_property(self, name: str, value: Union[str, Node, UUID]) -> None: setattr(self, f"_{name}", None) elif hasattr(value, "_schema"): setattr(self, f"_{name}", value) - setattr(self, f"{name}_id", value.id) + setattr(self, f"{name}_id", getattr(value, "id", None)) elif value is None: setattr(self, f"_{name}", None) setattr(self, f"{name}_id", None) diff --git a/backend/infrahub/core/query/__init__.py b/backend/infrahub/core/query/__init__.py index 1e9868be99..ee90d7b76e 100644 --- a/backend/infrahub/core/query/__init__.py +++ b/backend/infrahub/core/query/__init__.py @@ -1,18 +1,19 @@ from __future__ import annotations -import json from abc import ABC, abstractmethod from collections import defaultdict from dataclasses import dataclass, field from enum import Enum from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Union +import ujson from neo4j.graph import Node as Neo4jNode from neo4j.graph import Relationship as Neo4jRelationship from infrahub import config from infrahub.core.constants import PermissionLevel from infrahub.core.timestamp import Timestamp +from infrahub.database.constants import DatabaseType, Neo4jRuntime from infrahub.exceptions import QueryError if TYPE_CHECKING: @@ -109,9 +110,9 @@ class QueryRel(QueryElement): length_max: Optional[int] = None def __str__(self): - lenght_str = "" + length_str = "" if self.length_max: - lenght_str = "*%s..%s" % ( + length_str = "*%s..%s" % ( self.length_min, self.length_max, ) @@ -119,7 +120,7 @@ def __str__(self): main_str = "[%s%s%s%s]" % ( self.name or "", self.labels_as_str, - lenght_str, + length_str, self.params_as_str, ) @@ -380,7 +381,11 @@ def add_subquery(self, subquery: str, with_clause: Optional[str] = None) -> None self.add_to_query(f"WITH {with_clause}") def get_query( - self, var: bool = False, inline: bool = False, limit: Optional[int] = None, offset: Optional[int] = None + self, + var: bool = False, + inline: bool = False, + limit: Optional[int] = None, + offset: Optional[int] = None, ) -> str: # Make a local copy of the _query_lines limit = limit or self.limit @@ -439,7 +444,7 @@ def prep_value(v): def get_params_for_shell(self): if config.SETTINGS.database.db_type.value == "memgraph": - return json.dumps(self.params) + 
return ujson.dumps(self.params) return self._get_params_for_neo4j_shell() @@ -460,22 +465,32 @@ def _get_params_for_neo4j_shell(self): return ":params { " + ", ".join(params) + " }" - async def execute(self, db: InfrahubDatabase) -> Self: + async def execute( + self, db: InfrahubDatabase, profile: bool = False, runtime: Neo4jRuntime = Neo4jRuntime.DEFAULT + ) -> Self: # Ensure all mandatory params have been provided # Ensure at least 1 return obj has been defined if config.SETTINGS.miscellaneous.print_query_details: self.print(include_var=True) + query_str = self.get_query() + + if profile: + query_str = "PROFILE\n" + query_str + + if runtime != Neo4jRuntime.DEFAULT and db.db_type == DatabaseType.NEO4J: + query_str = f"CYPHER runtime={runtime.value}\n" + query_str + if self.type == QueryType.READ: if self.limit or self.offset: - results = await db.execute_query(query=self.get_query(), params=self.params, name=self.name) + results = await db.execute_query(query=query_str, params=self.params, name=self.name) else: results = await self.query_with_size_limit(db=db) elif self.type == QueryType.WRITE: results, metadata = await db.execute_query_with_metadata( - query=self.get_query(), params=self.params, name=self.name + query=query_str, params=self.params, name=self.name ) if "stats" in metadata: self.stats.add(metadata.get("stats")) @@ -483,7 +498,7 @@ async def execute(self, db: InfrahubDatabase) -> Self: raise ValueError(f"unknown value for {self.type}") if not results and self.raise_error_if_empty: - raise QueryError(self.get_query(), self.params) + raise QueryError(query_str, self.params) self.results = [QueryResult(data=result, labels=self.return_labels) for result in results] self.has_been_executed = True diff --git a/backend/infrahub/core/query/attribute.py b/backend/infrahub/core/query/attribute.py index bf12f45560..4e350a172d 100644 --- a/backend/infrahub/core/query/attribute.py +++ b/backend/infrahub/core/query/attribute.py @@ -2,6 +2,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from infrahub.core.constants import AttributeDBNodeType from infrahub.core.constants.relationship_label import RELATIONSHIP_TO_NODE_LABEL, RELATIONSHIP_TO_VALUE_LABEL from infrahub.core.constants.schema import FlagProperty, NodeProperty from infrahub.core.query import Query, QueryNode, QueryRel, QueryType @@ -53,16 +54,23 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): self.params["branch"] = self.branch.name self.params["branch_level"] = self.branch.hierarchy_level self.params["at"] = at.to_string() - self.params["value"] = self.attr.to_db() + content = self.attr.to_db() + self.params.update(self.attr.to_db()) - query = ( - """ - MATCH (a { uuid: $attr_uuid }) - MERGE (av:AttributeValue { value: $value }) - CREATE (a)-[r:%s { branch: $branch, branch_level: $branch_level, status: "active", from: $at, to: null }]->(av) - """ - % self.attr._rel_to_value_label - ) + prop_list = [f"{key}: ${key}" for key in content.keys()] + + labels = ["AttributeValue"] + node_type = self.attr.get_db_node_type() + if node_type == AttributeDBNodeType.IPHOST: + labels.append("AttributeIPHost") + elif node_type == AttributeDBNodeType.IPNETWORK: + labels.append("AttributeIPNetwork") + + query = """ + MATCH (a:Attribute { uuid: $attr_uuid }) + MERGE (av:%(labels)s { %(props)s } ) + CREATE (a)-[r:%(rel_label)s { branch: $branch, branch_level: $branch_level, status: "active", from: $at, to: null }]->(av) + """ % {"rel_label": self.attr._rel_to_value_label, "labels": 
":".join(labels), "props": ", ".join(prop_list)} self.add_to_query(query) self.return_labels = ["a", "av", "r"] @@ -100,7 +108,7 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): self.params["flag_type"] = self.attr.get_kind() query = """ - MATCH (a { uuid: $attr_uuid }) + MATCH (a:Attribute { uuid: $attr_uuid }) MERGE (flag:Boolean { value: $flag_value }) CREATE (a)-[r:%s { branch: $branch, branch_level: $branch_level, status: "active", from: $at, to: null }]->(flag) """ % self.flag_name.upper() @@ -141,8 +149,8 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): query = ( """ - MATCH (a { uuid: $attr_uuid }) - MATCH (np { uuid: $prop_id }) + MATCH (a:Attribute { uuid: $attr_uuid }) + MATCH (np:Node { uuid: $prop_id }) CREATE (a)-[r:%s { branch: $branch, branch_level: $branch_level, status: "active", from: $at, to: null }]->(np) """ % rel_name @@ -170,7 +178,7 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): """ MATCH (a:Attribute { uuid: $attr_uuid }) MATCH p = ((a)-[r2:HAS_VALUE|IS_VISIBLE|IS_PROTECTED|HAS_SOURCE|HAS_OWNER]->(ap)) - WHERE all(r IN relationships(p) WHERE ( %s)) + WHERE all(r IN relationships(p) WHERE ( %s )) """ % rels_filter ) @@ -180,7 +188,7 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): self.return_labels = ["a", "ap", "r2"] -async def default_attribute_query_filter( # pylint: disable=unused-argument,disable=too-many-branches +async def default_attribute_query_filter( # pylint: disable=unused-argument,too-many-branches,too-many-statements name: str, filter_name: str, branch: Optional[Branch] = None, @@ -189,6 +197,7 @@ async def default_attribute_query_filter( # pylint: disable=unused-argument,dis param_prefix: Optional[str] = None, db: Optional[InfrahubDatabase] = None, partial_match: bool = False, + support_profiles: bool = False, ) -> Tuple[List[QueryElement], Dict[str, Any], List[str]]: """Generate Query String Snippet to filter the right node.""" @@ -215,7 +224,7 @@ async def default_attribute_query_filter( # pylint: disable=unused-argument,dis query_filter.append(QueryNode(name="i", labels=["Attribute"], params={"name": f"${param_prefix}_name"})) query_params[f"{param_prefix}_name"] = name - if filter_name == "value": + if filter_name in ("value", "binary_address"): query_filter.append(QueryRel(labels=[RELATIONSHIP_TO_VALUE_LABEL])) if filter_value is None: @@ -223,20 +232,41 @@ async def default_attribute_query_filter( # pylint: disable=unused-argument,dis else: if partial_match: query_filter.append(QueryNode(name="av", labels=["AttributeValue"])) - query_where.append(f"toString(av.value) CONTAINS toString(${param_prefix}_value)") + query_where.append( + f"toLower(toString(av.{filter_name})) CONTAINS toLower(toString(${param_prefix}_{filter_name}))" + ) + elif support_profiles: + query_filter.append(QueryNode(name="av", labels=["AttributeValue"])) + query_where.append(f"(av.{filter_name} = ${param_prefix}_{filter_name} OR av.is_default)") else: query_filter.append( - QueryNode(name="av", labels=["AttributeValue"], params={"value": f"${param_prefix}_value"}) + QueryNode( + name="av", labels=["AttributeValue"], params={filter_name: f"${param_prefix}_{filter_name}"} + ) ) - query_params[f"{param_prefix}_value"] = filter_value + query_params[f"{param_prefix}_{filter_name}"] = filter_value elif filter_name == "values" and isinstance(filter_value, list): query_filter.extend( (QueryRel(labels=[RELATIONSHIP_TO_VALUE_LABEL]), QueryNode(name="av", labels=["AttributeValue"])) ) - 
query_where.append(f"av.value IN ${param_prefix}_value") + if support_profiles: + query_where.append(f"(av.value IN ${param_prefix}_value OR av.is_default)") + else: + query_where.append(f"av.value IN ${param_prefix}_value") query_params[f"{param_prefix}_value"] = filter_value + elif filter_name == "version": + query_filter.append(QueryRel(labels=[RELATIONSHIP_TO_VALUE_LABEL])) + + if filter_value is None: + query_filter.append(QueryNode(name="av", labels=["AttributeValue"])) + else: + query_filter.append( + QueryNode(name="av", labels=["AttributeValue"], params={filter_name: f"${param_prefix}_{filter_name}"}) + ) + query_params[f"{param_prefix}_{filter_name}"] = filter_value + elif filter_name in [v.value for v in FlagProperty] and filter_value is not None: query_filter.append(QueryRel(labels=[filter_name.upper()])) query_filter.append( @@ -260,7 +290,7 @@ async def default_attribute_query_filter( # pylint: disable=unused-argument,dis query_filter.extend( [ QueryRel(labels=[f"HAS_{property_name.upper()}"]), - QueryNode(name="ap", labels=["CoreNode"], params={"uuid": f"${param_prefix}_{clean_filter_name}"}), + QueryNode(name="ap", labels=["Node"], params={"uuid": f"${param_prefix}_{clean_filter_name}"}), ] ) query_params[f"{param_prefix}_{clean_filter_name}"] = filter_value diff --git a/backend/infrahub/core/query/diff.py b/backend/infrahub/core/query/diff.py index f292daff92..b5877a6f15 100644 --- a/backend/infrahub/core/query/diff.py +++ b/backend/infrahub/core/query/diff.py @@ -238,6 +238,7 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): AND ((r.to >= $diff_from AND r.to <= $diff_to) OR r.to is NULL)) ) ) + AND sn <> dn RETURN rel as rel1, sn as sn1, dn as dn1, r1 as r11, r2 as r21 ORDER BY r1.branch_level DESC, r1.from DESC LIMIT 1 @@ -278,6 +279,7 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): MATCH p = ((sn:Node)-[r1]-(rel)-[r2]-(dn:Node)) WHERE r1.branch = r2.branch AND (r1.to = r2.to OR (r1.to is NULL AND r2.to is NULL)) AND r1.from = r2.from AND r1.status = r2.status AND all(r IN relationships(p) WHERE ( %s )) + AND sn <> dn RETURN rel as rel1, sn as sn1, dn as dn1, r1 as r11, r2 as r21 ORDER BY r1.branch_level DESC, r1.from DESC LIMIT 1 @@ -379,7 +381,7 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): self.params.update(rels_params) query = """ - MATCH (a) WHERE a.uuid IN $ids + MATCH (a:Attribute) WHERE a.uuid IN $ids MATCH (a)-[r:IS_VISIBLE|IS_PROTECTED|HAS_SOURCE|HAS_OWNER|HAS_VALUE]-(ap) WHERE %s """ % ("\n AND ".join(rels_filter),) @@ -435,7 +437,7 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): # TODO Compute the list of potential relationship dynamically in the future based on the class query = """ - MATCH (rl) WHERE rl.uuid IN $ids + MATCH (rl:Relationship) WHERE rl.uuid IN $ids MATCH (rl)-[r:IS_VISIBLE|IS_PROTECTED|HAS_SOURCE|HAS_OWNER]-(rp) WHERE %s """ % ("\n AND ".join(rels_filter),) diff --git a/backend/infrahub/core/query/ipam.py b/backend/infrahub/core/query/ipam.py new file mode 100644 index 0000000000..d48b4d32ab --- /dev/null +++ b/backend/infrahub/core/query/ipam.py @@ -0,0 +1,706 @@ +from __future__ import annotations + +import ipaddress +from dataclasses import dataclass +from typing import TYPE_CHECKING, Iterable, List, Optional, Union + +from infrahub.core.constants import InfrahubKind +from infrahub.core.ipam.constants import AllIPTypes, IPAddressType, IPNetworkType +from infrahub.core.registry import registry +from infrahub.core.utils import convert_ip_to_binary_str + 
+from . import Query + +if TYPE_CHECKING: + from uuid import UUID + + from infrahub.core.branch import Branch + from infrahub.core.node import Node + from infrahub.core.timestamp import Timestamp + from infrahub.database import InfrahubDatabase + + +@dataclass +class IPPrefixData: + id: UUID + prefix: IPNetworkType + + +@dataclass +class IPAddressData: + id: UUID + address: IPAddressType + + +def _get_namespace_id( + namespace: Optional[Union[Node, str]] = None, +) -> str: + if namespace and isinstance(namespace, str): + return namespace + if namespace and hasattr(namespace, "id"): + return namespace.id + return registry.default_ipnamespace + + +class IPPrefixSubnetFetch(Query): + name: str = "ipprefix_subnet_fetch" + + def __init__( + self, + obj: IPNetworkType, + namespace: Optional[Union[Node, str]] = None, + *args, + **kwargs, + ): + self.obj = obj + self.namespace_id = _get_namespace_id(namespace) + + super().__init__(*args, **kwargs) + + async def query_init(self, db: InfrahubDatabase, *args, **kwargs): + self.params["ns_id"] = self.namespace_id + + prefix_bin = convert_ip_to_binary_str(self.obj)[: self.obj.prefixlen] + self.params["prefix_binary"] = prefix_bin + self.params["maxprefixlen"] = self.obj.prefixlen + self.params["ip_version"] = self.obj.version + + branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string()) + self.params.update(branch_params) + + # ruff: noqa: E501 + query = """ + // First match on IPNAMESPACE + MATCH (ns:%(ns_label)s) + WHERE ns.uuid = $ns_id + CALL { + WITH ns + MATCH (ns)-[r:IS_PART_OF]-(root:Root) + WHERE %(branch_filter)s + RETURN ns as ns1, r as r1 + ORDER BY r.branch_level DESC, r.from DESC + LIMIT 1 + } + WITH ns, r1 as r + WHERE r.status = "active" + WITH ns + // MATCH all prefixes that are IN SCOPE + MATCH path2 = (ns)-[:IS_RELATED]-(ns_rel:Relationship)-[:IS_RELATED]-(pfx:%(node_label)s)-[:HAS_ATTRIBUTE]-(an:Attribute {name: "prefix"})-[:HAS_VALUE]-(av:AttributeIPNetwork) + WHERE ns_rel.name = "ip_namespace__ip_prefix" + AND av.binary_address STARTS WITH $prefix_binary + AND av.prefixlen > $maxprefixlen + AND av.version = $ip_version + AND all(r IN relationships(path2) WHERE (%(branch_filter)s)) + // TODO Need to check for delete nodes + WITH + collect([pfx, av]) as all_prefixes_and_value, + collect(pfx) as all_prefixes + // --- + // FIND ALL CHILDREN OF THESE PREFIXES + // --- + CALL { + WITH all_prefixes + UNWIND all_prefixes as prefix + OPTIONAL MATCH (prefix)<-[:IS_RELATED]-(ch_rel:Relationship)<-[:IS_RELATED]-(children:BuiltinIPPrefix) + WHERE ch_rel.name = "parent__child" + RETURN children + } + WITH collect( distinct children ) AS all_children, all_prefixes_and_value + UNWIND all_prefixes_and_value as prefixes_to_check + WITH prefixes_to_check, all_children + WHERE not prefixes_to_check[0] in all_children + """ % { + "ns_label": InfrahubKind.IPNAMESPACE, + "node_label": InfrahubKind.IPPREFIX, + "branch_filter": branch_filter, + } + + self.add_to_query(query) + self.return_labels = ["prefixes_to_check[0] as pfx", "prefixes_to_check[1] as av"] + self.order_by = ["av.binary_address"] + + def get_subnets(self): + """Return a list of all subnets fitting in the prefix.""" + subnets: List[IPPrefixData] = [] + + for result in self.get_results(): + subnet = IPPrefixData( + id=result.get("pfx").get("uuid"), prefix=ipaddress.ip_network(result.get("av").get("value")) + ) + subnets.append(subnet) + + return subnets + + +class IPPrefixContainerFetch(Query): + name: str = "ipprefix_container_fetch" + + def __init__( + 
self, + obj: Union[ipaddress.IPv6Network, ipaddress.IPv4Network, ipaddress.IPv4Interface, ipaddress.IPv6Interface], + namespace: Optional[Union[Node, str]] = None, + *args, + **kwargs, + ): + self.obj = obj + self.namespace_id = _get_namespace_id(namespace) + + if isinstance(obj, (ipaddress.IPv6Network, ipaddress.IPv4Network)): + self.prefixlen = obj.prefixlen + self.minprefixlen = obj.prefixlen + elif isinstance(obj, (ipaddress.IPv4Interface, ipaddress.IPv6Interface)): + self.prefixlen = obj.network.prefixlen + self.minprefixlen = self.prefixlen + 1 + + super().__init__(*args, **kwargs) + + async def query_init(self, db: InfrahubDatabase, *args, **kwargs): + self.params["ns_id"] = self.namespace_id + prefix_bin = convert_ip_to_binary_str(self.obj)[: self.prefixlen] + + self.params["minprefixlen"] = self.minprefixlen + self.params["ip_version"] = self.obj.version + + branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string()) + self.params.update(branch_params) + + possible_prefixes = set() + for idx in range(1, self.prefixlen): + tmp_prefix = prefix_bin[: self.prefixlen - idx] + padding = "0" * (self.obj.max_prefixlen - len(tmp_prefix)) + possible_prefixes.add(f"{tmp_prefix}{padding}") + + self.params["possible_prefixes"] = list(possible_prefixes) + + # ruff: noqa: E501 + query = """ + // First match on IPNAMESPACE + MATCH (ns:%(ns_label)s) + WHERE ns.uuid = $ns_id + CALL { + WITH ns + MATCH (ns)-[r:IS_PART_OF]-(root:Root) + WHERE %(branch_filter)s + RETURN ns as ns1, r as r1 + ORDER BY r.branch_level DESC, r.from DESC + LIMIT 1 + } + WITH ns, r1 as r + WHERE r.status = "active" + WITH ns + // MATCH all prefixes that are IN SCOPE + MATCH path2 = (ns)-[:IS_RELATED]-(ns_rel:Relationship)-[:IS_RELATED]-(pfx:%(node_label)s)-[:HAS_ATTRIBUTE]-(an:Attribute {name: "prefix"})-[:HAS_VALUE]-(av:AttributeIPNetwork) + WHERE ns_rel.name = "ip_namespace__ip_prefix" + AND av.binary_address IN $possible_prefixes + AND av.prefixlen < $minprefixlen + AND av.version = $ip_version + AND all(r IN relationships(path2) WHERE (%(branch_filter)s)) + """ % { + "ns_label": InfrahubKind.IPNAMESPACE, + "node_label": InfrahubKind.IPPREFIX, + "branch_filter": branch_filter, + } + + self.add_to_query(query) + self.return_labels = ["pfx", "av"] + self.order_by = ["av.prefixlen"] + + def get_container(self) -> Optional[IPPrefixData]: + """Return the more specific prefix that contains this one.""" + candidates: List[IPPrefixData] = [] + + if not self.num_of_results: + return None + + for result in self.get_results(): + candidate = IPPrefixData( + id=result.get("pfx").get("uuid"), prefix=ipaddress.ip_network(result.get("av").get("value")) + ) + candidates.append(candidate) + return candidates[-1] + + +class IPPrefixIPAddressFetch(Query): + name: str = "ipprefix_ipaddress_fetch" + + def __init__( + self, + obj: IPNetworkType, + namespace: Optional[Union[Node, str]] = None, + *args, + **kwargs, + ): + self.obj = obj + self.namespace_id = _get_namespace_id(namespace) + + super().__init__(*args, **kwargs) + + async def query_init(self, db: InfrahubDatabase, *args, **kwargs): + self.params["ns_id"] = self.namespace_id + + prefix_bin = convert_ip_to_binary_str(self.obj)[: self.obj.prefixlen] + self.params["prefix_binary"] = prefix_bin + self.params["maxprefixlen"] = self.obj.prefixlen + self.params["ip_version"] = self.obj.version + + branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string()) + self.params.update(branch_params) + + # ruff: noqa: E501 + query = """ + // 
First match on IPNAMESPACE + MATCH (ns:%(ns_label)s) + WHERE ns.uuid = $ns_id + CALL { + WITH ns + MATCH (ns)-[r:IS_PART_OF]-(root:Root) + WHERE %(branch_filter)s + RETURN ns as ns1, r as r1 + ORDER BY r.branch_level DESC, r.from DESC + LIMIT 1 + } + WITH ns, r1 as r + WHERE r.status = "active" + WITH ns + // MATCH all IPAddress that are IN SCOPE + MATCH path2 = (ns)-[:IS_RELATED]-(ns_rel:Relationship)-[:IS_RELATED]-(addr:%(node_label)s)-[:HAS_ATTRIBUTE]-(an:Attribute {name: "address"})-[:HAS_VALUE]-(av:AttributeIPHost) + WHERE ns_rel.name = "ip_namespace__ip_address" + AND av.binary_address STARTS WITH $prefix_binary + AND av.prefixlen >= $maxprefixlen + AND av.version = $ip_version + AND all(r IN relationships(path2) WHERE (%(branch_filter)s)) + """ % { + "ns_label": InfrahubKind.IPNAMESPACE, + "node_label": InfrahubKind.IPADDRESS, + "branch_filter": branch_filter, + } + + self.add_to_query(query) + self.return_labels = ["addr", "av"] + self.order_by = ["av.binary_address"] + + def get_addresses(self): + """Return a list of all addresses fitting in the prefix.""" + addresses: List[IPAddressData] = [] + + for result in self.get_results(): + address = IPAddressData( + id=result.get("addr").get("uuid"), address=ipaddress.ip_interface(result.get("av").get("value")) + ) + addresses.append(address) + + return addresses + + +async def get_container( + db: InfrahubDatabase, + ip_prefix: IPNetworkType, + namespace: Optional[Union[Node, str]] = None, + branch: Optional[Union[Branch, str]] = None, + at: Optional[Union[Timestamp, str]] = None, +) -> Optional[IPPrefixData]: + branch = await registry.get_branch(db=db, branch=branch) + query = await IPPrefixContainerFetch.init(db=db, branch=branch, obj=ip_prefix, namespace=namespace, at=at) + await query.execute(db=db) + return query.get_container() + + +async def get_subnets( + db: InfrahubDatabase, + ip_prefix: IPNetworkType, + namespace: Optional[Union[Node, str]] = None, + branch: Optional[Union[Branch, str]] = None, + at: Optional[Union[Timestamp, str]] = None, +) -> Iterable[IPPrefixData]: + branch = await registry.get_branch(db=db, branch=branch) + query = await IPPrefixSubnetFetch.init(db=db, branch=branch, obj=ip_prefix, namespace=namespace, at=at) + await query.execute(db=db) + return query.get_subnets() + + +async def get_ip_addresses( + db: InfrahubDatabase, + ip_prefix: IPNetworkType, + namespace: Optional[Union[Node, str]] = None, + branch: Optional[Union[Branch, str]] = None, + at=None, +) -> Iterable[IPAddressData]: + branch = await registry.get_branch(db=db, branch=branch) + query = await IPPrefixIPAddressFetch.init(db=db, branch=branch, obj=ip_prefix, namespace=namespace, at=at) + await query.execute(db=db) + return query.get_addresses() + + +async def get_ip_prefix_for_ip_address( + db: InfrahubDatabase, + ip_address: IPAddressType, + namespace: Optional[str] = None, + branch: Optional[Union[Branch, str]] = None, + at: Optional[Union[Timestamp, str]] = None, +) -> Optional[IPPrefixData]: + branch = await registry.get_branch(db=db, branch=branch) + query = await IPPrefixContainerFetch.init(db=db, branch=branch, obj=ip_address, namespace=namespace, at=at) + await query.execute(db=db) + return query.get_container() + + +class IPPrefixUtilizationPrefix(Query): + name: str = "ipprefix_utilization_prefix" + + def __init__(self, ip_prefix: Node, *args, **kwargs): + self.ip_prefix = ip_prefix + super().__init__(*args, **kwargs) + + async def query_init(self, db: InfrahubDatabase, *args, **kwargs): + branch_filter, branch_params = 
self.branch.get_query_filter_path(at=self.at.to_string()) + self.params.update(branch_params) + + self.params["id"] = self.ip_prefix.id + + query = """ + MATCH path = (pfx:Node)<-[:IS_RELATED]-(rl:Relationship)<-[:IS_RELATED]-(children:%(label)s) + WHERE pfx.uuid = $id + AND all(r IN relationships(path) WHERE (%(branch_filter)s)) + AND rl.name = "parent__child" + CALL { + WITH pfx, children + MATCH path = (pfx)<-[r1:IS_RELATED]-(rl:Relationship)<-[r2:IS_RELATED]-(children:%(label)s) + WHERE all(r IN relationships(path) WHERE (%(branch_filter)s)) + AND rl.name = "parent__child" + RETURN r1 as r11, r2 as r21 + ORDER BY r1.branch_level DESC, r1.from DESC, r2.branch_level DESC, r2.from DESC + LIMIT 1 + } + WITH pfx, children, r11, r21 + WHERE r11.status = "active" AND r21.status = "active" + CALL { + WITH children + MATCH path = (children)-[r1:HAS_ATTRIBUTE]-(:Attribute {name: "prefix"})-[r2:HAS_VALUE]-(av:AttributeIPNetwork) + WHERE all(r IN relationships(path) WHERE (%(branch_filter)s)) + RETURN r1 as r12, r2 as r22, av + ORDER BY r1.branch_level DESC, r1.from DESC, r2.branch_level DESC, r2.from DESC + LIMIT 1 + } + WITH pfx, children, r12, r22, av + WHERE r12.status = "active" AND r22.status = "active" + """ % {"label": InfrahubKind.IPPREFIX, "branch_filter": branch_filter} # noqa: E501 + + self.return_labels = ["av.prefixlen as prefixlen"] + + self.add_to_query(query) + + def get_percentage(self): + prefix_space = self.ip_prefix.prefix.num_addresses + max_prefixlen = self.ip_prefix.prefix.obj.max_prefixlen + used_space = 0 + for result in self.get_results(): + used_space += 2 ** (max_prefixlen - int(result.get("prefixlen"))) + + return (used_space / prefix_space) * 100 + + +class IPPrefixUtilizationAddress(Query): + name: str = "ipprefix_utilization_address" + + def __init__(self, ip_prefix: Node, *args, **kwargs): + self.ip_prefix = ip_prefix + super().__init__(*args, **kwargs) + + async def query_init(self, db: InfrahubDatabase, *args, **kwargs): + branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string()) + self.params.update(branch_params) + + self.params["id"] = self.ip_prefix.id + + query = """ + MATCH path = (pfx:Node)-[:IS_RELATED]->(rl:Relationship)<-[:IS_RELATED]-(children:%(label)s) + WHERE pfx.uuid = $id + AND all(r IN relationships(path) WHERE (%(branch_filter)s)) + AND rl.name = "ip_prefix__ip_address" + CALL { + WITH pfx, children + MATCH path = (pfx)-[r1:IS_RELATED]->(rl:Relationship)<-[r2:IS_RELATED]-(children:%(label)s) + WHERE all(r IN relationships(path) WHERE (%(branch_filter)s)) + AND rl.name = "ip_prefix__ip_address" + RETURN r1, r2 + ORDER BY r1.branch_level DESC, r1.from DESC, r2.branch_level DESC, r2.from DESC + LIMIT 1 + } + WITH pfx, children, r1, r2 + WHERE r1.status = "active" AND r2.status = "active" + """ % {"label": InfrahubKind.IPADDRESS, "branch_filter": branch_filter} # noqa: E501 + + self.return_labels = ["count(children) as nbr_children"] + + self.add_to_query(query) + + def get_percentage(self): + prefix_space = self.ip_prefix.prefix.num_addresses + + # Non-RFC3021 subnet + if ( + self.ip_prefix.prefix.version == 4 + and self.ip_prefix.prefix.prefixlen < 31 + and not self.ip_prefix.is_pool.value + ): + prefix_space -= 2 + + return (self.get_result().get("nbr_children") / prefix_space) * 100 + + +async def get_utilization( + ip_prefix: Node, + db: InfrahubDatabase, + branch: Optional[Branch] = None, + at: Optional[Union[Timestamp, str]] = None, +) -> float: + if ip_prefix.member_type.value == "address": + query = await 
IPPrefixUtilizationAddress.init(db, branch=branch, at=at, ip_prefix=ip_prefix) + else: + query = await IPPrefixUtilizationPrefix.init(db, branch=branch, at=at, ip_prefix=ip_prefix) + + await query.execute(db=db) + return query.get_percentage() + + +class IPPrefixReconcileQuery(Query): + name: str = "ip_prefix_reconcile" + + def __init__( + self, + ip_value: AllIPTypes, + namespace: Optional[Union[Node, str]] = None, + node_uuid: Optional[str] = None, + *args, + **kwargs, + ): + self.ip_value = ip_value + self.ip_uuid = node_uuid + self.namespace_id = _get_namespace_id(namespace) + super().__init__(*args, **kwargs) + + async def query_init(self, db: InfrahubDatabase, *args, **kwargs): + branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string()) + self.params.update(branch_params) + self.params["ip_prefix_kind"] = InfrahubKind.IPPREFIX + self.params["ip_address_kind"] = InfrahubKind.IPADDRESS + self.params["ip_prefix_attribute_kind"] = "AttributeIPNetwork" + self.params["ip_address_attribute_kind"] = "AttributeIPHost" + self.params["namespace_kind"] = InfrahubKind.IPNAMESPACE + self.params["namespace_id"] = self.namespace_id + prefix_bin = convert_ip_to_binary_str(self.ip_value) + self.params["prefix_binary"] = prefix_bin + if isinstance(self.ip_value, IPAddressType): + prefixlen = self.ip_value.max_prefixlen + else: + prefixlen = self.ip_value.prefixlen + self.params["prefixlen"] = prefixlen + self.params["ip_version"] = self.ip_value.version + possible_prefixes = set() + for idx in range(1, prefixlen): + tmp_prefix = prefix_bin[: prefixlen - idx] + padding = "0" * (self.ip_value.max_prefixlen - len(tmp_prefix)) + possible_prefixes.add(f"{tmp_prefix}{padding}") + self.params["possible_prefixes"] = list(possible_prefixes) + + namespace_query = """ + // Get IP Namespace + MATCH (ip_namespace:%(namespace_kind)s)-[r:IS_PART_OF]->(root:Root) + WHERE ip_namespace.uuid = $namespace_id + AND %(branch_filter)s + """ % {"branch_filter": branch_filter, "namespace_kind": self.params["namespace_kind"]} + self.add_to_query(namespace_query) + + if self.ip_uuid: + self.params["node_uuid"] = self.ip_uuid + get_node_by_id_query = """ + // Get IP Prefix node by UUID + MATCH (ip_node {uuid: $node_uuid}) + """ + self.add_to_query(get_node_by_id_query) + + else: + get_node_by_prefix_query = """ + // Get IP Prefix node by prefix value + OPTIONAL MATCH ip_node_path = (:Root)<-[:IS_PART_OF]-(ip_node:Node)-[:HAS_ATTRIBUTE]->(a:Attribute)-[:HAS_VALUE]->(aipn), (ip_namespace)-[:IS_RELATED]-(nsr:Relationship)-[:IS_RELATED]-(ip_node) + WHERE any(label IN LABELS(ip_node) WHERE label IN [$ip_prefix_kind, $ip_address_kind]) + AND nsr.name IN ["ip_namespace__ip_prefix", "ip_namespace__ip_address"] + AND any(label IN LABELS(aipn) WHERE label IN [$ip_prefix_attribute_kind, $ip_address_attribute_kind]) + AND aipn.binary_address = $prefix_binary + AND aipn.prefixlen = $prefixlen + AND aipn.version = $ip_version + AND all(r IN relationships(ip_node_path) WHERE (%(branch_filter)s) and r.status = "active") + """ % { + "branch_filter": branch_filter, + } + self.add_to_query(get_node_by_prefix_query) + + get_current_parent_query = """ + // Get prefix node's current parent, if it exists + OPTIONAL MATCH parent_prefix_path = (ip_node)-[:IS_RELATED]->(:Relationship {name: "parent__child"})-[:IS_RELATED]->(current_parent:%(ip_prefix_kind)s) + WHERE all(r IN relationships(parent_prefix_path) WHERE (%(branch_filter)s) and r.status = "active") + """ % { + "branch_filter": branch_filter, + "ip_prefix_kind": 
self.params["ip_prefix_kind"], + } + self.add_to_query(get_current_parent_query) + + get_current_children_query = """ + // Get prefix node's current prefix children, if any exist + OPTIONAL MATCH child_prefix_path = (ip_node)<-[:IS_RELATED]-(:Relationship {name: "parent__child"})<-[:IS_RELATED]-(current_prefix_child:%(ip_prefix_kind)s) + WHERE all(r IN relationships(child_prefix_path) WHERE (%(branch_filter)s) and r.status = "active") + WITH ip_namespace, ip_node, current_parent, collect(current_prefix_child) AS current_prefix_children + // Get prefix node's current address children, if any exist + OPTIONAL MATCH child_address_path = (ip_node)-[:IS_RELATED]-(:Relationship {name: "ip_prefix__ip_address"})-[:IS_RELATED]-(current_address_child:%(ip_address_kind)s) + WHERE all(r IN relationships(child_address_path) WHERE (%(branch_filter)s) and r.status = "active") + WITH ip_namespace, ip_node, current_parent, current_prefix_children, collect(current_address_child) AS current_address_children + WITH ip_namespace, ip_node, current_parent, current_prefix_children + current_address_children AS current_children + """ % { + "branch_filter": branch_filter, + "ip_prefix_kind": self.params["ip_prefix_kind"], + "ip_address_kind": self.params["ip_address_kind"], + } + self.add_to_query(get_current_children_query) + + get_new_parent_query = """ + // Identify the correct parent, if any, for the prefix node + CALL { + WITH ip_namespace + OPTIONAL MATCH parent_path = (ip_namespace)-[pr1:IS_RELATED]-(ns_rel:Relationship)-[pr2:IS_RELATED]-(maybe_new_parent:%(ip_prefix_kind)s)-[har:HAS_ATTRIBUTE]->(:Attribute {name: "prefix"})-[hvr:HAS_VALUE]->(av:%(ip_prefix_attribute_kind)s) + WHERE ns_rel.name = "ip_namespace__ip_prefix" + AND all(r IN relationships(parent_path) WHERE (%(branch_filter)s)) + AND pr1.status = "active" + AND pr2.status = "active" + AND av.binary_address IN $possible_prefixes + AND av.prefixlen < $prefixlen + AND av.version = $ip_version + WITH + maybe_new_parent, + har, + hvr, + av.prefixlen as prefixlen, + (har.status = "active" AND hvr.status = "active") AS is_active, + har.branch_level + hvr.branch_level AS branch_level + ORDER BY branch_level DESC, har.from DESC, hvr.from DESC + WITH maybe_new_parent, prefixlen, is_active + RETURN maybe_new_parent, head(collect(prefixlen)) AS mnp_prefixlen, head(collect(is_active)) AS mnp_is_active + } + WITH ip_namespace, ip_node, current_parent, current_children, maybe_new_parent, mnp_prefixlen, mnp_is_active + WHERE mnp_is_active OR maybe_new_parent IS NULL + WITH ip_namespace, ip_node, current_parent, current_children, maybe_new_parent, mnp_prefixlen + ORDER BY ip_node.uuid, mnp_prefixlen DESC + WITH ip_namespace, ip_node, current_parent, current_children, head(collect(maybe_new_parent)) as new_parent + """ % { + "branch_filter": branch_filter, + "ip_prefix_kind": self.params["ip_prefix_kind"], + "ip_prefix_attribute_kind": self.params["ip_prefix_attribute_kind"], + } + self.add_to_query(get_new_parent_query) + + get_new_children_query = """ + // Identify the correct children, if any, for the prefix node + CALL { + // Get ALL possible children for the prefix node + WITH ip_namespace, ip_node + OPTIONAL MATCH child_path = (ip_namespace)-[:IS_RELATED]-(ns_rel:Relationship)-[:IS_RELATED]-(maybe_new_child)-[har:HAS_ATTRIBUTE]->(a:Attribute)-[hvr:HAS_VALUE]->(av:AttributeValue) + WHERE (ip_node IS NULL OR maybe_new_child.uuid <> ip_node.uuid) + AND ns_rel.name IN ["ip_namespace__ip_prefix", "ip_namespace__ip_address"] + AND a.name in ["prefix", "address"] + 
AND any(label IN LABELS(maybe_new_child) WHERE label IN [$ip_prefix_kind, $ip_address_kind]) + AND any(label IN LABELS(av) WHERE label IN [$ip_prefix_attribute_kind, $ip_address_attribute_kind]) + AND ( + ($ip_prefix_kind IN LABELS(maybe_new_child) AND av.prefixlen > $prefixlen) + OR ($ip_address_kind IN LABELS(maybe_new_child) AND av.prefixlen >= $prefixlen) + ) + AND av.version = $ip_version + AND av.binary_address STARTS WITH SUBSTRING($prefix_binary, 0, $prefixlen) + AND all(r IN relationships(child_path) WHERE (%(branch_filter)s) AND r.status = "active") + WITH + maybe_new_child, + av AS mnc_attribute, + har, + hvr, + (har.status = "active" AND hvr.status = "active") AS is_active, + har.branch_level + hvr.branch_level AS branch_level + ORDER BY maybe_new_child.uuid, branch_level DESC, har.from DESC, hvr.from DESC + WITH maybe_new_child, head(collect([mnc_attribute, is_active])) AS latest_mnc_details + RETURN maybe_new_child, latest_mnc_details[0] AS latest_mnc_attribute, latest_mnc_details[1] AS mnc_is_active + } + WITH ip_namespace, ip_node, current_parent, current_children, new_parent, maybe_new_child, latest_mnc_attribute, mnc_is_active + WHERE mnc_is_active = TRUE OR mnc_is_active IS NULL + WITH ip_namespace, ip_node, current_parent, current_children, new_parent, collect([maybe_new_child, latest_mnc_attribute]) AS maybe_children_ips + WITH ip_namespace, ip_node, current_parent, current_children, new_parent, maybe_children_ips, range(0, size(maybe_children_ips) - 1) AS child_indices + UNWIND child_indices as ind + CALL { + // Filter all possible children to remove those that have a more-specific parent + // among the list of all possible children + WITH ind, maybe_children_ips + WITH ind, maybe_children_ips AS ips + RETURN REDUCE( + has_more_specific_parent = FALSE, potential_parent IN ips | + CASE + WHEN has_more_specific_parent THEN has_more_specific_parent // keep it True once set + WHEN potential_parent IS NULL OR ips[ind][0] IS NULL THEN has_more_specific_parent + WHEN potential_parent[0] = ips[ind][0] THEN has_more_specific_parent // skip comparison to self + WHEN $ip_address_kind in LABELS(potential_parent[0]) THEN has_more_specific_parent // address cannot be a parent + WHEN $ip_prefix_attribute_kind IN LABELS(ips[ind][1]) AND (potential_parent[1]).prefixlen >= (ips[ind][1]).prefixlen THEN has_more_specific_parent // prefix with same or greater prefixlen for prefix cannot be parent + WHEN $ip_address_attribute_kind IN LABELS(ips[ind][1]) AND (potential_parent[1]).prefixlen > (ips[ind][1]).prefixlen THEN has_more_specific_parent // prefix with greater prefixlen for address cannot be parent + WHEN (ips[ind][1]).binary_address STARTS WITH SUBSTRING((potential_parent[1]).binary_address, 0, (potential_parent[1]).prefixlen) THEN TRUE // we found a parent + ELSE has_more_specific_parent + END + ) as has_parent_among_maybe_children + } + WITH ip_namespace, ip_node, current_parent, current_children, new_parent, maybe_children_ips[ind][0] AS new_child, has_parent_among_maybe_children + WHERE has_parent_among_maybe_children = FALSE + WITH + ip_namespace, + ip_node, + current_parent, + current_children, + new_parent, + collect(new_child) as new_children + """ % {"branch_filter": branch_filter} + self.add_to_query(get_new_children_query) + self.return_labels = ["ip_node", "current_parent", "current_children", "new_parent", "new_children"] + + def _get_uuid_from_query(self, node_name: str) -> Optional[str]: + results = list(self.get_results()) + if not results: + return None + result = 
results[0] + node = result.get(node_name) + if not node: + return None + node_uuid = node.get("uuid") + if node_uuid: + return str(node_uuid) + return None + + def _get_uuids_from_query_list(self, alias_name: str) -> list[str]: + results = list(self.get_results()) + if not results: + return [] + result = results[0] + element_uuids = [] + for element in result.get(alias_name): + if not element: + continue + element_uuid = element.get("uuid") + if element_uuid: + element_uuids.append(str(element_uuid)) + return element_uuids + + def get_ip_node_uuid(self) -> Optional[str]: + return self._get_uuid_from_query("ip_node") + + def get_current_parent_uuid(self) -> Optional[str]: + return self._get_uuid_from_query("current_parent") + + def get_calculated_parent_uuid(self) -> Optional[str]: + return self._get_uuid_from_query("new_parent") + + def get_current_children_uuids(self) -> list[str]: + return self._get_uuids_from_query_list("current_children") + + def get_calculated_children_uuids(self) -> list[str]: + return self._get_uuids_from_query_list("new_children") diff --git a/backend/infrahub/core/query/node.py b/backend/infrahub/core/query/node.py index eefcb328c7..7167937f61 100644 --- a/backend/infrahub/core/query/node.py +++ b/backend/infrahub/core/query/node.py @@ -2,10 +2,12 @@ from collections import defaultdict from dataclasses import dataclass +from dataclasses import field as dataclass_field +from enum import Enum from typing import TYPE_CHECKING, Any, AsyncIterator, Dict, Generator, List, Optional, Tuple, Union from infrahub import config -from infrahub.core.constants import RelationshipDirection, RelationshipHierarchyDirection +from infrahub.core.constants import AttributeDBNodeType, RelationshipDirection, RelationshipHierarchyDirection from infrahub.core.query import Query, QueryResult, QueryType from infrahub.core.query.subquery import build_subquery_filter, build_subquery_order from infrahub.core.query.utils import find_node_schema @@ -13,22 +15,28 @@ from infrahub.exceptions import QueryError if TYPE_CHECKING: + from neo4j.graph import Node as Neo4jNode + from infrahub.core.attribute import AttributeCreateData, BaseAttribute from infrahub.core.branch import Branch from infrahub.core.node import Node from infrahub.core.relationship import RelationshipCreateData, RelationshipManager from infrahub.core.schema import GenericSchema, NodeSchema + from infrahub.core.schema.attribute_schema import AttributeSchema + from infrahub.core.schema.profile_schema import ProfileSchema + from infrahub.core.schema.relationship_schema import RelationshipSchema from infrahub.database import InfrahubDatabase -# pylint: disable=consider-using-f-string,redefined-builtin +# pylint: disable=consider-using-f-string,redefined-builtin,too-many-lines @dataclass class NodeToProcess: - schema: Optional[NodeSchema] + schema: Optional[Union[NodeSchema, ProfileSchema]] node_id: str node_uuid: str + profile_uuids: list[str] updated_at: str @@ -38,35 +46,40 @@ class NodeToProcess: @dataclass -class AttrToProcess: +class AttributeNodePropertyFromDB: + uuid: str + labels: List[str] + + +@dataclass +class AttributeFromDB: name: str attr_labels: List[str] - attr_id: int + attr_id: str attr_uuid: str - attr_value_id: int + attr_value_id: str attr_value_uuid: Optional[str] + value: Any + content: Any updated_at: str branch: str - # permission: PermissionLevel + is_default: bool + is_from_profile: bool = dataclass_field(default=False) - # time_from: Optional[str] - # time_to: Optional[str] + node_properties: Dict[str, 
AttributeNodePropertyFromDB] = dataclass_field(default_factory=dict) + flag_properties: Dict[str, bool] = dataclass_field(default_factory=dict) - source_uuid: Optional[str] - source_labels: Optional[List[str]] - owner_uuid: Optional[str] - owner_labels: Optional[List[str]] - - is_inherited: Optional[bool] - is_protected: Optional[bool] - is_visible: Optional[bool] +@dataclass +class NodeAttributesFromDB: + node: Neo4jNode + attrs: Dict[str, AttributeFromDB] = dataclass_field(default_factory=dict) class NodeQuery(Query): @@ -79,7 +92,7 @@ def __init__( branch: Optional[Branch] = None, *args, **kwargs, - ): + ) -> None: # TODO Validate that Node is a valid node # Eventually extract the branch from Node as well self.node = node @@ -113,9 +126,19 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): self.params["branch_support"] = self.node._schema.branch attributes: List[AttributeCreateData] = [] + attributes_iphost: List[AttributeCreateData] = [] + attributes_ipnetwork: List[AttributeCreateData] = [] + for attr_name in self.node._attributes: attr: BaseAttribute = getattr(self.node, attr_name) - attributes.append(attr.get_create_data()) + attr_data = attr.get_create_data() + + if attr_data.node_type == AttributeDBNodeType.IPHOST: + attributes_iphost.append(attr_data) + elif attr_data.node_type == AttributeDBNodeType.IPNETWORK: + attributes_ipnetwork.append(attr_data) + else: + attributes.append(attr_data) relationships: List[RelationshipCreateData] = [] for rel_name in self.node._relationships: @@ -124,6 +147,8 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): relationships.append(await rel.get_create_data(db=db)) self.params["attrs"] = [attr.dict() for attr in attributes] + self.params["attrs_iphost"] = [attr.dict() for attr in attributes_iphost] + self.params["attrs_ipnetwork"] = [attr.dict() for attr in attributes_ipnetwork] self.params["rels_bidir"] = [ rel.dict() for rel in relationships if rel.direction == RelationshipDirection.BIDIR.value ] @@ -149,6 +174,25 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): rel_prop_str = "{ branch: rel.branch, branch_level: rel.branch_level, status: rel.status, hierarchy: rel.hierarchical, from: $at, to: null }" + iphost_prop = { + "value": "attr.content.value", + "is_default": "attr.content.is_default", + "binary_address": "attr.content.binary_address", + "version": "attr.content.version", + "prefixlen": "attr.content.prefixlen", + } + iphost_prop_list = [f"{key}: {value}" for key, value in iphost_prop.items()] + + ipnetwork_prop = { + "value": "attr.content.value", + "is_default": "attr.content.is_default", + "binary_address": "attr.content.binary_address", + "version": "attr.content.version", + "prefixlen": "attr.content.prefixlen", + # "num_addresses": "attr.content.num_addresses", + } + ipnetwork_prop_list = [f"{key}: {value}" for key, value in ipnetwork_prop.items()] + query = """ MATCH (root:Root) CREATE (n:Node:%(labels)s $node_prop ) @@ -157,7 +201,43 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): FOREACH ( attr IN $attrs | CREATE (a:Attribute { uuid: attr.uuid, name: attr.name, branch_support: attr.branch_support }) CREATE (n)-[:HAS_ATTRIBUTE { branch: attr.branch, branch_level: attr.branch_level, status: attr.status, from: $at, to: null }]->(a) - MERGE (av:AttributeValue { value: attr.value }) + MERGE (av:AttributeValue { value: attr.content.value, is_default: attr.content.is_default }) + CREATE (a)-[:HAS_VALUE { branch: attr.branch, branch_level: 
attr.branch_level, status: attr.status, from: $at, to: null }]->(av) + MERGE (ip:Boolean { value: attr.is_protected }) + MERGE (iv:Boolean { value: attr.is_visible }) + CREATE (a)-[:IS_PROTECTED { branch: attr.branch, branch_level: attr.branch_level, status: attr.status, from: $at, to: null }]->(ip) + CREATE (a)-[:IS_VISIBLE { branch: attr.branch, branch_level: attr.branch_level, status: attr.status, from: $at, to: null }]->(iv) + FOREACH ( prop IN attr.source_prop | + MERGE (peer:Node { uuid: prop.peer_id }) + CREATE (a)-[:HAS_SOURCE { branch: attr.branch, branch_level: attr.branch_level, status: attr.status, from: $at, to: null }]->(peer) + ) + FOREACH ( prop IN attr.owner_prop | + MERGE (peer:Node { uuid: prop.peer_id }) + CREATE (a)-[:HAS_OWNER { branch: attr.branch, branch_level: attr.branch_level, status: attr.status, from: $at, to: null }]->(peer) + ) + ) + FOREACH ( attr IN $attrs_iphost | + CREATE (a:Attribute { uuid: attr.uuid, name: attr.name, branch_support: attr.branch_support }) + CREATE (n)-[:HAS_ATTRIBUTE { branch: attr.branch, branch_level: attr.branch_level, status: attr.status, from: $at, to: null }]->(a) + MERGE (av:AttributeValue:AttributeIPHost { %(iphost_prop)s }) + CREATE (a)-[:HAS_VALUE { branch: attr.branch, branch_level: attr.branch_level, status: attr.status, from: $at, to: null }]->(av) + MERGE (ip:Boolean { value: attr.is_protected }) + MERGE (iv:Boolean { value: attr.is_visible }) + CREATE (a)-[:IS_PROTECTED { branch: attr.branch, branch_level: attr.branch_level, status: attr.status, from: $at, to: null }]->(ip) + CREATE (a)-[:IS_VISIBLE { branch: attr.branch, branch_level: attr.branch_level, status: attr.status, from: $at, to: null }]->(iv) + FOREACH ( prop IN attr.source_prop | + MERGE (peer:Node { uuid: prop.peer_id }) + CREATE (a)-[:HAS_SOURCE { branch: attr.branch, branch_level: attr.branch_level, status: attr.status, from: $at, to: null }]->(peer) + ) + FOREACH ( prop IN attr.owner_prop | + MERGE (peer:Node { uuid: prop.peer_id }) + CREATE (a)-[:HAS_OWNER { branch: attr.branch, branch_level: attr.branch_level, status: attr.status, from: $at, to: null }]->(peer) + ) + ) + FOREACH ( attr IN $attrs_ipnetwork | + CREATE (a:Attribute { uuid: attr.uuid, name: attr.name, branch_support: attr.branch_support }) + CREATE (n)-[:HAS_ATTRIBUTE { branch: attr.branch, branch_level: attr.branch_level, status: attr.status, from: $at, to: null }]->(a) + MERGE (av:AttributeValue:AttributeIPNetwork { %(ipnetwork_prop)s }) CREATE (a)-[:HAS_VALUE { branch: attr.branch, branch_level: attr.branch_level, status: attr.status, from: $at, to: null }]->(av) MERGE (ip:Boolean { value: attr.is_protected }) MERGE (iv:Boolean { value: attr.is_visible }) @@ -228,7 +308,12 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): ) WITH distinct n MATCH (n)-[:HAS_ATTRIBUTE|IS_RELATED]-(rn)-[:HAS_VALUE|IS_RELATED]-(rv) - """ % {"labels": ":".join(self.node.get_labels()), "rel_prop": rel_prop_str} + """ % { + "labels": ":".join(self.node.get_labels()), + "rel_prop": rel_prop_str, + "iphost_prop": ", ".join(iphost_prop_list), + "ipnetwork_prop": ", ".join(ipnetwork_prop_list), + } self.params["at"] = at.to_string() @@ -407,54 +492,62 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): self.add_to_query(query) self.return_labels.extend(["owner", "rel_owner"]) - def get_attributes_group_by_node(self) -> Dict[str, Dict[str, AttrToProcess]]: - attrs_by_node = defaultdict(lambda: {"node": None, "attrs": None}) + def get_attributes_group_by_node(self) -> Dict[str, 
NodeAttributesFromDB]: + attrs_by_node: Dict[str, NodeAttributesFromDB] = {} for result in self.get_results_group_by(("n", "uuid"), ("a", "name")): - node_id = result.get("n").get("uuid") - attr_name = result.get("a").get("name") - attr = AttrToProcess( - name=attr_name, - attr_labels=result.get("a").labels, - attr_id=result.get("a").element_id, - attr_uuid=result.get("a").get("uuid"), - attr_value_id=result.get("av").element_id, - attr_value_uuid=result.get("av").get("uuid"), - updated_at=result.get("r2").get("from"), - value=result.get("av").get("value"), - # permission=result.permission_score, - branch=self.branch.name, - is_inherited=None, - is_protected=result.get("isp").get("value"), - is_visible=result.get("isv").get("value"), - source_uuid=None, - source_labels=None, - owner_uuid=None, - owner_labels=None, - ) + node_id: str = result.get_node("n").get("uuid") + attr_name: str = result.get_node("a").get("name") - if self.include_source and result.get("source"): - attr.source_uuid = result.get("source").get("uuid") - attr.source_labels = result.get("source").labels - - if self.include_owner and result.get("owner"): - attr.owner_uuid = result.get("owner").get("uuid") - attr.owner_labels = result.get("owner").labels + attr = self._extract_attribute_data(result=result) if node_id not in attrs_by_node: - attrs_by_node[node_id]["node"] = result.get("n") - attrs_by_node[node_id]["attrs"] = {} + attrs_by_node[node_id] = NodeAttributesFromDB(node=result.get_node("n")) - attrs_by_node[node_id]["attrs"][attr_name] = attr + attrs_by_node[node_id].attrs[attr_name] = attr return attrs_by_node - def get_result_by_id_and_name(self, node_id: str, attr_name: str) -> QueryResult: + def get_result_by_id_and_name(self, node_id: str, attr_name: str) -> Tuple[AttributeFromDB, QueryResult]: for result in self.get_results_group_by(("n", "uuid"), ("a", "name")): - if result.get("n").get("uuid") == node_id and result.get("a").get("name") == attr_name: - return result + if result.get_node("n").get("uuid") == node_id and result.get_node("a").get("name") == attr_name: + return self._extract_attribute_data(result=result), result + + raise IndexError(f"Unable to find the result with ID: {node_id} and NAME: {attr_name}") + + def _extract_attribute_data(self, result: QueryResult) -> AttributeFromDB: + attr = result.get_node("a") + attr_value = result.get_node("av") + + data = AttributeFromDB( + name=attr.get("name"), + attr_labels=list(attr.labels), + attr_id=attr.element_id, + attr_uuid=attr.get("uuid"), + attr_value_id=attr_value.element_id, + attr_value_uuid=attr_value.get("uuid"), + updated_at=result.get_rel("r2").get("from"), + value=attr_value.get("value"), + is_default=attr_value.get("is_default"), + content=attr_value._properties, + branch=self.branch.name, + flag_properties={ + "is_protected": result.get("isp").get("value"), + "is_visible": result.get("isv").get("value"), + }, + ) + + if self.include_source and result.get("source"): + data.node_properties["source"] = AttributeNodePropertyFromDB( + uuid=result.get_node("source").get("uuid"), labels=list(result.get_node("source").labels) + ) + + if self.include_owner and result.get("owner"): + data.node_properties["owner"] = AttributeNodePropertyFromDB( + uuid=result.get_node("owner").get("uuid"), labels=list(result.get_node("owner").labels) + ) - return None + return data class NodeListGetRelationshipsQuery(Query): @@ -505,21 +598,21 @@ def get_peers_group_by_node(self) -> Dict[str, Dict[str, List[str]]]: class NodeListGetInfoQuery(Query): name: str = 
"node_list_get_info" - def __init__(self, ids: List[str], account=None, *args, **kwargs): + def __init__(self, ids: List[str], account=None, *args: Any, **kwargs: Any) -> None: self.account = account self.ids = ids super().__init__(*args, **kwargs) - async def query_init(self, db: InfrahubDatabase, *args, **kwargs): + async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Any) -> None: branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string()) self.params.update(branch_params) query = """ - MATCH p = (root:Root)<-[:IS_PART_OF]-(n) + MATCH p = (root:Root)<-[:IS_PART_OF]-(n:Node) WHERE n.uuid IN $ids CALL { WITH root, n - MATCH (root:Root)<-[r:IS_PART_OF]-(n) + MATCH (root:Root)<-[r:IS_PART_OF]-(n:Node) WHERE %(branch_filter)s RETURN n as n1, r as r1 ORDER BY r.branch_level DESC, r.from DESC @@ -527,12 +620,16 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): } WITH n1 as n, r1 as rb WHERE rb.status = "active" + OPTIONAL MATCH profile_path = (n)-[:IS_RELATED]->(profile_r:Relationship)<-[:IS_RELATED]-(profile:Node)-[:IS_PART_OF]->(:Root) + WHERE profile_r.name = "node__profile" + AND profile.namespace = "Profile" + AND all(r in relationships(profile_path) WHERE %(branch_filter)s and r.status = "active") """ % {"branch_filter": branch_filter} self.add_to_query(query) self.params["ids"] = self.ids - self.return_labels = ["n", "rb"] + self.return_labels = ["collect(profile.uuid) as profile_uuids", "n", "rb"] async def get_nodes(self, duplicate: bool = True) -> AsyncIterator[NodeToProcess]: """Return all the node objects as NodeToProcess.""" @@ -543,152 +640,445 @@ async def get_nodes(self, duplicate: bool = True) -> AsyncIterator[NodeToProcess schema=schema, node_id=result.get_node("n").element_id, node_uuid=result.get_node("n").get("uuid"), + profile_uuids=[str(puuid) for puuid in result.get("profile_uuids")], updated_at=result.get_rel("rb").get("from"), branch=self.branch.name, labels=list(result.get_node("n").labels), ) + def get_profile_ids_by_node_id(self) -> dict[str, list[str]]: + profile_id_map: dict[str, list[str]] = {} + for result in self.results: + node_id = result.get_node("n").get("uuid") + profile_ids = result.get("profile_uuids") + if not node_id or not profile_ids: + continue + if node_id not in profile_id_map: + profile_id_map[node_id] = [] + profile_id_map[node_id].extend(profile_ids) + return profile_id_map + + +class FieldAttributeRequirementType(Enum): + FILTER = "filter" + ORDER = "order" + + +@dataclass +class FieldAttributeRequirement: + field_name: str + field: Optional[Union[AttributeSchema, RelationshipSchema]] + field_attr_name: str + field_attr_value: Any + index: int + types: list[FieldAttributeRequirementType] = dataclass_field(default_factory=list) + + @property + def supports_profile(self) -> bool: + return bool(self.field and self.field.is_attribute and self.field_attr_name in ("value", "values")) + + @property + def is_filter(self) -> bool: + return FieldAttributeRequirementType.FILTER in self.types + + @property + def is_order(self) -> bool: + return FieldAttributeRequirementType.ORDER in self.types + + @property + def is_default_query_variable(self) -> str: + return f"attr{self.index}_is_default" + + @property + def node_value_query_variable(self) -> str: + return f"attr{self.index}_node_value" + + @property + def profile_value_query_variable(self) -> str: + return f"attr{self.index}_profile_value" + + @property + def profile_final_value_query_variable(self) -> str: + return 
f"attr{self.index}_final_profile_value" + + @property + def final_value_query_variable(self) -> str: + return f"attr{self.index}_final_value" + class NodeGetListQuery(Query): name = "node_get_list" def __init__( - self, schema: NodeSchema, filters: Optional[dict] = None, partial_match: bool = False, *args, **kwargs - ): + self, schema: NodeSchema, filters: Optional[dict] = None, partial_match: bool = False, *args: Any, **kwargs: Any + ) -> None: self.schema = schema self.filters = filters self.partial_match = partial_match + self._variables_to_track = ["n", "rb"] super().__init__(*args, **kwargs) - async def query_init(self, db: InfrahubDatabase, *args, **kwargs): - filter_has_single_id = False + def _track_variable(self, variable: str) -> None: + if variable not in self._variables_to_track: + self._variables_to_track.append(variable) + + def _untrack_variable(self, variable: str) -> None: + try: + self._variables_to_track.remove(variable) + except ValueError: + ... + + def _get_tracked_variables(self) -> list[str]: + return self._variables_to_track + + async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Any) -> None: self.order_by = [] + self.params["node_kind"] = self.schema.kind - final_return_labels = ["n.uuid", "rb.branch", "ID(rb) as rb_id"] + self.return_labels = ["n.uuid", "rb.branch", "ID(rb) as rb_id"] + where_clause_elements = [] - # Add the Branch filters branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string()) self.params.update(branch_params) - query = ( - """ + query = """ MATCH p = (n:Node) WHERE $node_kind IN LABELS(n) CALL { WITH n MATCH (root:Root)<-[r:IS_PART_OF]-(n) - WHERE %s - RETURN n as n1, r as r1 + WHERE %(branch_filter)s + RETURN r ORDER BY r.branch_level DESC, r.from DESC LIMIT 1 } - WITH n1 as n, r1 as rb - """ - % branch_filter - ) + WITH n, r as rb + WHERE rb.status = "active" + """ % {"branch_filter": branch_filter} self.add_to_query(query) - self.params["node_kind"] = self.schema.kind - - where_clause = ['rb.status = "active"'] - - # Check 'id' or 'ids' is part of the filter - # if 'id' is present, we can skip ordering, filtering etc .. 
- # if 'ids' is present, we keep the filtering and the ordering + use_simple = False if self.filters and "id" in self.filters: - filter_has_single_id = True - where_clause.append("n.uuid = $uuid") + use_simple = True + where_clause_elements.append("n.uuid = $uuid") self.params["uuid"] = self.filters["id"] - elif self.filters and "ids" in self.filters: - where_clause.append("n.uuid IN $node_ids") - self.params["node_ids"] = self.filters["ids"] - - self.add_to_query("WHERE " + " AND ".join(where_clause)) - self.return_labels = ["n", "rb"] - - if filter_has_single_id: - self.return_labels = final_return_labels + if not self.filters and not self.schema.order_by: + use_simple = True + self.order_by = ["n.uuid"] + if use_simple: + if where_clause_elements: + self.add_to_query(" AND " + " AND ".join(where_clause_elements)) return - if self.filters: - filter_query, filter_params = await self.build_filters( - db=db, filters=self.filters, branch_filter=branch_filter - ) - - self.add_to_query(filter_query) - self.params.update(filter_params) - - if self.schema.order_by: - order_cnt = 1 - - for order_by_value in self.schema.order_by: - order_by_field_name, order_by_next_name = order_by_value.split("__", maxsplit=1) - - field = self.schema.get_field(order_by_field_name) - - subquery, subquery_params, subquery_result_name = await build_subquery_order( - db=db, - field=field, - name=order_by_field_name, - order_by=order_by_next_name, - branch_filter=branch_filter, - branch=self.branch, - subquery_idx=order_cnt, - ) - self.order_by.append(subquery_result_name) - self.params.update(subquery_params) + if self.filters and "ids" in self.filters: + self.add_to_query("AND n.uuid IN $node_ids") + self.params["node_ids"] = self.filters["ids"] - self.add_subquery(subquery=subquery) + field_attribute_requirements = self._get_field_requirements() + use_profiles = any(far for far in field_attribute_requirements if far.supports_profile) + await self._add_node_filter_attributes( + db=db, field_attribute_requirements=field_attribute_requirements, branch_filter=branch_filter + ) + await self._add_node_order_attributes( + db=db, field_attribute_requirements=field_attribute_requirements, branch_filter=branch_filter + ) - order_cnt += 1 + if use_profiles: + await self._add_profiles_per_node_query(db=db, branch_filter=branch_filter) + await self._add_profile_attributes( + db=db, field_attribute_requirements=field_attribute_requirements, branch_filter=branch_filter + ) + await self._add_profile_rollups(field_attribute_requirements=field_attribute_requirements) - else: - self.order_by.append("n.uuid") + self._add_final_filter(field_attribute_requirements=field_attribute_requirements) + self.order_by = [] + for far in field_attribute_requirements: + if not far.is_order: + continue + if far.supports_profile: + self.order_by.append(far.final_value_query_variable) + continue + self.order_by.append(far.node_value_query_variable) - self.return_labels = final_return_labels + async def _add_node_filter_attributes( + self, + db: InfrahubDatabase, + field_attribute_requirements: list[FieldAttributeRequirement], + branch_filter: str, + ) -> None: + field_attribute_requirements = [far for far in field_attribute_requirements if far.is_filter] + if not field_attribute_requirements: + return - async def build_filters( - self, db: InfrahubDatabase, filters: Dict[str, Any], branch_filter: str - ) -> Tuple[List[str], Dict[str, Any]]: filter_query: List[str] = [] filter_params: Dict[str, Any] = {} - filter_cnt = 0 - INTERNAL_FILTERS: List[str] = 
["any", "attribute", "relationship"] + for far in field_attribute_requirements: + extra_tail_properties = {far.node_value_query_variable: "value"} + if far.supports_profile: + extra_tail_properties[far.is_default_query_variable] = "is_default" + subquery, subquery_params, subquery_result_name = await build_subquery_filter( + db=db, + field=far.field, + name=far.field_name, + filter_name=far.field_attr_name, + filter_value=far.field_attr_value, + branch_filter=branch_filter, + branch=self.branch, + subquery_idx=far.index, + partial_match=self.partial_match, + support_profiles=far.supports_profile, + extra_tail_properties=extra_tail_properties, + ) + for query_var in extra_tail_properties: + self._track_variable(query_var) + with_str = ", ".join( + [ + f"{subquery_result_name} as {label}" if label == "n" else label + for label in self._get_tracked_variables() + ] + ) + + filter_params.update(subquery_params) + filter_query.append("CALL {") + filter_query.append(subquery) + filter_query.append("}") + filter_query.append(f"WITH {with_str}") - for field_name in self.schema.valid_input_names + INTERNAL_FILTERS: - attr_filters = extract_field_filters(field_name=field_name, filters=filters) - if not attr_filters: - continue + if filter_query: + self.add_to_query(filter_query) + self.params.update(filter_params) - filter_cnt += 1 + async def _add_node_order_attributes( + self, + db: InfrahubDatabase, + field_attribute_requirements: list[FieldAttributeRequirement], + branch_filter: str, + ) -> None: + field_attribute_requirements = [ + far for far in field_attribute_requirements if far.is_order and not far.is_filter + ] + if not field_attribute_requirements: + return - field = self.schema.get_field(field_name, raise_on_error=False) + sort_query: List[str] = [] + sort_params: Dict[str, Any] = {} - for field_attr_name, field_attr_value in attr_filters.items(): - subquery, subquery_params, subquery_result_name = await build_subquery_filter( - db=db, - field=field, - name=field_name, - filter_name=field_attr_name, - filter_value=field_attr_value, - branch_filter=branch_filter, - branch=self.branch, - subquery_idx=filter_cnt, - partial_match=self.partial_match, - ) - filter_params.update(subquery_params) + for far in field_attribute_requirements: + if far.field is None: + continue + extra_tail_properties = {} + if far.supports_profile: + extra_tail_properties[far.is_default_query_variable] = "is_default" - with_str = ", ".join( - [f"{subquery_result_name} as {label}" if label == "n" else label for label in self.return_labels] + subquery, subquery_params, _ = await build_subquery_order( + db=db, + field=far.field, + name=far.field_name, + order_by=far.field_attr_name, + branch_filter=branch_filter, + branch=self.branch, + subquery_idx=far.index, + result_prefix=far.node_value_query_variable, + support_profiles=far.supports_profile, + extra_tail_properties=extra_tail_properties, + ) + for query_var in extra_tail_properties: + self._track_variable(query_var) + self._track_variable(far.node_value_query_variable) + with_str = ", ".join(self._get_tracked_variables()) + + sort_params.update(subquery_params) + sort_query.append("CALL {") + sort_query.append(subquery) + sort_query.append("}") + sort_query.append(f"WITH {with_str}") + + if sort_query: + self.add_to_query(sort_query) + self.params.update(sort_params) + + async def _add_profiles_per_node_query(self, db: InfrahubDatabase, branch_filter: str) -> None: + with_str = ", ".join(self._get_tracked_variables()) + froms_str = 
db.render_list_comprehension(items="relationships(profile_path)", item_name="from") + profiles_per_node_query = ( + """ + CALL { + WITH n + OPTIONAL MATCH profile_path = (n)-[:IS_RELATED]->(profile_r:Relationship)<-[:IS_RELATED]-(maybe_profile_n:Node)-[:IS_PART_OF]->(:Root) + WHERE profile_r.name = "node__profile" + AND all(r in relationships(profile_path) WHERE %(branch_filter)s) + WITH + maybe_profile_n, + profile_path, + reduce(br_lvl = 0, r in relationships(profile_path) | br_lvl + r.branch_level) AS branch_level, + %(froms_str)s AS froms, + all(r in relationships(profile_path) WHERE r.status = "active") AS is_active + RETURN maybe_profile_n, is_active, branch_level, froms + } + WITH %(with_str)s, maybe_profile_n, branch_level, froms, is_active + ORDER BY n.uuid, maybe_profile_n.uuid, branch_level DESC, froms[-1] DESC, froms[-2] DESC, froms[-3] DESC + WITH %(with_str)s, maybe_profile_n, collect(is_active) as ordered_is_actives + WITH %(with_str)s, CASE + WHEN ordered_is_actives[0] = True THEN maybe_profile_n ELSE NULL + END AS profile_n + CALL { + WITH profile_n + OPTIONAL MATCH profile_priority_path = (profile_n)-[pr1:HAS_ATTRIBUTE]->(a:Attribute)-[pr2:HAS_VALUE]->(av:AttributeValue) + WHERE a.name = "profile_priority" + AND all(r in relationships(profile_priority_path) WHERE %(branch_filter)s and r.status = "active") + RETURN av.value as profile_priority + ORDER BY pr1.branch_level + pr2.branch_level DESC, pr2.from DESC, pr1.from DESC + LIMIT 1 + } + WITH %(with_str)s, profile_n, profile_priority + """ + ) % {"branch_filter": branch_filter, "with_str": with_str, "froms_str": froms_str} + self.add_to_query(profiles_per_node_query) + self._track_variable("profile_n") + self._track_variable("profile_priority") + + async def _add_profile_attributes( + self, db: InfrahubDatabase, field_attribute_requirements: list[FieldAttributeRequirement], branch_filter: str + ) -> None: + attributes_queries: List[str] = [] + attributes_params: Dict[str, Any] = {} + profile_attributes = [far for far in field_attribute_requirements if far.supports_profile] + + for profile_attr in profile_attributes: + if not profile_attr.field: + continue + subquery, subquery_params, _ = await build_subquery_order( + db=db, + field=profile_attr.field, + node_alias="profile_n", + name=profile_attr.field_name, + order_by=profile_attr.field_attr_name, + branch_filter=branch_filter, + branch=self.branch, + subquery_idx=profile_attr.index, + result_prefix=profile_attr.profile_value_query_variable, + support_profiles=False, + ) + attributes_params.update(subquery_params) + self._track_variable(profile_attr.profile_value_query_variable) + with_str = ", ".join(self._get_tracked_variables()) + + attributes_queries.append("CALL {") + attributes_queries.append(subquery) + attributes_queries.append("}") + attributes_queries.append(f"WITH {with_str}") + + self.add_to_query(attributes_queries) + self.params.update(attributes_params) + + async def _add_profile_rollups(self, field_attribute_requirements: list[FieldAttributeRequirement]) -> None: + profile_attributes = [far for far in field_attribute_requirements if far.supports_profile] + profile_value_collects = [] + for profile_attr in profile_attributes: + self._untrack_variable(profile_attr.profile_value_query_variable) + profile_value_collects.append( + f"""head( + reduce( + non_null_values = [], v in collect({profile_attr.profile_value_query_variable}) | + CASE WHEN v IS NOT NULL AND v <> "NULL" THEN non_null_values + [v] ELSE non_null_values END + ) + ) as 
{profile_attr.profile_final_value_query_variable}""" + ) + self._untrack_variable("profile_n") + self._untrack_variable("profile_priority") + profile_rollup_with_str = ", ".join(self._get_tracked_variables() + profile_value_collects) + profile_rollup_query = f""" + ORDER BY n.uuid, profile_priority ASC, profile_n.uuid ASC + WITH {profile_rollup_with_str} + """ + self.add_to_query(profile_rollup_query) + for profile_attr in profile_attributes: + self._track_variable(profile_attr.profile_final_value_query_variable) + + final_value_with = [] + for profile_attr in profile_attributes: + final_value_with.append(f""" + CASE + WHEN {profile_attr.is_default_query_variable} AND {profile_attr.profile_final_value_query_variable} IS NOT NULL + THEN {profile_attr.profile_final_value_query_variable} + ELSE {profile_attr.node_value_query_variable} + END AS {profile_attr.final_value_query_variable} + """) + self._untrack_variable(profile_attr.is_default_query_variable) + self._untrack_variable(profile_attr.profile_final_value_query_variable) + self._untrack_variable(profile_attr.node_value_query_variable) + final_value_with_str = ", ".join(self._get_tracked_variables() + final_value_with) + self.add_to_query(f"WITH {final_value_with_str}") + + def _add_final_filter(self, field_attribute_requirements: list[FieldAttributeRequirement]) -> None: + where_parts = [] + where_str = "" + for far in field_attribute_requirements: + if not far.is_filter or not far.supports_profile: + continue + var_name = f"final_attr_value{far.index}" + self.params[var_name] = far.field_attr_value + if self.partial_match: + where_parts.append( + f"toLower(toString({far.final_value_query_variable})) CONTAINS toLower(toString(${var_name}))" ) + continue + if far.field_attr_name == "values": + operator = "IN" + else: + operator = "=" + + where_parts.append(f"{far.final_value_query_variable} {operator} ${var_name}") + if where_parts: + where_str = "WHERE " + " AND ".join(where_parts) + self.add_to_query(where_str) + + def _get_field_requirements(self) -> list[FieldAttributeRequirement]: + internal_filters = ["any", "attribute", "relationship"] + field_requirements_map: dict[tuple[str, str], FieldAttributeRequirement] = {} + index = 1 + if self.filters: + for field_name in self.schema.valid_input_names + internal_filters: + attr_filters = extract_field_filters(field_name=field_name, filters=self.filters) + if not attr_filters: + continue + field = self.schema.get_field(field_name, raise_on_error=False) + for field_attr_name, field_attr_value in attr_filters.items(): + field_requirements_map[(field_name, field_attr_name)] = FieldAttributeRequirement( + field_name=field_name, + field=field, + field_attr_name=field_attr_name, + field_attr_value=field_attr_value.value + if isinstance(field_attr_value, Enum) + else field_attr_value, + index=index, + types=[FieldAttributeRequirementType.FILTER], + ) + index += 1 + if not self.schema.order_by: + return list(field_requirements_map.values()) + + for order_by_path in self.schema.order_by: + order_by_field_name, order_by_attr_property_name = order_by_path.split("__", maxsplit=1) + + field = self.schema.get_field(order_by_field_name) + field_req = field_requirements_map.get( + (order_by_field_name, order_by_attr_property_name), + FieldAttributeRequirement( + field_name=order_by_field_name, + field=field, + field_attr_name=order_by_attr_property_name, + field_attr_value=None, + index=index, + types=[], + ), + ) + field_req.types.append(FieldAttributeRequirementType.ORDER) + 
field_requirements_map[(order_by_field_name, order_by_attr_property_name)] = field_req + index += 1 - filter_query.append("CALL {") - filter_query.append(subquery) - filter_query.append("}") - filter_query.append(f"WITH {with_str}") - - return filter_query, filter_params + return list(field_requirements_map.values()) def get_node_ids(self) -> List[str]: return [str(result.get("n.uuid")) for result in self.get_results()] @@ -705,9 +1095,9 @@ def __init__( direction: RelationshipHierarchyDirection, node_schema: Union[NodeSchema, GenericSchema], filters: Optional[dict] = None, - *args, - **kwargs, - ): + *args: Any, + **kwargs: Any, + ) -> None: self.filters = filters or {} self.direction = direction self.node_id = node_id @@ -717,7 +1107,7 @@ def __init__( self.hierarchy_schema = node_schema.get_hierarchy_schema(self.branch) - async def query_init(self, db: InfrahubDatabase, *args, **kwargs): # pylint: disable=too-many-statements + async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Any) -> None: # pylint: disable=too-many-statements branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string()) self.params.update(branch_params) self.order_by = [] @@ -736,28 +1126,32 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): # pylint: di froms_var = db.render_list_comprehension(items="relationships(path)", item_name="from") with_clause = ( "peer, path," - " reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level) AS branch_level," + " reduce(br_lvl = 0, r in relationships(path) | CASE WHEN r.branch_level > br_lvl THEN r.branch_level ELSE br_lvl END) AS branch_level," f" {froms_var} AS froms" ) query = """ MATCH path = (n:Node { uuid: $uuid } )%(filter)s(peer:Node) WHERE $hierarchy IN LABELS(peer) and all(r IN relationships(path) WHERE (%(branch_filter)s)) - WITH n, last(nodes(path)) as peer + WITH n, collect(last(nodes(path))) AS peers_with_duplicates + CALL { + WITH peers_with_duplicates + UNWIND peers_with_duplicates AS pwd + RETURN DISTINCT pwd AS peer + } CALL { WITH n, peer MATCH path = (n)%(filter)s(peer) WHERE all(r IN relationships(path) WHERE (%(branch_filter)s)) WITH %(with_clause)s - RETURN peer as peer1, path as path1 - ORDER BY branch_level DESC, froms[-1] DESC, froms[-2] DESC - LIMIT 1 + RETURN peer as peer1, path as path1, all(r IN relationships(path) WHERE (r.status = "active")) AS is_active + ORDER BY branch_level DESC, froms[-1] DESC, froms[-2] DESC, is_active DESC } - WITH peer1 as peer, path1 as path + WITH peer1 as peer, path1 as path, is_active """ % {"filter": filter_str, "branch_filter": branch_filter, "with_clause": with_clause} self.add_to_query(query) - where_clause = ['all(r IN relationships(path) WHERE (r.status = "active"))'] + where_clause = ["is_active = TRUE"] clean_filters = extract_field_filters(field_name=self.direction.value, filters=self.filters) diff --git a/backend/infrahub/core/query/relationship.py b/backend/infrahub/core/query/relationship.py index 3229443eb6..7a15b8470a 100644 --- a/backend/infrahub/core/query/relationship.py +++ b/backend/infrahub/core/query/relationship.py @@ -7,7 +7,7 @@ from infrahub_sdk import UUIDT -from infrahub.core.constants import RelationshipDirection +from infrahub.core.constants import RelationshipDirection, RelationshipStatus from infrahub.core.query import Query, QueryType from infrahub.core.query.subquery import build_subquery_filter, build_subquery_order from infrahub.core.timestamp import Timestamp @@ -107,6 +107,23 @@ class 
RelationshipPeersData: destination_id: UUID destination_kind: str + def reversed(self) -> RelationshipPeersData: + return RelationshipPeersData( + id=self.id, + identifier=self.identifier, + source_id=self.destination_id, + source_kind=self.destination_kind, + destination_id=self.source_id, + destination_kind=self.source_kind, + ) + + +@dataclass +class FullRelationshipIdentifier: + identifier: str + source_kind: str + destination_kind: str + class RelationshipQuery(Query): def __init__( @@ -160,6 +177,18 @@ def __init__( super().__init__(*args, **kwargs) + def get_relationship_properties_dict(self, status: RelationshipStatus) -> dict[str, Optional[str]]: + rel_prop_dict = { + "branch": self.branch.name, + "branch_level": self.branch.hierarchy_level, + "status": status.value, + "from": self.at.to_string(), + "to": None, + } + if self.schema.hierarchical: + rel_prop_dict["hierarchy"] = self.schema.hierarchical + return rel_prop_dict + class RelationshipCreateQuery(RelationshipQuery): name = "relationship_create" @@ -194,20 +223,14 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): self.params["is_visible"] = self.rel.is_visible query_match = """ - MATCH (s { uuid: $source_id }) - MATCH (d { uuid: $destination_id }) + MATCH (s:Node { uuid: $source_id }) + MATCH (d:Node { uuid: $destination_id }) """ self.add_to_query(query_match) self.query_add_all_node_property_match() - self.params["rel_prop"] = { - "branch": self.branch.name, - "branch_level": self.branch.hierarchy_level, - "status": "active", - "from": self.at.to_string(), - "to": None, - } + self.params["rel_prop"] = self.get_relationship_properties_dict(status=RelationshipStatus.ACTIVE) arrows = self.schema.get_query_arrows() r1 = f"{arrows.left.start}[r1:{self.rel_type} $rel_prop ]{arrows.left.end}" r2 = f"{arrows.right.start}[r2:{self.rel_type} $rel_prop ]{arrows.right.end}" @@ -278,7 +301,7 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): self.params["at"] = self.at.to_string() query = """ - MATCH (rl { uuid: $rel_node_id }) + MATCH (rl:Relationship { uuid: $rel_node_id }) """ self.add_to_query(query) @@ -364,9 +387,9 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): # Match all nodes, including properties # ----------------------------------------------------------------------- query = """ - MATCH (s { uuid: $source_id }) - MATCH (d { uuid: $destination_id }) - MATCH (rl { uuid: $rel_node_id }) + MATCH (s:Node { uuid: $source_id }) + MATCH (d:Node { uuid: $destination_id }) + MATCH (rl:Relationship { uuid: $rel_node_id }) """ self.add_to_query(query) self.return_labels = ["s", "d", "rl"] @@ -376,13 +399,7 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): self.params[f"prop_{prop_name}_id"] = element_id_to_id(prop.prop_db_id) self.return_labels.append(f"prop_{prop_name}") - self.params["rel_prop"] = { - "branch": self.branch.name, - "branch_level": self.branch.hierarchy_level, - "status": "deleted", - "from": self.at.to_string(), - "to": None, - } + self.params["rel_prop"] = self.get_relationship_properties_dict(status=RelationshipStatus.DELETED) arrows = self.schema.get_query_arrows() r1 = f"{arrows.left.start}[r1:{self.rel_type} $rel_prop ]{arrows.left.end}" @@ -429,20 +446,14 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): self.params["rel_id"] = self.rel.id self.params["branch"] = self.branch.name self.params["branch_level"] = self.branch.hierarchy_level - self.params["rel_prop"] = { - "branch": self.branch.name, - 
"branch_level": self.branch.hierarchy_level, - "status": "deleted", - "from": self.at.to_string(), - "to": None, - } + self.params["rel_prop"] = self.get_relationship_properties_dict(status=RelationshipStatus.DELETED) arrows = self.schema.get_query_arrows() r1 = f"{arrows.left.start}[r1:{self.rel_type} $rel_prop ]{arrows.left.end}" r2 = f"{arrows.right.start}[r2:{self.rel_type} $rel_prop ]{arrows.right.end}" query = """ - MATCH (s { uuid: $source_id })-[]-(rl:Relationship {uuid: $rel_id})-[]-(d { uuid: $destination_id }) + MATCH (s:Node { uuid: $source_id })-[]-(rl:Relationship {uuid: $rel_id})-[]-(d:Node { uuid: $destination_id }) CREATE (s)%s(rl) CREATE (rl)%s(d) """ % ( @@ -723,8 +734,8 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs): r2 = f"{arrows.right.start}[r2:{self.rel.rel_type}]{arrows.right.end}" query = """ - MATCH (s { uuid: $source_id }) - MATCH (d { uuid: $destination_id }) + MATCH (s:Node { uuid: $source_id }) + MATCH (d:Node { uuid: $destination_id }) MATCH (s)%s(rl:Relationship { name: $name })%s(d) WHERE %s """ % ( @@ -744,12 +755,24 @@ class RelationshipGetByIdentifierQuery(Query): type: QueryType = QueryType.READ - def __init__(self, identifiers: List[str], excluded_namespaces: List[str], *args, **kwargs) -> None: - if not identifiers: - raise ValueError("identifiers cannot be an empty list") + def __init__( + self, + identifiers: Optional[List[str]] = None, + full_identifiers: Optional[List[FullRelationshipIdentifier]] = None, + excluded_namespaces: Optional[List[str]] = None, + *args, + **kwargs, + ) -> None: + if (not identifiers and not full_identifiers) or (identifiers and full_identifiers): + raise ValueError("one and only one of identifiers or full_identifiers is required") - self.identifiers = identifiers - self.excluded_namespaces = excluded_namespaces + if full_identifiers: + self.identifiers = list({i.identifier for i in full_identifiers}) + self.full_identifiers = full_identifiers + else: + self.identifiers = identifiers + self.full_identifiers = [] + self.excluded_namespaces = excluded_namespaces or [] # Always exclude relationships with internal nodes if "Internal" not in self.excluded_namespaces: @@ -759,6 +782,9 @@ def __init__(self, identifiers: List[str], excluded_namespaces: List[str], *args async def query_init(self, db: InfrahubDatabase, *args, **kwargs) -> None: self.params["identifiers"] = self.identifiers + self.params["full_identifiers"] = [ + [full_id.source_kind, full_id.identifier, full_id.destination_kind] for full_id in self.full_identifiers + ] self.params["excluded_namespaces"] = self.excluded_namespaces self.params["branch"] = self.branch.name self.params["at"] = self.at.to_string() @@ -774,7 +800,10 @@ async def query_init(self, db: InfrahubDatabase, *args, **kwargs) -> None: CALL { WITH rl MATCH (src:Node)-[r1:IS_RELATED]-(rl:Relationship)-[r2:IS_RELATED]-(dst:Node) - WHERE NOT src.namespace IN $excluded_namespaces AND NOT dst.namespace IN $excluded_namespaces AND %s + WHERE (size($full_identifiers) = 0 OR [src.kind, rl.name, dst.kind] in $full_identifiers) + AND NOT src.namespace IN $excluded_namespaces + AND NOT dst.namespace IN $excluded_namespaces + AND %s RETURN src, dst, r1, r2, rl as rl1 ORDER BY r1.branch_level DESC, r2.branch_level DESC, r1.from DESC, r2.from DESC LIMIT 1 diff --git a/backend/infrahub/core/query/subquery.py b/backend/infrahub/core/query/subquery.py index f5b0f09f0b..49711f1571 100644 --- a/backend/infrahub/core/query/subquery.py +++ b/backend/infrahub/core/query/subquery.py @@ -23,9 
+23,14 @@ async def build_subquery_filter( branch: Branch = None, subquery_idx: int = 1, partial_match: bool = False, + optional_match: bool = False, + result_prefix: str = "filter", + support_profiles: bool = False, + extra_tail_properties: Optional[dict[str, str]] = None, ) -> Tuple[str, dict[str, Any], str]: + support_profiles = support_profiles and field and field.is_attribute and filter_name in ("value", "values") params = {} - prefix = f"filter{subquery_idx}" + prefix = f"{result_prefix}{subquery_idx}" # If the field is not provided, it means that the query is targeting a special keyword like:: any or attribute # Currently any and attribute have the same effect and relationship is not supported yet @@ -45,6 +50,7 @@ async def build_subquery_filter( param_prefix=prefix, db=db, partial_match=partial_match, + support_profiles=support_profiles, ) params.update(field_params) @@ -53,14 +59,31 @@ async def build_subquery_filter( where_str = " AND ".join(field_where) branch_level_str = "reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level)" froms_str = db.render_list_comprehension(items="relationships(path)", item_name="from") + to_return = f"{node_alias} AS {prefix}" + with_extra = "" + final_with_extra = "" + if extra_tail_properties: + tail_node = field_filter[-1] + with_extra += f", {tail_node.name}" + final_with_extra += f", latest_node_details[2] AS {tail_node.name}" + for variable_name, tail_property in extra_tail_properties.items(): + to_return += f", {tail_node.name}.{tail_property} AS {variable_name}" + match = "OPTIONAL MATCH" if optional_match else "MATCH" query = f""" WITH {node_alias} - MATCH path = {filter_str} + {match} path = {filter_str} WHERE {where_str} - WITH {node_alias}, path, {branch_level_str} AS branch_level, {froms_str} AS froms - RETURN {node_alias} as {prefix} + WITH + {node_alias}, + path, + {branch_level_str} AS branch_level, + {froms_str} AS froms, + all(r IN relationships(path) WHERE r.status = "active") AS is_active{with_extra} ORDER BY branch_level DESC, froms[-1] DESC, froms[-2] DESC - LIMIT 1 + WITH head(collect([is_active, {node_alias}{with_extra}])) AS latest_node_details + WHERE latest_node_details[0] = TRUE + WITH latest_node_details[1] AS {node_alias}{final_with_extra} + RETURN {to_return} """ return query, params, prefix @@ -74,9 +97,13 @@ async def build_subquery_order( name: Optional[str] = None, branch: Branch = None, subquery_idx: int = 1, + result_prefix: Optional[str] = None, + support_profiles: bool = False, + extra_tail_properties: Optional[dict[str, str]] = None, ) -> Tuple[str, dict[str, Any], str]: + support_profiles = support_profiles and field and field.is_attribute and order_by in ("value", "values") params = {} - prefix = f"order{subquery_idx}" + prefix = result_prefix or f"order{subquery_idx}" field_filter, field_params, field_where = await field.get_query_filter( db=db, @@ -86,6 +113,7 @@ async def build_subquery_order( filter_value=None, branch=branch, param_prefix=prefix, + support_profiles=support_profiles, ) params.update(field_params) @@ -102,14 +130,45 @@ async def build_subquery_order( where_str = " AND ".join(field_where) branch_level_str = "reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level)" froms_str = db.render_list_comprehension(items="relationships(path)", item_name="from") + to_return_parts = {f"last.{order_by if order_by != 'values' and '__' not in order_by else 'value'}": prefix} + with_parts: dict[str, Optional[str]] = { + "last": None, + } + if extra_tail_properties: + tail_node 
= field_filter[-1] + if tail_node.name not in with_parts: + with_parts[tail_node.name] = None + tail_node_name = with_parts.get(tail_node.name) or tail_node.name + for variable_name, tail_property in extra_tail_properties.items(): + to_return_parts[f"{tail_node_name}.{tail_property}"] = variable_name + with_str_to_alias_parts: list[str] = [] + with_str_alias_parts: list[str] = [] + with_str_from_list_parts: list[str] = ["latest_node_details[0] AS is_active"] + index = 1 + for k, v in with_parts.items(): + with_str_to_alias_parts.append(f"{k} AS {v}" if v else k) + alias = v or k + with_str_alias_parts.append(alias) + with_str_from_list_parts.append(f"latest_node_details[{index}] AS {alias}") + index += 1 + with_str_to_alias_parts.append(f"{branch_level_str} AS branch_level") + with_str_to_alias_parts.append(f"{froms_str} AS froms") + with_str_to_alias_parts.append("""all(r IN relationships(path) WHERE r.status = "active") AS is_active""") + with_str_to_alias = ", ".join(with_str_to_alias_parts) + with_str_alias = ", ".join(with_str_alias_parts) + with_str_from_list = ", ".join(with_str_from_list_parts) + to_return_str_parts = [] + for expression, alias in to_return_parts.items(): + to_return_str_parts.append(f"CASE WHEN is_active = TRUE THEN {expression} ELSE NULL END AS {alias}") + to_return_str = ", ".join(to_return_str_parts) query = f""" WITH {node_alias} - MATCH path = {filter_str} + OPTIONAL MATCH path = {filter_str} WHERE {where_str} - WITH last, path, {branch_level_str} AS branch_level, {froms_str} AS froms - RETURN last.value as {prefix} + WITH {with_str_to_alias} ORDER BY branch_level DESC, froms[-1] DESC, froms[-2] DESC - LIMIT 1 + WITH head(collect([is_active, {with_str_alias}])) AS latest_node_details + WITH {with_str_from_list} + RETURN {to_return_str} """ - return query, params, prefix diff --git a/backend/infrahub/core/query/task_log.py b/backend/infrahub/core/query/task_log.py index fb5f3a451e..2c5e32b8ab 100644 --- a/backend/infrahub/core/query/task_log.py +++ b/backend/infrahub/core/query/task_log.py @@ -26,7 +26,7 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Any) -> N # we want the relationship to be setup regardless so that it's in place for when the # Task gets propperly created. 
query = """ - MATCH (t:Task {uuid: $task_id}) + MATCH (t:Task { uuid: $task_id }) CREATE (n:%(node_type)s $node_prop)-[:RELATES_TO]->(t) """ % {"node_type": node_type} self.add_to_query(query=query) diff --git a/backend/infrahub/core/query/utils.py b/backend/infrahub/core/query/utils.py index ef5bce73ec..82a72c4b24 100644 --- a/backend/infrahub/core/query/utils.py +++ b/backend/infrahub/core/query/utils.py @@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, Optional, Union from infrahub.core import registry -from infrahub.core.schema import NodeSchema +from infrahub.core.schema import NodeSchema, ProfileSchema if TYPE_CHECKING: from neo4j.graph import Node as Neo4jNode @@ -11,11 +11,13 @@ from infrahub.core.branch import Branch -def find_node_schema(node: Neo4jNode, branch: Union[Branch, str], duplicate: bool) -> Optional[NodeSchema]: +def find_node_schema( + node: Neo4jNode, branch: Union[Branch, str], duplicate: bool +) -> Optional[Union[NodeSchema, ProfileSchema]]: for label in node.labels: if registry.schema.has(name=label, branch=branch): schema = registry.schema.get(name=label, branch=branch, duplicate=duplicate) - if isinstance(schema, NodeSchema): + if isinstance(schema, (NodeSchema, ProfileSchema)): return schema return None diff --git a/backend/infrahub/core/registry.py b/backend/infrahub/core/registry.py index 0e0f7f1a97..fe8651be6a 100644 --- a/backend/infrahub/core/registry.py +++ b/backend/infrahub/core/registry.py @@ -9,7 +9,6 @@ from infrahub.exceptions import ( BranchNotFoundError, DataTypeNotFoundError, - Error, InitializationError, ) @@ -18,12 +17,11 @@ from infrahub.core.attribute import BaseAttribute from infrahub.core.branch import Branch - from infrahub.core.definitions import Brancher from infrahub.core.manager import NodeManager - from infrahub.core.schema import GenericSchema, NodeSchema + from infrahub.core.schema import MainSchemaTypes, NodeSchema from infrahub.core.schema_manager import SchemaManager from infrahub.database import InfrahubDatabase - from infrahub.graphql.mutations.attribute import BaseAttributeInput + from infrahub.graphql.mutations.attribute import BaseAttributeCreate, BaseAttributeUpdate from infrahub.graphql.types import InfrahubObject from infrahub.storage import InfrahubObjectStorage from infrahub.types import InfrahubDataType @@ -38,19 +36,30 @@ class Registry: branch: dict = field(default_factory=dict) node: dict = field(default_factory=dict) _default_branch: Optional[str] = None + _default_ipnamespace: Optional[str] = None _schema: Optional[SchemaManager] = None default_graphql_type: Dict[str, InfrahubObject] = field(default_factory=dict) graphql_type: dict = field(default_factory=lambda: defaultdict(dict)) data_type: Dict[str, InfrahubDataType] = field(default_factory=dict) - input_type: Dict[str, BaseAttributeInput] = field(default_factory=dict) + input_type: Dict[str, Union[BaseAttributeCreate, BaseAttributeUpdate]] = field(default_factory=dict) account: dict = field(default_factory=dict) account_id: dict = field(default_factory=dict) node_group: dict = field(default_factory=dict) attr_group: dict = field(default_factory=dict) - branch_object: Optional[Type[Brancher]] = None + _branch_object: Optional[Type[Branch]] = None _manager: Optional[Type[NodeManager]] = None _storage: Optional[InfrahubObjectStorage] = None + @property + def branch_object(self) -> Type[Branch]: + if not self._branch_object: + raise InitializationError + return self._branch_object + + @branch_object.setter + def branch_object(self, value: Type[Branch]) -> None: + 
self._branch_object = value + @property def default_branch(self) -> str: if not self._default_branch: @@ -59,9 +68,20 @@ def default_branch(self) -> str: return self._default_branch @default_branch.setter - def default_branch(self, value: str): + def default_branch(self, value: str) -> None: self._default_branch = value + @property + def default_ipnamespace(self) -> str: + if not self._default_ipnamespace: + raise InitializationError() + + return self._default_ipnamespace + + @default_ipnamespace.setter + def default_ipnamespace(self, value: str) -> None: + self._default_ipnamespace = value + @property def schema(self) -> SchemaManager: if not self._schema: @@ -70,7 +90,7 @@ def schema(self) -> SchemaManager: return self._schema @schema.setter - def schema(self, value: SchemaManager): + def schema(self, value: SchemaManager) -> None: self._schema = value @property @@ -81,7 +101,7 @@ def manager(self) -> Type[NodeManager]: return self._manager @manager.setter - def manager(self, value: Type[NodeManager]): + def manager(self, value: Type[NodeManager]) -> None: self._manager = value @property @@ -92,7 +112,7 @@ def storage(self) -> InfrahubObjectStorage: return self._storage @storage.setter - def storage(self, value: InfrahubObjectStorage): + def storage(self, value: InfrahubObjectStorage) -> None: self._storage = value def schema_has_been_initialized(self) -> bool: @@ -100,47 +120,8 @@ def schema_has_been_initialized(self) -> bool: return True return False - def set_item(self, kind: str, name: str, item, branch: Optional[str] = None) -> bool: - branch = branch or registry.default_branch - getattr(self, kind)[branch][name] = item - return True - - def has_item(self, kind: str, name: str, branch=None) -> bool: - try: - self.get_item(kind=kind, name=name, branch=branch) - return True - except ValueError: - return False - - def get_item(self, kind: str, name: str, branch: Optional[Union[Branch, str]] = None): - branch = get_branch_from_registry(branch=branch) - - attr = getattr(self, kind) - - if branch.name in attr and name in attr[branch.name]: - return attr[branch.name][name] - - default_branch = registry.default_branch - if name in attr[default_branch]: - return attr[default_branch][name] - - raise ValueError(f"Unable to find the {kind} {name} for the branch {branch.name} in the registry") - - def get_all_item(self, kind: str, branch: Optional[Union[Branch, str]] = None) -> dict: - """Return all the nodes in the schema for a given branch. 
- The current implementation is a bit simplistic, will need to re-evaluate.""" - branch = get_branch_from_registry(branch=branch) - - attr = getattr(self, kind) - - if branch.name in attr: - return attr[branch.name] - - default_branch = registry.default_branch - return attr[default_branch] - def get_node_schema(self, name: str, branch: Optional[Union[Branch, str]] = None) -> NodeSchema: - return self.schema.get(name=name, branch=branch) + return self.schema.get_node_schema(name=name, branch=branch) def get_data_type( self, @@ -150,20 +131,15 @@ def get_data_type( raise DataTypeNotFoundError(name=name) return self.data_type[name] - def get_full_schema( - self, branch: Optional[Union[Branch, str]] = None - ) -> Dict[str, Union[NodeSchema, GenericSchema]]: + def get_full_schema(self, branch: Optional[Union[Branch, str]] = None) -> Dict[str, MainSchemaTypes]: """Return all the nodes in the schema for a given branch.""" return self.schema.get_full(branch=branch) - def get_all_graphql_type(self, branch: Optional[Union[Branch, str]] = None) -> Dict[str, InfrahubObject]: - """Return all the graphql_type for a given branch.""" - return self.get_all_item(kind="graphql_type", branch=branch) - - def delete_all(self): + def delete_all(self) -> None: self.branch = {} self.node = {} - self.schema = None + self._schema = None + self._default_ipnamespace = None self.account = {} self.account_id = {} self.node_group = {} @@ -185,9 +161,8 @@ def get_branch_from_registry(self, branch: Optional[Union[Branch, str]] = None) Branch: A Branch Object """ - if self.branch_object and branch: - if self.branch_object.isinstance(branch) and not isinstance(branch, str): - return branch + if branch and not isinstance(branch, str): + return branch # if the name of the branch is not defined or not a string we used the default branch name if not branch or not isinstance(branch, str): @@ -203,8 +178,8 @@ def get_branch_from_registry(self, branch: Optional[Union[Branch, str]] = None) async def get_branch( self, + db: InfrahubDatabase, session: Optional[AsyncSession] = None, - db: Optional[InfrahubDatabase] = None, branch: Optional[Union[Branch, str]] = None, ) -> Branch: """Return a branch object based on its name. 
@@ -224,14 +199,8 @@ async def get_branch( Branch: A Branch Object """ - if self.branch_object and branch: - if self.branch_object.isinstance(branch) and not isinstance(branch, str): - return branch - - if (self.branch_object.isinstance(branch) and branch.name == GLOBAL_BRANCH_NAME) or ( - isinstance(branch, str) and branch == GLOBAL_BRANCH_NAME - ): - raise BranchNotFoundError(identifier=GLOBAL_BRANCH_NAME) + if branch and not isinstance(branch, str): + return branch if not branch or not isinstance(branch, str): branch = registry.default_branch @@ -242,9 +211,6 @@ async def get_branch( if not session and not db: raise - if not self.branch_object: - raise Error("Branch object not initialized") - async with lock.registry.local_schema_lock(): obj = await self.branch_object.get_by_name(name=branch, db=db) registry.branch[branch] = obj @@ -259,6 +225,3 @@ def get_global_branch(self) -> Branch: registry = Registry() - -get_branch_from_registry = registry.get_branch_from_registry -get_branch = registry.get_branch diff --git a/backend/infrahub/core/relationship/constraints/count.py b/backend/infrahub/core/relationship/constraints/count.py index 9b49dee755..f45044931b 100644 --- a/backend/infrahub/core/relationship/constraints/count.py +++ b/backend/infrahub/core/relationship/constraints/count.py @@ -26,7 +26,7 @@ def __init__(self, db: InfrahubDatabase, branch: Optional[Branch] = None): self.branch = branch async def check(self, relm: RelationshipManager) -> None: - branch = await registry.get_branch() if not self.branch else self.branch + branch = await registry.get_branch(db=self.db) if not self.branch else self.branch ( _, peer_ids_present_local_only, diff --git a/backend/infrahub/core/relationship/constraints/interface.py b/backend/infrahub/core/relationship/constraints/interface.py index b009bc3d9b..b5b5f7d9bb 100644 --- a/backend/infrahub/core/relationship/constraints/interface.py +++ b/backend/infrahub/core/relationship/constraints/interface.py @@ -5,5 +5,4 @@ class RelationshipManagerConstraintInterface(ABC): @abstractmethod - async def check(self, relm: RelationshipManager) -> None: - ... + async def check(self, relm: RelationshipManager) -> None: ... 
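Reviewer note (illustrative, not part of this patch): with the registry changes above, RelationshipManagerConstraintInterface only requires an async check(relm) coroutine, and any constraint that needs the active branch must now hand a database handle to registry.get_branch(), as the updated count constraint does. A minimal sketch of a conforming constraint follows; the class name NoopConstraint and its untyped arguments are placeholders.

    from infrahub.core import registry
    from infrahub.core.relationship.constraints.interface import RelationshipManagerConstraintInterface


    class NoopConstraint(RelationshipManagerConstraintInterface):
        """Hypothetical constraint showing the minimal shape expected by the interface."""

        def __init__(self, db, branch=None):
            self.db = db
            self.branch = branch

        async def check(self, relm) -> None:
            # An explicitly provided branch wins; otherwise ask the registry,
            # which now requires the db argument (see the count constraint above).
            branch = self.branch if self.branch else await registry.get_branch(db=self.db)
            # A real constraint would inspect `relm` against `branch` here and
            # raise a validation error on violation; this sketch deliberately does nothing.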
diff --git a/backend/infrahub/core/relationship/model.py b/backend/infrahub/core/relationship/model.py index 93b512209d..f6e1705b3c 100644 --- a/backend/infrahub/core/relationship/model.py +++ b/backend/infrahub/core/relationship/model.py @@ -34,7 +34,7 @@ from infrahub.core.branch import Branch from infrahub.core.node import Node - from infrahub.core.schema import GenericSchema, NodeSchema, RelationshipSchema + from infrahub.core.schema import MainSchemaTypes, RelationshipSchema from infrahub.database import InfrahubDatabase # pylint: disable=redefined-builtin @@ -252,7 +252,7 @@ async def _get_peer(self, db: InfrahubDatabase) -> None: self._peer = peer self.peer_id = self._peer.id - def get_peer_schema(self) -> Union[NodeSchema, GenericSchema]: + def get_peer_schema(self) -> MainSchemaTypes: return registry.schema.get(name=self.schema.peer, branch=self.branch, duplicate=False) def compare_properties_with_data(self, data: RelationshipPeerData) -> List[str]: @@ -573,13 +573,7 @@ def as_list(self) -> List[Relationship]: class RelationshipManager: - def __init__( # pylint: disable=unused-argument - self, - schema: RelationshipSchema, - branch: Branch, - at: Timestamp, - node: Node, - ) -> None: + def __init__(self, schema: RelationshipSchema, branch: Branch, at: Timestamp, node: Node) -> None: self.schema: RelationshipSchema = schema self.name: str = schema.name self.node: Node = node @@ -644,6 +638,12 @@ def __iter__(self) -> Iterator[Relationship]: return iter(self._relationships) + def __len__(self) -> int: + if not self.has_fetched_relationships: + raise LookupError("you can't count relationships before the cache has been populated.") + + return len(self._relationships) + async def get_peer(self, db: InfrahubDatabase) -> Optional[Node]: if self.schema.cardinality == "many": raise TypeError("peer is not available for relationship with multiple cardinality") @@ -738,7 +738,7 @@ async def get_relationships(self, db: InfrahubDatabase) -> List[Relationship]: return self._relationships.as_list() - async def update( + async def update( # pylint: disable=too-many-branches self, data: Union[List[Union[str, Node]], Dict[str, Any], str, Node, None], db: InfrahubDatabase ) -> bool: """Replace and Update the list of relationships with this one.""" @@ -762,10 +762,11 @@ async def update( self._relationships.append(previous_relationships[str(item_id)]) continue - if isinstance(item, type(None)) and previous_relationships: - for rel in previous_relationships.values(): - await rel.delete(db=db) - changed = True + if isinstance(item, type(None)): + if previous_relationships: + for rel in previous_relationships.values(): + await rel.delete(db=db) + changed = True continue if isinstance(item, str) and item in previous_relationships: @@ -795,6 +796,29 @@ async def update( return changed + async def add(self, data: Union[Dict[str, Any], Node], db: InfrahubDatabase) -> bool: + """Add a new relationship to the list of existing ones, avoid duplication.""" + if not isinstance(data, (self.rel_class, dict)) and not hasattr(data, "_schema"): + raise ValidationError({self.name: f"Invalid data provided to form a relationship {data}"}) + + previous_relationships = {rel.peer_id for rel in await self.get_relationships(db=db) if rel.peer_id} + + item_id = getattr(data, "id", None) + if not item_id and isinstance(data, dict): + item_id = data.get("id", None) + + if item_id in previous_relationships: + return False + + # If the item ID is not present in the previous set of relationships, create a new one + 
self._relationships.append( + await self.rel_class(schema=self.schema, branch=self.branch, at=self.at, node=self.node).new( + db=db, data=data + ) + ) + + return True + async def remove( self, peer_id: Union[str, UUID], diff --git a/backend/infrahub/core/schema/__init__.py b/backend/infrahub/core/schema/__init__.py index ff681d1897..5ca8475690 100644 --- a/backend/infrahub/core/schema/__init__.py +++ b/backend/infrahub/core/schema/__init__.py @@ -1,14 +1,12 @@ from __future__ import annotations -import enum -from typing import Any, List, Optional +import uuid +from typing import Any, List, Optional, TypeAlias, Union from pydantic import BaseModel, ConfigDict, Field from infrahub.core.constants import RESTRICTED_NAMESPACES from infrahub.core.models import HashableModel -from infrahub.core.relationship import Relationship -from infrahub.types import ATTRIBUTE_KIND_LABELS from .attribute_schema import AttributeSchema from .basenode_schema import AttributePathParsingError, BaseNodeSchema, SchemaAttributePath, SchemaAttributePathValue @@ -18,15 +16,10 @@ from .filter import FilterSchema from .generic_schema import GenericSchema from .node_schema import NodeSchema +from .profile_schema import ProfileSchema from .relationship_schema import RelationshipSchema -# pylint: disable=redefined-builtin - -# Generate an Enum for Pydantic based on a List of String -attribute_dict = {attr.upper(): attr for attr in ATTRIBUTE_KIND_LABELS} -AttributeKind = enum.Enum("AttributeKind", dict(attribute_dict)) - -RELATIONSHIPS_MAPPING = {"Relationship": Relationship} +MainSchemaTypes: TypeAlias = Union[NodeSchema, GenericSchema, ProfileSchema] # ----------------------------------------------------- @@ -74,6 +67,16 @@ def validate_namespaces(self) -> List[str]: return errors + def generate_uuid(self) -> None: + """Generate UUID for all nodes, attributes & relationships + Mainly useful during unit tests.""" + for node in self.nodes + self.generics: + if not node.id: + node.id = str(uuid.uuid4()) + for item in node.relationships + node.attributes: + if not item.id: + item.id = str(uuid.uuid4()) + internal_schema = internal.to_dict() @@ -87,8 +90,10 @@ def validate_namespaces(self) -> List[str]: "FilterSchema", "NodeSchema", "GenericSchema", + "ProfileSchema", "RelationshipSchema", "SchemaAttributePath", "SchemaAttributePathValue", "SchemaRoot", + "MainSchemaTypes", ] diff --git a/backend/infrahub/core/schema/attribute_schema.py b/backend/infrahub/core/schema/attribute_schema.py index 185349dfe7..8af809e3af 100644 --- a/backend/infrahub/core/schema/attribute_schema.py +++ b/backend/infrahub/core/schema/attribute_schema.py @@ -1,7 +1,7 @@ from __future__ import annotations import enum -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union from pydantic import field_validator, model_validator @@ -62,42 +62,40 @@ def get_branch(self) -> BranchSupportType: raise ValueError("branch hasn't been defined yet") return self.branch - def get_attribute_enum_class(self) -> Optional[enum.EnumType]: - if not self.uses_enum_class: - return None - return generate_python_enum(f"{self.name.title()}Enum", {v: v for v in self.enum}) + def get_enum_class(self) -> Type[enum.Enum]: + if not self.enum: + raise ValueError(f"{self.name} is not an Enum") + return generate_python_enum(name=f"{self.name.title()}Enum", options=self.enum) - def convert_to_attribute_enum(self, value: Any) -> Any: - if not self.uses_enum_class or not value: + def 
convert_value_to_enum(self, value: Any) -> Optional[enum.Enum]: + if isinstance(value, enum.Enum) or value is None: return value - attribute_enum_class = self.get_attribute_enum_class() - if isinstance(value, attribute_enum_class): - return value - if isinstance(value, enum.Enum): - value = value.value - return attribute_enum_class(value) + enum_class = self.get_enum_class() + return enum_class(value) - def convert_to_enum_value(self, value: Any) -> Any: - if not self.uses_enum_class: - return value - if isinstance(value, list): - value = [self.convert_to_attribute_enum(element) for element in value] + def convert_enum_to_value(self, data: Any) -> Any: + if isinstance(data, list): + value = [self.convert_enum_to_value(element) for element in data] return [element.value if isinstance(element, enum.Enum) else element for element in value] - value = self.convert_to_attribute_enum(value) - return value.value if isinstance(value, enum.Enum) else value + if isinstance(data, enum.Enum): + return data.value + return data async def get_query_filter( self, name: str, filter_name: str, branch: Optional[Branch] = None, - filter_value: Optional[Union[str, int, bool, list, enum.Enum]] = None, + filter_value: Optional[Union[str, int, bool, list]] = None, include_match: bool = True, param_prefix: Optional[str] = None, db: Optional[InfrahubDatabase] = None, partial_match: bool = False, + support_profiles: bool = False, ) -> Tuple[List[QueryElement], Dict[str, Any], List[str]]: - filter_value = self.convert_to_enum_value(filter_value) + if self.enum: + filter_value = self.convert_enum_to_value(filter_value) + return await default_attribute_query_filter( name=name, filter_name=filter_name, @@ -107,4 +105,5 @@ async def get_query_filter( param_prefix=param_prefix, db=db, partial_match=partial_match, + support_profiles=support_profiles, ) diff --git a/backend/infrahub/core/schema/basenode_schema.py b/backend/infrahub/core/schema/basenode_schema.py index 19f446ba4c..2a0854fa39 100644 --- a/backend/infrahub/core/schema/basenode_schema.py +++ b/backend/infrahub/core/schema/basenode_schema.py @@ -2,8 +2,9 @@ import hashlib import keyword +import os from dataclasses import asdict, dataclass -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Type, Union +from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Literal, Optional, Type, Union, overload from infrahub_sdk.utils import compare_lists, intersection from pydantic import field_validator @@ -19,6 +20,7 @@ from typing_extensions import Self from infrahub.core.branch import Branch + from infrahub.core.constants import RelationshipKind from infrahub.core.schema import GenericSchema, NodeSchema # pylint: disable=redefined-builtin @@ -135,6 +137,11 @@ def _diff_element( # Process element b for name in sorted(present_both): + # If the element doesn't have an ID on either side + # this most likely means it was added recently from the internal schema. + if os.environ.get("PYTEST_RUNNING", "") != "true" and local_map[name] is None and other_map[name] is None: + elements_diff.added[name] = None + continue local_element: obj_type = get_func(self, name=name) other_element: obj_type = get_func(other, name=name) element_diff = local_element.diff(other_element) @@ -150,6 +157,16 @@ def _diff_element( return elements_diff + @overload + def get_field( + self, name: str, raise_on_error: Literal[True] = True + ) -> Union[AttributeSchema, RelationshipSchema]: ... 
+ + @overload + def get_field( + self, name: str, raise_on_error: Literal[False] = False + ) -> Optional[Union[AttributeSchema, RelationshipSchema]]: ... + def get_field(self, name: str, raise_on_error: bool = True) -> Optional[Union[AttributeSchema, RelationshipSchema]]: if field := self.get_attribute_or_none(name=name): return field @@ -157,10 +174,10 @@ def get_field(self, name: str, raise_on_error: bool = True) -> Optional[Union[At if field := self.get_relationship_or_none(name=name): return field - if not raise_on_error: - return None + if raise_on_error: + raise ValueError(f"Unable to find the field {name}") - raise ValueError(f"Unable to find the field {name}") + return None def get_attribute(self, name: str) -> AttributeSchema: for item in self.attributes: @@ -230,6 +247,9 @@ def get_relationships_by_identifier(self, id: str) -> List[RelationshipSchema]: return rels + def get_relationships_of_kind(self, relationship_kinds: Iterable[RelationshipKind]) -> list[RelationshipSchema]: + return [r for r in self.relationships if r.kind in relationship_kinds] + def get_attributes_name_id_map(self) -> Dict[str, str]: name_id_map = {} for attr in self.attributes: @@ -324,7 +344,7 @@ def parse_attribute_path( branch: Optional[Union[Branch, str]] = None, schema_map_override: Optional[Dict[str, Union[NodeSchema, GenericSchema]]] = None, ) -> SchemaAttributePath: - allowed_leaf_properties = ["value"] + allowed_leaf_properties = ["value", "version", "binary_address"] schema_path = SchemaAttributePath() relationship_piece: Optional[str] = None attribute_piece: Optional[str] = None @@ -365,10 +385,9 @@ def parse_attribute_path( return schema_path def get_unique_constraint_schema_attribute_paths( - self, include_unique_attributes: bool = False + self, include_unique_attributes: bool = False, branch: Optional[Branch] = None ) -> List[List[SchemaAttributePath]]: constraint_paths_groups = [] - if include_unique_attributes: for attribute_schema in self.unique_attributes: constraint_paths_groups.append( @@ -381,7 +400,7 @@ def get_unique_constraint_schema_attribute_paths( for uniqueness_path_group in self.uniqueness_constraints: constraint_paths_groups.append( [ - self.parse_attribute_path(attribute_path=uniqueness_path_part) + self.parse_attribute_path(attribute_path=uniqueness_path_part, branch=branch) for uniqueness_path_part in uniqueness_path_group ] ) @@ -407,5 +426,4 @@ def from_schema_attribute_path( return cls(**asdict(schema_attribute_path), value=value) -class AttributePathParsingError(Exception): - ... +class AttributePathParsingError(Exception): ... 
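Reviewer note on the get_field() typing above (illustrative, not part of this patch): the pair of typing.overload signatures with Literal defaults lets type checkers treat the default call (raise_on_error=True) as returning a non-optional value, while an explicit raise_on_error=False call stays Optional. A small self-contained sketch of the same pattern, using throwaway names rather than Infrahub types:

    from typing import Literal, Optional, overload


    @overload
    def lookup(name: str, raise_on_error: Literal[True] = True) -> str: ...
    @overload
    def lookup(name: str, raise_on_error: Literal[False] = False) -> Optional[str]: ...
    def lookup(name: str, raise_on_error: bool = True) -> Optional[str]:
        # Toy lookup standing in for the attribute/relationship search in get_field().
        fields = {"name": "Text"}
        if name in fields:
            return fields[name]
        if raise_on_error:
            raise ValueError(f"Unable to find the field {name}")
        return None

Under this arrangement lookup("name") is typed as str while lookup("name", raise_on_error=False) is Optional[str], which is why the implementation in the hunk above now raises first and only then returns None.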
diff --git a/backend/infrahub/core/schema/definitions/core.py b/backend/infrahub/core/schema/definitions/core.py index b78703b65e..5e9bb99152 100644 --- a/backend/infrahub/core/schema/definitions/core.py +++ b/backend/infrahub/core/schema/definitions/core.py @@ -5,12 +5,15 @@ DEFAULT_KIND_MIN_LENGTH, AccountRole, AccountType, + AllowOverrideType, ArtifactStatus, BranchConflictKeep, BranchSupportType, ContentType, + GeneratorInstanceStatus, InfrahubKind, ProposedChangeState, + RelationshipDeleteBehavior, Severity, ValidatorConclusion, ValidatorState, @@ -18,6 +21,34 @@ # pylint: disable=too-many-lines +core_profile_schema_definition = { + "name": "Profile", + "namespace": "Core", + "include_in_menu": False, + "icon": "mdi:shape-plus-outline", + "description": "Base Profile in Infrahub.", + "label": "Profile", + "display_labels": ["profile_name__value"], + "default_filter": "profile_name__value", + "attributes": [ + { + "name": "profile_name", + "kind": "Text", + "min_length": 3, + "max_length": 32, + "optional": False, + "unique": True, + }, + { + "name": "profile_priority", + "kind": "Number", + "default_value": 1000, + "optional": True, + }, + ], +} + + core_models: dict[str, Any] = { "generics": [ { @@ -33,23 +64,16 @@ "description": "Any Entities that is responsible for some data.", "label": "Owner", "include_in_menu": False, - "display_labels": ["name__value"], - "attributes": [ - {"name": "name", "kind": "Text", "unique": True}, - {"name": "description", "kind": "Text", "optional": True}, - ], + "documentation": "/topics/metadata", }, + core_profile_schema_definition, { "name": "Source", "namespace": "Lineage", "description": "Any Entities that stores or produces data.", "label": "Source", "include_in_menu": False, - "display_labels": ["name__value"], - "attributes": [ - {"name": "name", "kind": "Text", "unique": True}, - {"name": "description", "kind": "Text", "optional": True}, - ], + "documentation": "/topics/metadata", }, { "name": "Comment", @@ -103,6 +127,7 @@ "kind": "Component", "optional": True, "cardinality": "many", + "on_delete": RelationshipDeleteBehavior.CASCADE, }, { "name": "created_by", @@ -133,14 +158,14 @@ "relationships": [ { "name": "members", - "peer": "CoreNode", + "peer": InfrahubKind.NODE, "optional": True, "identifier": "group_member", "cardinality": "many", }, { "name": "subscribers", - "peer": "CoreNode", + "peer": InfrahubKind.NODE, "optional": True, "identifier": "group_subscriber", "cardinality": "many", @@ -189,6 +214,7 @@ "optional": True, "cardinality": "many", "identifier": "validator__check", + "on_delete": RelationshipDeleteBehavior.CASCADE, }, ], }, @@ -251,6 +277,7 @@ "order_by": ["name__value"], "display_labels": ["label__value"], "branch": BranchSupportType.AWARE.value, + "documentation": "/topics/proposed-change", "attributes": [ {"name": "name", "kind": "Text", "unique": True}, {"name": "label", "kind": "Text", "optional": True}, @@ -339,6 +366,7 @@ "order_by": ["name__value"], "display_labels": ["name__value"], "branch": BranchSupportType.AGNOSTIC.value, + "documentation": "/topics/repository", "attributes": [ { "name": "name", @@ -405,6 +433,229 @@ "optional": True, "cardinality": "many", }, + { + "name": "generators", + "peer": InfrahubKind.GENERATORDEFINITION, + "identifier": "generator_definition__repository", + "optional": True, + "cardinality": "many", + }, + ], + }, + { + "name": "IPNamespace", + "namespace": "Builtin", + "label": "IP Namespace", + "description": "A generic container for IP prefixes and IP addresses", + 
"include_in_menu": False, + "default_filter": "name__value", + "order_by": ["name__value"], + "display_labels": ["name__value"], + "icon": "mdi:format-list-group", + "branch": BranchSupportType.AWARE.value, + "attributes": [ + { + "name": "name", + "kind": "Text", + "unique": True, + "branch": BranchSupportType.AWARE.value, + "order_weight": 1000, + }, + { + "name": "description", + "kind": "Text", + "optional": True, + "branch": BranchSupportType.AWARE.value, + "order_weight": 2000, + }, + ], + "relationships": [ + { + "name": "ip_prefixes", + "label": "IP Prefixes", + "peer": InfrahubKind.IPPREFIX, + "identifier": "ip_namespace__ip_prefix", + "optional": True, + "cardinality": "many", + "on_delete": RelationshipDeleteBehavior.CASCADE, + "allow_override": AllowOverrideType.NONE, + }, + { + "name": "ip_addresses", + "label": "IP Addresses", + "peer": InfrahubKind.IPADDRESS, + "identifier": "ip_namespace__ip_address", + "optional": True, + "cardinality": "many", + "on_delete": RelationshipDeleteBehavior.CASCADE, + "allow_override": AllowOverrideType.NONE, + }, + ], + }, + { + "name": "IPPrefix", + "label": "IP Prefix", + "namespace": "Builtin", + "description": "IPv6 or IPv4 prefix also referred as network", + "include_in_menu": False, + "default_filter": "prefix__value", + "order_by": ["prefix__version", "prefix__binary_address"], + "display_labels": ["prefix__value"], + "icon": "mdi:ip-network", + "branch": BranchSupportType.AWARE.value, + "hierarchical": True, + "attributes": [ + { + "name": "prefix", + "kind": "IPNetwork", + "branch": BranchSupportType.AWARE.value, + "order_weight": 1000, + }, + { + "name": "description", + "kind": "Text", + "optional": True, + "branch": BranchSupportType.AWARE.value, + "order_weight": 2000, + }, + { + "name": "member_type", + "kind": "Dropdown", + "choices": [ + { + "name": "prefix", + "label": "Prefix", + "description": "Prefix serves as container for other prefixes", + }, + { + "name": "address", + "label": "Address", + "description": "Prefix serves as subnet for IP addresses", + }, + ], + "branch": BranchSupportType.AWARE.value, + "default_value": "address", + "order_weight": 3000, + }, + { + "name": "is_pool", + "kind": "Boolean", + "branch": BranchSupportType.AWARE.value, + "default_value": False, + "order_weight": 4000, + "description": "All IP addresses within this prefix are considered usable", + }, + { + "name": "is_top_level", + "kind": "Boolean", + "read_only": True, + "optional": True, + "allow_override": AllowOverrideType.NONE, + }, + { + "name": "utilization", + "kind": "Number", + "read_only": True, + "optional": True, + "allow_override": AllowOverrideType.NONE, + }, + { + "name": "netmask", + "kind": "Text", + "read_only": True, + "optional": True, + "allow_override": AllowOverrideType.NONE, + }, + { + "name": "hostmask", + "kind": "Text", + "read_only": True, + "optional": True, + "allow_override": AllowOverrideType.NONE, + }, + { + "name": "network_address", + "kind": "Text", + "read_only": True, + "optional": True, + "allow_override": AllowOverrideType.NONE, + }, + { + "name": "broadcast_address", + "kind": "Text", + "read_only": True, + "optional": True, + "allow_override": AllowOverrideType.NONE, + }, + ], + "relationships": [ + { + "name": "ip_namespace", + "label": "IP Namespace", + "peer": InfrahubKind.IPNAMESPACE, + "identifier": "ip_namespace__ip_prefix", + "optional": True, + "cardinality": "one", + "allow_override": AllowOverrideType.NONE, + }, + { + "name": "ip_addresses", + "label": "IP Addresses", + "peer": 
InfrahubKind.IPADDRESS, + "identifier": "ip_prefix__ip_address", + "optional": True, + "cardinality": "many", + "allow_override": AllowOverrideType.NONE, + "read_only": True, + }, + ], + }, + { + "name": "IPAddress", + "label": "IP Address", + "namespace": "Builtin", + "description": "IPv6 or IPv4 address", + "include_in_menu": False, + "default_filter": "address__value", + "order_by": ["address__version", "address__binary_address"], + "display_labels": ["address__value"], + "icon": "mdi:ip-outline", + "branch": BranchSupportType.AWARE.value, + "attributes": [ + { + "name": "address", + "kind": "IPHost", + "branch": BranchSupportType.AWARE.value, + "order_weight": 1000, + }, + { + "name": "description", + "kind": "Text", + "optional": True, + "branch": BranchSupportType.AWARE.value, + "order_weight": 2000, + }, + ], + "relationships": [ + { + "name": "ip_namespace", + "label": "IP Namespace", + "peer": InfrahubKind.IPNAMESPACE, + "identifier": "ip_namespace__ip_address", + "optional": True, + "cardinality": "one", + "allow_override": AllowOverrideType.NONE, + }, + { + "name": "ip_prefix", + "label": "IP Prefix", + "peer": InfrahubKind.IPPREFIX, + "identifier": "ip_prefix__ip_address", + "optional": True, + "cardinality": "one", + "allow_override": AllowOverrideType.NONE, + "read_only": True, + }, ], }, ], @@ -413,7 +664,7 @@ "name": "StandardGroup", "namespace": "Core", "description": "Group of nodes of any kind.", - "include_in_menu": True, + "include_in_menu": False, "icon": "mdi:account-group", "label": "Standard Group", "default_filter": "name__value", @@ -422,25 +673,38 @@ "branch": BranchSupportType.AWARE.value, "inherit_from": [InfrahubKind.GENERICGROUP], }, + { + "name": "GeneratorGroup", + "namespace": "Core", + "description": "Group of nodes that are created by a generator.", + "include_in_menu": False, + "icon": "mdi:state-machine", + "label": "Generator Group", + "default_filter": "name__value", + "order_by": ["name__value"], + "display_labels": ["name__value"], + "branch": BranchSupportType.LOCAL.value, + "inherit_from": [InfrahubKind.GENERICGROUP], + }, { "name": "GraphQLQueryGroup", "namespace": "Core", "description": "Group of nodes associated with a given GraphQLQuery.", - "include_in_menu": True, + "include_in_menu": False, "icon": "mdi:account-group", "label": "GraphQL Query Group", "default_filter": "name__value", "order_by": ["name__value"], "display_labels": ["name__value"], "branch": BranchSupportType.LOCAL.value, - "inherit_from": ["CoreGroup"], + "inherit_from": [InfrahubKind.GENERICGROUP], "attributes": [ {"name": "parameters", "kind": "JSON", "optional": True}, ], "relationships": [ { "name": "query", - "peer": "CoreGraphQLQuery", + "peer": InfrahubKind.GRAPHQLQUERY, "optional": False, "cardinality": "one", "kind": "Attribute", @@ -474,7 +738,8 @@ "order_by": ["name__value"], "display_labels": ["label__value"], "branch": BranchSupportType.AGNOSTIC.value, - "inherit_from": ["LineageOwner", "LineageSource"], + "inherit_from": [InfrahubKind.LINEAGEOWNER, InfrahubKind.LINEAGESOURCE], + "documentation": "/topics/auth", "attributes": [ {"name": "name", "kind": "Text", "unique": True}, {"name": "password", "kind": "HashedPassword", "unique": False}, @@ -506,6 +771,7 @@ "default_filter": "token__value", "display_labels": ["token__value"], "branch": BranchSupportType.AGNOSTIC.value, + "documentation": "/topics/auth", "attributes": [ {"name": "name", "kind": "Text", "optional": True}, {"name": "token", "kind": "Text", "unique": True}, @@ -551,6 +817,7 @@ "display_labels": 
["name__value"], "branch": BranchSupportType.AGNOSTIC.value, "inherit_from": [InfrahubKind.TASKTARGET], + "documentation": "/topics/proposed-change", "attributes": [ {"name": "name", "kind": "Text", "optional": False}, {"name": "description", "kind": "TextArea", "optional": True}, @@ -597,6 +864,7 @@ "kind": "Component", "optional": True, "cardinality": "many", + "on_delete": RelationshipDeleteBehavior.CASCADE, }, { "name": "threads", @@ -605,6 +873,7 @@ "kind": "Component", "optional": True, "cardinality": "many", + "on_delete": RelationshipDeleteBehavior.CASCADE, }, { "name": "validations", @@ -613,6 +882,7 @@ "identifier": "proposed_change__validator", "optional": True, "cardinality": "many", + "on_delete": RelationshipDeleteBehavior.CASCADE, }, ], }, @@ -731,7 +1001,13 @@ "order_by": ["name__value"], "display_labels": ["name__value"], "branch": BranchSupportType.AGNOSTIC.value, - "inherit_from": ["LineageOwner", "LineageSource", InfrahubKind.GENERICREPOSITORY, InfrahubKind.TASKTARGET], + "inherit_from": [ + InfrahubKind.LINEAGEOWNER, + InfrahubKind.LINEAGESOURCE, + InfrahubKind.GENERICREPOSITORY, + InfrahubKind.TASKTARGET, + ], + "documentation": "/topics/repository", "attributes": [ {"name": "default_branch", "kind": "Text", "default_value": "main", "order_weight": 6000}, { @@ -753,7 +1029,13 @@ "order_by": ["name__value"], "display_labels": ["name__value"], "branch": BranchSupportType.AGNOSTIC.value, - "inherit_from": ["LineageOwner", "LineageSource", InfrahubKind.GENERICREPOSITORY, InfrahubKind.TASKTARGET], + "inherit_from": [ + InfrahubKind.LINEAGEOWNER, + InfrahubKind.LINEAGESOURCE, + InfrahubKind.GENERICREPOSITORY, + InfrahubKind.TASKTARGET, + ], + "documentation": "/topics/repository", "attributes": [ { "name": "ref", @@ -782,6 +1064,7 @@ "display_labels": ["name__value"], "inherit_from": [InfrahubKind.TRANSFORM], "branch": BranchSupportType.AWARE.value, + "documentation": "/topics/transformation", "attributes": [ {"name": "template_path", "kind": "Text"}, ], @@ -854,6 +1137,23 @@ {"name": "line_number", "kind": "Number", "optional": True}, ], }, + { + "name": "GeneratorCheck", + "namespace": "Core", + "description": "A check related to a Generator instance", + "include_in_menu": False, + "label": "Generator Check", + "display_labels": ["label__value"], + "inherit_from": ["CoreCheck"], + "branch": BranchSupportType.AGNOSTIC.value, + "attributes": [ + { + "name": "instance", + "kind": "Text", + "optional": False, + }, + ], + }, { "name": "DataValidator", "namespace": "Core", @@ -942,6 +1242,26 @@ }, ], }, + { + "name": "GeneratorValidator", + "namespace": "Core", + "description": "A validator related to generators", + "include_in_menu": False, + "label": "Generator Validator", + "display_labels": ["label__value"], + "inherit_from": [InfrahubKind.VALIDATOR], + "branch": BranchSupportType.AGNOSTIC.value, + "relationships": [ + { + "name": "definition", + "peer": InfrahubKind.GENERATORDEFINITION, + "kind": "Attribute", + "optional": False, + "cardinality": "one", + "branch": BranchSupportType.AGNOSTIC.value, + }, + ], + }, { "name": "CheckDefinition", "namespace": "Core", @@ -1006,6 +1326,7 @@ "display_labels": ["name__value"], "inherit_from": [InfrahubKind.TRANSFORM], "branch": BranchSupportType.AWARE.value, + "documentation": "/topics/transformation", "attributes": [ {"name": "file_path", "kind": "Text"}, {"name": "class_name", "kind": "Text"}, @@ -1022,6 +1343,7 @@ "order_by": ["name__value"], "display_labels": ["name__value"], "branch": BranchSupportType.AWARE.value, + 
"documentation": "/topics/graphql", "attributes": [ {"name": "name", "kind": "Text", "unique": True}, {"name": "description", "kind": "Text", "optional": True}, @@ -1085,6 +1407,7 @@ "display_labels": ["name__value"], "branch": BranchSupportType.LOCAL.value, "inherit_from": [InfrahubKind.TASKTARGET], + "documentation": "/topics/artifact", "attributes": [ {"name": "name", "kind": "Text"}, { @@ -1113,7 +1436,7 @@ "relationships": [ { "name": "object", - "peer": "CoreNode", + "peer": InfrahubKind.NODE, "kind": "Attribute", "identifier": "artifact__node", "cardinality": "one", @@ -1140,6 +1463,7 @@ "display_labels": ["name__value"], "branch": BranchSupportType.AWARE.value, "inherit_from": [InfrahubKind.TASKTARGET], + "documentation": "/topics/artifact", "attributes": [ {"name": "name", "kind": "Text", "unique": True}, {"name": "artifact_name", "kind": "Text"}, @@ -1170,6 +1494,90 @@ }, ], }, + { + "name": "GeneratorDefinition", + "namespace": "Core", + "include_in_menu": False, + "icon": "mdi:state-machine", + "label": "Generator Definition", + "default_filter": "name__value", + "order_by": ["name__value"], + "display_labels": ["name__value"], + "branch": BranchSupportType.AWARE.value, + "inherit_from": [InfrahubKind.TASKTARGET], + "attributes": [ + {"name": "name", "kind": "Text", "unique": True}, + {"name": "description", "kind": "Text", "optional": True}, + {"name": "parameters", "kind": "JSON"}, + {"name": "file_path", "kind": "Text"}, + {"name": "class_name", "kind": "Text"}, + {"name": "convert_query_response", "kind": "Boolean", "optional": True, "default_value": False}, + ], + "relationships": [ + { + "name": "query", + "peer": InfrahubKind.GRAPHQLQUERY, + "identifier": "generator_definition__graphql_query", + "kind": "Attribute", + "cardinality": "one", + "optional": False, + }, + { + "name": "repository", + "peer": InfrahubKind.GENERICREPOSITORY, + "kind": "Attribute", + "cardinality": "one", + "identifier": "generator_definition__repository", + "optional": False, + }, + { + "name": "targets", + "peer": InfrahubKind.GENERICGROUP, + "kind": "Attribute", + "identifier": "generator_definition___group", + "cardinality": "one", + "optional": False, + }, + ], + }, + { + "name": "GeneratorInstance", + "namespace": "Core", + "label": "Generator Instance", + "include_in_menu": False, + "icon": "mdi:file-document-outline", + "default_filter": "name__value", + "order_by": ["name__value"], + "display_labels": ["name__value"], + "branch": BranchSupportType.LOCAL.value, + "inherit_from": [InfrahubKind.TASKTARGET], + "attributes": [ + {"name": "name", "kind": "Text"}, + { + "name": "status", + "kind": "Text", + "enum": GeneratorInstanceStatus.available_types(), + }, + ], + "relationships": [ + { + "name": "object", + "peer": InfrahubKind.NODE, + "kind": "Attribute", + "identifier": "generator__node", + "cardinality": "one", + "optional": False, + }, + { + "name": "definition", + "peer": InfrahubKind.GENERATORDEFINITION, + "kind": "Attribute", + "identifier": "generator__generator_definition", + "cardinality": "one", + "optional": False, + }, + ], + }, { "name": "StandardWebhook", "namespace": "Core", @@ -1211,5 +1619,21 @@ }, ], }, + { + "name": "Namespace", + "namespace": "Ipam", + "description": "A namespace that segments IPAM", + "label": "IPAM Namespace", + "default_filter": "name__value", + "order_by": ["name__value"], + "display_labels": ["name__value"], + "include_in_menu": False, + "icon": "mdi:format-list-group", + "branch": BranchSupportType.AWARE.value, + "inherit_from": 
[InfrahubKind.IPNAMESPACE], + "attributes": [ + {"name": "default", "kind": "Boolean", "optional": True, "read_only": True, "order_weight": 9000} + ], + }, ], } diff --git a/backend/infrahub/core/schema/definitions/deprecated.py b/backend/infrahub/core/schema/definitions/deprecated.py new file mode 100644 index 0000000000..070be5c3c8 --- /dev/null +++ b/backend/infrahub/core/schema/definitions/deprecated.py @@ -0,0 +1,26 @@ +from typing import Any + +from infrahub.core.constants import HashableModelState + +deprecated_models: dict[str, Any] = { + "generics": [ + { + "name": "Source", + "namespace": "Lineage", + "display_labels": [], + "attributes": [ + {"name": "name", "kind": "Text", "state": HashableModelState.ABSENT}, + {"name": "description", "kind": "Text", "state": HashableModelState.ABSENT}, + ], + }, + { + "name": "Owner", + "namespace": "Lineage", + "display_labels": [], + "attributes": [ + {"name": "name", "kind": "Text", "state": HashableModelState.ABSENT}, + {"name": "description", "kind": "Text", "state": HashableModelState.ABSENT}, + ], + }, + ] +} diff --git a/backend/infrahub/core/schema/definitions/internal.py b/backend/infrahub/core/schema/definitions/internal.py index 9354b04d6a..321d6ad905 100644 --- a/backend/infrahub/core/schema/definitions/internal.py +++ b/backend/infrahub/core/schema/definitions/internal.py @@ -20,9 +20,11 @@ NAMESPACE_REGEX, NODE_KIND_REGEX, NODE_NAME_REGEX, + AllowOverrideType, BranchSupportType, HashableModelState, RelationshipCardinality, + RelationshipDeleteBehavior, RelationshipDirection, RelationshipKind, UpdateSupport, @@ -101,6 +103,7 @@ def object_kind(self) -> str: "Text": "str", "List": "list", "Number": "int", + "URL": "str", } return kind_map[self.kind] @@ -317,6 +320,13 @@ def to_dict(self) -> dict[str, Any]: optional=True, extra={"update": UpdateSupport.VALIDATE_CONSTRAINT}, ), + SchemaAttribute( + name="documentation", + kind="URL", + description="Link to a documentation associated with this object, can be internal or external.", + optional=True, + extra={"update": UpdateSupport.ALLOWED}, + ), SchemaAttribute( name="state", kind="Text", @@ -471,7 +481,7 @@ def to_dict(self) -> dict[str, Any]: SchemaAttribute( name="regex", kind="Text", - description="Regex uses to limit limit the characters allowed in for the attributes.", + description="Regex uses to limit the characters allowed in for the attributes.", optional=True, extra={"update": UpdateSupport.VALIDATE_CONSTRAINT}, ), @@ -543,7 +553,7 @@ def to_dict(self) -> dict[str, Any]: SchemaAttribute( name="order_weight", kind="Number", - description="Number used to order the attribute in the frontend (table and view).", + description="Number used to order the attribute in the frontend (table and view). 
Lowest value will be ordered first.", optional=True, extra={"update": UpdateSupport.ALLOWED}, ), @@ -572,6 +582,16 @@ def to_dict(self) -> dict[str, Any]: optional=True, extra={"update": UpdateSupport.NOT_APPLICABLE}, ), + SchemaAttribute( + name="allow_override", + kind="Text", + internal_kind=AllowOverrideType, + description="Type of allowed override for the attribute.", + enum=AllowOverrideType.available_types(), + default_value=AllowOverrideType.ANY, + optional=True, + extra={"update": UpdateSupport.ALLOWED}, + ), ], relationships=[ SchemaRelationship( @@ -615,8 +635,6 @@ def to_dict(self) -> dict[str, Any]: kind="Text", description="Type (kind) of objects supported on the other end of the relationship.", regex=str(NODE_KIND_REGEX), - min_length=DEFAULT_KIND_MIN_LENGTH, - max_length=DEFAULT_KIND_MAX_LENGTH, extra={"update": UpdateSupport.VALIDATE_CONSTRAINT}, ), SchemaAttribute( @@ -683,7 +701,7 @@ def to_dict(self) -> dict[str, Any]: SchemaAttribute( name="order_weight", kind="Number", - description="Number used to order the relationship in the frontend (table and view).", + description="Number used to order the relationship in the frontend (table and view). Lowest value will be ordered first.", optional=True, extra={"update": UpdateSupport.ALLOWED}, ), @@ -750,6 +768,34 @@ def to_dict(self) -> dict[str, Any]: optional=True, extra={"update": UpdateSupport.NOT_APPLICABLE}, ), + SchemaAttribute( + name="on_delete", + kind="Text", + internal_kind=RelationshipDeleteBehavior, + description="Default is no-action. If cascade, related node(s) are deleted when this node is deleted.", + enum=RelationshipDeleteBehavior.available_types(), + default_value=None, + optional=True, + extra={"update": UpdateSupport.ALLOWED}, + ), + SchemaAttribute( + name="allow_override", + kind="Text", + internal_kind=AllowOverrideType, + description="Type of allowed override for the relationship.", + enum=AllowOverrideType.available_types(), + default_value=AllowOverrideType.ANY, + optional=True, + extra={"update": UpdateSupport.ALLOWED}, + ), + SchemaAttribute( + name="read_only", + kind="Boolean", + description="Set the relationship as read-only, users won't be able to change its value.", + default_value=False, + optional=True, + extra={"update": UpdateSupport.ALLOWED}, + ), ], relationships=[ SchemaRelationship( diff --git a/backend/infrahub/core/schema/generated/attribute_schema.py b/backend/infrahub/core/schema/generated/attribute_schema.py index b02c3372da..7d9adb2288 100644 --- a/backend/infrahub/core/schema/generated/attribute_schema.py +++ b/backend/infrahub/core/schema/generated/attribute_schema.py @@ -6,7 +6,7 @@ from pydantic import Field -from infrahub.core.constants import HashableModelState +from infrahub.core.constants import AllowOverrideType, HashableModelState from infrahub.core.models import HashableModel from infrahub.core.schema.dropdown import DropdownChoice # noqa: TCH001 @@ -41,7 +41,7 @@ class GeneratedAttributeSchema(HashableModel): ) regex: Optional[str] = Field( default=None, - description="Regex uses to limit limit the characters allowed in for the attributes.", + description="Regex uses to limit the characters allowed in for the attributes.", json_schema_extra={"update": "validate_constraint"}, ) max_length: Optional[int] = Field( @@ -88,7 +88,7 @@ class GeneratedAttributeSchema(HashableModel): ) order_weight: Optional[int] = Field( default=None, - description="Number used to order the attribute in the frontend (table and view).", + description="Number used to order the attribute in 
the frontend (table and view). Lowest value will be ordered first.", json_schema_extra={"update": "allowed"}, ) default_value: Optional[Any] = Field( @@ -104,3 +104,8 @@ class GeneratedAttributeSchema(HashableModel): description="Expected state of the attribute after loading the schema", json_schema_extra={"update": "not_applicable"}, ) + allow_override: AllowOverrideType = Field( + default=AllowOverrideType.ANY, + description="Type of allowed override for the attribute.", + json_schema_extra={"update": "allowed"}, + ) diff --git a/backend/infrahub/core/schema/generated/base_node_schema.py b/backend/infrahub/core/schema/generated/base_node_schema.py index c9f56d5d2d..ffb9319309 100644 --- a/backend/infrahub/core/schema/generated/base_node_schema.py +++ b/backend/infrahub/core/schema/generated/base_node_schema.py @@ -86,6 +86,11 @@ class GeneratedBaseNodeSchema(HashableModel): description="List of multi-element uniqueness constraints that can combine relationships and attributes", json_schema_extra={"update": "validate_constraint"}, ) + documentation: Optional[str] = Field( + default=None, + description="Link to a documentation associated with this object, can be internal or external.", + json_schema_extra={"update": "allowed"}, + ) state: HashableModelState = Field( default=HashableModelState.PRESENT, description="Expected state of the node/generic after loading the schema", diff --git a/backend/infrahub/core/schema/generated/relationship_schema.py b/backend/infrahub/core/schema/generated/relationship_schema.py index 5da2174bd4..53d45f050c 100644 --- a/backend/infrahub/core/schema/generated/relationship_schema.py +++ b/backend/infrahub/core/schema/generated/relationship_schema.py @@ -7,9 +7,11 @@ from pydantic import Field from infrahub.core.constants import ( + AllowOverrideType, BranchSupportType, HashableModelState, RelationshipCardinality, + RelationshipDeleteBehavior, RelationshipDirection, RelationshipKind, ) @@ -35,8 +37,6 @@ class GeneratedRelationshipSchema(HashableModel): ..., description="Type (kind) of objects supported on the other end of the relationship.", pattern="^[A-Z][a-zA-Z0-9]+$", - min_length=3, - max_length=32, json_schema_extra={"update": "validate_constraint"}, ) kind: RelationshipKind = Field( @@ -80,7 +80,7 @@ class GeneratedRelationshipSchema(HashableModel): ) order_weight: Optional[int] = Field( default=None, - description="Number used to order the relationship in the frontend (table and view).", + description="Number used to order the relationship in the frontend (table and view). Lowest value will be ordered first.", json_schema_extra={"update": "allowed"}, ) optional: bool = Field( @@ -116,3 +116,18 @@ class GeneratedRelationshipSchema(HashableModel): filters: list[FilterSchema] = Field( default_factory=list, description="Relationship filters", json_schema_extra={"update": "not_applicable"} ) + on_delete: Optional[RelationshipDeleteBehavior] = Field( + default=None, + description="Default is no-action. 
If cascade, related node(s) are deleted when this node is deleted.", + json_schema_extra={"update": "allowed"}, + ) + allow_override: AllowOverrideType = Field( + default=AllowOverrideType.ANY, + description="Type of allowed override for the relationship.", + json_schema_extra={"update": "allowed"}, + ) + read_only: bool = Field( + default=False, + description="Set the relationship as read-only, users won't be able to change its value.", + json_schema_extra={"update": "allowed"}, + ) diff --git a/backend/infrahub/core/schema/node_schema.py index d369658d27..bb2070083b 100644 --- a/backend/infrahub/core/schema/node_schema.py +++ b/backend/infrahub/core/schema/node_schema.py @@ -3,18 +3,38 @@ from typing import TYPE_CHECKING, List, Optional, Union from infrahub.core import registry -from infrahub.core.constants import InfrahubKind +from infrahub.core.constants import AllowOverrideType, InfrahubKind -from .attribute_schema import AttributeSchema from .generated.node_schema import GeneratedNodeSchema from .generic_schema import GenericSchema -from .relationship_schema import RelationshipSchema if TYPE_CHECKING: from infrahub.core.branch import Branch class NodeSchema(GeneratedNodeSchema): + def validate_inheritance(self, interface: GenericSchema) -> None: + """Check that protected attributes and relationships are not overridden before inheriting them from interface.""" + for attribute in self.attributes: + if ( + attribute.name in interface.attribute_names + and not attribute.inherited + and interface.get_attribute(attribute.name).allow_override == AllowOverrideType.NONE + ): + raise ValueError( + f"{self.kind}'s attribute {attribute.name} inherited from {interface.kind} cannot be overridden" + ) + + for relationship in self.relationships: + if ( + relationship.name in interface.relationship_names + and not relationship.inherited + and interface.get_relationship(relationship.name).allow_override == AllowOverrideType.NONE + ): + raise ValueError( + f"{self.kind}'s relationship {relationship.name} inherited from {interface.kind} cannot be overridden" + ) + def inherit_from_interface(self, interface: GenericSchema) -> None: existing_inherited_attributes = {item.name: idx for idx, item in enumerate(self.attributes) if item.inherited} existing_inherited_relationships = { @@ -24,25 +44,35 @@ def inherit_from_interface(self, interface: GenericSchema) -> None: existing_inherited_relationships.keys() ) - for item in interface.attributes + interface.relationships: - if item.name in self.valid_input_names: + for attribute in interface.attributes: + if attribute.name in self.valid_input_names: + continue + + new_attribute = attribute.duplicate() + new_attribute.inherited = True + + if attribute.name not in existing_inherited_fields: + self.attributes.append(new_attribute) + else: + item_idx = existing_inherited_attributes[attribute.name] + self.attributes[item_idx] = new_attribute + + for relationship in interface.relationships: + if relationship.name in self.valid_input_names: continue - new_item = item.duplicate() - new_item.inherited = True + new_relationship = relationship.duplicate() + new_relationship.inherited = True - if isinstance(item, AttributeSchema) and item.name not in existing_inherited_fields: - self.attributes.append(new_item) - elif isinstance(item, AttributeSchema) and item.name in existing_inherited_fields: - item_idx = existing_inherited_attributes[item.name] - self.attributes[item_idx] = new_item - elif isinstance(item, RelationshipSchema)
and item.name not in existing_inherited_fields: - self.relationships.append(new_item) - elif isinstance(item, RelationshipSchema) and item.name in existing_inherited_fields: - item_idx = existing_inherited_relationships[item.name] - self.relationships[item_idx] = new_item + if relationship.name not in existing_inherited_fields: + self.relationships.append(new_relationship) + else: + item_idx = existing_inherited_relationships[relationship.name] + self.relationships[item_idx] = new_relationship def get_hierarchy_schema(self, branch: Optional[Union[Branch, str]] = None) -> GenericSchema: + if not self.hierarchy: + raise ValueError("The node is not part of a hierarchy") schema = registry.schema.get(name=self.hierarchy, branch=branch) if not isinstance(schema, GenericSchema): raise TypeError @@ -54,5 +84,13 @@ def get_labels(self) -> List[str]: labels: List[str] = [self.kind] + self.inherit_from if self.namespace not in ["Schema", "Internal"] and InfrahubKind.GENERICGROUP not in self.inherit_from: - labels.append("CoreNode") + labels.append(InfrahubKind.NODE) return labels + + def is_ip_prefix(self) -> bool: + """Return whether a node is a derivative of built-in IP prefixes.""" + return InfrahubKind.IPPREFIX in self.inherit_from + + def is_ip_address(self) -> bool: + """Return whether a node is a derivative of built-in IP addresses.""" + return InfrahubKind.IPADDRESS in self.inherit_from diff --git a/backend/infrahub/core/schema/profile_schema.py new file mode 100644 index 0000000000..0e097d59bd --- /dev/null +++ b/backend/infrahub/core/schema/profile_schema.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from typing import List + +from pydantic import Field + +from infrahub.core.constants import InfrahubKind +from infrahub.core.schema.basenode_schema import BaseNodeSchema + + +class ProfileSchema(BaseNodeSchema): + inherit_from: list[str] = Field( + default_factory=list, + description="List of Generic Kind that this profile is inheriting from", + ) + + def get_labels(self) -> List[str]: + """Return the labels for this object, composed of the kind + and the list of Generic this object is inheriting from.""" + + labels: List[str] = [self.kind] + self.inherit_from + if self.namespace not in ["Schema", "Internal"] and InfrahubKind.GENERICGROUP not in self.inherit_from: + labels.append(InfrahubKind.PROFILE) + return labels diff --git a/backend/infrahub/core/schema/relationship_schema.py index a7180ce5c6..22280f068b 100644 --- a/backend/infrahub/core/schema/relationship_schema.py +++ b/backend/infrahub/core/schema/relationship_schema.py @@ -15,7 +15,7 @@ if TYPE_CHECKING: from infrahub.core.branch import Branch from infrahub.core.query import QueryElement - from infrahub.core.schema import GenericSchema, NodeSchema + from infrahub.core.schema import MainSchemaTypes from infrahub.database import InfrahubDatabase @@ -34,7 +34,7 @@ def is_relationship(self) -> bool: def get_class(self) -> type[Relationship]: return Relationship - def get_peer_schema(self, branch: Optional[Union[Branch, str]] = None) -> Union[NodeSchema, GenericSchema]: + def get_peer_schema(self, branch: Optional[Union[Branch, str]] = None) -> MainSchemaTypes: return registry.schema.get(name=self.peer, branch=branch, duplicate=False) @property @@ -66,6 +66,7 @@ async def get_query_filter( include_match: bool = True, param_prefix: Optional[str] = None, partial_match: bool = False, + support_profiles: bool = False, # pylint:
disable=unused-argument ) -> Tuple[List[QueryElement], Dict[str, Any], List[str]]: """Generate Query String Snippet to filter the right node.""" diff --git a/backend/infrahub/core/schema_manager.py b/backend/infrahub/core/schema_manager.py index b0865e1894..a741dd0236 100644 --- a/backend/infrahub/core/schema_manager.py +++ b/backend/infrahub/core/schema_manager.py @@ -5,11 +5,11 @@ from collections import defaultdict from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from infrahub_sdk.topological_sort import DependencyCycleExistsError, topological_sort from infrahub_sdk.utils import compare_lists, duplicates, intersection from pydantic import BaseModel from infrahub import lock -from infrahub.core import get_branch, get_branch_from_registry from infrahub.core.constants import ( RESERVED_ATTR_GEN_NAMES, RESERVED_ATTR_REL_NAMES, @@ -19,6 +19,7 @@ HashableModelState, InfrahubKind, RelationshipCardinality, + RelationshipDeleteBehavior, RelationshipDirection, RelationshipKind, ) @@ -40,12 +41,15 @@ BaseNodeSchema, FilterSchema, GenericSchema, + MainSchemaTypes, NodeSchema, + ProfileSchema, RelationshipSchema, SchemaAttributePath, SchemaRoot, internal_schema, ) +from infrahub.core.schema.definitions.core import core_profile_schema_definition from infrahub.core.utils import parse_node_kind from infrahub.core.validators import CONSTRAINT_VALIDATOR_MAP from infrahub.exceptions import SchemaNotFoundError @@ -56,7 +60,6 @@ log = get_logger() - if TYPE_CHECKING: from graphql import GraphQLSchema @@ -94,12 +97,14 @@ def __init__(self, cache: Dict, name: Optional[str] = None, data: Optional[Dict[ self.name: Optional[str] = name self.nodes: Dict[str, str] = {} self.generics: Dict[str, str] = {} + self.profiles: Dict[str, str] = {} self._graphql_schema: Optional[GraphQLSchema] = None self._graphql_manager: Optional[GraphQLSchemaManager] = None if data: self.nodes = data.get("nodes", {}) self.generics = data.get("generics", {}) + self.profiles = data.get("profiles", {}) @property def node_names(self) -> List[str]: @@ -109,16 +114,26 @@ def node_names(self) -> List[str]: def generic_names(self) -> List[str]: return list(self.generics.keys()) - def get_all_kind_id_map(self) -> Dict[str, str]: + @property + def profile_names(self) -> List[str]: + return list(self.profiles.keys()) + + def get_all_kind_id_map(self, exclude_profiles: bool = False) -> Dict[str, str]: kind_id_map = {} - for name in self.all_names: + if exclude_profiles: + names = self.node_names + [gn for gn in self.generic_names if gn != InfrahubKind.PROFILE] + else: + names = self.all_names + for name in names: + if name == InfrahubKind.NODE: + continue item = self.get(name=name, duplicate=False) kind_id_map[name] = item.id return kind_id_map @property def all_names(self) -> List[str]: - return self.node_names + self.generic_names + return self.node_names + self.generic_names + self.profile_names def get_hash(self) -> str: """Calculate the hash for this objects based on the content of nodes and generics. 
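For context on the bookkeeping added above, here is a minimal standalone sketch (toy names, not the real classes) of how the new profiles map sits alongside nodes and generics in SchemaBranch: profile kinds count towards all_names but can be excluded from the kind/id map used when diffing two schema branches. "CoreProfile" is only an assumed value for InfrahubKind.PROFILE.

from typing import Dict, List


class SchemaBranchSketch:
    """Toy stand-in for SchemaBranch, reduced to its name -> hash maps."""

    def __init__(self) -> None:
        self.nodes: Dict[str, str] = {"InfraDevice": "hash-a"}
        self.generics: Dict[str, str] = {"CoreProfile": "hash-b"}
        self.profiles: Dict[str, str] = {"ProfileInfraDevice": "hash-c"}

    @property
    def all_names(self) -> List[str]:
        # profiles are now part of the full list of schema names
        return list(self.nodes) + list(self.generics) + list(self.profiles)

    def get_all_kind_id_map(self, exclude_profiles: bool = False) -> Dict[str, str]:
        if exclude_profiles:
            # diff() compares two branches without the generated profile kinds
            names = list(self.nodes) + [g for g in self.generics if g != "CoreProfile"]
        else:
            names = self.all_names
        return {name: f"id-of-{name}" for name in names}  # the real method returns schema ids


branch = SchemaBranchSketch()
assert "ProfileInfraDevice" in branch.all_names
assert "ProfileInfraDevice" not in branch.get_all_kind_id_map(exclude_profiles=True)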
@@ -136,11 +151,12 @@ def get_hash_full(self) -> SchemaBranchHash: return SchemaBranchHash(main=self.get_hash(), nodes=self.nodes, generics=self.generics) def to_dict(self) -> Dict[str, Any]: - return {"nodes": self.nodes, "generics": self.generics} + return {"nodes": self.nodes, "generics": self.generics, "profiles": self.profiles} - def to_dict_schema_object(self, duplicate: bool = False) -> Dict[str, Dict[str, Union[NodeSchema, GenericSchema]]]: + def to_dict_schema_object(self, duplicate: bool = False) -> Dict[str, Dict[str, MainSchemaTypes]]: return { "nodes": {name: self.get(name, duplicate=duplicate) for name in self.nodes}, + "profiles": {name: self.get(name, duplicate=duplicate) for name in self.profiles}, "generics": {name: self.get(name, duplicate=duplicate) for name in self.generics}, } @@ -171,8 +187,8 @@ def get_graphql_schema( def diff(self, other: SchemaBranch) -> SchemaDiff: # Identify the nodes or generics that have been added or removed - local_kind_id_map = self.get_all_kind_id_map() - other_kind_id_map = other.get_all_kind_id_map() + local_kind_id_map = self.get_all_kind_id_map(exclude_profiles=True) + other_kind_id_map = other.get_all_kind_id_map(exclude_profiles=True) clean_local_ids = [id for id in local_kind_id_map.values() if id is not None] clean_other_ids = [id for id in other_kind_id_map.values() if id is not None] shared_ids = intersection(list1=clean_local_ids, list2=clean_other_ids) @@ -242,7 +258,7 @@ def duplicate(self, name: Optional[str] = None) -> SchemaBranch: """Duplicate the current object but conserve the same cache.""" return self.__class__(name=name, data=copy.deepcopy(self.to_dict()), cache=self._cache) - def set(self, name: str, schema: Union[NodeSchema, GenericSchema]) -> str: + def set(self, name: str, schema: MainSchemaTypes) -> str: """Store a NodeSchema or GenericSchema associated with a specific name. The object will be stored in the internal cache based on its hash value. @@ -256,10 +272,12 @@ def set(self, name: str, schema: Union[NodeSchema, GenericSchema]) -> str: self.nodes[name] = schema_hash elif "Generic" in schema.__class__.__name__: self.generics[name] = schema_hash + elif "Profile" in schema.__class__.__name__: + self.profiles[name] = schema_hash return schema_hash - def get(self, name: str, duplicate: bool = True) -> Union[NodeSchema, GenericSchema]: + def get(self, name: str, duplicate: bool = True) -> MainSchemaTypes: """Access a specific NodeSchema or GenericSchema, defined by its kind. 
To ensure that no-one will ever change an object in the cache, @@ -272,6 +290,8 @@ def get(self, name: str, duplicate: bool = True) -> Union[NodeSchema, GenericSch key = self.nodes[name] elif name in self.generics: key = self.generics[name] + elif name in self.profiles: + key = self.profiles[name] if key and duplicate: return self._cache[key].duplicate() @@ -296,17 +316,26 @@ def get_generic(self, name: str, duplicate: bool = True) -> GenericSchema: raise ValueError(f"{name!r} is not of type GenericSchema") return item + def get_profile(self, name: str, duplicate: bool = True) -> ProfileSchema: + """Access a specific ProfileSchema, defined by its kind.""" + item = self.get(name=name, duplicate=duplicate) + if not isinstance(item, ProfileSchema): + raise ValueError(f"{name!r} is not of type ProfileSchema") + return item + def delete(self, name: str) -> None: if name in self.nodes: del self.nodes[name] elif name in self.generics: del self.generics[name] + elif name in self.profiles: + del self.profiles[name] else: raise SchemaNotFoundError( branch_name=self.name, identifier=name, message=f"Unable to find the schema {name!r} in the registry" ) - def get_by_id(self, id: str, duplicate: bool = True) -> Union[NodeSchema, GenericSchema]: + def get_by_id(self, id: str, duplicate: bool = True) -> MainSchemaTypes: for name in self.all_names: node = self.get(name=name, duplicate=False) if node.id != id: @@ -321,7 +350,7 @@ def get_by_id(self, id: str, duplicate: bool = True) -> Union[NodeSchema, Generi message=f"Unable to find the schema with the id {id!r} in the registry", ) - def get_by_any_id(self, id: str) -> Union[NodeSchema, GenericSchema]: + def get_by_any_id(self, id: str) -> MainSchemaTypes: for name in self.all_names: node = self.get(name=name, duplicate=False) if node.id == id: @@ -354,14 +383,12 @@ def has(self, name: str) -> bool: except SchemaNotFoundError: return False - def get_all( - self, include_internal: bool = False, duplicate: bool = True - ) -> Dict[str, Union[NodeSchema, GenericSchema]]: + def get_all(self, include_internal: bool = False, duplicate: bool = True) -> Dict[str, MainSchemaTypes]: """Retrieve everything in a single dictionary.""" return { name: self.get(name=name, duplicate=duplicate) - for name in list(self.nodes.keys()) + list(self.generics.keys()) + for name in self.all_names if include_internal or name not in INTERNAL_SCHEMA_NODE_KINDS } @@ -379,14 +406,14 @@ def get_namespaces(self, include_internal: bool = False) -> List[SchemaNamespace def get_schemas_for_namespaces( self, namespaces: Optional[List[str]] = None, include_internal: bool = False - ) -> List[Union[NodeSchema, GenericSchema]]: + ) -> List[MainSchemaTypes]: """Retrive everything in a single dictionary.""" all_schemas = self.get_all(include_internal=include_internal, duplicate=False) if namespaces: return [schema for schema in all_schemas.values() if schema.namespace in namespaces] return list(all_schemas.values()) - def get_schemas_by_rel_identifier(self, identifier: str) -> List[Union[NodeSchema, GenericSchema]]: + def get_schemas_by_rel_identifier(self, identifier: str) -> List[MainSchemaTypes]: nodes: List[RelationshipSchema] = [] for node_name in list(self.nodes.keys()) + list(self.generics.keys()): node = self.get(name=node_name, duplicate=False) @@ -430,6 +457,7 @@ def process_pre_validation(self) -> None: self.process_inheritance() self.process_hierarchy() self.process_branch_support() + self.add_profile_schemas() def process_validate(self) -> None: self.validate_names() @@ -441,14 +469,17 
@@ def process_validate(self) -> None: self.validate_display_labels() self.validate_order_by() self.validate_default_filters() + self.validate_parent_component() def process_post_validation(self) -> None: self.add_groups() self.add_hierarchy() + self.add_profile_relationships() self.process_filters() self.generate_weight() self.process_labels() self.process_dropdowns() + self.process_relationships() def generate_identifiers(self) -> None: """Generate the identifier for all relationships if it's not already present.""" @@ -482,12 +513,12 @@ def validate_identifiers(self) -> None: for identifier, rels_per_kind in rels_per_identifier.items(): # Per node kind, check if the directions are good - for _, rels in rels_per_kind.items(): + for kind, rels in rels_per_kind.items(): directions = sorted([rel.direction.value for rel in rels]) if not (len(rels) == 1 or (len(rels) == 2 and directions == ["inbound", "outbound"])): names_directions = [(rel.name, rel.direction.value) for rel in rels] raise ValueError( - f"{node.kind}: Identifier of relationships must be unique for a given direction > {identifier!r} : {names_directions}" + f"{kind}: Identifier of relationships must be unique for a given direction > {identifier!r} : {names_directions}" ) from None # Continue if no other model is using this identifier @@ -530,7 +561,7 @@ def _validate_attribute_path( ) -> SchemaAttributePath: error_header = f"{node_schema.kind}" error_header += f".{schema_attribute_name}" if schema_attribute_name else "" - allowed_leaf_properties = ["value"] + allowed_leaf_properties = ["value", "version", "binary_address"] try: schema_attribute_path = node_schema.parse_attribute_path(path, schema_map_override=schema_map_override) except AttributePathParsingError as exc: @@ -631,6 +662,68 @@ def validate_default_filters(self) -> None: node_schema, node_schema.default_filter, schema_map, schema_attribute_name="default_filter" ) + def validate_parent_component(self) -> None: + # {parent_kind: {component_kind_1, component_kind_2, ...}} + dependency_map: dict[str, set[str]] = defaultdict(set) + for name in self.generic_names + self.node_names: + node_schema = self.get(name=name, duplicate=False) + + parent_relationships: list[RelationshipSchema] = [] + component_relationships: list[RelationshipSchema] = [] + for rel_schema in node_schema.relationships: + if rel_schema.kind == RelationshipKind.PARENT and rel_schema.inherited is False: + parent_relationships.append(rel_schema) + dependency_map[rel_schema.peer].add(node_schema.kind) + elif rel_schema.kind == RelationshipKind.COMPONENT: + component_relationships.append(rel_schema) + dependency_map[node_schema.kind].add(rel_schema.peer) + + if isinstance(node_schema, NodeSchema) and node_schema.inherit_from: + for generic_schema_name in node_schema.inherit_from: + generic_schema = self.get_generic(name=generic_schema_name, duplicate=False) + generic_parent_relationships = generic_schema.get_relationships_of_kind( + relationship_kinds=[RelationshipKind.PARENT] + ) + for gpr in generic_parent_relationships: + dependency_map[gpr.peer].add(node_schema.kind) + parent_relationships.extend(generic_parent_relationships) + generic_component_relationships = generic_schema.get_relationships_of_kind( + relationship_kinds=[RelationshipKind.COMPONENT] + ) + for gcr in generic_component_relationships: + dependency_map[node_schema.kind].add(gcr.peer) + + if not parent_relationships and not component_relationships: + continue + + self._validate_parents_one_schema(node_schema=node_schema, 
parent_relationships=parent_relationships) + + try: + topological_sort(dependency_map) + except DependencyCycleExistsError as exc: + raise ValueError(f"Cycles exist among parents and components in schema: {exc.get_cycle_strings()}") from exc + + def _validate_parents_one_schema( + self, node_schema: Union[NodeSchema, GenericSchema], parent_relationships: list[RelationshipSchema] + ) -> None: + if not parent_relationships: + return + if len(parent_relationships) > 1: + parent_names = [pr.name for pr in parent_relationships] + raise ValueError( + f"{node_schema.kind}: Only one relationship of type parent is allowed, but all the following are of type parent: {parent_names}" + ) + + parent_relationship = parent_relationships[0] + if parent_relationship.cardinality != RelationshipCardinality.ONE: + raise ValueError( + f"{node_schema.kind}.{parent_relationship.name}: Relationship of type parent must be cardinality=one" + ) + if parent_relationship.optional is True: + raise ValueError( + f"{node_schema.kind}.{parent_relationship.name}: Relationship of type parent must not be optional" + ) + def validate_names(self) -> None: for name in self.all_names: node = self.get(name=name, duplicate=False) @@ -778,6 +871,26 @@ def check_if_need_to_update_label(node) -> bool: self.set(name=name, schema=node) + def process_relationships(self) -> None: + for name in self.all_names: + node = self.get(name=name, duplicate=False) + + schema_to_update: Optional[Union[NodeSchema, GenericSchema]] = None + for relationship in node.relationships: + if relationship.on_delete is not None: + continue + if not schema_to_update: + schema_to_update = node.duplicate() + + relationship_to_update = schema_to_update.get_relationship(name=relationship.name) + if relationship.kind == RelationshipKind.COMPONENT: + relationship_to_update.on_delete = RelationshipDeleteBehavior.CASCADE + else: + relationship_to_update.on_delete = RelationshipDeleteBehavior.NO_ACTION + + if schema_to_update: + self.set(name=schema_to_update.kind, schema=schema_to_update) + def process_hierarchy(self) -> None: for name in self.nodes.keys(): node = self.get(name=name, duplicate=False) @@ -824,12 +937,19 @@ def process_inheritance(self) -> None: # For all node_schema, add the attributes & relationships from the generic / interface for name in self.nodes.keys(): node = self.get(name=name, duplicate=False) + + if node.inherit_from or node.namespace not in RESTRICTED_NAMESPACES: + generics_used_by[InfrahubKind.NODE].append(node.kind) + if not node.inherit_from: continue node = node.duplicate() - generics_used_by["CoreNode"].append(node.kind) + if InfrahubKind.IPPREFIX in node.inherit_from and InfrahubKind.IPADDRESS in node.inherit_from: + raise ValueError( + f"{node.kind} cannot inherit from both {InfrahubKind.IPPREFIX} and {InfrahubKind.IPADDRESS}" + ) generic_with_hierarchical_support = [] for generic_kind in node.inherit_from: @@ -841,6 +961,9 @@ def process_inheritance(self) -> None: if generic_kind_schema.hierarchical: generic_with_hierarchical_support.append(generic_kind) + # Check if a node redefine protected generic attributes or relationships + node.validate_inheritance(interface=generic_kind_schema) + # Store the list of node referencing a specific generics generics_used_by[generic_kind].append(node.kind) node.inherit_from_interface(interface=generic_kind_schema) @@ -1012,7 +1135,7 @@ def add_groups(self): return for node_name in self.all_names: - schema: Union[NodeSchema, GenericSchema] = self.get(name=node_name, duplicate=False) + schema: 
MainSchemaTypes = self.get(name=node_name, duplicate=False) changed = False if isinstance(schema, NodeSchema) and InfrahubKind.GENERICGROUP in schema.inherit_from: @@ -1021,6 +1144,9 @@ def add_groups(self): if schema.kind in INTERNAL_SCHEMA_NODE_KINDS or schema.kind == InfrahubKind.GENERICGROUP: continue + if schema.kind in (InfrahubKind.LINEAGEOWNER, InfrahubKind.LINEAGESOURCE): + continue + if "member_of_groups" not in schema.relationship_names: if not changed: schema = schema.duplicate() @@ -1055,6 +1181,47 @@ def add_groups(self): self.set(name=node_name, schema=schema) def add_hierarchy(self): + for generic_name in self.generics.keys(): + generic = self.get_generic(name=generic_name, duplicate=False) + + if not generic.hierarchical: + continue + + generic = generic.duplicate() + read_only = generic.kind == InfrahubKind.IPPREFIX + + if "parent" not in generic.relationship_names: + generic.relationships.append( + RelationshipSchema( + name="parent", + identifier="parent__child", + peer=generic_name, + kind=RelationshipKind.HIERARCHY, + cardinality=RelationshipCardinality.ONE, + max_count=1, + branch=BranchSupportType.AWARE, + direction=RelationshipDirection.OUTBOUND, + hierarchical=generic_name, + read_only=read_only, + ) + ) + if "children" not in generic.relationship_names: + generic.relationships.append( + RelationshipSchema( + name="children", + identifier="parent__child", + peer=generic_name, + kind=RelationshipKind.HIERARCHY, + cardinality=RelationshipCardinality.MANY, + branch=BranchSupportType.AWARE, + direction=RelationshipDirection.INBOUND, + hierarchical=generic_name, + read_only=read_only, + ) + ) + + self.set(name=generic_name, schema=generic) + for node_name in self.nodes.keys(): node = self.get_node(name=node_name, duplicate=False) @@ -1062,6 +1229,7 @@ def add_hierarchy(self): continue node = node.duplicate() + read_only = InfrahubKind.IPPREFIX in node.inherit_from if node.parent and "parent" not in node.relationship_names: node.relationships.append( @@ -1075,6 +1243,7 @@ def add_hierarchy(self): branch=BranchSupportType.AWARE, direction=RelationshipDirection.OUTBOUND, hierarchical=node.hierarchy, + read_only=read_only, ) ) @@ -1089,11 +1258,110 @@ def add_hierarchy(self): branch=BranchSupportType.AWARE, direction=RelationshipDirection.INBOUND, hierarchical=node.hierarchy, + read_only=read_only, ) ) self.set(name=node_name, schema=node) + def add_profile_schemas(self): + if not self.has(name=InfrahubKind.PROFILE): + core_profile_schema = GenericSchema(**core_profile_schema_definition) + self.set(name=core_profile_schema.kind, schema=core_profile_schema) + else: + core_profile_schema = self.get(name=InfrahubKind.PROFILE, duplicate=False) + profile_schema_kinds = set() + for node_name in self.nodes.keys(): + node = self.get_node(name=node_name, duplicate=False) + if node.namespace in RESTRICTED_NAMESPACES: + continue + + profile = self.generate_profile_from_node(node=node) + self.set(name=profile.kind, schema=profile) + profile_schema_kinds.add(profile.kind) + if not profile_schema_kinds: + return + core_profile_schema = self.get(name=InfrahubKind.PROFILE, duplicate=False) + current_used_by = set(core_profile_schema.used_by) + new_used_by = profile_schema_kinds - current_used_by + if not new_used_by: + return + core_profile_schema = self.get(name=InfrahubKind.PROFILE, duplicate=True) + core_profile_schema.used_by = sorted(list(profile_schema_kinds)) + self.set(name=core_profile_schema.kind, schema=core_profile_schema) + + def add_profile_relationships(self): + for 
node_name in self.nodes.keys(): + node = self.get_node(name=node_name, duplicate=False) + if node.namespace in RESTRICTED_NAMESPACES: + continue + + if "profiles" in node.relationship_names: + continue + + # Add relationship between node and profile + node.relationships.append( + RelationshipSchema( + name="profiles", + identifier="node__profile", + peer=self._get_profile_kind(node_kind=node.kind), + kind=RelationshipKind.PROFILE, + cardinality=RelationshipCardinality.MANY, + branch=BranchSupportType.AWARE, + ) + ) + + # Add relationship between group and profile + + def _get_profile_kind(self, node_kind: str) -> str: + return f"Profile{node_kind}" + + def generate_profile_from_node(self, node: NodeSchema) -> ProfileSchema: + core_profile_schema = self.get(name=InfrahubKind.PROFILE, duplicate=False) + core_name_attr = core_profile_schema.get_attribute(name="profile_name") + profile_name_attr = AttributeSchema( + **core_name_attr.model_dump(exclude=["id", "inherited"]), + ) + profile_name_attr.branch = node.branch + core_priority_attr = core_profile_schema.get_attribute(name="profile_priority") + profile_priority_attr = AttributeSchema( + **core_priority_attr.model_dump(exclude=["id", "inherited"]), + ) + profile_priority_attr.branch = node.branch + profile = ProfileSchema( + name=node.kind, + namespace="Profile", + description=f"Profile for {node.kind}", + branch=node.branch, + include_in_menu=False, + display_labels=["profile_name__value"], + inherit_from=[InfrahubKind.LINEAGESOURCE, InfrahubKind.PROFILE], + default_filter="profile_name__value", + attributes=[profile_name_attr, profile_priority_attr], + relationships=[ + RelationshipSchema( + name="related_nodes", + identifier="node__profile", + peer=node.kind, + kind=RelationshipKind.PROFILE, + cardinality=RelationshipCardinality.MANY, + branch=BranchSupportType.AWARE, + ) + ], + ) + + for node_attr in node.attributes: + if node_attr.read_only or node_attr.optional is False: + continue + + attr = AttributeSchema( + optional=True, + **node_attr.model_dump(exclude=["id", "unique", "optional", "read_only", "default_value", "inherited"]), + ) + profile.attributes.append(attr) + + return profile + def generate_filters( self, schema: Union[NodeSchema, GenericSchema], include_relationships: bool = False ) -> List[FilterSchema]: @@ -1189,11 +1457,9 @@ def has(self, name: str, branch: Optional[Union[Branch, str]] = None) -> bool: except SchemaNotFoundError: return False - def get( - self, name: str, branch: Optional[Union[Branch, str]] = None, duplicate: bool = True - ) -> Union[NodeSchema, GenericSchema]: + def get(self, name: str, branch: Optional[Union[Branch, str]] = None, duplicate: bool = True) -> MainSchemaTypes: # For now we assume that all branches are present, will see how we need to pull new branches later. 
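The generated Profile{kind} schemas above only carry a subset of the node's attributes. A standalone sketch of the selection rule in generate_profile_from_node, with illustrative attribute names: read-only and mandatory attributes stay on the node, everything else is copied to the profile as an optional attribute.

from dataclasses import dataclass
from typing import List


@dataclass
class AttrSketch:
    name: str
    optional: bool = True
    read_only: bool = False


def profile_attribute_names(node_attributes: List[AttrSketch]) -> List[str]:
    """Mirror the filter in generate_profile_from_node."""
    selected: List[str] = []
    for attr in node_attributes:
        if attr.read_only or attr.optional is False:
            continue  # mandatory or read-only attributes are not profiled
        selected.append(attr.name)  # copied onto the profile, forced to optional=True
    return selected


device_attributes = [
    AttrSketch("name", optional=False),    # mandatory  -> stays node-only
    AttrSketch("description"),             # optional   -> becomes a profile attribute
    AttrSketch("serial", read_only=True),  # read-only  -> stays node-only
]
assert profile_attribute_names(device_attributes) == ["description"]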
- branch = get_branch_from_registry(branch=branch) + branch = registry.get_branch_from_registry(branch=branch) if branch.name in self._branches: try: @@ -1215,8 +1481,8 @@ def get_node_schema( def get_full( self, branch: Optional[Union[Branch, str]] = None, duplicate: bool = True - ) -> Dict[str, Union[NodeSchema, GenericSchema]]: - branch = get_branch_from_registry(branch=branch) + ) -> Dict[str, MainSchemaTypes]: + branch = registry.get_branch_from_registry(branch=branch) branch_name = None if branch.name in self._branches: @@ -1257,7 +1523,7 @@ async def update_schema_branch( limit: Optional[List[str]] = None, update_db: bool = True, ): - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) updated_schema = None if update_db: @@ -1299,7 +1565,7 @@ async def update_schema_to_db( ) -> SchemaBranchDiff: """Load all nodes, generics and groups from a SchemaRoot object into the database.""" - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) item_kinds = [] for item_kind, item_diff in diff.added.items(): @@ -1337,9 +1603,11 @@ async def load_schema_to_db( ) -> None: """Load all nodes, generics and groups from a SchemaRoot object into the database.""" - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) - for item_kind in schema.all_names: + for item_kind in schema.node_names + schema.generic_names: + if item_kind == InfrahubKind.PROFILE: + continue if limit and item_kind not in limit: continue item = schema.get(name=item_kind, duplicate=False) @@ -1357,15 +1625,15 @@ async def load_node_to_db( branch: Optional[Union[str, Branch]] = None, ) -> Union[NodeSchema, GenericSchema]: """Load a Node with its attributes and its relationships to the database.""" - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) node_type = "SchemaNode" if isinstance(node, GenericSchema): node_type = "SchemaGeneric" - node_schema = self.get_node_schema(name=node_type, branch=branch) - attribute_schema = self.get_node_schema(name="SchemaAttribute", branch=branch) - relationship_schema = self.get_node_schema(name="SchemaRelationship", branch=branch) + node_schema = self.get_node_schema(name=node_type, branch=branch, duplicate=False) + attribute_schema = self.get_node_schema(name="SchemaAttribute", branch=branch, duplicate=False) + relationship_schema = self.get_node_schema(name="SchemaRelationship", branch=branch, duplicate=False) # Duplicate the node in order to store the IDs after inserting them in the database new_node = node.duplicate() @@ -1405,7 +1673,7 @@ async def update_node_in_db( branch: Optional[Union[str, Branch]] = None, ) -> Union[NodeSchema, GenericSchema]: """Update a Node with its attributes and its relationships in the database.""" - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) obj = await self.get_one(id=node.get_id(), branch=branch, db=db) if not obj: @@ -1469,7 +1737,7 @@ async def update_node_in_db_based_on_diff( # pylint: disable=too-many-branches, branch: Optional[Union[str, Branch]] = None, ) -> Union[NodeSchema, GenericSchema]: """Update a Node with its attributes and its relationships in the database based on a HashableModelDiff.""" - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) obj = await self.get_one(id=node.get_id(), branch=branch, db=db) if not 
obj: @@ -1579,7 +1847,7 @@ async def delete_node_in_db( branch: Optional[Union[str, Branch]] = None, ) -> None: """Delete the node with its attributes and relationships.""" - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) obj = await self.get_one(id=node.get_id(), branch=branch, db=db) if not obj: @@ -1645,10 +1913,10 @@ async def load_schema( branch: Optional[Union[str, Branch]] = None, ) -> SchemaBranch: """Load the schema either from the cache or from the database""" - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) if not branch.is_default and branch.origin_branch: - origin_branch: Branch = await get_branch(branch=branch.origin_branch, db=db) + origin_branch: Branch = await registry.get_branch(branch=branch.origin_branch, db=db) if origin_branch.schema_hash.main == branch.schema_hash.main: origin_schema = self.get_schema_branch(name=origin_branch.name) @@ -1658,11 +1926,11 @@ async def load_schema( return new_branch_schema current_schema = self.get_schema_branch(name=branch.name) - current_schema.clear_cache() schema_diff = current_schema.get_hash_full().compare(branch.schema_hash) branch_schema = await self.load_schema_from_db( db=db, branch=branch, schema=current_schema, schema_diff=schema_diff ) + branch_schema.clear_cache() self.set_schema_branch(name=branch.name, schema=branch_schema) return branch_schema @@ -1686,7 +1954,7 @@ async def load_schema_from_db( SchemaBranch """ - branch = await get_branch(branch=branch, db=db) + branch = await registry.get_branch(branch=branch, db=db) schema = schema or SchemaBranch(cache=self._cache, name=branch.name) # If schema_diff has been provided, we need to build the proper filters for the queries based on the namespace and the name of the object. 
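A standalone sketch of the persistence rule that load_schema_to_db now follows: only node and generic kinds are written to the database, the core profile generic is skipped explicitly, and the generated profile kinds never enter the loop at all, so profile schemas live purely in memory and are rebuilt by add_profile_schemas. "CoreProfile" is an assumed value for InfrahubKind.PROFILE; the other kind names are illustrative.

from typing import List, Optional

PROFILE_KIND = "CoreProfile"  # assumed value of InfrahubKind.PROFILE


def kinds_to_persist(
    node_names: List[str],
    generic_names: List[str],
    limit: Optional[List[str]] = None,
) -> List[str]:
    """Mirror the loop in load_schema_to_db: nodes + generics, minus profiles and out-of-limit kinds."""
    kinds: List[str] = []
    for item_kind in node_names + generic_names:
        if item_kind == PROFILE_KIND:
            continue
        if limit and item_kind not in limit:
            continue
        kinds.append(item_kind)
    return kinds


assert kinds_to_persist(
    node_names=["InfraDevice"],
    generic_names=["CoreProfile", "CoreGroup"],
) == ["InfraDevice", "CoreGroup"]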
@@ -1744,16 +2012,15 @@ async def load_schema_from_db( return schema - @staticmethod - async def convert_node_schema_to_schema(schema_node: Node, db: InfrahubDatabase) -> NodeSchema: - """Convert a schema_node object loaded from the database into NodeSchema object.""" - + @classmethod + async def _prepare_node_data(cls, schema_node: Node, db: InfrahubDatabase) -> dict[str, Any]: node_data = {"id": schema_node.id} # First pull all the local attributes at the top level, then convert all the local relationships # for a standard node_schema, the relationships will be attributes and relationships for attr_name in schema_node._attributes: - node_data[attr_name] = getattr(schema_node, attr_name).value + attr = getattr(schema_node, attr_name) + node_data[attr_name] = attr.get_value() for rel_name in schema_node._relationships: if rel_name not in node_data: @@ -1764,34 +2031,20 @@ async def convert_node_schema_to_schema(schema_node: Node, db: InfrahubDatabase) item = await rel.get_peer(db=db) item_data = {"id": item.id} for item_name in item._attributes: - item_data[item_name] = getattr(item, item_name).value + item_attr = getattr(item, item_name) + item_data[item_name] = item_attr.get_value() node_data[rel_name].append(item_data) + return node_data + @classmethod + async def convert_node_schema_to_schema(cls, schema_node: Node, db: InfrahubDatabase) -> NodeSchema: + """Convert a schema_node object loaded from the database into NodeSchema object.""" + node_data = await cls._prepare_node_data(schema_node=schema_node, db=db) return NodeSchema(**node_data) - @staticmethod - async def convert_generic_schema_to_schema(schema_node: Node, db: InfrahubDatabase) -> GenericSchema: + @classmethod + async def convert_generic_schema_to_schema(cls, schema_node: Node, db: InfrahubDatabase) -> GenericSchema: """Convert a schema_node object loaded from the database into GenericSchema object.""" - - node_data = {"id": schema_node.id} - - # First pull all the attributes at the top level, then convert all the relationships - # for a standard node_schema, the relationships will be attributes and relationships - for attr_name in schema_node._attributes: - node_data[attr_name] = getattr(schema_node, attr_name).value - - for rel_name in schema_node._relationships: - if rel_name not in node_data: - node_data[rel_name] = [] - - rm = getattr(schema_node, rel_name) - for rel in await rm.get(db=db): - item = await rel.get_peer(db=db) - item_data = {"id": item.id} - for item_name in item._attributes: - item_data[item_name] = getattr(item, item_name).value - - node_data[rel_name].append(item_data) - + node_data = await cls._prepare_node_data(schema_node=schema_node, db=db) return GenericSchema(**node_data) diff --git a/backend/infrahub/core/timestamp.py b/backend/infrahub/core/timestamp.py index 18fdf2e03c..7657482263 100644 --- a/backend/infrahub/core/timestamp.py +++ b/backend/infrahub/core/timestamp.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Dict, Tuple from infrahub_sdk import Timestamp as BaseTimestamp @@ -12,6 +12,23 @@ class Timestamp(BaseTimestamp): async def to_graphql(self, *args: Any, **kwargs: Any) -> DateTime: # pylint: disable=unused-argument return self.obj + def get_query_filter_path(self, rel_name: str = "r") -> Tuple[str, Dict]: + """ + Generate a CYPHER Query filter based on a path to query a part of the graph at a specific time on all branches. 
+ + There is a currently an assumption that the relationship in the path will be named 'r' + """ + + params = {"at": self.to_string()} + + filters = [ + f"({rel_name}.from <= $at AND {rel_name}.to IS NULL)", + f"({rel_name}.from <= $at AND {rel_name}.to >= $at)", + ] + filter_str = "(" + "\n OR ".join(filters) + ")" + + return filter_str, params + def current_timestamp() -> str: return Timestamp().to_string() diff --git a/backend/infrahub/core/utils.py b/backend/infrahub/core/utils.py index 9d98b784d5..9ee5717814 100644 --- a/backend/infrahub/core/utils.py +++ b/backend/infrahub/core/utils.py @@ -1,8 +1,9 @@ from __future__ import annotations +import ipaddress import re from inspect import isclass -from typing import TYPE_CHECKING, List, Optional, Union +from typing import TYPE_CHECKING, Any, List, Optional, Union from infrahub.core.constants import RelationshipStatus from infrahub.core.models import NodeKind @@ -10,6 +11,8 @@ from infrahub.core.timestamp import Timestamp if TYPE_CHECKING: + from neo4j.graph import Node as Neo4jNode + from infrahub.database import InfrahubDatabase @@ -42,10 +45,7 @@ async def add_relationship( "status": status.value, } - results = await db.execute_query( - query=create_rel_query, - params=params, - ) + results = await db.execute_query(query=create_rel_query, params=params, name="add_relationship") if not results: return None return results[0][0] @@ -57,7 +57,7 @@ async def delete_all_relationships_for_branch(branch_name: str, db: InfrahubData """ params = {"branch_name": branch_name} - await db.execute_query(query=query, params=params) + await db.execute_query(query=query, params=params, name="delete_all_relationships_for_branch") async def update_relationships_to( @@ -82,7 +82,7 @@ async def update_relationships_to( params = {"to": to.to_string()} - return await db.execute_query(query=query, params=params) + return await db.execute_query(query=query, params=params, name="update_relationships_to") async def get_paths_between_nodes( @@ -130,10 +130,22 @@ async def count_relationships(db: InfrahubDatabase) -> int: params: dict = {} - result = await db.execute_query(query=query, params=params) + result = await db.execute_query(query=query, params=params, name="count_relationships") return result[0][0] +async def get_nodes(db: InfrahubDatabase, label: str) -> List[Neo4jNode]: + """Return theall nodes of a given label in the database.""" + query = """ + MATCH (node) + WHERE $label IN LABELS(node) + RETURN node + """ + params: dict = {"label": label} + results = await db.execute_query(query=query, params=params, name="get_nodes") + return [result[0] for result in results] + + async def count_nodes(db: InfrahubDatabase, label: str) -> int: """Return the total number of nodes of a given label in the database.""" query = """ @@ -142,7 +154,7 @@ async def count_nodes(db: InfrahubDatabase, label: str) -> int: RETURN count(node) as count """ params: dict = {"label": label} - result = await db.execute_query(query=query, params=params) + result = await db.execute_query(query=query, params=params, name="count_nodes") return result[0][0] @@ -154,7 +166,7 @@ async def delete_all_nodes(db: InfrahubDatabase): params: dict = {} - return await db.execute_query(query=query, params=params) + return await db.execute_query(query=query, params=params, name="delete_all_nodes") def element_id_to_id(element_id: Union[str, int]) -> int: @@ -167,7 +179,7 @@ def element_id_to_id(element_id: Union[str, int]) -> int: return int(element_id.split(":")[2]) -def 
extract_field_filters(field_name: str, filters: dict) -> dict: +def extract_field_filters(field_name: str, filters: dict) -> dict[str, Any]: """Extract the filters for a given field (attribute or relationship) from a filters dict.""" return { key.replace(f"{field_name}__", ""): value for key, value in filters.items() if key.startswith(f"{field_name}__") @@ -183,6 +195,17 @@ def parse_node_kind(kind: str) -> NodeKind: raise ValueError("The String provided is not a valid Node kind") +def convert_ip_to_binary_str( + obj: Union[ipaddress.IPv6Network, ipaddress.IPv4Network, ipaddress.IPv4Interface, ipaddress.IPv6Interface], +) -> str: + if isinstance(obj, (ipaddress.IPv6Network, ipaddress.IPv4Network)): + prefix_bin = bin(int(obj.network_address))[2:] + return prefix_bin.zfill(obj.max_prefixlen) + + ip_bin = bin(int(obj))[2:] + return ip_bin.zfill(obj.max_prefixlen) + + # -------------------------------------------------------------------------------- # CODE IMPORTED FROM: # https://github.com/graphql-python/graphene/blob/9c3e4bb7da001aac48002a3b7d83dcd072087770/graphene/utils/subclass_with_meta.py#L18 diff --git a/backend/infrahub/core/validators/attribute/choices.py b/backend/infrahub/core/validators/attribute/choices.py index e7064039c9..e6bc3204e8 100644 --- a/backend/infrahub/core/validators/attribute/choices.py +++ b/backend/infrahub/core/validators/attribute/choices.py @@ -2,7 +2,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional -from infrahub.core.constants import PathType +from infrahub.core.constants import NULL_VALUE, PathType from infrahub.core.path import DataPath, GroupedDataPaths from ..interface import ConstraintCheckerInterface @@ -28,6 +28,7 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, self.params["node_kind"] = self.node_schema.kind self.params["attr_name"] = self.attribute_schema.name self.params["allowed_values"] = [choice.name for choice in self.attribute_schema.choices] + self.params["null_value"] = NULL_VALUE query = """ MATCH p = (n:Node) @@ -47,7 +48,7 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, WITH full_path, node, attribute_value, value_relationship WHERE all(r in relationships(full_path) WHERE r.status = "active") AND attribute_value IS NOT NULL - AND attribute_value <> "NULL" + AND attribute_value <> $null_value AND NOT (attribute_value IN $allowed_values) """ % {"branch_filter": branch_filter} diff --git a/backend/infrahub/core/validators/attribute/enum.py b/backend/infrahub/core/validators/attribute/enum.py index a8ed5c4048..ec047599d6 100644 --- a/backend/infrahub/core/validators/attribute/enum.py +++ b/backend/infrahub/core/validators/attribute/enum.py @@ -2,7 +2,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional -from infrahub.core.constants import PathType +from infrahub.core.constants import NULL_VALUE, PathType from infrahub.core.path import DataPath, GroupedDataPaths from ..interface import ConstraintCheckerInterface @@ -28,7 +28,7 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, self.params["node_kind"] = self.node_schema.kind self.params["attr_name"] = self.attribute_schema.name self.params["allowed_values"] = self.attribute_schema.enum - + self.params["null_value"] = NULL_VALUE query = """ MATCH p = (n:Node) WHERE $node_kind IN LABELS(n) @@ -47,7 +47,7 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, WITH full_path, node, attribute_value, value_relationship WHERE all(r in 
relationships(full_path) WHERE r.status = "active") AND attribute_value IS NOT NULL - AND attribute_value <> "NULL" + AND attribute_value <> $null_value AND NOT (attribute_value IN $allowed_values) """ % {"branch_filter": branch_filter} diff --git a/backend/infrahub/core/validators/attribute/kind.py b/backend/infrahub/core/validators/attribute/kind.py index b335628cf3..5ae52aef8c 100644 --- a/backend/infrahub/core/validators/attribute/kind.py +++ b/backend/infrahub/core/validators/attribute/kind.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from typing import TYPE_CHECKING, Any, Dict, List, Optional -from infrahub.core.constants import PathType +from infrahub.core.constants import NULL_VALUE, PathType from infrahub.core.path import DataPath, GroupedDataPaths from infrahub.exceptions import ValidationError from infrahub.types import get_attribute_type @@ -34,6 +34,7 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, self.params["node_kind"] = self.node_schema.kind self.params["attr_name"] = self.attribute_schema.name + self.params["null_value"] = NULL_VALUE query = """ MATCH p = (n:Node) @@ -53,7 +54,7 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, WITH full_path, node, attribute_value, value_relationship WHERE all(r in relationships(full_path) WHERE r.status = "active") AND attribute_value IS NOT NULL - AND attribute_value <> "NULL" + AND attribute_value <> $null_value """ % {"branch_filter": branch_filter} self.add_to_query(query) @@ -65,7 +66,7 @@ async def get_paths(self) -> GroupedDataPaths: infrahub_attribute_class = infrahub_data_type.get_infrahub_class() for result in self.get_results(): value = result.get("attribute_value") - if value in (None, "NULL"): + if value in (None, NULL_VALUE): continue try: infrahub_attribute_class.validate( diff --git a/backend/infrahub/core/validators/attribute/optional.py b/backend/infrahub/core/validators/attribute/optional.py index 74f94e5735..4564f1def1 100644 --- a/backend/infrahub/core/validators/attribute/optional.py +++ b/backend/infrahub/core/validators/attribute/optional.py @@ -2,7 +2,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional -from infrahub.core.constants import PathType +from infrahub.core.constants import NULL_VALUE, PathType from infrahub.core.path import DataPath, GroupedDataPaths from ..interface import ConstraintCheckerInterface @@ -24,6 +24,7 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, self.params["node_kind"] = self.node_schema.kind self.params["attr_name"] = self.attribute_schema.name + self.params["null_value"] = NULL_VALUE query = """ MATCH p = (n:Node) @@ -42,7 +43,7 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, WITH full_path, node, attribute_value, value_relationship WITH full_path, node, attribute_value, value_relationship WHERE all(r in relationships(full_path) WHERE r.status = "active") - AND (attribute_value IS NULL OR attribute_value = "NULL") + AND (attribute_value IS NULL OR attribute_value = $null_value) """ % {"branch_filter": branch_filter} self.add_to_query(query) diff --git a/backend/infrahub/core/validators/attribute/regex.py b/backend/infrahub/core/validators/attribute/regex.py index 028c9ac6da..881a071f57 100644 --- a/backend/infrahub/core/validators/attribute/regex.py +++ b/backend/infrahub/core/validators/attribute/regex.py @@ -2,7 +2,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional -from infrahub.core.constants import 
PathType +from infrahub.core.constants import NULL_VALUE, PathType from infrahub.core.path import DataPath, GroupedDataPaths from ..interface import ConstraintCheckerInterface @@ -25,7 +25,7 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, self.params["node_kind"] = self.node_schema.kind self.params["attr_name"] = self.attribute_schema.name self.params["attr_value_regex"] = self.attribute_schema.regex - + self.params["null_value"] = NULL_VALUE query = """ MATCH p = (n:Node) WHERE $node_kind IN LABELS(n) @@ -43,7 +43,7 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Dict[str, WITH full_path, node, attribute_value, value_relationship WITH full_path, node, attribute_value, value_relationship WHERE all(r in relationships(full_path) WHERE r.status = "active") - AND attribute_value <> "NULL" + AND attribute_value <> $null_value AND NOT attribute_value =~ $attr_value_regex """ % {"branch_filter": branch_filter} diff --git a/backend/infrahub/core/validators/determiner.py b/backend/infrahub/core/validators/determiner.py index bc2d4297bb..c3cccfb3d8 100644 --- a/backend/infrahub/core/validators/determiner.py +++ b/backend/infrahub/core/validators/determiner.py @@ -8,9 +8,7 @@ from infrahub.core.diff.model import DiffElementType from infrahub.core.models import SchemaUpdateConstraintInfo from infrahub.core.path import SchemaPath -from infrahub.core.schema.attribute_schema import AttributeSchema -from infrahub.core.schema.generic_schema import GenericSchema -from infrahub.core.schema.node_schema import NodeSchema +from infrahub.core.schema import AttributeSchema, MainSchemaTypes from infrahub.core.schema.relationship_schema import RelationshipSchema from infrahub.core.schema_manager import SchemaBranch from infrahub.core.validators import CONSTRAINT_VALIDATOR_MAP @@ -87,9 +85,7 @@ async def get_constraints( return validated_constraints - async def _get_constraints_for_one_schema( - self, schema: Union[NodeSchema, GenericSchema] - ) -> list[SchemaUpdateConstraintInfo]: + async def _get_constraints_for_one_schema(self, schema: MainSchemaTypes) -> list[SchemaUpdateConstraintInfo]: constraints: list[SchemaUpdateConstraintInfo] = [] constraints.extend(await self._get_attribute_constraints_for_one_schema(schema=schema)) constraints.extend(await self._get_relationship_constraints_for_one_schema(schema=schema)) @@ -102,7 +98,7 @@ async def _get_all_property_constraints(self) -> list[SchemaUpdateConstraintInfo return constraints async def _get_property_constraints_for_one_schema( - self, schema: Union[NodeSchema, GenericSchema] + self, schema: MainSchemaTypes ) -> list[SchemaUpdateConstraintInfo]: constraints: list[SchemaUpdateConstraintInfo] = [] for prop_name, prop_field_info in schema.model_fields.items(): @@ -132,7 +128,7 @@ async def _get_property_constraints_for_one_schema( return constraints async def _get_attribute_constraints_for_one_schema( - self, schema: Union[NodeSchema, GenericSchema] + self, schema: MainSchemaTypes ) -> list[SchemaUpdateConstraintInfo]: constraints: list[SchemaUpdateConstraintInfo] = [] for field_name in schema.attribute_names: @@ -144,7 +140,7 @@ async def _get_attribute_constraints_for_one_schema( return constraints async def _get_relationship_constraints_for_one_schema( - self, schema: Union[NodeSchema, GenericSchema] + self, schema: MainSchemaTypes ) -> list[SchemaUpdateConstraintInfo]: constraints: list[SchemaUpdateConstraintInfo] = [] for field_name in schema.relationship_names: @@ -156,7 +152,7 @@ async def 
_get_relationship_constraints_for_one_schema( return constraints async def _get_constraints_for_one_field( - self, schema: Union[NodeSchema, GenericSchema], field: Union[AttributeSchema, RelationshipSchema] + self, schema: MainSchemaTypes, field: Union[AttributeSchema, RelationshipSchema] ) -> list[SchemaUpdateConstraintInfo]: constraints: list[SchemaUpdateConstraintInfo] = [] for prop_name, prop_field_info in field.model_fields.items(): diff --git a/backend/infrahub/core/validators/interface.py b/backend/infrahub/core/validators/interface.py index 56f03a8594..02db3577bd 100644 --- a/backend/infrahub/core/validators/interface.py +++ b/backend/infrahub/core/validators/interface.py @@ -9,13 +9,10 @@ class ConstraintCheckerInterface(ABC): @property @abstractmethod - def name(self) -> str: - ... + def name(self) -> str: ... @abstractmethod - def supports(self, request: SchemaConstraintValidatorRequest) -> bool: - ... + def supports(self, request: SchemaConstraintValidatorRequest) -> bool: ... @abstractmethod - async def check(self, request: SchemaConstraintValidatorRequest) -> List[GroupedDataPaths]: - ... + async def check(self, request: SchemaConstraintValidatorRequest) -> List[GroupedDataPaths]: ... diff --git a/backend/infrahub/core/validators/uniqueness/checker.py b/backend/infrahub/core/validators/uniqueness/checker.py index 462f26f4d0..1436ac66e8 100644 --- a/backend/infrahub/core/validators/uniqueness/checker.py +++ b/backend/infrahub/core/validators/uniqueness/checker.py @@ -6,7 +6,11 @@ from infrahub.core.branch import Branch from infrahub.core.path import DataPath, GroupedDataPaths from infrahub.core.query import QueryResult -from infrahub.core.schema import AttributeSchema, GenericSchema, NodeSchema, RelationshipSchema +from infrahub.core.schema import ( + AttributeSchema, + MainSchemaTypes, + RelationshipSchema, +) from infrahub.core.validators.uniqueness.index import UniquenessQueryResultsIndex from infrahub.database import InfrahubDatabase @@ -24,7 +28,7 @@ def get_attribute_path_from_string( - path: str, schema: Union[NodeSchema, GenericSchema] + path: str, schema: MainSchemaTypes ) -> tuple[Union[AttributeSchema, RelationshipSchema], Optional[str]]: if "__" in path: name, property_name = path.split("__") @@ -61,7 +65,6 @@ async def get_branch(self) -> Branch: async def check(self, request: SchemaConstraintValidatorRequest) -> list[GroupedDataPaths]: schema_objects = [request.node_schema] - non_unique_nodes_lists = await asyncio.gather(*[self.check_one_schema(schema) for schema in schema_objects]) grouped_data_paths = GroupedDataPaths() @@ -69,7 +72,7 @@ async def check(self, request: SchemaConstraintValidatorRequest) -> list[Grouped self.generate_data_paths(non_unique_node, grouped_data_paths) return [grouped_data_paths] - async def build_query_request(self, schema: Union[NodeSchema, GenericSchema]) -> NodeUniquenessQueryRequest: + async def build_query_request(self, schema: MainSchemaTypes) -> NodeUniquenessQueryRequest: unique_attr_paths = { QueryAttributePath(attribute_name=attr_schema.name, property_name="value") for attr_schema in schema.unique_attributes @@ -105,10 +108,13 @@ async def build_query_request(self, schema: Union[NodeSchema, GenericSchema]) -> async def check_one_schema( self, - schema: Union[NodeSchema, GenericSchema], + schema: MainSchemaTypes, ) -> list[NonUniqueNode]: query_request = await self.build_query_request(schema) + if not query_request: + return [] + query = await NodeUniqueAttributeConstraintQuery.init( db=self.db, branch=await 
self.get_branch(), query_request=query_request ) @@ -117,13 +123,13 @@ async def check_one_schema( return await self._parse_results(schema=schema, query_results=query_results.results) - async def _parse_results( - self, schema: Union[NodeSchema, GenericSchema], query_results: list[QueryResult] - ) -> list[NonUniqueNode]: + async def _parse_results(self, schema: MainSchemaTypes, query_results: list[QueryResult]) -> list[NonUniqueNode]: relationship_schema_by_identifier = {rel.identifier: rel for rel in schema.relationships} all_non_unique_nodes: list[NonUniqueNode] = [] results_index = UniquenessQueryResultsIndex(query_results=query_results) - path_groups = schema.get_unique_constraint_schema_attribute_paths(include_unique_attributes=True) + path_groups = schema.get_unique_constraint_schema_attribute_paths( + include_unique_attributes=True, branch=await self.get_branch() + ) for constraint_group in path_groups: non_unique_nodes_by_id: dict[str, NonUniqueNode] = {} constraint_group_relationship_identifiers = [ diff --git a/backend/infrahub/core/validators/uniqueness/index.py b/backend/infrahub/core/validators/uniqueness/index.py index 6ffc0e4226..5465040225 100644 --- a/backend/infrahub/core/validators/uniqueness/index.py +++ b/backend/infrahub/core/validators/uniqueness/index.py @@ -3,6 +3,8 @@ from collections import defaultdict from typing import TYPE_CHECKING, Any, Iterable, Optional +from infrahub.core.constants import NULL_VALUE + if TYPE_CHECKING: from infrahub.core.query import QueryResult from infrahub.core.schema import SchemaAttributePath, SchemaAttributePathValue @@ -47,13 +49,13 @@ def __init__(self, query_results: Iterable[QueryResult], exclude_node_ids: Optio if relationship_identifier: if relationship_identifier not in self._relationship_index: self._relationship_index[relationship_identifier] = defaultdict(set) - if attr_value and node_id: + if attr_value and attr_value != NULL_VALUE and node_id: self._relationship_index[relationship_identifier][attr_value].add(node_id) self._node_index[node_id][relationship_identifier] = attr_value elif attr_name: if attr_name not in self._attribute_index: self._attribute_index[attr_name] = defaultdict(set) - if attr_value and node_id: + if attr_value and attr_value != NULL_VALUE and node_id: self._attribute_index[attr_name][attr_value].add(node_id) self._node_index[node_id][attr_name] = attr_value @@ -84,9 +86,16 @@ def get_node_ids_for_path_group(self, path_group: list[SchemaAttributePath]) -> else: continue for node_id, attribute_details in self._node_index.items(): + node_includes_none = False grouped_key = GroupedIndexKey() for element_key in key_group: - grouped_key.add_key((element_key, attribute_details.get(element_key))) + element_value = attribute_details.get(element_key) + if element_value is None: + node_includes_none = True + break + grouped_key.add_key((element_key, element_value)) + if node_includes_none: + continue if grouped_key not in node_ids_by_attr_name_and_value: node_ids_by_attr_name_and_value[grouped_key] = set() node_ids_by_attr_name_and_value[grouped_key].add(node_id) diff --git a/backend/infrahub/core/validators/uniqueness/model.py b/backend/infrahub/core/validators/uniqueness/model.py index 1f4e9aa948..7513a8bebf 100644 --- a/backend/infrahub/core/validators/uniqueness/model.py +++ b/backend/infrahub/core/validators/uniqueness/model.py @@ -3,7 +3,7 @@ from pydantic import BaseModel, Field from infrahub.core.constants import PathType -from infrahub.core.schema import AttributeSchema, GenericSchema, NodeSchema, 
RelationshipSchema +from infrahub.core.schema import AttributeSchema, MainSchemaTypes, RelationshipSchema class QueryRelationshipAttributePath(BaseModel): @@ -39,6 +39,11 @@ class NodeUniquenessQueryRequest(BaseModel): unique_attribute_paths: Set[QueryAttributePath] = Field(default_factory=set) relationship_attribute_paths: Set[QueryRelationshipAttributePath] = Field(default_factory=set) + def __bool__(self) -> bool: + if self.unique_attribute_paths or self.relationship_attribute_paths: + return True + return False + class NonUniqueRelatedAttribute(BaseModel): relationship: RelationshipSchema @@ -93,7 +98,7 @@ def property_name(self) -> str: class NonUniqueNode(BaseModel): - node_schema: Union[NodeSchema, GenericSchema] + node_schema: MainSchemaTypes node_id: str non_unique_attributes: List[NonUniqueAttribute] = Field(default_factory=list) non_unique_related_attributes: List[NonUniqueRelatedAttribute] = Field(default_factory=list) diff --git a/backend/infrahub/core/validators/uniqueness/query.py b/backend/infrahub/core/validators/uniqueness/query.py index b2eb6dc430..434f84b074 100644 --- a/backend/infrahub/core/validators/uniqueness/query.py +++ b/backend/infrahub/core/validators/uniqueness/query.py @@ -35,7 +35,8 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Any) -> N items="relationships(active_path)", item_names=["branch", "branch_level"] ) - attr_paths_param, attr_paths_with_value_param = [], [] + attribute_names = set() + attr_paths, attr_paths_with_value = [], [] for attr_path in self.query_request.unique_attribute_paths: try: property_rel_name = self.attribute_property_map[attr_path.property_name or "value"] @@ -43,15 +44,18 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Any) -> N raise ValueError( f"{attr_path.property_name} is not a valid property for a uniqueness constraint" ) from exc + attribute_names.add(attr_path.attribute_name) if attr_path.value: - attr_paths_with_value_param.append((attr_path.attribute_name, property_rel_name, attr_path.value)) + attr_paths_with_value.append((attr_path.attribute_name, property_rel_name, attr_path.value)) else: - attr_paths_param.append((attr_path.attribute_name, property_rel_name)) + attr_paths.append((attr_path.attribute_name, property_rel_name)) + relationship_names = set() relationship_attr_paths = [] relationship_only_attr_paths = [] relationship_attr_paths_with_value = [] for rel_path in self.query_request.relationship_attribute_paths: + relationship_names.add(rel_path.identifier) if rel_path.attribute_name and rel_path.value: relationship_attr_paths_with_value.append( (rel_path.identifier, rel_path.attribute_name, rel_path.value) @@ -60,11 +64,25 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Any) -> N relationship_attr_paths.append((rel_path.identifier, rel_path.attribute_name)) else: relationship_only_attr_paths.append(rel_path.identifier) + + if ( + not attr_paths + and not attr_paths_with_value + and not relationship_attr_paths + and not relationship_attr_paths_with_value + and not relationship_only_attr_paths + ): + raise ValueError( + "The NodeUniquenessQueryRequest provided for node_constraints_uniqueness doesn't have enough information to continue" + ) + self.params.update( { "node_kind": self.query_request.kind, - "attr_paths": attr_paths_param, - "attr_paths_with_value": attr_paths_with_value_param, + "attr_paths": attr_paths, + "attr_paths_with_value": attr_paths_with_value, + "attribute_names": list(attribute_names), + "relationship_names": 
list(relationship_names), "relationship_attr_paths": relationship_attr_paths, "relationship_attr_paths_with_value": relationship_attr_paths_with_value, "relationship_only_attr_paths": relationship_only_attr_paths, @@ -72,27 +90,48 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Any) -> N } ) + attr_paths_subquery = """ + WITH start_node + MATCH attr_path = (start_node)-[:HAS_ATTRIBUTE]->(attr:Attribute)-[r:HAS_VALUE]->(attr_value:AttributeValue) + WHERE attr.name in $attribute_names + AND ([attr.name, type(r)] in $attr_paths + OR [attr.name, type(r), attr_value.value] in $attr_paths_with_value) + RETURN attr_path as potential_path, NULL as rel_identifier, attr.name as potential_attr, attr_value.value as potential_attr_value + """ + + relationship_attr_paths_with_value_subquery = """ + WITH start_node + MATCH rel_path = (start_node)-[:IS_RELATED]-(relationship_node:Relationship)-[:IS_RELATED]-(related_n:Node)-[:HAS_ATTRIBUTE]->(rel_attr:Attribute)-[:HAS_VALUE]->(rel_attr_value:AttributeValue) + WHERE relationship_node.name in $relationship_names + AND ([relationship_node.name, rel_attr.name] in $relationship_attr_paths + OR [relationship_node.name, rel_attr.name, rel_attr_value.value] in $relationship_attr_paths_with_value) + RETURN rel_path as potential_path, relationship_node.name as rel_identifier, rel_attr.name as potential_attr, rel_attr_value.value as potential_attr_value + """ + + relationship_only_attr_paths_subquery = """ + WITH start_node + MATCH rel_path = (start_node)-[:IS_RELATED]-(relationship_node:Relationship)-[:IS_RELATED]-(related_n:Node) + WHERE relationship_node.name in $relationship_only_attr_paths + RETURN rel_path as potential_path, relationship_node.name as rel_identifier, "id" as potential_attr, related_n.uuid as potential_attr_value + """ + + select_subqueries = [] + if attr_paths or attr_paths_with_value: + select_subqueries.append(attr_paths_subquery) + if relationship_attr_paths_with_value or relationship_attr_paths: + select_subqueries.append(relationship_attr_paths_with_value_subquery) + if relationship_only_attr_paths: + select_subqueries.append(relationship_only_attr_paths_subquery) + + select_subqueries_str = "UNION".join(select_subqueries) + # ruff: noqa: E501 query = """ // group by node - MATCH (start_node:Node) - WHERE $node_kind IN LABELS(start_node) + MATCH (start_node:%(node_kind)s) // get attributes for node and its relationships CALL { - WITH start_node - MATCH attr_path = (start_node:Node)-[:HAS_ATTRIBUTE]->(attr:Attribute)-[r:HAS_VALUE]->(attr_value:AttributeValue) - WHERE [attr.name, type(r)] in $attr_paths OR [attr.name, type(r), attr_value.value] in $attr_paths_with_value - RETURN attr_path as potential_path, NULL as rel_identifier, attr.name as potential_attr, attr_value.value as potential_attr_value - UNION - WITH start_node - MATCH rel_path = (start_node:Node)-[:IS_RELATED]-(relationship_node:Relationship)-[:IS_RELATED]-(related_n:Node)-[:HAS_ATTRIBUTE]->(rel_attr:Attribute)-[:HAS_VALUE]->(rel_attr_value:AttributeValue) - WHERE [relationship_node.name, rel_attr.name] in $relationship_attr_paths OR [relationship_node.name, rel_attr.name, rel_attr_value.value] in $relationship_attr_paths_with_value - RETURN rel_path as potential_path, relationship_node.name as rel_identifier, rel_attr.name as potential_attr, rel_attr_value.value as potential_attr_value - UNION - WITH start_node - MATCH rel_path = (start_node:Node)-[:IS_RELATED]-(relationship_node:Relationship)-[:IS_RELATED]-(related_n:Node) - WHERE 
relationship_node.name in $relationship_only_attr_paths - RETURN rel_path as potential_path, relationship_node.name as rel_identifier, "id" as potential_attr, related_n.uuid as potential_attr_value + %(select_subqueries_str)s } CALL { WITH potential_path @@ -158,7 +197,13 @@ async def query_init(self, db: InfrahubDatabase, *args: Any, **kwargs: Any) -> N attr_name, attr_value, relationship_identifier - """ % {"branch_filter": branch_filter, "from_times": from_times, "branch_name_and_level": branch_name_and_level} + """ % { + "node_kind": self.query_request.kind, + "select_subqueries_str": select_subqueries_str, + "branch_filter": branch_filter, + "from_times": from_times, + "branch_name_and_level": branch_name_and_level, + } self.add_to_query(query) self.return_labels = [ diff --git a/backend/infrahub/database/__init__.py b/backend/infrahub/database/__init__.py index c751e689ee..f5556dc721 100644 --- a/backend/infrahub/database/__init__.py +++ b/backend/infrahub/database/__init__.py @@ -1,6 +1,7 @@ from __future__ import annotations import asyncio +import random from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type from neo4j import ( @@ -14,6 +15,7 @@ Record, ) from neo4j.exceptions import ClientError, Neo4jError, ServiceUnavailable, TransientError +from opentelemetry import trace from typing_extensions import Self from infrahub import config @@ -22,11 +24,15 @@ from infrahub.utils import InfrahubStringEnum from .constants import DatabaseType +from .memgraph import DatabaseManagerMemgraph from .metrics import QUERY_EXECUTION_METRICS, TRANSACTION_RETRIES +from .neo4j import DatabaseManagerNeo4j if TYPE_CHECKING: from types import TracebackType + from .manager import DatabaseManager + validated_database = {} log = get_logger() @@ -51,6 +57,7 @@ def __init__( driver: AsyncDriver, mode: InfrahubDatabaseMode = InfrahubDatabaseMode.DRIVER, db_type: Optional[DatabaseType] = None, + db_manager: Optional[DatabaseManager] = None, session: Optional[AsyncSession] = None, session_mode: InfrahubDatabaseSessionMode = InfrahubDatabaseSessionMode.WRITE, transaction: Optional[AsyncTransaction] = None, @@ -66,6 +73,14 @@ def __init__( else: self.db_type = config.SETTINGS.database.db_type + if db_manager: + self.manager = db_manager + self.manager.db = self + elif self.db_type == DatabaseType.NEO4J: + self.manager = DatabaseManagerNeo4j(db=self) + elif self.db_type == DatabaseType.MEMGRAPH: + self.manager = DatabaseManagerMemgraph(db=self) + @property def is_session(self): if self._mode == InfrahubDatabaseMode.SESSION: @@ -85,13 +100,18 @@ def start_session(self, read_only: bool = False) -> InfrahubDatabase: session_mode = InfrahubDatabaseSessionMode.READ return self.__class__( - mode=InfrahubDatabaseMode.SESSION, db_type=self.db_type, driver=self._driver, session_mode=session_mode + mode=InfrahubDatabaseMode.SESSION, + db_type=self.db_type, + db_manager=self.manager, + driver=self._driver, + session_mode=session_mode, ) def start_transaction(self) -> InfrahubDatabase: return self.__class__( mode=InfrahubDatabaseMode.TRANSACTION, db_type=self.db_type, + db_manager=self.manager, driver=self._driver, session=self._session, session_mode=self._session_mode, @@ -167,17 +187,23 @@ async def close(self): async def execute_query( self, query: str, params: Optional[Dict[str, Any]] = None, name: Optional[str] = "undefined" ) -> List[Record]: - with QUERY_EXECUTION_METRICS.labels(str(self._session_mode), name).time(): - response = await self.run_query(query=query, params=params) - return [item async 
for item in response] + with trace.get_tracer(__name__).start_as_current_span("execute_db_query") as span: + span.set_attribute("query", query) + + with QUERY_EXECUTION_METRICS.labels(str(self._session_mode), name).time(): + response = await self.run_query(query=query, params=params) + return [item async for item in response] async def execute_query_with_metadata( self, query: str, params: Optional[Dict[str, Any]] = None, name: Optional[str] = "undefined" ) -> Tuple[List[Record], Dict[str, Any]]: - with QUERY_EXECUTION_METRICS.labels(str(self._session_mode), name).time(): - response = await self.run_query(query=query, params=params) - results = [item async for item in response] - return results, response._metadata or {} + with trace.get_tracer(__name__).start_as_current_span("execute_db_query_with_metadata") as span: + span.set_attribute("query", query) + + with QUERY_EXECUTION_METRICS.labels(str(self._session_mode), name).time(): + response = await self.run_query(query=query, params=params) + results = [item async for item in response] + return results, response._metadata or {} async def run_query(self, query: str, params: Optional[Dict[str, Any]] = None) -> AsyncResult: if self.is_transaction: @@ -268,9 +294,14 @@ async def wrapper(*args, **kwargs): try: return await func(*args, **kwargs) except TransientError as exc: - log.info(f"Retrying database transaction, attempt {attempt}/{config.SETTINGS.database.retry_limit}") + retry_time: float = random.randrange(100, 500) / 1000 + log.info( + f"Retrying database transaction, attempt {attempt}/{config.SETTINGS.database.retry_limit}", + retry_time=retry_time, + ) log.debug("database transaction failed", message=exc.message) TRANSACTION_RETRIES.labels(name).inc() + await asyncio.sleep(retry_time) if attempt == config.SETTINGS.database.retry_limit: raise diff --git a/backend/infrahub/database/analyzer.py b/backend/infrahub/database/analyzer.py new file mode 100644 index 0000000000..4252900c32 --- /dev/null +++ b/backend/infrahub/database/analyzer.py @@ -0,0 +1,207 @@ +import time +from collections import defaultdict +from dataclasses import dataclass +from pathlib import Path +from typing import Any, Dict, List, Optional, Tuple + +import matplotlib.pyplot as plt +import pandas as pd +from infrahub_sdk import Timestamp +from neo4j import Record + +# pylint: skip-file +from infrahub import config +from infrahub.database import InfrahubDatabase +from infrahub.log import get_logger + +log = get_logger() + + +@dataclass +class QueryMeasurement: + duration: float + query_name: str + start_time: float + memory: Optional[float] = None + index: Optional[int] = None + profile: bool = False + + +class QueryAnalyzer: + def __init__(self) -> None: + self._start_time: Optional[Timestamp] = None + self.name = "query_analyzer" + self.index = 0 + self.measurements: List[QueryMeasurement] = [] + self.count_per_query: Dict[str, int] = defaultdict(int) + self._df: Optional[pd.DataFrame] = None + self.measure_memory_usage: bool = False + self.sampling_memory_usage: int = 25 + self.output_location: Path = Path.cwd() + + @property + def start_time(self) -> Timestamp: + if self._start_time: + return self._start_time + raise ValueError("start_time hasn't been initialized yet") + + def create_directory(self) -> Path: + time_str = self.start_time.to_string() + for char in [":", "-", "."]: + time_str = time_str.replace(char, "_") + directory_name = f"{time_str}_{self.name}" + full_directory = self.output_location / directory_name + if not full_directory.exists(): +
full_directory.mkdir(parents=True) + return full_directory + + def start_tracking(self, name: Optional[str] = None) -> None: + self._start_time = Timestamp() + self.index = 0 + if name: + self.name = name + + def get_df(self) -> pd.DataFrame: + data = {} + for item in QueryMeasurement.__dataclass_fields__.keys(): + data[item] = [getattr(m, item) for m in self.measurements] + + return pd.DataFrame(data) + + def sample_memory(self, name: str) -> bool: + if not self._start_time or not self.measure_memory_usage: + return False + + if self.count_per_query[name] % self.sampling_memory_usage == 0: + return True + + return False + + def add_measurement(self, measurement: QueryMeasurement) -> None: + if not self._start_time: + return + + self.index += 1 + measurement.index = self.index + + self.measurements.append(measurement) + self.count_per_query[measurement.query_name] += 1 + + def create_graphs(self, prefix: Optional[str] = None) -> None: + df = self.get_df() + query_names = set(df["query_name"].tolist()) + + output_dir = self.create_directory() + + for query_name in query_names: + self.create_duration_graph( + query_name=query_name, metric="duration", prefix=self.name, output_dir=output_dir + ) + self.create_memory_graph(query_name=query_name, metric="memory", prefix=self.name, output_dir=output_dir) + + def create_duration_graph( + self, query_name: str, metric: str = "duration", prefix: Optional[str] = None, output_dir: Optional[Path] = None + ) -> None: + df = self.get_df() + df_query = df[(df["query_name"] == query_name) & (df["profile"] == False)] # noqa: E712 + + name = f"{query_name}_{metric}" + plt.figure(name) + + serie_name = f"{metric}_min_r10" + serie2 = df_query[metric].rolling(10).min().multiply(1000).round(2) + plt.plot(df_query.index, serie2, label=serie_name) + + plt.ylabel("msec", fontsize=15) + plt.title(f"Query - {query_name} | {metric}", fontsize=20) + plt.grid() + + file_name = f"{name}.png" + if prefix: + file_name = f"{prefix}_{name}.png" + + if output_dir: + plt.savefig(str(output_dir / file_name)) + else: + plt.savefig(f"{self.start_time.to_string()}_{file_name}") + + def create_memory_graph( + self, query_name: str, metric: str = "memory", prefix: Optional[str] = None, output_dir: Optional[Path] = None + ) -> None: + df = self.get_df() + df_query = df[(df["query_name"] == query_name) & (df["profile"] == True)] # noqa: E712 + + plt.figure(query_name) + + serie_name = f"{metric}" + serie1 = df_query[metric] + plt.plot(df_query.index, serie1, label=serie_name) + + plt.ylabel("memory", fontsize=15) + plt.title(f"Query - {query_name} | {metric}", fontsize=20) + + file_name = f"{query_name}_{metric}.png" + if prefix: + file_name = f"{prefix}_{query_name}_{metric}.png" + + if output_dir: + plt.savefig(str(output_dir / file_name)) + else: + plt.savefig(f"{self.start_time.to_string()}_{file_name}") + + +query_stats = QueryAnalyzer() + + +class InfrahubDatabaseAnalyzer(InfrahubDatabase): + async def execute_query( + self, query: str, params: Optional[Dict[str, Any]] = None, name: Optional[str] = "undefined" + ) -> List[Record]: + time_start = time.time() + if name and query_stats.sample_memory(name=name): + query = "PROFILE\n" + query + response, metadata = await super().execute_query_with_metadata(query, params, name) + duration_time = time.time() - time_start + query_stats.add_measurement( + QueryMeasurement( + duration=duration_time, + profile=True, + memory=metadata["profile"]["args"]["GlobalMemory"], + query_name=str(name), + start_time=time_start, + ) + ) + else: +
response = await super().execute_query(query, params, name) + duration_time = time.time() - time_start + query_stats.add_measurement( + QueryMeasurement(duration=duration_time, profile=False, query_name=str(name), start_time=time_start) + ) + + return response + + async def execute_query_with_metadata( + self, query: str, params: Optional[Dict[str, Any]] = None, name: Optional[str] = "undefined" + ) -> Tuple[List[Record], Dict[str, Any]]: + time_start = time.time() + if name and query_stats.sample_memory(name=name): + query = "PROFILE\n" + query + response, metadata = await super().execute_query_with_metadata(query, params, name) + duration_time = time.time() - time_start + query_stats.add_measurement( + QueryMeasurement( + duration=duration_time, + profile=True, + memory=metadata["profile"]["args"]["GlobalMemory"], + query_name=str(name), + start_time=time_start, + ) + ) + else: + response, metadata = await super().execute_query_with_metadata(query, params, name) + duration_time = time.time() - time_start + query_stats.add_measurement( + QueryMeasurement(duration=duration_time, profile=False, query_name=str(name), start_time=time_start) + ) + + return response, metadata diff --git a/backend/infrahub/database/constants.py b/backend/infrahub/database/constants.py index 77cb61670b..4b79526bda 100644 --- a/backend/infrahub/database/constants.py +++ b/backend/infrahub/database/constants.py @@ -4,3 +4,22 @@ class DatabaseType(str, Enum): NEO4J = "neo4j" MEMGRAPH = "memgraph" + + +class Neo4jRuntime(str, Enum): + DEFAULT = "default" + INTERPRETED = "interpreted" + SLOTTED = "slotted" + PIPELINED = "pipelined" + + +class IndexType(str, Enum): + TEXT = "text" + RANGE = "range" + LOOKUP = "lookup" + NOT_APPLICABLE = "not_applicable" + + +class EntityType(str, Enum): + NODE = "node" + RELATIONSHIP = "relationship" diff --git a/backend/infrahub/database/index.py b/backend/infrahub/database/index.py new file mode 100644 index 0000000000..f4d7b9fd8d --- /dev/null +++ b/backend/infrahub/database/index.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, List + +from pydantic import BaseModel + +from .constants import EntityType, IndexType # noqa: TCH001 + +if TYPE_CHECKING: + from infrahub.database import InfrahubDatabase + + +class IndexInfo(BaseModel): + name: str + label: str + properties: List[str] + type: IndexType + entity_type: EntityType + + +class IndexItem(BaseModel): + name: str + label: str + properties: List[str] + type: IndexType + + def get_add_query(self) -> str: + raise NotImplementedError() + + def get_drop_query(self) -> str: + raise NotImplementedError() + + +class IndexManagerBase(ABC): + def __init__(self, db: InfrahubDatabase): + self.db = db + + self.nodes: List[IndexItem] = [] + self.rels: List[IndexItem] = [] + self.initialized: bool = False + + def init(self, nodes: List[IndexItem], rels: List[IndexItem]) -> None: + self.nodes = nodes + self.rels = rels + self.initialized = True + + @property + def items(self) -> List[IndexItem]: + return self.nodes + self.rels + + async def add(self) -> None: + async with self.db.start_transaction() as dbt: + for item in self.items: + await dbt.execute_query(query=item.get_add_query(), params={}, name="index_add") + + async def drop(self) -> None: + async with self.db.start_transaction() as dbt: + for item in self.items: + await dbt.execute_query(query=item.get_drop_query(), params={}, name="index_drop") + + @abstractmethod + async def list(self) -> List[IndexInfo]: 
+ pass diff --git a/backend/infrahub/database/manager.py b/backend/infrahub/database/manager.py new file mode 100644 index 0000000000..7b70a1efaf --- /dev/null +++ b/backend/infrahub/database/manager.py @@ -0,0 +1,15 @@ +from __future__ import annotations + +from abc import ABC +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from . import InfrahubDatabase + from .index import IndexManagerBase + + +class DatabaseManager(ABC): + index: IndexManagerBase + + def __init__(self, db: InfrahubDatabase): + self.db = db diff --git a/backend/infrahub/database/memgraph.py b/backend/infrahub/database/memgraph.py new file mode 100644 index 0000000000..6a2e676eee --- /dev/null +++ b/backend/infrahub/database/memgraph.py @@ -0,0 +1,59 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, List + +from .constants import EntityType, IndexType +from .index import IndexInfo, IndexItem, IndexManagerBase +from .manager import DatabaseManager + +if TYPE_CHECKING: + from . import InfrahubDatabase + + +class IndexNodeMemgraph(IndexItem): + def get_add_query(self) -> str: + properties_str = ", ".join(self.properties) + return f"CREATE INDEX ON :{self.label}({properties_str})" + + def get_drop_query(self) -> str: + properties_str = ", ".join(self.properties) + return f"DROP INDEX ON :{self.label}({properties_str})" + + +class IndexManagerMemgraph(IndexManagerBase): + def init(self, nodes: List[IndexItem], rels: List[IndexItem]) -> None: + self.nodes = [IndexNodeMemgraph(**item.model_dump()) for item in nodes] + self.initialized = True + + async def add(self) -> None: + for item in self.items: + await self.db.execute_query(query=item.get_add_query(), params={}, name="index_add") + + async def drop(self) -> None: + for item in self.items: + await self.db.execute_query(query=item.get_drop_query(), params={}, name="index_drop") + + async def list(self) -> List[IndexInfo]: + query = "SHOW INDEX INFO" + records = await self.db.execute_query(query=query, params={}, name="index_show") + results = [] + for record in records: + if not record["label"]: + continue + results.append( + IndexInfo( + name="n/a", + label=record["label"], + properties=[record["property"]], + type=IndexType.NOT_APPLICABLE, + entity_type=EntityType.NODE, # Memgraph only support Node Indexes + ) + ) + + return results + + +class DatabaseManagerMemgraph(DatabaseManager): + def __init__(self, db: InfrahubDatabase): + super().__init__(db=db) + self.index = IndexManagerMemgraph(db=db) diff --git a/backend/infrahub/database/metrics.py b/backend/infrahub/database/metrics.py index 22501cf5fb..d7cf2d0772 100644 --- a/backend/infrahub/database/metrics.py +++ b/backend/infrahub/database/metrics.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from prometheus_client import Counter, Histogram METRIC_PREFIX = "infrahub_db" @@ -6,7 +8,7 @@ f"{METRIC_PREFIX}_query_execution_seconds", "Execution time to query the database", labelnames=["type", "query"], - buckets=[0.005, 0.025, 0.1, 0.5, 1], + buckets=[0.005, 0.01, 0.02, 0.03, 0.04, 0.05, 0.1, 0.5, 1], ) TRANSACTION_RETRIES = Counter( diff --git a/backend/infrahub/database/neo4j.py b/backend/infrahub/database/neo4j.py new file mode 100644 index 0000000000..bb7e55d750 --- /dev/null +++ b/backend/infrahub/database/neo4j.py @@ -0,0 +1,74 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, List + +from .constants import EntityType, IndexType +from .index import IndexInfo, IndexItem, IndexManagerBase +from .manager import DatabaseManager + +if TYPE_CHECKING: + 
from . import InfrahubDatabase + + +class IndexRelNeo4j(IndexItem): + @property + def _index_name(self) -> str: + return f"rel_{self.type.value.lower()}_{self.name}_{'_'.join(self.properties)}" + + def get_add_query(self) -> str: + properties_str = ", ".join([f"r.{prop}" for prop in self.properties]) + return ( + f"CREATE {self.type.value.upper()} INDEX {self._index_name} IF NOT EXISTS " + f"FOR ()-[r:{self.label}]-() ON ({properties_str})" + ) + + def get_drop_query(self) -> str: + return f"DROP INDEX {self._index_name} IF EXISTS" + + +class IndexNodeNeo4j(IndexItem): + @property + def _index_name(self) -> str: + return f"node_{self.type.value.lower()}_{self.name}_{'_'.join(self.properties)}" + + def get_add_query(self) -> str: + properties_str = ", ".join([f"n.{prop}" for prop in self.properties]) + return ( + f"CREATE {self.type.value.upper()} INDEX {self._index_name} IF NOT EXISTS " + f"FOR (n:{self.label}) ON ({properties_str})" + ) + + def get_drop_query(self) -> str: + return f"DROP INDEX {self._index_name} IF EXISTS" + + +class IndexManagerNeo4j(IndexManagerBase): + def init(self, nodes: List[IndexItem], rels: List[IndexItem]) -> None: + self.nodes = [IndexNodeNeo4j(**item.model_dump()) for item in nodes] + self.rels = [IndexRelNeo4j(**item.model_dump()) for item in rels] + self.initialized = True + + async def list(self) -> List[IndexInfo]: + query = "SHOW INDEXES" + records = await self.db.execute_query(query=query, params={}, name="index_show") + results = [] + for record in records: + if not record["labelsOrTypes"]: + continue + results.append( + IndexInfo( + name=record["name"], + label=", ".join(record["labelsOrTypes"]), + properties=record["properties"], + type=IndexType(str(record["type"]).lower()), + entity_type=EntityType(str(record["entityType"]).lower()), + ) + ) + + return results + + +class DatabaseManagerNeo4j(DatabaseManager): + def __init__(self, db: InfrahubDatabase): + super().__init__(db=db) + self.index = IndexManagerNeo4j(db=db) diff --git a/backend/infrahub/dependencies/builder/node/__init__.py b/backend/infrahub/dependencies/builder/node/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/infrahub/dependencies/builder/node/delete_validator.py b/backend/infrahub/dependencies/builder/node/delete_validator.py new file mode 100644 index 0000000000..9301efc094 --- /dev/null +++ b/backend/infrahub/dependencies/builder/node/delete_validator.py @@ -0,0 +1,8 @@ +from infrahub.core.node.delete_validator import NodeDeleteValidator +from infrahub.dependencies.interface import DependencyBuilder, DependencyBuilderContext + + +class NodeDeleteValidatorDependency(DependencyBuilder[NodeDeleteValidator]): + @classmethod + def build(cls, context: DependencyBuilderContext) -> NodeDeleteValidator: + return NodeDeleteValidator(db=context.db, branch=context.branch) diff --git a/backend/infrahub/dependencies/component/exceptions.py b/backend/infrahub/dependencies/component/exceptions.py index 7d72f0de72..59ca7ef311 100644 --- a/backend/infrahub/dependencies/component/exceptions.py +++ b/backend/infrahub/dependencies/component/exceptions.py @@ -1,2 +1 @@ -class UntrackedDependencyError(Exception): - ... +class UntrackedDependencyError(Exception): ... 
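For reference, here is a minimal usage sketch of the index manager API added above. Only the names visible in this diff (InfrahubDatabase.manager, IndexItem, IndexType, and the init/add/list methods) are taken from the change; the example index definition and the ensure_indexes helper are illustrative assumptions, not part of the codebase.

from typing import List

from infrahub.database import InfrahubDatabase
from infrahub.database.constants import IndexType
from infrahub.database.index import IndexItem

# Illustrative definition only; the real node/relationship index lists live elsewhere in the codebase.
NODE_INDEXES: List[IndexItem] = [
    IndexItem(name="attr_value", label="AttributeValue", properties=["value"], type=IndexType.RANGE),
]

async def ensure_indexes(db: InfrahubDatabase) -> None:
    # db.manager is selected in InfrahubDatabase.__init__ based on db_type (Neo4j or Memgraph),
    # so the same calls work against either backend.
    db.manager.index.init(nodes=NODE_INDEXES, rels=[])
    await db.manager.index.add()  # issues the backend-specific CREATE INDEX statements
    for index in await db.manager.index.list():
        print(index.label, index.properties, index.type.value)

The backend-specific init() converts each IndexItem into its Neo4j or Memgraph subclass, which is why plain IndexItem instances can be passed in.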
diff --git a/backend/infrahub/dependencies/interface.py b/backend/infrahub/dependencies/interface.py index 2cede446b0..40ce24e903 100644 --- a/backend/infrahub/dependencies/interface.py +++ b/backend/infrahub/dependencies/interface.py @@ -17,5 +17,4 @@ class DependencyBuilderContext: class DependencyBuilder(ABC, Generic[T]): @classmethod @abstractmethod - def build(cls, context: DependencyBuilderContext) -> T: - ... + def build(cls, context: DependencyBuilderContext) -> T: ... diff --git a/backend/infrahub/dependencies/registry.py b/backend/infrahub/dependencies/registry.py index 578df610d2..c584aa2821 100644 --- a/backend/infrahub/dependencies/registry.py +++ b/backend/infrahub/dependencies/registry.py @@ -7,6 +7,7 @@ from .builder.constraint.schema.attribute_uniqueness import SchemaAttributeUniqueConstraintDependency from .builder.constraint.schema.relationship_optional import SchemaRelationshipOptionalConstraintDependency from .builder.constraint.schema.uniqueness import SchemaUniquenessConstraintDependency +from .builder.node.delete_validator import NodeDeleteValidatorDependency from .component.registry import ComponentDependencyRegistry @@ -21,6 +22,7 @@ def build_component_registry() -> ComponentDependencyRegistry: component_registry.track_dependency(NodeGroupedUniquenessConstraintDependency) component_registry.track_dependency(RelationshipCountConstraintDependency) component_registry.track_dependency(NodeConstraintRunnerDependency) + component_registry.track_dependency(NodeDeleteValidatorDependency) return component_registry diff --git a/backend/infrahub/exceptions.py b/backend/infrahub/exceptions.py index 127241d605..635174dbe3 100644 --- a/backend/infrahub/exceptions.py +++ b/backend/infrahub/exceptions.py @@ -51,7 +51,7 @@ def __init__(self, errors: list): class RepositoryError(Error): def __init__(self, identifier, message=None): self.identifier = identifier - self.message = message or f"An error occured with GitRepository '{identifier}'." + self.message = message or f"An error occurred with GitRepository '{identifier}'." super().__init__(self.message) @@ -77,7 +77,7 @@ def __init__(self, name, message=None): class RepositoryFileNotFoundError(Error): HTTP_CODE: int = 404 - def __init__(self, repository_name, location, commit, message=None): + def __init__(self, repository_name: str, location: str, commit: str, message=None): self.repository_name = repository_name self.location = location self.commit = commit @@ -85,13 +85,24 @@ def __init__(self, repository_name, location, commit, message=None): super().__init__(self.message) +class FileOutOfRepositoryError(Error): + HTTP_CODE: int = 403 + + def __init__(self, repository_name: str, location: str, commit: str, message=None): + self.repository_name = repository_name + self.location = location + self.commit = commit + self.message = message or f"File not in repository '{repository_name}::{commit}::{location}'." + super().__init__(self.message) + + class TransformError(Error): def __init__(self, repository_name, location, commit, message=None): self.repository_name = repository_name self.location = location self.commit = commit self.message = ( - message or f"An error occured with the transform function at '{repository_name}::{commit}::{location}'." + message or f"An error occurred with the transform function at '{repository_name}::{commit}::{location}'." 
) super().__init__(self.message) @@ -104,7 +115,7 @@ def __init__(self, repository_name, location, class_name, commit, message=None): self.class_name = class_name self.message = ( message - or f"An error occured with the check function at '{repository_name}::{commit}::{location}::{class_name}'." + or f"An error occurred with the check function at '{repository_name}::{commit}::{location}::{class_name}'." ) super().__init__(self.message) @@ -218,6 +229,13 @@ def __init__(self, message: str): self.message = message +class MigrationError(Error): + HTTP_CODE = 502 + + def __init__(self, message: str): + self.message = message + + class ValidationError(Error): HTTP_CODE = 422 @@ -259,9 +277,7 @@ def __init__(self, message: str): self.message = message -class DiffRangeValidationError(DiffError): - ... +class DiffRangeValidationError(DiffError): ... -class DiffFromRequiredOnDefaultBranchError(DiffError): - ... +class DiffFromRequiredOnDefaultBranchError(DiffError): ... diff --git a/backend/infrahub/git/__init__.py b/backend/infrahub/git/__init__.py index f701575655..baad34c96b 100644 --- a/backend/infrahub/git/__init__.py +++ b/backend/infrahub/git/__init__.py @@ -5,6 +5,7 @@ ArtifactGenerateResult, CheckDefinitionInformation, GraphQLQueryInformation, + InfrahubReadOnlyRepository, InfrahubRepository, RepoFileInformation, TransformPythonInformation, @@ -18,6 +19,7 @@ "COMMITS_DIRECTORY_NAME", "TEMPORARY_DIRECTORY_NAME", "ArtifactGenerateResult", + "InfrahubReadOnlyRepository", "InfrahubRepository", "TransformPythonInformation", "CheckDefinitionInformation", diff --git a/backend/infrahub/git/actions.py b/backend/infrahub/git/actions.py index 702a5dedb3..b6a0345718 100644 --- a/backend/infrahub/git/actions.py +++ b/backend/infrahub/git/actions.py @@ -1,4 +1,5 @@ from infrahub import lock +from infrahub.core.constants import InfrahubKind from infrahub.exceptions import RepositoryError from infrahub.services import InfrahubServices @@ -7,7 +8,7 @@ async def sync_remote_repositories(service: InfrahubServices) -> None: branches = await service.client.branch.all() - repositories = await service.client.get_list_repositories(branches=branches, kind="CoreRepository") + repositories = await service.client.get_list_repositories(branches=branches, kind=InfrahubKind.REPOSITORY) for repo_name, repository_data in repositories.items(): async with service.task_report( diff --git a/backend/infrahub/git/repository.py b/backend/infrahub/git/repository.py index cbb0d595b0..b47b367555 100644 --- a/backend/infrahub/git/repository.py +++ b/backend/infrahub/git/repository.py @@ -25,8 +25,10 @@ InfrahubRepositoryConfig, ValidationError, ) +from infrahub_sdk.exceptions import ModuleImportError from infrahub_sdk.schema import ( InfrahubCheckDefinitionConfig, + InfrahubGeneratorDefinitionConfig, InfrahubJinja2TransformConfig, InfrahubPythonTransformConfig, ) @@ -44,6 +46,7 @@ CheckError, CommitNotFoundError, Error, + FileOutOfRepositoryError, InitializationError, RepositoryError, RepositoryFileNotFoundError, @@ -579,7 +582,7 @@ async def get_branches_from_graph(self) -> Dict[str, BranchInGraph]: branches = await self.client.branch.all() # TODO Need to optimize this query, right now we are querying everything unnecessarily - repositories = await self.client.get_list_repositories(branches=branches, kind="CoreRepository") + repositories = await self.client.get_list_repositories(branches=branches, kind=InfrahubKind.REPOSITORY) repository = repositories[self.name] for branch_name, branch in branches.items(): @@ -658,7 +661,7 @@ 
async def update_commit_value(self, branch_name: str, commit: str) -> bool: "Unable to update the value of the commit because a valid client hasn't been provided.", repository=self.name, ) - return + return False log.debug( f"Updating commit value to {commit} for branch {branch_name}", repository=self.name, branch=branch_name @@ -1103,7 +1106,7 @@ async def get_repository_config(self, branch_name: str, commit: str) -> Optional branch=branch_name, commit=commit, ) - return + return None config_file_content = config_file.read_text(encoding="utf-8") try: @@ -1115,7 +1118,7 @@ async def get_repository_config(self, branch_name: str, commit: str) -> Optional branch=branch_name, commit=commit, ) - return + return None # Convert data to a dictionary to avoid it being `None` if the yaml file is just an empty document data = data or {} @@ -1131,7 +1134,7 @@ async def get_repository_config(self, branch_name: str, commit: str) -> Optional branch=branch_name, commit=commit, ) - return + return None async def import_schema_files(self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig) -> None: # pylint: disable=too-many-branches @@ -1196,19 +1199,19 @@ async def import_schema_files(self, branch_name: str, commit: str, config_file: if has_error: return - _, errors = await self.client.schema.load(schemas=[item.content for item in schemas_data], branch=branch_name) + response = await self.client.schema.load(schemas=[item.content for item in schemas_data], branch=branch_name) - if errors: + if response.errors: error_messages = [] - if "detail" in errors: - for error in errors["detail"]: + if "detail" in response.errors: + for error in response.errors["detail"]: loc_str = [str(item) for item in error["loc"][1:]] error_messages.append(f"{'/'.join(loc_str)} | {error['msg']} ({error['type']})") - elif "error" in errors: - error_messages.append(f"{errors.get('error')}") + elif "error" in response.errors: + error_messages.append(f"{response.errors.get('error')}") else: - error_messages.append(f"{errors}") + error_messages.append(f"{response.errors}") await self.log.error( f"Unable to load the schema : {', '.join(error_messages)}", repository=self.name, commit=commit @@ -1372,6 +1375,109 @@ async def import_python_check_definitions( ) await check_definition_in_graph[check_name].delete() + async def import_generator_definitions( + self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig + ) -> None: + commit_wt = self.get_worktree(identifier=commit) + branch_wt = self.get_worktree(identifier=commit or branch_name) + + generators = [] + for generator in config_file.generator_definitions: + log.debug(self.name, import_type="generator_definition", file=generator.file_path) + file_info = extract_repo_file_information( + full_filename=os.path.join(branch_wt.directory, generator.file_path.as_posix()), + repo_directory=self.directory_root, + worktree_directory=commit_wt.directory, + ) + + try: + generator.load_class(import_root=self.directory_root, relative_path=file_info.relative_repo_path_dir) + generators.append(generator) + except ModuleImportError as exc: + await self.log.warning( + self.name, import_type="generator_definition", file=generator.file_path.as_posix(), error=str(exc) + ) + continue + + local_generator_definitions = {generator.name: generator for generator in generators} + generator_definition_in_graph = { + generator.name.value: generator + for generator in await self.client.filters( + kind=InfrahubKind.GENERATORDEFINITION, branch=branch_name, 
repository__ids=[str(self.id)] + ) + } + + present_in_both, only_graph, only_local = compare_lists( + list1=list(generator_definition_in_graph.keys()), list2=list(local_generator_definitions.keys()) + ) + + for generator_name in only_local: + await self.log.info( + f"New GeneratorDefinition {generator_name!r} found, creating", + repository=self.name, + branch=branch_name, + commit=commit, + ) + await self._create_generator_definition( + branch_name=branch_name, generator=local_generator_definitions[generator_name] + ) + + for generator_name in present_in_both: + if await self._generator_requires_update( + generator=local_generator_definitions[generator_name], + existing_generator=generator_definition_in_graph[generator_name], + branch_name=branch_name, + ): + await self.log.info( + f"New version of GeneratorDefinition {generator_name!r} found, updating", + repository=self.name, + branch=branch_name, + commit=commit, + ) + + await self._update_generator_definition( + generator=local_generator_definitions[generator_name], + existing_generator=generator_definition_in_graph[generator_name], + ) + + for generator_name in only_graph: + await self.log.info( + f"GeneratorDefinition '{generator_name!r}' not found locally, deleting", + repository=self.name, + branch=branch_name, + commit=commit, + ) + await generator_definition_in_graph[generator_name].delete() + + async def _generator_requires_update( + self, generator: InfrahubGeneratorDefinitionConfig, existing_generator: InfrahubNode, branch_name: str + ) -> bool: + graphql_queries = await self.client.filters( + kind=InfrahubKind.GRAPHQLQUERY, branch=branch_name, name__value=generator.query, populate_store=True + ) + if graphql_queries: + generator.query = graphql_queries[0].id + targets = await self.client.filters( + kind=InfrahubKind.GENERICGROUP, + branch=branch_name, + name__value=generator.targets, + populate_store=True, + fragment=True, + ) + if targets: + generator.targets = targets[0].id + + if ( # pylint: disable=too-many-boolean-expressions + existing_generator.query.id != generator.query + or existing_generator.file_path.value != str(generator.file_path) + or existing_generator.class_name.value != generator.class_name + or existing_generator.parameters.value != generator.parameters + or existing_generator.convert_query_response.value != generator.convert_query_response + or existing_generator.targets.id != generator.targets + ): + return True + return False + async def import_python_transforms( self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig ) -> None: @@ -1412,7 +1518,7 @@ async def import_python_transforms( transform_definition_in_graph = { transform.name.value: transform for transform in await self.client.filters( - kind="CoreTransformPython", branch=branch_name, repository__ids=[str(self.id)] + kind=InfrahubKind.TRANSFORMPYTHON, branch=branch_name, repository__ids=[str(self.id)] ) } @@ -1488,7 +1594,7 @@ async def get_check_definition( except Exception as exc: # pylint: disable=broad-exception-caught await self.log.error( - f"An error occured while processing the CheckDefinition {check_class.__name__} from {file_path} : {exc} ", + f"An error occurred while processing the CheckDefinition {check_class.__name__} from {file_path} : {exc} ", repository=self.name, branch=branch_name, ) @@ -1520,13 +1626,58 @@ async def get_python_transforms( except Exception as exc: # pylint: disable=broad-exception-caught await self.log.error( - f"An error occured while processing the PythonTransform {transform.name} from 
{file_path} : {exc} ", + f"An error occurred while processing the PythonTransform {transform.name} from {file_path} : {exc} ", repository=self.name, branch=branch_name, ) return transforms + async def _create_generator_definition( + self, generator: InfrahubGeneratorDefinitionConfig, branch_name: str + ) -> InfrahubNode: + data = generator.dict(exclude_none=True, exclude={"file_path"}) + data["file_path"] = str(generator.file_path) + data["repository"] = self.id + + schema = await self.client.schema.get(kind=InfrahubKind.GENERATORDEFINITION, branch=branch_name) + + create_payload = self.client.schema.generate_payload_create( + schema=schema, + data=data, + source=str(self.id), + is_protected=True, + ) + obj = await self.client.create(kind=InfrahubKind.GENERATORDEFINITION, branch=branch_name, **create_payload) + await obj.save() + + return obj + + async def _update_generator_definition( + self, + generator: InfrahubGeneratorDefinitionConfig, + existing_generator: InfrahubNode, + ) -> None: + if existing_generator.query.id != generator.query: + existing_generator.query = {"id": generator.query, "source": str(self.id), "is_protected": True} + + if existing_generator.class_name.value != generator.class_name: + existing_generator.class_name.value = generator.class_name + + if existing_generator.file_path.value != str(generator.file_path): + existing_generator.file_path.value = str(generator.file_path) + + if existing_generator.convert_query_response.value != generator.convert_query_response: + existing_generator.convert_query_response.value = generator.convert_query_response + + if existing_generator.parameters.value != generator.parameters: + existing_generator.parameters.value = generator.parameters + + if existing_generator.targets.id != generator.targets: + existing_generator.targets = {"id": generator.targets, "source": str(self.id), "is_protected": True} + + await existing_generator.save() + async def create_python_check_definition(self, branch_name: str, check: CheckDefinitionInformation) -> InfrahubNode: data = { "name": check.name, @@ -1591,7 +1742,7 @@ async def compare_python_check_definition( return True async def create_python_transform(self, branch_name: str, transform: TransformPythonInformation) -> InfrahubNode: - schema = await self.client.schema.get(kind="CoreTransformPython", branch=branch_name) + schema = await self.client.schema.get(kind=InfrahubKind.TRANSFORMPYTHON, branch=branch_name) data = { "name": transform.name, "repository": transform.repository, @@ -1606,7 +1757,7 @@ async def create_python_transform(self, branch_name: str, transform: TransformPy source=self.id, is_protected=True, ) - obj = await self.client.create(kind="CoreTransformPython", branch=branch_name, **create_payload) + obj = await self.client.create(kind=InfrahubKind.TRANSFORMPYTHON, branch=branch_name, **create_payload) await obj.save() return obj @@ -1639,6 +1790,7 @@ async def compare_python_transform( async def import_all_python_files(self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig): await self.import_python_check_definitions(branch_name=branch_name, commit=commit, config_file=config_file) await self.import_python_transforms(branch_name=branch_name, commit=commit, config_file=config_file) + await self.import_generator_definitions(branch_name=branch_name, commit=commit, config_file=config_file) async def find_files( self, @@ -1687,15 +1839,9 @@ async def find_graphql_queries(self, commit: str) -> List[GraphQLQueryInformatio async def get_file(self, commit: str, location: 
str) -> str: commit_worktree = self.get_commit_worktree(commit=commit) + path = self.validate_location(commit=commit, worktree_directory=commit_worktree.directory, file_path=location) - self.validate_location(commit=commit, worktree_directory=commit_worktree.directory, file_path=location) - - full_filename = os.path.join(commit_worktree.directory, location) - - with open(full_filename, "r", encoding="UTF-8") as obj: - content = obj.read() - - return content + return path.read_text(encoding="UTF-8") async def render_jinja2_template(self, commit: str, location: str, data: dict): commit_worktree = self.get_commit_worktree(commit=commit) @@ -1867,7 +2013,7 @@ async def artifact_generate( artifact_content = await self.render_jinja2_template( commit=commit, location=transformation.template_path.value, data=response ) - elif transformation.typename == "CoreTransformPython": + elif transformation.typename == InfrahubKind.TRANSFORMPYTHON: transformation_location = f"{transformation.file_path.value}::{transformation.class_name.value}" artifact_content = await self.execute_python_transform( branch_name=branch_name, @@ -1916,7 +2062,7 @@ async def render_artifact( artifact_content = await self.render_jinja2_template( commit=message.commit, location=message.transform_location, data=response ) - elif message.transform_type == "CoreTransformPython": + elif message.transform_type == InfrahubKind.TRANSFORMPYTHON: artifact_content = await self.execute_python_transform( branch_name=message.branch_name, commit=message.commit, @@ -1946,10 +2092,18 @@ async def render_artifact( await artifact.save() return ArtifactGenerateResult(changed=True, checksum=checksum, storage_id=storage_id, artifact_id=artifact.id) - def validate_location(self, commit: str, worktree_directory: str, file_path: str) -> None: - if not os.path.exists(os.path.join(worktree_directory, file_path)): + def validate_location(self, commit: str, worktree_directory: str, file_path: str) -> Path: + """Validate that a file is found inside a repository and return a corresponding `pathlib.Path` object for it.""" + path = Path(worktree_directory, file_path).resolve() + + if not str(path).startswith(worktree_directory): + raise FileOutOfRepositoryError(repository_name=self.name, commit=commit, location=file_path) + + if not path.exists(): raise RepositoryFileNotFoundError(repository_name=self.name, commit=commit, location=file_path) + return path + class InfrahubRepository(InfrahubRepositoryBase): """ diff --git a/backend/infrahub/git_credential/askpass.py b/backend/infrahub/git_credential/askpass.py index ca04d2d647..cce0adcfef 100644 --- a/backend/infrahub/git_credential/askpass.py +++ b/backend/infrahub/git_credential/askpass.py @@ -3,7 +3,7 @@ from typing import List, Optional import typer -from infrahub_sdk import InfrahubClientSync +from infrahub_sdk import Config, InfrahubClientSync from infrahub import config from infrahub.core.constants import InfrahubKind @@ -37,7 +37,7 @@ def askpass( if not request_type: raise typer.Exit(f"Unable to identify the request type in '{text}'") - client = InfrahubClientSync.init(address=config.SETTINGS.main.internal_address, insert_tracker=True) + client = InfrahubClientSync.init(config=Config(address=config.SETTINGS.main.internal_address, insert_tracker=True)) repo = client.get(kind=InfrahubKind.GENERICREPOSITORY, location__value=location) attr = getattr(repo, request_type) diff --git a/backend/infrahub/git_credential/helper.py b/backend/infrahub/git_credential/helper.py index 3023bd929d..c84e6a616f 100644 --- 
a/backend/infrahub/git_credential/helper.py +++ b/backend/infrahub/git_credential/helper.py @@ -1,7 +1,7 @@ import sys import typer -from infrahub_sdk import InfrahubClientSync +from infrahub_sdk import Config, InfrahubClientSync from infrahub import config from infrahub.core.constants import InfrahubKind @@ -50,7 +50,7 @@ def get( # FIXME currently we are only querying the repo in the main branch, # this will not work if a new repository is added in a branch first. - client = InfrahubClientSync.init(address=config.SETTINGS.main.internal_address, insert_tracker=True) + client = InfrahubClientSync.init(config=Config(address=config.SETTINGS.main.internal_address, insert_tracker=True)) repo = client.get(kind=InfrahubKind.GENERICREPOSITORY, location__value=location) if not repo: diff --git a/backend/infrahub/graphql/api/dependencies.py b/backend/infrahub/graphql/api/dependencies.py index b72131f8f1..71aa7574f5 100644 --- a/backend/infrahub/graphql/api/dependencies.py +++ b/backend/infrahub/graphql/api/dependencies.py @@ -8,7 +8,6 @@ from ..auth.query_permission_checker.default_checker import DefaultGraphQLPermissionChecker from ..auth.query_permission_checker.read_only_checker import ReadOnlyGraphQLPermissionChecker from ..auth.query_permission_checker.read_write_checker import ReadWriteGraphQLPermissionChecker -from .graphiql import make_graphiql_handler def get_anonymous_access_setting() -> bool: @@ -27,6 +26,4 @@ def build_graphql_query_permission_checker() -> GraphQLQueryPermissionChecker: def build_graphql_app(**kwargs: Any) -> InfrahubGraphQLApp: - if "on_get" not in kwargs: - kwargs["on_get"] = make_graphiql_handler() return InfrahubGraphQLApp(build_graphql_query_permission_checker(), **kwargs) diff --git a/backend/infrahub/graphql/api/endpoints.py b/backend/infrahub/graphql/api/endpoints.py index 6dff0c504c..1e2ff7e541 100644 --- a/backend/infrahub/graphql/api/endpoints.py +++ b/backend/infrahub/graphql/api/endpoints.py @@ -12,8 +12,8 @@ graphql_app = build_graphql_app() -router.add_route(path="/graphql", endpoint=graphql_app, methods=["GET", "POST", "OPTIONS"]) -router.add_route(path="/graphql/{branch_name:path}", endpoint=graphql_app, methods=["GET", "POST", "OPTIONS"]) +router.add_route(path="/graphql", endpoint=graphql_app, methods=["POST", "OPTIONS"]) +router.add_route(path="/graphql/{branch_name:path}", endpoint=graphql_app, methods=["POST", "OPTIONS"]) router.add_websocket_route(path="/graphql", endpoint=graphql_app) router.add_websocket_route(path="/graphql/{branch_name:str}", endpoint=graphql_app) diff --git a/backend/infrahub/graphql/api/graphiql.py b/backend/infrahub/graphql/api/graphiql.py deleted file mode 100644 index 52789b3df0..0000000000 --- a/backend/infrahub/graphql/api/graphiql.py +++ /dev/null @@ -1,104 +0,0 @@ -from typing import Callable - -from starlette.requests import Request - -from starlette.responses import HTMLResponse, Response - - -def make_graphiql_handler() -> Callable[[Request], Response]: - def handler(_: Request) -> Response: - return HTMLResponse(_GRAPHIQL_HTML) - - return handler - - -_GRAPHIQL_HTML = """ [deleted GraphiQL HTML template: page title "GraphiQL" and a "Loading..." placeholder div; the remaining markup was lost in extraction]
- - -""".strip() # noqa: B950 diff --git a/backend/infrahub/graphql/app.py b/backend/infrahub/graphql/app.py index 2f2110241b..02cfd0a344 100644 --- a/backend/infrahub/graphql/app.py +++ b/backend/infrahub/graphql/app.py @@ -5,7 +5,6 @@ from __future__ import annotations import asyncio -import json from inspect import isawaitable from typing import ( TYPE_CHECKING, @@ -22,6 +21,7 @@ cast, ) +import ujson from graphql import ( ExecutionContext, ExecutionResult, @@ -38,6 +38,7 @@ from graphql.utilities import ( get_operation_ast, ) +from opentelemetry import trace from starlette.datastructures import UploadFile from starlette.requests import HTTPConnection, Request from starlette.responses import JSONResponse, Response @@ -45,7 +46,6 @@ from infrahub.api.dependencies import api_key_scheme, cookie_auth_scheme, jwt_scheme from infrahub.auth import AccountSession, authentication_token -from infrahub.core import get_branch from infrahub.core.registry import registry from infrahub.core.timestamp import Timestamp from infrahub.exceptions import BranchNotFoundError, Error @@ -137,7 +137,7 @@ async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: # Retrieve the branch name from the request and validate that it exist in the database try: branch_name = request.path_params.get("branch_name", registry.default_branch) - branch = await get_branch(db=db, branch=branch_name) + branch = await registry.get_branch(db=db, branch=branch_name) except BranchNotFoundError as exc: response = JSONResponse({"errors": [exc.message]}, status_code=404) @@ -162,7 +162,7 @@ async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: async with db.start_session() as db: branch_name = websocket.path_params.get("branch_name", registry.default_branch) - branch = await get_branch(db=db, branch=branch_name) + branch = await registry.get_branch(db=db, branch=branch_name) await self._run_websocket_server(db=db, branch=branch, websocket=websocket) @@ -223,17 +223,20 @@ async def _handle_http_request( "query_id": "", } - with GRAPHQL_DURATION_METRICS.labels(**labels).time(): - result = await graphql( - schema=graphql_params.schema, - source=query, - context_value=graphql_params.context, - root_value=self.root_value, - middleware=self.middleware, - variable_values=variable_values, - operation_name=operation_name, - execution_context_class=self.execution_context_class, - ) + with trace.get_tracer(__name__).start_as_current_span("execute_graphql") as span: + span.set_attributes(labels) + + with GRAPHQL_DURATION_METRICS.labels(**labels).time(): + result = await graphql( + schema=graphql_params.schema, + source=query, + context_value=graphql_params.context, + root_value=self.root_value, + middleware=self.middleware, + variable_values=variable_values, + operation_name=operation_name, + execution_context_class=self.execution_context_class, + ) response: Dict[str, Any] = {"data": result.data} if result.errors: @@ -454,14 +457,14 @@ async def _get_operation_from_multipart( raise ValueError("Request body is not a valid multipart/form-data") try: - operations = json.loads(request_body.get("operations")) + operations = ujson.loads(request_body.get("operations")) except (TypeError, ValueError): raise ValueError("'operations' must be a valid JSON") if not isinstance(operations, (dict, list)): raise ValueError("'operations' field must be an Object or an Array") try: - name_path_map = json.loads(request_body.get("map")) + name_path_map = ujson.loads(request_body.get("map")) except (TypeError, ValueError): 
raise ValueError("'map' field must be a valid JSON") if not isinstance(name_path_map, dict): @@ -474,8 +477,8 @@ async def _get_operation_from_multipart( raise ValueError(f"File fields don't contain a valid UploadFile type for '{name}' mapping") for path in paths: - path = tuple(path.split(".")) - _inject_file_to_operations(operations, file, path) + path_components = tuple(path.split(".")) + _inject_file_to_operations(operations, file, path_components) return operations diff --git a/backend/infrahub/graphql/auth/query_permission_checker/interface.py b/backend/infrahub/graphql/auth/query_permission_checker/interface.py index 61cbb89d92..a5c62d8b41 100644 --- a/backend/infrahub/graphql/auth/query_permission_checker/interface.py +++ b/backend/infrahub/graphql/auth/query_permission_checker/interface.py @@ -6,9 +6,7 @@ class GraphQLQueryPermissionCheckerInterface(ABC): @abstractmethod - async def supports(self, account_session: AccountSession) -> bool: - ... + async def supports(self, account_session: AccountSession) -> bool: ... @abstractmethod - async def check(self, analyzed_query: InfrahubGraphQLQueryAnalyzer) -> None: - ... + async def check(self, analyzed_query: InfrahubGraphQLQueryAnalyzer) -> None: ... diff --git a/backend/infrahub/graphql/directives.py b/backend/infrahub/graphql/directives.py new file mode 100644 index 0000000000..99fe8e85d6 --- /dev/null +++ b/backend/infrahub/graphql/directives.py @@ -0,0 +1,19 @@ +from graphql.type.definition import GraphQLArgument, GraphQLList +from graphql.type.directives import DirectiveLocation, GraphQLDirective, specified_directives +from graphql.type.scalars import GraphQLString + +GraphQLExpand = GraphQLDirective( + name="expand", + locations=[ + DirectiveLocation.FIELD, + DirectiveLocation.FRAGMENT_SPREAD, + DirectiveLocation.INLINE_FRAGMENT, + ], + args={ + "exclude": GraphQLArgument(GraphQLList(GraphQLString), description="Exclude specific fields"), + }, + description="Expands a field to include Node defaults", +) + + +DIRECTIVES = list(specified_directives) + [GraphQLExpand] diff --git a/backend/infrahub/graphql/enums.py b/backend/infrahub/graphql/enums.py index b9396124e2..f5d07be3bd 100644 --- a/backend/infrahub/graphql/enums.py +++ b/backend/infrahub/graphql/enums.py @@ -1,15 +1,15 @@ import re -from typing import Any, List, Union +from typing import Any, List import graphene from infrahub.core.enums import generate_python_enum -from infrahub.core.schema import AttributeSchema, GenericSchema, NodeSchema +from infrahub.core.schema import AttributeSchema, MainSchemaTypes ENUM_NAME_REGEX = re.compile("[_a-zA-Z0-9]+") -def get_enum_attribute_type_name(node_schema: Union[NodeSchema, GenericSchema], attr_schema: AttributeSchema) -> str: +def get_enum_attribute_type_name(node_schema: MainSchemaTypes, attr_schema: AttributeSchema) -> str: return f"{node_schema.kind}{attr_schema.name.title()}" diff --git a/backend/infrahub/graphql/manager.py b/backend/infrahub/graphql/manager.py index 19791cd197..5c1c786126 100644 --- a/backend/infrahub/graphql/manager.py +++ b/backend/infrahub/graphql/manager.py @@ -8,15 +8,19 @@ from infrahub import config from infrahub.core.attribute import String from infrahub.core.constants import InfrahubKind, RelationshipKind -from infrahub.core.schema import AttributeSchema, GenericSchema, NodeSchema -from infrahub.graphql.mutations.attribute import BaseAttributeInput +from infrahub.core.schema import AttributeSchema, GenericSchema, MainSchemaTypes, NodeSchema, ProfileSchema +from infrahub.graphql.mutations.attribute 
import BaseAttributeCreate, BaseAttributeUpdate from infrahub.graphql.mutations.graphql_query import InfrahubGraphQLQueryMutation from infrahub.types import ATTRIBUTE_TYPES, InfrahubDataType, get_attribute_type +from .directives import DIRECTIVES from .enums import generate_graphql_enum, get_enum_attribute_type_name from .metrics import SCHEMA_GENERATE_GRAPHQL_METRICS from .mutations import ( InfrahubArtifactDefinitionMutation, + InfrahubIPAddressMutation, + InfrahubIPNamespaceMutation, + InfrahubIPPrefixMutation, InfrahubMutation, InfrahubProposedChangeMutation, InfrahubRepositoryMutation, @@ -62,7 +66,7 @@ class GraphqlMutations: delete: Type[InfrahubMutation] -def get_attr_kind(node_schema: Union[NodeSchema, GenericSchema], attr_schema: AttributeSchema) -> str: +def get_attr_kind(node_schema: MainSchemaTypes, attr_schema: AttributeSchema) -> str: if not config.SETTINGS.experimental_features.graphql_enums or not attr_schema.enum: return attr_schema.kind return get_enum_attribute_type_name(node_schema=node_schema, attr_schema=attr_schema) @@ -105,7 +109,12 @@ def generate( subscription = self.get_gql_subscription() if include_subscription else None graphene_schema = graphene.Schema( - query=query, mutation=mutation, subscription=subscription, types=types, auto_camelcase=False + query=query, + mutation=mutation, + subscription=subscription, + types=types, + auto_camelcase=False, + directives=DIRECTIVES, ) return graphene_schema.graphql_schema @@ -166,27 +175,29 @@ def _load_node_interface(self) -> None: ) self.generate_graphql_paginated_object(schema=node_interface_schema, edge=edged_interface, populate_cache=True) - def _load_all_enum_types(self, node_schemas: Iterable[Union[NodeSchema, GenericSchema]]) -> None: + def _load_all_enum_types(self, node_schemas: Iterable[MainSchemaTypes]) -> None: for node_schema in node_schemas: self._load_enum_type(node_schema=node_schema) - def _load_enum_type(self, node_schema: Union[NodeSchema, GenericSchema]) -> None: + def _load_enum_type(self, node_schema: MainSchemaTypes) -> None: for attr_schema in node_schema.attributes: if not attr_schema.enum: continue base_enum_name = get_enum_attribute_type_name(node_schema, attr_schema) enum_value_name = f"{base_enum_name}Value" - input_class_name = f"{base_enum_name}AttributeInput" - data_type_class_name = f"{base_enum_name}EnumType" graphene_enum = generate_graphql_enum(name=enum_value_name, options=attr_schema.enum) + data_type_class_name = f"{base_enum_name}EnumType" + default_value = None if attr_schema.default_value: for g_enum in graphene_enum: if g_enum.value == attr_schema.default_value: default_value = g_enum.name break + graphene_field = graphene.Field(graphene_enum, default_value=default_value) - input_class = type(input_class_name, (BaseAttributeInput,), {"value": graphene_field}) + create_class = type(f"{base_enum_name}AttributeCreate", (BaseAttributeCreate,), {"value": graphene_field}) + update_class = type(f"{base_enum_name}AttributeUpdate", (BaseAttributeUpdate,), {"value": graphene_field}) data_type_class: Type[InfrahubDataType] = type( data_type_class_name, (InfrahubDataType,), @@ -194,7 +205,8 @@ def _load_enum_type(self, node_schema: Union[NodeSchema, GenericSchema]) -> None "label": data_type_class_name, "graphql": graphene.String, "graphql_query": TextAttributeType, - "graphql_input": input_class, + "graphql_create": create_class, + "graphql_update": update_class, "graphql_filter": graphene_enum, "infrahub": String, }, @@ -221,8 +233,8 @@ def generate_object_types(self) -> None: # 
pylint: disable=too-many-branches,to self.generate_graphql_paginated_object(schema=node_schema, edge=edged_interface, populate_cache=True) # Define LineageSource and LineageOwner - data_source = self.get_type(name="LineageSource") - data_owner = self.get_type(name="LineageOwner") + data_source = self.get_type(name=InfrahubKind.LINEAGESOURCE) + data_owner = self.get_type(name=InfrahubKind.LINEAGEOWNER) self.define_relationship_property(data_source=data_source, data_owner=data_owner) relationship_property = self.get_type(name="RelationshipProperty") for data_type in ATTRIBUTE_TYPES.values(): @@ -234,7 +246,6 @@ def generate_object_types(self) -> None: # pylint: disable=too-many-branches,to for node_name, node_schema in full_schema.items(): if not isinstance(node_schema, GenericSchema): continue - node_interface = self.get_type(name=node_name) nested_edged_interface = self.generate_nested_interface_object( @@ -253,7 +264,7 @@ def generate_object_types(self) -> None: # pylint: disable=too-many-branches,to # Generate all GraphQL ObjectType, Nested, Paginated & NestedPaginated and store them in the registry for node_name, node_schema in full_schema.items(): - if isinstance(node_schema, NodeSchema): + if isinstance(node_schema, (NodeSchema, ProfileSchema)): node_type = self.generate_graphql_object(schema=node_schema, populate_cache=True) node_type_edged = self.generate_graphql_edged_object( schema=node_schema, node=node_type, populate_cache=True @@ -272,6 +283,13 @@ def generate_object_types(self) -> None: # pylint: disable=too-many-branches,to node_type = self.get_type(name=node_name) for rel in node_schema.relationships: + # Exclude hierarchical relationships, we will add them later + if ( + (isinstance(node_schema, NodeSchema) and node_schema.hierarchy) + or (isinstance(node_schema, GenericSchema) and node_schema.hierarchical) + ) and rel.name in ("parent", "children", "ancestors", "descendants"): + continue + peer_schema = self.schema.get(name=rel.peer, duplicate=False) if peer_schema.namespace == "Internal": continue @@ -293,12 +311,26 @@ def generate_object_types(self) -> None: # pylint: disable=too-many-branches,to peer_type, required=False, resolver=many_relationship_resolver, **peer_filters ) - if isinstance(node_schema, NodeSchema) and node_schema.hierarchy: - schema = self.schema.get(name=node_schema.hierarchy, duplicate=False) + if (isinstance(node_schema, NodeSchema) and node_schema.hierarchy) or ( + isinstance(node_schema, GenericSchema) and node_schema.hierarchical + ): + if isinstance(node_schema, NodeSchema): + schema = self.schema.get(name=node_schema.hierarchy, duplicate=False) # type: ignore[arg-type] + hierarchy_name = node_schema.hierarchy + else: + schema = node_schema + hierarchy_name = node_schema.kind peer_filters = self.generate_filters(schema=schema, top_level=False) - peer_type = self.get_type(name=f"NestedPaginated{node_schema.hierarchy}") + peer_type = self.get_type(name=f"NestedPaginated{hierarchy_name}") + peer_type_edge = self.get_type(name=f"NestedEdged{hierarchy_name}") + node_type._meta.fields["parent"] = graphene.Field( + peer_type_edge, required=False, resolver=single_relationship_resolver + ) + node_type._meta.fields["children"] = graphene.Field( + peer_type, required=False, resolver=many_relationship_resolver, **peer_filters + ) node_type._meta.fields["ancestors"] = graphene.Field( peer_type, required=False, resolver=ancestors_resolver, **peer_filters ) @@ -341,7 +373,7 @@ def generate_mutation_mixin(self) -> Type[object]: full_schema = 
self.schema.get_all(duplicate=False) for node_schema in full_schema.values(): - if not isinstance(node_schema, NodeSchema): + if not isinstance(node_schema, (NodeSchema, ProfileSchema)): continue if node_schema.namespace == "Internal": @@ -353,8 +385,15 @@ def generate_mutation_mixin(self) -> Type[object]: InfrahubKind.READONLYREPOSITORY: InfrahubRepositoryMutation, InfrahubKind.PROPOSEDCHANGE: InfrahubProposedChangeMutation, InfrahubKind.GRAPHQLQUERY: InfrahubGraphQLQueryMutation, + InfrahubKind.NAMESPACE: InfrahubIPNamespaceMutation, } - base_class = mutation_map.get(node_schema.kind, InfrahubMutation) + + if isinstance(node_schema, NodeSchema) and node_schema.is_ip_prefix(): + base_class = InfrahubIPPrefixMutation + elif isinstance(node_schema, NodeSchema) and node_schema.is_ip_address(): + base_class = InfrahubIPAddressMutation + else: + base_class = mutation_map.get(node_schema.kind, InfrahubMutation) mutations = self.generate_graphql_mutations(schema=node_schema, base_class=base_class) @@ -365,7 +404,9 @@ def generate_mutation_mixin(self) -> Type[object]: return type("MutationMixin", (object,), class_attrs) - def generate_graphql_object(self, schema: NodeSchema, populate_cache: bool = False) -> Type[InfrahubObject]: + def generate_graphql_object( + self, schema: Union[NodeSchema, ProfileSchema], populate_cache: bool = False + ) -> Type[InfrahubObject]: """Generate a GraphQL object Type from a Infrahub NodeSchema.""" interfaces: Set[Type[InfrahubObject]] = set() @@ -375,9 +416,10 @@ def generate_graphql_object(self, schema: NodeSchema, populate_cache: bool = Fal generic = self.get_type(name=generic_name) interfaces.add(generic) - if not schema.inherit_from or InfrahubKind.GENERICGROUP not in schema.inherit_from: - node_interface = self.get_type(name="CoreNode") - interfaces.add(node_interface) + if not isinstance(schema, ProfileSchema): + if not schema.inherit_from or InfrahubKind.GENERICGROUP not in schema.inherit_from: + node_interface = self.get_type(name=InfrahubKind.NODE) + interfaces.add(node_interface) meta_attrs = { "schema": schema, @@ -453,9 +495,12 @@ def define_relationship_property(self, data_source: Type[InfrahubObject], data_o self.set_type(name=type_name, graphql_type=relationship_property) - def generate_graphql_mutations(self, schema: NodeSchema, base_class: Type[InfrahubMutation]) -> GraphqlMutations: + def generate_graphql_mutations( + self, schema: Union[NodeSchema, ProfileSchema], base_class: Type[InfrahubMutation] + ) -> GraphqlMutations: graphql_mutation_create_input = self.generate_graphql_mutation_create_input(schema) graphql_mutation_update_input = self.generate_graphql_mutation_update_input(schema) + graphql_mutation_upsert_input = self.generate_graphql_mutation_upsert_input(schema) create = self.generate_graphql_mutation_create( schema=schema, base_class=base_class, input_type=graphql_mutation_create_input @@ -463,7 +508,7 @@ def generate_graphql_mutations(self, schema: NodeSchema, base_class: Type[Infrah upsert = self.generate_graphql_mutation_create( schema=schema, base_class=base_class, - input_type=graphql_mutation_create_input, + input_type=graphql_mutation_upsert_input, mutation_type="Upsert", ) update = self.generate_graphql_mutation_update( @@ -479,15 +524,17 @@ def generate_graphql_mutations(self, schema: NodeSchema, base_class: Type[Infrah return GraphqlMutations(create=create, update=update, upsert=upsert, delete=delete) @staticmethod - def generate_graphql_mutation_create_input(schema: NodeSchema) -> Type[graphene.InputObjectType]: + def 
generate_graphql_mutation_create_input( + schema: Union[NodeSchema, ProfileSchema], + ) -> Type[graphene.InputObjectType]: """Generate an InputObjectType Object from a Infrahub NodeSchema Example of Object Generated by this function: class StatusCreateInput(InputObjectType): id = String(required=False) - label = InputField(StringAttributeInput, required=True) - slug = InputField(StringAttributeInput, required=True) - description = InputField(StringAttributeInput, required=False) + label = InputField(StringAttributeCreate, required=True) + slug = InputField(StringAttributeCreate, required=True) + description = InputField(StringAttributeCreate, required=False) """ attrs: Dict[str, Union[graphene.String, graphene.InputField]] = {"id": graphene.String(required=False)} @@ -496,7 +543,7 @@ class StatusCreateInput(InputObjectType): continue attr_kind = get_attr_kind(schema, attr) - attr_type = get_attribute_type(kind=attr_kind).get_graphql_input() + attr_type = get_attribute_type(kind=attr_kind).get_graphql_create() # A Field is not required if explicitly indicated or if a default value has been provided required = not attr.optional if not attr.default_value else False @@ -504,7 +551,7 @@ class StatusCreateInput(InputObjectType): attrs[attr.name] = graphene.InputField(attr_type, required=required, description=attr.description) for rel in schema.relationships: - if rel.internal_peer: + if rel.internal_peer or rel.read_only: continue required = not rel.optional if rel.cardinality == "one": @@ -518,15 +565,17 @@ class StatusCreateInput(InputObjectType): return type(f"{schema.kind}CreateInput", (graphene.InputObjectType,), attrs) @staticmethod - def generate_graphql_mutation_update_input(schema: NodeSchema) -> Type[graphene.InputObjectType]: + def generate_graphql_mutation_update_input( + schema: Union[NodeSchema, ProfileSchema], + ) -> Type[graphene.InputObjectType]: """Generate an InputObjectType Object from a Infrahub NodeSchema Example of Object Generated by this function: class StatusUpdateInput(InputObjectType): id = String(required=True) - label = InputField(StringAttributeInput, required=False) - slug = InputField(StringAttributeInput, required=False) - description = InputField(StringAttributeInput, required=False) + label = InputField(StringAttributeUpdate, required=False) + slug = InputField(StringAttributeUpdate, required=False) + description = InputField(StringAttributeUpdate, required=False) """ attrs: Dict[str, Union[graphene.String, graphene.InputField]] = {"id": graphene.String(required=True)} @@ -534,11 +583,11 @@ class StatusUpdateInput(InputObjectType): if attr.read_only: continue attr_kind = get_attr_kind(schema, attr) - attr_type = get_attribute_type(kind=attr_kind).get_graphql_input() + attr_type = get_attribute_type(kind=attr_kind).get_graphql_update() attrs[attr.name] = graphene.InputField(attr_type, required=False, description=attr.description) for rel in schema.relationships: - if rel.internal_peer: + if rel.internal_peer or rel.read_only: continue if rel.cardinality == "one": attrs[rel.name] = graphene.InputField(RelatedNodeInput, required=False, description=rel.description) @@ -550,9 +599,50 @@ class StatusUpdateInput(InputObjectType): return type(f"{schema.kind}UpdateInput", (graphene.InputObjectType,), attrs) + @staticmethod + def generate_graphql_mutation_upsert_input( + schema: Union[NodeSchema, ProfileSchema], + ) -> Type[graphene.InputObjectType]: + """Generate an InputObjectType Object from a Infrahub NodeSchema + + Example of Object Generated by this function: + 
class StatusUpsertInput(InputObjectType): + id = String(required=False) + label = InputField(StringAttributeUpdate, required=True) + slug = InputField(StringAttributeUpdate, required=True) + description = InputField(StringAttributeUpdate, required=False) + """ + attrs: Dict[str, Union[graphene.String, graphene.InputField]] = {"id": graphene.String(required=False)} + + for attr in schema.attributes: + if attr.read_only: + continue + + attr_kind = get_attr_kind(schema, attr) + attr_type = get_attribute_type(kind=attr_kind).get_graphql_update() + + # A Field is not required if explicitly indicated or if a default value has been provided + required = not attr.optional if not attr.default_value else False + + attrs[attr.name] = graphene.InputField(attr_type, required=required, description=attr.description) + + for rel in schema.relationships: + if rel.internal_peer or rel.read_only: + continue + required = not rel.optional + if rel.cardinality == "one": + attrs[rel.name] = graphene.InputField(RelatedNodeInput, required=required, description=rel.description) + + elif rel.cardinality == "many": + attrs[rel.name] = graphene.InputField( + graphene.List(RelatedNodeInput), required=required, description=rel.description + ) + + return type(f"{schema.kind}UpsertInput", (graphene.InputObjectType,), attrs) + def generate_graphql_mutation_create( self, - schema: NodeSchema, + schema: Union[NodeSchema, ProfileSchema], input_type: Type[graphene.InputObjectType], base_class: Type[InfrahubMutation] = InfrahubMutation, mutation_type: str = "Create", @@ -576,7 +666,7 @@ def generate_graphql_mutation_create( def generate_graphql_mutation_update( self, - schema: NodeSchema, + schema: Union[NodeSchema, ProfileSchema], input_type: Type[graphene.InputObjectType], base_class: Type[InfrahubMutation] = InfrahubMutation, ) -> Type[InfrahubMutation]: @@ -599,7 +689,7 @@ def generate_graphql_mutation_update( @staticmethod def generate_graphql_mutation_delete( - schema: NodeSchema, + schema: Union[NodeSchema, ProfileSchema], base_class: Type[InfrahubMutation] = InfrahubMutation, ) -> Type[InfrahubMutation]: """Generate a GraphQL Mutation to DELETE an object based on the specified NodeSchema.""" @@ -619,7 +709,7 @@ def generate_graphql_mutation_delete( def generate_filters( self, - schema: Union[NodeSchema, GenericSchema], + schema: MainSchemaTypes, top_level: bool = False, include_properties: bool = True, ) -> Dict[str, Union[graphene.Scalar, graphene.List]]: @@ -636,7 +726,7 @@ def generate_filters( top_level (bool, optional): Flag to indicate if are at the top level or not. Defaults to False. 
Returns: - dict: A Dictionnary containing all the filters with their name as the key and their Type as value + dict: A Dictionary containing all the filters with their name as the key and their Type as value """ filters: Dict[str, Any] = {"offset": graphene.Int(), "limit": graphene.Int()} @@ -679,7 +769,7 @@ def generate_filters( def generate_graphql_edged_object( self, - schema: Union[NodeSchema, GenericSchema], + schema: MainSchemaTypes, node: Type[InfrahubObject], relation_property: Optional[Type[InfrahubObject]] = None, populate_cache: bool = False, @@ -714,7 +804,7 @@ def generate_graphql_edged_object( def generate_graphql_paginated_object( self, - schema: Union[NodeSchema, GenericSchema], + schema: MainSchemaTypes, edge: Type[InfrahubObject], nested: bool = False, populate_cache: bool = False, diff --git a/backend/infrahub/graphql/mutations/__init__.py b/backend/infrahub/graphql/mutations/__init__.py index 2b8962e032..f888d53411 100644 --- a/backend/infrahub/graphql/mutations/__init__.py +++ b/backend/infrahub/graphql/mutations/__init__.py @@ -1,14 +1,22 @@ from .account import CoreAccountSelfUpdate, CoreAccountTokenCreate from .artifact_definition import InfrahubArtifactDefinitionMutation from .attribute import ( - AnyAttributeInput, - BoolAttributeInput, - CheckboxAttributeInput, - JSONAttributeInput, - ListAttributeInput, - NumberAttributeInput, - StringAttributeInput, - TextAttributeInput, + AnyAttributeCreate, + AnyAttributeUpdate, + BoolAttributeCreate, + BoolAttributeUpdate, + CheckboxAttributeCreate, + CheckboxAttributeUpdate, + JSONAttributeCreate, + JSONAttributeUpdate, + ListAttributeCreate, + ListAttributeUpdate, + NumberAttributeCreate, + NumberAttributeUpdate, + StringAttributeCreate, + StringAttributeUpdate, + TextAttributeCreate, + TextAttributeUpdate, ) from .branch import ( BranchCreate, @@ -20,6 +28,7 @@ BranchUpdate, BranchValidate, ) +from .ipam import InfrahubIPAddressMutation, InfrahubIPNamespaceMutation, InfrahubIPPrefixMutation from .main import InfrahubMutation, InfrahubMutationMixin, InfrahubMutationOptions from .proposed_change import ( InfrahubProposedChangeMutation, @@ -31,8 +40,10 @@ from .task import TaskCreate, TaskUpdate __all__ = [ - "AnyAttributeInput", - "BoolAttributeInput", + "AnyAttributeCreate", + "AnyAttributeUpdate", + "BoolAttributeCreate", + "BoolAttributeUpdate", "BranchCreate", "BranchCreateInput", "BranchRebase", @@ -41,23 +52,32 @@ "BranchMerge", "BranchNameInput", "BranchUpdate", - "CheckboxAttributeInput", + "CheckboxAttributeCreate", + "CheckboxAttributeUpdate", "CoreAccountSelfUpdate", "CoreAccountTokenCreate", "InfrahubArtifactDefinitionMutation", + "InfrahubIPAddressMutation", + "InfrahubIPNamespaceMutation", + "InfrahubIPPrefixMutation", "InfrahubRepositoryMutation", "InfrahubMutationOptions", "InfrahubMutation", "InfrahubMutationMixin", "InfrahubProposedChangeMutation", - "JSONAttributeInput", - "ListAttributeInput", - "NumberAttributeInput", + "JSONAttributeCreate", + "JSONAttributeUpdate", + "ListAttributeCreate", + "ListAttributeUpdate", + "NumberAttributeCreate", + "NumberAttributeUpdate", "ProposedChangeRequestRunCheck", "RelationshipAdd", "RelationshipRemove", - "StringAttributeInput", - "TextAttributeInput", + "StringAttributeCreate", + "StringAttributeUpdate", + "TextAttributeCreate", + "TextAttributeUpdate", "SchemaDropdownAdd", "SchemaDropdownRemove", "SchemaEnumAdd", diff --git a/backend/infrahub/graphql/mutations/attribute.py b/backend/infrahub/graphql/mutations/attribute.py index 6858d1cd1f..c28e367e37 100644 --- 
a/backend/infrahub/graphql/mutations/attribute.py +++ b/backend/infrahub/graphql/mutations/attribute.py @@ -4,7 +4,7 @@ from infrahub.core import registry -class BaseAttributeInput(InputObjectType): +class BaseAttributeCreate(InputObjectType): is_visible = Boolean(required=False) is_protected = Boolean(required=False) source = String(required=False) @@ -16,37 +16,86 @@ def __init_subclass__(cls, **kwargs): registry.input_type[cls.__name__] = cls -class TextAttributeInput(BaseAttributeInput): +class BaseAttributeUpdate(InputObjectType): + is_default = Boolean(required=False) + is_visible = Boolean(required=False) + is_protected = Boolean(required=False) + source = String(required=False) + owner = String(required=False) + + @classmethod + def __init_subclass__(cls, **kwargs): + super().__init_subclass__(**kwargs) + registry.input_type[cls.__name__] = cls + + +class TextAttributeCreate(BaseAttributeCreate): value = String(required=False) -class StringAttributeInput(BaseAttributeInput): +class TextAttributeUpdate(BaseAttributeUpdate): value = String(required=False) -class NumberAttributeInput(BaseAttributeInput): +class StringAttributeCreate(BaseAttributeCreate): + value = String(required=False) + + +class StringAttributeUpdate(BaseAttributeUpdate): + value = String(required=False) + + +class NumberAttributeCreate(BaseAttributeCreate): + value = Int(required=False) + + +class NumberAttributeUpdate(BaseAttributeUpdate): value = Int(required=False) -class IntAttributeInput(BaseAttributeInput): +class IntAttributeCreate(BaseAttributeCreate): value = Int(required=False) -class CheckboxAttributeInput(BaseAttributeInput): +class IntAttributeUpdate(BaseAttributeUpdate): + value = Int(required=False) + + +class CheckboxAttributeCreate(BaseAttributeCreate): + value = Boolean(required=False) + + +class CheckboxAttributeUpdate(BaseAttributeUpdate): + value = Boolean(required=False) + + +class BoolAttributeCreate(BaseAttributeCreate): value = Boolean(required=False) -class BoolAttributeInput(BaseAttributeInput): +class BoolAttributeUpdate(BaseAttributeUpdate): value = Boolean(required=False) -class ListAttributeInput(BaseAttributeInput): +class ListAttributeCreate(BaseAttributeCreate): + value = GenericScalar(required=False) + + +class ListAttributeUpdate(BaseAttributeUpdate): + value = GenericScalar(required=False) + + +class JSONAttributeCreate(BaseAttributeCreate): + value = GenericScalar(required=False) + + +class JSONAttributeUpdate(BaseAttributeUpdate): value = GenericScalar(required=False) -class JSONAttributeInput(BaseAttributeInput): +class AnyAttributeCreate(BaseAttributeCreate): value = GenericScalar(required=False) -class AnyAttributeInput(BaseAttributeInput): +class AnyAttributeUpdate(BaseAttributeUpdate): value = GenericScalar(required=False) diff --git a/backend/infrahub/graphql/mutations/branch.py b/backend/infrahub/graphql/mutations/branch.py index 75453c2e56..43142f86c2 100644 --- a/backend/infrahub/graphql/mutations/branch.py +++ b/backend/infrahub/graphql/mutations/branch.py @@ -5,12 +5,14 @@ import pydantic from graphene import Boolean, Field, InputObjectType, List, Mutation, String from infrahub_sdk.utils import extract_fields, extract_fields_first_node +from opentelemetry import trace from typing_extensions import Self from infrahub import config, lock from infrahub.core import registry from infrahub.core.branch import Branch from infrahub.core.diff.branch_differ import BranchDiffer +from infrahub.core.diff.ipam_diff_parser import IpamDiffParser from infrahub.core.merge import 
BranchMerger from infrahub.core.migrations.schema.runner import schema_migrations_runner from infrahub.core.task import UserTask @@ -55,6 +57,7 @@ class Arguments: @classmethod @retry_db_transaction(name="branch_create") + @trace.get_tracer(__name__).start_as_current_span("branch_create") async def mutate( cls, root: dict, info: GraphQLResolveInfo, data: BranchCreateInput, background_execution: bool = False ) -> Self: @@ -250,8 +253,17 @@ async def mutate(cls, root: dict, info: GraphQLResolveInfo, data: BranchNameInpu if context.service: log_data = get_log_data() request_id = log_data.get("request_id", "") + differ = await merger.get_graph_diff() + diff_parser = IpamDiffParser( + db=context.db, + differ=differ, + source_branch_name=obj.name, + target_branch_name=registry.default_branch, + ) + ipam_node_details = await diff_parser.get_changed_ipam_node_details() message = messages.EventBranchRebased( branch=obj.name, + ipam_node_details=ipam_node_details, meta=Meta(initiator_id=WORKER_IDENTITY, request_id=request_id), ) await context.service.send(message=message) @@ -332,9 +344,19 @@ async def mutate(cls, root: dict, info: GraphQLResolveInfo, data: BranchNameInpu if config.SETTINGS.broker.enable and context.background: log_data = get_log_data() request_id = log_data.get("request_id", "") + + differ = await merger.get_graph_diff() + diff_parser = IpamDiffParser( + db=context.db, + differ=differ, + source_branch_name=obj.name, + target_branch_name=registry.default_branch, + ) + ipam_node_details = await diff_parser.get_changed_ipam_node_details() message = messages.EventBranchMerge( source_branch=obj.name, target_branch=registry.default_branch, + ipam_node_details=ipam_node_details, meta=Meta(initiator_id=WORKER_IDENTITY, request_id=request_id), ) context.background.add_task(services.send, message) diff --git a/backend/infrahub/graphql/mutations/ipam.py b/backend/infrahub/graphql/mutations/ipam.py new file mode 100644 index 0000000000..91b8e8db19 --- /dev/null +++ b/backend/infrahub/graphql/mutations/ipam.py @@ -0,0 +1,333 @@ +import ipaddress +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple + +from graphene import InputObjectType, Mutation +from graphql import GraphQLResolveInfo +from typing_extensions import Self + +from infrahub.core import registry +from infrahub.core.branch import Branch +from infrahub.core.constants import InfrahubKind +from infrahub.core.ipam.reconciler import IpamReconciler +from infrahub.core.manager import NodeManager +from infrahub.core.node import Node +from infrahub.core.schema import NodeSchema +from infrahub.database import InfrahubDatabase +from infrahub.exceptions import NodeNotFoundError, ValidationError +from infrahub.graphql.mutations.node_getter.interface import MutationNodeGetterInterface +from infrahub.log import get_logger + +from .main import InfrahubMutationMixin, InfrahubMutationOptions + +if TYPE_CHECKING: + from infrahub.graphql import GraphqlContext + +log = get_logger() + + +async def validate_namespace( + db: InfrahubDatabase, data: InputObjectType, existing_namespace_id: Optional[str] = None +) -> str: + """Validate or set (if not present) the namespace to pass to the mutation and return its ID.""" + namespace_id: Optional[str] = None + if "ip_namespace" not in data or not data["ip_namespace"]: + namespace_id = existing_namespace_id or registry.default_ipnamespace + data["ip_namespace"] = {"id": namespace_id} + elif "id" in data["ip_namespace"]: + namespace = await registry.manager.get_one(db=db, 
kind=InfrahubKind.IPNAMESPACE, id=data["ip_namespace"]["id"]) + namespace_id = namespace.id + else: + raise ValidationError( + "A valid ip_namespace must be provided or ip_namespace should be left empty in order to use the default value." + ) + return namespace_id + + +class InfrahubIPNamespaceMutation(InfrahubMutationMixin, Mutation): + @classmethod + def __init_subclass_with_meta__( # pylint: disable=arguments-differ + cls, + schema: NodeSchema, + _meta: Optional[Any] = None, + **options: Dict[str, Any], + ) -> None: + # Make sure schema is a valid NodeSchema Node Class + if not isinstance(schema, NodeSchema): + raise ValueError(f"You need to pass a valid NodeSchema in '{cls.__name__}.Meta', received '{schema}'") + + if not _meta: + _meta = InfrahubMutationOptions(cls) + _meta.schema = schema + + super().__init_subclass_with_meta__(_meta=_meta, **options) + + @classmethod + async def mutate_delete( + cls, + root, + info: GraphQLResolveInfo, + data: InputObjectType, + branch: Branch, + at: str, + ): + if data["id"] == registry.default_ipnamespace: + raise ValueError("Cannot delete default IPAM namespace") + + return await super().mutate_delete(root=root, info=info, data=data, branch=branch, at=at) + + +class InfrahubIPAddressMutation(InfrahubMutationMixin, Mutation): + @classmethod + def __init_subclass_with_meta__( # pylint: disable=arguments-differ + cls, + schema: NodeSchema, + _meta: Optional[Any] = None, + **options: Dict[str, Any], + ) -> None: + # Make sure schema is a valid NodeSchema Node Class + if not isinstance(schema, NodeSchema): + raise ValueError(f"You need to pass a valid NodeSchema in '{cls.__name__}.Meta', received '{schema}'") + + if not _meta: + _meta = InfrahubMutationOptions(cls) + _meta.schema = schema + + super().__init_subclass_with_meta__(_meta=_meta, **options) + + @classmethod + async def mutate_create( + cls, + root: dict, + info: GraphQLResolveInfo, + data: InputObjectType, + branch: Branch, + at: str, + database: Optional[InfrahubDatabase] = None, + ) -> Tuple[Node, Self]: + context: GraphqlContext = info.context + db = database or context.db + ip_address = ipaddress.ip_interface(data["address"]["value"]) + namespace_id = await validate_namespace(db=db, data=data) + + async with db.start_transaction() as dbt: + address = await cls.mutate_create_object(data=data, db=dbt, branch=branch, at=at) + reconciler = IpamReconciler(db=dbt, branch=branch) + reconciled_address = await reconciler.reconcile( + ip_value=ip_address, namespace=namespace_id, node_uuid=address.get_id() + ) + + result = await cls.mutate_create_to_graphql(info=info, db=db, obj=reconciled_address) + + return reconciled_address, result + + @classmethod + async def mutate_update( + cls, + root: dict, + info: GraphQLResolveInfo, + data: InputObjectType, + branch: Branch, + at: str, + database: Optional[InfrahubDatabase] = None, + node: Optional[Node] = None, + ) -> Tuple[Node, Self]: + context: GraphqlContext = info.context + db = database or context.db + + address = node or await NodeManager.get_one_by_id_or_default_filter( + db=db, + schema_name=cls._meta.schema.kind, + id=data.get("id"), + branch=branch, + at=at, + include_owner=True, + include_source=True, + ) + namespace = await address.ip_namespace.get_peer(db) + namespace_id = await validate_namespace(db=db, data=data, existing_namespace_id=namespace.id) + try: + async with db.start_transaction() as dbt: + address = await cls.mutate_update_object(db=dbt, info=info, data=data, branch=branch, obj=address) + reconciler = IpamReconciler(db=dbt, 
branch=branch) + ip_address = ipaddress.ip_interface(address.address.value) + reconciled_address = await reconciler.reconcile( + ip_value=ip_address, node_uuid=address.get_id(), namespace=namespace_id + ) + + result = await cls.mutate_update_to_graphql(db=dbt, info=info, obj=reconciled_address) + except ValidationError as exc: + raise ValueError(str(exc)) from exc + + return address, result + + @classmethod + async def mutate_upsert( + cls, + root: dict, + info: GraphQLResolveInfo, + data: InputObjectType, + branch: Branch, + at: str, + node_getters: List[MutationNodeGetterInterface], + database: Optional[InfrahubDatabase] = None, + ) -> Tuple[Node, Self, bool]: + context: GraphqlContext = info.context + db = database or context.db + + await validate_namespace(db=db, data=data) + prefix, result, created = await super().mutate_upsert( + root=root, info=info, data=data, branch=branch, at=at, node_getters=node_getters, database=db + ) + + return prefix, result, created + + @classmethod + async def mutate_delete( + cls, + root, + info: GraphQLResolveInfo, + data: InputObjectType, + branch: Branch, + at: str, + ): + return await super().mutate_delete(root=root, info=info, data=data, branch=branch, at=at) + + +class InfrahubIPPrefixMutation(InfrahubMutationMixin, Mutation): + @classmethod + def __init_subclass_with_meta__( # pylint: disable=arguments-differ + cls, + schema: NodeSchema, + _meta: Optional[Any] = None, + **options: Dict[str, Any], + ) -> None: + # Make sure schema is a valid NodeSchema Node Class + if not isinstance(schema, NodeSchema): + raise ValueError(f"You need to pass a valid NodeSchema in '{cls.__name__}.Meta', received '{schema}'") + + if not _meta: + _meta = InfrahubMutationOptions(cls) + _meta.schema = schema + + super().__init_subclass_with_meta__(_meta=_meta, **options) + + @classmethod + async def mutate_create( + cls, + root: dict, + info: GraphQLResolveInfo, + data: InputObjectType, + branch: Branch, + at: str, + database: Optional[InfrahubDatabase] = None, + ) -> Tuple[Node, Self]: + context: GraphqlContext = info.context + db = database or context.db + ip_network = ipaddress.ip_network(data["prefix"]["value"]) + namespace_id = await validate_namespace(db=db, data=data) + + async with db.start_transaction() as dbt: + prefix = await cls.mutate_create_object(data=data, db=dbt, branch=branch, at=at) + reconciler = IpamReconciler(db=dbt, branch=branch) + reconciled_prefix = await reconciler.reconcile( + ip_value=ip_network, namespace=namespace_id, node_uuid=prefix.get_id() + ) + + result = await cls.mutate_create_to_graphql(info=info, db=db, obj=reconciled_prefix) + + return reconciled_prefix, result + + @classmethod + async def mutate_update( + cls, + root: dict, + info: GraphQLResolveInfo, + data: InputObjectType, + branch: Branch, + at: str, + database: Optional[InfrahubDatabase] = None, + node: Optional[Node] = None, + ) -> Tuple[Node, Self]: + context: GraphqlContext = info.context + db = database or context.db + + prefix = node or await NodeManager.get_one_by_id_or_default_filter( + db=db, + schema_name=cls._meta.schema.kind, + id=data.get("id"), + branch=branch, + at=at, + include_owner=True, + include_source=True, + ) + namespace = await prefix.ip_namespace.get_peer(db) + namespace_id = await validate_namespace(db=db, data=data, existing_namespace_id=namespace.id) + try: + async with db.start_transaction() as dbt: + prefix = await cls.mutate_update_object(db=dbt, info=info, data=data, branch=branch, obj=prefix) + reconciler = IpamReconciler(db=dbt, 
branch=branch) + ip_network = ipaddress.ip_network(prefix.prefix.value) + reconciled_prefix = await reconciler.reconcile( + ip_value=ip_network, node_uuid=prefix.get_id(), namespace=namespace_id + ) + + result = await cls.mutate_update_to_graphql(db=dbt, info=info, obj=reconciled_prefix) + except ValidationError as exc: + raise ValueError(str(exc)) from exc + + return prefix, result + + @classmethod + async def mutate_upsert( + cls, + root: dict, + info: GraphQLResolveInfo, + data: InputObjectType, + branch: Branch, + at: str, + node_getters: List[MutationNodeGetterInterface], + database: Optional[InfrahubDatabase] = None, + ): + context: GraphqlContext = info.context + db = database or context.db + + await validate_namespace(db=db, data=data) + prefix, result, created = await super().mutate_upsert( + root=root, info=info, data=data, branch=branch, at=at, node_getters=node_getters, database=db + ) + + return prefix, result, created + + @classmethod + async def mutate_delete( + cls, + root, + info: GraphQLResolveInfo, + data: InputObjectType, + branch: Branch, + at: str, + ) -> Tuple[Node, Self]: + context: GraphqlContext = info.context + db = context.db + + prefix = await NodeManager.get_one( + data.get("id"), context.db, branch=branch, at=at, prefetch_relationships=True + ) + if not prefix: + raise NodeNotFoundError(branch, cls._meta.schema.kind, data.get("id")) + + namespace_rels = await prefix.ip_namespace.get_relationships(db=db) + namespace_id = namespace_rels[0].peer_id + try: + async with context.db.start_transaction() as dbt: + reconciler = IpamReconciler(db=dbt, branch=branch) + ip_network = ipaddress.ip_network(prefix.prefix.value) + reconciled_prefix = await reconciler.reconcile( + ip_value=ip_network, node_uuid=prefix.get_id(), namespace=namespace_id, is_delete=True + ) + except ValidationError as exc: + raise ValueError(str(exc)) from exc + + ok = True + + return reconciled_prefix, cls(ok=ok) diff --git a/backend/infrahub/graphql/mutations/main.py b/backend/infrahub/graphql/mutations/main.py index 82f8cb48dd..0d5aac3199 100644 --- a/backend/infrahub/graphql/mutations/main.py +++ b/backend/infrahub/graphql/mutations/main.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, List, Optional, Tuple +from typing import TYPE_CHECKING, List, Optional, Tuple, Union from graphene import InputObjectType, Mutation from graphene.types.mutation import MutationOptions @@ -18,6 +18,8 @@ from infrahub.core.manager import NodeManager from infrahub.core.node import Node from infrahub.core.schema import NodeSchema +from infrahub.core.schema.generic_schema import GenericSchema +from infrahub.core.schema.profile_schema import ProfileSchema from infrahub.core.timestamp import Timestamp from infrahub.database import retry_db_transaction from infrahub.dependencies.registry import get_component_registry @@ -116,7 +118,31 @@ async def mutate(cls, root: dict, info: GraphQLResolveInfo, *args, **kwargs): return mutation @classmethod - @retry_db_transaction(name="object_create") + async def _get_profile_ids(cls, db: InfrahubDatabase, obj: Node) -> set[str]: + if not hasattr(obj, "profiles"): + return set() + profile_rels = await obj.profiles.get_relationships(db=db) + return {pr.peer_id for pr in profile_rels} + + @classmethod + async def _refresh_for_profile_update( + cls, db: InfrahubDatabase, branch: Branch, obj: Node, previous_profile_ids: Optional[set[str]] = None + ) -> Node: + if not hasattr(obj, "profiles"): + return obj + current_profile_ids = await 
cls._get_profile_ids(db=db, obj=obj) + if previous_profile_ids is None or previous_profile_ids != current_profile_ids: + return await NodeManager.get_one_by_id_or_default_filter( + db=db, + schema_name=cls._meta.schema.kind, + id=obj.get_id(), + branch=branch, + include_owner=True, + include_source=True, + ) + return obj + + @classmethod async def mutate_create( cls, root: dict, @@ -128,9 +154,21 @@ async def mutate_create( ) -> Tuple[Node, Self]: context: GraphqlContext = info.context db = database or context.db + obj = await cls.mutate_create_object(data=data, db=db, branch=branch, at=at) + result = await cls.mutate_create_to_graphql(info=info, db=db, obj=obj) + return obj, result + + @classmethod + @retry_db_transaction(name="object_create") + async def mutate_create_object( + cls, + data: InputObjectType, + db: InfrahubDatabase, + branch: Branch, + at: str, + ) -> Node: component_registry = get_component_registry() node_constraint_runner = await component_registry.get_component(NodeConstraintRunner, db=db, branch=branch) - node_class = Node if cls._meta.schema.kind in registry.node: node_class = registry.node[cls._meta.schema.kind] @@ -144,18 +182,24 @@ async def mutate_create( if db.is_transaction: await obj.save(db=db) else: - async with db.start_transaction() as db: - await obj.save(db=db) + async with db.start_transaction() as dbt: + await obj.save(db=dbt) except ValidationError as exc: raise ValueError(str(exc)) from exc + if await cls._get_profile_ids(db=db, obj=obj): + obj = await cls._refresh_for_profile_update(db=db, branch=branch, obj=obj) + + return obj + + @classmethod + async def mutate_create_to_graphql(cls, info: GraphQLResolveInfo, db: InfrahubDatabase, obj: Node) -> Self: fields = await extract_fields(info.field_nodes[0].selection_set) result = {"ok": True} if "object" in fields: - result["object"] = await obj.to_graphql(db=context.db, fields=fields.get("object", {})) - - return obj, cls(**result) + result["object"] = await obj.to_graphql(db=db, fields=fields.get("object", {})) + return cls(**result) @classmethod @retry_db_transaction(name="object_update") @@ -171,8 +215,6 @@ async def mutate_update( ): context: GraphqlContext = info.context db = database or context.db - component_registry = get_component_registry() - node_constraint_runner = await component_registry.get_component(NodeConstraintRunner, db=db, branch=branch) obj = node or await NodeManager.get_one_by_id_or_default_filter( db=db, @@ -183,37 +225,63 @@ async def mutate_update( include_owner=True, include_source=True, ) - - fields_object = await extract_fields(info.field_nodes[0].selection_set) - fields_object = fields_object.get("object", {}) - result = {"ok": True} try: - await obj.from_graphql(db=db, data=data) - fields_to_validate = list(data) - await node_constraint_runner.check(node=obj, field_filters=fields_to_validate) - node_id = data.get("id", obj.id) - fields = list(data.keys()) - if "id" in fields: - fields.remove("id") - validate_mutation_permissions_update_node( - operation=cls.__name__, node_id=node_id, account_session=context.account_session, fields=fields - ) - if db.is_transaction: - await obj.save(db=db) - if fields_object: - result["object"] = await obj.to_graphql(db=db, fields=fields_object) - + obj = await cls.mutate_update_object(db=db, info=info, data=data, branch=branch, obj=obj) + result = await cls.mutate_update_to_graphql(db=db, info=info, obj=obj) else: async with db.start_transaction() as dbt: - await obj.save(db=dbt) - if fields_object: - result["object"] = await 
obj.to_graphql(db=dbt, fields=fields_object) - + obj = await cls.mutate_update_object(db=dbt, info=info, data=data, branch=branch, obj=obj) + result = await cls.mutate_update_to_graphql(db=dbt, info=info, obj=obj) except ValidationError as exc: raise ValueError(str(exc)) from exc - return obj, cls(**result) + return obj, result + + @classmethod + async def mutate_update_object( + cls, + db: InfrahubDatabase, + info: GraphQLResolveInfo, + data: InputObjectType, + branch: Branch, + obj: Node, + ) -> Node: + context: GraphqlContext = info.context + component_registry = get_component_registry() + node_constraint_runner = await component_registry.get_component(NodeConstraintRunner, db=db, branch=branch) + + before_mutate_profile_ids = await cls._get_profile_ids(db=db, obj=obj) + await obj.from_graphql(db=db, data=data) + fields_to_validate = list(data) + await node_constraint_runner.check(node=obj, field_filters=fields_to_validate) + node_id = data.get("id", obj.id) + fields = list(data.keys()) + if "id" in fields: + fields.remove("id") + validate_mutation_permissions_update_node( + operation=cls.__name__, node_id=node_id, account_session=context.account_session, fields=fields + ) + + await obj.save(db=db) + obj = await cls._refresh_for_profile_update( + db=db, branch=branch, obj=obj, previous_profile_ids=before_mutate_profile_ids + ) + return obj + + @classmethod + async def mutate_update_to_graphql( + cls, + db: InfrahubDatabase, + info: GraphQLResolveInfo, + obj: Node, + ) -> Self: + fields_object = await extract_fields(info.field_nodes[0].selection_set) + fields_object = fields_object.get("object", {}) + result = {"ok": True} + if fields_object: + result["object"] = await obj.to_graphql(db=db, fields=fields_object) + return cls(**result) @classmethod @retry_db_transaction(name="object_upsert") @@ -226,9 +294,9 @@ async def mutate_upsert( at: str, node_getters: List[MutationNodeGetterInterface], database: Optional[InfrahubDatabase] = None, - ): + ) -> Tuple[Node, Self, bool]: schema_name = cls._meta.schema.kind - node_schema = registry.get_node_schema(name=schema_name, branch=branch) + node_schema = registry.schema.get(name=schema_name, branch=branch) node = None for getter in node_getters: @@ -259,8 +327,14 @@ async def mutate_delete( if not (obj := await NodeManager.get_one(db=context.db, id=data.get("id"), branch=branch, at=at)): raise NodeNotFoundError(branch, cls._meta.schema.kind, data.get("id")) - async with context.db.start_transaction() as db: - await obj.delete(db=db, at=at) + try: + async with context.db.start_transaction() as db: + deleted = await NodeManager.delete(db=db, at=at, branch=branch, nodes=[obj]) + except ValidationError as exc: + raise ValueError(str(exc)) from exc + + deleted_str = ", ".join([f"{d.get_kind()}({d.get_id()})" for d in deleted]) + log.info(f"nodes deleted: {deleted_str}") ok = True @@ -269,9 +343,11 @@ async def mutate_delete( class InfrahubMutation(InfrahubMutationMixin, Mutation): @classmethod - def __init_subclass_with_meta__(cls, schema: NodeSchema = None, _meta=None, **options): # pylint: disable=arguments-differ + def __init_subclass_with_meta__( # pylint: disable=arguments-differ + cls, schema: Optional[Union[NodeSchema, GenericSchema, ProfileSchema]] = None, _meta=None, **options + ) -> None: # Make sure schema is a valid NodeSchema Node Class - if not isinstance(schema, NodeSchema): + if not isinstance(schema, (NodeSchema, GenericSchema, ProfileSchema)): raise ValueError(f"You need to pass a valid NodeSchema in '{cls.__name__}.Meta', received 
'{schema}'") if not _meta: diff --git a/backend/infrahub/graphql/mutations/node_getter/by_default_filter.py b/backend/infrahub/graphql/mutations/node_getter/by_default_filter.py index 11c6d990e1..cb46037884 100644 --- a/backend/infrahub/graphql/mutations/node_getter/by_default_filter.py +++ b/backend/infrahub/graphql/mutations/node_getter/by_default_filter.py @@ -5,7 +5,7 @@ from infrahub.core.branch import Branch from infrahub.core.manager import NodeManager from infrahub.core.node import Node -from infrahub.core.schema import NodeSchema +from infrahub.core.schema import MainSchemaTypes from infrahub.database import InfrahubDatabase from .interface import MutationNodeGetterInterface @@ -18,7 +18,7 @@ def __init__(self, db: InfrahubDatabase, node_manager: NodeManager): async def get_node( self, - node_schema: NodeSchema, + node_schema: MainSchemaTypes, data: InputObjectType, branch: Branch, at: str, diff --git a/backend/infrahub/graphql/mutations/node_getter/by_id.py b/backend/infrahub/graphql/mutations/node_getter/by_id.py index 07cba0081a..c69d503fa6 100644 --- a/backend/infrahub/graphql/mutations/node_getter/by_id.py +++ b/backend/infrahub/graphql/mutations/node_getter/by_id.py @@ -5,7 +5,7 @@ from infrahub.core.branch import Branch from infrahub.core.manager import NodeManager from infrahub.core.node import Node -from infrahub.core.schema import NodeSchema +from infrahub.core.schema import MainSchemaTypes from infrahub.database import InfrahubDatabase from .interface import MutationNodeGetterInterface @@ -18,7 +18,7 @@ def __init__(self, db: InfrahubDatabase, node_manager: NodeManager): async def get_node( self, - node_schema: NodeSchema, + node_schema: MainSchemaTypes, data: InputObjectType, branch: Branch, at: str, diff --git a/backend/infrahub/graphql/mutations/node_getter/interface.py b/backend/infrahub/graphql/mutations/node_getter/interface.py index c9be8056d8..690f65e822 100644 --- a/backend/infrahub/graphql/mutations/node_getter/interface.py +++ b/backend/infrahub/graphql/mutations/node_getter/interface.py @@ -5,16 +5,15 @@ from infrahub.core.branch import Branch from infrahub.core.node import Node -from infrahub.core.schema import NodeSchema +from infrahub.core.schema import MainSchemaTypes class MutationNodeGetterInterface(ABC): @abstractmethod async def get_node( self, - node_schema: NodeSchema, + node_schema: MainSchemaTypes, data: InputObjectType, branch: Branch, at: str, - ) -> Optional[Node]: - ... + ) -> Optional[Node]: ... 
diff --git a/backend/infrahub/graphql/mutations/proposed_change.py b/backend/infrahub/graphql/mutations/proposed_change.py index 53d81724a4..f0aa710257 100644 --- a/backend/infrahub/graphql/mutations/proposed_change.py +++ b/backend/infrahub/graphql/mutations/proposed_change.py @@ -3,9 +3,10 @@ from graphene import Boolean, InputObjectType, Mutation, String from graphql import GraphQLResolveInfo -from infrahub import config, lock +from infrahub import lock from infrahub.core.branch import Branch from infrahub.core.constants import CheckType, InfrahubKind, ProposedChangeState, ValidatorConclusion +from infrahub.core.diff.ipam_diff_parser import IpamDiffParser from infrahub.core.manager import NodeManager from infrahub.core.merge import BranchMerger from infrahub.core.migrations.schema.runner import schema_migrations_runner @@ -101,7 +102,7 @@ async def mutate_update( include_owner=True, include_source=True, ) - state = ProposedChangeState(obj.state.value) + state = ProposedChangeState(obj.state.value.value) state.validate_editability() updated_state = None @@ -123,7 +124,7 @@ async def mutate_update( validator_kind = validation.get_kind() if ( validator_kind != InfrahubKind.DATAVALIDATOR - and validation.conclusion.value != ValidatorConclusion.SUCCESS.value + and validation.conclusion.value.value != ValidatorConclusion.SUCCESS.value ): # Ignoring Data integrity checks as they are handled again later raise ValidationError("Unable to merge proposed change containing failing checks") @@ -135,7 +136,7 @@ async def mutate_update( "Data conflicts found on branch and missing decisions about what branch to keep" ) if check.conflicts.value: - keep_source_value = check.keep_branch.value == "source" + keep_source_value = check.keep_branch.value.value == "source" conflict_resolution[check.conflicts.value[0]["path"]] = keep_source_value async with lock.registry.global_graph_lock(): @@ -143,12 +144,21 @@ async def mutate_update( await merger.merge(conflict_resolution=conflict_resolution) await merger.update_schema() - if config.SETTINGS.broker.enable and context.background: + if context.background: log_data = get_log_data() request_id = log_data.get("request_id", "") + differ = await merger.get_graph_diff() + diff_parser = IpamDiffParser( + db=context.db, + differ=differ, + source_branch_name=obj.name, + target_branch_name=registry.default_branch, + ) + ipam_node_details = await diff_parser.get_changed_ipam_node_details() message = messages.EventBranchMerge( source_branch=source_branch.name, target_branch=registry.default_branch, + ipam_node_details=ipam_node_details, meta=Meta(initiator_id=WORKER_IDENTITY, request_id=request_id), ) context.background.add_task(services.send, message) @@ -193,7 +203,7 @@ async def mutate( proposed_change = await NodeManager.get_one_by_id_or_default_filter( id=identifier, schema_name=InfrahubKind.PROPOSEDCHANGE, db=context.db ) - state = ProposedChangeState(proposed_change.state.value) + state = ProposedChangeState(proposed_change.state.value.value) state.validate_state_check_run() destination_branch = proposed_change.destination_branch.value diff --git a/backend/infrahub/graphql/mutations/relationship.py b/backend/infrahub/graphql/mutations/relationship.py index e53edcd6f4..ef92ae723f 100644 --- a/backend/infrahub/graphql/mutations/relationship.py +++ b/backend/infrahub/graphql/mutations/relationship.py @@ -5,7 +5,7 @@ from graphene import Boolean, InputField, InputObjectType, List, Mutation, String from infrahub_sdk.utils import compare_lists -from 
infrahub.core.constants import RelationshipCardinality +from infrahub.core.constants import InfrahubKind, RelationshipCardinality from infrahub.core.manager import NodeManager from infrahub.core.query.relationship import ( RelationshipGetPeerQuery, @@ -20,12 +20,14 @@ if TYPE_CHECKING: from graphql import GraphQLResolveInfo + from infrahub.core.relationship import RelationshipManager + from .. import GraphqlContext # pylint: disable=unused-argument,too-many-branches -RELATIONSHIP_PEERS_TO_IGNORE = ["CoreNode"] +RELATIONSHIP_PEERS_TO_IGNORE = [InfrahubKind.NODE] class RelationshipNodesInput(InputObjectType): @@ -42,21 +44,22 @@ async def mutate( cls, root: dict, info: GraphQLResolveInfo, - data, + data: RelationshipNodesInput, ): context: GraphqlContext = info.context + input_id = str(data.id) if not ( source := await NodeManager.get_one( db=context.db, - id=data.get("id"), + id=input_id, branch=context.branch, at=context.at, include_owner=True, include_source=True, ) ): - raise NodeNotFoundError(context.branch, None, data.get("id")) + raise NodeNotFoundError(context.branch, None, input_id) # Check if the name of the relationship provided exist for this node and is of cardinality Many if data.get("name") not in source._schema.relationship_names: @@ -69,7 +72,7 @@ async def mutate( raise ValidationError({"name": f"'{data.get('name')}' must be a relationship of cardinality Many"}) # Query the node in the database and validate that all of them exist and are if the correct kind - node_ids: List[str] = [node_data.get("id") for node_data in data.get("nodes")] + node_ids: list[str] = [node_data.get("id") for node_data in data.get("nodes")] nodes = await NodeManager.get_many( db=context.db, ids=node_ids, fields={"display_label": None}, branch=context.branch, at=context.at ) @@ -85,6 +88,19 @@ async def mutate( if rel_schema.peer not in node.get_labels(): raise ValidationError(f"{node_id!r} {node.get_kind()!r} is not a valid peer for '{rel_schema.peer}'") + peer_relationships = [rel for rel in node._schema.relationships if rel.identifier == rel_schema.identifier] + if ( + rel_schema.identifier + and len(peer_relationships) == 1 + and peer_relationships[0].cardinality == RelationshipCardinality.ONE + ): + peer_relationship: RelationshipManager = getattr(node, peer_relationships[0].name) + if peer := await peer_relationship.get_peer(db=context.db): + if peer.id != input_id: + raise ValidationError( + f"{node_id!r} {node.get_kind()!r} is already related to another peer on '{peer_relationships[0].name}'" + ) + # The nodes that are already present in the db query = await RelationshipGetPeerQuery.init( db=context.db, @@ -128,7 +144,7 @@ async def mutate( cls, root: dict, info: GraphQLResolveInfo, - data, + data: RelationshipNodesInput, ): return await super().mutate(root=root, info=info, data=data) @@ -145,6 +161,6 @@ async def mutate( cls, root: dict, info: GraphQLResolveInfo, - data, + data: RelationshipNodesInput, ): return await super().mutate(root=root, info=info, data=data) diff --git a/backend/infrahub/graphql/mutations/repository.py b/backend/infrahub/graphql/mutations/repository.py index 3b4fc5d25a..fb5e832923 100644 --- a/backend/infrahub/graphql/mutations/repository.py +++ b/backend/infrahub/graphql/mutations/repository.py @@ -4,7 +4,7 @@ from graphene import InputObjectType, Mutation -from infrahub.core.constants import infrahubkind +from infrahub.core.constants import InfrahubKind from infrahub.core.manager import NodeManager from infrahub.core.schema import NodeSchema from infrahub.log 
import get_logger @@ -54,7 +54,7 @@ async def mutate_create( authenticated_user = None if context.account_session and context.account_session.authenticated: authenticated_user = context.account_session.account_id - if obj.get_kind() == "CoreReadOnlyRepository": + if obj.get_kind() == InfrahubKind.READONLYREPOSITORY: message = messages.GitRepositoryAddReadOnly( repository_id=obj.id, repository_name=obj.name.value, @@ -101,7 +101,7 @@ async def mutate_update( include_owner=True, include_source=True, ) - if node.get_kind() != infrahubkind.READONLYREPOSITORY: + if node.get_kind() != InfrahubKind.READONLYREPOSITORY: return await super().mutate_update(root, info, data, branch, at, database=context.db, node=node) current_commit = node.commit.value diff --git a/backend/infrahub/graphql/mutations/schema.py b/backend/infrahub/graphql/mutations/schema.py index 63a46f4236..5f2c2a5a97 100644 --- a/backend/infrahub/graphql/mutations/schema.py +++ b/backend/infrahub/graphql/mutations/schema.py @@ -265,3 +265,4 @@ async def update_registry(kind: NodeSchema, branch: Branch, db: InfrahubDatabase meta=Meta(initiator_id=WORKER_IDENTITY), ) await services.send(message) + await services.service.component.refresh_schema_hash(branches=[branch.name]) diff --git a/backend/infrahub/graphql/parser.py b/backend/infrahub/graphql/parser.py new file mode 100644 index 0000000000..c158a54ae8 --- /dev/null +++ b/backend/infrahub/graphql/parser.py @@ -0,0 +1,213 @@ +from __future__ import annotations + +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Optional + +from graphql.language import ( + DirectiveNode, + FieldNode, + InlineFragmentNode, + ListValueNode, + NameNode, + SelectionSetNode, + StringValueNode, +) +from infrahub_sdk.utils import deep_merge_dict + +if TYPE_CHECKING: + from infrahub.core.schema import NodeSchema + + +@dataclass +class FieldEnricher: + key: str + node: FieldNode + path: str + fields: dict = field(default_factory=dict) + + +async def extract_selection(field_node: FieldNode, schema: NodeSchema) -> dict: + graphql_extractor = GraphQLExtractor(field_node=field_node, schema=schema) + return await graphql_extractor.get_fields() + + +class GraphQLExtractor: + def __init__(self, field_node: FieldNode, schema: NodeSchema) -> None: + self.field_node = field_node + self.schema = schema + self.typename_paths: dict[str, list[FieldEnricher]] = {} + self.node_path: dict[str, list[FieldEnricher]] = {} + + def _define_node_path(self, path: str) -> None: + if path not in self.node_path: + self.node_path[path] = [] + + async def get_fields(self) -> dict: + return await self.extract_fields(selection_set=self.field_node.selection_set) or {} + + def _process_expand_directive(self, path: str, directive: DirectiveNode) -> None: + excluded_fields = [] + for argument in directive.arguments: + if argument.name.value == "exclude": + if isinstance(argument.value, ListValueNode): + excluded_fields.extend( + [value.value for value in argument.value.values if isinstance(value, StringValueNode)] + ) + + if path not in self.typename_paths: + self.typename_paths[path] = [] + self.typename_paths[path].append( + FieldEnricher( + key="__typename", + node=FieldNode( + kind="field", + name=NameNode(kind="name", value="__typename"), + directives=[], + arguments=[], + ), + path=f"{path}/__typename/", + ) + ) + if path == "/edges/node/": + self._define_node_path(path=path) + + self.node_path[path].append( + FieldEnricher( + key="id", + node=FieldNode( + kind="field", + name=NameNode(kind="name", value="id"), + 
directives=[], + arguments=[], + ), + path=f"{path}id/", + fields={"id": None}, + ) + ) + attribute_enrichers = [] + attributes = [attribute for attribute in self.schema.attributes if attribute.name not in excluded_fields] + for attribute in attributes: + attribute_path = f"{path}{attribute.name}/" + self._define_node_path(path=attribute_path) + field_attributes = {"value": None, "is_default": None, "is_from_profile": None} + + enrichers = [ + FieldEnricher( + key=attribute.name, + node=FieldNode( + kind="field", + name=NameNode( + kind="name", + value=key, + directives=[], + arguments=[], + ), + ), + path=attribute_path, + fields={key: None}, + ) + for key in field_attributes + ] + + self.node_path[attribute_path].extend(enrichers) + attribute_enrichers.append( + FieldNode( + kind="field", + name=NameNode(kind="name", value=attribute.name), + selection_set=SelectionSetNode(selections=tuple(enrichers)), + ) + ) + + self._define_node_path(path=path) + self.node_path[path].append( + FieldEnricher( + key="node", + path=path, + node=FieldNode( + kind="field", + name=NameNode(kind="name", value="node"), + selection_set=SelectionSetNode(selections=tuple(attribute_enrichers)), + ), + fields={attribute.name: field_attributes for attribute in self.schema.attributes}, + ) + ) + + def process_directives(self, node: FieldNode, path: str) -> None: + for directive in node.directives: + if directive.name.value == "expand": + self._process_expand_directive(path=path, directive=directive) + + def apply_directives(self, selection_set: SelectionSetNode, fields: dict, path: str) -> dict: + if path in self.typename_paths: + for node in self.typename_paths[path]: + if "__typename" not in fields: + selections = list(selection_set.selections) + selections.append(node.node) + selection_set.selections = tuple(selections) + + if path in self.node_path: + for node in self.node_path[path]: + if node.key not in fields: + fields = deep_merge_dict(dicta=fields.copy(), dictb=node.fields) + selections = list(selection_set.selections) + selections.append(node.node) + selection_set.selections = tuple(selections) + + undefined_paths = [key for key in self.node_path if is_child_path(path=path, child=key)] + + for undefined in undefined_paths: + for sub_node in self.node_path[undefined]: + selections = list(selection_set.selections) + selections.append( + FieldNode( + kind="field", + name=NameNode(kind="name", value=sub_node.key), + selection_set=SelectionSetNode(selections=tuple([sub_node.node])), + ) + ) + selection_set.selections = tuple(selections) + + del self.node_path[path] + + return fields + + async def extract_fields( + self, selection_set: Optional[SelectionSetNode], path: str = "/" + ) -> Optional[dict[str, Optional[dict]]]: + """Extract fields and apply Directives""" + if not selection_set: + return None + + fields: dict[str, Optional[dict]] = {} + for node in selection_set.selections: + sub_selection_set = getattr(node, "selection_set", None) + if isinstance(node, FieldNode): + node_path = f"{path}{node.name.value}/" + self.process_directives(node=node, path=node_path) + + value = await self.extract_fields(sub_selection_set, path=node_path) + if node.name.value not in fields: + fields[node.name.value] = value + elif isinstance(fields[node.name.value], dict) and isinstance(value, dict): + fields[node.name.value].update(value) # type: ignore[union-attr] + + elif isinstance(node, InlineFragmentNode): + for sub_node in node.selection_set.selections: + if isinstance(sub_node, FieldNode): + sub_node_path = 
f"{path}{sub_node.name.value}/" + sub_sub_selection_set = getattr(sub_node, "selection_set", None) + value = await self.extract_fields(sub_sub_selection_set, path=sub_node_path) + if sub_node.name.value not in fields: + fields[sub_node.name.value] = await self.extract_fields( + sub_sub_selection_set, path=sub_node_path + ) + elif isinstance(fields[sub_node.name.value], dict) and isinstance(value, dict): + fields[sub_node.name.value].update(value) # type: ignore[union-attr] + + return self.apply_directives(selection_set=selection_set, fields=fields, path=path) + + +def is_child_path(path: str, child: str) -> bool: + if child.startswith(path) and len(child) > len(path): + return True + return False diff --git a/backend/infrahub/graphql/queries/__init__.py b/backend/infrahub/graphql/queries/__init__.py index 077c3d5017..da0851182b 100644 --- a/backend/infrahub/graphql/queries/__init__.py +++ b/backend/infrahub/graphql/queries/__init__.py @@ -3,6 +3,7 @@ from .diff.old import DiffSummaryOld from .internal import InfrahubInfo from .relationship import Relationship +from .status import InfrahubStatus from .task import Task -__all__ = ["BranchQueryList", "DiffSummary", "DiffSummaryOld", "InfrahubInfo", "Relationship", "Task"] +__all__ = ["BranchQueryList", "DiffSummary", "DiffSummaryOld", "InfrahubInfo", "InfrahubStatus", "Relationship", "Task"] diff --git a/backend/infrahub/graphql/queries/diff/old.py b/backend/infrahub/graphql/queries/diff/old.py index 4bc41d1d58..54db0091db 100644 --- a/backend/infrahub/graphql/queries/diff/old.py +++ b/backend/infrahub/graphql/queries/diff/old.py @@ -55,4 +55,5 @@ async def get_summary( time_to=String(required=False), branch_only=Boolean(required=False, default_value=False), resolver=DiffSummaryEntryOld.resolve, + deprecation_reason="DiffSummaryOld will be removed in the next release, please use DiffSummary instead.", ) diff --git a/backend/infrahub/graphql/queries/status.py b/backend/infrahub/graphql/queries/status.py new file mode 100644 index 0000000000..5a01326266 --- /dev/null +++ b/backend/infrahub/graphql/queries/status.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from graphene import Boolean, Field, List, ObjectType, String +from infrahub_sdk.utils import extract_fields_first_node + +from infrahub.services import services + +if TYPE_CHECKING: + from graphql import GraphQLResolveInfo + + from infrahub.graphql import GraphqlContext + + +class StatusSummary(ObjectType): + schema_hash_synced = Field( + Boolean, required=True, description="Indicates if the schema hash is in sync on all active workers" + ) + + +class StatusWorker(ObjectType): + id = Field(String, required=True) + active = Field(Boolean, required=True) + schema_hash = Field(String, required=False) + + +class StatusWorkerEdge(ObjectType): + node = Field(StatusWorker, required=True) + + +class StatusWorkerEdges(ObjectType): + edges = Field(List(of_type=StatusWorkerEdge, required=True), required=True) + + +class Status(ObjectType): + summary = Field(StatusSummary, required=True) + workers = Field(StatusWorkerEdges, required=True) + + +async def resolve_status( + root: dict, # pylint: disable=unused-argument + info: GraphQLResolveInfo, +) -> dict: + context: GraphqlContext = info.context + service = context.service or services.service + fields = await extract_fields_first_node(info) + response: dict[str, Any] = {} + workers = await service.component.list_workers(branch=context.branch.name, schema_hash=True) + + if summary := 
fields.get("summary"): + response["summary"] = {} + if "schema_hash_synced" in summary: + hashes = {worker.schema_hash for worker in workers if worker.active} + response["summary"]["schema_hash_synced"] = len(hashes) == 1 + + if "workers" in fields: + response["workers"] = {} + response["workers"]["edges"] = [{"node": worker.to_dict()} for worker in workers] + + return response + + +InfrahubStatus = Field(Status, resolver=resolve_status) diff --git a/backend/infrahub/graphql/query.py b/backend/infrahub/graphql/query.py index 1ae418ff2a..a4db942453 100644 --- a/backend/infrahub/graphql/query.py +++ b/backend/infrahub/graphql/query.py @@ -4,10 +4,10 @@ from graphql import graphql -from infrahub.core import get_branch from infrahub.core.branch import Branch from infrahub.core.constants import InfrahubKind from infrahub.core.manager import NodeManager +from infrahub.core.registry import registry from infrahub.core.timestamp import Timestamp from infrahub.graphql import prepare_graphql_params @@ -27,7 +27,7 @@ async def execute_query( """Helper function to Execute a GraphQL Query.""" if not isinstance(branch, Branch): - branch = await get_branch(db=db, branch=branch) + branch = await registry.get_branch(db=db, branch=branch) at = Timestamp(at) graphql_query = await NodeManager.get_one_by_default_filter( diff --git a/backend/infrahub/graphql/schema.py b/backend/infrahub/graphql/schema.py index 63e738b7be..859fdd5d1e 100644 --- a/backend/infrahub/graphql/schema.py +++ b/backend/infrahub/graphql/schema.py @@ -28,10 +28,11 @@ TaskCreate, TaskUpdate, ) -from .queries import BranchQueryList, DiffSummary, DiffSummaryOld, InfrahubInfo, Relationship, Task +from .parser import extract_selection +from .queries import BranchQueryList, DiffSummary, DiffSummaryOld, InfrahubInfo, InfrahubStatus, Relationship, Task if TYPE_CHECKING: - from graphql import GraphQLResolveInfo # pylint: disable=no-name-in-module + from graphql import GraphQLResolveInfo from . 
import GraphqlContext @@ -40,7 +41,8 @@ async def default_paginated_list_resolver(root: dict, info: GraphQLResolveInfo, **kwargs): - fields = await extract_fields(info.field_nodes[0].selection_set) + fields = await extract_selection(info.field_nodes[0], schema=info.return_type.graphene_type._meta.schema) + return await info.return_type.graphene_type.get_paginated_list(**kwargs, fields=fields, context=info.context) @@ -71,6 +73,7 @@ class InfrahubBaseQuery(ObjectType): Relationship = Relationship InfrahubInfo = InfrahubInfo + InfrahubStatus = InfrahubStatus InfrahubTask = Task diff --git a/backend/infrahub/graphql/types/attribute.py b/backend/infrahub/graphql/types/attribute.py index cb4303fbd2..2694d97bec 100644 --- a/backend/infrahub/graphql/types/attribute.py +++ b/backend/infrahub/graphql/types/attribute.py @@ -17,6 +17,7 @@ class RelatedNodeInput(InputObjectType): class AttributeInterface(InfrahubInterface): + is_default = Field(Boolean) is_inherited = Field(Boolean) is_protected = Field(Boolean) is_visible = Field(Boolean) @@ -29,6 +30,7 @@ class AttributeInterface(InfrahubInterface): class BaseAttribute(ObjectType): id = Field(String) + is_from_profile = Field(Boolean) @classmethod def __init_subclass__(cls, **kwargs): @@ -64,7 +66,7 @@ class IPHostType(BaseAttribute): ip = Field(String) hostmask = Field(String) netmask = Field(String) - prefixlen = Field(String) + prefixlen = Field(Int) version = Field(Int) with_hostmask = Field(String) with_netmask = Field(String) @@ -80,7 +82,7 @@ class IPNetworkType(BaseAttribute): broadcast_address = Field(String) hostmask = Field(String) netmask = Field(String) - prefixlen = Field(String) + prefixlen = Field(Int) num_addresses = Field(Int) version = Field(Int) with_hostmask = Field(String) diff --git a/backend/infrahub/graphql/types/node.py b/backend/infrahub/graphql/types/node.py index 5673b83615..9b521e7d86 100644 --- a/backend/infrahub/graphql/types/node.py +++ b/backend/infrahub/graphql/types/node.py @@ -1,9 +1,11 @@ from __future__ import annotations +from typing import Optional, Union + from graphene import ObjectType from graphene.types.objecttype import ObjectTypeOptions -from infrahub.core.schema import GenericSchema, NodeSchema +from infrahub.core.schema import GenericSchema, NodeSchema, ProfileSchema from .mixin import GetListMixin @@ -14,8 +16,14 @@ class InfrahubObjectOptions(ObjectTypeOptions): class InfrahubObject(ObjectType, GetListMixin): @classmethod - def __init_subclass_with_meta__(cls, schema: NodeSchema = None, interfaces=(), _meta=None, **options): # pylint: disable=arguments-differ - if not isinstance(schema, (NodeSchema, GenericSchema)): + def __init_subclass_with_meta__( + cls, + schema: Optional[Union[NodeSchema, GenericSchema, ProfileSchema]] = None, + interfaces=(), + _meta=None, + **options, + ): # pylint: disable=arguments-differ + if not isinstance(schema, (NodeSchema, GenericSchema, ProfileSchema)): raise ValueError(f"You need to pass a valid NodeSchema in '{cls.__name__}.Meta', received '{schema}'") if not _meta: diff --git a/backend/infrahub/message_bus/messages/__init__.py b/backend/infrahub/message_bus/messages/__init__.py index 1edc434565..13124a8eca 100644 --- a/backend/infrahub/message_bus/messages/__init__.py +++ b/backend/infrahub/message_bus/messages/__init__.py @@ -3,6 +3,7 @@ from infrahub.message_bus import InfrahubMessage, InfrahubResponse from .check_artifact_create import CheckArtifactCreate +from .check_generator_run import CheckGeneratorRun from .check_repository_checkdefinition import 
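Attribute types gain `is_default` and `is_from_profile`, and `prefixlen` on the IP types switches from `String` to `Int`. A sketch of a query exercising the new fields; the `InfraDevice` kind and `primary_address` attribute are made-up names for illustration:

```python
# Illustrative only: the kind and attribute names below are assumptions.
DEVICE_QUERY = """
query {
  InfraDevice {
    edges {
      node {
        primary_address {
          value
          prefixlen        # now returned as an Int rather than a String
          is_default       # new field on AttributeInterface
          is_from_profile  # new field on BaseAttribute
        }
      }
    }
  }
}
"""
```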
CheckRepositoryCheckDefinition from .check_repository_mergeconflicts import CheckRepositoryMergeConflicts from .check_repository_usercheck import CheckRepositoryUserCheck @@ -27,6 +28,9 @@ from .request_artifact_generate import RequestArtifactGenerate from .request_artifactdefinition_check import RequestArtifactDefinitionCheck from .request_artifactdefinition_generate import RequestArtifactDefinitionGenerate +from .request_generator_run import RequestGeneratorRun +from .request_generatordefinition_check import RequestGeneratorDefinitionCheck +from .request_generatordefinition_run import RequestGeneratorDefinitionRun from .request_git_createbranch import RequestGitCreateBranch from .request_git_sync import RequestGitSync from .request_graphqlquerygroup_update import RequestGraphQLQueryGroupUpdate @@ -35,6 +39,7 @@ from .request_proposedchange_pipeline import RequestProposedChangePipeline from .request_proposedchange_refreshartifacts import RequestProposedChangeRefreshArtifacts from .request_proposedchange_repositorychecks import RequestProposedChangeRepositoryChecks +from .request_proposedchange_rungenerators import RequestProposedChangeRunGenerators from .request_proposedchange_runtests import RequestProposedChangeRunTests from .request_proposedchange_schemaintegrity import RequestProposedChangeSchemaIntegrity from .request_repository_checks import RequestRepositoryChecks @@ -46,11 +51,14 @@ from .transform_jinja_template import TransformJinjaTemplate, TransformJinjaTemplateResponse from .transform_python_data import TransformPythonData, TransformPythonDataResponse from .trigger_artifact_definition_generate import TriggerArtifactDefinitionGenerate +from .trigger_generatordefinition_run import TriggerGeneratorDefinitionRun +from .trigger_ipam_reconciliation import TriggerIpamReconciliation from .trigger_proposed_change_cancel import TriggerProposedChangeCancel from .trigger_webhook_actions import TriggerWebhookActions MESSAGE_MAP: Dict[str, Type[InfrahubMessage]] = { "check.artifact.create": CheckArtifactCreate, + "check.generator.run": CheckGeneratorRun, "check.repository.check_definition": CheckRepositoryCheckDefinition, "check.repository.merge_conflicts": CheckRepositoryMergeConflicts, "check.repository.user_check": CheckRepositoryUserCheck, @@ -77,6 +85,9 @@ "request.artifact.generate": RequestArtifactGenerate, "request.artifact_definition.check": RequestArtifactDefinitionCheck, "request.artifact_definition.generate": RequestArtifactDefinitionGenerate, + "request.generator.run": RequestGeneratorRun, + "request.generator_definition.check": RequestGeneratorDefinitionCheck, + "request.generator_definition.run": RequestGeneratorDefinitionRun, "request.git.create_branch": RequestGitCreateBranch, "request.git.sync": RequestGitSync, "request.graphql_query_group.update": RequestGraphQLQueryGroupUpdate, @@ -85,6 +96,7 @@ "request.proposed_change.pipeline": RequestProposedChangePipeline, "request.proposed_change.refresh_artifacts": RequestProposedChangeRefreshArtifacts, "request.proposed_change.repository_checks": RequestProposedChangeRepositoryChecks, + "request.proposed_change.run_generators": RequestProposedChangeRunGenerators, "request.proposed_change.schema_integrity": RequestProposedChangeSchemaIntegrity, "request.proposed_change.run_tests": RequestProposedChangeRunTests, "request.repository.checks": RequestRepositoryChecks, @@ -94,6 +106,8 @@ "transform.jinja.template": TransformJinjaTemplate, "transform.python.data": TransformPythonData, "trigger.artifact_definition.generate": 
TriggerArtifactDefinitionGenerate, + "trigger.generator_definition.run": TriggerGeneratorDefinitionRun, + "trigger.ipam.reconciliation": TriggerIpamReconciliation, "trigger.proposed_change.cancel": TriggerProposedChangeCancel, "trigger.webhook.actions": TriggerWebhookActions, } @@ -108,26 +122,26 @@ "schema.validator.path": SchemaValidatorPathResponse, } +PRIORITY_MAP = { + "check.artifact.create": 2, + "check.repository.check_definition": 2, + "check.repository.merge_conflicts": 2, + "event.branch.create": 5, + "event.branch.delete": 5, + "event.branch.merge": 5, + "event.schema.update": 5, + "git.diff.names_only": 4, + "git.file.get": 4, + "request.artifact.generate": 2, + "request.git.sync": 4, + "request.proposed_change.pipeline": 5, + "request.proposed_change.repository_checks": 5, + "transform.jinja.template": 4, + "transform.python.data": 4, +} -def message_priority(routing_key: str) -> int: - PRIORITY_MAP = { - "check.artifact.create": 2, - "check.repository.check_definition": 2, - "check.repository.merge_conflicts": 2, - "event.branch.create": 5, - "event.branch.delete": 5, - "event.branch.merge": 5, - "event.schema.update": 5, - "git.diff.names_only": 4, - "git.file.get": 4, - "request.artifact.generate": 2, - "request.git.sync": 4, - "request.proposed_change.pipeline": 5, - "request.proposed_change.repository_checks": 5, - "transform.jinja.template": 4, - "transform.python.data": 4, - } +def message_priority(routing_key: str) -> int: return PRIORITY_MAP.get(routing_key, 3) diff --git a/backend/infrahub/message_bus/messages/check_generator_run.py b/backend/infrahub/message_bus/messages/check_generator_run.py new file mode 100644 index 0000000000..7c8e3f7e77 --- /dev/null +++ b/backend/infrahub/message_bus/messages/check_generator_run.py @@ -0,0 +1,25 @@ +from typing import Optional + +from pydantic import Field + +from infrahub.message_bus import InfrahubMessage +from infrahub.message_bus.types import ProposedChangeGeneratorDefinition + + +class CheckGeneratorRun(InfrahubMessage): + """A check that runs a generator.""" + + generator_definition: ProposedChangeGeneratorDefinition = Field(..., description="The Generator definition") + generator_instance: Optional[str] = Field( + default=None, description="The id of the generator instance if it previously existed" + ) + commit: str = Field(..., description="The commit to target") + repository_id: str = Field(..., description="The unique ID of the Repository") + repository_name: str = Field(..., description="The name of the Repository") + repository_kind: str = Field(..., description="The kind of the Repository") + branch_name: str = Field(..., description="The branch where the check is run") + target_id: str = Field(..., description="The ID of the target object for this generator") + target_name: str = Field(..., description="Name of the generator target") + query: str = Field(..., description="The name of the query to use when collecting data") + variables: dict = Field(..., description="Input variables when running the generator") + validator_id: str = Field(..., description="The ID of the validator") diff --git a/backend/infrahub/message_bus/messages/event_branch_merge.py b/backend/infrahub/message_bus/messages/event_branch_merge.py index b71d584d63..c438340b8c 100644 --- a/backend/infrahub/message_bus/messages/event_branch_merge.py +++ b/backend/infrahub/message_bus/messages/event_branch_merge.py @@ -1,5 +1,6 @@ from pydantic import Field +from infrahub.core.ipam.model import IpamNodeDetails from infrahub.message_bus import 
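With `PRIORITY_MAP` hoisted to module level, `message_priority` becomes a plain dictionary lookup that defaults to 3 for routing keys without an explicit entry:

```python
from infrahub.message_bus.messages import message_priority

assert message_priority("check.artifact.create") == 2   # explicitly mapped
assert message_priority("event.branch.merge") == 5      # explicitly mapped
assert message_priority("request.generator.run") == 3   # not in the map -> default
```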
InfrahubMessage @@ -8,3 +9,4 @@ class EventBranchMerge(InfrahubMessage): source_branch: str = Field(..., description="The source branch") target_branch: str = Field(..., description="The target branch") + ipam_node_details: list[IpamNodeDetails] = Field(default_factory=list, description="Details for changed IP nodes") diff --git a/backend/infrahub/message_bus/messages/event_branch_rebased.py b/backend/infrahub/message_bus/messages/event_branch_rebased.py index 604d9d9bf6..04335ace44 100644 --- a/backend/infrahub/message_bus/messages/event_branch_rebased.py +++ b/backend/infrahub/message_bus/messages/event_branch_rebased.py @@ -1,5 +1,6 @@ from pydantic import Field +from infrahub.core.ipam.model import IpamNodeDetails from infrahub.message_bus import InfrahubMessage @@ -7,3 +8,4 @@ class EventBranchRebased(InfrahubMessage): """Sent when a branch has been rebased.""" branch: str = Field(..., description="The branch that was rebased") + ipam_node_details: list[IpamNodeDetails] = Field(default_factory=list, description="Details for changed IP nodes") diff --git a/backend/infrahub/message_bus/messages/event_schema_update.py b/backend/infrahub/message_bus/messages/event_schema_update.py index 8d92281529..70ad799a82 100644 --- a/backend/infrahub/message_bus/messages/event_schema_update.py +++ b/backend/infrahub/message_bus/messages/event_schema_update.py @@ -6,4 +6,4 @@ class EventSchemaUpdate(InfrahubMessage): """Sent when the schema on a branch has been updated.""" - branch: str = Field(..., description="The branch where the update occured") + branch: str = Field(..., description="The branch where the update occurred") diff --git a/backend/infrahub/message_bus/messages/request_generator_run.py b/backend/infrahub/message_bus/messages/request_generator_run.py new file mode 100644 index 0000000000..e76d736bfe --- /dev/null +++ b/backend/infrahub/message_bus/messages/request_generator_run.py @@ -0,0 +1,24 @@ +from typing import Optional + +from pydantic import Field + +from infrahub.message_bus import InfrahubMessage +from infrahub.message_bus.types import ProposedChangeGeneratorDefinition + + +class RequestGeneratorRun(InfrahubMessage): + """Runs a generator.""" + + generator_definition: ProposedChangeGeneratorDefinition = Field(..., description="The Generator definition") + generator_instance: Optional[str] = Field( + default=None, description="The id of the generator instance if it previously existed" + ) + commit: str = Field(..., description="The commit to target") + repository_id: str = Field(..., description="The unique ID of the Repository") + repository_name: str = Field(..., description="The name of the Repository") + repository_kind: str = Field(..., description="The kind of the Repository") + branch_name: str = Field(..., description="The branch where the check is run") + target_id: str = Field(..., description="The ID of the target object for this generator") + target_name: str = Field(..., description="Name of the generator target") + query: str = Field(..., description="The name of the query to use when collecting data") + variables: dict = Field(..., description="Input variables when running the generator") diff --git a/backend/infrahub/message_bus/messages/request_generatordefinition_check.py b/backend/infrahub/message_bus/messages/request_generatordefinition_check.py new file mode 100644 index 0000000000..c2231aa7cf --- /dev/null +++ b/backend/infrahub/message_bus/messages/request_generatordefinition_check.py @@ -0,0 +1,17 @@ +from pydantic import ConfigDict, Field + +from 
infrahub.message_bus import InfrahubMessage +from infrahub.message_bus.types import ProposedChangeBranchDiff, ProposedChangeGeneratorDefinition + + +class RequestGeneratorDefinitionCheck(InfrahubMessage): + """Sent to trigger Generators to run for a proposed change.""" + + model_config = ConfigDict(arbitrary_types_allowed=True) + + generator_definition: ProposedChangeGeneratorDefinition = Field(..., description="The Generator Definition") + branch_diff: ProposedChangeBranchDiff = Field(..., description="The calculated diff between the two branches") + proposed_change: str = Field(..., description="The unique ID of the Proposed Change") + source_branch: str = Field(..., description="The source branch") + source_branch_sync_with_git: bool = Field(..., description="Indicates if the source branch should sync with git") + destination_branch: str = Field(..., description="The target branch") diff --git a/backend/infrahub/message_bus/messages/request_generatordefinition_run.py b/backend/infrahub/message_bus/messages/request_generatordefinition_run.py new file mode 100644 index 0000000000..a2fe329723 --- /dev/null +++ b/backend/infrahub/message_bus/messages/request_generatordefinition_run.py @@ -0,0 +1,13 @@ +from pydantic import ConfigDict, Field + +from infrahub.message_bus import InfrahubMessage +from infrahub.message_bus.types import ProposedChangeGeneratorDefinition + + +class RequestGeneratorDefinitionRun(InfrahubMessage): + """Sent to trigger a Generator to run on a specific branch.""" + + model_config = ConfigDict(arbitrary_types_allowed=True) + + generator_definition: ProposedChangeGeneratorDefinition = Field(..., description="The Generator Definition") + branch: str = Field(..., description="The branch to target") diff --git a/backend/infrahub/message_bus/messages/request_proposedchange_pipeline.py b/backend/infrahub/message_bus/messages/request_proposedchange_pipeline.py index ed187f5ba1..cb71624699 100644 --- a/backend/infrahub/message_bus/messages/request_proposedchange_pipeline.py +++ b/backend/infrahub/message_bus/messages/request_proposedchange_pipeline.py @@ -7,7 +7,7 @@ class RequestProposedChangePipeline(InfrahubMessage): """Sent request the start of a pipeline connected to a proposed change.""" - proposed_change: str = Field(..., description="The unique ID of the proposed phange") + proposed_change: str = Field(..., description="The unique ID of the proposed change") source_branch: str = Field(..., description="The source branch of the proposed change") source_branch_sync_with_git: bool = Field(..., description="Indicates if the source branch should sync with git") destination_branch: str = Field(..., description="The destination branch of the proposed change") diff --git a/backend/infrahub/message_bus/messages/request_proposedchange_rungenerators.py b/backend/infrahub/message_bus/messages/request_proposedchange_rungenerators.py new file mode 100644 index 0000000000..d7dd55a199 --- /dev/null +++ b/backend/infrahub/message_bus/messages/request_proposedchange_rungenerators.py @@ -0,0 +1,16 @@ +from pydantic import ConfigDict, Field + +from infrahub.message_bus import InfrahubMessage +from infrahub.message_bus.types import ProposedChangeBranchDiff + + +class RequestProposedChangeRunGenerators(InfrahubMessage): + """Sent trigger the generators that are impacted by the proposed change to run.""" + + model_config = ConfigDict(arbitrary_types_allowed=True) + + proposed_change: str = Field(..., description="The unique ID of the Proposed Change") + source_branch: str = Field(..., 
description="The source branch of the proposed change") + source_branch_sync_with_git: bool = Field(..., description="Indicates if the source branch should sync with git") + destination_branch: str = Field(..., description="The destination branch of the proposed change") + branch_diff: ProposedChangeBranchDiff = Field(..., description="The calculated diff between the two branches") diff --git a/backend/infrahub/message_bus/messages/schema_migration_path.py b/backend/infrahub/message_bus/messages/schema_migration_path.py index b78d37f97e..ac287df147 100644 --- a/backend/infrahub/message_bus/messages/schema_migration_path.py +++ b/backend/infrahub/message_bus/messages/schema_migration_path.py @@ -1,12 +1,12 @@ from __future__ import annotations -from typing import List, Optional, Union +from typing import List, Optional from pydantic import Field from infrahub.core.branch import Branch # noqa: TCH001 from infrahub.core.path import SchemaPath # noqa: TCH001 -from infrahub.core.schema import GenericSchema, NodeSchema # noqa: TCH001 +from infrahub.core.schema import MainSchemaTypes # noqa: TCH001 from infrahub.message_bus import InfrahubMessage, InfrahubResponse, InfrahubResponseData ROUTING_KEY = "schema.migration.path" @@ -15,10 +15,8 @@ class SchemaMigrationPath(InfrahubMessage): branch: Branch = Field(..., description="The name of the branch to target") migration_name: str = Field(..., description="The name of the migration to run") - new_node_schema: Optional[Union[NodeSchema, GenericSchema]] = Field( - None, description="new Schema of Node or Generic to process" - ) - previous_node_schema: Optional[Union[NodeSchema, GenericSchema]] = Field( + new_node_schema: Optional[MainSchemaTypes] = Field(None, description="new Schema of Node or Generic to process") + previous_node_schema: Optional[MainSchemaTypes] = Field( None, description="Previous Schema of Node or Generic to process" ) schema_path: SchemaPath = Field(..., description="SchemaPath to the element of the schema to migrate") diff --git a/backend/infrahub/message_bus/messages/trigger_generatordefinition_run.py b/backend/infrahub/message_bus/messages/trigger_generatordefinition_run.py new file mode 100644 index 0000000000..59088416ae --- /dev/null +++ b/backend/infrahub/message_bus/messages/trigger_generatordefinition_run.py @@ -0,0 +1,9 @@ +from pydantic import Field + +from infrahub.message_bus import InfrahubMessage + + +class TriggerGeneratorDefinitionRun(InfrahubMessage): + """Triggers all Generators to run on the desired branch.""" + + branch: str = Field(..., description="The branch to run the Generators in") diff --git a/backend/infrahub/message_bus/messages/trigger_ipam_reconciliation.py b/backend/infrahub/message_bus/messages/trigger_ipam_reconciliation.py new file mode 100644 index 0000000000..af2cfd9368 --- /dev/null +++ b/backend/infrahub/message_bus/messages/trigger_ipam_reconciliation.py @@ -0,0 +1,11 @@ +from pydantic import Field + +from infrahub.core.ipam.model import IpamNodeDetails +from infrahub.message_bus import InfrahubMessage + + +class TriggerIpamReconciliation(InfrahubMessage): + """Sent after a branch has been merged/rebased to reconcile changed IP Prefix and Address nodes""" + + branch: str = Field(..., description="The updated branch") + ipam_node_details: list[IpamNodeDetails] = Field(..., description="Details for changed IP nodes") diff --git a/backend/infrahub/message_bus/operations/__init__.py b/backend/infrahub/message_bus/operations/__init__.py index 2dd0e4e963..6b4f0ed155 100644 --- 
a/backend/infrahub/message_bus/operations/__init__.py +++ b/backend/infrahub/message_bus/operations/__init__.py @@ -1,4 +1,4 @@ -import json +import ujson from infrahub.message_bus import RPCErrorResponse, messages from infrahub.message_bus.operations import ( @@ -19,6 +19,7 @@ COMMAND_MAP = { "check.artifact.create": check.artifact.create, + "check.generator.run": check.generator.run, "check.repository.check_definition": check.repository.check_definition, "check.repository.merge_conflicts": check.repository.merge_conflicts, "check.repository.user_check": check.repository.user_check, @@ -40,6 +41,9 @@ "refresh.registry.branches": refresh.registry.branches, "refresh.registry.rebased_branch": refresh.registry.rebased_branch, "refresh.webhook.configuration": refresh.webhook.configuration, + "request.generator.run": requests.generator.run, + "request.generator_definition.check": requests.generator_definition.check, + "request.generator_definition.run": requests.generator_definition.run, "request.git.create_branch": requests.git.create_branch, "request.git.sync": requests.git.sync, "request.graphql_query_group.update": requests.graphql_query_group.update, @@ -51,6 +55,7 @@ "request.proposed_change.pipeline": requests.proposed_change.pipeline, "request.proposed_change.refresh_artifacts": requests.proposed_change.refresh_artifacts, "request.proposed_change.repository_checks": requests.proposed_change.repository_checks, + "request.proposed_change.run_generators": requests.proposed_change.run_generators, "request.proposed_change.run_tests": requests.proposed_change.run_tests, "request.proposed_change.schema_integrity": requests.proposed_change.schema_integrity, "request.repository.checks": requests.repository.checks, @@ -62,13 +67,15 @@ "transform.jinja.template": transform.jinja.template, "transform.python.data": transform.python.data, "trigger.artifact_definition.generate": trigger.artifact_definition.generate, + "trigger.generator_definition.run": trigger.generator_definition.run, + "trigger.ipam.reconciliation": trigger.ipam.reconciliation, "trigger.proposed_change.cancel": trigger.proposed_change.cancel, "trigger.webhook.actions": trigger.webhook.actions, } async def execute_message(routing_key: str, message_body: bytes, service: InfrahubServices): - message_data = json.loads(message_body) + message_data = ujson.loads(message_body) message = messages.MESSAGE_MAP[routing_key](**message_data) message.set_log_data(routing_key=routing_key) try: diff --git a/backend/infrahub/message_bus/operations/check/__init__.py b/backend/infrahub/message_bus/operations/check/__init__.py index 127e46d80b..885cf55c5e 100644 --- a/backend/infrahub/message_bus/operations/check/__init__.py +++ b/backend/infrahub/message_bus/operations/check/__init__.py @@ -1,3 +1,3 @@ -from . import artifact, repository +from . 
import artifact, generator, repository -__all__ = ["artifact", "repository"] +__all__ = ["artifact", "generator", "repository"] diff --git a/backend/infrahub/message_bus/operations/check/generator.py b/backend/infrahub/message_bus/operations/check/generator.py new file mode 100644 index 0000000000..1731f0ecd6 --- /dev/null +++ b/backend/infrahub/message_bus/operations/check/generator.py @@ -0,0 +1,138 @@ +import os + +from infrahub_sdk import InfrahubNode +from infrahub_sdk.exceptions import ModuleImportError +from infrahub_sdk.schema import InfrahubGeneratorDefinitionConfig + +from infrahub import lock +from infrahub.core.constants import GeneratorInstanceStatus, InfrahubKind, ValidatorConclusion +from infrahub.core.timestamp import Timestamp +from infrahub.git.repository import extract_repo_file_information, get_initialized_repo +from infrahub.message_bus import messages +from infrahub.services import InfrahubServices +from infrahub.tasks.check import set_check_status + +# pylint: disable=duplicate-code + + +async def run(message: messages.CheckGeneratorRun, service: InfrahubServices): + repository = await get_initialized_repo( + repository_id=message.repository_id, + name=message.repository_name, + service=service, + repository_kind=message.repository_kind, + ) + + conclusion = ValidatorConclusion.SUCCESS + + generator_definition = InfrahubGeneratorDefinitionConfig( + name=message.generator_definition.definition_name, + class_name=message.generator_definition.class_name, + file_path=message.generator_definition.file_path, + query=message.generator_definition.query_name, + targets=message.generator_definition.group_id, + convert_query_response=message.generator_definition.convert_query_response, + ) + + commit_worktree = repository.get_commit_worktree(commit=message.commit) + + file_info = extract_repo_file_information( + full_filename=os.path.join(commit_worktree.directory, generator_definition.file_path.as_posix()), + repo_directory=repository.directory_root, + worktree_directory=commit_worktree.directory, + ) + generator_instance = await _define_instance(message=message, service=service) + + check_message = "Instance successfully generated" + try: + generator_class = generator_definition.load_class( + import_root=repository.directory_root, relative_path=file_info.relative_repo_path_dir + ) + + generator = generator_class( + query=generator_definition.query, + client=service.client, + branch=message.branch_name, + params=message.variables, + generator_instance=generator_instance.id, + convert_query_response=generator_definition.convert_query_response, + infrahub_node=InfrahubNode, + ) + await generator.run(identifier=generator_definition.name) + generator_instance.status.value = GeneratorInstanceStatus.READY.value + except ModuleImportError as exc: + conclusion = ValidatorConclusion.FAILURE + generator_instance.status.value = GeneratorInstanceStatus.ERROR.value + check_message = f"Failed to import generator: {exc.message}" + except Exception as exc: # pylint: disable=broad-exception-caught + conclusion = ValidatorConclusion.FAILURE + generator_instance.status.value = GeneratorInstanceStatus.ERROR.value + check_message = f"Failed to execute generator: {str(exc)}" + + await generator_instance.update(do_full_update=True) + + check = None + existing_check = await service.client.filters( + kind=InfrahubKind.GENERATORCHECK, validator__ids=message.validator_id, instance__value=generator_instance.id + ) + if existing_check: + check = existing_check[0] + + if check: + check.created_at.value = 
Timestamp().to_string() + check.conclusion.value = conclusion.value + await check.save() + else: + check = await service.client.create( + kind=InfrahubKind.GENERATORCHECK, + data={ + "name": message.target_name, + "origin": message.repository_id, + "kind": "GeneratorDefinition", + "validator": message.validator_id, + "created_at": Timestamp().to_string(), + "message": check_message, + "conclusion": conclusion.value, + "instance": generator_instance.id, + }, + ) + await check.save() + + await set_check_status(message=message, conclusion=conclusion.value, service=service) + + +async def _define_instance(message: messages.CheckGeneratorRun, service: InfrahubServices) -> InfrahubNode: + if message.generator_instance: + instance = await service.client.get( + kind=InfrahubKind.GENERATORINSTANCE, id=message.generator_instance, branch=message.branch_name + ) + instance.status.value = GeneratorInstanceStatus.PENDING.value + await instance.update(do_full_update=True) + + else: + async with lock.registry.get( + f"{message.target_id}-{message.generator_definition.definition_id}", namespace="generator" + ): + instances = await service.client.filters( + kind=InfrahubKind.GENERATORINSTANCE, + definition__ids=[message.generator_definition.definition_id], + object__ids=[message.target_id], + branch=message.branch_name, + ) + if instances: + instance = instances[0] + instance.status.value = GeneratorInstanceStatus.PENDING.value + await instance.update(do_full_update=True) + else: + instance = await service.client.create( + kind=InfrahubKind.GENERATORINSTANCE, + branch=message.branch_name, + data={ + "name": f"{message.generator_definition.definition_name}: {message.target_name}", + "status": GeneratorInstanceStatus.PENDING.value, + "object": message.target_id, + "definition": message.generator_definition.definition_id, + }, + ) + await instance.save() + return instance diff --git a/backend/infrahub/message_bus/operations/check/repository.py b/backend/infrahub/message_bus/operations/check/repository.py index cca3b234a8..ef747bcaae 100644 --- a/backend/infrahub/message_bus/operations/check/repository.py +++ b/backend/infrahub/message_bus/operations/check/repository.py @@ -4,6 +4,7 @@ from infrahub import lock from infrahub.core.constants import InfrahubKind +from infrahub.core.manager import NodeManager from infrahub.core.timestamp import Timestamp from infrahub.exceptions import CheckError from infrahub.git.repository import InfrahubRepository @@ -207,8 +208,7 @@ async def merge_conflicts(message: messages.CheckRepositoryMergeConflicts, servi ) await check.save() - for previous_result in existing_checks.values(): - await previous_result.delete() + await NodeManager.delete(db=service.database, nodes=list(existing_checks.values())) await service.cache.set( key=f"validator_execution_id:{message.validator_execution_id}:check_execution_id:{message.check_execution_id}", diff --git a/backend/infrahub/message_bus/operations/event/branch.py b/backend/infrahub/message_bus/operations/event/branch.py index b156e55efc..9996c85281 100644 --- a/backend/infrahub/message_bus/operations/event/branch.py +++ b/backend/infrahub/message_bus/operations/event/branch.py @@ -37,7 +37,9 @@ async def merge(message: messages.EventBranchMerge, service: InfrahubServices) - events: List[InfrahubMessage] = [ messages.RefreshRegistryBranches(), + messages.TriggerIpamReconciliation(branch=message.target_branch, ipam_node_details=message.ipam_node_details), messages.TriggerArtifactDefinitionGenerate(branch=message.target_branch), + 
messages.TriggerGeneratorDefinitionRun(branch=message.target_branch), ] for event in events: @@ -51,6 +53,10 @@ async def rebased(message: messages.EventBranchRebased, service: InfrahubService events: List[InfrahubMessage] = [ messages.RefreshRegistryRebasedBranch(branch=message.branch), ] + if message.ipam_node_details: + events.append( + messages.TriggerIpamReconciliation(branch=message.branch, ipam_node_details=message.ipam_node_details), + ) for event in events: event.assign_meta(parent=message) diff --git a/backend/infrahub/message_bus/operations/git/repository.py b/backend/infrahub/message_bus/operations/git/repository.py index 9cfd13a8a4..cc825722e1 100644 --- a/backend/infrahub/message_bus/operations/git/repository.py +++ b/backend/infrahub/message_bus/operations/git/repository.py @@ -65,32 +65,37 @@ async def pull_read_only(message: messages.GitRepositoryPullReadOnly, service: I ref=message.ref, commit=message.commit, ) - async with lock.registry.get(name=message.repository_name, namespace="repository"): - init_failed = False - try: - repo = await InfrahubReadOnlyRepository.init( - id=message.repository_id, - name=message.repository_name, - location=message.location, - client=service.client, - ref=message.ref, - infrahub_branch_name=message.infrahub_branch_name, - ) - except RepositoryError: - init_failed = True + async with service.task_report( + related_node=message.repository_id, title="Pulling read-only repository" + ) as task_report: + async with lock.registry.get(name=message.repository_name, namespace="repository"): + init_failed = False + try: + repo = await InfrahubReadOnlyRepository.init( + id=message.repository_id, + name=message.repository_name, + location=message.location, + client=service.client, + ref=message.ref, + infrahub_branch_name=message.infrahub_branch_name, + task_report=task_report, + ) + except RepositoryError: + init_failed = True - if init_failed: - repo = await InfrahubReadOnlyRepository.new( - id=message.repository_id, - name=message.repository_name, - location=message.location, - client=service.client, - ref=message.ref, - infrahub_branch_name=message.infrahub_branch_name, - ) + if init_failed: + repo = await InfrahubReadOnlyRepository.new( + id=message.repository_id, + name=message.repository_name, + location=message.location, + client=service.client, + ref=message.ref, + infrahub_branch_name=message.infrahub_branch_name, + task_report=task_report, + ) - await repo.import_objects_from_files(branch_name=message.infrahub_branch_name, commit=message.commit) - await repo.sync_from_remote(commit=message.commit) + await repo.import_objects_from_files(branch_name=message.infrahub_branch_name, commit=message.commit) + await repo.sync_from_remote(commit=message.commit) async def merge(message: messages.GitRepositoryMerge, service: InfrahubServices) -> None: diff --git a/backend/infrahub/message_bus/operations/refresh/registry.py b/backend/infrahub/message_bus/operations/refresh/registry.py index 5e32690a44..d1cc15f6e4 100644 --- a/backend/infrahub/message_bus/operations/refresh/registry.py +++ b/backend/infrahub/message_bus/operations/refresh/registry.py @@ -14,6 +14,8 @@ async def branches(message: messages.RefreshRegistryBranches, service: InfrahubS async with service.database.start_session() as db: await refresh_branches(db=db) + await service.component.refresh_schema_hash() + async def rebased_branch(message: messages.RefreshRegistryRebasedBranch, service: InfrahubServices) -> None: if message.meta and message.meta.initiator_id == WORKER_IDENTITY: diff 
--git a/backend/infrahub/message_bus/operations/refresh/webhook.py b/backend/infrahub/message_bus/operations/refresh/webhook.py index a081712550..8d07a6e79d 100644 --- a/backend/infrahub/message_bus/operations/refresh/webhook.py +++ b/backend/infrahub/message_bus/operations/refresh/webhook.py @@ -1,4 +1,4 @@ -import json +import ujson from infrahub.core.constants import InfrahubKind from infrahub.message_bus import messages @@ -25,7 +25,7 @@ async def configuration( "validate_certificates": webhook.validate_certificates.value, }, } - await service.cache.set(key=webhook_key, value=json.dumps(payload)) + await service.cache.set(key=webhook_key, value=ujson.dumps(payload)) for webhook in custom_webhooks: webhook_key = f"webhook:active:{webhook.id}" @@ -39,7 +39,7 @@ async def configuration( } if webhook.transformation.id: transform = await service.client.get( - kind="CoreTransformPython", + kind=InfrahubKind.TRANSFORMPYTHON, id=webhook.transformation.id, prefetch_relationships=True, populate_store=True, @@ -52,7 +52,7 @@ async def configuration( payload["webhook_configuration"]["repository_id"] = transform.repository.id payload["webhook_configuration"]["repository_name"] = transform.repository.peer.name.value - await service.cache.set(key=webhook_key, value=json.dumps(payload)) + await service.cache.set(key=webhook_key, value=ujson.dumps(payload)) cached_webhooks = await service.cache.list_keys(filter_pattern="webhook:active:*") for cached_webhook in cached_webhooks: diff --git a/backend/infrahub/message_bus/operations/requests/__init__.py b/backend/infrahub/message_bus/operations/requests/__init__.py index b0d208f44e..adf7c8e49e 100644 --- a/backend/infrahub/message_bus/operations/requests/__init__.py +++ b/backend/infrahub/message_bus/operations/requests/__init__.py @@ -1,3 +1,21 @@ -from . import artifact, artifact_definition, git, graphql_query_group, proposed_change, repository +from . 
import ( + artifact, + artifact_definition, + generator, + generator_definition, + git, + graphql_query_group, + proposed_change, + repository, +) -__all__ = ["artifact", "artifact_definition", "git", "graphql_query_group", "proposed_change", "repository"] +__all__ = [ + "artifact", + "artifact_definition", + "generator", + "generator_definition", + "git", + "graphql_query_group", + "proposed_change", + "repository", +] diff --git a/backend/infrahub/message_bus/operations/requests/artifact_definition.py b/backend/infrahub/message_bus/operations/requests/artifact_definition.py index f0c3d81b8a..439496c95b 100644 --- a/backend/infrahub/message_bus/operations/requests/artifact_definition.py +++ b/backend/infrahub/message_bus/operations/requests/artifact_definition.py @@ -174,7 +174,7 @@ async def generate(message: messages.RequestArtifactDefinitionGenerate, service: if transform.typename == InfrahubKind.TRANSFORMJINJA2: transform_location = transform.template_path.value - elif transform.typename == "CoreTransformPython": + elif transform.typename == InfrahubKind.TRANSFORMPYTHON: transform_location = f"{transform.file_path.value}::{transform.class_name.value}" events = [] diff --git a/backend/infrahub/message_bus/operations/requests/generator.py b/backend/infrahub/message_bus/operations/requests/generator.py new file mode 100644 index 0000000000..1affe02ad1 --- /dev/null +++ b/backend/infrahub/message_bus/operations/requests/generator.py @@ -0,0 +1,98 @@ +import os + +from infrahub_sdk import InfrahubNode +from infrahub_sdk.exceptions import ModuleImportError +from infrahub_sdk.schema import InfrahubGeneratorDefinitionConfig + +from infrahub import lock +from infrahub.core.constants import GeneratorInstanceStatus, InfrahubKind +from infrahub.git.repository import extract_repo_file_information, get_initialized_repo +from infrahub.message_bus import messages +from infrahub.services import InfrahubServices + + +async def run(message: messages.RequestGeneratorRun, service: InfrahubServices): + repository = await get_initialized_repo( + repository_id=message.repository_id, + name=message.repository_name, + service=service, + repository_kind=message.repository_kind, + ) + + generator_definition = InfrahubGeneratorDefinitionConfig( + name=message.generator_definition.definition_name, + class_name=message.generator_definition.class_name, + file_path=message.generator_definition.file_path, + query=message.generator_definition.query_name, + targets=message.generator_definition.group_id, + convert_query_response=message.generator_definition.convert_query_response, + ) + + commit_worktree = repository.get_commit_worktree(commit=message.commit) + + file_info = extract_repo_file_information( + full_filename=os.path.join(commit_worktree.directory, generator_definition.file_path.as_posix()), + repo_directory=repository.directory_root, + worktree_directory=commit_worktree.directory, + ) + generator_instance = await _define_instance(message=message, service=service) + + try: + generator_class = generator_definition.load_class( + import_root=repository.directory_root, relative_path=file_info.relative_repo_path_dir + ) + + generator = generator_class( + query=generator_definition.query, + client=service.client, + branch=message.branch_name, + params=message.variables, + generator_instance=generator_instance.id, + convert_query_response=generator_definition.convert_query_response, + infrahub_node=InfrahubNode, + ) + await generator.run(identifier=generator_definition.name) + generator_instance.status.value = 
GeneratorInstanceStatus.READY.value + except ModuleImportError: + generator_instance.status.value = GeneratorInstanceStatus.ERROR.value + except Exception: # pylint: disable=broad-exception-caught + generator_instance.status.value = GeneratorInstanceStatus.ERROR.value + + await generator_instance.update(do_full_update=True) + + +async def _define_instance(message: messages.RequestGeneratorRun, service: InfrahubServices) -> InfrahubNode: + if message.generator_instance: + instance = await service.client.get( + kind=InfrahubKind.GENERATORINSTANCE, id=message.generator_instance, branch=message.branch_name + ) + instance.status.value = GeneratorInstanceStatus.PENDING.value + await instance.update(do_full_update=True) + + else: + async with lock.registry.get( + f"{message.target_id}-{message.generator_definition.definition_id}", namespace="generator" + ): + instances = await service.client.filters( + kind=InfrahubKind.GENERATORINSTANCE, + definition__ids=[message.generator_definition.definition_id], + object__ids=[message.target_id], + branch=message.branch_name, + ) + if instances: + instance = instances[0] + instance.status.value = GeneratorInstanceStatus.PENDING.value + await instance.update(do_full_update=True) + else: + instance = await service.client.create( + kind=InfrahubKind.GENERATORINSTANCE, + branch=message.branch_name, + data={ + "name": f"{message.generator_definition.definition_name}: {message.target_name}", + "status": GeneratorInstanceStatus.PENDING.value, + "object": message.target_id, + "definition": message.generator_definition.definition_id, + }, + ) + await instance.save() + return instance diff --git a/backend/infrahub/message_bus/operations/requests/generator_definition.py b/backend/infrahub/message_bus/operations/requests/generator_definition.py new file mode 100644 index 0000000000..54d3b500ba --- /dev/null +++ b/backend/infrahub/message_bus/operations/requests/generator_definition.py @@ -0,0 +1,201 @@ +from typing import List, Optional + +from infrahub_sdk import UUIDT + +from infrahub.core.constants import InfrahubKind, ValidatorConclusion, ValidatorState +from infrahub.core.timestamp import Timestamp +from infrahub.message_bus import InfrahubMessage, Meta, messages +from infrahub.services import InfrahubServices + + +async def check(message: messages.RequestGeneratorDefinitionCheck, service: InfrahubServices) -> None: + async with service.task_report( + title=f"Generator Definition: {message.generator_definition.definition_name}", + related_node=message.proposed_change, + ) as task_report: + service.log.info( + "Validating Generator selection", + generator_definition=message.generator_definition.definition_id, + source_branch=message.source_branch, + ) + events: List[InfrahubMessage] = [] + + proposed_change = await service.client.get(kind=InfrahubKind.PROPOSEDCHANGE, id=message.proposed_change) + + validator_name = f"Generator Validator: {message.generator_definition.definition_name}" + validator_execution_id = str(UUIDT()) + check_execution_ids: List[str] = [] + + await proposed_change.validations.fetch() + + validator = None + for relationship in proposed_change.validations.peers: + existing_validator = relationship.peer + if ( + existing_validator.typename == InfrahubKind.GENERATORVALIDATOR + and existing_validator.definition.id == message.generator_definition.definition_id + ): + validator = existing_validator + + if validator: + validator.conclusion.value = ValidatorConclusion.UNKNOWN.value + validator.state.value = ValidatorState.QUEUED.value + 
validator.started_at.value = "" + validator.completed_at.value = "" + await validator.save() + else: + validator = await service.client.create( + kind=InfrahubKind.GENERATORVALIDATOR, + data={ + "label": validator_name, + "proposed_change": message.proposed_change, + "definition": message.generator_definition.definition_id, + }, + ) + await validator.save() + + group = await service.client.get( + kind=InfrahubKind.GENERICGROUP, + prefetch_relationships=True, + populate_store=True, + id=message.generator_definition.group_id, + branch=message.source_branch, + ) + await group.members.fetch() + + existing_instances = await service.client.filters( + kind=InfrahubKind.GENERATORINSTANCE, + definition__ids=[message.generator_definition.definition_id], + include=["object"], + branch=message.source_branch, + ) + instance_by_member = {} + for instance in existing_instances: + instance_by_member[instance.object.peer.id] = instance.id + + repository = message.branch_diff.get_repository(repository_id=message.generator_definition.repository_id) + requested_instances = 0 + impacted_instances = message.branch_diff.get_subscribers_ids(kind=InfrahubKind.GENERATORINSTANCE) + + for relationship in group.members.peers: + member = relationship.peer + generator_instance = instance_by_member.get(member.id) + if _run_generator( + instance_id=generator_instance, + managed_branch=message.source_branch_sync_with_git, + impacted_instances=impacted_instances, + ): + check_execution_id = str(UUIDT()) + check_execution_ids.append(check_execution_id) + requested_instances += 1 + events.append( + messages.CheckGeneratorRun( + generator_definition=message.generator_definition, + generator_instance=generator_instance, + commit=repository.source_commit, + repository_id=repository.repository_id, + repository_name=repository.repository_name, + repository_kind=repository.kind, + branch_name=message.source_branch, + query=message.generator_definition.query_name, + variables=member.extract(params=message.generator_definition.parameters), + target_id=member.id, + target_name=member.name.value, + validator_id=validator.id, + meta=Meta(validator_execution_id=validator_execution_id, check_execution_id=check_execution_id), + ) + ) + + checks_in_execution = ",".join(check_execution_ids) + await service.cache.set( + key=f"validator_execution_id:{validator_execution_id}:checks", value=checks_in_execution, expires=7200 + ) + events.append( + messages.FinalizeValidatorExecution( + start_time=Timestamp().to_string(), + validator_id=validator.id, + validator_execution_id=validator_execution_id, + validator_type=InfrahubKind.GENERATORVALIDATOR, + ) + ) + await task_report.info(event=f"{requested_instances} generator instances required to be executed.") + for event in events: + event.assign_meta(parent=message) + await service.send(message=event) + + +async def run(message: messages.RequestGeneratorDefinitionRun, service: InfrahubServices) -> None: + async with service.task_report( + title="Executing Generator", + related_node=message.generator_definition.definition_id, + ) as task_report: + service.log.info( + "Received request to run generator", + branch=message.branch, + generator_definition=message.generator_definition.definition_id, + ) + events: List[InfrahubMessage] = [] + + group = await service.client.get( + kind=InfrahubKind.GENERICGROUP, + prefetch_relationships=True, + populate_store=True, + id=message.generator_definition.group_id, + branch=message.branch, + ) + await group.members.fetch() + + existing_instances = await 
service.client.filters( + kind=InfrahubKind.GENERATORINSTANCE, + definition__ids=[message.generator_definition.definition_id], + include=["object"], + branch=message.branch, + ) + instance_by_member = {} + for instance in existing_instances: + instance_by_member[instance.object.peer.id] = instance.id + + repository = await service.client.get( + kind=InfrahubKind.REPOSITORY, branch=message.branch, id=message.generator_definition.repository_id + ) + + for relationship in group.members.peers: + member = relationship.peer + generator_instance = instance_by_member.get(member.id) + events.append( + messages.RequestGeneratorRun( + generator_definition=message.generator_definition, + commit=repository.commit.value, + generator_instance=generator_instance, + repository_id=repository.id, + repository_name=repository.name.value, + repository_kind=repository.typename, + branch_name=message.branch, + query=message.generator_definition.query_name, + variables=member.extract(params=message.generator_definition.parameters), + target_id=member.id, + target_name=member.name.value, + ) + ) + + await task_report.info( + event=f"Generator triggered for {len(group.members.peers)} members in {group.name.value}." + ) + + for event in events: + event.assign_meta(parent=message) + await service.send(message=event) + + +def _run_generator(instance_id: Optional[str], managed_branch: bool, impacted_instances: list[str]) -> bool: + """Returns a boolean to indicate if a generator instance needs to be executed + Will return true if: + * The instance_id wasn't set which could be that it's a new object that doesn't have a previous generator instance + * The source branch is set to sync with Git which would indicate that it could contain updates in git to the generator + * The instance_id exists in the impacted_instances list + Will return false if: + * The source branch is a not one that syncs with git and the instance_id exists and is not in the impacted list + """ + if not instance_id or managed_branch: + return True + return instance_id in impacted_instances diff --git a/backend/infrahub/message_bus/operations/requests/graphql_query_group.py b/backend/infrahub/message_bus/operations/requests/graphql_query_group.py index 2d5316375f..e082c46812 100644 --- a/backend/infrahub/message_bus/operations/requests/graphql_query_group.py +++ b/backend/infrahub/message_bus/operations/requests/graphql_query_group.py @@ -1,6 +1,6 @@ from typing import List -from infrahub_sdk import InfrahubClient, InfrahubNode, Timestamp +from infrahub_sdk import InfrahubClient, InfrahubNode from infrahub_sdk.utils import dict_hash from infrahub.core.constants import InfrahubKind @@ -48,7 +48,7 @@ async def update(message: messages.RequestGraphQLQueryGroupUpdate, service: Infr parameters=message.params, members=message.related_node_ids, ) - await group.save(at=Timestamp(), allow_upsert=True) + await group.save(allow_upsert=True) if message.subscribers: await group_add_subscriber( diff --git a/backend/infrahub/message_bus/operations/requests/proposed_change.py b/backend/infrahub/message_bus/operations/requests/proposed_change.py index f99dcb4d7d..1a7104d370 100644 --- a/backend/infrahub/message_bus/operations/requests/proposed_change.py +++ b/backend/infrahub/message_bus/operations/requests/proposed_change.py @@ -26,6 +26,7 @@ from infrahub.message_bus.types import ( ProposedChangeArtifactDefinition, ProposedChangeBranchDiff, + ProposedChangeGeneratorDefinition, ProposedChangeRepository, ProposedChangeSubscriber, ) @@ -42,13 +43,13 @@ log = 
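`_run_generator` condenses the selection rules into two checks: run when there is no prior instance or when the source branch syncs with Git, otherwise run only if the existing instance is in the impacted list. A few concrete cases (the instance IDs are placeholders):

```python
from infrahub.message_bus.operations.requests.generator_definition import _run_generator

# New target without a previous instance -> always run
assert _run_generator(instance_id=None, managed_branch=False, impacted_instances=[]) is True

# Source branch syncs with Git -> always run, the generator code itself may have changed
assert _run_generator(instance_id="inst-1", managed_branch=True, impacted_instances=[]) is True

# Existing instance on a data-only branch -> run only when the diff impacted it
assert _run_generator(instance_id="inst-1", managed_branch=False, impacted_instances=["inst-1"]) is True
assert _run_generator(instance_id="inst-1", managed_branch=False, impacted_instances=["inst-2"]) is False
```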
get_logger() -class ArtifactSelect(IntFlag): +class DefinitionSelect(IntFlag): NONE = 0 MODIFIED_KINDS = 1 FILE_CHANGES = 2 @staticmethod - def add_flag(current: ArtifactSelect, flag: ArtifactSelect, condition: bool): + def add_flag(current: DefinitionSelect, flag: DefinitionSelect, condition: bool): if condition: return current | flag return current @@ -56,10 +57,10 @@ def add_flag(current: ArtifactSelect, flag: ArtifactSelect, condition: bool): @property def log_line(self) -> str: change_types = [] - if ArtifactSelect.MODIFIED_KINDS in self: + if DefinitionSelect.MODIFIED_KINDS in self: change_types.append("data changes within relevant object kinds") - if ArtifactSelect.FILE_CHANGES in self: + if DefinitionSelect.FILE_CHANGES in self: change_types.append("file modifications in Git repositories") if self: @@ -153,6 +154,18 @@ async def pipeline(message: messages.RequestProposedChangePipeline, service: Inf ) ) + if message.check_type in [CheckType.ALL, CheckType.GENERATOR]: + await task_report.info("Adding Run Generators job", proposed_change=message.proposed_change) + events.append( + messages.RequestProposedChangeRunGenerators( + proposed_change=message.proposed_change, + source_branch=message.source_branch, + source_branch_sync_with_git=message.source_branch_sync_with_git, + destination_branch=message.destination_branch, + branch_diff=branch_diff, + ) + ) + if message.check_type in [CheckType.ALL, CheckType.DATA] and branch_diff.has_node_changes( branch=message.source_branch ): @@ -324,7 +337,7 @@ async def refresh_artifacts(message: messages.RequestProposedChangeRefreshArtifa branch_name=message.source_branch, ) artifact_definitions = _parse_artifact_definitions( - definitions=definition_information["CoreArtifactDefinition"]["edges"] + definitions=definition_information[InfrahubKind.ARTIFACTDEFINITION]["edges"] ) await task_report.info( @@ -339,17 +352,17 @@ async def refresh_artifacts(message: messages.RequestProposedChangeRefreshArtifa # Alternatively if the queries used touches models that have been modified in the path # impacted artifact definitions will be included for consideration - select = ArtifactSelect.NONE + select = DefinitionSelect.NONE select = select.add_flag( current=select, - flag=ArtifactSelect.FILE_CHANGES, + flag=DefinitionSelect.FILE_CHANGES, condition=message.source_branch_sync_with_git and message.branch_diff.has_file_modifications, ) for changed_model in message.branch_diff.modified_kinds(branch=message.source_branch): select = select.add_flag( current=select, - flag=ArtifactSelect.MODIFIED_KINDS, + flag=DefinitionSelect.MODIFIED_KINDS, condition=changed_model in artifact_definition.query_models, ) @@ -369,6 +382,67 @@ async def refresh_artifacts(message: messages.RequestProposedChangeRefreshArtifa await service.send(message=msg) +async def run_generators(message: messages.RequestProposedChangeRunGenerators, service: InfrahubServices) -> None: + async with service.task_report( + related_node=message.proposed_change, + title="Evaluating Generators", + ) as task_report: + generators = await service.client.filters( + kind="CoreGeneratorDefinition", prefetch_relationships=True, populate_store=True + ) + + generator_definitions = [ + ProposedChangeGeneratorDefinition( + definition_id=generator.id, + definition_name=generator.name.value, + class_name=generator.class_name.value, + file_path=generator.file_path.value, + query_name=generator.query.peer.name.value, + query_models=generator.query.peer.models.value, + repository_id=generator.repository.peer.id, + 
parameters=generator.parameters.value, + group_id=generator.targets.peer.id, + convert_query_response=generator.convert_query_response.value, + ) + for generator in generators + ] + + for generator_definition in generator_definitions: + # Request generator definitions if the source branch that is managed in combination + # to the Git repository containing modifications which could indicate changes to the transforms + # in code + # Alternatively if the queries used touches models that have been modified in the path + # impacted artifact definitions will be included for consideration + + select = DefinitionSelect.NONE + select = select.add_flag( + current=select, + flag=DefinitionSelect.FILE_CHANGES, + condition=message.source_branch_sync_with_git and message.branch_diff.has_file_modifications, + ) + + for changed_model in message.branch_diff.modified_kinds(branch=message.source_branch): + select = select.add_flag( + current=select, + flag=DefinitionSelect.MODIFIED_KINDS, + condition=changed_model in generator_definition.query_models, + ) + + await task_report.info(f"{generator_definition.definition_name}: {select.log_line}") + + if select: + msg = messages.RequestGeneratorDefinitionCheck( + generator_definition=generator_definition, + branch_diff=message.branch_diff, + proposed_change=message.proposed_change, + source_branch=message.source_branch, + source_branch_sync_with_git=message.source_branch_sync_with_git, + destination_branch=message.destination_branch, + ) + msg.assign_meta(parent=message) + await service.send(message=msg) + + GATHER_ARTIFACT_DEFINITIONS = """ query GatherArtifactDefinitions { CoreArtifactDefinition { @@ -616,7 +690,7 @@ def _parse_repositories(repositories: list[dict]) -> list[Repository]: Repository( repository_id=repo["node"]["id"], repository_name=repo["node"]["name"]["value"], - read_only=repo["node"]["__typename"] == "CoreReadOnlyRepository", + read_only=repo["node"]["__typename"] == InfrahubKind.READONLYREPOSITORY, commit=repo["node"]["commit"]["value"] or "", ) ) @@ -664,8 +738,10 @@ async def _get_proposed_change_repositories( query=SOURCE_READONLY_REPOSITORIES, branch_name=message.source_branch ) - destination_all = destination_all["CoreGenericRepository"]["edges"] - source_all = source_managed["CoreRepository"]["edges"] + source_readonly["CoreReadOnlyRepository"]["edges"] + destination_all = destination_all[InfrahubKind.GENERICREPOSITORY]["edges"] + source_all = ( + source_managed[InfrahubKind.REPOSITORY]["edges"] + source_readonly[InfrahubKind.READONLYREPOSITORY]["edges"] + ) return _parse_proposed_change_repositories(message=message, source=source_all, destination=destination_all) @@ -705,7 +781,7 @@ async def _populate_subscribers(branch_diff: ProposedChangeBranchDiff, service: variables={"members": branch_diff.modified_nodes(branch=branch)}, ) - for group in result["CoreGraphQLQueryGroup"]["edges"]: + for group in result[InfrahubKind.GRAPHQLQUERYGROUP]["edges"]: for subscriber in group["node"]["subscribers"]["edges"]: branch_diff.subscribers.append( ProposedChangeSubscriber(subscriber_id=subscriber["node"]["id"], kind=subscriber["node"]["__typename"]) diff --git a/backend/infrahub/message_bus/operations/send/webhook.py b/backend/infrahub/message_bus/operations/send/webhook.py index 54c075ec85..59ff511ed3 100644 --- a/backend/infrahub/message_bus/operations/send/webhook.py +++ b/backend/infrahub/message_bus/operations/send/webhook.py @@ -1,6 +1,7 @@ -import json from typing import Dict, Type +import ujson + from infrahub.exceptions import 
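`DefinitionSelect` (the renamed `ArtifactSelect`) is an `IntFlag`, so the two trigger conditions can be accumulated with `add_flag` and the result is truthy as soon as either one matched, which is what gates sending `RequestGeneratorDefinitionCheck` above. A small illustration:

```python
from infrahub.message_bus.operations.requests.proposed_change import DefinitionSelect

select = DefinitionSelect.NONE
select = DefinitionSelect.add_flag(
    current=select, flag=DefinitionSelect.FILE_CHANGES, condition=True
)
select = DefinitionSelect.add_flag(
    current=select, flag=DefinitionSelect.MODIFIED_KINDS, condition=False
)

assert DefinitionSelect.FILE_CHANGES in select
assert DefinitionSelect.MODIFIED_KINDS not in select
assert bool(select)  # truthy -> the check message would be dispatched
```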
NodeNotFoundError from infrahub.message_bus import messages from infrahub.services import InfrahubServices @@ -19,7 +20,7 @@ async def event(message: messages.SendWebhookEvent, service: InfrahubServices) - node_type="Webhook", identifier=message.webhook_id, message="The requested Webhook was not found" ) - webhook_data = json.loads(webhook_definition) + webhook_data = ujson.loads(webhook_definition) payload = {"event_type": message.event_type, "data": message.event_data, "service": service} webhook_map: Dict[str, Type[Webhook]] = { "standard": StandardWebhook, diff --git a/backend/infrahub/message_bus/operations/trigger/__init__.py b/backend/infrahub/message_bus/operations/trigger/__init__.py index cdaf313d56..5d71093f84 100644 --- a/backend/infrahub/message_bus/operations/trigger/__init__.py +++ b/backend/infrahub/message_bus/operations/trigger/__init__.py @@ -1,3 +1,3 @@ -from . import artifact_definition, proposed_change, webhook +from . import artifact_definition, generator_definition, ipam, proposed_change, webhook -__all__ = ["artifact_definition", "proposed_change", "webhook"] +__all__ = ["artifact_definition", "generator_definition", "ipam", "proposed_change", "webhook"] diff --git a/backend/infrahub/message_bus/operations/trigger/generator_definition.py b/backend/infrahub/message_bus/operations/trigger/generator_definition.py new file mode 100644 index 0000000000..c9825add03 --- /dev/null +++ b/backend/infrahub/message_bus/operations/trigger/generator_definition.py @@ -0,0 +1,34 @@ +from infrahub.core.constants import InfrahubKind +from infrahub.message_bus import messages +from infrahub.message_bus.types import ProposedChangeGeneratorDefinition +from infrahub.services import InfrahubServices + + +async def run(message: messages.TriggerGeneratorDefinitionRun, service: InfrahubServices) -> None: + generators = await service.client.filters( + kind=InfrahubKind.GENERATORDEFINITION, prefetch_relationships=True, populate_store=True, branch=message.branch + ) + + generator_definitions = [ + ProposedChangeGeneratorDefinition( + definition_id=generator.id, + definition_name=generator.name.value, + class_name=generator.class_name.value, + file_path=generator.file_path.value, + query_name=generator.query.peer.name.value, + query_models=generator.query.peer.models.value, + repository_id=generator.repository.peer.id, + parameters=generator.parameters.value, + group_id=generator.targets.peer.id, + convert_query_response=generator.convert_query_response.value, + ) + for generator in generators + ] + + events = [ + messages.RequestGeneratorDefinitionRun(branch=message.branch, generator_definition=generator_definition) + for generator_definition in generator_definitions + ] + for event in events: + event.assign_meta(parent=message) + await service.send(message=event) diff --git a/backend/infrahub/message_bus/operations/trigger/ipam.py b/backend/infrahub/message_bus/operations/trigger/ipam.py new file mode 100644 index 0000000000..49dba8bf78 --- /dev/null +++ b/backend/infrahub/message_bus/operations/trigger/ipam.py @@ -0,0 +1,26 @@ +import ipaddress + +from infrahub.core import registry +from infrahub.core.ipam.reconciler import IpamReconciler +from infrahub.log import get_logger +from infrahub.message_bus import messages +from infrahub.services import InfrahubServices + +log = get_logger() + + +async def reconciliation(message: messages.TriggerIpamReconciliation, service: InfrahubServices) -> None: + branch = await registry.get_branch(db=service.database, branch=message.branch) + ipam_reconciler 
= IpamReconciler(db=service.database, branch=branch) + + for ipam_node_details in message.ipam_node_details: + if ipam_node_details.is_address: + ip_value = ipaddress.ip_interface(ipam_node_details.ip_value) + else: + ip_value = ipaddress.ip_network(ipam_node_details.ip_value) + await ipam_reconciler.reconcile( + ip_value=ip_value, + namespace=ipam_node_details.namespace_id, + node_uuid=ipam_node_details.node_uuid, + is_delete=ipam_node_details.is_delete, + ) diff --git a/backend/infrahub/message_bus/types.py b/backend/infrahub/message_bus/types.py index c23b61e9cf..11fa7f1b35 100644 --- a/backend/infrahub/message_bus/types.py +++ b/backend/infrahub/message_bus/types.py @@ -86,6 +86,19 @@ def transform_location(self) -> str: raise ValueError("Invalid kind for Transform") +class ProposedChangeGeneratorDefinition(BaseModel): + definition_id: str + definition_name: str + query_name: str + convert_query_response: bool + query_models: list[str] + repository_id: str + class_name: str + file_path: str + parameters: dict + group_id: str + + class ProposedChangeBranchDiff(BaseModel): diff_summary: list[NodeDiff] = Field(default_factory=list, description="The DiffSummary between two branches") repositories: list[ProposedChangeRepository] = Field(default_factory=list) diff --git a/backend/infrahub/server.py b/backend/infrahub/server.py index 5edce661b7..5b98f7641c 100644 --- a/backend/infrahub/server.py +++ b/backend/infrahub/server.py @@ -14,7 +14,7 @@ from fastapi.staticfiles import StaticFiles from fastapi.templating import Jinja2Templates from infrahub_sdk.timestamp import TimestampFormatError -from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor +from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor, Span from pydantic import ValidationError from starlette_exporter import PrometheusMiddleware, handle_metrics @@ -22,6 +22,7 @@ from infrahub.api import router as api from infrahub.api.exception_handlers import generic_api_exception_handler from infrahub.components import ComponentType +from infrahub.core.graph.index import node_indexes, rel_indexes from infrahub.core.initialization import initialization from infrahub.database import InfrahubDatabase, InfrahubDatabaseMode, get_db from infrahub.dependencies.registry import build_component_registry @@ -33,7 +34,7 @@ from infrahub.services import InfrahubServices, services from infrahub.services.adapters.cache.redis import RedisCache from infrahub.services.adapters.message_bus.rabbitmq import RabbitMQMessageBus -from infrahub.trace import add_span_exception, configure_trace, get_traceid, get_tracer +from infrahub.trace import add_span_exception, configure_trace, get_traceid from infrahub.worker import WORKER_IDENTITY @@ -43,14 +44,16 @@ async def app_initialization(application: FastAPI) -> None: # Initialize trace if config.SETTINGS.trace.enable: configure_trace( + service="infrahub-server", version=__version__, exporter_type=config.SETTINGS.trace.exporter_type, - exporter_endpoint=config.SETTINGS.trace.trace_endpoint, + exporter_endpoint=config.SETTINGS.trace.exporter_endpoint, exporter_protocol=config.SETTINGS.trace.exporter_protocol, ) # Initialize database Driver and load local registry database = application.state.db = InfrahubDatabase(mode=InfrahubDatabaseMode.DRIVER, driver=await get_db()) + database.manager.index.init(nodes=node_indexes, rels=rel_indexes) initialize_lock() @@ -93,8 +96,13 @@ async def lifespan(application: FastAPI) -> AsyncGenerator: redoc_url="/api/redoc", ) 
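For context on the configure_trace() call changed above, which now receives an explicit service name and the renamed exporter_endpoint setting: the wiring it drives is ordinary OpenTelemetry SDK setup, roughly as in the sketch below. Only the standard SDK names and the "infrahub-server" service value come from this diff; everything else (function name, version value, console exporter) is illustrative and not Infrahub code. The real implementation is in the backend/infrahub/trace.py hunk later in this diff.

    # Rough standalone sketch of the tracer-provider setup performed by configure_trace().
    from opentelemetry import trace
    from opentelemetry.sdk.resources import Resource
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter

    def configure_trace_sketch(service: str, version: str) -> None:
        # Resource attributes identify the emitting service on the tracing backend.
        resource = Resource(attributes={"service.name": service, "service.version": version})
        provider = TracerProvider(resource=resource)
        # A console exporter stands in here for the OTLP exporter selected via config.
        provider.add_span_processor(BatchSpanProcessor(ConsoleSpanExporter()))
        trace.set_tracer_provider(provider)

    configure_trace_sketch(service="infrahub-server", version="0.0.0")  # version value is a placeholder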
-FastAPIInstrumentor().instrument_app(app, excluded_urls=".*/metrics") -tracer = get_tracer() + +def server_request_hook(span: Span, scope: dict) -> None: # pylint: disable=unused-argument + if span and span.is_recording(): + span.set_attribute("worker", WORKER_IDENTITY) + + +FastAPIInstrumentor().instrument_app(app, excluded_urls=".*/metrics", server_request_hook=server_request_hook) FRONTEND_DIRECTORY = os.environ.get("INFRAHUB_FRONTEND_DIRECTORY", os.path.abspath("frontend")) FRONTEND_ASSET_DIRECTORY = f"{FRONTEND_DIRECTORY}/dist/assets" @@ -116,15 +124,17 @@ async def lifespan(application: FastAPI) -> AsyncGenerator: async def logging_middleware(request: Request, call_next: Callable[[Request], Awaitable[Response]]) -> Response: clear_log_context() request_id = correlation_id.get() - with tracer.start_as_current_span(f"processing request {request_id}"): - trace_id = get_traceid() - set_log_data(key="request_id", value=request_id) - set_log_data(key="app", value="infrahub.api") - set_log_data(key="worker", value=WORKER_IDENTITY) - if trace_id: - set_log_data(key="trace_id", value=trace_id) - response = await call_next(request) - return response + + set_log_data(key="request_id", value=request_id) + set_log_data(key="app", value="infrahub.api") + set_log_data(key="worker", value=WORKER_IDENTITY) + + trace_id = get_traceid() + if trace_id: + set_log_data(key="trace_id", value=trace_id) + + response = await call_next(request) + return response @app.middleware("http") diff --git a/backend/infrahub/services/__init__.py b/backend/infrahub/services/__init__.py index 2ae4df18b5..c5b08a1486 100644 --- a/backend/infrahub/services/__init__.py +++ b/backend/infrahub/services/__init__.py @@ -13,6 +13,7 @@ from .adapters.cache import InfrahubCache from .adapters.message_bus import InfrahubMessageBus +from .component import InfrahubComponent from .protocols import InfrahubLogger from .scheduler import InfrahubScheduler @@ -34,6 +35,7 @@ def __init__( self.log = log or get_logger() self.component_type = component_type or ComponentType.NONE self.scheduler = InfrahubScheduler() + self.component = InfrahubComponent() @property def client(self) -> InfrahubClient: @@ -69,6 +71,7 @@ def task_report( async def initialize(self) -> None: """Initialize the Services""" + await self.component.initialize(service=self) await self.message_bus.initialize(service=self) await self.scheduler.initialize(service=self) diff --git a/backend/infrahub/services/adapters/cache/__init__.py b/backend/infrahub/services/adapters/cache/__init__.py index adc5a34cab..50347f9ff0 100644 --- a/backend/infrahub/services/adapters/cache/__init__.py +++ b/backend/infrahub/services/adapters/cache/__init__.py @@ -12,6 +12,10 @@ async def get(self, key: str) -> Optional[str]: """Retrieve a value from the cache.""" raise NotImplementedError() + async def get_values(self, keys: list[str]) -> list[Optional[str]]: + """Return a list the values for requested keys.""" + raise NotImplementedError() + async def list_keys(self, filter_pattern: str) -> List[str]: """Return a list of active keys that match the provided filter.""" raise NotImplementedError() diff --git a/backend/infrahub/services/adapters/cache/redis.py b/backend/infrahub/services/adapters/cache/redis.py index 4f02d8f389..ceb67b2172 100644 --- a/backend/infrahub/services/adapters/cache/redis.py +++ b/backend/infrahub/services/adapters/cache/redis.py @@ -23,6 +23,10 @@ async def get(self, key: str) -> Optional[str]: return value.decode() return None + async def get_values(self, keys: 
list[str]) -> list[Optional[str]]: + values = await self.connection.mget(keys=keys) + return [value.decode() if value is not None else value for value in values] + async def list_keys(self, filter_pattern: str) -> List[str]: cursor = 0 has_remaining_keys = True diff --git a/backend/infrahub/services/adapters/message_bus/__init__.py b/backend/infrahub/services/adapters/message_bus/__init__.py index b3356d1f20..340086c48e 100644 --- a/backend/infrahub/services/adapters/message_bus/__init__.py +++ b/backend/infrahub/services/adapters/message_bus/__init__.py @@ -25,6 +25,3 @@ async def reply(self, message: InfrahubMessage, routing_key: str) -> None: async def rpc(self, message: InfrahubMessage, response_class: type[ResponseClass]) -> ResponseClass: raise NotImplementedError() - - async def subscribe(self) -> None: - raise NotImplementedError() diff --git a/backend/infrahub/services/adapters/message_bus/local.py b/backend/infrahub/services/adapters/message_bus/local.py new file mode 100644 index 0000000000..f0dba336a2 --- /dev/null +++ b/backend/infrahub/services/adapters/message_bus/local.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +from collections import defaultdict +from typing import TYPE_CHECKING, Dict, List, Optional, Type, TypeVar + +import ujson +from infrahub_sdk import UUIDT + +from infrahub.dependencies.registry import build_component_registry +from infrahub.message_bus import InfrahubMessage, Meta +from infrahub.message_bus.messages import ROUTING_KEY_MAP +from infrahub.message_bus.operations import execute_message +from infrahub.services import InfrahubServices +from infrahub.services.adapters.message_bus import InfrahubMessageBus + +if TYPE_CHECKING: + from infrahub.database import InfrahubDatabase + from infrahub.message_bus.types import MessageTTL + +ResponseClass = TypeVar("ResponseClass") + + +class BusSimulator(InfrahubMessageBus): + def __init__(self, database: Optional[InfrahubDatabase] = None): + self.messages: List[InfrahubMessage] = [] + self.messages_per_routing_key: Dict[str, List[InfrahubMessage]] = {} + self.service: InfrahubServices = InfrahubServices(database=database, message_bus=self) + self.replies: Dict[str, List[InfrahubMessage]] = defaultdict(list) + build_component_registry() + + async def publish(self, message: InfrahubMessage, routing_key: str, delay: Optional[MessageTTL] = None) -> None: + self.messages.append(message) + if routing_key not in self.messages_per_routing_key: + self.messages_per_routing_key[routing_key] = [] + self.messages_per_routing_key[routing_key].append(message) + await execute_message(routing_key=routing_key, message_body=message.body, service=self.service) + + async def reply(self, message: InfrahubMessage, routing_key: str) -> None: + correlation_id = message.meta.correlation_id or "default" + self.replies[correlation_id].append(message) + + async def rpc(self, message: InfrahubMessage, response_class: Type[ResponseClass]) -> ResponseClass: + routing_key = ROUTING_KEY_MAP.get(type(message), "") + + correlation_id = str(UUIDT()) + message.meta = Meta(correlation_id=correlation_id, reply_to="ci-testing") + + await self.publish(message=message, routing_key=routing_key) + reply_id = correlation_id or "default" + assert len(self.replies[reply_id]) == 1 + response = self.replies[reply_id][0] + data = ujson.loads(response.body) + return response_class(**data) + + @property + def seen_routing_keys(self) -> List[str]: + return list(self.messages_per_routing_key.keys()) diff --git 
a/backend/infrahub/services/adapters/message_bus/rabbitmq.py b/backend/infrahub/services/adapters/message_bus/rabbitmq.py index 9e59d64fa2..1911687162 100644 --- a/backend/infrahub/services/adapters/message_bus/rabbitmq.py +++ b/backend/infrahub/services/adapters/message_bus/rabbitmq.py @@ -1,11 +1,14 @@ from __future__ import annotations import asyncio -import json from typing import TYPE_CHECKING, Awaitable, Callable, List, MutableMapping, Optional, Type, TypeVar import aio_pika +import opentelemetry.instrumentation.aio_pika.span_builder +import ujson from infrahub_sdk import UUIDT +from opentelemetry.instrumentation.aio_pika import AioPikaInstrumentor +from opentelemetry.semconv.trace import SpanAttributes from infrahub import config from infrahub.components import ComponentType @@ -24,6 +27,7 @@ AbstractQueue, AbstractRobustConnection, ) + from opentelemetry.instrumentation.aio_pika.span_builder import SpanBuilder from infrahub.config import BrokerSettings from infrahub.services import InfrahubServices @@ -32,6 +36,29 @@ ResponseClass = TypeVar("ResponseClass") +AioPikaInstrumentor().instrument() + + +# TODO: remove this once https://github.com/open-telemetry/opentelemetry-python-contrib/issues/1835 is resolved +def patch_spanbuilder_set_channel() -> None: + """ + The default SpanBuilder.set_channel does not work with aio_pika 9.1 and the refactored connection + attribute + """ + + def set_channel(self: SpanBuilder, channel: AbstractChannel) -> None: + if hasattr(channel, "_connection"): + url = channel._connection.url + self._attributes.update( + { + SpanAttributes.NET_PEER_NAME: url.host, + SpanAttributes.NET_PEER_PORT: url.port, + } + ) + + opentelemetry.instrumentation.aio_pika.span_builder.SpanBuilder.set_channel = set_channel # type: ignore + + async def _add_request_id(message: InfrahubMessage) -> None: log_data = get_log_data() message.meta.request_id = log_data.get("request_id", "") @@ -54,6 +81,8 @@ def __init__(self, settings: Optional[BrokerSettings] = None) -> None: self.futures: MutableMapping[str, asyncio.Future] = {} async def initialize(self, service: InfrahubServices) -> None: + patch_spanbuilder_set_channel() + self.service = service self.connection = await aio_pika.connect_robust( host=self.settings.address, @@ -86,6 +115,14 @@ async def on_callback(self, message: AbstractIncomingMessage) -> None: else: self.service.log.error("Invalid message received", message=f"{message!r}") + async def on_message(self, message: AbstractIncomingMessage) -> None: + async with message.process(): + clear_log_context() + if message.routing_key in messages.MESSAGE_MAP: + await execute_message(routing_key=message.routing_key, message_body=message.body, service=self.service) + else: + self.service.log.error("Invalid message received", message=f"{message!r}") + async def _initialize_api_server(self) -> None: self.callback_queue = await self.channel.declare_queue(name=f"api-callback-{WORKER_IDENTITY}", exclusive=True) self.events_queue = await self.channel.declare_queue(name=f"api-events-{WORKER_IDENTITY}", exclusive=True) @@ -142,7 +179,6 @@ async def _initialize_api_server(self) -> None: self.message_enrichers.append(_add_request_id) async def _initialize_git_worker(self) -> None: - await self.channel.set_qos(prefetch_count=1) events_queue = await self.channel.declare_queue(name=f"worker-events-{WORKER_IDENTITY}", exclusive=True) self.exchange = await self.channel.declare_exchange( @@ -157,6 +193,12 @@ async def _initialize_git_worker(self) -> None: ) await 
self.callback_queue.consume(self.on_callback, no_ack=True) + message_channel = await self.connection.channel() + await message_channel.set_qos(prefetch_count=self.settings.maximum_concurrent_messages) + + queue = await message_channel.get_queue(f"{self.settings.namespace}.rpcs") + await queue.consume(callback=self.on_message, no_ack=False) + async def publish(self, message: InfrahubMessage, routing_key: str, delay: Optional[MessageTTL] = None) -> None: for enricher in self.message_enrichers: await enricher(message) @@ -183,31 +225,9 @@ async def rpc(self, message: InfrahubMessage, response_class: Type[ResponseClass await self.service.send(message=message) response: AbstractIncomingMessage = await future - data = json.loads(response.body) + data = ujson.loads(response.body) return response_class(**data) - async def subscribe(self) -> None: - queue = await self.channel.get_queue(f"{self.settings.namespace}.rpcs") - self.service.log.info("Waiting for RPC instructions to execute .. ") - async with queue.iterator() as qiterator: - async for message in qiterator: - try: - async with message.process(requeue=False): - clear_log_context() - if message.routing_key in messages.MESSAGE_MAP: - await execute_message( - routing_key=message.routing_key, message_body=message.body, service=self.service - ) - else: - self.service.log.error( - "Unhandled routing key for message", - routing_key=message.routing_key, - message=message.body, - ) - - except Exception: # pylint: disable=broad-except - self.service.log.exception("Processing error for message %r" % message) - @staticmethod def format_message(message: InfrahubMessage) -> aio_pika.Message: pika_message = aio_pika.Message( diff --git a/backend/infrahub/services/component.py b/backend/infrahub/services/component.py new file mode 100644 index 0000000000..97de5fdad6 --- /dev/null +++ b/backend/infrahub/services/component.py @@ -0,0 +1,131 @@ +from __future__ import annotations + +import re +from typing import TYPE_CHECKING, Any, Optional + +from infrahub.components import ComponentType +from infrahub.core.registry import registry +from infrahub.core.timestamp import Timestamp +from infrahub.exceptions import InitializationError +from infrahub.message_bus import messages +from infrahub.worker import WORKER_IDENTITY + +if TYPE_CHECKING: + from infrahub.services import InfrahubServices + +PRIMARY_API_SERVER = "workers:primary:api_server" +WORKER_MATCH = re.compile(r":worker:([^:]+)") + + +class InfrahubComponent: + def __init__(self) -> None: + self._service: Optional[InfrahubServices] = None + + @property + def service(self) -> InfrahubServices: + if not self._service: + raise InitializationError("Component has not been initialized") + + return self._service + + @property + def component_names(self) -> list[str]: + names = [] + if self.service.component_type == ComponentType.API_SERVER: + names.append("api_server") + elif self.service.component_type == ComponentType.GIT_AGENT: + names.append("git_agent") + return names + + async def initialize(self, service: InfrahubServices) -> None: + """Initialize the Message bus""" + self._service = service + + async def is_primary_api(self) -> bool: + primary_identity = await self.service.cache.get(PRIMARY_API_SERVER) + return primary_identity == WORKER_IDENTITY + + async def list_workers(self, branch: str, schema_hash: bool) -> list[WorkerInfo]: + keys = await self.service.cache.list_keys(filter_pattern="workers:*") + + workers: dict[str, WorkerInfo] = {} + for key in keys: + if match := WORKER_MATCH.search(key): + 
identity = match.group(1) + if identity not in workers: + workers[identity] = WorkerInfo(identity=identity) + workers[identity].add_key(key=key) + + response = [] + schema_hash_keys = [] + if schema_hash: + schema_hash_keys = [key for key in keys if f":schema_hash:branch:{branch}" in key] + response = await self.service.cache.get_values(keys=schema_hash_keys) + + for key, value in zip(schema_hash_keys, response): + if match := WORKER_MATCH.search(key): + identity = match.group(1) + workers[identity].add_value(key=key, value=value) + return list(workers.values()) + + async def refresh_schema_hash(self, branches: Optional[list[str]] = None) -> None: + branches = branches or list(registry.branch.keys()) + for branch in branches: + schema_branch = registry.schema.get_schema_branch(name=branch) + hash_value = schema_branch.get_hash() + for component in self.component_names: + await self.service.cache.set( + key=f"workers:schema_hash:branch:{branch}:{component}:worker:{WORKER_IDENTITY}", + value=hash_value, + expires=7200, + ) + + async def refresh_heartbeat(self) -> None: + for component in self.component_names: + await self.service.cache.set( + key=f"workers:active:{component}:worker:{WORKER_IDENTITY}", value=Timestamp().to_string(), expires=15 + ) + if self.service.component_type == ComponentType.API_SERVER: + await self._set_primary_api_server() + await self.service.cache.set( + key=f"workers:worker:{WORKER_IDENTITY}", value=Timestamp().to_string(), expires=7200 + ) + + async def _set_primary_api_server(self) -> None: + result = await self.service.cache.set( + key=PRIMARY_API_SERVER, value=WORKER_IDENTITY, expires=15, not_exists=True + ) + if result: + await self.service.send(message=messages.EventWorkerNewPrimaryAPI(worker_id=WORKER_IDENTITY)) + else: + self.service.log.debug("Primary node already set") + primary_id = await self.service.cache.get(key=PRIMARY_API_SERVER) + if primary_id == WORKER_IDENTITY: + self.service.log.debug("Primary node set but same as ours, refreshing lifetime") + await self.service.cache.set(key=PRIMARY_API_SERVER, value=WORKER_IDENTITY, expires=15) + + +class WorkerInfo: + def __init__(self, identity: str) -> None: + self.id = identity + self.active = False + self._schema_hash: Optional[str] = None + + @property + def schema_hash(self) -> Optional[str]: + """Return schema hash provided that the worker is active.""" + if self.active: + return self._schema_hash + + return None + + def add_key(self, key: str) -> None: + if "workers:active:" in key: + self.active = True + + def add_value(self, key: str, value: Optional[str] = None) -> None: + if ":schema_hash:" in key: + self._schema_hash = value + + def to_dict(self) -> dict[str, Any]: + return {"id": self.id, "active": self.active, "schema_hash": self.schema_hash} diff --git a/backend/infrahub/services/scheduler.py b/backend/infrahub/services/scheduler.py index 53cff12663..0720463453 100644 --- a/backend/infrahub/services/scheduler.py +++ b/backend/infrahub/services/scheduler.py @@ -7,7 +7,7 @@ from infrahub import config from infrahub.components import ComponentType -from infrahub.tasks.keepalive import refresh_api_server_components +from infrahub.tasks.keepalive import refresh_heartbeat from infrahub.tasks.recurring import resync_repositories, trigger_branch_refresh if TYPE_CHECKING: @@ -32,14 +32,13 @@ async def initialize(self, service: InfrahubServices) -> None: self.service = service self.running = config.SETTINGS.miscellaneous.start_background_runner + # Add some randomness to the interval to avoid having all 
workers pulling the latest update at the same time + random_number = random.randint(30, 60) if self.service.component_type == ComponentType.API_SERVER: - # Add some randomness to the interval to avoid having all workers pulling the latest update at the same time - random_number = 30 + random.randint(1, 4) - 2 - schedules = [ - Schedule(name="refresh_api_components", interval=10, function=refresh_api_server_components), + Schedule(name="refresh_api_components", interval=10, function=refresh_heartbeat, start_delay=0), Schedule( - name="branch_refresh", interval=10, function=trigger_branch_refresh, start_delay=random_number + name="branch_refresh", interval=900, function=trigger_branch_refresh, start_delay=random_number ), ] self.schedules.extend(schedules) @@ -52,6 +51,14 @@ async def initialize(self, service: InfrahubServices) -> None: function=resync_repositories, ) ) + if self.service.component_type == ComponentType.GIT_AGENT: + schedules = [ + Schedule(name="refresh_components", interval=10, function=refresh_heartbeat), + Schedule( + name="branch_refresh", interval=900, function=trigger_branch_refresh, start_delay=random_number + ), + ] + self.schedules.extend(schedules) await self.start_schedule() diff --git a/backend/infrahub/tasks/keepalive.py b/backend/infrahub/tasks/keepalive.py index 8818f153b7..7ec78317fe 100644 --- a/backend/infrahub/tasks/keepalive.py +++ b/backend/infrahub/tasks/keepalive.py @@ -2,15 +2,11 @@ from typing import TYPE_CHECKING -from infrahub.core.timestamp import Timestamp -from infrahub.message_bus import messages -from infrahub.worker import WORKER_IDENTITY - if TYPE_CHECKING: from infrahub.services import InfrahubServices -async def refresh_api_server_components(service: InfrahubServices) -> None: +async def refresh_heartbeat(service: InfrahubServices) -> None: """Update API server worker information in the cache The goal of this job is to provide an updated list of API server workers in the cache, it will have a freshness @@ -20,15 +16,4 @@ async def refresh_api_server_components(service: InfrahubServices) -> None: that only one worker is responsible for scheduling specific tasks. 
""" service.log.debug("Refreshing API workers in cache") - - await service.cache.set(key=f"api_server:{WORKER_IDENTITY}", value=str(Timestamp()), expires=15) - - result = await service.cache.set(key="primary_api_server_id", value=WORKER_IDENTITY, expires=15, not_exists=True) - if result: - await service.send(message=messages.EventWorkerNewPrimaryAPI(worker_id=WORKER_IDENTITY)) - else: - service.log.debug("Primary node already set") - primary_id = await service.cache.get(key="primary_api_server_id") - if primary_id == WORKER_IDENTITY: - service.log.debug("Primary node set but same as ours, refreshing lifetime") - await service.cache.set(key="primary_api_server_id", value=WORKER_IDENTITY, expires=15) + await service.component.refresh_heartbeat() diff --git a/backend/infrahub/tasks/recurring.py b/backend/infrahub/tasks/recurring.py index a18d5230bc..de448cac18 100644 --- a/backend/infrahub/tasks/recurring.py +++ b/backend/infrahub/tasks/recurring.py @@ -17,13 +17,12 @@ async def trigger_branch_refresh(service: InfrahubServices) -> None: async with service.database.start_session() as db: await refresh_branches(db=db) + await service.component.refresh_schema_hash() + async def resync_repositories(service: InfrahubServices) -> None: - primary_identity = await service.cache.get("primary_api_server_id") - if primary_identity == WORKER_IDENTITY: - service.log.debug( - f"Primary identity={primary_identity} matches my identity={WORKER_IDENTITY}. Posting sync of repo message." - ) + if await service.component.is_primary_api(): + service.log.debug(f"Primary identity matches my identity={WORKER_IDENTITY}. Posting sync of repo message.") message = messages.RequestGitSync() message.assign_expiration(config.SETTINGS.git.sync_interval) await service.send(message=message) diff --git a/backend/infrahub/tasks/registry.py b/backend/infrahub/tasks/registry.py index 1d17fc5533..e3c186fe44 100644 --- a/backend/infrahub/tasks/registry.py +++ b/backend/infrahub/tasks/registry.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING from infrahub import lock from infrahub.core import registry @@ -12,7 +12,7 @@ log = get_logger() -async def refresh_branches(db: InfrahubDatabase): +async def refresh_branches(db: InfrahubDatabase) -> None: """Pull all the branches from the database and update the registry. 
If a branch is already present with a different value for the hash @@ -20,17 +20,20 @@ async def refresh_branches(db: InfrahubDatabase): """ async with lock.registry.local_schema_lock(): - branches: List[Branch] = await registry.branch_object.get_list(db=db) + branches = await registry.branch_object.get_list(db=db) active_branches = [branch.name for branch in branches] for new_branch in branches: if new_branch.name in registry.branch: branch_registry: Branch = registry.branch[new_branch.name] - if branch_registry.schema_hash and branch_registry.schema_hash.main != new_branch.schema_hash.main: + if ( + branch_registry.schema_hash + and branch_registry.schema_hash.main != new_branch.active_schema_hash.main + ): log.info( "New hash detected", branch=new_branch.name, hash_current=branch_registry.schema_hash.main, - hash_new=new_branch.schema_hash.main, + hash_new=new_branch.active_schema_hash.main, worker=WORKER_IDENTITY, ) registry.branch[new_branch.name] = new_branch diff --git a/backend/infrahub/test_data/dataset01.py b/backend/infrahub/test_data/dataset01.py index 89252b8506..cfc2dacaf2 100644 --- a/backend/infrahub/test_data/dataset01.py +++ b/backend/infrahub/test_data/dataset01.py @@ -95,7 +95,7 @@ async def load_data(db: InfrahubDatabase, nbr_devices: int = None): # roles_dict = {} log.info("Creating Site") - site_hq = await Node.init(db=db, schema="InfraSite") + site_hq = await Node.init(db=db, schema="LocationSite") await site_hq.new(db=db, name="HQ") await site_hq.save(db=db) @@ -132,7 +132,7 @@ async def load_data(db: InfrahubDatabase, nbr_devices: int = None): ) await intf.save(db=db) - ip = await Node.init(db=db, schema="InfraIPAddress") + ip = await Node.init(db=db, schema="IpamIPAddress") await ip.new(db=db, interface=intf, address=f"192.168.{idx}.10/24") await ip.save(db=db) @@ -162,6 +162,6 @@ async def load_data(db: InfrahubDatabase, nbr_devices: int = None): await intf.save(db=db) if intf_idx == 1: - ip = await Node.init(db=db, schema="InfraIPAddress") + ip = await Node.init(db=db, schema="IpamIPAddress") await ip.new(db=db, interface=intf, address=f"192.168.{idx}.{intf_idx}/24") await ip.save(db=db) diff --git a/backend/infrahub/test_data/dataset02.py b/backend/infrahub/test_data/dataset02.py deleted file mode 100644 index 9034fb742d..0000000000 --- a/backend/infrahub/test_data/dataset02.py +++ /dev/null @@ -1,211 +0,0 @@ -import re -from collections import defaultdict - -from infrahub.log import get_logger - -# flake8: noqa -# pylint: skip-file - -ROLES = ["spine", "leaf", "firewall", "server", "uplink"] - -DEVICES = ( - ("spine1", "active", "7050X3", "profile1", "spine", ["red", "green"]), - ("spine2", "active", "QFX5100", "profile1", "spine", ["red", "blue", "green"]), - ("spine3", "drained", "QFX5100", "profile1", "spine", ["red", "blue"]), - ("spine4", "active", "7050X3", "profile1", "spine", ["blue", "green"]), - ("leaf1", "active", "QFX5100", None, "leaf", ["red", "blue"]), - ("leaf2", "maintenance", "QFX5100", "profile2", "leaf", ["red", "blue", "green"]), - ("leaf3", "active", "7050X3", None, "leaf", ["blue", "green"]), -) - -INTERFACE_NAMES = { - "QFX5100": ["xe-0/0/0", "xe-0/0/1", "xe-0/0/2", "xe-0/0/3", "xe-0/0/4", "xe-0/0/5"], - "7050X3": ["Ethernet1", "Ethernet2", "Ethernet3", "Ethernet4", "Ethernet5"], -} - -INTERFACE_ROLES = { - "spine": ["leaf", "leaf", "leaf", "leaf", "leaf", "leaf"], - "leaf": ["spine", "spine", "spine", "spine", "server", "server"], -} - -INTERFACE_OBJS = defaultdict(list) - -PERMS = ( - ("device.name.all.read", "READ", 
"device.name.all"), - ("device.name.all.write", "WRITE", "device.name.all"), - ("device.status.all.read", "READ", "device.status.all"), - ("device.status.all.write", "WRITE", "device.status.all"), - ("device.description.all.read", "READ", "device.description.all"), - ("device.description.all.write", "WRITE", "device.description.all"), -) - -GROUPS = ( - ( - "Network Engineer", - "network-engineer", - ( - "device.name.all.read", - "device.status.all.read", - "device.description.all.read", - "device.name.all.write", - "device.status.all.write", - "device.description.all.write", - ), - ), - ( - "Operator", - "operator", - ( - "device.name.all.read", - "device.status.all.read", - "device.description.all.read", - "device.description.all.write", - ), - ), - ("Manager", "manager", ("device.name.all.read", "device.status.all.read", "device.description.all.read")), -) - -ACCOUNTS = ( - ("site-builder", "Script", ("operator",)), - # ("nelly", "User", ("network-engineer", "operator")), - # ("mary", "User", ("manager",)), -) - -log = get_logger() - - -def load_data(): - # ------------------------------------------ - # Create User Accounts and Groups - # ------------------------------------------ - groups_dict = {} - accounts_dict = {} - tags_dict = {} - - # for perm in PERMS: - # obj = Permission.init(name=perm[0], type=perm[1]) - # obj.save() - # perms_dict[perm[0]] = obj - - # # Associate the permissions with the right attr group - # grp = registry.attr_group[perm[2]] - # add_relationship(obj, grp, f"CAN_{obj.type.value}") - - # log.info(f"Permission Created: {obj.name.value}") - - # # Import the existing groups into the dict - # groups = Group.get_list() - # for group in groups: - # groups_dict[group.slug.value] = group - - for group in GROUPS: - obj = Group.init(label=group[0], name=group[1]) - obj.save() - groups_dict[group[1]] = obj - log.info(f"Group Created: {obj.label.value}") - - # for perm_name in group[2]: - # perm = perms_dict[perm_name] - - # # Associate the permissions with the right attr group - # add_relationship(obj, perm, f"HAS_PERM") - - for account in ACCOUNTS: - obj = Account.init(name=account[0], type=account[1]) - obj.save() - accounts_dict[account[0]] = obj - - for group in account[2]: - groups_dict[group].add_account(obj) - - log.info(f"Account Created: {obj.name.value}") - - # ------------------------------------------ - # Create Status, Role & DeviceProfile - # ------------------------------------------ - statuses_dict = {} - roles_dict = {} - - log.info("Creating Roles & Status") - for role in ROLES: - obj = Role.init(label=role.title(), slug=role) - obj.save() - roles_dict[role] = obj - log.info(f"Created Role: {role}") - - STATUSES = ["active", "provisioning", "maintenance", "drained"] - for status in STATUSES: - obj = Status.init(label=status.title(), slug=status) - obj.save() - statuses_dict[status] = obj - log.info(f"Created Status: {status}") - - TAGS = ["blue", "green", "red"] - for tag in TAGS: - obj = Tag.init(name=tag) - obj.save() - tags_dict[tag] = obj - log.info(f"Created Tag: {tag}") - - active_status = statuses_dict["active"] - site_builder_account = accounts_dict["site-builder"] - - log.info("Creating Device") - for idx, device in enumerate(DEVICES): - status_id = statuses_dict[device[1]].id - role_id = roles_dict[device[4]].id - device_type = device[2] - - obj = Device.init(name=device[0], status=status_id, type=device[2], role=role_id, source=site_builder_account) - - # Connect tags - for tag_name in device[5]: - tag = tags_dict[tag_name] - 
obj.tags.add_peer(tag) - - obj.save() - log.info(f"- Created Device: {device[0]}") - - # # Add a special interface for spine1 - # if device[0] == "spine1": - # intf = Interface.init(device="spine1", name="Loopback0", enabled=True) - # intf.save() - - # ip = IPAddress.init(interface=intf.uuid, address=f"192.168.{idx}.10/24") - # ip.save() - - for intf_idx, intf_name in enumerate(INTERFACE_NAMES[device_type]): - device_id = str(re.search(r"\d+", device[0]).group()) - - intf_role = INTERFACE_ROLES[device[4]][intf_idx] - intf_role_id = roles_dict[intf_role].id - - intf = Interface.init( - device=obj.uuid, - name=intf_name, - speed=10000, - enabled=True, - status=active_status.id, - role=intf_role_id, - source=site_builder_account, - ) - intf.save() - - INTERFACE_OBJS[device[0]].append(intf) - - device_id = str(re.search(r"\d+", device[0]).group()) - - if "spine" in device[0]: - network = f"192.168.{device_id}{intf_idx+1}.1/30" - elif "leaf" in device[0]: - network = f"192.168.{intf_idx+1}{device_id}.2/30" - - spine_name = f"spine{intf_idx+1}" - if spine_name in INTERFACE_OBJS.keys(): - intf.connected_interface.add_peer(INTERFACE_OBJS[spine_name][int(device_id) - 1]) - if intf_idx != 0: - intf.description.value = f"Connected to {spine_name}" - intf.save() - - ip = IPAddress.init(interface=intf.uuid, address=network, source=site_builder_account) - ip.save() diff --git a/backend/infrahub/test_data/gen_connected_nodes.py b/backend/infrahub/test_data/gen_connected_nodes.py new file mode 100644 index 0000000000..d258f02d84 --- /dev/null +++ b/backend/infrahub/test_data/gen_connected_nodes.py @@ -0,0 +1,84 @@ +import random +import uuid + +from infrahub.core import registry +from infrahub.core.constants import InfrahubKind +from infrahub.core.node import Node +from infrahub.log import get_logger + +from .shared import DataGenerator + +log = get_logger() + + +class GenerateConnectedNodes(DataGenerator): + async def load_data(self, nbr_tags: int = 50, nbr_repository: int = 100, nbr_query: int = 1000): + """Generate a large number of GraphQLQuery associated with some Tags and some Repositorie.""" + default_branch = await registry.get_branch(db=self.db) + + if self.progress: + task_tag = self.progress.add_task("Loading TAG", total=nbr_tags) + task_repo = self.progress.add_task("Loading REPOSITORY", total=nbr_repository) + task_query = self.progress.add_task("Loading QUERY", total=nbr_query) + + tags = {} + repository = {} + gqlquery = {} + + tag_schema = registry.schema.get_node_schema(name=InfrahubKind.TAG, branch=default_branch) + repository_schema = registry.schema.get_node_schema(name=InfrahubKind.REPOSITORY, branch=default_branch) + gqlquery_schema = registry.schema.get_node_schema(name=InfrahubKind.GRAPHQLQUERY, branch=default_branch) + + # ------------------------------------------------------------------------------------- + # TAG + # ------------------------------------------------------------------------------------- + batch = self.create_batch() + for _ in range(nbr_tags): + short_id = str(uuid.uuid4())[:8] + tag_name = f"tag-{short_id}" + obj = await Node.init(db=self.db, schema=tag_schema, branch=default_branch) + await obj.new(db=self.db, name=tag_name) + batch.add(task=self.save_obj, obj=obj) + tags[tag_name] = obj + + async for _ in batch.execute(): + if self.progress: + self.progress.advance(task_tag) + + # ------------------------------------------------------------------------------------- + # REPOSITORY + # 
------------------------------------------------------------------------------------- + batch = self.create_batch() + for _ in range(nbr_repository): + short_id = str(uuid.uuid4())[:8] + repo_name = f"repository-{short_id}" + obj = await Node.init(db=self.db, schema=repository_schema, branch=default_branch) + random_tags = [tags[tag] for tag in random.choices(list(tags.keys()), k=5)] + await obj.new(db=self.db, name=repo_name, location=f"git://{repo_name}", tags=random_tags) + batch.add(task=self.save_obj, obj=obj) + repository[repo_name] = obj + + async for _ in batch.execute(): + if self.progress: + self.progress.advance(task_repo) + + # ------------------------------------------------------------------------------------- + # GRAPHQL_QUERY + # ------------------------------------------------------------------------------------- + batch = self.create_batch() + for _ in range(nbr_query): + short_id = str(uuid.uuid4())[:8] + + random_tags = [tags[tag] for tag in random.choices(list(tags.keys()), k=5)] + random_repo = repository[random.choice(list(repository.keys()))] + + name = f"query-{nbr_query:04}-{short_id}" + query_str = "query CoreQuery%s { tag { name { value }}}" % f"{nbr_query:04}" + obj = await Node.init(db=self.db, schema=gqlquery_schema, branch=default_branch) + await obj.new(db=self.db, name=name, query=query_str, tags=random_tags, repository=random_repo) + batch.add(task=self.save_obj, obj=obj, db=self.db) + gqlquery[name] = obj + + async for _ in batch.execute(): + if self.progress: + self.progress.advance(task_query) diff --git a/backend/infrahub/test_data/gen_isolated_node.py b/backend/infrahub/test_data/gen_isolated_node.py new file mode 100644 index 0000000000..8dcf06360a --- /dev/null +++ b/backend/infrahub/test_data/gen_isolated_node.py @@ -0,0 +1,62 @@ +import uuid + +from infrahub.core import registry +from infrahub.core.constants import InfrahubKind +from infrahub.core.node import Node +from infrahub.log import get_logger + +from .shared import DataGenerator + +log = get_logger() + + +class GenerateIsolatedNodes(DataGenerator): + async def load_data( + self, + nbr_tags: int = 100, + nbr_repository: int = 100, + ): + """Generate a large number of Tags and Repositories""" + default_branch = await registry.get_branch(db=self.db) + + if self.progress: + task_tag = self.progress.add_task("Loading TAG", total=nbr_tags) + task_repo = self.progress.add_task("Loading REPOSITORY", total=nbr_repository) + + tags = {} + repository = {} + + tag_schema = registry.schema.get_node_schema(name=InfrahubKind.TAG, branch=default_branch) + repository_schema = registry.schema.get_node_schema(name=InfrahubKind.REPOSITORY, branch=default_branch) + + # ------------------------------------------------------------------------------------- + # TAG + # ------------------------------------------------------------------------------------- + batch = self.create_batch() + for _ in range(nbr_tags): + short_id = str(uuid.uuid4())[:8] + tag_name = f"tag-{short_id}" + obj = await Node.init(db=self.db, schema=tag_schema, branch=default_branch) + await obj.new(db=self.db, name=tag_name) + batch.add(task=self.save_obj, obj=obj) + tags[tag_name] = obj + + async for _ in batch.execute(): + if self.progress: + self.progress.advance(task_tag) + + # ------------------------------------------------------------------------------------- + # REPOSITORY + # ------------------------------------------------------------------------------------- + batch = self.create_batch() + for _ in range(nbr_repository): + short_id = 
str(uuid.uuid4())[:8] + repo_name = f"repository-{short_id}" + obj = await Node.init(db=self.db, schema=repository_schema, branch=default_branch) + await obj.new(db=self.db, name=repo_name, location=f"git://{repo_name}") + batch.add(task=self.save_obj, obj=obj) + repository[repo_name] = obj + + async for _ in batch.execute(): + if self.progress: + self.progress.advance(task_repo) diff --git a/backend/infrahub/test_data/gen_node_profile_node.py b/backend/infrahub/test_data/gen_node_profile_node.py new file mode 100644 index 0000000000..e094d46aae --- /dev/null +++ b/backend/infrahub/test_data/gen_node_profile_node.py @@ -0,0 +1,55 @@ +import random +import uuid + +from infrahub.core import registry +from infrahub.core.node import Node +from infrahub.log import get_logger + +from .shared import DataGenerator + +log = get_logger() + + +class ProfileAttribute(DataGenerator): + async def load_data( + self, + nbr_person: int = 100, + ): + """Generate a large number of Tags and Repositories""" + default_branch = await registry.get_branch(db=self.db) + + if self.progress: + task_person = self.progress.add_task("Loading PERSON", total=nbr_person) + + persons = {} + + person_profile_schema = registry.schema.get(name="ProfileTestPerson", branch=default_branch) + profile1 = await Node.init(db=self.db, schema=person_profile_schema, branch=default_branch) + await profile1.new(db=self.db, profile_name="profile1", profile_priority=1000, height=180) + await profile1.save(db=self.db) + profile2 = await Node.init(db=self.db, schema=person_profile_schema, branch=default_branch) + await profile2.new(db=self.db, profile_name="profile2", profile_priority=1200, height=150) + await profile2.save(db=self.db) + + person_schema = registry.schema.get_node_schema(name="TestPerson", branch=default_branch) + # ------------------------------------------------------------------------------------- + # TAG + # ------------------------------------------------------------------------------------- + batch = self.create_batch() + for _ in range(nbr_person): + short_id = str(uuid.uuid4())[:8] + rand_height = random.randrange(200) + name = f"nbr_person-{short_id}" + obj = await Node.init(db=self.db, schema=person_schema, branch=default_branch) + + profile = profile2 + if rand_height % 2 == 0: + profile = profile1 + + await obj.new(db=self.db, name=name, profiles=[profile]) + batch.add(task=self.save_obj, obj=obj) + persons[name] = obj + + async for _ in batch.execute(): + if self.progress: + self.progress.advance(task_person) diff --git a/backend/infrahub/test_data/shared.py b/backend/infrahub/test_data/shared.py new file mode 100644 index 0000000000..9ad5acb16c --- /dev/null +++ b/backend/infrahub/test_data/shared.py @@ -0,0 +1,67 @@ +import asyncio +from dataclasses import dataclass +from typing import Any, Awaitable, Callable, Dict, List, Optional, Tuple + +from infrahub_sdk.batch import BatchTask, InfrahubBatch +from rich.progress import Progress + +from infrahub.core.node import Node +from infrahub.database import InfrahubDatabase + + +@dataclass +class CallbackTask: + name: str + task: Callable[[Any], Awaitable[Any]] + args: Tuple[Any, ...] 
+ kwargs: Dict[str, Any] + + +class DataGeneratorBatch(InfrahubBatch): + def __init__( + self, + callbacks: Optional[List[CallbackTask]] = None, + callback_frequency: int = 10, + semaphore: Optional[asyncio.Semaphore] = None, + max_concurrent_execution: int = 5, + return_exceptions: bool = False, + ): + super().__init__( + semaphore=semaphore, max_concurrent_execution=max_concurrent_execution, return_exceptions=return_exceptions + ) + self.callbacks: List[CallbackTask] = callbacks or [] + self.callback_frequency = callback_frequency + + def add(self, *args: Any, **kwargs: Any) -> None: + super().add(*args, **kwargs) + + if len(self._tasks) % self.callback_frequency == 0: + for callback in self.callbacks: + self._tasks.append(BatchTask(task=callback.task, args=callback.args, kwargs=callback.kwargs)) + + +class DataGenerator: + def __init__(self, db: InfrahubDatabase, concurrent_execution: int = 2, progress: Optional[Progress] = None): + self.db = db + self.concurrent_execution = concurrent_execution + self.progress = progress + self.callbacks: List[CallbackTask] = [] + + def add_callback(self, *args: Any, callback_name: str, **kwargs: Any) -> None: + self.callbacks.append(CallbackTask(name=callback_name, task=self.execute_db_task, args=args, kwargs=kwargs)) + + async def execute_db_task(self, task: Callable[[Any], Awaitable[Any]], **kwargs: Any) -> Any: + async with self.db.start_session() as dbs: + return await task(db=dbs, **kwargs) + + async def save_obj(self, obj: Node) -> Node: + async with self.db.start_session() as dbs: + async with dbs.start_transaction() as dbt: + await obj.save(db=dbt) + + return obj + + def create_batch(self) -> DataGeneratorBatch: + return DataGeneratorBatch( + max_concurrent_execution=self.concurrent_execution, return_exceptions=True, callbacks=self.callbacks + ) diff --git a/backend/infrahub/trace.py b/backend/infrahub/trace.py index 689b1f7aac..c332380b61 100644 --- a/backend/infrahub/trace.py +++ b/backend/infrahub/trace.py @@ -1,3 +1,5 @@ +import os + from opentelemetry import trace from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import ( OTLPSpanExporter as GRPCSpanExporter, @@ -10,9 +12,7 @@ from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter from opentelemetry.trace import StatusCode - -def get_tracer(name: str = "infrahub") -> trace.Tracer: - return trace.get_tracer(name) +from infrahub.worker import WORKER_IDENTITY def get_current_span_with_context() -> trace.Span: @@ -55,7 +55,7 @@ def add_span_exception(exception: Exception) -> None: def create_tracer_provider( - version: str, exporter_type: str, exporter_endpoint: str = None, exporter_protocol: str = None + service: str, version: str, exporter_type: str, exporter_endpoint: str = None, exporter_protocol: str = None ) -> TracerProvider: # Create a BatchSpanProcessor exporter based on the type if exporter_type == "console": @@ -70,8 +70,19 @@ def create_tracer_provider( else: raise ValueError("Exporter type unsupported by Infrahub") + extra_attributes = {} + if os.getenv("OTEL_RESOURCE_ATTRIBUTES"): + extra_attributes = dict(attr.split("=") for attr in os.getenv("OTEL_RESOURCE_ATTRIBUTES").split(",")) + # Resource can be required for some backends, e.g. 
Jaeger - resource = Resource(attributes={"service.name": "infrahub", "service.version": version}) + resource = Resource( + attributes={ + "service.name": service, + "service.version": version, + "worker.id": WORKER_IDENTITY, + **extra_attributes, + } + ) span_processor = BatchSpanProcessor(exporter) tracer_provider = TracerProvider(resource=resource) tracer_provider.add_span_processor(span_processor) @@ -80,16 +91,16 @@ def create_tracer_provider( def configure_trace( - version: str, exporter_type: str, exporter_endpoint: str = None, exporter_protocol: str = None + service: str, version: str, exporter_type: str, exporter_endpoint: str | None = None, exporter_protocol: str = None ) -> None: # Create a trace provider with the exporter tracer_provider = create_tracer_provider( + service=service, version=version, exporter_type=exporter_type, exporter_endpoint=exporter_endpoint, exporter_protocol=exporter_protocol, ) - tracer_provider.get_tracer(__name__) # Register the trace provider trace.set_tracer_provider(tracer_provider) diff --git a/backend/infrahub/types.py b/backend/infrahub/types.py index 80debd32d8..041495a268 100644 --- a/backend/infrahub/types.py +++ b/backend/infrahub/types.py @@ -11,7 +11,7 @@ if TYPE_CHECKING: from infrahub.core.attribute import BaseAttribute - from infrahub.graphql.mutations.attribute import BaseAttributeInput + from infrahub.graphql.mutations.attribute import BaseAttributeCreate, BaseAttributeUpdate from infrahub.graphql.types.attribute import BaseAttribute as BaseAttributeType DEFAULT_MODULE_ATTRIBUTE = "infrahub.core.attribute" @@ -22,7 +22,8 @@ class InfrahubDataType: label: str graphql_query: str - graphql_input: str + graphql_create: str + graphql_update: str graphql_filter: type graphql: type infrahub: str @@ -43,11 +44,18 @@ def get_infrahub_class(cls) -> Type[BaseAttribute]: return getattr(module, cls.infrahub) @classmethod - def get_graphql_input(cls) -> Type[BaseAttributeInput]: - if not isinstance(cls.graphql_input, str): - return cls.graphql_input + def get_graphql_create(cls) -> Type[BaseAttributeCreate]: + if not isinstance(cls.graphql_create, str): + return cls.graphql_create module = importlib.import_module(DEFAULT_MODULE_GRAPHQL_INPUT) - return getattr(module, cls.graphql_input) + return getattr(module, cls.graphql_create) + + @classmethod + def get_graphql_update(cls) -> Type[BaseAttributeUpdate]: + if not isinstance(cls.graphql_update, str): + return cls.graphql_update + module = importlib.import_module(DEFAULT_MODULE_GRAPHQL_INPUT) + return getattr(module, cls.graphql_update) @classmethod def get_graphql_type(cls) -> Type[BaseAttributeType]: @@ -83,7 +91,8 @@ class Default(InfrahubDataType): label: str = "Default" graphql = graphene.String graphql_query = "BaseAttribute" - graphql_input = "BaseAttributeInput" + graphql_create = "BaseAttributeCreate" + graphql_update = "BaseAttributeUpdate" graphql_filter = graphene.String infrahub = "BaseAttribute" @@ -92,7 +101,8 @@ class ID(InfrahubDataType): label: str = "ID" graphql = graphene.ID graphql_query = "TextAttributeType" - graphql_input = "TextAttributeInput" + graphql_create = "TextAttributeCreate" + graphql_update = "TextAttributeUpdate" graphql_filter = graphene.String infrahub = "String" @@ -101,7 +111,8 @@ class Text(InfrahubDataType): label: str = "Text" graphql = graphene.String graphql_query = "TextAttributeType" - graphql_input = "TextAttributeInput" + graphql_create = "TextAttributeCreate" + graphql_update = "TextAttributeUpdate" graphql_filter = graphene.String infrahub = "String" 
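The graphql_create/graphql_update accessors introduced in the types.py hunk above reuse the same lazy-resolution idiom as get_infrahub_class: the class attribute holds either an actual class or the name of a class to be looked up in a well-known module at call time. A minimal illustrative sketch of that idiom follows (not Infrahub code; module and class names in the example call are placeholders).

    import importlib
    from typing import Union

    def resolve_class(target: Union[type, str], module_path: str) -> type:
        # Already a class object: return it unchanged.
        if not isinstance(target, str):
            return target
        # Otherwise treat it as a class name inside module_path and import it lazily.
        module = importlib.import_module(module_path)
        return getattr(module, target)

    # Example with placeholder names:
    # resolve_class("TextAttributeCreate", "my_app.graphql_inputs")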
@@ -110,7 +121,8 @@ class TextArea(Text): label: str = "TextArea" graphql = graphene.String graphql_query = "TextAttributeType" - graphql_input = "TextAttributeInput" + graphql_create = "TextAttributeCreate" + graphql_update = "TextAttributeUpdate" graphql_filter = graphene.String infrahub = "String" @@ -119,7 +131,8 @@ class DateTime(InfrahubDataType): label: str = "DateTime" graphql = graphene.String graphql_query = "TextAttributeType" - graphql_input = "TextAttributeInput" + graphql_create = "TextAttributeCreate" + graphql_update = "TextAttributeUpdate" graphql_filter = graphene.String infrahub = "String" @@ -128,7 +141,8 @@ class Email(InfrahubDataType): label: str = "Email" graphql = graphene.String graphql_query = "TextAttributeType" - graphql_input = "TextAttributeInput" + graphql_create = "TextAttributeCreate" + graphql_update = "TextAttributeUpdate" graphql_filter = graphene.String infrahub = "String" @@ -137,7 +151,8 @@ class Password(InfrahubDataType): label: str = "Password" graphql = graphene.String graphql_query = "TextAttributeType" - graphql_input = "TextAttributeInput" + graphql_create = "TextAttributeCreate" + graphql_update = "TextAttributeUpdate" graphql_filter = graphene.String infrahub = "String" @@ -146,7 +161,8 @@ class HashedPassword(InfrahubDataType): label: str = "Password" graphql = graphene.String graphql_query = "TextAttributeType" - graphql_input = "TextAttributeInput" + graphql_create = "TextAttributeCreate" + graphql_update = "TextAttributeUpdate" graphql_filter = graphene.String infrahub = "HashedPassword" @@ -155,7 +171,8 @@ class URL(InfrahubDataType): label: str = "URL" graphql = graphene.String graphql_query = "TextAttributeType" - graphql_input = "TextAttributeInput" + graphql_create = "TextAttributeCreate" + graphql_update = "TextAttributeUpdate" graphql_filter = graphene.String infrahub = "URL" @@ -164,7 +181,8 @@ class File(InfrahubDataType): label: str = "File" graphql = graphene.String graphql_query = "TextAttributeType" - graphql_input = "TextAttributeInput" + graphql_create = "TextAttributeCreate" + graphql_update = "TextAttributeUpdate" graphql_filter = graphene.String infrahub = "String" @@ -173,7 +191,8 @@ class MacAddress(InfrahubDataType): label: str = "MacAddress" graphql = graphene.String graphql_query = "TextAttributeType" - graphql_input = "TextAttributeInput" + graphql_create = "TextAttributeCreate" + graphql_update = "TextAttributeUpdate" graphql_filter = graphene.String infrahub = "String" @@ -182,7 +201,8 @@ class Color(InfrahubDataType): label: str = "Color" graphql = graphene.String graphql_query = "TextAttributeType" - graphql_input = "TextAttributeInput" + graphql_create = "TextAttributeCreate" + graphql_update = "TextAttributeUpdate" graphql_filter = graphene.String infrahub = "String" @@ -191,7 +211,8 @@ class Dropdown(InfrahubDataType): label: str = "Dropdown" graphql = graphene.String graphql_query = "DropdownType" - graphql_input = "TextAttributeInput" + graphql_create = "TextAttributeCreate" + graphql_update = "TextAttributeUpdate" graphql_filter = graphene.String infrahub = "Dropdown" @@ -200,7 +221,8 @@ class Number(InfrahubDataType): label: str = "Number" graphql = graphene.Int graphql_query = "NumberAttributeType" - graphql_input = "NumberAttributeInput" + graphql_create = "NumberAttributeCreate" + graphql_update = "NumberAttributeUpdate" graphql_filter = graphene.Int infrahub = "Integer" @@ -209,7 +231,8 @@ class Bandwidth(InfrahubDataType): label: str = "Bandwidth" graphql = graphene.Int graphql_query = 
"NumberAttributeType" - graphql_input = "NumberAttributeInput" + graphql_create = "NumberAttributeCreate" + graphql_update = "NumberAttributeUpdate" graphql_filter = graphene.Int infrahub = "Integer" @@ -218,7 +241,8 @@ class IPHost(InfrahubDataType): label: str = "IPHost" graphql = graphene.String graphql_query = "IPHostType" - graphql_input = "TextAttributeInput" + graphql_create = "TextAttributeCreate" + graphql_update = "TextAttributeUpdate" graphql_filter = graphene.String infrahub = "IPHost" @@ -227,7 +251,8 @@ class IPNetwork(InfrahubDataType): label: str = "IPNetwork" graphql = graphene.String graphql_query = "IPNetworkType" - graphql_input = "TextAttributeInput" + graphql_create = "TextAttributeCreate" + graphql_update = "TextAttributeUpdate" graphql_filter = graphene.String infrahub = "IPNetwork" @@ -236,7 +261,8 @@ class Boolean(InfrahubDataType): label: str = "Boolean" graphql = graphene.Boolean graphql_query = "CheckboxAttributeType" - graphql_input = "CheckboxAttributeInput" + graphql_create = "CheckboxAttributeCreate" + graphql_update = "CheckboxAttributeUpdate" graphql_filter = graphene.Boolean infrahub = "Boolean" @@ -245,7 +271,8 @@ class Checkbox(InfrahubDataType): label: str = "Checkbox" graphql = graphene.Boolean graphql_query = "CheckboxAttributeType" - graphql_input = "CheckboxAttributeInput" + graphql_create = "CheckboxAttributeCreate" + graphql_update = "CheckboxAttributeUpdate" graphql_filter = graphene.Boolean infrahub = "Boolean" @@ -254,7 +281,8 @@ class List(InfrahubDataType): label: str = "List" graphql = GenericScalar graphql_query = "ListAttributeType" - graphql_input = "ListAttributeInput" + graphql_create = "ListAttributeCreate" + graphql_update = "ListAttributeUpdate" graphql_filter = GenericScalar infrahub = "ListAttribute" @@ -263,7 +291,8 @@ class JSON(InfrahubDataType): label: str = "JSON" graphql = GenericScalar graphql_query = "JSONAttributeType" - graphql_input = "JSONAttributeInput" + graphql_create = "JSONAttributeCreate" + graphql_update = "JSONAttributeUpdate" graphql_filter = GenericScalar infrahub = "JSONAttribute" @@ -272,7 +301,8 @@ class Any(InfrahubDataType): label: str = "Any" graphql = GenericScalar graphql_query = "AnyAttributeType" - graphql_input = "AnyAttributeInput" + graphql_create = "AnyAttributeCreate" + graphql_update = "AnyAttributeUpdate" graphql_filter = GenericScalar infrahub = "AnyAttribute" diff --git a/backend/templates/attributeschema_imports.j2 b/backend/templates/attributeschema_imports.j2 index b413828631..c46c38bd78 100644 --- a/backend/templates/attributeschema_imports.j2 +++ b/backend/templates/attributeschema_imports.j2 @@ -1,7 +1,7 @@ from typing import Optional, Any, TYPE_CHECKING from infrahub.core.models import HashableModel -from infrahub.core.constants import HashableModelState +from infrahub.core.constants import AllowOverrideType, HashableModelState from infrahub.core.schema.dropdown import DropdownChoice # noqa: TCH001 if TYPE_CHECKING: diff --git a/backend/templates/relationshipschema_imports.j2 b/backend/templates/relationshipschema_imports.j2 index af5ae174fb..974c8e45a4 100644 --- a/backend/templates/relationshipschema_imports.j2 +++ b/backend/templates/relationshipschema_imports.j2 @@ -1,5 +1,5 @@ from typing import Optional -from infrahub.core.constants import BranchSupportType, RelationshipKind, RelationshipCardinality, RelationshipDirection, HashableModelState # noqa: TCH001 +from infrahub.core.constants import AllowOverrideType, BranchSupportType, RelationshipKind, RelationshipCardinality, 
RelationshipDeleteBehavior, RelationshipDirection, HashableModelState # noqa: TCH001 from infrahub.core.models import HashableModel from infrahub.core.schema.filter import FilterSchema # noqa: TCH001 diff --git a/backend/tests/adapters/cache.py b/backend/tests/adapters/cache.py new file mode 100644 index 0000000000..bce6ada8e2 --- /dev/null +++ b/backend/tests/adapters/cache.py @@ -0,0 +1,29 @@ +import re +from typing import Optional + +from infrahub.services.adapters.cache import InfrahubCache + + +class MemoryCache(InfrahubCache): + def __init__(self) -> None: + self.storage: dict[str, str] = {} + + async def delete(self, key: str) -> None: + self.storage.pop(key, None) + + async def get(self, key: str) -> Optional[str]: + return self.storage.get(key) + + async def get_values(self, keys: list[str]) -> list[Optional[str]]: + return [await self.get(key) for key in keys] + + async def list_keys(self, filter_pattern: str) -> list[str]: + regex_pattern = f'^{filter_pattern.replace("*", ".*").replace("?", ".")}$' + compiled_pattern = re.compile(regex_pattern) + return [key for key in self.storage.keys() if compiled_pattern.match(key)] + + async def set( + self, key: str, value: str, expires: Optional[int] = None, not_exists: bool = False + ) -> Optional[bool]: + self.storage[key] = value + return True diff --git a/backend/tests/benchmark/test_graphql_query.py b/backend/tests/benchmark/test_graphql_query.py index 142222f037..e2d3d41c8f 100644 --- a/backend/tests/benchmark/test_graphql_query.py +++ b/backend/tests/benchmark/test_graphql_query.py @@ -77,7 +77,7 @@ def test_query_one_model(exec_async, aio_benchmark, db: InfrahubDatabase, defaul db=db, include_mutation=False, include_subscription=False, branch=default_branch ) - for _ in range(0, NBR_WARMUP): + for _ in range(NBR_WARMUP): exec_async( graphql, schema=gql_params.schema, @@ -127,7 +127,7 @@ def test_query_rel_many(exec_async, aio_benchmark, db: InfrahubDatabase, default db=db, include_mutation=False, include_subscription=False, branch=default_branch ) - for _ in range(0, NBR_WARMUP): + for _ in range(NBR_WARMUP): exec_async( graphql, schema=gql_params.schema, @@ -177,7 +177,7 @@ def test_query_rel_one(exec_async, aio_benchmark, db: InfrahubDatabase, default_ db=db, include_mutation=False, include_subscription=False, branch=default_branch ) - for _ in range(0, NBR_WARMUP): + for _ in range(NBR_WARMUP): exec_async( graphql, schema=gql_params.schema, diff --git a/backend/tests/benchmark/test_load_node_to_db.py b/backend/tests/benchmark/test_load_node_to_db.py new file mode 100644 index 0000000000..01f284eabc --- /dev/null +++ b/backend/tests/benchmark/test_load_node_to_db.py @@ -0,0 +1,31 @@ +from infrahub.core import registry +from infrahub.core.schema import ( + NodeSchema, + SchemaRoot, + internal_schema, +) +from infrahub.core.schema_manager import SchemaManager +from infrahub.database import InfrahubDatabase + + +def test_load_node_to_db_node_schema(aio_benchmark, db: InfrahubDatabase, default_branch): + registry.schema = SchemaManager() + registry.schema.register_schema(schema=SchemaRoot(**internal_schema), branch=default_branch.name) + + SCHEMA = { + "name": "Criticality", + "namespace": "Builtin", + "default_filter": "name__value", + "attributes": [ + {"name": "name", "kind": "Text", "unique": True}, + {"name": "level", "kind": "Number"}, + {"name": "color", "kind": "Text", "default_value": "#444444"}, + {"name": "description", "kind": "Text", "optional": True}, + ], + "relationships": [ + {"name": "others", "peer": 
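# A quick usage sketch for the MemoryCache test adapter added above; it assumes the
# class is importable as tests.adapters.cache.MemoryCache, and the key names are
# purely illustrative rather than taken from Infrahub itself.
import asyncio

from tests.adapters.cache import MemoryCache


async def demo_memory_cache() -> None:
    cache = MemoryCache()
    await cache.set(key="job:123", value="pending")
    await cache.set(key="job:456", value="done")
    # list_keys() translates the glob-style filter into a regex ("*" -> ".*", "?" -> ".")
    # and matches it against every stored key.
    keys = await cache.list_keys(filter_pattern="job:*")
    assert sorted(keys) == ["job:123", "job:456"]
    # get_values() returns values in the order of the requested keys.
    assert await cache.get_values(keys=["job:123", "job:456"]) == ["pending", "done"]
    await cache.delete(key="job:123")
    assert await cache.get(key="job:123") is None


asyncio.run(demo_memory_cache())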
"BuiltinCriticality", "optional": True, "cardinality": "many"}, + ], + } + node = NodeSchema(**SCHEMA) # type: ignore[arg-type] + + aio_benchmark(registry.schema.load_node_to_db, node=node, db=db, branch=default_branch) diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py index 1ce06af2ed..b3fa4f6bae 100644 --- a/backend/tests/conftest.py +++ b/backend/tests/conftest.py @@ -1,6 +1,5 @@ import asyncio import importlib -import json import os import sys from pathlib import Path @@ -18,13 +17,16 @@ from infrahub.core.initialization import ( create_default_branch, create_global_branch, + create_ipam_namespace, create_root_node, ) +from infrahub.core.node import Node from infrahub.core.schema import ( SchemaRoot, core_models, internal_schema, ) +from infrahub.core.schema.definitions.core import core_profile_schema_definition from infrahub.core.schema_manager import SchemaBranch, SchemaManager from infrahub.core.utils import delete_all_nodes from infrahub.database import InfrahubDatabase, get_db @@ -57,6 +59,11 @@ def pytest_configure(config): setattr(config.option, "markexpr", markexpr) +@pytest.fixture(scope="session", autouse=True) +def add_tracker(): + os.environ["PYTEST_RUNNING"] = "true" + + @pytest.fixture(scope="session") def event_loop(): """Overrides pytest default function scoped event loop""" @@ -94,6 +101,15 @@ async def default_branch(reset_registry, local_storage_dir, empty_database, db: return branch +@pytest.fixture +async def default_ipnamespace(db: InfrahubDatabase, register_core_models_schema) -> Optional[Node]: + if not registry._default_ipnamespace: + ip_namespace = await create_ipam_namespace(db=db) + registry.default_ipnamespace = ip_namespace.id + return ip_namespace + return None + + @pytest.fixture def local_storage_dir(tmp_path) -> str: storage_dir = os.path.join(str(tmp_path), "storage") @@ -158,20 +174,13 @@ async def data_schema(db: InfrahubDatabase, default_branch: Branch) -> None: { "name": "Owner", "namespace": "Lineage", - "attributes": [ - {"name": "name", "kind": "Text", "unique": True}, - {"name": "description", "kind": "Text", "optional": True}, - ], }, { "name": "Source", "description": "Any Entities that stores or produces data.", "namespace": "Lineage", - "attributes": [ - {"name": "name", "kind": "Text", "unique": True}, - {"name": "description", "kind": "Text", "optional": True}, - ], }, + core_profile_schema_definition, ] } @@ -228,9 +237,9 @@ async def car_person_schema_unregistered(db: InfrahubDatabase, node_group_schema "branch": BranchSupportType.AWARE.value, "attributes": [ {"name": "name", "kind": "Text", "unique": True}, - {"name": "nbr_seats", "kind": "Number"}, - {"name": "color", "kind": "Text", "default_value": "#444444", "max_length": 7}, - {"name": "is_electric", "kind": "Boolean"}, + {"name": "nbr_seats", "kind": "Number", "optional": True}, + {"name": "color", "kind": "Text", "default_value": "#444444", "max_length": 7, "optional": True}, + {"name": "is_electric", "kind": "Boolean", "optional": True}, { "name": "transmission", "kind": "Text", @@ -323,13 +332,14 @@ async def node_group_schema(db: InfrahubDatabase, default_branch: Branch, data_s def tmp_path_module_scope() -> Generator[str, None, None]: """Fixture similar to tmp_path but with scope=module""" with TemporaryDirectory() as tmpdir: + directory = tmpdir if sys.platform == "darwin" and tmpdir.startswith("/var/"): # On Mac /var is symlinked to /private/var. 
TemporaryDirectory uses the /var prefix # however when using 'git worktree list --porcelain' the path is returned with # /prefix/var and InfrahubRepository fails to initialize the repository as the # relative path of the repository isn't handled correctly - tmpdir = f"/private{tmpdir}" - yield tmpdir + directory = f"/private{tmpdir}" + yield directory @pytest.fixture(scope="module") @@ -366,7 +376,7 @@ def add_mock_reply(self, response: InfrahubResponse): async def rpc(self, message: InfrahubMessage, response_class: type[ResponseClass]) -> ResponseClass: self.messages.append(message) response = self.response.pop() - data = json.loads(response.body) + data = ujson.loads(response.body) return response_class(**data) diff --git a/backend/tests/fixtures/repos/car-dealership/initial__main/.infrahub.yml b/backend/tests/fixtures/repos/car-dealership/initial__main/.infrahub.yml index 9790c12830..7eaf9508bf 100644 --- a/backend/tests/fixtures/repos/car-dealership/initial__main/.infrahub.yml +++ b/backend/tests/fixtures/repos/car-dealership/initial__main/.infrahub.yml @@ -1,9 +1,15 @@ +# yaml-language-server: $schema=https://schema.infrahub.app/python-sdk/repository-config/develop.json --- check_definitions: - name: "car_description_check" file_path: "checks/car_overview.py" class_name: "CarDescription" + - name: "owner_age_check" + file_path: "checks/car_owner_age.py" + parameters: + owner: "name__value" + targets: "people" jinja2_transforms: - name: person_with_cars @@ -19,3 +25,18 @@ artifact_definitions: content_type: "text/plain" targets: "people" transformation: "person_with_cars" + +generator_definitions: + - name: cartags + file_path: "generators/cartags.py" + targets: people + query: cartags + parameters: + name: "name__value" + - name: cartags_convert_response + file_path: "generators/cartags_convert_response.py" + targets: people + query: cartags + convert_query_response: true + parameters: + name: "name__value" diff --git a/backend/tests/fixtures/repos/car-dealership/initial__main/checks/car_owner_age.gql b/backend/tests/fixtures/repos/car-dealership/initial__main/checks/car_owner_age.gql new file mode 100644 index 0000000000..442cf05330 --- /dev/null +++ b/backend/tests/fixtures/repos/car-dealership/initial__main/checks/car_owner_age.gql @@ -0,0 +1,14 @@ +query CarOwnerAge($owner: String!) 
{ + TestingPerson(name__value: $owner) { + edges { + node { + age { + value + } + cars { + count + } + } + } + } +} \ No newline at end of file diff --git a/backend/tests/fixtures/repos/car-dealership/initial__main/checks/car_owner_age.py b/backend/tests/fixtures/repos/car-dealership/initial__main/checks/car_owner_age.py new file mode 100644 index 0000000000..d1a2f1ed43 --- /dev/null +++ b/backend/tests/fixtures/repos/car-dealership/initial__main/checks/car_owner_age.py @@ -0,0 +1,15 @@ +from infrahub_sdk.checks import InfrahubCheck + + +class Check(InfrahubCheck): + query = "car_owner_age" + + def validate(self, data: dict) -> None: + owner = self.params["owner"] + person = data["TestingPerson"]["edges"][0]["node"] + number_of_cars = person["cars"]["count"] + age = person["age"]["value"] + if number_of_cars and age < 18: + self.log_error(message=f"{owner} ({age}) is very young to own {number_of_cars} car(s)!") + else: + self.log_info(message=f"Check passed for {owner}, owner of {number_of_cars} car(s)") diff --git a/backend/tests/fixtures/repos/car-dealership/initial__main/generators/__init__.py b/backend/tests/fixtures/repos/car-dealership/initial__main/generators/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/tests/fixtures/repos/car-dealership/initial__main/generators/cartags.gql b/backend/tests/fixtures/repos/car-dealership/initial__main/generators/cartags.gql new file mode 100644 index 0000000000..55e20092f6 --- /dev/null +++ b/backend/tests/fixtures/repos/car-dealership/initial__main/generators/cartags.gql @@ -0,0 +1,19 @@ +query CarOwner($name: String!) { + TestingPerson(name__value: $name) { + edges { + node @expand { + cars { + edges { + node { + __typename + id + name { + value + } + } + } + } + } + } + } +} \ No newline at end of file diff --git a/backend/tests/fixtures/repos/car-dealership/initial__main/generators/cartags.py b/backend/tests/fixtures/repos/car-dealership/initial__main/generators/cartags.py new file mode 100644 index 0000000000..0a93f3b689 --- /dev/null +++ b/backend/tests/fixtures/repos/car-dealership/initial__main/generators/cartags.py @@ -0,0 +1,15 @@ +from infrahub_sdk.generator import InfrahubGenerator + + +class Generator(InfrahubGenerator): + async def generate(self, data: dict) -> None: + owner = data["TestingPerson"]["edges"][0]["node"] + owner_name: str = owner["name"]["value"] + for car in owner["cars"]["edges"]: + car_name: str = car["node"]["name"]["value"] + payload = { + "name": f"{owner_name.lower()}-{car_name.lower()}", + "description": "Tag", + } + obj = await self.client.create(kind="BuiltinTag", data=payload) + await obj.save(allow_upsert=True) diff --git a/backend/tests/fixtures/repos/car-dealership/initial__main/generators/cartags_convert_response.py b/backend/tests/fixtures/repos/car-dealership/initial__main/generators/cartags_convert_response.py new file mode 100644 index 0000000000..76d129d298 --- /dev/null +++ b/backend/tests/fixtures/repos/car-dealership/initial__main/generators/cartags_convert_response.py @@ -0,0 +1,14 @@ +from infrahub_sdk.generator import InfrahubGenerator + + +class Generator(InfrahubGenerator): + async def generate(self, data: dict) -> None: + owner = self.nodes[0] + for node in owner.cars.peers: + car = node.peer + payload = { + "name": f"InfrahubNode-{owner.name.value.lower()}-{car.name.value.lower()}", + "description": "Tag", + } + obj = await self.client.create(kind="BuiltinTag", data=payload) + await obj.save(allow_upsert=True) diff --git 
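# How the files added above appear to fit together (a hedged reading based only on
# the fixture content, not on separate documentation): the owner_age_check entry in
# .infrahub.yml sets targets: "people" and parameters: owner: "name__value", so the
# check runs against members of the "people" group, with each member's name__value
# supplied as the $owner variable of the CarOwnerAge query and exposed to the check
# as self.params["owner"]. The two generator definitions differ only in
# convert_query_response: cartags.py works on the raw GraphQL response dict
# (data["TestingPerson"]["edges"][0]["node"]), while cartags_convert_response.py,
# with convert_query_response: true, receives the same result converted into SDK
# node objects reachable through self.nodes and owner.cars.peers.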
a/backend/tests/fixtures/schemas/infra_simple_01.json b/backend/tests/fixtures/schemas/infra_simple_01.json index 0f394a2e51..68bfddb0e6 100644 --- a/backend/tests/fixtures/schemas/infra_simple_01.json +++ b/backend/tests/fixtures/schemas/infra_simple_01.json @@ -57,7 +57,7 @@ {"name": "description", "kind": "Text", "optional": true} ], "relationships": [ - {"name": "interface", "peer": "InfraInterface", "optional": true, "cardinality": "one", "kind": "Parent"} + {"name": "interface", "peer": "InfraInterface", "optional": false, "cardinality": "one", "kind": "Parent"} ] } ] diff --git a/backend/tests/fixtures/schemas/infra_w_generics_01.json b/backend/tests/fixtures/schemas/infra_w_generics_01.json index 13bd15ffd7..45303b5b16 100644 --- a/backend/tests/fixtures/schemas/infra_w_generics_01.json +++ b/backend/tests/fixtures/schemas/infra_w_generics_01.json @@ -66,7 +66,7 @@ {"name": "description", "kind": "Text", "optional": true} ], "relationships": [ - {"name": "interface", "peer": "InfraInterface", "optional": true, "cardinality": "one", "kind": "Parent"} + {"name": "interface", "peer": "InfraInterface", "optional": false, "cardinality": "one", "kind": "Parent"} ] } ] diff --git a/backend/tests/helpers/test_app.py b/backend/tests/helpers/test_app.py index 6f9f9a885a..6d15bca759 100644 --- a/backend/tests/helpers/test_app.py +++ b/backend/tests/helpers/test_app.py @@ -88,7 +88,9 @@ async def test_client( async def client( self, test_client: InfrahubTestClient, api_token: str, bus_simulator: BusSimulator ) -> InfrahubClient: - config = Config(api_token=api_token, requester=test_client.async_request) + config = Config( + api_token=api_token, requester=test_client.async_request, sync_requester=test_client.sync_request + ) sdk_client = await InfrahubClient.init(config=config) diff --git a/backend/tests/helpers/test_client.py b/backend/tests/helpers/test_client.py index 46d1fcaaf4..953e3cc220 100644 --- a/backend/tests/helpers/test_client.py +++ b/backend/tests/helpers/test_client.py @@ -1,20 +1,34 @@ -import json +import asyncio from typing import Any, Dict, Optional import httpx +import ujson +from fastapi import FastAPI from infrahub_sdk.types import HTTPMethod class InfrahubTestClient(httpx.AsyncClient): + def __init__(self, app: FastAPI, base_url: str = ""): + self.loop = asyncio.get_event_loop() + super().__init__(app=app, base_url=base_url) + async def _request( self, url: str, method: HTTPMethod, headers: Dict[str, Any], timeout: int, payload: Optional[Dict] = None ) -> httpx.Response: content = None if payload: - content = str(json.dumps(payload)).encode("UTF-8") + content = str(ujson.dumps(payload)).encode("UTF-8") return await self.request(method=method.value, url=url, headers=headers, timeout=timeout, content=content) async def async_request( self, url: str, method: HTTPMethod, headers: Dict[str, Any], timeout: int, payload: Optional[Dict] = None ) -> httpx.Response: return await self._request(url=url, method=method, headers=headers, timeout=timeout, payload=payload) + + def sync_request( + self, url: str, method: HTTPMethod, headers: Dict[str, Any], timeout: int, payload: Optional[Dict] = None + ) -> httpx.Response: + future = asyncio.run_coroutine_threadsafe( + self._request(url=url, method=method, headers=headers, timeout=timeout, payload=payload), self.loop + ) + return future.result() diff --git a/backend/tests/integration/conftest.py b/backend/tests/integration/conftest.py index e39308562f..e4a5d54e86 100644 --- a/backend/tests/integration/conftest.py +++ 
b/backend/tests/integration/conftest.py @@ -18,6 +18,11 @@ from infrahub.utils import get_models_dir +@pytest.fixture(scope="session", autouse=True) +def add_tracker(): + os.environ["PYTEST_RUNNING"] = "true" + + @pytest.fixture(scope="session") def event_loop(): """Overrides pytest default function scoped event loop""" @@ -37,15 +42,19 @@ async def db() -> AsyncGenerator[InfrahubDatabase, None]: async def load_infrastructure_schema(db: InfrahubDatabase): - models_dir = get_models_dir() - - schema_txt = Path(os.path.join(models_dir, "infrastructure_base.yml")).read_text() - infra_schema = yaml.safe_load(schema_txt) + base_dir = get_models_dir() + "/base" default_branch_name = registry.default_branch branch_schema = registry.schema.get_schema_branch(name=default_branch_name) tmp_schema = branch_schema.duplicate() - tmp_schema.load_schema(schema=SchemaRoot(**infra_schema)) + + for file_name in os.listdir(base_dir): + file_path = os.path.join(base_dir, file_name) + + if file_path.endswith((".yml", ".yaml")): + schema_txt = Path(file_path).read_text() + loaded_schema = yaml.safe_load(schema_txt) + tmp_schema.load_schema(schema=SchemaRoot(**loaded_schema)) tmp_schema.process() await registry.schema.update_schema_branch(schema=tmp_schema, db=db, branch=default_branch_name, update_db=True) diff --git a/backend/tests/integration/git/test_git_repository.py b/backend/tests/integration/git/test_git_repository.py index 69e2d70ff3..a231a2ec79 100644 --- a/backend/tests/integration/git/test_git_repository.py +++ b/backend/tests/integration/git/test_git_repository.py @@ -23,15 +23,19 @@ async def load_infrastructure_schema(db: InfrahubDatabase): - models_dir = get_models_dir() - - schema_txt = Path(os.path.join(models_dir, "infrastructure_base.yml")).read_text() - infra_schema = yaml.safe_load(schema_txt) + base_dir = get_models_dir() + "/base" default_branch_name = registry.default_branch branch_schema = registry.schema.get_schema_branch(name=default_branch_name) tmp_schema = branch_schema.duplicate() - tmp_schema.load_schema(schema=SchemaRoot(**infra_schema)) + + for file_name in os.listdir(base_dir): + file_path = os.path.join(base_dir, file_name) + + if file_path.endswith((".yml", ".yaml")): + schema_txt = Path(file_path).read_text() + loaded_schema = yaml.safe_load(schema_txt) + tmp_schema.load_schema(schema=SchemaRoot(**loaded_schema)) tmp_schema.process() await registry.schema.update_schema_branch(schema=tmp_schema, db=db, branch=default_branch_name, update_db=True) @@ -122,14 +126,14 @@ async def test_import_all_graphql_query( # 1. Modify an object to validate if its being properly updated # 2. 
Add an object that doesn't exist in GIt and validate that it's been deleted value_before_change = queries[0].query.value - queries[0].query.value = "query myquery { InfraSite { edges { node { id }}}}" + queries[0].query.value = "query myquery { LocationSite { edges { node { id }}}}" await queries[0].save() obj = await Node.init(schema=InfrahubKind.GRAPHQLQUERY, db=db) await obj.new( db=db, name="soontobedeletedquery", - query="query soontobedeletedquery { InfraSite { edges { node { id }}}}", + query="query soontobedeletedquery { LocationSite { edges { node { id }}}}", repository=str(repo.id), ) await obj.save(db=db) diff --git a/backend/tests/integration/git/test_readonly_repository.py b/backend/tests/integration/git/test_readonly_repository.py new file mode 100644 index 0000000000..4efeb1dd8b --- /dev/null +++ b/backend/tests/integration/git/test_readonly_repository.py @@ -0,0 +1,108 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING +from unittest.mock import AsyncMock, patch + +import pytest + +from infrahub.core.constants import InfrahubKind +from infrahub.core.manager import NodeManager +from infrahub.core.node import Node +from infrahub.lock import InfrahubLockRegistry +from infrahub.message_bus.messages import RequestArtifactDefinitionGenerate +from infrahub.services import InfrahubServices +from tests.constants import TestKind +from tests.helpers.file_repo import FileRepo +from tests.helpers.schema import CAR_SCHEMA, load_schema +from tests.helpers.test_app import TestInfrahubApp + +if TYPE_CHECKING: + from infrahub_sdk import InfrahubClient + + from infrahub.database import InfrahubDatabase + from tests.conftest import TestHelper + + +class TestCreateReadOnlyRepository(TestInfrahubApp): + def setup_method(self): + lock_patcher = patch("infrahub.message_bus.operations.git.repository.lock") + self.mock_infra_lock = lock_patcher.start() + self.mock_infra_lock.registry = AsyncMock(spec=InfrahubLockRegistry) + + def teardown_method(self): + patch.stopall() + + @pytest.fixture(scope="class") + async def initial_dataset( + self, + db: InfrahubDatabase, + initialize_registry: None, + git_repos_dir_module_scope: str, + git_repos_source_dir_module_scope: str, + ) -> None: + await load_schema(db, schema=CAR_SCHEMA) + FileRepo(name="car-dealership", sources_directory=git_repos_source_dir_module_scope) + john = await Node.init(schema=TestKind.PERSON, db=db) + await john.new(db=db, name="John", height=175, age=25) + await john.save(db=db) + people = await Node.init(schema=InfrahubKind.STANDARDGROUP, db=db) + await people.new(db=db, name="people", members=[john]) + await people.save(db=db) + + async def test_step01_create_repository( + self, + db: InfrahubDatabase, + initial_dataset: None, + git_repos_source_dir_module_scope: str, + client: InfrahubClient, + ) -> None: + branch = await client.branch.create(branch_name="ro_repository", sync_with_git=False) + + client_repository = await client.create( + kind=InfrahubKind.READONLYREPOSITORY, + branch=branch.name, + data={ + "name": "car-dealership", + "location": f"{git_repos_source_dir_module_scope}/car-dealership", + "ref": "main", + }, + ) + await client_repository.save() + + repository = await NodeManager.get_one_by_id_or_default_filter( + db=db, id=client_repository.id, schema_name=InfrahubKind.READONLYREPOSITORY, branch=branch.name + ) + + check_definition = await NodeManager.get_one_by_id_or_default_filter( + db=db, id="car_description_check", schema_name=InfrahubKind.CHECKDEFINITION, branch=branch.name + ) + + assert 
repository.commit.value # type: ignore[attr-defined] + assert check_definition.file_path.value == "checks/car_overview.py" # type: ignore[attr-defined] + + async def test_step02_validate_generated_artifacts(self, db: InfrahubDatabase, client: InfrahubClient): + artifacts = await client.all(kind=InfrahubKind.ARTIFACT, branch="ro_repository") + assert artifacts + assert artifacts[0].name.value == "Ownership report" + + async def test_step03_merge_branch(self, db: InfrahubDatabase, client: InfrahubClient, helper: TestHelper): + await client.branch.merge(branch_name="ro_repository") + + check_definition = await NodeManager.get_one_by_id_or_default_filter( + db=db, id="car_description_check", schema_name=InfrahubKind.CHECKDEFINITION + ) + assert check_definition.file_path.value == "checks/car_overview.py" # type: ignore[attr-defined] + + bus_simulator = helper.get_message_bus_simulator() + service = InfrahubServices(client=client, message_bus=bus_simulator) + bus_simulator.service = service + + artifact_definitions = await client.all(kind=InfrahubKind.ARTIFACTDEFINITION) + for artifact_definition in artifact_definitions: + await service.send( + message=RequestArtifactDefinitionGenerate(artifact_definition=artifact_definition.id, branch="main") + ) + + artifacts = await client.all(kind=InfrahubKind.ARTIFACT) + assert artifacts + assert artifacts[0].name.value == "Ownership report" diff --git a/backend/tests/integration/ipam/__init__.py b/backend/tests/integration/ipam/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/tests/integration/ipam/base.py b/backend/tests/integration/ipam/base.py new file mode 100644 index 0000000000..5e82ff7d8f --- /dev/null +++ b/backend/tests/integration/ipam/base.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +import pytest + +from infrahub.core import registry +from infrahub.core.constants import InfrahubKind +from infrahub.core.node import Node +from tests.helpers.test_app import TestInfrahubApp + +if TYPE_CHECKING: + from infrahub.database import InfrahubDatabase + + +class TestIpamReconcileBase(TestInfrahubApp): + @pytest.fixture(scope="class") + async def initial_dataset( + self, + db: InfrahubDatabase, + initialize_registry: None, + register_ipam_schema, + ) -> dict[str, Node]: + default_branch = registry.default_branch + + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=default_branch) + address_schema = registry.schema.get_node_schema(name="IpamIPAddress", branch=default_branch) + + # ----------------------- + # Namespace NS1 + # ----------------------- + + ns1 = await Node.init(db=db, schema=InfrahubKind.NAMESPACE) + await ns1.new(db=db, name="ns1") + await ns1.save(db=db) + + net161 = await Node.init(db=db, schema=prefix_schema) + await net161.new(db=db, prefix="2001:db8::/48", ip_namespace=ns1) + await net161.save(db=db) + + net162 = await Node.init(db=db, schema=prefix_schema) + await net162.new(db=db, prefix="2001:db8::/64", ip_namespace=ns1, parent=net161) + await net162.save(db=db) + + net146 = await Node.init(db=db, schema=prefix_schema) + await net146.new(db=db, prefix="10.0.0.0/8", ip_namespace=ns1) + await net146.save(db=db) + + net140 = await Node.init(db=db, schema=prefix_schema) + await net140.new(db=db, prefix="10.10.0.0/16", ip_namespace=ns1, parent=net146) + await net140.save(db=db) + + net142 = await Node.init(db=db, schema=prefix_schema) + await net142.new(db=db, prefix="10.10.1.0/24", parent=net140, ip_namespace=ns1) + await 
net142.save(db=db) + + net143 = await Node.init(db=db, schema=prefix_schema) + await net143.new(db=db, prefix="10.10.1.0/27", parent=net142, ip_namespace=ns1) + await net143.save(db=db) + + net144 = await Node.init(db=db, schema=prefix_schema) + await net144.new(db=db, prefix="10.10.2.0/24", parent=net140, ip_namespace=ns1) + await net144.save(db=db) + + net145 = await Node.init(db=db, schema=prefix_schema) + await net145.new(db=db, prefix="10.10.3.0/27", parent=net140, ip_namespace=ns1) + await net145.save(db=db) + + address10 = await Node.init(db=db, schema=address_schema) + await address10.new(db=db, address="10.10.0.0", ip_prefix=net140, ip_namespace=ns1) + await address10.save(db=db) + + address11 = await Node.init(db=db, schema=address_schema) + await address11.new(db=db, address="10.10.1.1", ip_prefix=net143, ip_namespace=ns1) + await address11.save(db=db) + + # ----------------------- + # Namespace NS2 + # ----------------------- + ns2 = await Node.init(db=db, schema=InfrahubKind.NAMESPACE) + await ns2.new(db=db, name="ns2") + await ns2.save(db=db) + + net240 = await Node.init(db=db, schema=prefix_schema) + await net240.new(db=db, prefix="10.10.0.0/15", ip_namespace=ns2) + await net240.save(db=db) + + net241 = await Node.init(db=db, schema=prefix_schema) + await net241.new(db=db, prefix="10.10.0.0/24", parent=net240, ip_namespace=ns2) + await net241.save(db=db) + + net242 = await Node.init(db=db, schema=prefix_schema) + await net242.new(db=db, prefix="10.10.4.0/27", parent=net240, ip_namespace=ns2) + await net242.save(db=db) + return { + "ns1": ns1, + "ns2": ns2, + "net161": net161, + "net162": net162, + "net140": net140, + "net142": net142, + "net143": net143, + "net144": net144, + "net145": net145, + "net146": net146, + "address10": address10, + "address11": address11, + "net240": net240, + "net241": net241, + "net242": net242, + } diff --git a/backend/tests/integration/ipam/conftest.py b/backend/tests/integration/ipam/conftest.py new file mode 100644 index 0000000000..39fef23397 --- /dev/null +++ b/backend/tests/integration/ipam/conftest.py @@ -0,0 +1,44 @@ +from typing import Any + +import pytest + +from infrahub.core import registry +from infrahub.core.branch import Branch +from infrahub.core.constants import BranchSupportType, InfrahubKind +from infrahub.core.schema import SchemaRoot +from infrahub.core.schema_manager import SchemaBranch + + +@pytest.fixture(scope="class") +async def ipam_schema() -> SchemaRoot: + SCHEMA: dict[str, Any] = { + "nodes": [ + { + "name": "IPPrefix", + "namespace": "Ipam", + "default_filter": "prefix__value", + "order_by": ["prefix__value"], + "display_labels": ["prefix__value"], + "branch": BranchSupportType.AWARE.value, + "inherit_from": [InfrahubKind.IPPREFIX], + }, + { + "name": "IPAddress", + "namespace": "Ipam", + "default_filter": "address__value", + "order_by": ["address__value"], + "display_labels": ["address__value"], + "branch": BranchSupportType.AWARE.value, + "inherit_from": [InfrahubKind.IPADDRESS], + }, + ], + } + + return SchemaRoot(**SCHEMA) + + +@pytest.fixture(scope="class") +async def register_ipam_schema(default_branch: Branch, ipam_schema: SchemaRoot) -> SchemaBranch: + schema_branch = registry.schema.register_schema(schema=ipam_schema, branch=default_branch.name) + default_branch.update_schema_hash() + return schema_branch diff --git a/backend/tests/integration/ipam/test_ipam_merge_reconcile.py b/backend/tests/integration/ipam/test_ipam_merge_reconcile.py new file mode 100644 index 0000000000..465e84362e --- /dev/null +++ 
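# For orientation, the prefix/address layout that the initial_dataset fixture above
# builds, as implied by the parent/ip_prefix/ip_namespace arguments (a sketch, not
# generated output):
#
#   namespace ns1                                namespace ns2
#   ├── 2001:db8::/48                            └── 10.10.0.0/15
#   │   └── 2001:db8::/64                            ├── 10.10.0.0/24
#   └── 10.0.0.0/8                                   └── 10.10.4.0/27
#       └── 10.10.0.0/16 ── address 10.10.0.0
#           ├── 10.10.1.0/24
#           │   └── 10.10.1.0/27 ── address 10.10.1.1
#           ├── 10.10.2.0/24
#           └── 10.10.3.0/27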
b/backend/tests/integration/ipam/test_ipam_merge_reconcile.py @@ -0,0 +1,82 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +import pytest + +from infrahub import config +from infrahub.core import registry +from infrahub.core.initialization import create_branch +from infrahub.core.manager import NodeManager +from infrahub.core.node import Node + +from .base import TestIpamReconcileBase + +if TYPE_CHECKING: + from infrahub_sdk import InfrahubClient + + from infrahub.database import InfrahubDatabase + + +class TestIpamMergeReconcile(TestIpamReconcileBase): + @pytest.fixture(scope="class", autouse=True) + def enable_broker_settings(self): + config.SETTINGS.broker.enable = True + + @pytest.fixture(scope="class") + async def branch_1(self, db: InfrahubDatabase): + return await create_branch(db=db, branch_name="new_address") + + @pytest.fixture(scope="class") + async def new_address_1(self, branch_1, initial_dataset, db: InfrahubDatabase): + address_schema = registry.schema.get_node_schema(name="IpamIPAddress", branch=branch_1) + new_address = await Node.init(schema=address_schema, db=db, branch=branch_1) + await new_address.new(db=db, address="10.10.0.2", ip_namespace=initial_dataset["ns1"].id) + await new_address.save(db=db) + return new_address + + @pytest.fixture(scope="class") + async def branch_2(self, db: InfrahubDatabase): + return await create_branch(db=db, branch_name="delete_prefix") + + async def test_step01_add_address( + self, db: InfrahubDatabase, initial_dataset, client: InfrahubClient, branch_1, new_address_1 + ) -> None: + success = await client.branch.merge(branch_name=branch_1.name) + assert success is True + + updated_address = await NodeManager.get_one(db=db, branch=branch_1.name, id=new_address_1.id) + parent_rels = await updated_address.ip_prefix.get_relationships(db=db) # type: ignore[union-attr] + assert len(parent_rels) == 1 + assert parent_rels[0].peer_id == initial_dataset["net140"].id + + async def test_step02_add_delete_prefix( + self, db: InfrahubDatabase, initial_dataset, client: InfrahubClient, branch_2, new_address_1 + ) -> None: + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=branch_2) + new_prefix = await Node.init(schema=prefix_schema, db=db, branch=registry.default_branch) + await new_prefix.new(db=db, prefix="10.10.0.0/17", ip_namespace=initial_dataset["ns1"].id) + await new_prefix.save(db=db) + deleted_prefix_branch = await NodeManager.get_one(db=db, branch=branch_2, id=initial_dataset["net140"].id) + assert deleted_prefix_branch + await deleted_prefix_branch.delete(db=db) + + success = await client.branch.merge(branch_name=branch_2.name) + assert success is True + + deleted_prefix = await NodeManager.get_one(db=db, branch=branch_2.name, id=deleted_prefix_branch.id) + assert deleted_prefix is None + new_prefix_branch = await NodeManager.get_one(db=db, branch=branch_2.name, id=new_prefix.id) + parent_rels = await new_prefix_branch.parent.get_relationships(db=db) # type: ignore[union-attr] + assert len(parent_rels) == 1 + assert parent_rels[0].peer_id == initial_dataset["net146"].id + children_rels = await new_prefix_branch.children.get_relationships(db=db) # type: ignore[union-attr] + assert len(children_rels) == 3 + assert {child.peer_id for child in children_rels} == { + initial_dataset["net142"].id, + initial_dataset["net144"].id, + initial_dataset["net145"].id, + } + address_rels = await new_prefix_branch.ip_addresses.get_relationships(db=db) # type: ignore[union-attr] + assert 
len(address_rels) == 2 + assert {ar.peer_id for ar in address_rels} == {new_address_1.id, initial_dataset["address10"].id} diff --git a/backend/tests/integration/ipam/test_ipam_rebase_reconcile.py b/backend/tests/integration/ipam/test_ipam_rebase_reconcile.py new file mode 100644 index 0000000000..cc5363fd04 --- /dev/null +++ b/backend/tests/integration/ipam/test_ipam_rebase_reconcile.py @@ -0,0 +1,172 @@ +from __future__ import annotations + +import ipaddress +from typing import TYPE_CHECKING + +from infrahub.core import registry +from infrahub.core.initialization import create_branch +from infrahub.core.ipam.reconciler import IpamReconciler +from infrahub.core.manager import NodeManager +from infrahub.core.node import Node + +from .base import TestIpamReconcileBase + +if TYPE_CHECKING: + from infrahub_sdk import InfrahubClient + + from infrahub.database import InfrahubDatabase + + +class TestIpamRebaseReconcile(TestIpamReconcileBase): + async def test_step01_add_address( + self, + db: InfrahubDatabase, + initial_dataset, + client: InfrahubClient, + ) -> None: + branch = await create_branch(db=db, branch_name="new_address") + address_schema = registry.schema.get_node_schema(name="IpamIPAddress", branch=branch) + + new_address = await Node.init(schema=address_schema, db=db, branch=branch) + await new_address.new(db=db, address="10.10.0.2", ip_namespace=initial_dataset["ns1"].id) + await new_address.save(db=db) + + success = await client.branch.rebase(branch_name=branch.name) + assert success is True + + updated_address = await NodeManager.get_one(db=db, branch=branch.name, id=new_address.id) + parent_rels = await updated_address.ip_prefix.get_relationships(db=db) # type: ignore[union-attr] + assert len(parent_rels) == 1 + assert parent_rels[0].peer_id == initial_dataset["net140"].id + + async def test_step02_add_delete_prefix( + self, + db: InfrahubDatabase, + initial_dataset, + client: InfrahubClient, + ) -> None: + branch = await create_branch(db=db, branch_name="delete_prefix") + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=branch) + + new_prefix = await Node.init(schema=prefix_schema, db=db, branch=registry.default_branch) + await new_prefix.new(db=db, prefix="10.10.0.0/17", ip_namespace=initial_dataset["ns1"].id) + await new_prefix.save(db=db) + deleted_prefix_branch = await NodeManager.get_one(db=db, branch=branch, id=initial_dataset["net140"].id) + assert deleted_prefix_branch + await deleted_prefix_branch.delete(db=db) + + success = await client.branch.rebase(branch_name=branch.name) + assert success is True + + deleted_prefix = await NodeManager.get_one(db=db, branch=branch.name, id=deleted_prefix_branch.id) + assert deleted_prefix is None + new_prefix_branch = await NodeManager.get_one(db=db, branch=branch.name, id=new_prefix.id) + parent_rels = await new_prefix_branch.parent.get_relationships(db=db) # type: ignore[union-attr] + assert len(parent_rels) == 1 + assert parent_rels[0].peer_id == initial_dataset["net146"].id + children_rels = await new_prefix_branch.children.get_relationships(db=db) # type: ignore[union-attr] + assert len(children_rels) == 3 + assert {child.peer_id for child in children_rels} == { + initial_dataset["net142"].id, + initial_dataset["net144"].id, + initial_dataset["net145"].id, + } + address_rels = await new_prefix_branch.ip_addresses.get_relationships(db=db) # type: ignore[union-attr] + assert len(address_rels) == 1 + assert address_rels[0].peer_id == initial_dataset["address10"].id + + async def 
test_step03_interlinked_prefixes_and_addresses( + self, + db: InfrahubDatabase, + initial_dataset, + client: InfrahubClient, + ) -> None: + branch = await create_branch(db=db, branch_name="interlinked") + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=branch) + address_schema = registry.schema.get_node_schema(name="IpamIPAddress", branch=branch) + + net_10_0_0_0_7 = await Node.init(schema=prefix_schema, db=db, branch=branch) + await net_10_0_0_0_7.new(db=db, prefix="10.0.0.0/7", ip_namespace=initial_dataset["ns1"].id) + await net_10_0_0_0_7.save(db=db) + net_10_0_0_0_15 = await Node.init(schema=prefix_schema, db=db, branch=branch) + await net_10_0_0_0_15.new( + db=db, prefix="10.0.0.0/15", parent=net_10_0_0_0_7.id, ip_namespace=initial_dataset["ns1"].id + ) + await net_10_0_0_0_15.save(db=db) + net_10_10_8_0_22 = await Node.init(schema=prefix_schema, db=db, branch=branch) + await net_10_10_8_0_22.new( + db=db, prefix="10.10.8.0/22", parent=net_10_0_0_0_15.id, ip_namespace=initial_dataset["ns1"].id + ) + await net_10_10_8_0_22.save(db=db) + address_10_10_1_2 = await Node.init(schema=address_schema, db=db, branch=branch) + await address_10_10_1_2.new( + db=db, address="10.10.1.2", ip_prefix=net_10_10_8_0_22.id, ip_namespace=initial_dataset["ns1"].id + ) + await address_10_10_1_2.save(db=db) + reconciler = IpamReconciler(db=db, branch=registry.get_branch_from_registry()) + await reconciler.reconcile( + ip_value=ipaddress.ip_network(initial_dataset["net143"].prefix.value), + namespace=initial_dataset["ns1"].id, + node_uuid=initial_dataset["net143"].id, + is_delete=True, + ) + + success = await client.branch.rebase(branch_name=branch.name) + assert success is True + + # 10.10.0.0/7 + net_10_0_0_0_7_check = await NodeManager.get_one(db=db, branch=branch.name, id=net_10_0_0_0_7.id) + parent_rels = await net_10_0_0_0_7_check.parent.get_relationships(db=db) # type: ignore[union-attr] + assert len(parent_rels) == 0 + child_rels = await net_10_0_0_0_7_check.children.get_relationships(db=db) # type: ignore[union-attr] + assert len(child_rels) == 1 + assert child_rels[0].peer_id == initial_dataset["net146"].id + # 10.10.0.0/8 + net146_branch = await NodeManager.get_one(db=db, branch=branch.name, id=initial_dataset["net146"].id) + parent_rels = await net146_branch.parent.get_relationships(db=db) # type: ignore[union-attr] + assert len(parent_rels) == 1 + assert parent_rels[0].peer_id == net_10_0_0_0_7.id + child_rels = await net146_branch.children.get_relationships(db=db) # type: ignore[union-attr] + assert len(child_rels) == 2 + assert {c.peer_id for c in child_rels} == {net_10_0_0_0_15.id, initial_dataset["net140"].id} + # 10.10.0.0/15 + net_10_0_0_0_15_check = await NodeManager.get_one(db=db, branch=branch.name, id=net_10_0_0_0_15.id) + parent_rels = await net_10_0_0_0_15_check.parent.get_relationships(db=db) # type: ignore[union-attr] + assert len(parent_rels) == 1 + assert parent_rels[0].peer_id == initial_dataset["net146"].id + child_rels = await net_10_0_0_0_15_check.children.get_relationships(db=db) # type: ignore[union-attr] + assert len(child_rels) == 0 + # 10.10.0.0/16 + net140_branch = await NodeManager.get_one(db=db, branch=branch.name, id=initial_dataset["net140"].id) + parent_rels = await net140_branch.parent.get_relationships(db=db) # type: ignore[union-attr] + assert len(parent_rels) == 1 + assert parent_rels[0].peer_id == initial_dataset["net146"].id + child_rels = await net140_branch.children.get_relationships(db=db) # type: ignore[union-attr] + assert 
len(child_rels) == 3 + assert {c.peer_id for c in child_rels} == { + initial_dataset["net142"].id, + initial_dataset["net144"].id, + initial_dataset["net145"].id, + } + child_addr_rels = await net140_branch.ip_addresses.get_relationships(db=db) # type: ignore[union-attr] + assert len(child_addr_rels) == 1 + assert child_addr_rels[0].peer_id == initial_dataset["address10"].id + # 10.10.0.0/17 + net_10_10_0_0_17_branch = ( + await NodeManager.query( + db=db, branch=branch, schema=prefix_schema, filters={"prefix__value": "10.10.0.0/17"} + ) + )[0] + child_rels = await net_10_10_0_0_17_branch.children.get_relationships(db=db) # type: ignore[attr-defined] + assert len(child_rels) == 1 + assert child_rels[0].peer_id == net_10_10_8_0_22.id + # 10.10.1.1 + address11_branch = await NodeManager.get_one(db=db, branch=branch, id=initial_dataset["address11"].id) + prefix_rels = await address11_branch.ip_prefix.get_relationships(db=db) # type: ignore[union-attr] + assert len(prefix_rels) == 1 + assert prefix_rels[0].peer_id == initial_dataset["net142"].id + # 10.10.1.2 + address_10_10_1_2_branch = await NodeManager.get_one(db=db, branch=branch, id=address_10_10_1_2.id) + prefix_rels = await address_10_10_1_2_branch.ip_prefix.get_relationships(db=db) # type: ignore[union-attr] + assert len(prefix_rels) == 1 + assert prefix_rels[0].peer_id == initial_dataset["net142"].id diff --git a/backend/tests/integration/ipam/test_proposed_change_reconcile.py b/backend/tests/integration/ipam/test_proposed_change_reconcile.py new file mode 100644 index 0000000000..ee45e3a6d4 --- /dev/null +++ b/backend/tests/integration/ipam/test_proposed_change_reconcile.py @@ -0,0 +1,101 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +import pytest + +from infrahub import config +from infrahub.core import registry +from infrahub.core.constants import InfrahubKind +from infrahub.core.initialization import create_branch +from infrahub.core.manager import NodeManager +from infrahub.core.node import Node +from infrahub.services.adapters.cache.redis import RedisCache + +from .base import TestIpamReconcileBase + +if TYPE_CHECKING: + from infrahub_sdk import InfrahubClient + + from infrahub.database import InfrahubDatabase + + +class TestIpamMergeReconcile(TestIpamReconcileBase): + @pytest.fixture(scope="class", autouse=True) + def enable_broker_settings(self): + config.SETTINGS.broker.enable = True + + @pytest.fixture(scope="class", autouse=True) + def bus_simulator_cache(self, bus_simulator): + bus_simulator.service.cache = RedisCache() + + @pytest.fixture(scope="class", autouse=True) + def git_repos_dir(self, git_repos_source_dir_module_scope): ... 
+ + @pytest.fixture(scope="class") + async def branch_1(self, db: InfrahubDatabase): + return await create_branch(db=db, branch_name="new_address") + + @pytest.fixture(scope="class") + async def new_address_1(self, branch_1, initial_dataset, db: InfrahubDatabase): + address_schema = registry.schema.get_node_schema(name="IpamIPAddress", branch=branch_1) + new_address = await Node.init(schema=address_schema, db=db, branch=branch_1) + await new_address.new(db=db, address="10.10.0.2", ip_namespace=initial_dataset["ns1"].id) + await new_address.save(db=db) + return new_address + + @pytest.fixture(scope="class") + async def branch_2(self, db: InfrahubDatabase): + return await create_branch(db=db, branch_name="delete_prefix") + + async def test_step01_add_address( + self, db: InfrahubDatabase, initial_dataset, client: InfrahubClient, branch_1, new_address_1 + ) -> None: + proposed_change_create = await client.create( + kind=InfrahubKind.PROPOSEDCHANGE, + data={"source_branch": branch_1.name, "destination_branch": "main", "name": "add_address_pc"}, + ) + await proposed_change_create.save() + proposed_change_create.state.value = "merged" # type: ignore[attr-defined] + await proposed_change_create.save() + + updated_address = await NodeManager.get_one(db=db, branch=branch_1.name, id=new_address_1.id) + parent_rels = await updated_address.ip_prefix.get_relationships(db=db) # type: ignore[union-attr] + assert len(parent_rels) == 1 + assert parent_rels[0].peer_id == initial_dataset["net140"].id + + async def test_step02_add_delete_prefix( + self, db: InfrahubDatabase, initial_dataset, client: InfrahubClient, branch_2, new_address_1 + ) -> None: + proposed_change_create = await client.create( + kind=InfrahubKind.PROPOSEDCHANGE, + data={"source_branch": branch_2.name, "destination_branch": "main", "name": "delete_prefix_pc"}, + ) + await proposed_change_create.save() + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=branch_2) + new_prefix = await Node.init(schema=prefix_schema, db=db, branch=registry.default_branch) + await new_prefix.new(db=db, prefix="10.10.0.0/17", ip_namespace=initial_dataset["ns1"].id) + await new_prefix.save(db=db) + deleted_prefix_branch = await NodeManager.get_one(db=db, branch=branch_2, id=initial_dataset["net140"].id) + assert deleted_prefix_branch + await deleted_prefix_branch.delete(db=db) + + proposed_change_create.state.value = "merged" # type: ignore[attr-defined] + await proposed_change_create.save() + + deleted_prefix = await NodeManager.get_one(db=db, branch=branch_2.name, id=deleted_prefix_branch.id) + assert deleted_prefix is None + new_prefix_branch = await NodeManager.get_one(db=db, branch=branch_2.name, id=new_prefix.id) + parent_rels = await new_prefix_branch.parent.get_relationships(db=db) # type: ignore[union-attr] + assert len(parent_rels) == 1 + assert parent_rels[0].peer_id == initial_dataset["net146"].id + children_rels = await new_prefix_branch.children.get_relationships(db=db) # type: ignore[union-attr] + assert len(children_rels) == 3 + assert {child.peer_id for child in children_rels} == { + initial_dataset["net142"].id, + initial_dataset["net144"].id, + initial_dataset["net145"].id, + } + address_rels = await new_prefix_branch.ip_addresses.get_relationships(db=db) # type: ignore[union-attr] + assert len(address_rels) == 2 + assert {ar.peer_id for ar in address_rels} == {new_address_1.id, initial_dataset["address10"].id} diff --git a/backend/tests/integration/message_bus/operations/request/test_proposed_change.py 
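# A note on the proposed-change variant above (inferred from the test flow rather
# than from documentation): these tests never call client.branch.merge(); they save
# an InfrahubKind.PROPOSEDCHANGE node and then flip its state to "merged", and the
# message-bus simulator (with the broker enabled and a RedisCache-backed service)
# performs the merge and the IPAM reconciliation as a side effect, which is why the
# same parent/children/ip_addresses assertions as in the direct-merge tests still
# hold afterwards.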
b/backend/tests/integration/message_bus/operations/request/test_proposed_change.py index 00c62ce5a2..cb410d3552 100644 --- a/backend/tests/integration/message_bus/operations/request/test_proposed_change.py +++ b/backend/tests/integration/message_bus/operations/request/test_proposed_change.py @@ -149,6 +149,7 @@ async def test_run_pipeline_validate_requested_jobs( assert sorted(bus_pre_data_changes.seen_routing_keys) == [ "request.proposed_change.refresh_artifacts", "request.proposed_change.repository_checks", + "request.proposed_change.run_generators", "request.proposed_change.run_tests", ] @@ -156,6 +157,7 @@ async def test_run_pipeline_validate_requested_jobs( "request.proposed_change.data_integrity", "request.proposed_change.refresh_artifacts", "request.proposed_change.repository_checks", + "request.proposed_change.run_generators", "request.proposed_change.run_tests", "request.proposed_change.schema_integrity", ] @@ -184,4 +186,4 @@ async def test_cancel( proposed_change = await NodeManager.get_one_by_id_or_default_filter( db=db, id=prepare_proposed_change, schema_name=InfrahubKind.PROPOSEDCHANGE ) - assert proposed_change.state.value == ProposedChangeState.CANCELED.value # type: ignore[attr-defined] + assert proposed_change.state.value.value == ProposedChangeState.CANCELED.value # type: ignore[attr-defined] diff --git a/backend/tests/integration/profiles/__init__.py b/backend/tests/integration/profiles/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/tests/integration/profiles/test_profile_lifecycle.py b/backend/tests/integration/profiles/test_profile_lifecycle.py new file mode 100644 index 0000000000..f0d86c4e6e --- /dev/null +++ b/backend/tests/integration/profiles/test_profile_lifecycle.py @@ -0,0 +1,470 @@ +import pytest +from graphql import graphql +from infrahub_sdk.client import InfrahubClient + +from infrahub.core import registry +from infrahub.core.node import Node +from infrahub.core.schema import SchemaRoot +from infrahub.core.schema.attribute_schema import AttributeSchema +from infrahub.core.schema.node_schema import NodeSchema +from infrahub.database import InfrahubDatabase +from infrahub.graphql import prepare_graphql_params +from tests.helpers.schema import load_schema +from tests.helpers.test_app import TestInfrahubApp + + +class TestProfileLifecycle(TestInfrahubApp): + @pytest.fixture(scope="class") + async def schema_person_base(self, db: InfrahubDatabase, initialize_registry) -> None: + person_schema = NodeSchema( + name="Person", + namespace="Testing", + include_in_menu=True, + label="Person", + attributes=[ + AttributeSchema(name="name", kind="Text"), + AttributeSchema(name="description", kind="Text", optional=True), + AttributeSchema(name="height", kind="Number", optional=True), + ], + ) + await load_schema(db=db, schema=SchemaRoot(version="1.0", nodes=[person_schema])) + + @pytest.fixture(scope="class") + async def person_1(self, db: InfrahubDatabase, schema_person_base) -> Node: + schema = registry.schema.get_node_schema(name="TestingPerson", duplicate=False) + person_1 = await Node.init(db=db, schema=schema) + await person_1.new(db=db, name="Starbuck") + await person_1.save(db=db) + return person_1 + + @pytest.fixture(scope="class") + async def person_profile_1(self, db: InfrahubDatabase, schema_person_base) -> Node: + person_profile_1 = await Node.init(db=db, schema="ProfileTestingPerson") + await person_profile_1.new(db=db, profile_name="profile-one", profile_priority=10, height=167) + await person_profile_1.save(db=db) + 
return person_profile_1 + + async def test_step_01_one_person_no_profile( + self, db: InfrahubDatabase, schema_person_base, person_1, person_profile_1, client: InfrahubClient + ): + retrieved_person = await client.get(kind="TestingPerson", id=person_1.id) + + assert retrieved_person.profiles.peer_ids == [] + assert retrieved_person.name.value == "Starbuck" + assert retrieved_person.name.is_from_profile is False + assert retrieved_person.name.source is None + assert retrieved_person.height.value is None + assert retrieved_person.height.is_from_profile is False + assert retrieved_person.height.source is None + + async def test_step_02_one_person_add_profile( + self, + db: InfrahubDatabase, + default_branch, + person_1, + person_profile_1, + ): + mutation = """ + mutation { + TestingPersonUpdate(data: {id: "%(person_id)s", profiles: [{ id: "%(profile_id)s"}]}) { + ok + object { + id + profiles { edges { node { id } } } + name { + value + source { id } + is_from_profile + } + height { + value + source { id } + is_from_profile + } + } + } + } + """ % {"person_id": person_1.id, "profile_id": person_profile_1.id} + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=mutation, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data + assert result.data["TestingPersonUpdate"]["ok"] is True + profiles = result.data["TestingPersonUpdate"]["object"]["profiles"]["edges"] + assert len(profiles) == 1 + assert profiles == [{"node": {"id": person_profile_1.id}}] + attributes = result.data["TestingPersonUpdate"]["object"] + assert attributes["id"] == person_1.id + assert attributes["name"] == {"value": "Starbuck", "is_from_profile": False, "source": None} + assert attributes["height"] == {"value": 167, "is_from_profile": True, "source": {"id": person_profile_1.id}} + + async def test_step_03_create_person_with_profile( + self, + db: InfrahubDatabase, + default_branch, + person_profile_1, + ): + mutation = """ + mutation { + TestingPersonCreate(data: {name: {value: "Apollo"}, profiles: [{ id: "%(profile_id)s"}]}) { + ok + object { + id + profiles { edges { node { id } } } + name { + value + source { id } + is_from_profile + } + height { + value + source { id } + is_from_profile + } + } + } + } + """ % {"profile_id": person_profile_1.id} + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=mutation, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data + assert result.data["TestingPersonCreate"]["ok"] is True + profiles = result.data["TestingPersonCreate"]["object"]["profiles"]["edges"] + assert len(profiles) == 1 + assert profiles == [{"node": {"id": person_profile_1.id}}] + attributes = result.data["TestingPersonCreate"]["object"] + assert attributes["name"] == {"value": "Apollo", "is_from_profile": False, "source": None} + assert attributes["height"] == {"value": 167, "is_from_profile": True, "source": {"id": person_profile_1.id}} + + async def test_step_04_update_non_profile_attribute( + self, + db: InfrahubDatabase, + default_branch, + person_1, + person_profile_1, + ): + mutation = """ + mutation { + TestingPersonUpdate(data: {id: "%(person_id)s", name: {value: "Kara Thrace"}}) { + ok + object { + id + profiles { edges { node { id } } 
} + name { + value + source { id } + is_from_profile + } + height { + value + source { id } + is_from_profile + } + } + } + } + """ % { + "person_id": person_1.id, + } + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=mutation, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data + assert result.data["TestingPersonUpdate"]["ok"] is True + profiles = result.data["TestingPersonUpdate"]["object"]["profiles"]["edges"] + assert len(profiles) == 1 + assert profiles == [{"node": {"id": person_profile_1.id}}] + attributes = result.data["TestingPersonUpdate"]["object"] + assert attributes["id"] == person_1.id + assert attributes["name"] == {"value": "Kara Thrace", "is_from_profile": False, "source": None} + assert attributes["height"] == {"value": 167, "is_from_profile": True, "source": {"id": person_profile_1.id}} + + async def test_step_05_add_profile_with_person( + self, + db: InfrahubDatabase, + default_branch, + person_1, + ): + mutation = """ + mutation { + ProfileTestingPersonCreate(data: { + profile_name: {value: "profile-two"}, + profile_priority: {value: 5}, + height: {value: 156} + related_nodes: [{id: "%(person_id)s"}] + } ) { + ok + object { + related_nodes { edges { node { id } } } + profile_name { value } + profile_priority { value } + height { value } + } + } + } + """ % {"person_id": person_1.id} + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=mutation, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data + assert result.data["ProfileTestingPersonCreate"]["ok"] is True + nodes = result.data["ProfileTestingPersonCreate"]["object"]["related_nodes"]["edges"] + assert len(nodes) == 1 + assert nodes == [{"node": {"id": person_1.id}}] + attributes = result.data["ProfileTestingPersonCreate"]["object"] + assert attributes["profile_name"] == {"value": "profile-two"} + assert attributes["profile_priority"] == {"value": 5} + assert attributes["height"] == {"value": 156} + + async def test_step_06_get_person_multiple_profiles(self, person_1, person_profile_1, client: InfrahubClient): + person_profile_2 = await client.get(kind="ProfileTestingPerson", profile_name__value="profile-two") + retrieved_person = await client.get(kind="TestingPerson", id=person_1.id) + await retrieved_person.profiles.fetch() + + assert set(retrieved_person.profiles.peer_ids) == {person_profile_1.id, person_profile_2.id} + assert retrieved_person.name.value == "Kara Thrace" + assert retrieved_person.name.is_from_profile is False + assert retrieved_person.name.source is None + assert retrieved_person.height.value == 156 + assert retrieved_person.height.is_from_profile is True + assert retrieved_person.height.source.id == person_profile_2.id + + async def test_step_07_update_person_delete_profile( + self, + db: InfrahubDatabase, + default_branch, + client, + ): + person_2 = await client.get(kind="TestingPerson", name__value="Apollo") + mutation = """ + mutation { + TestingPersonUpdate(data: {id: "%(person_id)s", profiles: []}) { + ok + object { + id + profiles { edges { node { id } } } + name { + value + source { id } + is_from_profile + } + height { + value + source { id } + is_from_profile + } + } + } + } + """ % {"person_id": person_2.id} 
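# Reading steps 02 through 06 above together (a hedged interpretation of the fixture
# data, not a statement of documented precedence rules): with profile-one
# (profile_priority 10, height 167) and profile-two (profile_priority 5, height 156)
# both attached, the person's height resolves to 156 from profile-two, so the
# profile with the lowest profile_priority value appears to take precedence, and
# is_from_profile/source point at the winning profile rather than the node itself.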
+ + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=mutation, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data + assert result.data["TestingPersonUpdate"]["ok"] is True + profiles = result.data["TestingPersonUpdate"]["object"]["profiles"]["edges"] + assert profiles == [] + attributes = result.data["TestingPersonUpdate"]["object"] + assert attributes["id"] == person_2.id + assert attributes["name"] == {"value": "Apollo", "is_from_profile": False, "source": None} + assert attributes["height"] == {"value": None, "is_from_profile": False, "source": None} + + async def test_step_08_delete_profile( + self, + db: InfrahubDatabase, + default_branch, + client: InfrahubClient, + ): + person_profile_2 = await client.get(kind="ProfileTestingPerson", profile_name__value="profile-two") + mutation = """ + mutation { + ProfileTestingPersonDelete(data: {id: "%(profile_id)s"}) { + ok + } + } + """ % {"profile_id": person_profile_2.id} + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=mutation, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert not result.errors + assert result.data + assert result.data["ProfileTestingPersonDelete"]["ok"] is True + + async def test_step_09_check_persons( + self, db: InfrahubDatabase, person_1, person_profile_1, client: InfrahubClient + ): + retrieved_person_1 = await client.get(kind="TestingPerson", id=person_1.id) + await retrieved_person_1.profiles.fetch() + retrieved_person_2 = await client.get(kind="TestingPerson", name__value="Apollo") + + assert retrieved_person_1.profiles.peer_ids == [person_profile_1.id] + assert retrieved_person_1.name.value == "Kara Thrace" + assert retrieved_person_1.name.is_from_profile is False + assert retrieved_person_1.name.source is None + assert retrieved_person_1.height.value == 167 + assert retrieved_person_1.height.is_from_profile is True + assert retrieved_person_1.height.source.id == person_profile_1.id + assert retrieved_person_2.profiles.peer_ids == [] + assert retrieved_person_2.name.value == "Apollo" + assert retrieved_person_2.name.is_from_profile is False + assert retrieved_person_2.name.source is None + assert retrieved_person_2.height.value is None + assert retrieved_person_2.height.is_from_profile is False + assert retrieved_person_2.height.source is None + + async def test_step_10_update_person_override_profile( + self, + db: InfrahubDatabase, + default_branch, + person_1, + person_profile_1, + ): + mutation = """ + mutation { + TestingPersonUpdate(data: {id: "%(person_id)s", height: {value: 145}}) { + ok + object { + id + profiles { edges { node { id } } } + name { + value + source { id } + is_from_profile + } + height { + value + source { id } + is_from_profile + } + } + } + } + """ % {"person_id": person_1.id} + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=mutation, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data + assert result.data["TestingPersonUpdate"]["ok"] is True + profiles = result.data["TestingPersonUpdate"]["object"]["profiles"]["edges"] + assert profiles == [{"node": {"id": 
person_profile_1.id}}] + attributes = result.data["TestingPersonUpdate"]["object"] + assert attributes["id"] == person_1.id + assert attributes["name"] == {"value": "Kara Thrace", "is_from_profile": False, "source": None} + assert attributes["height"] == {"value": 145, "is_from_profile": False, "source": None} + + async def test_step_11_add_profile_with_person( + self, db: InfrahubDatabase, default_branch, person_profile_1, person_1 + ): + mutation = """ + mutation { + ProfileTestingPersonUpdate(data: { + id: "%(profile_id)s" + profile_priority: {value: 11}, + height: {value: 134} + } ) { + ok + object { + related_nodes { edges { node { id } } } + profile_name { value } + profile_priority { value } + height { value } + } + } + } + """ % {"profile_id": person_profile_1.id} + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=mutation, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data + assert result.data["ProfileTestingPersonUpdate"]["ok"] is True + nodes = result.data["ProfileTestingPersonUpdate"]["object"]["related_nodes"]["edges"] + assert len(nodes) == 1 + assert nodes == [{"node": {"id": person_1.id}}] + attributes = result.data["ProfileTestingPersonUpdate"]["object"] + assert attributes["profile_name"] == {"value": "profile-one"} + assert attributes["profile_priority"] == {"value": 11} + assert attributes["height"] == {"value": 134} + + async def test_step_12_check_persons_again(self, person_1, person_profile_1, client: InfrahubClient): + retrieved_person_1 = await client.get(kind="TestingPerson", id=person_1.id) + await retrieved_person_1.profiles.fetch() + retrieved_person_2 = await client.get(kind="TestingPerson", name__value="Apollo") + + assert retrieved_person_1.profiles.peer_ids == [person_profile_1.id] + assert retrieved_person_1.name.value == "Kara Thrace" + assert retrieved_person_1.name.is_from_profile is False + assert retrieved_person_1.name.source is None + assert retrieved_person_1.height.value == 145 + assert retrieved_person_1.height.is_from_profile is False + assert retrieved_person_1.height.source is None + assert retrieved_person_2.profiles.peer_ids == [] + assert retrieved_person_2.name.value == "Apollo" + assert retrieved_person_2.name.is_from_profile is False + assert retrieved_person_2.name.source is None + assert retrieved_person_2.height.value is None + assert retrieved_person_2.height.is_from_profile is False + assert retrieved_person_2.height.source is None diff --git a/backend/tests/integration/proposed_change/test_proposed_change.py b/backend/tests/integration/proposed_change/test_proposed_change.py index 75eb9b428f..a885081bd9 100644 --- a/backend/tests/integration/proposed_change/test_proposed_change.py +++ b/backend/tests/integration/proposed_change/test_proposed_change.py @@ -3,6 +3,7 @@ from typing import TYPE_CHECKING import pytest +from infrahub_sdk.exceptions import GraphQLError from infrahub.core.constants import InfrahubKind, ValidatorConclusion from infrahub.core.initialization import create_branch @@ -62,14 +63,14 @@ async def initial_dataset( await client_repository.save() @pytest.fixture(scope="class") - async def happy_dataset(self, db: InfrahubDatabase, initial_dataset: None) -> None: - branch1 = await create_branch(db=db, branch_name="conflict_free") - richard = await Node.init(schema=TestKind.PERSON, db=db, branch=branch1) + async def 
happy_dataset(self, db: InfrahubDatabase, initial_dataset: None, client: InfrahubClient) -> None: + branch1 = await client.branch.create(branch_name="conflict_free") + richard = await Node.init(schema=TestKind.PERSON, db=db, branch=branch1.name) await richard.new(db=db, name="Richard", height=180, description="The less famous Richard Doe") await richard.save(db=db) john = await NodeManager.get_one_by_id_or_default_filter( - db=db, id="John", schema_name=TestKind.PERSON, branch=branch1 + db=db, id="John", schema_name=TestKind.PERSON, branch=branch1.name ) john.age.value = 26 # type: ignore[attr-defined] await john.save(db=db) @@ -85,6 +86,7 @@ async def conflict_dataset(self, db: InfrahubDatabase, initial_dataset: None) -> db=db, id="John", schema_name=TestKind.PERSON, branch=branch1 ) john_branch.description.value = "Oh boy" # type: ignore[attr-defined] + john_branch.age.value = 30 # type: ignore[attr-defined] await john_branch.save(db=db) async def test_happy_pipeline(self, db: InfrahubDatabase, happy_dataset: None, client: InfrahubClient) -> None: @@ -100,15 +102,29 @@ async def test_happy_pipeline(self, db: InfrahubDatabase, happy_dataset: None, c peers = await proposed_change.validations.get_peers(db=db) # type: ignore[attr-defined] assert peers data_integrity = [validator for validator in peers.values() if validator.label.value == "Data Integrity"][0] - assert data_integrity.conclusion.value == ValidatorConclusion.SUCCESS.value + assert data_integrity.conclusion.value.value == ValidatorConclusion.SUCCESS.value ownership_artifacts = [ validator for validator in peers.values() if validator.label.value == "Artifact Validator: Ownership report" ][0] - assert ownership_artifacts.conclusion.value == ValidatorConclusion.SUCCESS.value + assert ownership_artifacts.conclusion.value.value == ValidatorConclusion.SUCCESS.value description_check = [ validator for validator in peers.values() if validator.label.value == "Check: car_description_check" ][0] - assert description_check.conclusion.value == ValidatorConclusion.SUCCESS.value + assert description_check.conclusion.value.value == ValidatorConclusion.SUCCESS.value + age_check = [validator for validator in peers.values() if validator.label.value == "Check: owner_age_check"][0] + assert age_check.conclusion.value.value == ValidatorConclusion.SUCCESS.value + repository_merge_conflict = [ + validator for validator in peers.values() if validator.label.value == "Repository Validator: car-dealership" + ][0] + assert repository_merge_conflict.conclusion.value.value == ValidatorConclusion.SUCCESS.value + + tags = await client.all(kind="BuiltinTag", branch="conflict_free") + # The Generator defined in the repository is expected to have created this tag during the pipeline + assert "john-jesko" in [tag.name.value for tag in tags] # type: ignore[attr-defined] + assert "InfrahubNode-john-jesko" in [tag.name.value for tag in tags] # type: ignore[attr-defined] + + proposed_change_create.state.value = "merged" # type: ignore[attr-defined] + await proposed_change_create.save() async def test_conflict_pipeline( self, db: InfrahubDatabase, conflict_dataset: None, client: InfrahubClient @@ -125,4 +141,24 @@ async def test_conflict_pipeline( peers = await proposed_change.validations.get_peers(db=db) # type: ignore[attr-defined] assert peers data_integrity = [validator for validator in peers.values() if validator.label.value == "Data Integrity"][0] - assert data_integrity.conclusion.value == ValidatorConclusion.FAILURE.value + assert 
data_integrity.conclusion.value.value == ValidatorConclusion.FAILURE.value + + proposed_change_create.state.value = "merged" # type: ignore[attr-defined] + + data_checks = await client.filters(kind=InfrahubKind.DATACHECK, validator__ids=data_integrity.id) + assert len(data_checks) == 1 + data_check = data_checks[0] + + with pytest.raises( + GraphQLError, match="Data conflicts found on branch and missing decisions about what branch to keep" + ): + await proposed_change_create.save() + + data_check.keep_branch.value = "source" # type: ignore[attr-defined] + await data_check.save() + proposed_change_create.state.value = "merged" # type: ignore[attr-defined] + await proposed_change_create.save() + john = await NodeManager.get_one_by_id_or_default_filter(db=db, id="John", schema_name=TestKind.PERSON) + # The value of the description should match that of the source branch that was selected + # as the branch to keep in the data conflict + assert john.description.value == "Oh boy" # type: ignore[attr-defined] diff --git a/backend/tests/integration/schema_lifecycle/test_migration_attribute_branch.py b/backend/tests/integration/schema_lifecycle/test_migration_attribute_branch.py index bb14654db2..bb96a279ce 100644 --- a/backend/tests/integration/schema_lifecycle/test_migration_attribute_branch.py +++ b/backend/tests/integration/schema_lifecycle/test_migration_attribute_branch.py @@ -200,9 +200,8 @@ async def test_step02_load_attr_add_rename( schema_step02["nodes"][0]["attributes"][0]["id"] = attr.id # Load the new schema and apply the migrations - success, response = await client.schema.load(schemas=[schema_step02], branch=self.branch1.name) - assert success - assert response is None + response = await client.schema.load(schemas=[schema_step02], branch=self.branch1.name) + assert not response.errors # Check if the branch has been properly updated branches = await client.branch.all() @@ -251,9 +250,8 @@ async def test_step03_check(self, db: InfrahubDatabase, client: InfrahubClient, assert success async def test_step03_load(self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset, schema_step03): - success, response = await client.schema.load(schemas=[schema_step03], branch=self.branch1.name) - assert response is None - assert success + response = await client.schema.load(schemas=[schema_step03], branch=self.branch1.name) + assert not response.errors # Ensure that we can query the existing node with the new schema # person_schema = registry.schema.get(name=PERSON_KIND) diff --git a/backend/tests/integration/schema_lifecycle/test_migration_relationship_branch.py b/backend/tests/integration/schema_lifecycle/test_migration_relationship_branch.py index 00b00ee90b..88012656b6 100644 --- a/backend/tests/integration/schema_lifecycle/test_migration_relationship_branch.py +++ b/backend/tests/integration/schema_lifecycle/test_migration_relationship_branch.py @@ -232,9 +232,8 @@ async def test_step02_load(self, db: InfrahubDatabase, client: InfrahubClient, i schema_step02["nodes"][1]["relationships"][0]["id"] = rel.id # Load the new schema and apply the migrations - success, response = await client.schema.load(schemas=[schema_step02], branch=self.branch1.name) - assert success - assert response is None + response = await client.schema.load(schemas=[schema_step02], branch=self.branch1.name) + assert not response.errors # Check if the branch has been properly updated branches = await client.branch.all() @@ -281,9 +280,8 @@ async def test_step03_check(self, db: InfrahubDatabase, client: InfrahubClient, 
assert success async def test_step03_load(self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset, schema_step03): - success, response = await client.schema.load(schemas=[schema_step03], branch=self.branch1.name) - assert response is None - assert success + response = await client.schema.load(schemas=[schema_step03], branch=self.branch1.name) + assert not response.errors john = await registry.manager.get_one(db=db, id=initial_dataset["john"], branch=self.branch1) assert john diff --git a/backend/tests/integration/schema_lifecycle/test_schema_migration_branch.py b/backend/tests/integration/schema_lifecycle/test_schema_migration_branch.py index 100471fc28..3936b5c0d3 100644 --- a/backend/tests/integration/schema_lifecycle/test_schema_migration_branch.py +++ b/backend/tests/integration/schema_lifecycle/test_schema_migration_branch.py @@ -176,9 +176,8 @@ async def test_step02_load_attr_add_rename( schema_step02["nodes"][0]["attributes"][0]["id"] = attr.id # Load the new schema and apply the migrations - success, response = await client.schema.load(schemas=[schema_step02], branch=self.branch1.name) - assert success - assert response is None + response = await client.schema.load(schemas=[schema_step02], branch=self.branch1.name) + assert not response.errors # Check if the branch has been properly updated branches = await client.branch.all() @@ -203,6 +202,7 @@ async def test_step02_load_attr_add_rename( john = persons[0] assert john.name.value == "John" # type: ignore[attr-defined] + @pytest.mark.xfail(reason="migrations need updates for profiles (issue #2841)") async def test_step03_check(self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset, schema_step03): manufacturer_schema = registry.schema.get_node_schema(name=MANUFACTURER_KIND_01, branch=self.branch1) @@ -254,6 +254,7 @@ async def test_step03_check(self, db: InfrahubDatabase, client: InfrahubClient, } assert success + @pytest.mark.xfail(reason="migrations need updates for profiles (issue #2841)") async def test_step03_load(self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset, schema_step03): manufacturer_schema = registry.schema.get_node_schema(name=MANUFACTURER_KIND_01, branch=self.branch1) @@ -261,9 +262,8 @@ async def test_step03_load(self, db: InfrahubDatabase, client: InfrahubClient, i assert schema_step03["nodes"][2]["name"] == "CarMaker" schema_step03["nodes"][2]["id"] = manufacturer_schema.id - success, response = await client.schema.load(schemas=[schema_step03], branch=self.branch1.name) - assert response is None - assert success + response = await client.schema.load(schemas=[schema_step03], branch=self.branch1.name) + assert not response.errors # Ensure that we can query the existing node with the new schema # person_schema = registry.schema.get(name=PERSON_KIND) @@ -282,6 +282,7 @@ async def test_step03_load(self, db: InfrahubDatabase, client: InfrahubClient, i renault_cars = await renault.cars.get_peers(db=db) # type: ignore[attr-defined] assert len(renault_cars) == 2 + @pytest.mark.xfail(reason="migrations need updates for profiles (issue #2841)") async def test_rebase(self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset): branch = await client.branch.rebase(branch_name=self.branch1.name) assert branch @@ -302,6 +303,7 @@ async def test_rebase(self, db: InfrahubDatabase, client: InfrahubClient, initia honda_cars = await honda.cars.get_peers(db=db) # type: ignore[attr-defined] assert len(honda_cars) == 2 + @pytest.mark.xfail(reason="migrations need updates for profiles 
(issue #2841)") async def test_step04_check(self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset, schema_step04): tag_schema = registry.schema.get_node_schema(name=TAG_KIND, branch=self.branch1) @@ -314,6 +316,7 @@ async def test_step04_check(self, db: InfrahubDatabase, client: InfrahubClient, assert response == {"diff": {"added": {}, "changed": {}, "removed": {"TestingTag": None}}} assert success + @pytest.mark.xfail(reason="migrations need updates for profiles (issue #2841)") async def test_step04_load(self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset, schema_step04): tag_schema = registry.schema.get_node_schema(name=TAG_KIND, branch=self.branch1) @@ -321,9 +324,8 @@ async def test_step04_load(self, db: InfrahubDatabase, client: InfrahubClient, i assert schema_step04["nodes"][3]["name"] == "Tag" schema_step04["nodes"][3]["id"] = tag_schema.id - success, response = await client.schema.load(schemas=[schema_step04], branch=self.branch1.name) - assert response is None - assert success + response = await client.schema.load(schemas=[schema_step04], branch=self.branch1.name) + assert not response.errors assert registry.schema.has(name=TAG_KIND) is True # FIXME after loading the new schema, TestingTag is still present in the branch, need to investigate diff --git a/backend/tests/integration/schema_lifecycle/test_schema_migration_main.py b/backend/tests/integration/schema_lifecycle/test_schema_migration_main.py index 28d3852963..173ff36f72 100644 --- a/backend/tests/integration/schema_lifecycle/test_schema_migration_main.py +++ b/backend/tests/integration/schema_lifecycle/test_schema_migration_main.py @@ -130,9 +130,8 @@ async def test_step02_load_attr_add_rename( schema_step02["nodes"][0]["attributes"][0]["id"] = attr.id # Load the new schema and apply the migrations - success, response = await client.schema.load(schemas=[schema_step02]) - assert success - assert response is None + response = await client.schema.load(schemas=[schema_step02]) + assert not response.errors # Ensure that we can query the existing node with the new schema persons = await registry.manager.query(db=db, schema=PERSON_KIND, filters={"firstname__value": "John"}) @@ -140,6 +139,7 @@ async def test_step02_load_attr_add_rename( john = persons[0] assert john.firstname.value == "John" # type: ignore[attr-defined] + @pytest.mark.xfail(reason="migrations need updates for profiles (issue #2841)") async def test_step03_check(self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset, schema_step03): manufacturer_schema = registry.schema.get_node_schema(name=MANUFACTURER_KIND_01) @@ -191,6 +191,7 @@ async def test_step03_check(self, db: InfrahubDatabase, client: InfrahubClient, } assert success + @pytest.mark.xfail(reason="migrations need updates for profiles (issue #2841)") async def test_step03_load(self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset, schema_step03): manufacturer_schema = registry.schema.get_node_schema(name=MANUFACTURER_KIND_01) @@ -198,9 +199,8 @@ async def test_step03_load(self, db: InfrahubDatabase, client: InfrahubClient, i assert schema_step03["nodes"][2]["name"] == "CarMaker" schema_step03["nodes"][2]["id"] = manufacturer_schema.id - success, response = await client.schema.load(schemas=[schema_step03]) - assert response is None - assert success + response = await client.schema.load(schemas=[schema_step03]) + assert not response.errors # Ensure that we can query the existing node with the new schema persons = await registry.manager.query(db=db, 
schema=PERSON_KIND, filters={"firstname__value": "John"}) @@ -216,6 +216,7 @@ async def test_step03_load(self, db: InfrahubDatabase, client: InfrahubClient, i honda_cars = await honda.cars.get_peers(db=db) # type: ignore[attr-defined] assert len(honda_cars) == 2 + @pytest.mark.xfail(reason="migrations need updates for profiles (issue #2841)") async def test_step04_check(self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset, schema_step04): tag_schema = registry.schema.get_node_schema(name=TAG_KIND) @@ -228,6 +229,7 @@ async def test_step04_check(self, db: InfrahubDatabase, client: InfrahubClient, assert response == {"diff": {"added": {}, "changed": {}, "removed": {"TestingTag": None}}} assert success + @pytest.mark.xfail(reason="migrations need updates for profiles (issue #2841)") async def test_step04_load(self, db: InfrahubDatabase, client: InfrahubClient, initial_dataset, schema_step04): tag_schema = registry.schema.get_node_schema(name=TAG_KIND) @@ -235,8 +237,7 @@ async def test_step04_load(self, db: InfrahubDatabase, client: InfrahubClient, i assert schema_step04["nodes"][3]["name"] == "Tag" schema_step04["nodes"][3]["id"] = tag_schema.id - success, response = await client.schema.load(schemas=[schema_step04]) - assert response is None - assert success + response = await client.schema.load(schemas=[schema_step04]) + assert not response.errors assert registry.schema.has(name=TAG_KIND) is False diff --git a/backend/tests/integration/schema_lifecycle/test_schema_update.py b/backend/tests/integration/schema_lifecycle/test_schema_update.py new file mode 100644 index 0000000000..aea610bccc --- /dev/null +++ b/backend/tests/integration/schema_lifecycle/test_schema_update.py @@ -0,0 +1,62 @@ +from typing import Any, Dict + +import pytest +from infrahub_sdk import InfrahubClient + +from .shared import ( + TestSchemaLifecycleBase, +) + +# pylint: disable=unused-argument +ACCORD_COLOR = "#3443eb" + + +class TestSchemaLifecycleValidatorMain(TestSchemaLifecycleBase): + @pytest.fixture(scope="class") + def schema_network( + self, + ) -> Dict[str, Any]: + return { + "version": "1.0", + "nodes": [ + { + "name": "Device", + "namespace": "Network", + "default_filter": "hostname__value", + "attributes": [{"name": "hostname", "kind": "Text"}, {"name": "model", "kind": "Text"}], + }, + { + "name": "Interface", + "namespace": "Network", + "uniqueness_constraints": [["device", "name__value"]], + "attributes": [{"name": "name", "kind": "Text", "optional": False}], + "relationships": [ + { + "name": "device", + "cardinality": "one", + "kind": "Parent", + "peer": "NetworkDevice", + "optional": False, + } + ], + }, + ], + } + + async def test_step_01_create_branch(self, client: InfrahubClient): + branch = await client.branch.create(branch_name="test", sync_with_git=False) + assert branch + + async def test_step_02_load_schema(self, client: InfrahubClient, schema_network): + # Load the new schema and apply the migrations + response = await client.schema.load(schemas=[schema_network], branch="test") + assert not response.errors + + async def test_step_03_load_data(self, client: InfrahubClient, schema_network): + dev1 = await client.create(kind="NetworkDevice", hostname="device", model="switch", branch="test") + await dev1.save() + assert dev1.id + + intf1 = await client.create(kind="NetworkInterface", name="interface1", device=dev1.id, branch="test") + await intf1.save() + assert intf1.id diff --git a/backend/tests/integration/schema_lifecycle/test_schema_validator_generic_uniqueness.py 
b/backend/tests/integration/schema_lifecycle/test_schema_validator_generic_uniqueness.py new file mode 100644 index 0000000000..4b886e9644 --- /dev/null +++ b/backend/tests/integration/schema_lifecycle/test_schema_validator_generic_uniqueness.py @@ -0,0 +1,397 @@ +from typing import Any, Dict + +import pytest +from infrahub_sdk import InfrahubClient +from infrahub_sdk.exceptions import GraphQLError + +from infrahub.core import registry +from infrahub.core.branch import Branch +from infrahub.core.initialization import create_branch +from infrahub.core.manager import NodeManager +from infrahub.core.node import Node +from infrahub.database import InfrahubDatabase + +from ..shared import load_schema +from .shared import ( + TestSchemaLifecycleBase, +) + +PERSON_KIND = "TestingPerson" +CYLON_KIND = "TestingCylon" +CAR_KIND = "TestingCar" + +# pylint: disable=unused-argument + + +class TestSchemaLifecycleValidatorMain(TestSchemaLifecycleBase): + @pytest.fixture(scope="class") + def schema_car_base(self) -> Dict[str, Any]: + return { + "name": "Car", + "namespace": "Testing", + "include_in_menu": True, + "default_filter": "name__value", + "label": "Car", + "attributes": [ + {"name": "name", "kind": "Text"}, + {"name": "description", "kind": "Text", "optional": True}, + {"name": "color", "kind": "Text"}, + ], + "relationships": [ + { + "name": "owner", + "kind": "Attribute", + "optional": False, + "peer": "TestingHumanoid", + "cardinality": "one", + }, + ], + } + + @pytest.fixture(scope="class") + def schema_humanoid_base(self) -> Dict[str, Any]: + return { + "name": "Humanoid", + "namespace": "Testing", + "include_in_menu": True, + "label": "Humanoid", + "attributes": [ + {"name": "name", "kind": "Text"}, + {"name": "description", "kind": "Text", "optional": True}, + {"name": "height", "kind": "Number", "optional": True}, + {"name": "favorite_color", "kind": "Text", "optional": True}, + ], + "relationships": [ + {"name": "cars", "kind": "Generic", "optional": True, "peer": "TestingCar", "cardinality": "many"} + ], + } + + @pytest.fixture(scope="class") + def schema_person_base(self) -> Dict[str, Any]: + return { + "name": "Person", + "namespace": "Testing", + "include_in_menu": True, + "label": "Person", + "inherit_from": ["TestingHumanoid"], + "attributes": [ + {"name": "homeworld", "kind": "Text", "optional": False}, + ], + } + + @pytest.fixture(scope="class") + def schema_cylon_base(self) -> Dict[str, Any]: + return { + "name": "Cylon", + "namespace": "Testing", + "include_in_menu": True, + "label": "Cylon", + "inherit_from": ["TestingHumanoid"], + "attributes": [ + {"name": "model_number", "kind": "Number", "optional": False}, + ], + } + + @pytest.fixture(scope="class") + async def branch_2(self, db: InfrahubDatabase) -> Branch: + return await create_branch(db=db, branch_name="branch_2") + + @pytest.fixture(scope="class") + async def initial_dataset(self, db: InfrahubDatabase, initialize_registry, schema_step_01): + await load_schema(db=db, schema=schema_step_01) + + starbuck = await Node.init(schema=PERSON_KIND, db=db) + await starbuck.new(db=db, name="Kara", height=175, description="Starbuck", homeworld="Caprica") + await starbuck.save(db=db) + + president = await Node.init(schema=PERSON_KIND, db=db) + await president.new(db=db, name="Laura", height=175, description="President", homeworld="Caprica") + await president.save(db=db) + + gaius = await Node.init(schema=PERSON_KIND, db=db) + await gaius.new(db=db, name="Gaius", height=155, description="'Scientist'", homeworld="Aerilon") + await 
gaius.save(db=db) + + boomer = await Node.init(schema=CYLON_KIND, db=db) + await boomer.new(db=db, name="Sharon", height=165, model_number=8, description="8 (Boomer)") + await boomer.save(db=db) + + athena = await Node.init(schema=CYLON_KIND, db=db) + await athena.new(db=db, name="Sharon", height=165, model_number=8, description="8 (Athena)") + await athena.save(db=db) + + caprica = await Node.init(schema=CYLON_KIND, db=db) + await caprica.new(db=db, name="Caprica", height=185, model_number=6, description="6 (Caprica)") + await caprica.save(db=db) + + accord = await Node.init(schema=CAR_KIND, db=db) + await accord.new(db=db, name="accord", description="Honda Accord", color="#12345e", owner=president) + await accord.save(db=db) + + civic = await Node.init(schema=CAR_KIND, db=db) + await civic.new(db=db, name="civic", description="Honda Civic", color="#c9eb34", owner=boomer) + await civic.save(db=db) + + megane = await Node.init(schema=CAR_KIND, db=db) + await megane.new(db=db, name="Megane", description="Renault Megane", color="#c93420", owner=starbuck) + await megane.save(db=db) + + objs = { + "starbuck": starbuck.id, + "president": president.id, + "gaius": gaius.id, + "boomer": boomer.id, + "athena": athena.id, + "caprica": caprica.id, + "accord": accord.id, + "civic": civic.id, + "megane": megane.id, + } + + return objs + + @pytest.fixture(scope="class") + def schema_step_01( + self, schema_humanoid_base, schema_car_base, schema_person_base, schema_cylon_base + ) -> Dict[str, Any]: + return { + "version": "1.0", + "generics": [schema_humanoid_base], + "nodes": [schema_car_base, schema_person_base, schema_cylon_base], + } + + @pytest.fixture(scope="class") + def schema_01_humanoid_uniqueness_constraint_failure(self, schema_humanoid_base) -> Dict[str, Any]: + """Add uniqueness constraint to TestHumanoid that does not fit existing data""" + schema_humanoid_base["uniqueness_constraints"] = [["height", "name"]] + return schema_humanoid_base + + @pytest.fixture(scope="class") + def schema_01_generic_uniqueness_failure( + self, schema_01_humanoid_uniqueness_constraint_failure, schema_car_base, schema_cylon_base, schema_person_base + ) -> Dict[str, Any]: + return { + "version": "1.0", + "generics": [schema_01_humanoid_uniqueness_constraint_failure], + "nodes": [schema_car_base, schema_person_base, schema_cylon_base], + } + + @pytest.fixture(scope="class") + def schema_02_humanoid_uniqueness_constraint_failure(self, schema_humanoid_base) -> Dict[str, Any]: + schema_humanoid_base["uniqueness_constraints"] = [["name", "favorite_color"]] + return schema_humanoid_base + + @pytest.fixture(scope="class") + def schema_02_generic_uniqueness_failure( + self, schema_02_humanoid_uniqueness_constraint_failure, schema_car_base, schema_cylon_base, schema_person_base + ) -> Dict[str, Any]: + return { + "version": "1.0", + "generics": [schema_02_humanoid_uniqueness_constraint_failure], + "nodes": [schema_car_base, schema_person_base, schema_cylon_base], + } + + @pytest.fixture(scope="class") + def schema_03_humanoid_uniqueness_constraint_failure(self, schema_humanoid_base) -> Dict[str, Any]: + schema_humanoid_base["uniqueness_constraints"] = [["height", "name"]] + return schema_humanoid_base + + @pytest.fixture(scope="class") + def schema_03_person_constraint_failure(self, schema_person_base) -> Dict[str, Any]: + schema_person_base["uniqueness_constraints"] = [["height", "homeworld"]] + return schema_person_base + + @pytest.fixture(scope="class") + def schema_03_cylon_constraint_failure(self, schema_cylon_base) 
-> Dict[str, Any]: + schema_cylon_base["uniqueness_constraints"] = [["model_number", "favorite_color"]] + return schema_cylon_base + + @pytest.fixture(scope="class") + def schema_03_generic_and_node_uniqueness_failure( + self, + schema_03_humanoid_uniqueness_constraint_failure, + schema_car_base, + schema_03_cylon_constraint_failure, + schema_03_person_constraint_failure, + ) -> Dict[str, Any]: + return { + "version": "1.0", + "generics": [schema_03_humanoid_uniqueness_constraint_failure], + "nodes": [schema_car_base, schema_03_person_constraint_failure, schema_03_cylon_constraint_failure], + } + + @pytest.fixture(scope="class") + def schema_04_humanoid_uniqueness_constraint_failure(self, schema_humanoid_base) -> Dict[str, Any]: + schema_humanoid_base["uniqueness_constraints"] = [["name", "favorite_color"]] + return schema_humanoid_base + + @pytest.fixture(scope="class") + def schema_04_person_constraint_failure(self, schema_person_base) -> Dict[str, Any]: + schema_person_base["uniqueness_constraints"] = [["homeworld", "favorite_color"]] + return schema_person_base + + @pytest.fixture(scope="class") + def schema_04_cylon_constraint_failure(self, schema_cylon_base) -> Dict[str, Any]: + schema_cylon_base["uniqueness_constraints"] = [["model_number", "favorite_color"]] + return schema_cylon_base + + @pytest.fixture(scope="class") + def schema_04_generic_and_node_uniqueness_failure( + self, + schema_04_humanoid_uniqueness_constraint_failure, + schema_car_base, + schema_04_cylon_constraint_failure, + schema_04_person_constraint_failure, + ) -> Dict[str, Any]: + return { + "version": "1.0", + "generics": [schema_04_humanoid_uniqueness_constraint_failure], + "nodes": [schema_car_base, schema_04_person_constraint_failure, schema_04_cylon_constraint_failure], + } + + async def test_baseline_backend(self, db: InfrahubDatabase, initial_dataset): + persons = await registry.manager.query(db=db, schema=PERSON_KIND) + cylons = await registry.manager.query(db=db, schema=CYLON_KIND) + cars = await registry.manager.query(db=db, schema=CAR_KIND) + assert len(persons) == 3 + assert len(cylons) == 3 + assert len(cars) == 3 + + async def test_step_01_check_generic_uniqueness_constraint_failure( + self, client: InfrahubClient, initial_dataset, schema_01_generic_uniqueness_failure + ): + success, response = await client.schema.check(schemas=[schema_01_generic_uniqueness_failure]) + + assert success is False + assert "errors" in response + assert len(response["errors"]) == 1 + err_msg = response["errors"][0]["message"] + assert initial_dataset["boomer"] in err_msg + assert initial_dataset["athena"] in err_msg + assert "node.uniqueness_constraints.update" in err_msg + + async def test_step_02_check_generic_uniqueness_constraint_rebase_failure( + self, + client: InfrahubClient, + db: InfrahubDatabase, + initial_dataset, + schema_02_generic_uniqueness_failure, + branch_2, + ): + response = await client.schema.load(schemas=[schema_02_generic_uniqueness_failure], branch=branch_2.name) + assert not response.errors + + boomer_main = await NodeManager.get_one_by_id_or_default_filter( + db=db, schema_name=CYLON_KIND, id=initial_dataset["boomer"] + ) + boomer_main.favorite_color.value = "green" # type: ignore[attr-defined] + await boomer_main.save(db=db) + athena_branch = await NodeManager.get_one_by_id_or_default_filter( + db=db, schema_name=CYLON_KIND, id=initial_dataset["athena"], branch=branch_2 + ) + athena_branch.favorite_color.value = "green" # type: ignore[attr-defined] + await athena_branch.save(db=db) + + with 
pytest.raises(GraphQLError) as exc: + await client.branch.rebase(branch_name=branch_2.name) + + assert initial_dataset["boomer"] in exc.value.message + assert initial_dataset["athena"] in exc.value.message + assert "node.uniqueness_constraints.update" in exc.value.message + + async def test_step_03_check_generic_and_node_uniqueness_constraint_failure( + self, + db: InfrahubDatabase, + client: InfrahubClient, + initial_dataset, + schema_03_generic_and_node_uniqueness_failure, + ): + boomer_main = await NodeManager.get_one_by_id_or_default_filter( + db=db, schema_name=CYLON_KIND, id=initial_dataset["boomer"] + ) + boomer_main.favorite_color.value = "green" # type: ignore[attr-defined] + await boomer_main.save(db=db) + caprica_main = await NodeManager.get_one_by_id_or_default_filter( + db=db, schema_name=CYLON_KIND, id=initial_dataset["caprica"] + ) + caprica_main.favorite_color.value = "green" # type: ignore[attr-defined] + await caprica_main.save(db=db) + + success, response = await client.schema.check(schemas=[schema_03_generic_and_node_uniqueness_failure]) + + assert success is False + assert "errors" in response + assert len(response["errors"]) == 1 + err_msg = response["errors"][0]["message"] + assert initial_dataset["boomer"] in err_msg + assert initial_dataset["athena"] in err_msg + assert initial_dataset["starbuck"] in err_msg + assert initial_dataset["president"] in err_msg + assert initial_dataset["gaius"] not in err_msg + assert "node.uniqueness_constraints.update" in err_msg + + async def test_step_03_reset(self, db: InfrahubDatabase, initial_dataset): + boomer_main = await NodeManager.get_one_by_id_or_default_filter( + db=db, schema_name=CYLON_KIND, id=initial_dataset["boomer"] + ) + boomer_main.favorite_color.value = None # type: ignore[attr-defined] + await boomer_main.save(db=db) + caprica_main = await NodeManager.get_one_by_id_or_default_filter( + db=db, schema_name=CYLON_KIND, id=initial_dataset["caprica"] + ) + caprica_main.favorite_color.value = None # type: ignore[attr-defined] + await caprica_main.save(db=db) + + async def test_step_04_check_generic_and_node_uniqueness_constraint_rebase_failure( + self, + client: InfrahubClient, + db: InfrahubDatabase, + initial_dataset, + schema_04_generic_and_node_uniqueness_failure, + branch_2, + ): + response = await client.schema.load( + schemas=[schema_04_generic_and_node_uniqueness_failure], branch=branch_2.name + ) + assert not response.errors + + boomer_main = await NodeManager.get_one_by_id_or_default_filter( + db=db, schema_name=CYLON_KIND, id=initial_dataset["boomer"] + ) + boomer_main.favorite_color.value = "green" # type: ignore[attr-defined] + await boomer_main.save(db=db) + athena_branch = await NodeManager.get_one_by_id_or_default_filter( + db=db, schema_name=CYLON_KIND, id=initial_dataset["athena"], branch=branch_2 + ) + athena_branch.favorite_color.value = "green" # type: ignore[attr-defined] + await athena_branch.save(db=db) + starbuck_main = await NodeManager.get_one_by_id_or_default_filter( + db=db, schema_name=PERSON_KIND, id=initial_dataset["starbuck"] + ) + starbuck_main.favorite_color.value = "purple" # type: ignore[attr-defined] + await starbuck_main.save(db=db) + president_branch = await NodeManager.get_one_by_id_or_default_filter( + db=db, schema_name=PERSON_KIND, id=initial_dataset["president"], branch=branch_2 + ) + president_branch.favorite_color.value = "purple" # type: ignore[attr-defined] + await president_branch.save(db=db) + + with pytest.raises(GraphQLError) as exc: + await 
client.branch.rebase(branch_name=branch_2.name) + + assert initial_dataset["gaius"] not in exc.value.message + assert initial_dataset["caprica"] not in exc.value.message + for display_label, kind in [ + (await boomer_main.render_display_label(db=db), "TestingHumanoid"), + (await athena_branch.render_display_label(db=db), "TestingHumanoid"), + (await boomer_main.render_display_label(db=db), "TestingCylon"), + (await athena_branch.render_display_label(db=db), "TestingCylon"), + (await starbuck_main.render_display_label(db=db), "TestingPerson"), + (await president_branch.render_display_label(db=db), "TestingPerson"), + ]: + expected_error_msg = ( + f"Node {display_label} is not compatible with the constraint 'node.uniqueness_constraints.update'" + f" at 'schema/{kind}/uniqueness_constraints'" + ) + + assert expected_error_msg in exc.value.errors[0]["message"] diff --git a/backend/tests/integration/schema_lifecycle/test_schema_validator_rebase.py b/backend/tests/integration/schema_lifecycle/test_schema_validator_rebase.py index 3f897cc33e..83ee24b8ec 100644 --- a/backend/tests/integration/schema_lifecycle/test_schema_validator_rebase.py +++ b/backend/tests/integration/schema_lifecycle/test_schema_validator_rebase.py @@ -129,8 +129,8 @@ async def test_baseline_backend(self, db: InfrahubDatabase, initial_dataset): async def test_step_01_attr_regex_add_rebase_failure( self, client: InfrahubClient, db: InfrahubDatabase, initial_dataset, schema_01_attr_regex, branch_2 ): - success, _ = await client.schema.load(schemas=[schema_01_attr_regex]) - assert success + response = await client.schema.load(schemas=[schema_01_attr_regex]) + assert not response.errors little_john = await Node.init(schema=PERSON_KIND, db=db, branch=branch_2) await little_john.new(db=db, name="little john", height=115, description="a smaller john") await little_john.save(db=db) @@ -144,8 +144,8 @@ async def test_step_01_attr_regex_add_rebase_failure( async def test_step_02_node_unique_rebase_failure( self, client: InfrahubClient, db: InfrahubDatabase, initial_dataset, schema_02_node_unique, branch_2 ): - success, _ = await client.schema.load(schemas=[schema_02_node_unique]) - assert success + response = await client.schema.load(schemas=[schema_02_node_unique]) + assert not response.errors honda = await client.get(id=initial_dataset["honda"], kind=MANUFACTURER_KIND_01, branch=branch_2.name) jane = await client.get(id=initial_dataset["jane"], kind=PERSON_KIND, branch=branch_2.name) diff --git a/backend/tests/integration/sdk/test_node_create_constraint.py b/backend/tests/integration/sdk/test_node_create_constraint.py index a6f2524b6f..f13698a533 100644 --- a/backend/tests/integration/sdk/test_node_create_constraint.py +++ b/backend/tests/integration/sdk/test_node_create_constraint.py @@ -205,9 +205,8 @@ async def test_baseline_backend(self, db: InfrahubDatabase, initial_dataset): async def test_step_02_add_node_success( self, client: InfrahubClient, initial_dataset, schema_02_uniqueness_constraint ): - success, response = await client.schema.load(schemas=[schema_02_uniqueness_constraint]) - assert success is True - assert not response + response = await client.schema.load(schemas=[schema_02_uniqueness_constraint]) + assert not response.errors john_person = await client.get(kind=PERSON_KIND, id=initial_dataset["john"]) honda_manufacturer = await client.get(kind=MANUFACTURER_KIND, id=initial_dataset["honda"]) @@ -248,9 +247,8 @@ async def test_step_02_add_node_failure( async def test_step_03_add_node_success( self, client: 
InfrahubClient, initial_dataset, schema_03_uniqueness_constraint ): - success, response = await client.schema.load(schemas=[schema_03_uniqueness_constraint]) - assert success is True - assert not response + response = await client.schema.load(schemas=[schema_03_uniqueness_constraint]) + assert not response.errors john_person = await client.get(kind=PERSON_KIND, id=initial_dataset["john"]) honda_manufacturer = await client.get(kind=MANUFACTURER_KIND, id=initial_dataset["honda"]) diff --git a/backend/tests/integration/services/adapters/message_bus/test_rabbitmq.py b/backend/tests/integration/services/adapters/message_bus/test_rabbitmq.py index 5542b67dea..5e2971cc66 100644 --- a/backend/tests/integration/services/adapters/message_bus/test_rabbitmq.py +++ b/backend/tests/integration/services/adapters/message_bus/test_rabbitmq.py @@ -1,7 +1,6 @@ from __future__ import annotations import asyncio -import json from copy import deepcopy from dataclasses import dataclass from functools import partial @@ -10,6 +9,7 @@ import httpx import pytest +import ujson from aio_pika import Message from infrahub import config @@ -351,8 +351,8 @@ async def test_rabbitmq_publish(rabbitmq_api: RabbitMQManager) -> None: delayed_queue = await bus.channel.get_queue(name=f"{bus.settings.namespace}.delay.five_seconds") message_from_queue = await queue.get() delayed_message_from_queue = await delayed_queue.get() - parsed_message = json.loads(message_from_queue.body) - parsed_delayed_message = json.loads(delayed_message_from_queue.body) + parsed_message = ujson.loads(message_from_queue.body) + parsed_delayed_message = ujson.loads(delayed_message_from_queue.body) await bus.shutdown() @@ -431,8 +431,8 @@ async def test_rabbitmq_rpc(rabbitmq_api: RabbitMQManager, fake_log: FakeLogger) assert response.data.response == "Reply to: You can reply to this message" -async def test_rabbitmq_subscribe(rabbitmq_api: RabbitMQManager, fake_log: FakeLogger) -> None: - """Validates the subscribe method.""" +async def test_rabbitmq_on_message(rabbitmq_api: RabbitMQManager, fake_log: FakeLogger) -> None: + """Validates the on_message method.""" bus = RabbitMQMessageBus(settings=rabbitmq_api.settings) api_service = InfrahubServices(message_bus=bus, component_type=ComponentType.API_SERVER) @@ -442,18 +442,15 @@ async def test_rabbitmq_subscribe(rabbitmq_api: RabbitMQManager, fake_log: FakeL await bus.initialize(service=agent_service) - subscribe_task = asyncio.create_task(bus.subscribe()) - await agent_service.send(message=messages.SendEchoRequest(message="Hello there")) await asyncio.sleep(delay=1) await bus.shutdown() - subscribe_task.cancel() - assert fake_log.info_logs == ["Waiting for RPC instructions to execute .. 
", "Received message: Hello there"] + assert fake_log.info_logs == ["Received message: Hello there"] assert fake_log.error_logs == [] -async def test_rabbitmq_subscribe_invalid_routing_key(rabbitmq_api: RabbitMQManager, fake_log: FakeLogger) -> None: +async def test_rabbitmq_on_message_invalid_routing_key(rabbitmq_api: RabbitMQManager, fake_log: FakeLogger) -> None: """Validates logging of invalid routing key""" bus = RabbitMQMessageBus(settings=rabbitmq_api.settings) @@ -464,15 +461,12 @@ async def test_rabbitmq_subscribe_invalid_routing_key(rabbitmq_api: RabbitMQMana await bus.initialize(service=agent_service) - subscribe_task = asyncio.create_task(bus.subscribe()) - await bus.publish(routing_key="request.something.invalid", message=messages.SendEchoRequest(message="Hello there")) await asyncio.sleep(delay=1) await bus.shutdown() - subscribe_task.cancel() - assert fake_log.info_logs == ["Waiting for RPC instructions to execute .. "] - assert fake_log.error_logs == ["Unhandled routing key for message"] + assert fake_log.info_logs == [] + assert fake_log.error_logs == ["Invalid message received"] async def on_callback(message: AbstractIncomingMessage, service: InfrahubServices) -> None: diff --git a/backend/tests/integration/user_workflows/test_user_worflow.py b/backend/tests/integration/user_workflows/test_user_worflow.py index 3dc7d5c2ee..afbb77de05 100644 --- a/backend/tests/integration/user_workflows/test_user_worflow.py +++ b/backend/tests/integration/user_workflows/test_user_worflow.py @@ -3,6 +3,7 @@ from deepdiff import DeepDiff from fastapi.testclient import TestClient +from infrahub.core.constants import NULL_VALUE from infrahub.database import InfrahubDatabase from infrahub.server import app from infrahub.test_data import dataset01 as ds01 @@ -358,7 +359,7 @@ async def test_validate_diff_after_description_update(self, client, dataset01, i "branch": "branch1", "changed_at": "2023-10-25T11:26:48.387801Z", "type": "HAS_VALUE", - "value": {"new": "New " "description " "in " "branch1", "previous": "NULL"}, + "value": {"new": "New " "description " "in " "branch1", "previous": NULL_VALUE}, } ], "path": "data/17915618-03d5-2db0-4358-185140cb1203/description/value", @@ -394,7 +395,7 @@ async def test_validate_diff_after_description_update(self, client, dataset01, i "branch": "main", "changed_at": "2023-10-25T11:26:49.190014Z", "type": "HAS_VALUE", - "value": {"new": "New " "description " "in " "main", "previous": "NULL"}, + "value": {"new": "New " "description " "in " "main", "previous": NULL_VALUE}, } ], "path": "data/17915618-15e2-e1f0-435b-18517dcffdf5/description/value", @@ -509,7 +510,7 @@ def test_validate_diff_again_after_description_update(self, client, dataset01): "type": "HAS_VALUE", "changed_at": "2023-05-04T18:45:28.584932Z", "action": "updated", - "value": {"new": "New New description in branch1", "previous": "NULL"}, + "value": {"new": "New New description in branch1", "previous": NULL_VALUE}, }, "properties": [], } diff --git a/backend/tests/scale/common/config.py b/backend/tests/scale/common/config.py index 44c42383d8..513ab34627 100644 --- a/backend/tests/scale/common/config.py +++ b/backend/tests/scale/common/config.py @@ -20,9 +20,12 @@ class Config(BaseSettings): node_amount: int = 10 attrs_amount: int = 0 rels_amount: int = 0 + changes_amount: int = 0 current_stage: str = "" + failed_requests: int = 0 + class Config: env_prefix = "INFRAHUB_" case_sensitive = False diff --git a/backend/tests/scale/common/events.py b/backend/tests/scale/common/events.py index 
1696c037b7..919013f4bc 100644 --- a/backend/tests/scale/common/events.py +++ b/backend/tests/scale/common/events.py @@ -23,6 +23,8 @@ "rels_amount", ] +failed_request: bool = False + @events.test_start.add_listener def setup_iteration_limit(environment: Environment, **kwargs): @@ -47,7 +49,7 @@ def wrapped(self, task): # need to trigger this in a separate greenlet, in case test_stop handlers do something async gevent.spawn_later(0.1, runner.quit) raise StopUser() - runner.iterations_started = runner.iterations_started + 1 + runner.iterations_started += 1 method(self, task) return wrapped @@ -68,6 +70,9 @@ def request_event_handler( "failed": True if exception else False, } + if exception: + config.failed_requests += 1 + if os.getenv("CI") is None: server_container_stats = get_container_resource_usage(config.server_container) db_container_stats = get_container_resource_usage(config.db_container) diff --git a/backend/tests/scale/common/protocols.py b/backend/tests/scale/common/protocols.py index cf44204a61..665fafa8f7 100644 --- a/backend/tests/scale/common/protocols.py +++ b/backend/tests/scale/common/protocols.py @@ -1,4 +1,5 @@ import time +from urllib.parse import urlparse from infrahub_sdk import Config, InfrahubClientSync @@ -28,3 +29,28 @@ def execute_graphql(self, *args, **kwargs): request_meta["response_time"] = (time.perf_counter() - start_perf_counter) * 1000 self._request_event.fire(**request_meta) return request_meta["response"] + + def _request(self, *args, **kwargs): + # Filter endpoints we want to trace + if "/api/diff/data" not in kwargs.get("url"): + return super()._request(*args, **kwargs) + + api_path = urlparse(kwargs.get("url")).path + + request_meta = { + "request_type": "InfrahubClient", + "name": f"{kwargs.get('method').name} {api_path}", + "start_time": time.time(), + "response_length": 0, + "response": None, + "context": {}, + "exception": None, + } + start_perf_counter = time.perf_counter() + try: + request_meta["response"] = super()._request(*args, **kwargs) + except Exception as e: + request_meta["exception"] = e + request_meta["response_time"] = (time.perf_counter() - start_perf_counter) * 1000 + self._request_event.fire(**request_meta) + return request_meta["response"] diff --git a/backend/tests/scale/common/stagers.py b/backend/tests/scale/common/stagers.py index c1dc630c20..32dc47b2be 100644 --- a/backend/tests/scale/common/stagers.py +++ b/backend/tests/scale/common/stagers.py @@ -13,22 +13,24 @@ def load_schema( client: InfrahubClientSync, schema: Path, branch: Optional[str] = None, - extra_attributes: List[dict] = [], - relationships: List[dict] = [], + attributes: Optional[List[dict]] = None, + relationships: Optional[List[dict]] = None, ): + attributes = attributes or [] + relationships = relationships or [] branch = branch or "main" data = yaml.safe_load(schema.read_text()) - data["nodes"][0]["attributes"] += extra_attributes + data["nodes"][0]["attributes"] += attributes if "relationships" not in data["nodes"][0]: - data["nodes"][0]["relationships"] = list() + data["nodes"][0]["relationships"] = [] data["nodes"][0]["relationships"] += relationships client.schema.validate(data) - (loaded, response) = client.schema.load(schemas=[data], branch=branch) - if not loaded: - raise ValueError(f"Could not load schema: {response}") + response = client.schema.load(schemas=[data], branch=branch) + if response.errors: + raise ValueError(f"Could not load schema: {response.errors}") def _stage_node(client: InfrahubClientSync, kind: str, prefix: str, amount: int, 
offset: int = 0): @@ -39,15 +41,61 @@ def _stage_node(client: InfrahubClientSync, kind: str, prefix: str, amount: int, node = client.create(kind=kind, data={"name": f"{prefix}{i}", **extra_attributes}) node.save() + for j in range(config.changes_amount): + node.name.value = f"{prefix}{i}-update{j}" + node.save() + stage_infranode = partial(_stage_node, kind="InfraNode", prefix="Node") def _stage_branch(client: InfrahubClientSync, prefix: str, amount: int, offset: int = 0): + extra_attributes = prepare_node_attributes(client) + for i in range(offset, offset + config.node_amount): - client.branch.create(branch_name=f"{prefix}{i}", description="description", sync_with_git=False) + branch_name = f"{prefix}{i}" + client.branch.create(branch_name=branch_name, description="description", sync_with_git=True) + # Add diff by creating a new node + node = client.create(kind="InfraNode", branch=branch_name, data={"name": "DiffTestNode", **extra_attributes}) + node.save() stage_infranode(client=client, amount=100) stage_branch = partial(_stage_branch, prefix="Branch") + + +def _stage_branch_update(client: InfrahubClientSync, prefix: str, amount: int, offset: int = 0): + # Create node for diff + extra_attributes = prepare_node_attributes(client) + node = client.create(kind="InfraNode", data={"name": "DiffTestNode", **extra_attributes}) + node.save() + + for i in range(offset, offset + config.node_amount): + branch_name = f"{prefix}{i}" + client.branch.create(branch_name=branch_name, description="description", sync_with_git=True) + # Apply diff to base node + node._branch = branch_name + node.name.value = f"DiffTestNodeBranch{i}" + node.save() + + stage_infranode(client=client, amount=100) + + +stage_branch_update = partial(_stage_branch_update, prefix="Branch") + + +def _stage_branch_diff(client: InfrahubClientSync, prefix: str, amount: int, offset: int = 0): + extra_attributes = prepare_node_attributes(client) + + branch_name = "DiffTestBranch" + client.branch.create(branch_name=branch_name, description="description", sync_with_git=True) + for i in range(offset, offset + config.node_amount): + # Add diff by creating a new node + node = client.create(kind="InfraNode", branch=branch_name, data={"name": f"{prefix}{i}", **extra_attributes}) + node.save() + + stage_infranode(client=client, amount=100) + + +stage_branch_diff = partial(_stage_branch_diff, prefix="Node") diff --git a/backend/tests/scale/common/users.py b/backend/tests/scale/common/users.py index bd24ed3186..85ad652243 100644 --- a/backend/tests/scale/common/users.py +++ b/backend/tests/scale/common/users.py @@ -56,7 +56,7 @@ def crud(self): for i in range(self.custom_options["rels"]) ] common.stagers.load_schema( - self.client, self.custom_options["schema"], extra_attributes=attributes, relationships=relationships + self.client, self.custom_options["schema"], attributes=attributes, relationships=relationships ) time.sleep(5) print("--- staging nodes, attributes and relations") @@ -87,3 +87,10 @@ def crud(self): update_this_node.save() delete_this_node.delete() + + if "diff" in self.custom_options["stager"]: + self.client.branch.diff_data("DiffTestBranch") + + # End with a branch merge + if "diff" in self.custom_options["stager"]: + self.client.branch.merge("DiffTestBranch") diff --git a/backend/tests/scale/common/utils.py b/backend/tests/scale/common/utils.py index d930de0b27..3c1b579b48 100644 --- a/backend/tests/scale/common/utils.py +++ b/backend/tests/scale/common/utils.py @@ -98,7 +98,7 @@ def get_graphdb_stats() -> DbStats: def 
prepare_node_attributes(client: InfrahubClientSync) -> dict: - extra_attributes = dict() + extra_attributes = {} for i in range(config.attrs_amount): extra_attributes[f"attr{i}"] = random_ascii_string() diff --git a/backend/tests/scale/main.py b/backend/tests/scale/main.py index 2d8b480eb1..dcb635df70 100644 --- a/backend/tests/scale/main.py +++ b/backend/tests/scale/main.py @@ -37,17 +37,24 @@ type=click.IntRange(min=0, max=1_000_000_000), help="Amount of relationships per object to be created in the `staging function`", ) +@click.option( + "--changes", + default=0, + type=click.IntRange(min=0, max=1_000_000_000), + help="Amount of changes to apply to a single node", +) @click.option("--test", default="InfrahubClientUser", help="The Locust test user class") -def main(schema: Path, stager: str, amount: int, attrs: int, rels: int, test: str) -> int: +def main(schema: Path, stager: str, amount: int, attrs: int, rels: int, changes: int, test: str) -> int: if not hasattr(common.users, test): print(f"Invalid test class provided: {test}") - return 1 + raise SystemExit(1) user_class = getattr(common.users, test) config.node_amount = amount config.attrs_amount = attrs config.rels_amount = rels + config.changes_amount = changes env = Environment(user_classes=[user_class], events=events) env.custom_options = { @@ -56,6 +63,7 @@ def main(schema: Path, stager: str, amount: int, attrs: int, rels: int, test: st "amount": amount, "attrs": attrs, "rels": rels, + "changes": changes, "schema": schema, } runner = env.create_local_runner() @@ -65,8 +73,13 @@ def main(schema: Path, stager: str, amount: int, attrs: int, rels: int, test: st runner.greenlet.join() print("--- done") - return 0 + + if config.failed_requests > 0: + print(f"--- failed requests: {config.failed_requests}") + raise SystemExit(1) + + raise SystemExit(0) if __name__ == "__main__": - raise SystemExit(main()) + main() diff --git a/backend/tests/unit/api/conftest.py b/backend/tests/unit/api/conftest.py index ac4b3bbae4..710c5a9383 100644 --- a/backend/tests/unit/api/conftest.py +++ b/backend/tests/unit/api/conftest.py @@ -329,7 +329,7 @@ async def car_person_data_artifact_diff(db: InfrahubDatabase, default_branch, ca transformation=t1, content_type="application/json", artifact_name="myartifact", - parameters='{"name": "name__value"}', + parameters={"value": {"name": "name__value"}}, ) await ad1.save(db=db) diff --git a/backend/tests/unit/api/test_11_artifact.py b/backend/tests/unit/api/test_11_artifact.py index f190e48e61..bdd8963d0b 100644 --- a/backend/tests/unit/api/test_11_artifact.py +++ b/backend/tests/unit/api/test_11_artifact.py @@ -43,7 +43,7 @@ async def test_artifact_definition_endpoint( transformation=t1, content_type="application/json", artifact_name="myartifact", - parameters='{"name": "name__value"}', + parameters={"value": {"name": "name__value"}}, ) await ad1.save(db=db) diff --git a/backend/tests/unit/api/test_15_diff.py b/backend/tests/unit/api/test_15_diff.py index 04e82a949d..db7427267a 100644 --- a/backend/tests/unit/api/test_15_diff.py +++ b/backend/tests/unit/api/test_15_diff.py @@ -1,7 +1,7 @@ import pytest from deepdiff import DeepDiff -from infrahub.core.constants import InfrahubKind +from infrahub.core.constants import NULL_VALUE, InfrahubKind from infrahub.core.diff.payload_builder import get_display_labels, get_display_labels_per_kind from infrahub.core.initialization import create_branch from infrahub.core.manager import NodeManager @@ -118,7 +118,7 @@ async def r1_update_01(data_diff_attribute): "type": 
"HAS_VALUE", "changed_at": "2023-08-01T11:07:25.255688Z", "action": "updated", - "value": {"new": "Second update in Branch", "previous": "NULL"}, + "value": {"new": "Second update in Branch", "previous": NULL_VALUE}, } ], }, diff --git a/backend/tests/unit/api/test_40_schema_api.py b/backend/tests/unit/api/test_40_schema_api.py index 5c7f2000eb..2cb190ca27 100644 --- a/backend/tests/unit/api/test_40_schema_api.py +++ b/backend/tests/unit/api/test_40_schema_api.py @@ -170,16 +170,19 @@ async def test_schema_load_endpoint_valid_simple( authentication_base, helper, ): - # Must execute in a with block to execute the startup/shutdown events + # Load the schema in the database + schema = registry.schema.get_schema_branch(name=default_branch.name) + await registry.schema.load_schema_to_db(schema=schema, branch=default_branch, db=db) + # Must execute in a with block to execute the startup/shutdown event with client: creation = client.post( "/api/schema/load", headers=admin_headers, json={"schemas": [helper.schema_file("infra_simple_01.json")]} ) read = client.get("/api/schema", headers=admin_headers) - assert creation.json() == {} - assert creation.status_code == 202 + assert creation.json()["schema_updated"] + assert creation.status_code == 200 assert read.status_code == 200 nodes = read.json()["nodes"] device = [node for node in nodes if node["name"] == "Device"] @@ -222,6 +225,10 @@ async def test_schema_load_endpoint_idempotent_simple( authentication_base, helper, ): + # Load the schema in the database + schema = registry.schema.get_schema_branch(name=default_branch.name) + await registry.schema.load_schema_to_db(schema=schema, branch=default_branch, db=db) + # Must execute in a with block to execute the startup/shutdown events with client: creation = client.post( @@ -231,7 +238,7 @@ async def test_schema_load_endpoint_idempotent_simple( nbr_rels = await count_relationships(db=db) - assert creation.status_code == 202 + assert creation.status_code == 200 assert read.status_code == 200 nodes = read.json()["nodes"] device = [node for node in nodes if node["name"] == "Device"] @@ -250,7 +257,7 @@ async def test_schema_load_endpoint_idempotent_simple( ) read = client.get("/api/schema", headers=admin_headers) - assert creation.status_code == 202 + assert creation.status_code == 200 assert read.status_code == 200 assert nbr_rels == await count_relationships(db=db) @@ -265,6 +272,10 @@ async def test_schema_load_endpoint_valid_with_generics( authentication_base, helper, ): + # Load the schema in the database + schema = registry.schema.get_schema_branch(name=default_branch.name) + await registry.schema.load_schema_to_db(schema=schema, branch=default_branch, db=db) + # Must execute in a with block to execute the startup/shutdown events with client: response1 = client.post( @@ -272,7 +283,7 @@ async def test_schema_load_endpoint_valid_with_generics( headers=admin_headers, json={"schemas": [helper.schema_file("infra_w_generics_01.json")]}, ) - assert response1.status_code == 202 + assert response1.status_code == 200 response2 = client.get("/api/schema", headers=admin_headers) assert response2.status_code == 200 @@ -290,6 +301,10 @@ async def test_schema_load_endpoint_idempotent_with_generics( authentication_base, helper, ): + # Load the schema in the database + schema = registry.schema.get_schema_branch(name=default_branch.name) + await registry.schema.load_schema_to_db(schema=schema, branch=default_branch, db=db) + # Must execute in a with block to execute the startup/shutdown events with client: 
response1 = client.post( @@ -297,8 +312,8 @@ async def test_schema_load_endpoint_idempotent_with_generics( headers=admin_headers, json={"schemas": [helper.schema_file("infra_w_generics_01.json")]}, ) - assert response1.json() == {} - assert response1.status_code == 202 + assert response1.json()["schema_updated"] + assert response1.status_code == 200 response2 = client.get("/api/schema", headers=admin_headers) assert response2.status_code == 200 @@ -313,8 +328,8 @@ async def test_schema_load_endpoint_idempotent_with_generics( headers=admin_headers, json={"schemas": [helper.schema_file("infra_w_generics_01.json")]}, ) - assert response3.json() == {} - assert response3.status_code == 202 + assert response3.json()["schema_updated"] is False + assert response3.status_code == 200 response4 = client.get("/api/schema", headers=admin_headers) assert response4.status_code == 200 @@ -332,6 +347,10 @@ async def test_schema_load_endpoint_valid_with_extensions( authentication_base, helper, ): + # Load the schema in the database + schema = registry.schema.get_schema_branch(name=default_branch.name) + await registry.schema.load_schema_to_db(schema=schema, branch=default_branch, db=db) + org_schema = registry.schema.get(name="CoreOrganization", branch=default_branch.name) initial_nbr_relationships = len(org_schema.relationships) @@ -359,8 +378,8 @@ async def test_schema_load_endpoint_valid_with_extensions( json={"schemas": [helper.schema_file("infra_w_extensions_01.json")]}, ) - assert response.json() == {} - assert response.status_code == 202 + assert response.json()["schema_updated"] + assert response.status_code == 200 org_schema = registry.schema.get(name="CoreOrganization", branch=default_branch.name) assert len(org_schema.relationships) == initial_nbr_relationships + 1 @@ -481,6 +500,10 @@ async def test_schema_load_endpoint_constraints_not_valid( # await person.new(db=db, name="ALFRED", height=160, cars=[car_accord_main.id]) # await person.save(db=db) + # Load the schema in the database + schema = registry.schema.get_schema_branch(name=default_branch.name) + await registry.schema.load_schema_to_db(schema=schema, branch=default_branch, db=db) + rpc_bus.response.append( SchemaValidatorPathResponse( data=SchemaValidatorPathResponseData( diff --git a/backend/tests/unit/api/test_api_exception_handler.py b/backend/tests/unit/api/test_api_exception_handler.py index 73cc1e5cc1..5dea3bda87 100644 --- a/backend/tests/unit/api/test_api_exception_handler.py +++ b/backend/tests/unit/api/test_api_exception_handler.py @@ -1,8 +1,8 @@ -from json import loads from typing import Optional from pydantic.v1 import BaseModel, root_validator, validator from pydantic.v1.error_wrappers import ValidationError +from ujson import loads from infrahub.api.exception_handlers import generic_api_exception_handler from infrahub.exceptions import Error diff --git a/backend/tests/unit/api/test_menu.py b/backend/tests/unit/api/test_menu.py index 82a9dcabca..d727792c1c 100644 --- a/backend/tests/unit/api/test_menu.py +++ b/backend/tests/unit/api/test_menu.py @@ -23,4 +23,4 @@ async def test_get_menu( menu = [InterfaceMenu(**menu_item) for menu_item in response.json()] assert menu[0].title == "Objects" - menu[0].children[0].title == "Car" + assert menu[0].children[0].title == "Car" diff --git a/backend/tests/unit/conftest.py b/backend/tests/unit/conftest.py index 458fbefbad..0cae2aa76d 100644 --- a/backend/tests/unit/conftest.py +++ b/backend/tests/unit/conftest.py @@ -31,11 +31,17 @@ from infrahub.core.schema_manager import SchemaBranch 
from infrahub.core.utils import delete_all_nodes from infrahub.database import InfrahubDatabase +from infrahub.dependencies.registry import build_component_registry from infrahub.git import InfrahubRepository from infrahub.test_data import dataset01 as ds01 from tests.helpers.file_repo import FileRepo +@pytest.fixture(scope="module", autouse=True) +def load_component_dependency_registry(): + build_component_registry() + + @pytest.fixture(params=["main", "branch2"]) async def branch(request, db: InfrahubDatabase, default_branch: Branch): if request.param == "main": @@ -226,9 +232,9 @@ async def base_dataset_02(db: InfrahubDatabase, default_branch: Branch, car_pers CREATE (bool_true:Boolean { value: true }) CREATE (bool_false:Boolean { value: false }) - CREATE (atvf:AttributeValue { value: false }) - CREATE (atvt:AttributeValue { value: true }) - CREATE (atv44:AttributeValue { value: "#444444" }) + CREATE (atvf:AttributeValue { value: false, is_default: false }) + CREATE (atvt:AttributeValue { value: true, is_default: false }) + CREATE (atv44:AttributeValue { value: "#444444", is_default: false }) CREATE (c1at1:Attribute { uuid: "c1at1", name: "name", branch_support: "aware"}) CREATE (c1at2:Attribute { uuid: "c1at2", name: "nbr_seats", branch_support: "aware"}) @@ -239,10 +245,10 @@ async def base_dataset_02(db: InfrahubDatabase, default_branch: Branch, car_pers CREATE (c1)-[:HAS_ATTRIBUTE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60}]->(c1at3) CREATE (c1)-[:HAS_ATTRIBUTE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60}]->(c1at4) - CREATE (c1av11:AttributeValue { value: "accord"}) - CREATE (c1av12:AttributeValue { value: "volt"}) - CREATE (c1av21:AttributeValue { value: 5}) - CREATE (c1av22:AttributeValue { value: 4}) + CREATE (c1av11:AttributeValue { value: "accord", is_default: false }) + CREATE (c1av12:AttributeValue { value: "volt", is_default: false }) + CREATE (c1av21:AttributeValue { value: 5, is_default: false }) + CREATE (c1av22:AttributeValue { value: 4, is_default: false }) CREATE (c1at1)-[:HAS_VALUE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60, to: $time_m20}]->(c1av11) CREATE (c1at1)-[:HAS_VALUE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m20 }]->(c1av12) @@ -275,8 +281,8 @@ async def base_dataset_02(db: InfrahubDatabase, default_branch: Branch, car_pers CREATE (c2)-[:HAS_ATTRIBUTE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m20}]->(c2at3) CREATE (c2)-[:HAS_ATTRIBUTE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m20}]->(c2at4) - CREATE (c2av11:AttributeValue { value: "odyssey" }) - CREATE (c2av21:AttributeValue { value: 8 }) + CREATE (c2av11:AttributeValue { value: "odyssey", is_default: false }) + CREATE (c2av21:AttributeValue { value: 8, is_default: false }) CREATE (c2at1)-[:HAS_VALUE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m20 }]->(c2av11) CREATE (c2at1)-[:IS_PROTECTED {branch: $main_branch, branch_level: 1, status: "active", from: $time_m20 }]->(bool_false) @@ -306,8 +312,8 @@ async def base_dataset_02(db: InfrahubDatabase, default_branch: Branch, car_pers CREATE (c3)-[:HAS_ATTRIBUTE {branch: $branch1, branch_level: 2, status: "active", from: $time_m40}]->(c3at3) CREATE (c3)-[:HAS_ATTRIBUTE {branch: $branch1, branch_level: 2, status: "active", from: $time_m40}]->(c3at4) - CREATE (c3av11:AttributeValue { uuid: "c3av11", value: "volt"}) - CREATE (c3av21:AttributeValue { uuid: "c3av21", 
value: 4}) + CREATE (c3av11:AttributeValue { value: "volt", is_default: false }) + CREATE (c3av21:AttributeValue { value: 4, is_default: false }) CREATE (c3at1)-[:HAS_VALUE {branch: $branch1, branch_level: 2, status: "active", from: $time_m40 }]->(c3av11) CREATE (c3at1)-[:IS_PROTECTED {branch: $branch1, branch_level: 2, status: "active", from: $time_m40 }]->(bool_false) @@ -329,7 +335,7 @@ async def base_dataset_02(db: InfrahubDatabase, default_branch: Branch, car_pers CREATE (p1)-[:IS_PART_OF { branch: $main_branch, branch_level: 1, from: $time_m60, status: "active"}]->(root) CREATE (p1at1:Attribute { uuid: "p1at1", name: "name", branch_support: "aware"}) CREATE (p1)-[:HAS_ATTRIBUTE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60}]->(p1at1) - CREATE (p1av11:AttributeValue { uuid: "p1av11", value: "John Doe"}) + CREATE (p1av11:AttributeValue { value: "John Doe", is_default: false }) CREATE (p1at1)-[:HAS_VALUE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60 }]->(p1av11) CREATE (p1at1)-[:IS_PROTECTED {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60 }]->(bool_false) CREATE (p1at1)-[:IS_VISIBLE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60 }]->(bool_true) @@ -338,7 +344,7 @@ async def base_dataset_02(db: InfrahubDatabase, default_branch: Branch, car_pers CREATE (p2)-[:IS_PART_OF {branch: $main_branch, branch_level: 1, from: $time_m60, status: "active"}]->(root) CREATE (p2at1:Attribute { uuid: "p2at1", name: "name", branch_support: "aware"}) CREATE (p2)-[:HAS_ATTRIBUTE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60}]->(p2at1) - CREATE (p2av11:AttributeValue { uuid: "p2av11", value: "Jane Doe"}) + CREATE (p2av11:AttributeValue { value: "Jane Doe", is_default: false }) CREATE (p2at1)-[:HAS_VALUE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60 }]->(p2av11) CREATE (p2at1)-[:IS_PROTECTED {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60 }]->(bool_false) CREATE (p2at1)-[:IS_VISIBLE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60 }]->(bool_true) @@ -347,7 +353,7 @@ async def base_dataset_02(db: InfrahubDatabase, default_branch: Branch, car_pers CREATE (p3)-[:IS_PART_OF {branch: $main_branch, branch_level: 1, from: $time_m60, status: "active"}]->(root) CREATE (p3at1:Attribute { uuid: "p3at1", name: "name", branch_support: "aware"}) CREATE (p3)-[:HAS_ATTRIBUTE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60}]->(p3at1) - CREATE (p3av11:AttributeValue { uuid: "p3av11", value: "Bill"}) + CREATE (p3av11:AttributeValue { value: "Bill", is_default: false }) CREATE (p3at1)-[:HAS_VALUE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60 }]->(p3av11) CREATE (p3at1)-[:IS_PROTECTED {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60 }]->(bool_false) CREATE (p3at1)-[:IS_VISIBLE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60 }]->(bool_true) @@ -446,9 +452,9 @@ async def base_dataset_12(db: InfrahubDatabase, default_branch: Branch, car_pers CREATE (bool_true:Boolean { value: true }) CREATE (bool_false:Boolean { value: false }) - CREATE (atvf:AttributeValue { value: false }) - CREATE (atvt:AttributeValue { value: true }) - CREATE (atv44:AttributeValue { value: "#444444" }) + CREATE (atvf:AttributeValue { value: false, is_default: false }) + CREATE (atvt:AttributeValue { value: true, is_default: false }) + CREATE 
(atv44:AttributeValue { value: "#444444", is_default: false }) CREATE (c1at1:Attribute { uuid: "c1at1", name: "name", branch_support: "aware"}) CREATE (c1at2:Attribute { uuid: "c1at2", name: "nbr_seats", branch_support: "agnostic"}) @@ -459,10 +465,10 @@ async def base_dataset_12(db: InfrahubDatabase, default_branch: Branch, car_pers CREATE (c1)-[:HAS_ATTRIBUTE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60}]->(c1at3) CREATE (c1)-[:HAS_ATTRIBUTE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60}]->(c1at4) - CREATE (c1av11:AttributeValue { value: "accord"}) - CREATE (c1av12:AttributeValue { value: "volt"}) - CREATE (c1av21:AttributeValue { value: 5}) - CREATE (c1av22:AttributeValue { value: 4}) + CREATE (c1av11:AttributeValue { value: "accord", is_default: false }) + CREATE (c1av12:AttributeValue { value: "volt", is_default: false }) + CREATE (c1av21:AttributeValue { value: 5, is_default: false }) + CREATE (c1av22:AttributeValue { value: 4, is_default: false }) CREATE (c1at1)-[:HAS_VALUE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m60, to: $time_m20}]->(c1av11) CREATE (c1at1)-[:HAS_VALUE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m20 }]->(c1av12) @@ -495,8 +501,8 @@ async def base_dataset_12(db: InfrahubDatabase, default_branch: Branch, car_pers CREATE (c2)-[:HAS_ATTRIBUTE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m20}]->(c2at3) CREATE (c2)-[:HAS_ATTRIBUTE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m20}]->(c2at4) - CREATE (c2av11:AttributeValue { value: "odyssey" }) - CREATE (c2av21:AttributeValue { value: 8 }) + CREATE (c2av11:AttributeValue { value: "odyssey", is_default: false }) + CREATE (c2av21:AttributeValue { value: 8, is_default: false }) CREATE (c2at1)-[:HAS_VALUE {branch: $main_branch, branch_level: 1, status: "active", from: $time_m20 }]->(c2av11) CREATE (c2at1)-[:IS_PROTECTED {branch: $main_branch, branch_level: 1, status: "active", from: $time_m20 }]->(bool_false) @@ -526,8 +532,8 @@ async def base_dataset_12(db: InfrahubDatabase, default_branch: Branch, car_pers CREATE (c3)-[:HAS_ATTRIBUTE {branch: $branch1, branch_level: 2, status: "active", from: $time_m40}]->(c3at3) CREATE (c3)-[:HAS_ATTRIBUTE {branch: $branch1, branch_level: 2, status: "active", from: $time_m40}]->(c3at4) - CREATE (c3av11:AttributeValue { uuid: "c3av11", value: "volt"}) - CREATE (c3av21:AttributeValue { uuid: "c3av21", value: 4}) + CREATE (c3av11:AttributeValue { value: "volt", is_default: false }) + CREATE (c3av21:AttributeValue { value: 4, is_default: false }) CREATE (c3at1)-[:HAS_VALUE {branch: $branch1, branch_level: 2, status: "active", from: $time_m40 }]->(c3av11) CREATE (c3at1)-[:IS_PROTECTED {branch: $branch1, branch_level: 2, status: "active", from: $time_m40 }]->(bool_false) @@ -549,7 +555,7 @@ async def base_dataset_12(db: InfrahubDatabase, default_branch: Branch, car_pers CREATE (p1)-[:IS_PART_OF { branch: $global_branch, branch_level: 1, from: $time_m60, status: "active"}]->(root) CREATE (p1at1:Attribute { uuid: "p1at1", name: "name", branch_support: "agnostic"}) CREATE (p1)-[:HAS_ATTRIBUTE {branch: $global_branch, branch_level: 1, status: "active", from: $time_m60}]->(p1at1) - CREATE (p1av11:AttributeValue { uuid: "p1av11", value: "John Doe"}) + CREATE (p1av11:AttributeValue { value: "John Doe", is_default: false }) CREATE (p1at1)-[:HAS_VALUE {branch: $global_branch, branch_level: 1, status: "active", from: $time_m60 }]->(p1av11) CREATE 
(p1at1)-[:IS_PROTECTED {branch: $global_branch, branch_level: 1, status: "active", from: $time_m60 }]->(bool_false) CREATE (p1at1)-[:IS_VISIBLE {branch: $global_branch, branch_level: 1, status: "active", from: $time_m60 }]->(bool_true) @@ -558,7 +564,7 @@ async def base_dataset_12(db: InfrahubDatabase, default_branch: Branch, car_pers CREATE (p2)-[:IS_PART_OF {branch: $global_branch, branch_level: 1, from: $time_m60, status: "active"}]->(root) CREATE (p2at1:Attribute { uuid: "p2at1", name: "name", branch_support: "agnostic"}) CREATE (p2)-[:HAS_ATTRIBUTE {branch: $global_branch, branch_level: 1, status: "active", from: $time_m60}]->(p2at1) - CREATE (p2av11:AttributeValue { uuid: "p2av11", value: "Jane Doe"}) + CREATE (p2av11:AttributeValue { value: "Jane Doe", is_default: false }) CREATE (p2at1)-[:HAS_VALUE {branch: $global_branch, branch_level: 1, status: "active", from: $time_m60 }]->(p2av11) CREATE (p2at1)-[:IS_PROTECTED {branch: $global_branch, branch_level: 1, status: "active", from: $time_m60 }]->(bool_false) CREATE (p2at1)-[:IS_VISIBLE {branch: $global_branch, branch_level: 1, status: "active", from: $time_m60 }]->(bool_true) @@ -567,7 +573,7 @@ async def base_dataset_12(db: InfrahubDatabase, default_branch: Branch, car_pers CREATE (p3)-[:IS_PART_OF {branch: $global_branch, branch_level: 1, from: $time_m60, status: "active"}]->(root) CREATE (p3at1:Attribute { uuid: "p3at1", name: "name", branch_support: "agnostic"}) CREATE (p3)-[:HAS_ATTRIBUTE {branch: $global_branch, branch_level: 1, status: "active", from: $time_m60}]->(p3at1) - CREATE (p3av11:AttributeValue { uuid: "p3av11", value: "Bill"}) + CREATE (p3av11:AttributeValue { value: "Bill", is_default: false }) CREATE (p3at1)-[:HAS_VALUE {branch: $global_branch, branch_level: 1, status: "active", from: $time_m60 }]->(p3av11) CREATE (p3at1)-[:IS_PROTECTED {branch: $global_branch, branch_level: 1, status: "active", from: $time_m60 }]->(bool_false) CREATE (p3at1)-[:IS_VISIBLE {branch: $global_branch, branch_level: 1, status: "active", from: $time_m60 }]->(bool_true) @@ -710,35 +716,35 @@ async def base_dataset_03(db: InfrahubDatabase, default_branch: Branch, person_t CREATE (bool_false:Boolean { value: false }) // Create the Boolean nodes for the attribute value - CREATE (atvf:AttributeValue { value: false }) - CREATE (atvt:AttributeValue { value: true }) + CREATE (atvf:AttributeValue { value: false, is_default: false }) + CREATE (atvt:AttributeValue { value: true, is_default: false }) // Create a bunch a Attribute Value that can be easily identify and remembered - CREATE (mon:AttributeValue { value: "monday"}) - CREATE (tue:AttributeValue { value: "tuesday"}) - CREATE (wed:AttributeValue { value: "wednesday"}) - CREATE (thu:AttributeValue { value: "thursday"}) - CREATE (fri:AttributeValue { value: "friday"}) - CREATE (sat:AttributeValue { value: "saturday"}) - CREATE (sun:AttributeValue { value: "sunday"}) - - CREATE (jan:AttributeValue { value: "january"}) - CREATE (feb:AttributeValue { value: "february"}) - CREATE (mar:AttributeValue { value: "march"}) - CREATE (apr:AttributeValue { value: "april"}) - CREATE (may:AttributeValue { value: "may"}) - CREATE (june:AttributeValue { value: "june"}) - CREATE (july:AttributeValue { value: "july"}) - CREATE (aug:AttributeValue { value: "august"}) - CREATE (sept:AttributeValue { value: "september"}) - CREATE (oct:AttributeValue { value: "october"}) - CREATE (nov:AttributeValue { value: "november"}) - CREATE (dec:AttributeValue { value: "december"}) - - CREATE (blue:AttributeValue { value: 
"blue"}) - CREATE (red:AttributeValue { value: "red"}) - CREATE (black:AttributeValue { value: "black"}) - CREATE (green:AttributeValue { value: "green"}) + CREATE (mon:AttributeValue { value: "monday", is_default: false }) + CREATE (tue:AttributeValue { value: "tuesday", is_default: false }) + CREATE (wed:AttributeValue { value: "wednesday", is_default: false }) + CREATE (thu:AttributeValue { value: "thursday", is_default: false }) + CREATE (fri:AttributeValue { value: "friday", is_default: false }) + CREATE (sat:AttributeValue { value: "saturday", is_default: false }) + CREATE (sun:AttributeValue { value: "sunday", is_default: false }) + + CREATE (jan:AttributeValue { value: "january", is_default: false }) + CREATE (feb:AttributeValue { value: "february", is_default: false }) + CREATE (mar:AttributeValue { value: "march", is_default: false }) + CREATE (apr:AttributeValue { value: "april", is_default: false }) + CREATE (may:AttributeValue { value: "may", is_default: false }) + CREATE (june:AttributeValue { value: "june", is_default: false }) + CREATE (july:AttributeValue { value: "july", is_default: false }) + CREATE (aug:AttributeValue { value: "august", is_default: false }) + CREATE (sept:AttributeValue { value: "september", is_default: false }) + CREATE (oct:AttributeValue { value: "october", is_default: false }) + CREATE (nov:AttributeValue { value: "november", is_default: false }) + CREATE (dec:AttributeValue { value: "december", is_default: false }) + + CREATE (blue:AttributeValue { value: "blue", is_default: false }) + CREATE (red:AttributeValue { value: "red", is_default: false }) + CREATE (black:AttributeValue { value: "black", is_default: false }) + CREATE (green:AttributeValue { value: "green", is_default: false }) // TAG 1 - BLUE CREATE (t1:Node:Tag { uuid: "t1", kind: "Tag", branch_support: "aware"}) @@ -788,32 +794,30 @@ async def base_dataset_03(db: InfrahubDatabase, default_branch: Branch, person_t MERGE (bool_false:Boolean { value: false }) // Create the Boolean nodes for the attribute value - MERGE (atvf:AttributeValue { value: false }) - MERGE (atvt:AttributeValue { value: true }) + MERGE (atvf:AttributeValue { value: false, is_default: false }) + MERGE (atvt:AttributeValue { value: true, is_default: false }) // Create a bunch a Attribute Value that can be easily identify and remembered - MERGE (mon:AttributeValue { value: "monday"}) - MERGE (tue:AttributeValue { value: "tuesday"}) - MERGE (wed:AttributeValue { value: "wednesday"}) - MERGE (thu:AttributeValue { value: "thursday"}) - MERGE (fri:AttributeValue { value: "friday"}) - MERGE (sat:AttributeValue { value: "saturday"}) - MERGE (sun:AttributeValue { value: "sunday"}) - - MERGE (jan:AttributeValue { value: "january"}) - MERGE (feb:AttributeValue { value: "february"}) - MERGE (mar:AttributeValue { value: "march"}) - MERGE (apr:AttributeValue { value: "april"}) - MERGE (may:AttributeValue { value: "may"}) - MERGE (june:AttributeValue { value: "june"}) - MERGE (july:AttributeValue { value: "july"}) - MERGE (aug:AttributeValue { value: "august"}) - MERGE (sept:AttributeValue { value: "september"}) - MERGE (oct:AttributeValue { value: "october"}) - MERGE (nov:AttributeValue { value: "november"}) - MERGE (dec:AttributeValue { value: "december"}) - - + MERGE (mon:AttributeValue { value: "monday", is_default: false }) + MERGE (tue:AttributeValue { value: "tuesday", is_default: false }) + MERGE (wed:AttributeValue { value: "wednesday", is_default: false }) + MERGE (thu:AttributeValue { value: "thursday", is_default: false 
}) + MERGE (fri:AttributeValue { value: "friday", is_default: false }) + MERGE (sat:AttributeValue { value: "saturday", is_default: false }) + MERGE (sun:AttributeValue { value: "sunday", is_default: false }) + + MERGE (jan:AttributeValue { value: "january", is_default: false }) + MERGE (feb:AttributeValue { value: "february", is_default: false }) + MERGE (mar:AttributeValue { value: "march", is_default: false }) + MERGE (apr:AttributeValue { value: "april", is_default: false }) + MERGE (may:AttributeValue { value: "may", is_default: false }) + MERGE (june:AttributeValue { value: "june", is_default: false }) + MERGE (july:AttributeValue { value: "july", is_default: false }) + MERGE (aug:AttributeValue { value: "august", is_default: false }) + MERGE (sept:AttributeValue { value: "september", is_default: false }) + MERGE (oct:AttributeValue { value: "october", is_default: false }) + MERGE (nov:AttributeValue { value: "november", is_default: false }) + MERGE (dec:AttributeValue { value: "december", is_default: false }) """ query2 = """ @@ -1207,6 +1211,7 @@ async def car_person_schema_generics( "default_filter": "name__value", "display_labels": ["name__value", "color__value"], "order_by": ["name__value"], + "include_in_menu": True, "attributes": [ {"name": "name", "kind": "Text", "unique": True}, {"name": "nbr_seats", "kind": "Number"}, @@ -1438,6 +1443,15 @@ async def person_alfred_main(db: InfrahubDatabase, default_branch: Branch, car_p return person +@pytest.fixture +async def car_profile1_main(db: InfrahubDatabase, default_branch: Branch, car_person_schema) -> Node: + profile = await Node.init(db=db, schema="ProfileTestCar", branch=default_branch) + await profile.new(db=db, profile_name="car-profile1", nbr_seats=5, is_electric=False) + await profile.save(db=db) + + return profile + + @pytest.fixture async def car_accord_main(db: InfrahubDatabase, default_branch: Branch, person_john_main: Node) -> Node: car = await Node.init(db=db, schema="TestCar", branch=default_branch) @@ -1627,6 +1641,42 @@ async def all_attribute_types_schema( {"name": "myint", "kind": "Number", "optional": True}, {"name": "mylist", "kind": "List", "optional": True}, {"name": "myjson", "kind": "JSON", "optional": True}, + {"name": "ipaddress", "kind": "IPHost", "optional": True}, + {"name": "prefix", "kind": "IPNetwork", "optional": True}, + ], + } + + node_schema = NodeSchema(**SCHEMA) + registry.schema.set(name=node_schema.kind, schema=node_schema, branch=default_branch.name) + registry.schema.process_schema_branch(name=default_branch.name) + return node_schema + + +@pytest.fixture +async def all_attribute_default_types_schema( + db: InfrahubDatabase, default_branch: Branch, group_schema, data_schema +) -> NodeSchema: + SCHEMA: dict[str, Any] = { + "name": "AllAttributeTypes", + "namespace": "Test", + "branch": BranchSupportType.AWARE.value, + "attributes": [ + {"name": "name", "kind": "Text", "optional": True}, + {"name": "mystring", "kind": "Text", "optional": True}, + {"name": "mybool", "kind": "Boolean", "optional": True}, + {"name": "myint", "kind": "Number", "optional": True}, + {"name": "mylist", "kind": "List", "optional": True}, + {"name": "myjson", "kind": "JSON", "optional": True}, + {"name": "mystring_default", "kind": "Text", "optional": True, "default_value": "a string"}, + {"name": "mybool_default", "kind": "Boolean", "optional": True, "default_value": False}, + {"name": "myint_default", "kind": "Number", "optional": True, "default_value": 10}, + {"name": "mylist_default", "kind": "List", "optional": 
True, "default_value": [10, 11, 12]}, + {"name": "myjson_default", "kind": "JSON", "optional": True, "default_value": {"name": "value"}}, + {"name": "mystring_none", "kind": "Text", "optional": True}, + {"name": "mybool_none", "kind": "Boolean", "optional": True}, + {"name": "myint_none", "kind": "Number", "optional": True}, + {"name": "mylist_none", "kind": "List", "optional": True}, + {"name": "myjson_none", "kind": "JSON", "optional": True}, ], } @@ -1944,6 +1994,7 @@ async def hierarchical_location_schema_simple(db: InfrahubDatabase, default_bran "name": "Rack", "namespace": "Location", "default_filter": "name__value", + "order_by": ["name__value"], "inherit_from": ["LocationGeneric"], "parent": "LocationSite", "children": "", @@ -1969,8 +2020,7 @@ async def hierarchical_location_schema_simple(db: InfrahubDatabase, default_bran @pytest.fixture async def hierarchical_location_schema( db: InfrahubDatabase, default_branch: Branch, hierarchical_location_schema_simple, register_core_models_schema -) -> None: - ... +) -> None: ... @pytest.fixture @@ -2073,7 +2123,7 @@ def batched(iterable, n): tags = [] nbr_tags_per_group = 2 - for idx in range(0, len(GROUPS_DATA) * nbr_tags_per_group): + for idx in range(len(GROUPS_DATA) * nbr_tags_per_group): obj = await Node.init(db=db, schema="BuiltinTag") await obj.new(db=db, name=f"tag-{idx}") await obj.save(db=db) @@ -2262,6 +2312,34 @@ async def builtin_schema() -> SchemaRoot: return SchemaRoot(**SCHEMA) +@pytest.fixture +async def ipam_schema() -> SchemaRoot: + SCHEMA: dict[str, Any] = { + "nodes": [ + { + "name": "IPPrefix", + "namespace": "Ipam", + "default_filter": "prefix__value", + "order_by": ["prefix__value"], + "display_labels": ["prefix__value"], + "branch": BranchSupportType.AWARE.value, + "inherit_from": [InfrahubKind.IPPREFIX], + }, + { + "name": "IPAddress", + "namespace": "Ipam", + "default_filter": "address__value", + "order_by": ["address__value"], + "display_labels": ["address__value"], + "branch": BranchSupportType.AWARE.value, + "inherit_from": [InfrahubKind.IPADDRESS], + }, + ], + } + + return SchemaRoot(**SCHEMA) + + @pytest.fixture async def register_builtin_models_schema(default_branch: Branch, builtin_schema: SchemaRoot) -> SchemaBranch: schema_branch = registry.schema.register_schema(schema=builtin_schema, branch=default_branch.name) @@ -2296,6 +2374,13 @@ async def register_account_schema(db: InfrahubDatabase) -> None: registry.schema.register_schema(schema=SchemaRoot(nodes=nodes, generics=generics)) +@pytest.fixture +async def register_ipam_schema(default_branch: Branch, ipam_schema: SchemaRoot) -> SchemaBranch: + schema_branch = registry.schema.register_schema(schema=ipam_schema, branch=default_branch.name) + default_branch.update_schema_hash() + return schema_branch + + @pytest.fixture async def create_test_admin(db: InfrahubDatabase, register_core_models_schema, data_schema) -> Node: account = await Node.init(db=db, schema=InfrahubKind.ACCOUNT) diff --git a/backend/tests/unit/core/constraint_validators/test_attribute_regex_update.py b/backend/tests/unit/core/constraint_validators/test_attribute_regex_update.py index a3bf08e0d5..d937cda770 100644 --- a/backend/tests/unit/core/constraint_validators/test_attribute_regex_update.py +++ b/backend/tests/unit/core/constraint_validators/test_attribute_regex_update.py @@ -2,7 +2,7 @@ from infrahub.core import registry from infrahub.core.branch import Branch -from infrahub.core.constants import PathType, SchemaPathType +from infrahub.core.constants import NULL_VALUE, PathType, 
SchemaPathType from infrahub.core.manager import NodeManager from infrahub.core.node import Node from infrahub.core.path import DataPath, SchemaPath @@ -57,7 +57,9 @@ async def test_query_NULL_allowed( registry.schema.set(name="TestCar", schema=car_schema) no_color_car = await Node.init(db=db, schema="TestCar", branch=default_branch) - await no_color_car.new(db=db, name="NoColor", color="NULL", nbr_seats=3, is_electric=False, owner=person_john_main) + await no_color_car.new( + db=db, name="NoColor", color=NULL_VALUE, nbr_seats=3, is_electric=False, owner=person_john_main + ) + await no_color_car.save(db=db) upper_color_car = await Node.init(db=db, schema="TestCar", branch=default_branch) await upper_color_car.new( diff --git a/backend/tests/unit/core/constraint_validators/test_uniqueness_checker.py b/backend/tests/unit/core/constraint_validators/test_uniqueness_checker.py index 3395e59370..c3700e4004 100644 --- a/backend/tests/unit/core/constraint_validators/test_uniqueness_checker.py +++ b/backend/tests/unit/core/constraint_validators/test_uniqueness_checker.py @@ -1,9 +1,13 @@ +import pytest + from infrahub.core import registry from infrahub.core.branch import Branch from infrahub.core.constants import PathType, SchemaPathType from infrahub.core.manager import NodeManager from infrahub.core.node import Node from infrahub.core.path import DataPath, SchemaPath +from infrahub.core.schema import SchemaRoot +from infrahub.core.schema.relationship_schema import RelationshipSchema from infrahub.core.validators.model import SchemaConstraintValidatorRequest from infrahub.core.validators.uniqueness.checker import UniquenessChecker from infrahub.database import InfrahubDatabase @@ -48,6 +52,8 @@ async def test_one_violation( ): schema = registry.schema.get("TestCar", branch=branch) schema.get_attribute("nbr_seats").unique = True + schema_root = SchemaRoot(nodes=[schema]) + registry.schema.register_schema(schema=schema_root, branch=branch.name) grouped_data_paths = await self.__call_system_under_test(db, branch, schema) @@ -137,6 +143,7 @@ async def test_combined_uniqueness_constraint_no_violations( assert len(grouped_data_paths) == 1 assert not grouped_data_paths[0].get_all_data_paths() + @pytest.mark.skip("We technically don't support uniqueness constraints on properties of relationships") async def test_combined_uniqueness_constraints_with_violations( self, db: InfrahubDatabase, @@ -164,6 +171,8 @@ async def test_combined_uniqueness_constraints_with_violations( schema = registry.schema.get("TestCar", branch=branch) schema.uniqueness_constraints = [["color__value", "owner__height"]] + schema_root = SchemaRoot(nodes=[schema]) + registry.schema.register_schema(schema=schema_root, branch=branch.name) grouped_data_paths = await self.__call_system_under_test(db, branch, schema) @@ -268,6 +277,7 @@ async def test_generic_unique_attribute_violations( in all_data_paths ) + @pytest.mark.skip("We technically don't support uniqueness constraints on properties of relationships") async def test_generic_unique_attribute_multiple_relationship_violations_to_same_node( self, db: InfrahubDatabase, @@ -295,6 +305,8 @@ async def test_generic_unique_attribute_multiple_relationship_violations_to_same schema = registry.schema.get("TestCar", branch=branch) schema.uniqueness_constraints = [["owner__height", "previous_owner__height"]] + schema_root = SchemaRoot(nodes=[schema]) + registry.schema.register_schema(schema=schema_root, branch=branch.name) grouped_data_paths = await self.__call_system_under_test(db, branch, schema)
@@ -350,6 +362,7 @@ async def test_generic_unique_attribute_multiple_relationship_violations_to_same in all_data_paths ) + @pytest.mark.skip("We technically don't support uniqueness constraints on properties of relationships") async def test_generic_unique_constraint_relationship_with_and_without_attr( self, db: InfrahubDatabase, @@ -371,6 +384,8 @@ async def test_generic_unique_constraint_relationship_with_and_without_attr( schema = registry.schema.get("TestCar", branch=branch) schema.uniqueness_constraints = [["owner", "previous_owner__height"]] + schema_root = SchemaRoot(nodes=[schema]) + registry.schema.register_schema(schema=schema_root, branch=branch.name) grouped_data_paths = await self.__call_system_under_test(db, branch, schema) assert len(grouped_data_paths) == 1 @@ -441,6 +456,8 @@ async def test_relationship_violation_wo_attribute( schema = registry.schema.get("TestCar", branch=branch) schema.uniqueness_constraints = [["owner"]] + schema_root = SchemaRoot(nodes=[schema]) + registry.schema.register_schema(schema=schema_root, branch=branch.name) grouped_data_paths = await self.__call_system_under_test(db, branch, schema) assert len(grouped_data_paths) == 1 @@ -483,6 +500,83 @@ async def test_relationship_violation_wo_attribute( in all_data_paths ) + async def test_relationship_violation_wo_attribute_schema_update_on_branch( + self, + db: InfrahubDatabase, + car_accord_main, + car_prius_main, + car_camry_main, + person_john_main, + branch: Branch, + default_branch: Branch, + ): + schema_on_branch = registry.schema.get_node_schema(name="TestCar", branch=branch) + schema_on_branch.relationships.append( + RelationshipSchema( + name="yet_another_owner", + peer="TestPerson", + optional=False, + cardinality="one", + direction="outbound", + identifier="yet_another_owner__testperson", + branch="aware", + ) + ) + schema_on_branch.uniqueness_constraints = [["yet_another_owner"]] + schema_root = SchemaRoot(nodes=[schema_on_branch]) + registry.schema.register_schema(schema=schema_root, branch=branch.name) + + cars_to_update = await NodeManager.get_many( + ids=[car_camry_main.id, car_accord_main.id, car_prius_main.id], db=db, branch=branch + ) + for car_to_update in cars_to_update.values(): + await car_to_update.yet_another_owner.update(data=person_john_main, db=db) + await car_to_update.save(db=db) + + # get the schema from the default branch to test that the constraint gets the + # schema from the correct branch + grouped_data_paths = await self.__call_system_under_test(db, branch, schema_on_branch) + + assert len(grouped_data_paths) == 1 + all_data_paths = grouped_data_paths[0].get_all_data_paths() + assert len(all_data_paths) == 3 + assert ( + DataPath( + branch=branch.name, + path_type=PathType.RELATIONSHIP_ONE, + node_id=car_accord_main.id, + kind="TestCar", + field_name="yet_another_owner", + property_name="id", + value=person_john_main.id, + ) + in all_data_paths + ) + assert ( + DataPath( + branch=branch.name, + path_type=PathType.RELATIONSHIP_ONE, + node_id=car_prius_main.id, + kind="TestCar", + field_name="yet_another_owner", + property_name="id", + value=person_john_main.id, + ) + in all_data_paths + ) + assert ( + DataPath( + branch=branch.name, + path_type=PathType.RELATIONSHIP_ONE, + node_id=car_camry_main.id, + kind="TestCar", + field_name="yet_another_owner", + property_name="id", + value=person_john_main.id, + ) + in all_data_paths + ) + async def test_relationship_no_violation_with_overlaps( self, db: InfrahubDatabase, @@ -546,6 +640,8 @@ async def
test_relationship_violations_with_overlaps( schema = registry.schema.get("TestCar", branch=branch) schema.uniqueness_constraints = [["owner", "color"], ["color", "nbr_seats"]] + schema_root = SchemaRoot(nodes=[schema]) + registry.schema.register_schema(schema=schema_root, branch=branch.name) grouped_data_paths = await self.__call_system_under_test(db, branch, schema) assert len(grouped_data_paths) == 1 diff --git a/backend/tests/unit/core/constraint_validators/test_uniqueness_constraint_query.py b/backend/tests/unit/core/constraint_validators/test_uniqueness_constraint_query.py index 07ba8b349f..48d69ef8bd 100644 --- a/backend/tests/unit/core/constraint_validators/test_uniqueness_constraint_query.py +++ b/backend/tests/unit/core/constraint_validators/test_uniqueness_constraint_query.py @@ -20,7 +20,7 @@ async def test_query_uniqueness_no_violations( db=db, branch=branch, query_request=NodeUniquenessQueryRequest( - **{"kind": "TestCar", "unique_attribute_paths": [{"attribute_name": "name", "property_name": "value"}]} + kind="TestCar", unique_attribute_paths=[{"attribute_name": "name", "property_name": "value"}] ), ) query_result = await query.execute(db=db) @@ -35,13 +35,11 @@ async def test_query_uniqueness_one_violation( db=db, branch=branch, query_request=NodeUniquenessQueryRequest( - **{ - "kind": "TestCar", - "unique_attribute_paths": [ - {"attribute_name": "name", "property_name": "value"}, - {"attribute_name": "nbr_seats", "property_name": "value"}, - ], - } + kind="TestCar", + unique_attribute_paths=[ + {"attribute_name": "name", "property_name": "value"}, + {"attribute_name": "nbr_seats", "property_name": "value"}, + ], ), ) query_result = await query.execute(db=db) @@ -68,13 +66,11 @@ async def test_query_uniqueness_deleted_node_ignored( db=db, branch=branch, query_request=NodeUniquenessQueryRequest( - **{ - "kind": "TestCar", - "unique_attribute_paths": [ - {"attribute_name": "name", "property_name": "value"}, - {"attribute_name": "nbr_seats", "property_name": "value"}, - ], - } + kind="TestCar", + unique_attribute_paths=[ + {"attribute_name": "name", "property_name": "value"}, + {"attribute_name": "nbr_seats", "property_name": "value"}, + ], ), ) query_result = await query.execute(db=db) @@ -96,13 +92,11 @@ async def test_query_uniqueness_get_latest_update( db=db, branch=branch, query_request=NodeUniquenessQueryRequest( - **{ - "kind": "TestCar", - "unique_attribute_paths": [ - {"attribute_name": "name", "property_name": "value"}, - {"attribute_name": "nbr_seats", "property_name": "value"}, - ], - } + kind="TestCar", + unique_attribute_paths=[ + {"attribute_name": "name", "property_name": "value"}, + {"attribute_name": "nbr_seats", "property_name": "value"}, + ], ), ) query_result = await query.execute(db=db) @@ -129,7 +123,7 @@ async def test_query_uniqueness_cross_branch_conflict( db=db, branch=branch_2, query_request=NodeUniquenessQueryRequest( - **{"kind": "TestCar", "unique_attribute_paths": [{"attribute_name": "name", "property_name": "value"}]} + kind="TestCar", unique_attribute_paths=[{"attribute_name": "name", "property_name": "value"}] ), ) query_result = await query.execute(db=db) @@ -210,14 +204,12 @@ async def test_query_uniqueness_multiple_attribute_violations( db=db, branch=branch, query_request=NodeUniquenessQueryRequest( - **{ - "kind": "TestCar", - "unique_attribute_paths": [ - {"attribute_name": "name", "property_name": "value"}, - {"attribute_name": "color", "property_name": "value"}, - {"attribute_name": "nbr_seats", "property_name": "value"}, - ], - } + 
kind="TestCar", + unique_attribute_paths=[ + {"attribute_name": "name", "property_name": "value"}, + {"attribute_name": "color", "property_name": "value"}, + {"attribute_name": "nbr_seats", "property_name": "value"}, + ], ), ) query_result = await query.execute(db=db) @@ -248,11 +240,9 @@ async def test_query_relationship_uniqueness_no_violations( db=db, branch=branch, query_request=NodeUniquenessQueryRequest( - **{ - "kind": "TestCar", - "unique_attribute_paths": [{"attribute_name": "name", "property_name": "value"}], - "relationship_attribute_paths": [{"identifier": "testcar__testperson", "attribute_name": "height"}], - } + kind="TestCar", + unique_attribute_paths=[{"attribute_name": "name", "property_name": "value"}], + relationship_attribute_paths=[{"identifier": "testcar__testperson", "attribute_name": "height"}], ), ) query_result = await query.execute(db=db) @@ -280,11 +270,9 @@ async def test_query_relationship_uniqueness_one_violation( db=db, branch=branch, query_request=NodeUniquenessQueryRequest( - **{ - "kind": "TestCar", - "unique_attribute_paths": [{"attribute_name": "name", "property_name": "value"}], - "relationship_attribute_paths": [{"identifier": "testcar__testperson", "attribute_name": "height"}], - } + kind="TestCar", + unique_attribute_paths=[{"attribute_name": "name", "property_name": "value"}], + relationship_attribute_paths=[{"identifier": "testcar__testperson", "attribute_name": "height"}], ), ) query_result = await query.execute(db=db) @@ -367,14 +355,12 @@ async def test_query_relationship_and_attribute_uniqueness_violations( db=db, branch=branch, query_request=NodeUniquenessQueryRequest( - **{ - "kind": "TestCar", - "unique_attribute_paths": [ - {"attribute_name": "name", "property_name": "value"}, - {"attribute_name": "nbr_seats", "property_name": "value"}, - ], - "relationship_attribute_paths": [{"identifier": "testcar__testperson", "attribute_name": "height"}], - } + kind="TestCar", + unique_attribute_paths=[ + {"attribute_name": "name", "property_name": "value"}, + {"attribute_name": "nbr_seats", "property_name": "value"}, + ], + relationship_attribute_paths=[{"identifier": "testcar__testperson", "attribute_name": "height"}], ), ) query_result = await query.execute(db=db) @@ -428,10 +414,7 @@ async def test_query_relationship_violation_no_attribute( db=db, branch=branch, query_request=NodeUniquenessQueryRequest( - **{ - "kind": "TestCar", - "relationship_attribute_paths": [{"identifier": "testcar__testperson", "attribute_name": None}], - } + kind="TestCar", relationship_attribute_paths=[{"identifier": "testcar__testperson", "attribute_name": None}] ), ) query_result = await query.execute(db=db) @@ -484,13 +467,11 @@ async def test_query_response_min_count_0_attribute_paths( db=db, branch=branch, query_request=NodeUniquenessQueryRequest( - **{ - "kind": "TestCar", - "unique_attribute_paths": [ - {"attribute_name": "name", "property_name": "value"}, - {"attribute_name": "nbr_seats", "property_name": "value"}, - ], - } + kind="TestCar", + unique_attribute_paths=[ + {"attribute_name": "name", "property_name": "value"}, + {"attribute_name": "nbr_seats", "property_name": "value"}, + ], ), min_count_required=0, ) @@ -544,13 +525,11 @@ async def test_query_response_min_count_0_relationship_paths( db=db, branch=branch, query_request=NodeUniquenessQueryRequest( - **{ - "kind": "TestCar", - "relationship_attribute_paths": [ - {"identifier": "testcar__testperson", "attribute_name": "height"}, - {"identifier": "testcar__testperson", "attribute_name": "name"}, - ], - } + 
kind="TestCar", + relationship_attribute_paths=[ + {"identifier": "testcar__testperson", "attribute_name": "height"}, + {"identifier": "testcar__testperson", "attribute_name": "name"}, + ], ), min_count_required=0, ) @@ -596,13 +575,11 @@ async def test_query_response_min_count_0_attribute_paths_with_value( db=db, branch=branch, query_request=NodeUniquenessQueryRequest( - **{ - "kind": "TestCar", - "unique_attribute_paths": [ - {"attribute_name": "name", "property_name": "value", "value": "accord"}, - {"attribute_name": "nbr_seats", "property_name": "value"}, - ], - } + kind="TestCar", + unique_attribute_paths=[ + {"attribute_name": "name", "property_name": "value", "value": "accord"}, + {"attribute_name": "nbr_seats", "property_name": "value"}, + ], ), min_count_required=0, ) @@ -648,13 +625,11 @@ async def test_query_response_min_count_0_relationship_paths_with_value( db=db, branch=branch, query_request=NodeUniquenessQueryRequest( - **{ - "kind": "TestCar", - "relationship_attribute_paths": [ - {"identifier": "testcar__testperson", "attribute_name": "height"}, - {"identifier": "testcar__testperson", "attribute_name": "name", "value": "Jane"}, - ], - } + kind="TestCar", + relationship_attribute_paths=[ + {"identifier": "testcar__testperson", "attribute_name": "height"}, + {"identifier": "testcar__testperson", "attribute_name": "name", "value": "Jane"}, + ], ), min_count_required=0, ) diff --git a/backend/tests/unit/core/graph/test_graph_constraints.py b/backend/tests/unit/core/graph/test_graph_constraints.py index ebbdf6ad96..cce97e6ced 100644 --- a/backend/tests/unit/core/graph/test_graph_constraints.py +++ b/backend/tests/unit/core/graph/test_graph_constraints.py @@ -119,6 +119,62 @@ def test_constraint_manager_from_graph_schema_neo4j(db: InfrahubDatabase): type=GraphPropertyType.STRING, mandatory=True, ), + ConstraintNodeNeo4j( + item_name="attributeipnetwork", + item_label="AttributeIPNetwork", + property="value", + type=GraphPropertyType.STRING, + mandatory=True, + ), + ConstraintNodeNeo4j( + item_name="attributeipnetwork", + item_label="AttributeIPNetwork", + property="is_default", + type=GraphPropertyType.BOOLEAN, + mandatory=True, + ), + ConstraintNodeNeo4j( + item_name="attributeipnetwork", + item_label="AttributeIPNetwork", + property="binary_address", + type=GraphPropertyType.STRING, + mandatory=True, + ), + ConstraintNodeNeo4j( + item_name="attributeipnetwork", + item_label="AttributeIPNetwork", + property="version", + type=GraphPropertyType.INTEGER, + mandatory=True, + ), + ConstraintNodeNeo4j( + item_name="attributeiphost", + item_label="AttributeIPHost", + property="value", + type=GraphPropertyType.STRING, + mandatory=True, + ), + ConstraintNodeNeo4j( + item_name="attributeiphost", + item_label="AttributeIPHost", + property="is_default", + type=GraphPropertyType.BOOLEAN, + mandatory=True, + ), + ConstraintNodeNeo4j( + item_name="attributeiphost", + item_label="AttributeIPHost", + property="binary_address", + type=GraphPropertyType.STRING, + mandatory=True, + ), + ConstraintNodeNeo4j( + item_name="attributeiphost", + item_label="AttributeIPHost", + property="version", + type=GraphPropertyType.INTEGER, + mandatory=True, + ), ConstraintNodeNeo4j( item_name="boolean", item_label="Boolean", @@ -484,6 +540,62 @@ def test_constraint_manager_from_graph_schema_memgraph(db: InfrahubDatabase): type=GraphPropertyType.STRING, mandatory=True, ), + ConstraintNodeMemgraph( + item_name="attributeipnetwork", + item_label="AttributeIPNetwork", + property="value", + 
type=GraphPropertyType.STRING, + mandatory=True, + ), + ConstraintNodeMemgraph( + item_name="attributeipnetwork", + item_label="AttributeIPNetwork", + property="is_default", + type=GraphPropertyType.BOOLEAN, + mandatory=True, + ), + ConstraintNodeMemgraph( + item_name="attributeipnetwork", + item_label="AttributeIPNetwork", + property="binary_address", + type=GraphPropertyType.STRING, + mandatory=True, + ), + ConstraintNodeMemgraph( + item_name="attributeipnetwork", + item_label="AttributeIPNetwork", + property="version", + type=GraphPropertyType.INTEGER, + mandatory=True, + ), + ConstraintNodeMemgraph( + item_name="attributeiphost", + item_label="AttributeIPHost", + property="value", + type=GraphPropertyType.STRING, + mandatory=True, + ), + ConstraintNodeMemgraph( + item_name="attributeiphost", + item_label="AttributeIPHost", + property="is_default", + type=GraphPropertyType.BOOLEAN, + mandatory=True, + ), + ConstraintNodeMemgraph( + item_name="attributeiphost", + item_label="AttributeIPHost", + property="binary_address", + type=GraphPropertyType.STRING, + mandatory=True, + ), + ConstraintNodeMemgraph( + item_name="attributeiphost", + item_label="AttributeIPHost", + property="version", + type=GraphPropertyType.INTEGER, + mandatory=True, + ), ConstraintNodeMemgraph( item_name="boolean", item_label="Boolean", @@ -517,6 +629,56 @@ async def test_constraint_manager_database_neo4j(db: InfrahubDatabase, default_b ConstraintInfo(item_name="node_attribute_name_type", item_label="Attribute", property="name"), ConstraintInfo(item_name="node_attribute_uuid_exist", item_label="Attribute", property="uuid"), ConstraintInfo(item_name="node_attribute_uuid_type", item_label="Attribute", property="uuid"), + ConstraintInfo( + item_name="node_attributeiphost_binary_address_exist", + item_label="AttributeIPHost", + property="binary_address", + ), + ConstraintInfo( + item_name="node_attributeiphost_binary_address_type", + item_label="AttributeIPHost", + property="binary_address", + ), + ConstraintInfo( + item_name="node_attributeiphost_is_default_exist", item_label="AttributeIPHost", property="is_default" + ), + ConstraintInfo( + item_name="node_attributeiphost_is_default_type", item_label="AttributeIPHost", property="is_default" + ), + ConstraintInfo(item_name="node_attributeiphost_value_exist", item_label="AttributeIPHost", property="value"), + ConstraintInfo(item_name="node_attributeiphost_value_type", item_label="AttributeIPHost", property="value"), + ConstraintInfo( + item_name="node_attributeiphost_version_exist", item_label="AttributeIPHost", property="version" + ), + ConstraintInfo(item_name="node_attributeiphost_version_type", item_label="AttributeIPHost", property="version"), + ConstraintInfo( + item_name="node_attributeipnetwork_binary_address_exist", + item_label="AttributeIPNetwork", + property="binary_address", + ), + ConstraintInfo( + item_name="node_attributeipnetwork_binary_address_type", + item_label="AttributeIPNetwork", + property="binary_address", + ), + ConstraintInfo( + item_name="node_attributeipnetwork_is_default_exist", item_label="AttributeIPNetwork", property="is_default" + ), + ConstraintInfo( + item_name="node_attributeipnetwork_is_default_type", item_label="AttributeIPNetwork", property="is_default" + ), + ConstraintInfo( + item_name="node_attributeipnetwork_value_exist", item_label="AttributeIPNetwork", property="value" + ), + ConstraintInfo( + item_name="node_attributeipnetwork_value_type", item_label="AttributeIPNetwork", property="value" + ), + ConstraintInfo( + 
item_name="node_attributeipnetwork_version_exist", item_label="AttributeIPNetwork", property="version" + ), + ConstraintInfo( + item_name="node_attributeipnetwork_version_type", item_label="AttributeIPNetwork", property="version" + ), ConstraintInfo(item_name="node_boolean_value_exist", item_label="Boolean", property="value"), ConstraintInfo(item_name="node_boolean_value_type", item_label="Boolean", property="value"), ConstraintInfo(item_name="node_node_branch_support_exist", item_label="Node", property="branch_support"), @@ -646,6 +808,14 @@ async def test_constraint_manager_database_memgraph(db: InfrahubDatabase, defaul ConstraintInfo(item_name="n_a", item_label="Attribute", property="branch_support"), ConstraintInfo(item_name="n_a", item_label="Attribute", property="name"), ConstraintInfo(item_name="n_a", item_label="Attribute", property="uuid"), + ConstraintInfo(item_name="n_a", item_label="AttributeIPNetwork", property="value"), + ConstraintInfo(item_name="n_a", item_label="AttributeIPNetwork", property="is_default"), + ConstraintInfo(item_name="n_a", item_label="AttributeIPNetwork", property="binary_address"), + ConstraintInfo(item_name="n_a", item_label="AttributeIPNetwork", property="version"), + ConstraintInfo(item_name="n_a", item_label="AttributeIPHost", property="value"), + ConstraintInfo(item_name="n_a", item_label="AttributeIPHost", property="is_default"), + ConstraintInfo(item_name="n_a", item_label="AttributeIPHost", property="binary_address"), + ConstraintInfo(item_name="n_a", item_label="AttributeIPHost", property="version"), ConstraintInfo(item_name="n_a", item_label="Boolean", property="value"), ] diff --git a/backend/tests/unit/core/hierarchy/__init__.py b/backend/tests/unit/core/hierarchy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/tests/unit/core/hierarchy/test_hierarchy_update.py b/backend/tests/unit/core/hierarchy/test_hierarchy_update.py new file mode 100644 index 0000000000..b8062a59b7 --- /dev/null +++ b/backend/tests/unit/core/hierarchy/test_hierarchy_update.py @@ -0,0 +1,32 @@ +from infrahub.core import registry +from infrahub.core.manager import NodeManager +from infrahub.database import InfrahubDatabase + +CHECK_HIERARCHY_QUERY = """ +MATCH ({uuid: $node_uuid})-[rel:IS_RELATED]-(rel_node:Relationship {name: "parent__child"}) +RETURN rel +""" + + +async def test_update_node_with_hierarchy(db: InfrahubDatabase, hierarchical_location_data): + site_schema = registry.schema.get(name="LocationSite", duplicate=False) + retrieved_node = await NodeManager.get_one(db=db, id=hierarchical_location_data["seattle"].id) + new_parent = await NodeManager.get_one(db=db, id=hierarchical_location_data["europe"].id) + results = await db.execute_query( + query=CHECK_HIERARCHY_QUERY, params={"node_uuid": hierarchical_location_data["seattle"].id} + ) + for result in results: + assert result.get("rel").get("hierarchy") == site_schema.hierarchy + + await retrieved_node.parent.update(db=db, data=new_parent) + await retrieved_node.save(db=db) + + updated_node = await NodeManager.get_one(db=db, id=retrieved_node.id) + parent_rels = await updated_node.parent.get_relationships(db=db) + assert len(parent_rels) == 1 + assert parent_rels[0].peer_id == new_parent.id + results = await db.execute_query(query=CHECK_HIERARCHY_QUERY, params={"node_uuid": updated_node.id}) + for result in results: + assert result.get("rel").get("hierarchy") == site_schema.hierarchy + nodes = await NodeManager.query(db=db, schema=site_schema, filters={"parent__name__value": 
"europe"}) + assert {node.name.value for node in nodes} == {"paris", "london", "seattle"} diff --git a/backend/tests/unit/core/ipam/__init__.py b/backend/tests/unit/core/ipam/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/tests/unit/core/ipam/conftest.py b/backend/tests/unit/core/ipam/conftest.py new file mode 100644 index 0000000000..173beccebe --- /dev/null +++ b/backend/tests/unit/core/ipam/conftest.py @@ -0,0 +1,105 @@ +import pytest + +from infrahub.core import registry +from infrahub.core.branch import Branch +from infrahub.core.constants import InfrahubKind +from infrahub.core.node import Node +from infrahub.core.schema_manager import SchemaBranch +from infrahub.database import InfrahubDatabase + + +@pytest.fixture +async def ip_dataset_01( + db: InfrahubDatabase, + default_branch: Branch, + register_core_models_schema: SchemaBranch, + register_ipam_schema: SchemaBranch, +): + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=default_branch) + address_schema = registry.schema.get_node_schema(name="IpamIPAddress", branch=default_branch) + + # ----------------------- + # Namespace NS1 + # ----------------------- + + ns1 = await Node.init(db=db, schema=InfrahubKind.NAMESPACE) + await ns1.new(db=db, name="ns1") + await ns1.save(db=db) + + net161 = await Node.init(db=db, schema=prefix_schema) + await net161.new(db=db, prefix="2001:db8::/48", ip_namespace=ns1) + await net161.save(db=db) + + net162 = await Node.init(db=db, schema=prefix_schema) + await net162.new(db=db, prefix="2001:db8::/64", ip_namespace=ns1, parent=net161) + await net162.save(db=db) + + net146 = await Node.init(db=db, schema=prefix_schema) + await net146.new(db=db, prefix="10.0.0.0/8", ip_namespace=ns1) + await net146.save(db=db) + + net140 = await Node.init(db=db, schema=prefix_schema) + await net140.new(db=db, prefix="10.10.0.0/16", ip_namespace=ns1, parent=net146) + await net140.save(db=db) + + net142 = await Node.init(db=db, schema=prefix_schema) + await net142.new(db=db, prefix="10.10.1.0/24", parent=net140, ip_namespace=ns1) + await net142.save(db=db) + + net143 = await Node.init(db=db, schema=prefix_schema) + await net143.new(db=db, prefix="10.10.1.0/27", parent=net142, ip_namespace=ns1) + await net143.save(db=db) + + net144 = await Node.init(db=db, schema=prefix_schema) + await net144.new(db=db, prefix="10.10.2.0/24", parent=net140, ip_namespace=ns1) + await net144.save(db=db) + + net145 = await Node.init(db=db, schema=prefix_schema) + await net145.new(db=db, prefix="10.10.3.0/27", parent=net140, ip_namespace=ns1) + await net145.save(db=db) + + address10 = await Node.init(db=db, schema=address_schema) + await address10.new(db=db, address="10.10.0.0", ip_prefix=net140, ip_namespace=ns1) + await address10.save(db=db) + + address11 = await Node.init(db=db, schema=address_schema) + await address11.new(db=db, address="10.10.1.1", ip_prefix=net143, ip_namespace=ns1) + await address11.save(db=db) + + # ----------------------- + # Namespace NS2 + # ----------------------- + ns2 = await Node.init(db=db, schema=InfrahubKind.NAMESPACE) + await ns2.new(db=db, name="ns2") + await ns2.save(db=db) + + net240 = await Node.init(db=db, schema=prefix_schema) + await net240.new(db=db, prefix="10.10.0.0/15", ip_namespace=ns2) + await net240.save(db=db) + + net241 = await Node.init(db=db, schema=prefix_schema) + await net241.new(db=db, prefix="10.10.0.0/24", parent=net240, ip_namespace=ns2) + await net241.save(db=db) + + net242 = await Node.init(db=db, 
schema=prefix_schema) + await net242.new(db=db, prefix="10.10.4.0/27", parent=net240, ip_namespace=ns2) + await net242.save(db=db) + + data = { + "ns1": ns1, + "ns2": ns2, + "net161": net161, + "net162": net162, + "net140": net140, + "net142": net142, + "net143": net143, + "net144": net144, + "net145": net145, + "net146": net146, + "address10": address10, + "address11": address11, + "net240": net240, + "net241": net241, + "net242": net242, + } + return data diff --git a/backend/tests/unit/core/ipam/test_ipam.py b/backend/tests/unit/core/ipam/test_ipam.py new file mode 100644 index 0000000000..0aa09571d7 --- /dev/null +++ b/backend/tests/unit/core/ipam/test_ipam.py @@ -0,0 +1,253 @@ +import ipaddress + +import pytest + +from infrahub.core import registry +from infrahub.core.branch import Branch +from infrahub.core.constants import InfrahubKind +from infrahub.core.ipam.reconciler import IpamReconciler +from infrahub.core.manager import NodeManager +from infrahub.core.node import Node +from infrahub.core.query.ipam import ( + IPPrefixContainerFetch, + IPPrefixSubnetFetch, + IPPrefixUtilizationAddress, + IPPrefixUtilizationPrefix, + get_container, + get_ip_addresses, + get_ip_prefix_for_ip_address, + get_utilization, +) +from infrahub.core.schema_manager import SchemaBranch +from infrahub.database import InfrahubDatabase + + +async def test_ipprefix_creation( + db: InfrahubDatabase, + default_branch: Branch, + register_core_models_schema: SchemaBranch, + register_ipam_schema: SchemaBranch, +): + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=default_branch) + + prefix1 = await Node.init(db=db, schema=prefix_schema) + await prefix1.new(db=db, prefix="2001:db8::/32") + await prefix1.save(db=db) + + prefix2 = await Node.init(db=db, schema=prefix_schema) + await prefix2.new(db=db, prefix="192.0.2.0/24") + await prefix2.save(db=db) + + +async def test_ipaddress_creation( + db: InfrahubDatabase, + default_branch: Branch, + register_core_models_schema: SchemaBranch, + register_ipam_schema: SchemaBranch, +): + address_schema = registry.schema.get_node_schema(name="IpamIPAddress", branch=default_branch) + + address1 = await Node.init(db=db, schema=address_schema) + await address1.new(db=db, address="2001:db8::/64") + await address1.save(db=db) + + address2 = await Node.init(db=db, schema=address_schema) + await address2.new(db=db, address="192.0.2.0/24") + await address2.save(db=db) + + +async def test_ipprefix_is_within_container( + db: InfrahubDatabase, + default_branch: Branch, + default_ipnamespace: Node, + register_core_models_schema: SchemaBranch, + register_ipam_schema: SchemaBranch, +): + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=default_branch) + + container = await Node.init(db=db, schema=prefix_schema) + await container.new(db=db, prefix="2001:db8::/32", ip_namespace=default_ipnamespace) + await container.save(db=db) + + prefix = await Node.init(db=db, schema=prefix_schema) + await prefix.new(db=db, prefix="2001:db8::/48", parent=container, ip_namespace=default_ipnamespace) + await prefix.save(db=db) + + unrelated = await Node.init(db=db, schema=prefix_schema) + await unrelated.new(db=db, prefix="192.0.2.0/24", ip_namespace=default_ipnamespace) + await unrelated.save(db=db) + + container_ip_network = ipaddress.ip_network(container.prefix.value) + prefix_ip_network = ipaddress.ip_network(prefix.prefix.value) + + prefix_container = await get_container(db=db, branch=default_branch, ip_prefix=container_ip_network) + assert 
prefix_container is None + + prefix_container = await get_container(db=db, branch=default_branch, ip_prefix=prefix_ip_network) + assert prefix_container + assert prefix_container.prefix == ipaddress.ip_network(container_ip_network) + + +@pytest.mark.parametrize( + "input,response", + [ + (ipaddress.ip_network("10.10.0.0/22"), ["10.10.1.0/24", "10.10.2.0/24", "10.10.3.0/27"]), + (ipaddress.ip_network("2001:db8::/32"), ["2001:db8::/48"]), + ], +) +async def test_ipprefix_subnets(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01, input, response): + ns1_id = ip_dataset_01["ns1"].id + query = await IPPrefixSubnetFetch.init(db=db, branch=default_branch, obj=input, namespace=ns1_id) + await query.execute(db=db) + subnets = query.get_subnets() + + assert sorted([str(subnet.prefix) for subnet in subnets]) == response + + +async def test_ipprefix_subnets_small_dataset( + db: InfrahubDatabase, + default_branch: Branch, + register_core_models_schema: SchemaBranch, + register_ipam_schema: SchemaBranch, +): + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=default_branch) + + ns1 = await Node.init(db=db, schema=InfrahubKind.NAMESPACE) + await ns1.new(db=db, name="ns1") + await ns1.save(db=db) + + net161 = await Node.init(db=db, schema=prefix_schema) + await net161.new(db=db, prefix="2001:db8::/48", ip_namespace=ns1) + await net161.save(db=db) + + query = await IPPrefixSubnetFetch.init( + db=db, branch=default_branch, obj=ipaddress.ip_network("2001:db8::/32"), namespace=ns1.id + ) + await query.execute(db=db) + subnets = query.get_subnets() + + assert sorted([str(subnet.prefix) for subnet in subnets]) == ["2001:db8::/48"] + + +@pytest.mark.parametrize( + "input,response", + [ + (ipaddress.ip_network("10.10.0.0/22"), "10.10.0.0/16"), + (ipaddress.ip_network("10.10.1.0/28"), "10.10.1.0/27"), + (ipaddress.ip_interface("10.10.1.10/27"), "10.10.1.0/27"), + ], +) +async def test_ipprefix_container(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01, input, response): + ns1_id = ip_dataset_01["ns1"].id + query = await IPPrefixContainerFetch.init(db=db, branch=default_branch, obj=input, namespace=ns1_id) + + await query.execute(db=db) + container = query.get_container() + assert container is not None + assert str(container.prefix) == response + + +async def test_ipaddress_is_within_ipprefix( + db: InfrahubDatabase, + default_branch: Branch, + default_ipnamespace: Node, + register_core_models_schema: SchemaBranch, + register_ipam_schema: SchemaBranch, +): + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=default_branch) + address_schema = registry.schema.get_node_schema(name="IpamIPAddress", branch=default_branch) + + prefix = await Node.init(db=db, schema=prefix_schema) + await prefix.new(db=db, prefix="2001:db8::/64", ip_namespace=default_ipnamespace) + await prefix.save(db=db) + + address = await Node.init(db=db, schema=address_schema) + await address.new(db=db, address="2001:db8::1/64", ip_prefix=prefix, ip_namespace=default_ipnamespace) + await address.save(db=db) + + unrelated = await Node.init(db=db, schema=address_schema) + await unrelated.new(db=db, address="192.0.2.1/32", ip_namespace=default_ipnamespace) + await unrelated.save(db=db) + + prefix_ip_network = ipaddress.ip_network(prefix.prefix.value) + address_ip_address = ipaddress.ip_interface(address.address.value) + + ip_addresses = await get_ip_addresses(db=db, branch=default_branch, ip_prefix=prefix_ip_network) + assert len(ip_addresses) == 1 + assert ip_addresses[0].address 
== address_ip_address + + ip_prefix = await get_ip_prefix_for_ip_address(db=db, branch=default_branch, ip_address=address_ip_address) + assert ip_prefix + assert ip_prefix.prefix == prefix_ip_network + + +async def test_ipprefix_utilization( + db: InfrahubDatabase, + default_branch: Branch, + register_core_models_schema: SchemaBranch, + register_ipam_schema: SchemaBranch, +): + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=default_branch) + address_schema = registry.schema.get_node_schema(name="IpamIPAddress", branch=default_branch) + + container = await Node.init(db=db, schema=prefix_schema) + await container.new(db=db, prefix="192.0.2.0/24", member_type="prefix") + await container.save(db=db) + + prefix = await Node.init(db=db, schema=prefix_schema) + await prefix.new(db=db, prefix="192.0.2.0/28", member_type="address", parent=container) + await prefix.save(db=db) + + prefix2 = await Node.init(db=db, schema=prefix_schema) + await prefix2.new(db=db, prefix="192.0.2.128/28", member_type="prefix", parent=container) + await prefix2.save(db=db) + + addresses = [] + for i in range(1, 8): + address = await Node.init(db=db, schema=address_schema) + await address.new(db=db, address=f"192.0.2.{i}/28", ip_prefix=prefix) + await address.save(db=db) + addresses.append(address) + + query = await IPPrefixUtilizationPrefix.init(db, branch=default_branch, ip_prefix=container) + await query.execute(db) + assert query.get_percentage() == 100 / 8 + assert await get_utilization(db=db, branch=default_branch, ip_prefix=container) == 100 / 8 + + query = await IPPrefixUtilizationPrefix.init(db, branch=default_branch, ip_prefix=prefix2) + await query.execute(db) + assert query.get_percentage() == 0 + assert await get_utilization(db=db, branch=default_branch, ip_prefix=prefix2) == 0 + + query = await IPPrefixUtilizationAddress.init(db, branch=default_branch, ip_prefix=prefix) + await query.execute(db) + assert query.get_percentage() == 50.0 + assert await get_utilization(db=db, branch=default_branch, ip_prefix=prefix) == 50.0 + + +async def test_query_by_parent_ids(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01): + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=default_branch) + reconciler = IpamReconciler(db=db, branch=default_branch) + ns1 = ip_dataset_01["ns1"] + net146 = ip_dataset_01["net146"] + nodes = await NodeManager.query( + db=db, branch=default_branch, schema="IpamIPPrefix", filters={"parent__ids": [net146.id]} + ) + assert len(nodes) == 1 + assert nodes[0].id == ip_dataset_01["net140"].id + + net150 = await Node.init(db=db, schema=prefix_schema) + await net150.new(db=db, prefix="10.10.0.0/15", ip_namespace=ns1, parent=net146) + await net150.save(db=db) + await reconciler.reconcile(ip_value=ipaddress.ip_network(net150.prefix.value), namespace=ns1) + + nodes = await NodeManager.query( + db=db, branch=default_branch, schema="IpamIPPrefix", filters={"parent__ids": [net146.id]} + ) + assert len(nodes) == 1 + assert nodes[0].id == net150.id + nodes = await NodeManager.query( + db=db, branch=default_branch, schema="IpamIPPrefix", filters={"parent__ids": [net150.id]} + ) + assert len(nodes) == 1 + assert nodes[0].id == ip_dataset_01["net140"].id diff --git a/backend/tests/unit/core/ipam/test_ipam_diff_parser.py b/backend/tests/unit/core/ipam/test_ipam_diff_parser.py new file mode 100644 index 0000000000..6ad98c4e7b --- /dev/null +++ b/backend/tests/unit/core/ipam/test_ipam_diff_parser.py @@ -0,0 +1,132 @@ +from infrahub.core import 
registry +from infrahub.core.branch import Branch +from infrahub.core.diff.branch_differ import BranchDiffer +from infrahub.core.diff.ipam_diff_parser import IpamDiffParser +from infrahub.core.initialization import create_branch +from infrahub.core.ipam.model import IpamNodeDetails +from infrahub.core.manager import NodeManager +from infrahub.core.node import Node +from infrahub.database import InfrahubDatabase + + +async def test_ipam_diff_parser_update(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01): + branch_2 = await create_branch(db=db, branch_name="branch_2") + + # updated prefix value + net146_branch = await NodeManager.get_one(db=db, branch=branch_2, id=ip_dataset_01["net146"].id) + net146_branch.prefix.value = "10.0.0.0/9" + await net146_branch.save(db=db) + # updated address + address11_branch = await NodeManager.get_one(db=db, branch=branch_2, id=ip_dataset_01["address11"].id) + address11_branch.address.value = "10.10.1.2/32" + await address11_branch.save(db=db) + + differ = await BranchDiffer.init(db=db, branch=branch_2) + parser = IpamDiffParser( + db=db, differ=differ, source_branch_name=branch_2.name, target_branch_name=default_branch.name + ) + ipam_diffs = await parser.get_changed_ipam_node_details() + + assert len(ipam_diffs) == 2 + assert ( + IpamNodeDetails( + node_uuid=net146_branch.id, + is_delete=False, + is_address=False, + namespace_id=ip_dataset_01["ns1"].id, + ip_value="10.0.0.0/9", + ) + in ipam_diffs + ) + assert ( + IpamNodeDetails( + node_uuid=address11_branch.id, + is_delete=False, + is_address=True, + namespace_id=ip_dataset_01["ns1"].id, + ip_value="10.10.1.2/32", + ) + in ipam_diffs + ) + + +async def test_ipam_diff_parser_create(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01): + branch_2 = await create_branch(db=db, branch_name="branch_2") + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=default_branch) + address_schema = registry.schema.get_node_schema(name="IpamIPAddress", branch=default_branch) + + # new prefix + new_prefix_branch = await Node.init(db=db, branch=branch_2, schema=prefix_schema) + await new_prefix_branch.new(db=db, prefix="10.10.3.0/26", ip_namespace=ip_dataset_01["ns2"].id) + await new_prefix_branch.save(db=db) + # new address + new_address_branch = await Node.init(db=db, branch=branch_2, schema=address_schema) + await new_address_branch.new(db=db, address="10.10.4.5/32", ip_namespace=ip_dataset_01["ns2"].id) + await new_address_branch.save(db=db) + + differ = await BranchDiffer.init(db=db, branch=branch_2) + parser = IpamDiffParser( + db=db, differ=differ, source_branch_name=branch_2.name, target_branch_name=default_branch.name + ) + ipam_diffs = await parser.get_changed_ipam_node_details() + + assert len(ipam_diffs) == 2 + assert ( + IpamNodeDetails( + node_uuid=new_prefix_branch.id, + is_delete=False, + is_address=False, + namespace_id=ip_dataset_01["ns2"].id, + ip_value="10.10.3.0/26", + ) + in ipam_diffs + ) + assert ( + IpamNodeDetails( + node_uuid=new_address_branch.id, + is_delete=False, + is_address=True, + namespace_id=ip_dataset_01["ns2"].id, + ip_value="10.10.4.5/32", + ) + in ipam_diffs + ) + + +async def test_ipam_diff_parser_delete(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01): + branch_2 = await create_branch(db=db, branch_name="branch_2") + + # delete prefix + net146_branch = await NodeManager.get_one(db=db, branch=branch_2, id=ip_dataset_01["net146"].id) + await net146_branch.delete(db=db) + # delete address + address11_branch = await 
NodeManager.get_one(db=db, branch=branch_2, id=ip_dataset_01["address11"].id) + await address11_branch.delete(db=db) + + differ = await BranchDiffer.init(db=db, branch=branch_2) + parser = IpamDiffParser( + db=db, differ=differ, source_branch_name=branch_2.name, target_branch_name=default_branch.name + ) + ipam_diffs = await parser.get_changed_ipam_node_details() + + assert len(ipam_diffs) == 2 + assert ( + IpamNodeDetails( + node_uuid=net146_branch.id, + is_delete=True, + is_address=False, + namespace_id=ip_dataset_01["ns1"].id, + ip_value=net146_branch.prefix.value, + ) + in ipam_diffs + ) + assert ( + IpamNodeDetails( + node_uuid=address11_branch.id, + is_delete=True, + is_address=True, + namespace_id=ip_dataset_01["ns1"].id, + ip_value=address11_branch.address.value, + ) + in ipam_diffs + ) diff --git a/backend/tests/unit/core/ipam/test_ipam_reconcile_query.py b/backend/tests/unit/core/ipam/test_ipam_reconcile_query.py new file mode 100644 index 0000000000..4570cff3cc --- /dev/null +++ b/backend/tests/unit/core/ipam/test_ipam_reconcile_query.py @@ -0,0 +1,271 @@ +import ipaddress + +from infrahub.core import registry +from infrahub.core.branch import Branch +from infrahub.core.initialization import create_ipam_namespace, get_default_ipnamespace +from infrahub.core.node import Node +from infrahub.core.query.ipam import IPPrefixReconcileQuery +from infrahub.database import InfrahubDatabase + + +async def test_ipprefix_reconcile_query_simple(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01): + await create_ipam_namespace(db=db) + default_ipnamespace = await get_default_ipnamespace(db=db) + registry.default_ipnamespace = default_ipnamespace.id + prefix_140 = ip_dataset_01["net140"] + namespace = ip_dataset_01["ns1"] + ip_network = ipaddress.ip_network(prefix_140.prefix.value) + + query = await IPPrefixReconcileQuery.init(db=db, branch=default_branch, ip_value=ip_network, namespace=namespace) + await query.execute(db=db) + + assert query.get_ip_node_uuid() == prefix_140.id + assert query.get_current_parent_uuid() == ip_dataset_01["net146"].id + assert set(query.get_current_children_uuids()) == { + ip_dataset_01["net142"].id, + ip_dataset_01["net144"].id, + ip_dataset_01["net145"].id, + ip_dataset_01["address10"].id, + } + assert query.get_calculated_parent_uuid() == ip_dataset_01["net146"].id + assert set(query.get_calculated_children_uuids()) == { + ip_dataset_01["net142"].id, + ip_dataset_01["net144"].id, + ip_dataset_01["net145"].id, + ip_dataset_01["address10"].id, + } + + +async def test_ipprefix_reconcile_query_for_new_prefix(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01): + ns1_id = ip_dataset_01["ns1"].id + query = await IPPrefixReconcileQuery.init( + db=db, branch=default_branch, ip_value=ipaddress.ip_network("10.10.0.0/22"), namespace=ns1_id + ) + await query.execute(db=db) + + assert query.get_ip_node_uuid() is None + assert query.get_current_parent_uuid() is None + assert query.get_current_children_uuids() == [] + assert query.get_calculated_parent_uuid() == ip_dataset_01["net140"].id + assert set(query.get_calculated_children_uuids()) == { + ip_dataset_01["net142"].id, + ip_dataset_01["net144"].id, + ip_dataset_01["net145"].id, + ip_dataset_01["address10"].id, + } + + +async def test_ipprefix_reconcile_query_for_new_address(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01): + ns1_id = ip_dataset_01["ns1"].id + query = await IPPrefixReconcileQuery.init( + db=db, branch=default_branch, ip_value=ipaddress.ip_interface("10.10.3.0"), 
namespace=ns1_id + ) + await query.execute(db=db) + + assert query.get_ip_node_uuid() is None + assert query.get_current_parent_uuid() is None + assert query.get_current_children_uuids() == [] + assert query.get_calculated_parent_uuid() == ip_dataset_01["net145"].id + assert query.get_calculated_children_uuids() == [] + + +async def test_ipprefix_reconcile_query_for_new_address_with_node( + db: InfrahubDatabase, default_branch: Branch, ip_dataset_01 +): + ns1_id = ip_dataset_01["ns1"].id + address_schema = registry.schema.get_node_schema(name="IpamIPAddress", branch=default_branch) + new_address = await Node.init(db=db, schema=address_schema) + await new_address.new(db=db, address="10.10.3.1", ip_namespace=ns1_id) + await new_address.save(db=db) + + query = await IPPrefixReconcileQuery.init( + db=db, branch=default_branch, ip_value=ipaddress.ip_interface("10.10.3.1"), namespace=ns1_id + ) + await query.execute(db=db) + + assert query.get_ip_node_uuid() == new_address.id + assert query.get_current_parent_uuid() is None + assert query.get_current_children_uuids() == [] + assert query.get_calculated_parent_uuid() == ip_dataset_01["net145"].id + assert query.get_calculated_children_uuids() == [] + + +async def test_ipprefix_reconcile_query_for_new_prefix_multiple_possible_parents( + db: InfrahubDatabase, default_branch: Branch, ip_dataset_01 +): + ns1_id = ip_dataset_01["ns1"].id + query = await IPPrefixReconcileQuery.init( + db=db, branch=default_branch, ip_value=ipaddress.ip_network("10.10.1.8/30"), namespace=ns1_id + ) + await query.execute(db=db) + + assert query.get_ip_node_uuid() is None + assert query.get_current_parent_uuid() is None + assert query.get_current_children_uuids() == [] + assert query.get_calculated_parent_uuid() == ip_dataset_01["net143"].id + assert query.get_calculated_children_uuids() == [] + + +async def test_ipprefix_reconcile_query_for_new_prefix_multiple_possible_children( + db: InfrahubDatabase, default_branch: Branch, ip_dataset_01 +): + ns1_id = ip_dataset_01["ns1"].id + query = await IPPrefixReconcileQuery.init( + db=db, branch=default_branch, ip_value=ipaddress.ip_network("10.8.0.0/14"), namespace=ns1_id + ) + await query.execute(db=db) + + assert query.get_ip_node_uuid() is None + assert query.get_current_parent_uuid() is None + assert query.get_current_children_uuids() == [] + assert query.get_calculated_parent_uuid() == ip_dataset_01["net146"].id + assert query.get_calculated_children_uuids() == [ip_dataset_01["net140"].id] + + +async def test_ipprefix_reconcile_query_for_new_address_multiple_possible_children( + db: InfrahubDatabase, default_branch: Branch, ip_dataset_01 +): + ns1_id = ip_dataset_01["ns1"].id + query = await IPPrefixReconcileQuery.init( + db=db, branch=default_branch, ip_value=ipaddress.ip_interface("10.8.0.0"), namespace=ns1_id + ) + await query.execute(db=db) + + assert query.get_ip_node_uuid() is None + assert query.get_current_parent_uuid() is None + assert query.get_current_children_uuids() == [] + assert query.get_calculated_parent_uuid() == ip_dataset_01["net146"].id + assert query.get_calculated_children_uuids() == [] + + +async def test_ipprefix_reconcile_query_for_new_prefix_exactly_one_possible_child_address( + db: InfrahubDatabase, default_branch: Branch, ip_dataset_01 +): + ns1_id = ip_dataset_01["ns1"].id + query = await IPPrefixReconcileQuery.init( + db=db, branch=default_branch, ip_value=ipaddress.ip_network("10.10.0.0/30"), namespace=ns1_id + ) + await query.execute(db=db) + + assert query.get_ip_node_uuid() is None + 
assert query.get_current_parent_uuid() is None
+    assert query.get_current_children_uuids() == []
+    assert query.get_calculated_parent_uuid() == ip_dataset_01["net140"].id
+    assert query.get_calculated_children_uuids() == [ip_dataset_01["address10"].id]
+
+
+async def test_ipprefix_reconcile_query_for_new_prefix_v6(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01):
+    ns1_id = ip_dataset_01["ns1"].id
+    query = await IPPrefixReconcileQuery.init(
+        db=db, branch=default_branch, ip_value=ipaddress.ip_network("2001:db8::/50"), namespace=ns1_id
+    )
+    await query.execute(db=db)
+
+    assert query.get_ip_node_uuid() is None
+    assert query.get_current_parent_uuid() is None
+    assert query.get_current_children_uuids() == []
+    assert query.get_calculated_parent_uuid() == ip_dataset_01["net161"].id
+    assert query.get_calculated_children_uuids() == [ip_dataset_01["net162"].id]
+
+
+async def test_ipprefix_reconcile_query_for_new_address_v6(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01):
+    ns1_id = ip_dataset_01["ns1"].id
+    query = await IPPrefixReconcileQuery.init(
+        db=db, branch=default_branch, ip_value=ipaddress.ip_interface("2001:db8::"), namespace=ns1_id
+    )
+    await query.execute(db=db)
+
+    assert query.get_ip_node_uuid() is None
+    assert query.get_current_parent_uuid() is None
+    assert query.get_current_children_uuids() == []
+    assert query.get_calculated_parent_uuid() == ip_dataset_01["net162"].id
+    assert query.get_calculated_children_uuids() == []
+
+
+async def test_ipprefix_reconcile_query_get_deleted_node_by_prefix(
+    db: InfrahubDatabase, default_branch: Branch, ip_dataset_01
+):
+    ns1_id = ip_dataset_01["ns1"].id
+    net140 = ip_dataset_01["net140"]
+    await net140.delete(db=db)
+
+    query = await IPPrefixReconcileQuery.init(
+        db=db, branch=default_branch, ip_value=ipaddress.ip_network(net140.prefix.value), namespace=ns1_id
+    )
+    await query.execute(db=db)
+
+    assert query.get_ip_node_uuid() is None
+    assert query.get_current_parent_uuid() is None
+    assert query.get_current_children_uuids() == []
+    assert query.get_calculated_parent_uuid() == ip_dataset_01["net146"].id
+    assert set(query.get_calculated_children_uuids()) == {
+        ip_dataset_01["net142"].id,
+        ip_dataset_01["net144"].id,
+        ip_dataset_01["net145"].id,
+        ip_dataset_01["address10"].id,
+    }
+
+
+async def test_ipprefix_reconcile_query_get_deleted_node_by_uuid(
+    db: InfrahubDatabase, default_branch: Branch, ip_dataset_01
+):
+    ns1_id = ip_dataset_01["ns1"].id
+    net140 = ip_dataset_01["net140"]
+    await net140.delete(db=db)
+
+    query = await IPPrefixReconcileQuery.init(
+        db=db,
+        branch=default_branch,
+        ip_value=ipaddress.ip_network(net140.prefix.value),
+        node_uuid=net140.id,
+        namespace=ns1_id,
+    )
+    await query.execute(db=db)
+
+    assert query.get_ip_node_uuid() == net140.id
+    assert query.get_current_parent_uuid() is None
+    assert query.get_current_children_uuids() == []
+    assert query.get_calculated_parent_uuid() == ip_dataset_01["net146"].id
+    assert set(query.get_calculated_children_uuids()) == {
+        ip_dataset_01["net142"].id,
+        ip_dataset_01["net144"].id,
+        ip_dataset_01["net145"].id,
+        ip_dataset_01["address10"].id,
+    }
+
+
+async def test_branch_updates_respected(db: InfrahubDatabase, branch: Branch, default_branch: Branch, ip_dataset_01):
+    ns1_id = ip_dataset_01["ns1"].id
+    prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=branch)
+    address_schema = registry.schema.get_node_schema(name="IpamIPAddress", branch=branch)
+    await branch.rebase(db=db)
+    net140 =
ip_dataset_01["net140"] + await net140.delete(db=db) + address10 = ip_dataset_01["address10"] + await address10.delete(db=db) + new_parent_branch = await Node.init(db=db, schema=prefix_schema, branch=branch) + await new_parent_branch.new(db=db, prefix="10.10.0.0/17", ip_namespace=ns1_id) + await new_parent_branch.save(db=db) + new_address_main = await Node.init(db=db, schema=address_schema, branch=default_branch) + await new_address_main.new(db=db, address="10.10.0.2", ip_namespace=ns1_id) + await new_address_main.save(db=db) + new_address_branch = await Node.init(db=db, schema=address_schema, branch=branch) + await new_address_branch.new(db=db, address="10.10.0.1", ip_namespace=ns1_id) + await new_address_branch.save(db=db) + + query = await IPPrefixReconcileQuery.init( + db=db, branch=branch, ip_value=ipaddress.ip_network("10.10.0.0/22"), namespace=ns1_id + ) + await query.execute(db=db) + + assert query.get_ip_node_uuid() is None + assert query.get_current_parent_uuid() is None + assert query.get_current_children_uuids() == [] + assert query.get_calculated_parent_uuid() == new_parent_branch.id + assert set(query.get_calculated_children_uuids()) == { + ip_dataset_01["net142"].id, + ip_dataset_01["net144"].id, + ip_dataset_01["net145"].id, + new_address_branch.id, + new_address_main.id, + } diff --git a/backend/tests/unit/core/ipam/test_ipam_reconciler.py b/backend/tests/unit/core/ipam/test_ipam_reconciler.py new file mode 100644 index 0000000000..53aec18fe2 --- /dev/null +++ b/backend/tests/unit/core/ipam/test_ipam_reconciler.py @@ -0,0 +1,253 @@ +import ipaddress + +import pytest + +from infrahub.core import registry +from infrahub.core.branch import Branch +from infrahub.core.initialization import create_ipam_namespace, get_default_ipnamespace +from infrahub.core.ipam.reconciler import IpamReconciler +from infrahub.core.manager import NodeManager +from infrahub.core.node import Node +from infrahub.database import InfrahubDatabase +from infrahub.exceptions import NodeNotFoundError + + +async def test_invalid_ip_node_raises_error(db: InfrahubDatabase, default_branch: Branch, register_core_models_schema): + await create_ipam_namespace(db=db) + default_ipnamespace = await get_default_ipnamespace(db=db) + + reconciler = IpamReconciler(db=db, branch=default_branch) + with pytest.raises(NodeNotFoundError): + await reconciler.reconcile(ip_value=ipaddress.ip_interface("192.168.1.1"), namespace=default_ipnamespace) + + +async def test_first_prefix( + db: InfrahubDatabase, default_branch: Branch, register_core_models_schema, register_ipam_schema +): + await create_ipam_namespace(db=db) + default_ipnamespace = await get_default_ipnamespace(db=db) + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=default_branch) + net161 = await Node.init(db=db, schema=prefix_schema) + await net161.new(db=db, prefix="2001:db8::/48", ip_namespace=default_ipnamespace) + await net161.save(db=db) + + reconciler = IpamReconciler(db=db, branch=default_branch) + await reconciler.reconcile(ip_value=ipaddress.ip_network(net161.prefix.value), namespace=default_ipnamespace) + + all_prefixes = await NodeManager.query(db=db, schema="BuiltinIPPrefix") + assert len(all_prefixes) == 1 + assert all_prefixes[0].id == net161.id + assert all_prefixes[0].is_top_level.value is True + + +async def test_ipprefix_reconciler_no_change(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01): + await create_ipam_namespace(db=db) + default_ipnamespace = await get_default_ipnamespace(db=db) + 
registry.default_ipnamespace = default_ipnamespace.id + prefix_140 = ip_dataset_01["net140"] + namespace = ip_dataset_01["ns1"] + ip_network = ipaddress.ip_network(prefix_140.prefix.value) + + reconciler = IpamReconciler(db=db, branch=default_branch) + await reconciler.reconcile(ip_value=ip_network, namespace=namespace) + + updated_prefix_140 = await NodeManager.get_one(db=db, branch=default_branch, id=prefix_140.id) + assert updated_prefix_140.is_top_level.value is False + prefix_140_parent_rels = await updated_prefix_140.parent.get_relationships(db=db) + assert len(prefix_140_parent_rels) == 1 + assert prefix_140_parent_rels[0].peer_id == ip_dataset_01["net146"].id + updated_prefix_146 = await NodeManager.get_one(db=db, branch=default_branch, id=ip_dataset_01["net146"].id) + prefix_146_children_rels = await updated_prefix_146.children.get_relationships(db=db) + assert len(prefix_146_children_rels) == 1 + assert prefix_146_children_rels[0].peer_id == updated_prefix_140.id + + +async def test_ipprefix_reconciler_new_prefix_update(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01): + await create_ipam_namespace(db=db) + default_ipnamespace = await get_default_ipnamespace(db=db) + registry.default_ipnamespace = default_ipnamespace.id + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=default_branch) + namespace = ip_dataset_01["ns1"] + new_prefix = await Node.init(db=db, schema=prefix_schema) + await new_prefix.new(db=db, prefix="10.10.0.0/18", ip_namespace=namespace, parent=ip_dataset_01["net146"]) + await new_prefix.save(db=db) + ip_network = ipaddress.ip_network(new_prefix.prefix.value) + + reconciler = IpamReconciler(db=db, branch=default_branch) + await reconciler.reconcile(ip_value=ip_network, namespace=namespace) + + # check new prefix parent + updated_prefix = await NodeManager.get_one(db=db, branch=default_branch, id=new_prefix.id) + assert updated_prefix.is_top_level.value is False + updated_prefix_parent_rels = await updated_prefix.parent.get_relationships(db=db) + assert len(updated_prefix_parent_rels) == 1 + assert updated_prefix_parent_rels[0].peer_id == ip_dataset_01["net140"].id + # check new prefix children + expected_child_prefix_ids = [ip_dataset_01["net142"].id, ip_dataset_01["net144"].id, ip_dataset_01["net145"].id] + expected_child_address_ids = [ip_dataset_01["address10"].id] + updated_prefix_child_rels = await updated_prefix.children.get_relationships(db=db) + assert len(updated_prefix_child_rels) == 3 + assert {rel.peer_id for rel in updated_prefix_child_rels} == set(expected_child_prefix_ids) + updated_address_child_rels = await updated_prefix.ip_addresses.get_relationships(db=db) + assert len(updated_address_child_rels) == 1 + assert {rel.peer_id for rel in updated_address_child_rels} == set(expected_child_address_ids) + # check new parent children + updated_prefix_140 = await NodeManager.get_one(db=db, branch=default_branch, id=ip_dataset_01["net140"].id) + prefix_140_children_rels = await updated_prefix_140.children.get_relationships(db=db) + assert len(prefix_140_children_rels) == 1 + assert prefix_140_children_rels[0].peer_id == updated_prefix.id + prefix_140_address_rels = await updated_prefix_140.ip_addresses.get_relationships(db=db) + assert len(prefix_140_address_rels) == 0 + # check new child prefixes parents + updated_children = await NodeManager.get_many(db=db, branch=default_branch, ids=expected_child_prefix_ids) + for child in updated_children.values(): + child_parent_rels = await 
child.parent.get_relationships(db=db) + assert len(child_parent_rels) == 1 + assert child_parent_rels[0].peer_id == updated_prefix.id + assert child.is_top_level.value is False + # check new child address parents + updated_children = await NodeManager.get_many(db=db, branch=default_branch, ids=expected_child_address_ids) + for child in updated_children.values(): + child_parent_rels = await child.ip_prefix.get_relationships(db=db) + assert len(child_parent_rels) == 1 + assert child_parent_rels[0].peer_id == updated_prefix.id + + +async def test_ipprefix_reconciler_new_address_update(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01): + await create_ipam_namespace(db=db) + default_ipnamespace = await get_default_ipnamespace(db=db) + registry.default_ipnamespace = default_ipnamespace.id + address_schema = registry.schema.get_node_schema(name="IpamIPAddress", branch=default_branch) + namespace = ip_dataset_01["ns1"] + new_address = await Node.init(db=db, schema=address_schema) + await new_address.new(db=db, address="10.10.3.1", ip_namespace=namespace) + await new_address.save(db=db) + ip_interface = ipaddress.ip_interface(new_address.address.value) + + reconciler = IpamReconciler(db=db, branch=default_branch) + await reconciler.reconcile(ip_value=ip_interface, namespace=namespace) + + # check address parent + updated_address = await NodeManager.get_one(db=db, branch=default_branch, id=new_address.id) + prefix_rels = await updated_address.ip_prefix.get_relationships(db=db) + assert len(prefix_rels) == 1 + assert prefix_rels[0].peer_id == ip_dataset_01["net145"].id + # check prefix ip addresses + updated_prefix = await NodeManager.get_one(db=db, branch=default_branch, id=ip_dataset_01["net145"].id) + ip_address_rels = await updated_prefix.ip_addresses.get_relationships(db=db) + assert len(ip_address_rels) == 1 + assert ip_address_rels[0].peer_id == new_address.id + + +async def test_ip_prefix_reconciler_delete_prefix(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01): + await create_ipam_namespace(db=db) + default_ipnamespace = await get_default_ipnamespace(db=db) + registry.default_ipnamespace = default_ipnamespace.id + namespace = ip_dataset_01["ns1"] + net_140_prefix = ip_dataset_01["net140"] + ip_network = ipaddress.ip_network(net_140_prefix.prefix.value) + + reconciler = IpamReconciler(db=db, branch=default_branch) + await reconciler.reconcile(ip_value=ip_network, node_uuid=net_140_prefix.id, namespace=namespace, is_delete=True) + + # check prefix is deleted + deleted = await NodeManager.get_one(db=db, branch=default_branch, id=net_140_prefix.id) + assert deleted is None + # check children of former parent + expected_child_prefix_ids = [ip_dataset_01["net142"].id, ip_dataset_01["net144"].id, ip_dataset_01["net145"].id] + expected_child_address_ids = [ip_dataset_01["address10"].id] + updated_parent = await NodeManager.get_one(db=db, branch=default_branch, id=ip_dataset_01["net146"].id) + updated_prefix_child_rels = await updated_parent.children.get_relationships(db=db) + assert len(updated_prefix_child_rels) == 3 + assert {rel.peer_id for rel in updated_prefix_child_rels} == set(expected_child_prefix_ids) + updated_address_child_rels = await updated_parent.ip_addresses.get_relationships(db=db) + assert len(updated_address_child_rels) == 1 + assert {rel.peer_id for rel in updated_address_child_rels} == set(expected_child_address_ids) + # check parent of former child prefixes + updated_children = await NodeManager.get_many(db=db, branch=default_branch, 
ids=expected_child_prefix_ids) + for child in updated_children.values(): + child_parent_rels = await child.parent.get_relationships(db=db) + assert len(child_parent_rels) == 1 + assert child_parent_rels[0].peer_id == updated_parent.id + assert child.is_top_level.value is False + # check parent of former child addresses + updated_children = await NodeManager.get_many(db=db, branch=default_branch, ids=expected_child_address_ids) + for child in updated_children.values(): + child_parent_rels = await child.ip_prefix.get_relationships(db=db) + assert len(child_parent_rels) == 1 + assert child_parent_rels[0].peer_id == updated_parent.id + + +async def test_ip_prefix_reconciler_delete_address(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01): + await create_ipam_namespace(db=db) + default_ipnamespace = await get_default_ipnamespace(db=db) + registry.default_ipnamespace = default_ipnamespace.id + namespace = ip_dataset_01["ns1"] + address10 = ip_dataset_01["address10"] + ip_network = ipaddress.ip_interface(address10.address.value) + + reconciler = IpamReconciler(db=db, branch=default_branch) + await reconciler.reconcile(ip_value=ip_network, node_uuid=address10.id, namespace=namespace, is_delete=True) + + # check prefix is deleted + deleted = await NodeManager.get_one(db=db, branch=default_branch, id=address10.id) + assert deleted is None + # check children of former parent + expected_child_prefix_ids = [ip_dataset_01["net142"].id, ip_dataset_01["net144"].id, ip_dataset_01["net145"].id] + updated_parent = await NodeManager.get_one(db=db, branch=default_branch, id=ip_dataset_01["net140"].id) + updated_prefix_child_rels = await updated_parent.children.get_relationships(db=db) + assert len(updated_prefix_child_rels) == 3 + assert {rel.peer_id for rel in updated_prefix_child_rels} == set(expected_child_prefix_ids) + updated_address_child_rels = await updated_parent.ip_addresses.get_relationships(db=db) + assert len(updated_address_child_rels) == 0 + + +async def test_ipprefix_reconciler_prefix_value_update(db: InfrahubDatabase, default_branch: Branch, ip_dataset_01): + await create_ipam_namespace(db=db) + default_ipnamespace = await get_default_ipnamespace(db=db) + registry.default_ipnamespace = default_ipnamespace.id + namespace = ip_dataset_01["ns1"] + net_146 = ip_dataset_01["net146"] + net_146.prefix.value = "10.10.0.0/18" + await net_146.save(db=db) + ip_network = ipaddress.ip_network(net_146.prefix.value) + + reconciler = IpamReconciler(db=db, branch=default_branch) + await reconciler.reconcile(ip_value=ip_network, namespace=namespace) + + # check new prefix parent + new_parent = await NodeManager.get_one(db=db, branch=default_branch, id=ip_dataset_01["net140"].id) + assert new_parent.is_top_level.value is True + new_parent_parent_rels = await new_parent.parent.get_relationships(db=db) + assert len(new_parent_parent_rels) == 0 + new_parent_child_rels = await new_parent.children.get_relationships(db=db) + assert len(new_parent_child_rels) == 1 + assert new_parent_child_rels[0].peer_id == net_146.id + # check updated prefix parent relationship + updated_prefix = await NodeManager.get_one(db=db, branch=default_branch, id=net_146.id) + assert updated_prefix.is_top_level.value is False + updated_prefix_parent_rels = await updated_prefix.parent.get_relationships(db=db) + assert len(updated_prefix_parent_rels) == 1 + assert updated_prefix_parent_rels[0].peer_id == ip_dataset_01["net140"].id + # check updated prefix child relationships + expected_child_prefix_ids = [ip_dataset_01["net142"].id, 
ip_dataset_01["net144"].id, ip_dataset_01["net145"].id] + expected_child_address_ids = [ip_dataset_01["address10"].id] + updated_prefix_child_rels = await updated_prefix.children.get_relationships(db=db) + assert len(updated_prefix_child_rels) == 3 + assert {rel.peer_id for rel in updated_prefix_child_rels} == set(expected_child_prefix_ids) + updated_address_child_rels = await updated_prefix.ip_addresses.get_relationships(db=db) + assert len(updated_address_child_rels) == 1 + assert {rel.peer_id for rel in updated_address_child_rels} == set(expected_child_address_ids) + # check new child prefixes parents + updated_children = await NodeManager.get_many(db=db, branch=default_branch, ids=expected_child_prefix_ids) + for child in updated_children.values(): + child_parent_rels = await child.parent.get_relationships(db=db) + assert len(child_parent_rels) == 1 + assert child_parent_rels[0].peer_id == updated_prefix.id + assert child.is_top_level.value is False + # check new child address parents + updated_children = await NodeManager.get_many(db=db, branch=default_branch, ids=expected_child_address_ids) + for child in updated_children.values(): + child_parent_rels = await child.ip_prefix.get_relationships(db=db) + assert len(child_parent_rels) == 1 + assert child_parent_rels[0].peer_id == updated_prefix.id diff --git a/backend/tests/unit/core/migrations/graph/test_003.py b/backend/tests/unit/core/migrations/graph/test_003.py new file mode 100644 index 0000000000..d8a0b118d9 --- /dev/null +++ b/backend/tests/unit/core/migrations/graph/test_003.py @@ -0,0 +1,64 @@ +import pytest + +from infrahub.core.migrations.graph.m003_relationship_parent_optional import Migration003, Migration003Query01 +from infrahub.core.node import Node +from infrahub.core.schema import SchemaRoot, internal_schema +from infrahub.core.schema_manager import SchemaBranch +from infrahub.core.utils import count_relationships +from infrahub.database import InfrahubDatabase + + +@pytest.fixture +async def migration_003_data(db: InfrahubDatabase, reset_registry, default_branch, delete_all_nodes_in_db): + # # load the internal schema from + schema = SchemaRoot(**internal_schema) + schema_branch = SchemaBranch(cache={}, name="default_branch") + schema_branch.load_schema(schema=schema) + schema_branch.process() + + node_schema = schema_branch.get(name="SchemaNode") + rel_schema = schema_branch.get(name="SchemaRelationship") + + node1 = await Node.init(db=db, schema=node_schema) + await node1.new(db=db, name="Node", namespace="Test") + await node1.save(db=db) + + rel1 = await Node.init(db=db, schema=rel_schema) + await rel1.new(db=db, name="rel1", kind="Parent", peer="CoreNode", optional=True, node=node1) + await rel1.save(db=db) + + rel2 = await Node.init(db=db, schema=rel_schema) + await rel2.new(db=db, name="rel2", kind="Parent", peer="CoreNode", optional=False, node=node1) + await rel2.save(db=db) + + +async def test_migration_003_query1( + db: InfrahubDatabase, reset_registry, default_branch, delete_all_nodes_in_db, migration_003_data +): + nbr_rels_before = await count_relationships(db=db) + query = await Migration003Query01.init(db=db) + await query.execute(db=db) + assert query.num_of_results == 1 + + query = await Migration003Query01.init(db=db) + await query.execute(db=db) + assert query.num_of_results == 0 + + nbr_rels_after = await count_relationships(db=db) + assert nbr_rels_after == nbr_rels_before + 1 + + +async def test_migration_003( + db: InfrahubDatabase, reset_registry, default_branch, delete_all_nodes_in_db, 
migration_003_data +): + nbr_rels_before = await count_relationships(db=db) + + migration = Migration003() + execution_result = await migration.execute(db=db) + assert not execution_result.errors + + validation_result = await migration.validate_migration(db=db) + assert not validation_result.errors + + nbr_rels_after = await count_relationships(db=db) + assert nbr_rels_after == nbr_rels_before + 1 diff --git a/backend/tests/unit/core/migrations/schema/test_attribute_name_update.py b/backend/tests/unit/core/migrations/schema/test_attribute_name_update.py index 37de2ab90d..a07d151c86 100644 --- a/backend/tests/unit/core/migrations/schema/test_attribute_name_update.py +++ b/backend/tests/unit/core/migrations/schema/test_attribute_name_update.py @@ -15,7 +15,9 @@ from infrahub.database import InfrahubDatabase -async def test_query_default_branch(db: InfrahubDatabase, default_branch: Branch, car_accord_main, car_camry_main): +async def test_query_default_branch( + db: InfrahubDatabase, default_branch: Branch, car_accord_main, car_camry_main, car_profile1_main +): schema = registry.schema.get_schema_branch(name=default_branch.name) prev_car_schema = schema.get(name="TestCar") prev_attr = prev_car_schema.get_attribute(name="color") @@ -37,22 +39,24 @@ async def test_query_default_branch(db: InfrahubDatabase, default_branch: Branch query = await AttributeNameUpdateMigrationQuery01.init(db=db, branch=default_branch, migration=migration) await query.execute(db=db) - assert query.get_nbr_migrations_executed() == 2 + assert query.get_nbr_migrations_executed() == 3 - # We expect 8 more relationships because there are 2 attributes with 4 relationships each - assert await count_relationships(db=db) == count_rels + 8 - assert await count_nodes(db=db, label="Attribute") == count_attr_node + 2 + # We expect 12 more relationships because there are 3 attributes with 4 relationships each + assert await count_relationships(db=db) == count_rels + 12 + assert await count_nodes(db=db, label="Attribute") == count_attr_node + 3 # Re-execute the query once to ensure that it won't change anything query = await AttributeNameUpdateMigrationQuery01.init(db=db, branch=default_branch, migration=migration) await query.execute(db=db) assert query.get_nbr_migrations_executed() == 0 - assert await count_nodes(db=db, label="Attribute") == count_attr_node + 2 - assert await count_relationships(db=db) == count_rels + 8 + assert await count_nodes(db=db, label="Attribute") == count_attr_node + 3 + assert await count_relationships(db=db) == count_rels + 12 -async def test_query_branch1(db: InfrahubDatabase, default_branch: Branch, car_accord_main, car_camry_main): +async def test_query_branch1( + db: InfrahubDatabase, default_branch: Branch, car_accord_main, car_camry_main, car_profile1_main +): branch1 = await create_branch(db=db, branch_name="branch1", isolated=True) schema = registry.schema.get_schema_branch(name=branch1.name) @@ -76,22 +80,24 @@ async def test_query_branch1(db: InfrahubDatabase, default_branch: Branch, car_a query = await AttributeNameUpdateMigrationQuery01.init(db=db, branch=branch1, migration=migration) await query.execute(db=db) - assert query.get_nbr_migrations_executed() == 2 + assert query.get_nbr_migrations_executed() == 3 - # We expect 16 more relationships because there are 2 attributes with 4 relationships each - assert await count_relationships(db=db) == count_rels + 16 - assert await count_nodes(db=db, label="Attribute") == count_attr_node + 2 + # We expect 24 more relationships because there are 3 
attributes with 8 relationships each + assert await count_relationships(db=db) == count_rels + 24 + assert await count_nodes(db=db, label="Attribute") == count_attr_node + 3 # Re-execute the query once to ensure that it won't change anything query = await AttributeNameUpdateMigrationQuery01.init(db=db, branch=branch1, migration=migration) await query.execute(db=db) assert query.get_nbr_migrations_executed() == 0 - assert await count_nodes(db=db, label="Attribute") == count_attr_node + 2 - assert await count_relationships(db=db) == count_rels + 16 + assert await count_nodes(db=db, label="Attribute") == count_attr_node + 3 + assert await count_relationships(db=db) == count_rels + 24 -async def test_migration(db: InfrahubDatabase, default_branch: Branch, car_accord_main, car_camry_main): +async def test_migration( + db: InfrahubDatabase, default_branch: Branch, car_accord_main, car_camry_main, car_profile1_main +): schema = registry.schema.get_schema_branch(name=default_branch.name) prev_car_schema = schema.get(name="TestCar") prev_attr = prev_car_schema.get_attribute(name="color") @@ -114,7 +120,7 @@ async def test_migration(db: InfrahubDatabase, default_branch: Branch, car_accor execution_result = await migration.execute(db=db, branch=default_branch) assert not execution_result.errors - assert execution_result.nbr_migrations_executed == 2 + assert execution_result.nbr_migrations_executed == 3 - assert await count_nodes(db=db, label="Attribute") == count_attr_node + 2 - assert await count_relationships(db=db) == count_rels + 8 + assert await count_nodes(db=db, label="Attribute") == count_attr_node + 3 + assert await count_relationships(db=db) == count_rels + 12 diff --git a/backend/tests/unit/core/schema_manager/__init__.py b/backend/tests/unit/core/schema_manager/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/tests/unit/core/schema_manager/conftest.py b/backend/tests/unit/core/schema_manager/conftest.py new file mode 100644 index 0000000000..39c19a4bd7 --- /dev/null +++ b/backend/tests/unit/core/schema_manager/conftest.py @@ -0,0 +1,273 @@ +import pytest + +from infrahub.core.constants import BranchSupportType, InfrahubKind + + +def _get_schema_by_kind(full_schema, kind): + for schema_dict in full_schema["nodes"] + full_schema["generics"]: + schema_kind = schema_dict["namespace"] + schema_dict["name"] + if schema_kind == kind: + return schema_dict + + +@pytest.fixture +def schema_all_in_one(): + FULL_SCHEMA = { + "nodes": [ + { + "name": "Criticality", + "namespace": "Builtin", + "inherit_from": ["InfraGenericInterface"], + "default_filter": "name__value", + "branch": BranchSupportType.AGNOSTIC.value, + "attributes": [ + {"name": "name", "kind": "Text", "unique": True}, + {"name": "level", "kind": "Number", "branch": BranchSupportType.AWARE.value}, + {"name": "color", "kind": "Text", "default_value": "#444444"}, + {"name": "description", "kind": "Text", "optional": True}, + ], + "relationships": [ + { + "name": "tags", + "peer": InfrahubKind.TAG, + "label": "Tags", + "optional": True, + "cardinality": "many", + }, + ], + }, + { + "name": "Tag", + "namespace": "Builtin", + "label": "Tag", + "default_filter": "name__value", + "attributes": [ + {"name": "name", "kind": "Text", "label": "Name", "unique": True}, + { + "name": "description", + "kind": "Text", + "label": "Description", + "optional": True, + "branch": BranchSupportType.AGNOSTIC.value, + }, + ], + }, + { + "name": "Status", + "namespace": "Builtin", + "branch": BranchSupportType.AGNOSTIC.value, + 
"attributes": [ + {"name": "name", "kind": "Text", "label": "Name", "unique": True}, + ], + }, + { + "name": "Badge", + "namespace": "Builtin", + "branch": BranchSupportType.LOCAL.value, + "attributes": [ + {"name": "name", "kind": "Text", "label": "Name", "unique": True}, + ], + }, + { + "name": "StandardGroup", + "namespace": "Core", + "inherit_from": [InfrahubKind.GENERICGROUP], + "attributes": [ + {"name": "name", "kind": "Text", "label": "Name", "unique": True}, + ], + }, + { + "name": "TinySchema", + "namespace": "Infra", + "attributes": [ + {"name": "name", "kind": "Text", "label": "Name", "unique": True}, + ], + }, + ], + "generics": [ + { + "name": "GenericInterface", + "namespace": "Infra", + "attributes": [ + {"name": "my_generic_name", "kind": "Text"}, + {"name": "mybool", "kind": "Boolean", "default_value": False}, + {"name": "local_attr", "kind": "Number", "branch": BranchSupportType.LOCAL.value}, + ], + "relationships": [ + { + "name": "primary_tag", + "peer": InfrahubKind.TAG, + "label": "Primary Tag", + "identifier": "primary_tag__criticality", + "optional": True, + "cardinality": "one", + "branch": BranchSupportType.AGNOSTIC.value, + }, + { + "name": "status", + "peer": "BuiltinStatus", + "optional": True, + "cardinality": "one", + }, + { + "name": "badges", + "peer": "BuiltinBadge", + "optional": True, + "cardinality": "many", + }, + ], + }, + { + "name": "Node", + "namespace": "Core", + "description": "Base Node in Infrahub.", + "label": "Node", + }, + { + "name": "Group", + "namespace": "Core", + "description": "Generic Group Object.", + "label": "Group", + "default_filter": "name__value", + "order_by": ["name__value"], + "display_labels": ["label__value"], + "branch": BranchSupportType.AWARE.value, + "attributes": [ + {"name": "name", "kind": "Text", "unique": True}, + {"name": "label", "kind": "Text", "optional": True}, + {"name": "description", "kind": "Text", "optional": True}, + ], + "relationships": [ + { + "name": "members", + "peer": "CoreNode", + "optional": True, + "identifier": "group_member", + "cardinality": "many", + }, + { + "name": "subscribers", + "peer": "CoreNode", + "optional": True, + "identifier": "group_subscriber", + "cardinality": "many", + }, + ], + }, + ], + } + + return FULL_SCHEMA + + +@pytest.fixture +def schema_criticality_tag(): + FULL_SCHEMA = { + "nodes": [ + { + "name": "Criticality", + "namespace": "Builtin", + "default_filter": "name__value", + "label": "Criticality", + "attributes": [ + {"name": "name", "kind": "Text", "label": "Name", "unique": True}, + {"name": "level", "kind": "Number", "label": "Level"}, + {"name": "color", "kind": "Text", "label": "Color", "default_value": "#444444"}, + {"name": "description", "kind": "Text", "label": "Description", "optional": True}, + ], + "relationships": [ + { + "name": "tags", + "peer": InfrahubKind.TAG, + "label": "Tags", + "optional": True, + "cardinality": "many", + }, + { + "name": "primary_tag", + "peer": InfrahubKind.TAG, + "label": "Primary Tag", + "identifier": "primary_tag__criticality", + "optional": True, + "cardinality": "one", + }, + ], + }, + { + "name": "Tag", + "namespace": "Builtin", + "label": "Tag", + "default_filter": "name__value", + "attributes": [ + {"name": "name", "kind": "Text", "label": "Name", "unique": True}, + {"name": "description", "kind": "Text", "label": "Description", "optional": True}, + ], + }, + ] + } + return FULL_SCHEMA + + +@pytest.fixture +def schema_parent_component() -> dict: + FULL_SCHEMA = { + "generics": [ + { + "name": "ComponentGenericOne", 
+ "namespace": "Test", + "attributes": [ + {"name": "smell", "kind": "Text", "label": "Name"}, + ], + "relationships": [], + }, + ], + "nodes": [ + { + "name": "ParentNodeOne", + "namespace": "Test", + "attributes": [ + {"name": "name", "kind": "Text", "label": "Name", "unique": True}, + {"name": "level", "kind": "Number", "label": "Level"}, + {"name": "color", "kind": "Text", "label": "Color", "default_value": "#444444"}, + {"name": "description", "kind": "Text", "label": "Description", "optional": True}, + ], + "relationships": [ + { + "name": "component_ones", + "peer": "TestComponentNodeOne", + "optional": True, + "cardinality": "many", + "kind": "Component", + }, + ], + }, + { + "name": "ParentNodeTwo", + "namespace": "Test", + "attributes": [ + {"name": "height", "kind": "Number", "label": "Height"}, + {"name": "width", "kind": "Number", "label": "Width"}, + ], + "relationships": [], + }, + { + "name": "ComponentNodeOne", + "namespace": "Test", + "inherit_from": ["TestComponentGenericOne"], + "attributes": [ + {"name": "name", "kind": "Text", "label": "Name", "unique": True}, + {"name": "description", "kind": "Text", "label": "Description", "optional": True}, + ], + "relationships": [ + { + "name": "parent_one", + "peer": "TestParentNodeOne", + "kind": "Parent", + "optional": False, + "cardinality": "one", + }, + ], + }, + ], + } + return FULL_SCHEMA diff --git a/backend/tests/unit/core/test_manager_schema.py b/backend/tests/unit/core/schema_manager/test_manager_schema.py similarity index 91% rename from backend/tests/unit/core/test_manager_schema.py rename to backend/tests/unit/core/schema_manager/test_manager_schema.py index 73a6dc1c47..324ced828f 100644 --- a/backend/tests/unit/core/test_manager_schema.py +++ b/backend/tests/unit/core/schema_manager/test_manager_schema.py @@ -8,10 +8,13 @@ from infrahub.core import registry from infrahub.core.branch import Branch from infrahub.core.constants import ( + AllowOverrideType, BranchSupportType, FilterSchemaKind, HashableModelState, InfrahubKind, + RelationshipDeleteBehavior, + RelationshipKind, SchemaPathType, ) from infrahub.core.schema import ( @@ -24,206 +27,7 @@ from infrahub.core.schema_manager import SchemaBranch, SchemaManager from infrahub.database import InfrahubDatabase - -# ----------------------------------------------------------------- -# SchemaBranch -# ----------------------------------------------------------------- -@pytest.fixture -def schema_all_in_one(): - FULL_SCHEMA = { - "nodes": [ - { - "name": "Criticality", - "namespace": "Builtin", - "inherit_from": ["InfraGenericInterface"], - "default_filter": "name__value", - "branch": BranchSupportType.AGNOSTIC.value, - "attributes": [ - {"name": "name", "kind": "Text", "unique": True}, - {"name": "level", "kind": "Number", "branch": BranchSupportType.AWARE.value}, - {"name": "color", "kind": "Text", "default_value": "#444444"}, - {"name": "description", "kind": "Text", "optional": True}, - ], - "relationships": [ - { - "name": "tags", - "peer": InfrahubKind.TAG, - "label": "Tags", - "optional": True, - "cardinality": "many", - }, - ], - }, - { - "name": "Tag", - "namespace": "Builtin", - "label": "Tag", - "default_filter": "name__value", - "attributes": [ - {"name": "name", "kind": "Text", "label": "Name", "unique": True}, - { - "name": "description", - "kind": "Text", - "label": "Description", - "optional": True, - "branch": BranchSupportType.AGNOSTIC.value, - }, - ], - }, - { - "name": "Status", - "namespace": "Builtin", - "branch": BranchSupportType.AGNOSTIC.value, - 
"attributes": [ - {"name": "name", "kind": "Text", "label": "Name", "unique": True}, - ], - }, - { - "name": "Badge", - "namespace": "Builtin", - "branch": BranchSupportType.LOCAL.value, - "attributes": [ - {"name": "name", "kind": "Text", "label": "Name", "unique": True}, - ], - }, - { - "name": "StandardGroup", - "namespace": "Core", - "inherit_from": [InfrahubKind.GENERICGROUP], - "attributes": [ - {"name": "name", "kind": "Text", "label": "Name", "unique": True}, - ], - }, - ], - "generics": [ - { - "name": "GenericInterface", - "namespace": "Infra", - "attributes": [ - {"name": "my_generic_name", "kind": "Text"}, - {"name": "mybool", "kind": "Boolean", "default_value": False}, - {"name": "local_attr", "kind": "Number", "branch": BranchSupportType.LOCAL.value}, - ], - "relationships": [ - { - "name": "primary_tag", - "peer": InfrahubKind.TAG, - "label": "Primary Tag", - "identifier": "primary_tag__criticality", - "optional": True, - "cardinality": "one", - "branch": BranchSupportType.AGNOSTIC.value, - }, - { - "name": "status", - "peer": "BuiltinStatus", - "optional": True, - "cardinality": "one", - }, - { - "name": "badges", - "peer": "BuiltinBadge", - "optional": True, - "cardinality": "many", - }, - ], - }, - { - "name": "Node", - "namespace": "Core", - "description": "Base Node in Infrahub.", - "label": "Node", - }, - { - "name": "Group", - "namespace": "Core", - "description": "Generic Group Object.", - "label": "Group", - "default_filter": "name__value", - "order_by": ["name__value"], - "display_labels": ["label__value"], - "branch": BranchSupportType.AWARE.value, - "attributes": [ - {"name": "name", "kind": "Text", "unique": True}, - {"name": "label", "kind": "Text", "optional": True}, - {"name": "description", "kind": "Text", "optional": True}, - ], - "relationships": [ - { - "name": "members", - "peer": "CoreNode", - "optional": True, - "identifier": "group_member", - "cardinality": "many", - }, - { - "name": "subscribers", - "peer": "CoreNode", - "optional": True, - "identifier": "group_subscriber", - "cardinality": "many", - }, - ], - }, - ], - } - - return FULL_SCHEMA - - -@pytest.fixture -def schema_criticality_tag(): - FULL_SCHEMA = { - "nodes": [ - { - "name": "Criticality", - "namespace": "Builtin", - "default_filter": "name__value", - "label": "Criticality", - "attributes": [ - {"name": "name", "kind": "Text", "label": "Name", "unique": True}, - {"name": "level", "kind": "Number", "label": "Level"}, - {"name": "color", "kind": "Text", "label": "Color", "default_value": "#444444"}, - {"name": "description", "kind": "Text", "label": "Description", "optional": True}, - ], - "relationships": [ - { - "name": "tags", - "peer": InfrahubKind.TAG, - "label": "Tags", - "optional": True, - "cardinality": "many", - }, - { - "name": "primary_tag", - "peer": InfrahubKind.TAG, - "label": "Primary Tag", - "identifier": "primary_tag__criticality", - "optional": True, - "cardinality": "one", - }, - ], - }, - { - "name": "Tag", - "namespace": "Builtin", - "label": "Tag", - "default_filter": "name__value", - "attributes": [ - {"name": "name", "kind": "Text", "label": "Name", "unique": True}, - {"name": "description", "kind": "Text", "label": "Description", "optional": True}, - ], - }, - ] - } - return FULL_SCHEMA - - -def _get_schema_by_kind(full_schema, kind): - for schema_dict in full_schema["nodes"] + full_schema["generics"]: - schema_kind = schema_dict["namespace"] + schema_dict["name"] - if schema_kind == kind: - return schema_dict +from .conftest import _get_schema_by_kind async def 
test_schema_branch_set(): @@ -289,11 +93,24 @@ async def test_schema_branch_process_inheritance(schema_all_in_one): criticality = schema.get(name="BuiltinCriticality") assert criticality.get_relationship(name="status") assert criticality.get_relationship(name="status").inherited - assert criticality.get_attribute(name="my_generic_name") assert criticality.get_attribute(name="my_generic_name").inherited - assert criticality.get_attribute(name="mybool") + assert criticality.get_attribute(name="mybool").inherited + assert criticality.get_attribute(name="color") + assert criticality.get_attribute(name="color").inherited is False + assert criticality.get_attribute(name="description") + assert criticality.get_attribute(name="description").inherited is False + + core_node = schema.get(name="CoreNode") + assert set(core_node.used_by) == { + "BuiltinCriticality", + "BuiltinTag", + "BuiltinStatus", + "BuiltinBadge", + "CoreStandardGroup", + "InfraTinySchema", + } async def test_schema_branch_process_branch_support(schema_all_in_one): @@ -346,6 +163,91 @@ async def test_schema_branch_add_groups(schema_all_in_one): assert std_group.get_relationship_or_none(name="subscriber_of_groups") is None +@pytest.mark.parametrize( + "schema_dict,expected_error", + [ + ( + { + "nodes": [ + { + "name": "Criticality", + "namespace": "Test", + "inherit_from": ["InfraGenericInterface"], + "default_filter": "name__value", + "branch": BranchSupportType.AGNOSTIC.value, + "attributes": [{"name": "name", "kind": "Text", "unique": True}], + }, + { + "name": "Status", + "namespace": "Test", + "branch": BranchSupportType.AGNOSTIC.value, + "attributes": [{"name": "name", "kind": "Text", "label": "Name", "unique": True}], + }, + ], + "generics": [ + { + "name": "GenericInterface", + "namespace": "Infra", + "attributes": [{"name": "name", "kind": "Text", "allow_override": AllowOverrideType.NONE}], + "relationships": [ + {"name": "status", "peer": "TestStatus", "optional": True, "cardinality": "one"} + ], + }, + ], + }, + "TestCriticality's attribute name inherited from InfraGenericInterface cannot be overriden", + ), + ( + { + "nodes": [ + { + "name": "Criticality", + "namespace": "Test", + "inherit_from": ["InfraGenericInterface"], + "default_filter": "name__value", + "branch": BranchSupportType.AGNOSTIC.value, + "relationships": [ + {"name": "status", "peer": "BuiltinStatus", "optional": True, "cardinality": "one"} + ], + }, + { + "name": "Status", + "namespace": "Test", + "branch": BranchSupportType.AGNOSTIC.value, + "attributes": [{"name": "name", "kind": "Text", "label": "Name", "unique": True}], + }, + ], + "generics": [ + { + "name": "GenericInterface", + "namespace": "Infra", + "attributes": [{"name": "name", "kind": "Text"}], + "relationships": [ + { + "name": "status", + "peer": "TestStatus", + "optional": True, + "cardinality": "one", + "allow_override": AllowOverrideType.NONE, + } + ], + }, + ], + }, + "TestCriticality's relationship status inherited from InfraGenericInterface cannot be overriden", + ), + ], +) +async def test_schema_protected_generics(schema_dict, expected_error): + schema = SchemaBranch(cache={}, name="test") + schema.load_schema(schema=SchemaRoot(**schema_dict)) + + with pytest.raises(ValueError) as exc: + schema.process_inheritance() + + assert str(exc.value) == expected_error + + async def test_schema_branch_generate_weight(schema_all_in_one): def extract_weights(schema: SchemaBranch): weights = [] @@ -392,6 +294,29 @@ def extract_weights(schema: SchemaBranch): assert len(in_second) == 1 and 
in_second[0].startswith(new_attr2_partial_id) +async def test_schema_branch_add_profile_schema(schema_all_in_one): + core_profile_schema = _get_schema_by_kind(core_models, kind="CoreProfile") + schema_all_in_one["generics"].append(core_profile_schema) + + schema = SchemaBranch(cache={}, name="test") + schema.load_schema(schema=SchemaRoot(**schema_all_in_one)) + schema.add_profile_schemas() + + profile = schema.get(name="ProfileBuiltinCriticality") + assert profile.get_attribute("profile_name").branch == BranchSupportType.AGNOSTIC.value + assert profile.get_attribute("profile_priority").branch == BranchSupportType.AGNOSTIC.value + assert set(profile.attribute_names) == {"profile_name", "profile_priority", "description"} + core_profile_schema = schema.get("CoreProfile") + assert set(core_profile_schema.used_by) == { + "ProfileBuiltinCriticality", + "ProfileBuiltinTag", + "ProfileBuiltinStatus", + "ProfileBuiltinBadge", + "ProfileCoreStandardGroup", + "ProfileInfraTinySchema", + } + + async def test_schema_branch_generate_identifiers(schema_all_in_one): schema = SchemaBranch(cache={}, name="test") schema.load_schema(schema=SchemaRoot(**schema_all_in_one)) @@ -1505,6 +1430,37 @@ async def test_schema_branch_process_filters( assert not DeepDiff(criticality_dict["filters"], expected_filters, ignore_order=True) +async def test_process_relationships_on_delete_defaults_set(schema_all_in_one): + schema_dict = _get_schema_by_kind(schema_all_in_one, "BuiltinCriticality") + schema_dict["relationships"][0]["kind"] = "Component" + schema = SchemaBranch(cache={}, name="test") + schema.load_schema(schema=SchemaRoot(**schema_all_in_one)) + + schema.process_relationships() + + processed_criticality = schema.get(name="BuiltinCriticality", duplicate=False) + processed_relationship = processed_criticality.get_relationship(name="tags") + assert processed_relationship.on_delete == RelationshipDeleteBehavior.CASCADE + for node_schema in schema.get_all(duplicate=False).values(): + for relationship in node_schema.relationships: + if relationship.kind != RelationshipKind.COMPONENT: + assert relationship.on_delete == RelationshipDeleteBehavior.NO_ACTION + + +async def test_process_relationships_component_can_be_overridden(schema_all_in_one): + schema_dict = _get_schema_by_kind(schema_all_in_one, "BuiltinCriticality") + schema_dict["relationships"][0]["kind"] = "Component" + schema_dict["relationships"][0]["on_delete"] = "no-action" + schema = SchemaBranch(cache={}, name="test") + schema.load_schema(schema=SchemaRoot(**schema_all_in_one)) + + schema.process_relationships() + + processed_criticality = schema.get(name="BuiltinCriticality", duplicate=False) + processed_relationship = processed_criticality.get_relationship(name="tags") + assert processed_relationship.on_delete == RelationshipDeleteBehavior.NO_ACTION + + async def test_schema_branch_copy( db: InfrahubDatabase, reset_registry, default_branch: Branch, register_internal_models_schema ): @@ -1610,8 +1566,10 @@ async def test_schema_branch_diff_attribute( ] } + schema = SchemaRoot(**FULL_SCHEMA) + schema.generate_uuid() schema_branch = SchemaBranch(cache={}, name="test") - schema_branch.load_schema(schema=SchemaRoot(**FULL_SCHEMA)) + schema_branch.load_schema(schema=schema) new_schema = schema_branch.duplicate() node = new_schema.get(name="BuiltinCriticality") @@ -1820,10 +1778,15 @@ async def test_schema_branch_diff_add_node_relationship( }, } + schema1 = SchemaRoot(**SCHEMA1) + schema1.generate_uuid() + schema2 = SchemaRoot(**SCHEMA2) + schema2.generate_uuid() 
+ schema_branch = SchemaBranch(cache={}, name="test") - schema_branch.load_schema(schema=SchemaRoot(**SCHEMA1)) + schema_branch.load_schema(schema=schema1) new_schema = schema_branch.duplicate() - new_schema.load_schema(schema=SchemaRoot(**SCHEMA2)) + new_schema.load_schema(schema=schema2) diff = schema_branch.diff(other=new_schema) assert diff.model_dump() == { @@ -1891,9 +1854,10 @@ async def test_schema_branch_validate_check_missing( }, ] } - + schema = SchemaRoot(**FULL_SCHEMA) + schema.generate_uuid() schema_branch = SchemaBranch(cache={}, name="test") - schema_branch.load_schema(schema=SchemaRoot(**FULL_SCHEMA)) + schema_branch.load_schema(schema=schema) new_schema = schema_branch.duplicate() node = new_schema.get(name="BuiltinCriticality") @@ -1978,10 +1942,15 @@ async def test_schema_branch_validate_add_node_relationships( }, } + schema1 = SchemaRoot(**SCHEMA1) + schema1.generate_uuid() + schema2 = SchemaRoot(**SCHEMA2) + schema2.generate_uuid() + schema_branch = SchemaBranch(cache={}, name="test") - schema_branch.load_schema(schema=SchemaRoot(**SCHEMA1)) + schema_branch.load_schema(schema=schema1) new_schema = schema_branch.duplicate() - new_schema.load_schema(schema=SchemaRoot(**SCHEMA2)) + new_schema.load_schema(schema=schema2) result = schema_branch.validate_update(other=new_schema) assert result.model_dump(exclude=["diff"]) == {"constraints": [], "errors": [], "migrations": []} @@ -2130,6 +2099,7 @@ async def test_load_schema_to_db_internal_models(db: InfrahubDatabase, default_b node_schema = registry.schema.get(name="SchemaNode", branch=default_branch) results = await SchemaManager.query(schema=node_schema, db=db) assert len(results) > 1 + assert all(r for r in results if r.namespace.value != "Profile") async def test_load_schema_to_db_core_models( @@ -2143,6 +2113,7 @@ async def test_load_schema_to_db_core_models( node_schema = registry.schema.get(name="SchemaGeneric") results = await SchemaManager.query(schema=node_schema, db=db) assert len(results) > 1 + assert all(r for r in results if r.namespace.value != "Profile") async def test_load_schema_to_db_simple_01( @@ -2247,7 +2218,8 @@ async def test_load_schema_from_db( schema2 = await registry.schema.load_schema_from_db(db=db, branch=default_branch.name) assert len(schema2.nodes) == 6 - assert len(schema2.generics) == 1 + assert set(schema2.generics.keys()) == {"CoreProfile", "TestGenericInterface"} + assert set(schema2.profiles.keys()) == {"ProfileBuiltinTag", "ProfileTestCriticality"} assert schema11.get(name="TestCriticality").get_hash() == schema2.get(name="TestCriticality").get_hash() assert schema11.get(name=InfrahubKind.TAG).get_hash() == schema2.get(name="BuiltinTag").get_hash() @@ -2320,7 +2292,8 @@ async def test_load_schema( schema2 = await registry.schema.load_schema(db=db, branch=default_branch.name) assert len(schema2.nodes) == 6 - assert len(schema2.generics) == 1 + assert set(schema2.generics.keys()) == {"CoreProfile", "TestGenericInterface"} + assert set(schema2.profiles.keys()) == {"ProfileBuiltinTag", "ProfileTestCriticality"} assert schema11.get(name="TestCriticality").get_hash() == schema2.get(name="TestCriticality").get_hash() assert schema11.get(name=InfrahubKind.TAG).get_hash() == schema2.get(name=InfrahubKind.TAG).get_hash() diff --git a/backend/tests/unit/core/schema_manager/test_parent_component_validation.py b/backend/tests/unit/core/schema_manager/test_parent_component_validation.py new file mode 100644 index 0000000000..cfd8831343 --- /dev/null +++ 
@@ -0,0 +1,144 @@
+import re
+
+import pytest
+
+from infrahub.core.schema import SchemaRoot
+from infrahub.core.schema_manager import SchemaBranch
+
+from .conftest import _get_schema_by_kind
+
+
+async def test_one_parent_relationship_allowed(schema_parent_component):
+    schema = SchemaBranch(cache={}, name="test")
+    schema.load_schema(schema=SchemaRoot(**schema_parent_component))
+
+    schema.validate_parent_component()
+
+
+async def test_many_parent_relationships_not_allowed(schema_parent_component):
+    schema_dict = _get_schema_by_kind(schema_parent_component, "TestComponentNodeOne")
+    schema_dict["relationships"].append(
+        {
+            "name": "parent_two",
+            "peer": "TestParentNodeOne",
+            "kind": "Parent",
+            "optional": False,
+            "cardinality": "one",
+        }
+    )
+
+    schema = SchemaBranch(cache={}, name="test")
+    schema.load_schema(schema=SchemaRoot(**schema_parent_component))
+
+    with pytest.raises(ValueError, match=r"Only one relationship of type parent is allowed") as exc:
+        schema.validate_parent_component()
+
+    err_msg = str(exc.value)
+    assert "parent_one" in err_msg
+    assert "parent_two" in err_msg
+
+
+async def test_parent_relationship_must_be_cardinality_one(schema_parent_component):
+    schema_dict = _get_schema_by_kind(schema_parent_component, "TestComponentNodeOne")
+    schema_dict["relationships"][0]["cardinality"] = "many"
+
+    schema = SchemaBranch(cache={}, name="test")
+    schema.load_schema(schema=SchemaRoot(**schema_parent_component))
+
+    with pytest.raises(
+        ValueError, match=r"TestComponentNodeOne.parent_one: Relationship of type parent must be cardinality=one"
+    ):
+        schema.validate_parent_component()
+
+
+async def test_parent_relationship_must_be_mandatory(schema_parent_component):
+    schema_dict = _get_schema_by_kind(schema_parent_component, "TestComponentNodeOne")
+    schema_dict["relationships"][0]["optional"] = True
+
+    schema = SchemaBranch(cache={}, name="test")
+    schema.load_schema(schema=SchemaRoot(**schema_parent_component))
+
+    with pytest.raises(
+        ValueError, match=r"TestComponentNodeOne.parent_one: Relationship of type parent must not be optional"
+    ):
+        schema.validate_parent_component()
+
+
+async def test_only_one_parent_relationship_when_inheriting_from_generic(schema_parent_component):
+    schema_dict = _get_schema_by_kind(schema_parent_component, "TestComponentGenericOne")
+    schema_dict["relationships"].append(
+        {
+            "name": "parent_two",
+            "peer": "TestParentNodeOne",
+            "kind": "Parent",
+            "optional": False,
+            "cardinality": "one",
+        }
+    )
+
+    schema = SchemaBranch(cache={}, name="test")
+    schema.load_schema(schema=SchemaRoot(**schema_parent_component))
+
+    with pytest.raises(ValueError, match=r"Only one relationship of type parent is allowed") as exc:
+        schema.validate_parent_component()
+
+    err_msg = str(exc.value)
+    assert "parent_one" in err_msg
+    assert "parent_two" in err_msg
+
+
+async def test_hierarchy_cannot_contain_loop(schema_parent_component):
+    schema_dict = _get_schema_by_kind(schema_parent_component, "TestComponentGenericOne")
+    schema_dict["relationships"].append(
+        {
+            "name": "bad_component",
+            "peer": "TestParentNodeOne",
+            "kind": "Component",
+            "optional": True,
+            "cardinality": "many",
+        }
+    )
+
+    schema = SchemaBranch(cache={}, name="test")
+    schema.load_schema(schema=SchemaRoot(**schema_parent_component))
+
+    with pytest.raises(
+        ValueError,
+        match=re.escape(
+            "Cycles exist among parents and components in schema: ['TestParentNodeOne --> TestComponentNodeOne --> 
TestParentNodeOne']" + ), + ): + schema.validate_parent_component() + + +async def test_hierarchy_cannot_contain_implied_loop(schema_parent_component): + component_schema_dict = _get_schema_by_kind(schema_parent_component, "TestComponentNodeOne") + component_schema_dict["relationships"].append( + { + "name": "component_two", + "peer": "TestParentNodeTwo", + "kind": "Component", + "optional": True, + "cardinality": "many", + } + ) + node_schema_dict = _get_schema_by_kind(schema_parent_component, "TestParentNodeOne") + node_schema_dict["relationships"].append( + { + "name": "parent_two", + "peer": "TestParentNodeTwo", + "kind": "Parent", + "optional": False, + "cardinality": "one", + } + ) + + schema = SchemaBranch(cache={}, name="test") + schema.load_schema(schema=SchemaRoot(**schema_parent_component)) + + with pytest.raises(ValueError) as exc: + schema.validate_parent_component() + + error_msg = str(exc.value) + assert "Cycles exist among parents and components in schema" in error_msg + assert "['TestParentNodeOne --> TestComponentNodeOne --> TestParentNodeTwo --> TestParentNodeOne']" in error_msg diff --git a/backend/tests/unit/core/test_attribute.py b/backend/tests/unit/core/test_attribute.py index 6dd8f20457..a29196edad 100644 --- a/backend/tests/unit/core/test_attribute.py +++ b/backend/tests/unit/core/test_attribute.py @@ -30,7 +30,7 @@ async def test_init( assert attr._source is None with pytest.raises(LookupError): - attr.source + _ = attr.source # initialize with a more complex data structure attr = String( @@ -53,9 +53,9 @@ async def test_validate_format_ipnetwork_and_iphost( # 1/ test with prefixlen IPHost(name="test", schema=schema, branch=default_branch, at=Timestamp(), node=None, data="192.0.2.0/32") - IPHost(name="test", schema=schema, branch=default_branch, at=Timestamp(), node=None, data="2001:db8::/32") - IPNetwork(name="test", schema=schema, branch=default_branch, at=Timestamp(), node=None, data="192.0.2.0/32") - IPNetwork(name="test", schema=schema, branch=default_branch, at=Timestamp(), node=None, data="2001:db8::/32") + IPHost(name="test", schema=schema, branch=default_branch, at=Timestamp(), node=None, data="2001:db8::/128") + IPNetwork(name="test", schema=schema, branch=default_branch, at=Timestamp(), node=None, data="192.0.2.0/27") + IPNetwork(name="test", schema=schema, branch=default_branch, at=Timestamp(), node=None, data="2001:db8::/64") # 2/ test with netmask IPHost(name="test", schema=schema, branch=default_branch, at=Timestamp(), node=None, data="192.0.2.1/255.255.255.0") @@ -102,63 +102,112 @@ async def test_validate_validate_url(db: InfrahubDatabase, default_branch: Branc async def test_validate_iphost_returns(db: InfrahubDatabase, default_branch: Branch, criticality_schema: NodeSchema): schema = criticality_schema.get_attribute("name") - test_ipv4 = IPHost( - name="test", schema=schema, branch=default_branch, at=Timestamp(), node=None, data="192.0.2.1/31" - ) + test_ipv4 = IPHost(name="test", schema=schema, branch=default_branch, at=Timestamp(), node=None, data="10.0.2.1/31") test_ipv6 = IPHost( name="test", schema=schema, branch=default_branch, at=Timestamp(), node=None, data="2001:db8::/32" ) - assert test_ipv4.value == "192.0.2.1/31" - assert test_ipv4.ip == "192.0.2.1" + assert test_ipv4.value == "10.0.2.1/31" + assert test_ipv4.ip == "10.0.2.1" assert test_ipv4.hostmask == "0.0.0.1" assert test_ipv4.netmask == "255.255.255.254" - assert test_ipv4.network == "192.0.2.0/31" - assert test_ipv4.prefixlen == "31" - assert test_ipv4.with_hostmask == 
"192.0.2.1/0.0.0.1" - assert test_ipv4.with_netmask == "192.0.2.1/255.255.255.254" + assert test_ipv4.network == "10.0.2.0/31" + assert test_ipv4.prefixlen == 31 + assert test_ipv4.with_hostmask == "10.0.2.1/0.0.0.1" + assert test_ipv4.with_netmask == "10.0.2.1/255.255.255.254" assert test_ipv4.version == 4 + assert test_ipv4.ip_integer == 167772673 + assert test_ipv4.ip_binary == "00001010000000000000001000000001" + assert len(test_ipv4.ip_binary) == 32 + assert test_ipv4.to_db() == { + "binary_address": "00001010000000000000001000000001", + "is_default": False, + "prefixlen": 31, + "value": "10.0.2.1/31", + "version": 4, + } assert test_ipv6.value == "2001:db8::/32" assert test_ipv6.ip == "2001:db8::" assert test_ipv6.hostmask == "::ffff:ffff:ffff:ffff:ffff:ffff" assert test_ipv6.netmask == "ffff:ffff::" assert test_ipv6.network == "2001:db8::/32" - assert test_ipv6.prefixlen == "32" + assert test_ipv6.prefixlen == 32 assert test_ipv6.with_hostmask == "2001:db8::/::ffff:ffff:ffff:ffff:ffff:ffff" assert test_ipv6.with_netmask == "2001:db8::/ffff:ffff::" assert test_ipv6.version == 6 + assert test_ipv6.ip_integer == 42540766411282592856903984951653826560 + assert ( + test_ipv6.ip_binary + == "00100000000000010000110110111000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + ) + assert len(test_ipv6.ip_binary) == 128 + + assert test_ipv6.to_db() == { + "binary_address": f"0010000000000001000011011011100000000000000000000000000000000000000000000000000000000000{'0' * 40}", + "is_default": False, + "prefixlen": 32, + "value": "2001:db8::/32", + "version": 6, + } async def test_validate_ipnetwork_returns(db: InfrahubDatabase, default_branch: Branch, criticality_schema: NodeSchema): schema = criticality_schema.get_attribute("name") test_ipv4 = IPNetwork( - name="test", schema=schema, branch=default_branch, at=Timestamp(), node=None, data="192.0.2.0/31" + name="test", schema=schema, branch=default_branch, at=Timestamp(), node=None, data="10.0.2.0/31" ) test_ipv6 = IPNetwork( name="test", schema=schema, branch=default_branch, at=Timestamp(), node=None, data="2001:db8::/32" ) - assert test_ipv4.value == "192.0.2.0/31" - assert test_ipv4.broadcast_address == "192.0.2.1" + assert test_ipv4.value == "10.0.2.0/31" + assert test_ipv4.broadcast_address == "10.0.2.1" assert test_ipv4.hostmask == "0.0.0.1" assert test_ipv4.netmask == "255.255.255.254" - assert test_ipv4.prefixlen == "31" + assert test_ipv4.prefixlen == 31 assert test_ipv4.num_addresses == 2 - assert test_ipv4.with_hostmask == "192.0.2.0/0.0.0.1" - assert test_ipv4.with_netmask == "192.0.2.0/255.255.255.254" + assert test_ipv4.with_hostmask == "10.0.2.0/0.0.0.1" + assert test_ipv4.with_netmask == "10.0.2.0/255.255.255.254" assert test_ipv4.version == 4 + assert test_ipv4.network_address_integer == 167772672 + assert test_ipv4.network_address_binary == "00001010000000000000001000000000" + assert len(test_ipv4.network_address_binary) == 32 + + assert test_ipv4.to_db() == { + "binary_address": "00001010000000000000001000000000", + "is_default": False, + # "num_addresses": 2, + "prefixlen": 31, + "value": "10.0.2.0/31", + "version": 4, + } assert test_ipv6.value == "2001:db8::/32" assert test_ipv6.broadcast_address == "2001:db8:ffff:ffff:ffff:ffff:ffff:ffff" assert test_ipv6.hostmask == "::ffff:ffff:ffff:ffff:ffff:ffff" assert test_ipv6.netmask == "ffff:ffff::" - assert test_ipv6.prefixlen == "32" + assert test_ipv6.prefixlen == 32 assert test_ipv6.num_addresses == 79228162514264337593543950336 
assert test_ipv6.with_hostmask == "2001:db8::/::ffff:ffff:ffff:ffff:ffff:ffff" assert test_ipv6.with_netmask == "2001:db8::/ffff:ffff::" assert test_ipv6.version == 6 + assert test_ipv6.network_address_integer == 42540766411282592856903984951653826560 + assert ( + test_ipv6.network_address_binary + == f"0010000000000001000011011011100000000000000000000000000000000000000000000000000000000000{'0' * 40}" + ) + assert len(test_ipv6.network_address_binary) == 128 + + assert test_ipv6.to_db() == { + "binary_address": f"0010000000000001000011011011100000000000000000000000000000000000000000000000000000000000{'0' * 40}", + "is_default": False, + # "num_addresses": 79228162514264337593543950336, + "prefixlen": 32, + "value": "2001:db8::/32", + "version": 6, + } async def test_validate_content_dropdown(db: InfrahubDatabase, default_branch: Branch, criticality_schema: NodeSchema): @@ -363,7 +412,7 @@ async def test_get_query_filter_any_node_property(db: InfrahubDatabase, default_ "-[:HAS_ATTRIBUTE]-", "(i:Attribute)", "-[:HAS_SOURCE]-", - "(ap:CoreNode { uuid: $attr_any_source_id })", + "(ap:Node { uuid: $attr_any_source_id })", ] assert [str(item) for item in filters] == expected_response assert params == {"attr_any_source_id": "abcdef"} diff --git a/backend/tests/unit/core/test_branch.py b/backend/tests/unit/core/test_branch.py index 5954f83ad9..1ac808abaa 100644 --- a/backend/tests/unit/core/test_branch.py +++ b/backend/tests/unit/core/test_branch.py @@ -1,12 +1,12 @@ import pytest from pydantic import ValidationError as PydanticValidationError -from infrahub.core import get_branch from infrahub.core.branch import Branch from infrahub.core.constants import GLOBAL_BRANCH_NAME from infrahub.core.initialization import create_branch from infrahub.core.manager import NodeManager from infrahub.core.node import Node +from infrahub.core.registry import registry from infrahub.core.timestamp import Timestamp from infrahub.database import InfrahubDatabase from infrahub.exceptions import BranchNotFoundError, ValidationError @@ -48,7 +48,7 @@ async def test_branch_name_validator(db: InfrahubDatabase): # Test DEL character with pytest.raises(ValidationError): - Branch(name="new\x7Fbranch") + Branch(name="new\x7fbranch") # Test space character with pytest.raises(ValidationError): @@ -116,7 +116,7 @@ async def test_branch_branched_form_format_validator(db: InfrahubDatabase): async def test_get_query_filter_relationships_main(db: InfrahubDatabase, base_dataset_02): - default_branch = await get_branch(branch="main", db=db) + default_branch = await registry.get_branch(branch="main", db=db) filters, params = default_branch.get_query_filter_relationships( rel_labels=["r1", "r2"], at=Timestamp().to_string(), include_outside_parentheses=False @@ -135,7 +135,7 @@ async def test_get_query_filter_relationships_main(db: InfrahubDatabase, base_da async def test_get_query_filter_relationships_branch1(db: InfrahubDatabase, base_dataset_02): - branch1 = await get_branch(branch="branch1", db=db) + branch1 = await registry.get_branch(branch="branch1", db=db) filters, params = branch1.get_query_filter_relationships( rel_labels=["r1", "r2"], at=Timestamp().to_string(), include_outside_parentheses=False @@ -150,7 +150,7 @@ async def test_get_query_filter_relationships_branch1(db: InfrahubDatabase, base async def test_get_branches_and_times_to_query_main(db: InfrahubDatabase, base_dataset_02): now = Timestamp("1s") - main_branch = await get_branch(branch="main", db=db) + main_branch = await registry.get_branch(branch="main", db=db) 
results = main_branch.get_branches_and_times_to_query(at=Timestamp()) assert Timestamp(results[frozenset(["main"])]) > now @@ -163,7 +163,7 @@ async def test_get_branches_and_times_to_query_main(db: InfrahubDatabase, base_d async def test_get_branches_and_times_to_query_branch1(db: InfrahubDatabase, base_dataset_02): now = Timestamp("1s") - branch1 = await get_branch(branch="branch1", db=db) + branch1 = await registry.get_branch(branch="branch1", db=db) t0 = Timestamp() results = branch1.get_branches_and_times_to_query(at=t0) @@ -184,7 +184,7 @@ async def test_get_branches_and_times_to_query_branch1(db: InfrahubDatabase, bas async def test_get_branches_and_times_to_query_global_main(db: InfrahubDatabase, base_dataset_02): now = Timestamp("1s") - main_branch = await get_branch(branch="main", db=db) + main_branch = await registry.get_branch(branch="main", db=db) results = main_branch.get_branches_and_times_to_query_global(at=Timestamp()) assert Timestamp(results[frozenset((GLOBAL_BRANCH_NAME, "main"))]) > now @@ -197,7 +197,7 @@ async def test_get_branches_and_times_to_query_global_main(db: InfrahubDatabase, async def test_get_branches_and_times_to_query_global_branch1(db: InfrahubDatabase, base_dataset_02): now = Timestamp("1s") - branch1 = await get_branch(branch="branch1", db=db) + branch1 = await registry.get_branch(branch="branch1", db=db) t0 = Timestamp() results = branch1.get_branches_and_times_to_query_global(at=t0) @@ -217,7 +217,7 @@ async def test_get_branches_and_times_to_query_global_branch1(db: InfrahubDataba async def test_get_branches_and_times_for_range_main(db: InfrahubDatabase, base_dataset_02): now = Timestamp() - main_branch = await get_branch(branch="main", db=db) + main_branch = await registry.get_branch(branch="main", db=db) start_times, end_times = main_branch.get_branches_and_times_for_range(start_time=Timestamp("1h"), end_time=now) assert list(start_times.keys()) == ["main"] @@ -236,7 +236,7 @@ async def test_get_branches_and_times_for_range_main(db: InfrahubDatabase, base_ async def test_get_branches_and_times_for_range_branch1(db: InfrahubDatabase, base_dataset_02): now = Timestamp() - branch1 = await get_branch(branch="branch1", db=db) + branch1 = await registry.get_branch(branch="branch1", db=db) start_times, end_times = branch1.get_branches_and_times_for_range(start_time=Timestamp("1h"), end_time=now) assert sorted(list(start_times.keys())) == ["branch1", "main"] @@ -259,7 +259,7 @@ async def test_get_branches_and_times_for_range_branch1(db: InfrahubDatabase, ba async def test_get_branches_and_times_for_range_branch2(db: InfrahubDatabase, base_dataset_03): now = Timestamp() - branch2 = await get_branch(branch="branch2", db=db) + branch2 = await registry.get_branch(branch="branch2", db=db) start_times, end_times = branch2.get_branches_and_times_for_range(start_time=Timestamp("1h"), end_time=now) assert sorted(list(start_times.keys())) == ["branch2", "main"] diff --git a/backend/tests/unit/core/test_branch_diff.py b/backend/tests/unit/core/test_branch_diff.py index 74e36b1ee0..95b692d868 100644 --- a/backend/tests/unit/core/test_branch_diff.py +++ b/backend/tests/unit/core/test_branch_diff.py @@ -5,7 +5,6 @@ from deepdiff import DeepDiff from pydantic.v1 import Field -from infrahub.core import get_branch, registry from infrahub.core.branch import Branch from infrahub.core.constants import DiffAction, InfrahubKind from infrahub.core.diff.branch_differ import BranchDiffer @@ -13,6 +12,7 @@ from infrahub.core.initialization import create_branch from 
infrahub.core.manager import NodeManager
 from infrahub.core.node import Node
+from infrahub.core.registry import registry
 from infrahub.core.schema import AttributeSchema
 from infrahub.core.timestamp import Timestamp
 from infrahub.database import InfrahubDatabase
@@ -880,7 +880,7 @@ async def test_diff_relationship_one_conflict(db: InfrahubDatabase, default_bran


 async def test_diff_relationship_many(db: InfrahubDatabase, default_branch: Branch, base_dataset_04):
-    branch1 = await get_branch(branch="branch1", db=db)
+    branch1 = await registry.get_branch(branch="branch1", db=db)

     diff = await BranchDiffer.init(branch=branch1, db=db)
     rels = await diff.get_relationships()
@@ -1030,11 +1030,11 @@ async def test_diff_schema_changes(

     diff = BranchDiffer(db=db, branch=branch2)
     summary = await diff.get_schema_summary()

-    assert list(summary.keys()) == ["branch2", "main"]
     assert set([element.kind for elements in summary.values() for element in elements]) == {
         "SchemaNode",
         "SchemaAttribute",
+        "SchemaRelationship",
     }


diff --git a/backend/tests/unit/core/test_enums.py b/backend/tests/unit/core/test_enums.py
new file mode 100644
index 0000000000..e41b19f3b8
--- /dev/null
+++ b/backend/tests/unit/core/test_enums.py
@@ -0,0 +1,12 @@
+import enum
+
+from infrahub.core.enums import generate_python_enum
+
+
+def test_generate_python_enum():
+    enum_class = generate_python_enum(name="Color", options=["blue", "red"])
+    assert isinstance(enum_class, enum.EnumType)
+
+    enum_blue = enum_class("blue")
+    assert isinstance(enum_blue, enum.Enum)
+    assert {enum.name for enum in enum_class} == {"RED", "BLUE"}
diff --git a/backend/tests/unit/core/test_manager_node.py b/backend/tests/unit/core/test_manager_node.py
index f34978bbcf..570ae22940 100644
--- a/backend/tests/unit/core/test_manager_node.py
+++ b/backend/tests/unit/core/test_manager_node.py
@@ -1,12 +1,12 @@
 import pytest
 from infrahub_sdk import UUIDT

-from infrahub.core import get_branch, registry
 from infrahub.core.branch import Branch
 from infrahub.core.initialization import create_branch
 from infrahub.core.manager import NodeManager, identify_node_class
 from infrahub.core.node import Node
 from infrahub.core.query.node import NodeToProcess
+from infrahub.core.registry import registry
 from infrahub.core.schema import NodeSchema
 from infrahub.core.timestamp import Timestamp
 from infrahub.database import InfrahubDatabase
@@ -220,6 +220,47 @@ async def test_get_many_prefetch(db: InfrahubDatabase, default_branch: Branch, p
     assert tags[1]._peer


+async def test_get_many_with_profile(db: InfrahubDatabase, default_branch: Branch, criticality_low, criticality_medium):
+    profile_schema = registry.schema.get("ProfileTestCriticality", branch=default_branch)
+    crit_profile_1 = await Node.init(db=db, schema=profile_schema)
+    await crit_profile_1.new(db=db, profile_name="crit_profile_1", color="green", profile_priority=1001)
+    await crit_profile_1.save(db=db)
+    crit_profile_2 = await Node.init(db=db, schema=profile_schema)
+    await crit_profile_2.new(db=db, profile_name="crit_profile_2", color="blue", profile_priority=1002)
+    await crit_profile_2.save(db=db)
+    crit_low = await NodeManager.get_one(db=db, id=criticality_low.id, branch=default_branch)
+    await crit_low.profiles.update(db=db, data=[crit_profile_1, crit_profile_2])
+    await crit_low.save(db=db)
+
+    node_map = await NodeManager.get_many(db=db, ids=[criticality_low.id, criticality_medium.id])
+    assert len(node_map) == 2
+    assert node_map[criticality_low.id].color.value == "green"
+    source = await 
node_map[criticality_low.id].color.get_source(db=db) + assert source.id == crit_profile_1.id + + +async def test_get_many_with_multiple_profiles_same_priority( + db: InfrahubDatabase, default_branch: Branch, criticality_low, criticality_medium +): + profile_schema = registry.schema.get("ProfileTestCriticality", branch=default_branch) + crit_profiles = [] + for i in range(1, 10): + crit_profile = await Node.init(db=db, schema=profile_schema) + await crit_profile.new(db=db, profile_name=f"crit_profile_{i}", color=f"green{i}", profile_priority=1000) + await crit_profile.save(db=db) + crit_profiles.append(crit_profile) + crit_low = await NodeManager.get_one(db=db, id=criticality_low.id, branch=default_branch) + await crit_low.profiles.update(db=db, data=crit_profiles) + await crit_low.save(db=db) + + lowest_uuid_profile = sorted(crit_profiles, key=lambda p: p.id)[0] + node_map = await NodeManager.get_many(db=db, ids=[criticality_low.id, criticality_medium.id]) + assert len(node_map) == 2 + assert node_map[criticality_low.id].color.value == lowest_uuid_profile.color.value + source = await node_map[criticality_low.id].color.get_source(db=db) + assert source.id == lowest_uuid_profile.id + + async def test_query_no_filter( db: InfrahubDatabase, default_branch: Branch, @@ -358,6 +399,7 @@ async def test_identify_node_class(db: InfrahubDatabase, car_schema, default_bra schema=car_schema, node_id=33, node_uuid=str(UUIDT()), + profile_uuids=[], updated_at=Timestamp().to_string(), branch=default_branch, labels=["Node", "TestCar"], @@ -427,7 +469,7 @@ async def test_get_one_local_attribute_with_branch(db: InfrahubDatabase, default async def test_get_one_global(db: InfrahubDatabase, default_branch: Branch, base_dataset_12): - branch1 = await get_branch(db=db, branch="branch1") + branch1 = await registry.get_branch(db=db, branch="branch1") obj1 = await NodeManager.get_one(db=db, id="p1", branch=branch1) @@ -447,7 +489,7 @@ async def test_get_one_global(db: InfrahubDatabase, default_branch: Branch, base async def test_get_one_global_isolated(db: InfrahubDatabase, default_branch: Branch, base_dataset_12): - branch1 = await get_branch(db=db, branch="branch1") + branch1 = await registry.get_branch(db=db, branch="branch1") branch1.is_isolated = True obj1 = await NodeManager.get_one(db=db, id="p1", branch=branch1) diff --git a/backend/tests/unit/core/test_node.py b/backend/tests/unit/core/test_node.py index b2b8a13e6d..7535842a16 100644 --- a/backend/tests/unit/core/test_node.py +++ b/backend/tests/unit/core/test_node.py @@ -64,9 +64,12 @@ async def test_node_init_schema_name(db: InfrahubDatabase, default_branch: Branc await obj.new(db=db, name="low", level=4) assert obj.name.value == "low" + assert obj.name.is_default is False assert obj.level.value == 4 + assert obj.level.is_default is False assert obj.description.value is None assert obj.color.value == "#444444" + assert obj.color.is_default is True async def test_node_init_id(db: InfrahubDatabase, default_branch: Branch, criticality_schema): @@ -605,11 +608,18 @@ async def test_node_update_local_attrs(db: InfrahubDatabase, default_branch: Bra await obj1.save(db=db) obj2 = await NodeManager.get_one(db=db, id=obj1.id) + + assert obj2.name.is_default is False + assert obj2.level.is_default is False + assert obj2.is_true.is_default is True + assert obj2.is_false.is_default is True + assert obj2.mylist.is_default is True + obj2.name.value = "high" obj2.level.value = 1 obj2.is_true.value = False obj2.is_false.value = True - obj2.mylist.value = ["one", "two"] + 
obj2.mylist.value = ["one", "two", "tree"] await obj2.save(db=db) nbr_rels = await count_relationships(db=db) @@ -619,7 +629,13 @@ async def test_node_update_local_attrs(db: InfrahubDatabase, default_branch: Bra assert obj3.level.value == 1 assert obj3.is_true.value is False assert obj3.is_false.value is True - assert obj3.mylist.value == ["one", "two"] + assert obj3.mylist.value == ["one", "two", "tree"] + + assert obj3.name.is_default is False + assert obj3.level.is_default is False + assert obj3.is_true.is_default is False + assert obj3.is_false.is_default is False + assert obj3.mylist.is_default is False # Validate that saving the object a second time doesn't do anything await obj2.save(db=db) diff --git a/backend/tests/unit/core/test_node_get_list_query.py b/backend/tests/unit/core/test_node_get_list_query.py new file mode 100644 index 0000000000..8421f621f1 --- /dev/null +++ b/backend/tests/unit/core/test_node_get_list_query.py @@ -0,0 +1,617 @@ +from random import randint + +from infrahub.core.branch import Branch +from infrahub.core.constants import ( + BranchSupportType, + InfrahubKind, + RelationshipCardinality, + RelationshipDirection, +) +from infrahub.core.manager import NodeManager +from infrahub.core.node import Node +from infrahub.core.query.node import NodeGetListQuery +from infrahub.core.registry import registry +from infrahub.core.schema.relationship_schema import RelationshipSchema +from infrahub.database import InfrahubDatabase + + +async def test_query_NodeGetListQuery( + db: InfrahubDatabase, person_john_main, person_jim_main, person_albert_main, person_alfred_main, branch: Branch +): + person_schema = registry.schema.get(name="TestPerson", branch=branch) + ids = [person_john_main.id, person_jim_main.id, person_albert_main.id, person_alfred_main.id] + query = await NodeGetListQuery.init(db=db, branch=branch, schema=person_schema) + await query.execute(db=db) + assert sorted(query.get_node_ids()) == sorted(ids) + + +async def test_query_NodeGetListQuery_filter_id( + db: InfrahubDatabase, person_john_main, person_jim_main, person_albert_main, person_alfred_main, branch: Branch +): + person_schema = registry.schema.get(name="TestPerson", branch=branch) + query = await NodeGetListQuery.init(db=db, branch=branch, schema=person_schema, filters={"id": person_john_main.id}) + await query.execute(db=db) + assert len(query.get_node_ids()) == 1 + + +async def test_query_NodeGetListQuery_filter_ids( + db: InfrahubDatabase, person_john_main, person_jim_main, person_albert_main, person_alfred_main, branch: Branch +): + person_schema = registry.schema.get(name="TestPerson", branch=branch) + person_schema.order_by = ["height__value"] + query = await NodeGetListQuery.init( + db=db, + branch=branch, + schema=person_schema, + filters={"ids": [person_jim_main.id, person_john_main.id, person_albert_main.id]}, + ) + await query.execute(db=db) + assert query.get_node_ids() == [person_albert_main.id, person_jim_main.id, person_john_main.id] + + +async def test_query_NodeGetListQuery_filter_height( + db: InfrahubDatabase, person_john_main, person_jim_main, person_albert_main, person_alfred_main, branch: Branch +): + schema = registry.schema.get(name="TestPerson", branch=branch) + query = await NodeGetListQuery.init(db=db, branch=branch, schema=schema, filters={"height__value": 160}) + await query.execute(db=db) + assert len(query.get_node_ids()) == 2 + + +async def test_query_NodeGetListQuery_filter_owner( + db: InfrahubDatabase, default_branch: Branch, person_john_main: Node, first_account: 
Node, branch: Branch +): + person = await Node.init(db=db, schema="TestPerson", branch=branch) + await person.new(db=db, name={"value": "Diane", "owner": first_account.id}, height=165) + await person.save(db=db) + + schema = registry.schema.get(name="TestPerson", branch=branch) + query = await NodeGetListQuery.init( + db=db, branch=branch, schema=schema, filters={"any__owner__id": first_account.id} + ) + await query.execute(db=db) + assert len(query.get_node_ids()) == 1 + + schema = registry.schema.get(name="TestPerson", branch=branch) + query = await NodeGetListQuery.init( + db=db, branch=branch, schema=schema, filters={"name__owner__id": first_account.id} + ) + await query.execute(db=db) + assert len(query.get_node_ids()) == 1 + + schema = registry.schema.get(name="TestPerson", branch=branch) + query = await NodeGetListQuery.init( + db=db, branch=branch, schema=schema, filters={"height__owner__id": first_account.id} + ) + await query.execute(db=db) + assert len(query.get_node_ids()) == 0 + + +async def test_query_NodeGetListQuery_filter_boolean( + db: InfrahubDatabase, car_accord_main, car_camry_main, car_volt_main, car_yaris_main, branch: Branch +): + schema = registry.schema.get(name="TestCar", branch=branch) + query = await NodeGetListQuery.init(db=db, branch=branch, schema=schema, filters={"is_electric__value": False}) + await query.execute(db=db) + assert len(query.get_node_ids()) == 3 + + +async def test_query_NodeGetListQuery_deleted_node( + db: InfrahubDatabase, car_accord_main, car_camry_main: Node, car_volt_main, car_yaris_main, branch: Branch +): + node_to_delete = await NodeManager.get_one(id=car_camry_main.id, db=db, branch=branch) + await node_to_delete.delete(db=db) + + schema = registry.schema.get(name="TestCar", branch=branch) + schema.order_by = ["owner__name__value"] + + query = await NodeGetListQuery.init(db=db, branch=branch, schema=schema, filters={"is_electric__value": False}) + await query.execute(db=db) + assert len(query.get_node_ids()) == 2 + + +async def test_query_NodeGetListQuery_filter_relationship( + db: InfrahubDatabase, car_accord_main, car_camry_main, car_volt_main, car_yaris_main, branch: Branch +): + schema = registry.schema.get(name="TestCar", branch=branch) + query = await NodeGetListQuery.init(db=db, branch=branch, schema=schema, filters={"owner__name__value": "John"}) + await query.execute(db=db) + assert len(query.get_node_ids()) == 2 + + +async def test_query_NodeGetListQuery_filter_relationship_ids( + db: InfrahubDatabase, + person_john_main, + car_accord_main, + car_camry_main, + car_volt_main, + car_yaris_main, + branch: Branch, +): + schema = registry.schema.get(name="TestCar", branch=branch) + query = await NodeGetListQuery.init( + db=db, branch=branch, schema=schema, filters={"owner__ids": [person_john_main.id]} + ) + await query.execute(db=db) + assert len(query.get_node_ids()) == 2 + + +async def test_query_NodeGetListQuery_filter_relationship_ids_with_update( + db: InfrahubDatabase, + person_john_main, + person_jane_main, + car_accord_main, + car_camry_main, + car_volt_main, + car_yaris_main, + branch: Branch, +): + schema = registry.schema.get(name="TestCar", branch=branch) + car_accord = await NodeManager.get_one(db=db, branch=branch, id=car_accord_main.id) + await car_accord.owner.update(db=db, data=person_jane_main) + await car_accord.save(db=db) + + query = await NodeGetListQuery.init( + db=db, branch=branch, schema=schema, filters={"owner__ids": [person_john_main.id]} + ) + await query.execute(db=db) + node_ids = 
query.get_node_ids() + assert node_ids == [car_volt_main.id] + + +async def test_query_NodeGetListQuery_filter_and_sort( + db: InfrahubDatabase, car_accord_main, car_camry_main, car_volt_main, car_yaris_main, branch: Branch +): + schema = registry.schema.get(name="TestCar", branch=branch) + schema.order_by = ["owner__name__value", "is_electric__value"] + + query = await NodeGetListQuery.init( + db=db, + branch=branch, + schema=schema, + filters={"owner__name__value": "John", "is_electric__value": False}, + ) + await query.execute(db=db) + assert len(query.get_node_ids()) == 1 + + +async def test_query_NodeGetListQuery_filter_and_sort_with_revision( + db: InfrahubDatabase, car_accord_main, car_camry_main, car_volt_main, car_yaris_main, branch: Branch +): + node = await NodeManager.get_one(id=car_volt_main.id, db=db, branch=branch) + node.is_electric.value = False + await node.save(db=db) + + schema = registry.schema.get(name="TestCar", branch=branch) + schema.order_by = ["owner__name__value", "is_electric__value"] + + query = await NodeGetListQuery.init( + db=db, + branch=branch, + schema=schema, + filters={"owner__name__value": "John", "is_electric__value": False}, + ) + await query.execute(db=db) + assert len(query.get_node_ids()) == 2 + + +async def test_query_NodeGetListQuery_with_generics(db: InfrahubDatabase, group_group1_main, branch: Branch): + schema = registry.schema.get(name=InfrahubKind.GENERICGROUP, branch=branch) + query = await NodeGetListQuery.init( + db=db, + branch=branch, + schema=schema, + ) + await query.execute(db=db) + assert query.get_node_ids() == [group_group1_main.id] + + +async def test_query_NodeGetListQuery_order_by( + db: InfrahubDatabase, car_accord_main, car_camry_main, car_volt_main, car_yaris_main, branch: Branch +): + schema = registry.schema.get(name="TestCar", branch=branch) + schema.order_by = ["owner__name__value", "name__value"] + + query = await NodeGetListQuery.init( + db=db, + branch=branch, + schema=schema, + ) + await query.execute(db=db) + assert query.get_node_ids() == [car_camry_main.id, car_yaris_main.id, car_accord_main.id, car_volt_main.id] + + +async def test_query_NodeGetListQuery_order_by_optional_relationship_nulls( + db: InfrahubDatabase, branch: Branch, car_accord_main, car_camry_main, car_volt_main, car_yaris_main +): + schema = registry.schema.get(name="TestCar", branch=branch, duplicate=False) + schema.relationships.append( + RelationshipSchema( + name="other_car", + peer="TestCar", + cardinality=RelationshipCardinality.ONE, + identifier="testcar__other_car", + branch=BranchSupportType.AWARE, + direction=RelationshipDirection.OUTBOUND, + ) + ) + schema.order_by = ["other_car__name__value"] + + accord = await NodeManager.get_one(db=db, branch=branch, id=car_accord_main.id) + await accord.other_car.update(db=db, data=car_camry_main) + await accord.save(db=db) + volt = await NodeManager.get_one(db=db, branch=branch, id=car_volt_main.id) + await volt.other_car.update(db=db, data=car_yaris_main) + await volt.save(db=db) + + query = await NodeGetListQuery.init( + db=db, + branch=branch, + schema=schema, + ) + await query.execute(db=db) + + retrieved_node_ids = query.get_node_ids() + assert len(retrieved_node_ids) == 4 + assert retrieved_node_ids[0] == car_accord_main.id + assert retrieved_node_ids[1] == car_volt_main.id + # null ones can be any order + assert set(retrieved_node_ids[2:]) == {car_camry_main.id, car_yaris_main.id} + + +async def test_query_NodeGetListQuery_order_by_relationship_value_with_update( + db: InfrahubDatabase, + 
person_john_main, + person_jane_main, + car_accord_main, + car_camry_main, + car_volt_main, + car_yaris_main, + branch: Branch, +): + schema = registry.schema.get(name="TestCar", branch=branch, duplicate=False) + schema.relationships.append( + RelationshipSchema( + name="other_car", + peer="TestCar", + cardinality=RelationshipCardinality.ONE, + identifier="testcar__other_car", + branch=BranchSupportType.AWARE, + ) + ) + schema.order_by = ["other_car__name__value"] + + accord = await NodeManager.get_one(db=db, branch=branch, id=car_accord_main.id) + await accord.other_car.update(db=db, data=car_camry_main) + await accord.save(db=db) + # update related value to ZZZ + camry = await NodeManager.get_one(db=db, branch=branch, id=car_camry_main.id) + camry.name.value = "zzz" + await camry.save(db=db) + volt = await NodeManager.get_one(db=db, branch=branch, id=car_volt_main.id) + await volt.other_car.update(db=db, data=car_yaris_main) + await volt.save(db=db) + # update related value to AAA + yaris = await NodeManager.get_one(db=db, branch=branch, id=car_yaris_main.id) + yaris.name.value = "aaa" + await yaris.save(db=db) + # delete relationship, so related value is effectively null + volt = await NodeManager.get_one(db=db, branch=branch, id=car_volt_main.id) + await volt.other_car.update(db=db, data=None) + await volt.save(db=db) + + query = await NodeGetListQuery.init( + db=db, + branch=branch, + schema=schema, + ) + await query.execute(db=db) + + retrieved_node_ids = query.get_node_ids() + assert len(retrieved_node_ids) == 4 + assert retrieved_node_ids[0] == car_camry_main.id # accord + assert retrieved_node_ids[1] == car_accord_main.id # zzz + # null ones can be any order + assert set(retrieved_node_ids[2:]) == {car_yaris_main.id, car_volt_main.id} + + +async def test_query_NodeGetListQuery_filter_with_profiles( + db: InfrahubDatabase, person_john_main, person_jim_main, person_albert_main, person_alfred_main, branch: Branch +): + profile_schema = registry.schema.get("ProfileTestPerson", branch=branch, duplicate=False) + person_profile = await Node.init(db=db, schema=profile_schema) + await person_profile.new(db=db, profile_name="person_profile_1", height=172, profile_priority=1001) + await person_profile.save(db=db) + person_profile_2 = await Node.init(db=db, schema=profile_schema) + await person_profile_2.new(db=db, profile_name="person_profile_2", height=177, profile_priority=1002) + await person_profile_2.save(db=db) + + person_schema = registry.schema.get("TestPerson", branch=branch, duplicate=False) + person_schema.order_by = ["height__value", "name__value"] + person = await NodeManager.get_one(db=db, id=person_john_main.id, branch=branch) + person.height.value = None + person.height.is_default = True + await person.profiles.update(data=[person_profile, person_profile_2], db=db) + await person.save(db=db) + person = await NodeManager.get_one(db=db, id=person_jim_main.id, branch=branch) + person.height.value = None + person.height.is_default = True + await person.profiles.update(data=[person_profile], db=db) + await person.save(db=db) + person = await NodeManager.get_one(db=db, id=person_albert_main.id, branch=branch) + await person.profiles.update(data=[person_profile_2], db=db) + await person.save(db=db) + person = await NodeManager.get_one(db=db, id=person_alfred_main.id, branch=branch) + person.height.value = 172 + await person.save(db=db) + + person_schema = registry.schema.get(name="TestPerson", branch=branch) + query = await NodeGetListQuery.init(db=db, branch=branch, 
schema=person_schema, filters={"height__value": 172}) + + await query.execute(db=db) + + assert query.get_node_ids() == [person_alfred_main.id, person_jim_main.id, person_john_main.id] + + +async def test_query_NodeGetListQuery_order_with_profiles( + db: InfrahubDatabase, car_camry_main, car_accord_main, car_volt_main, branch: Branch +): + profile_schema = registry.schema.get("ProfileTestCar", branch=branch, duplicate=False) + car_profile_black = await Node.init(db=db, schema=profile_schema) + await car_profile_black.new(db=db, profile_name="car_profile_black", color="#000000", profile_priority=1001) + await car_profile_black.save(db=db) + car_profile_white = await Node.init(db=db, schema=profile_schema) + await car_profile_white.new(db=db, profile_name="car_profile_white", color="#ffffff", profile_priority=1002) + await car_profile_white.save(db=db) + + car_schema = registry.schema.get("TestCar", branch=branch, duplicate=False) + car_schema.order_by = ["color__value", "name__value"] + car = await NodeManager.get_one(db=db, id=car_camry_main.id, branch=branch) + await car.profiles.update(data=[car_profile_white], db=db) + await car.save(db=db) + car = await NodeManager.get_one(db=db, id=car_accord_main.id, branch=branch) + await car.profiles.update(data=[car_profile_black], db=db) + await car.save(db=db) + car = await NodeManager.get_one(db=db, id=car_volt_main.id, branch=branch) + await car.profiles.update(data=[car_profile_black, car_profile_white], db=db) + await car.save(db=db) + + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema) + + await query.execute(db=db) + + assert query.get_node_ids() == [car_accord_main.id, car_volt_main.id, car_camry_main.id] + + +async def test_query_NodeGetListQuery_with_profiles_deleted( + db: InfrahubDatabase, + car_camry_main, + car_accord_main, + car_volt_main, + branch: Branch, + default_branch: Branch, +): + profile_schema = registry.schema.get("ProfileTestCar", branch=branch, duplicate=False) + car_profile_black = await Node.init(db=db, schema=profile_schema) + await car_profile_black.new(db=db, profile_name="car_profile_black", color="#000000", profile_priority=1001) + await car_profile_black.save(db=db) + car_profile_white = await Node.init(db=db, schema=profile_schema) + await car_profile_white.new(db=db, profile_name="car_profile_white", color="#ffffff", profile_priority=1002) + await car_profile_white.save(db=db) + await branch.rebase(db=db) + + car_schema = registry.schema.get("TestCar", branch=branch, duplicate=False) + car = await NodeManager.get_one(db=db, id=car_camry_main.id, branch=default_branch) + await car.profiles.update(data=[car_profile_white], db=db) + await car.save(db=db) + car = await NodeManager.get_one(db=db, id=car_accord_main.id, branch=default_branch) + await car.profiles.update(data=[car_profile_black], db=db) + await car.save(db=db) + car = await NodeManager.get_one(db=db, id=car_volt_main.id, branch=default_branch) + await car.profiles.update(data=[car_profile_black, car_profile_white], db=db) + await car.save(db=db) + + car_profile_white_branch = await NodeManager.get_one(db=db, id=car_profile_white.id, branch=branch) + await car_profile_white_branch.delete(db=db) + + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema, filters={"color__value": "#444444"}) + await query.execute(db=db) + assert query.get_node_ids() == [car_camry_main.id] + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema, filters={"color__value": "#000000"}) + await 
query.execute(db=db) + assert set(query.get_node_ids()) == {car_accord_main.id, car_volt_main.id} + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema, filters={"color__value": "#ffffff"}) + await query.execute(db=db) + assert query.get_node_ids() == [] + + +async def test_query_NodeGetListQuery_updated_profile_priorities_on_branch( + db: InfrahubDatabase, + car_camry_main, + car_accord_main, + car_volt_main, + branch: Branch, + default_branch: Branch, +): + profile_schema = registry.schema.get("ProfileTestCar", branch=branch, duplicate=False) + car_profile_black = await Node.init(db=db, schema=profile_schema) + await car_profile_black.new(db=db, profile_name="car_profile_black", color="#000000", profile_priority=1001) + await car_profile_black.save(db=db) + car_profile_white = await Node.init(db=db, schema=profile_schema) + await car_profile_white.new(db=db, profile_name="car_profile_white", color="#ffffff", profile_priority=1002) + await car_profile_white.save(db=db) + car_schema = registry.schema.get("TestCar", branch=branch, duplicate=False) + car = await NodeManager.get_one(db=db, id=car_camry_main.id, branch=default_branch) + await car.profiles.update(data=[car_profile_white], db=db) + await car.save(db=db) + car = await NodeManager.get_one(db=db, id=car_accord_main.id, branch=default_branch) + await car.profiles.update(data=[car_profile_black], db=db) + await car.save(db=db) + car = await NodeManager.get_one(db=db, id=car_volt_main.id, branch=default_branch) + await car.profiles.update(data=[car_profile_black, car_profile_white], db=db) + await car.save(db=db) + await branch.rebase(db=db) + + car_profile_black_branch = await NodeManager.get_one(db=db, branch=branch, id=car_profile_black.id) + car_profile_black_branch.profile_priority.value = 3000 + await car_profile_black_branch.save(db=db) + car_profile_white_branch = await NodeManager.get_one(db=db, branch=branch, id=car_profile_white.id) + car_profile_white_branch.profile_priority.value = 2000 + await car_profile_white_branch.save(db=db) + + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema, filters={"color__value": "#444444"}) + await query.execute(db=db) + assert query.get_node_ids() == [] + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema, filters={"color__value": "#000000"}) + await query.execute(db=db) + assert query.get_node_ids() == [car_accord_main.id] + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema, filters={"color__value": "#ffffff"}) + await query.execute(db=db) + assert set(query.get_node_ids()) == {car_camry_main.id, car_volt_main.id} + + +async def test_query_NodeGetListQuery_updated_profile_attributes_on_branch( + db: InfrahubDatabase, + car_camry_main, + car_accord_main, + car_volt_main, + branch: Branch, + default_branch: Branch, +): + profile_schema = registry.schema.get("ProfileTestCar", branch=branch, duplicate=False) + car_profile_black = await Node.init(db=db, schema=profile_schema) + await car_profile_black.new(db=db, profile_name="car_profile_black", color="#000000", profile_priority=1001) + await car_profile_black.save(db=db) + car_profile_white = await Node.init(db=db, schema=profile_schema) + await car_profile_white.new(db=db, profile_name="car_profile_white", color="#ffffff", profile_priority=1002) + await car_profile_white.save(db=db) + car_schema = registry.schema.get("TestCar", branch=branch, duplicate=False) + car = await NodeManager.get_one(db=db, id=car_camry_main.id, branch=default_branch) 
+ await car.profiles.update(data=[car_profile_white], db=db) + await car.save(db=db) + car = await NodeManager.get_one(db=db, id=car_accord_main.id, branch=default_branch) + await car.profiles.update(data=[car_profile_black], db=db) + await car.save(db=db) + car = await NodeManager.get_one(db=db, id=car_volt_main.id, branch=default_branch) + await car.profiles.update(data=[car_profile_black, car_profile_white], db=db) + await car.save(db=db) + await branch.rebase(db=db) + + car_profile_black_branch = await NodeManager.get_one(db=db, branch=branch, id=car_profile_black.id) + car_profile_black_branch.color.value = "#000001" + await car_profile_black_branch.save(db=db) + car_profile_white_branch = await NodeManager.get_one(db=db, branch=branch, id=car_profile_white.id) + car_profile_white_branch.color.value = "#fffffe" + await car_profile_white_branch.save(db=db) + + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema, filters={"color__value": "#444444"}) + await query.execute(db=db) + assert query.get_node_ids() == [] + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema, filters={"color__value": "#000000"}) + await query.execute(db=db) + assert query.get_node_ids() == [] + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema, filters={"color__value": "#ffffff"}) + await query.execute(db=db) + assert query.get_node_ids() == [] + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema, filters={"color__value": "#000001"}) + await query.execute(db=db) + assert set(query.get_node_ids()) == {car_accord_main.id, car_volt_main.id} + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema, filters={"color__value": "#fffffe"}) + await query.execute(db=db) + assert query.get_node_ids() == [car_camry_main.id] + + +async def test_query_NodeGetListQuery_updated_profile_attributes_nulled_on_branch( + db: InfrahubDatabase, + car_camry_main, + car_accord_main, + car_volt_main, + branch: Branch, + default_branch: Branch, +): + profile_schema = registry.schema.get("ProfileTestCar", branch=branch, duplicate=False) + car_profile_black = await Node.init(db=db, schema=profile_schema) + await car_profile_black.new(db=db, profile_name="car_profile_black", color="#000000", profile_priority=1001) + await car_profile_black.save(db=db) + car_profile_white = await Node.init(db=db, schema=profile_schema) + await car_profile_white.new(db=db, profile_name="car_profile_white", color="#ffffff", profile_priority=1002) + await car_profile_white.save(db=db) + car_schema = registry.schema.get("TestCar", branch=branch, duplicate=False) + car = await NodeManager.get_one(db=db, id=car_camry_main.id, branch=default_branch) + await car.profiles.update(data=[car_profile_white], db=db) + await car.save(db=db) + car = await NodeManager.get_one(db=db, id=car_accord_main.id, branch=default_branch) + await car.profiles.update(data=[car_profile_black], db=db) + await car.save(db=db) + car = await NodeManager.get_one(db=db, id=car_volt_main.id, branch=default_branch) + await car.profiles.update(data=[car_profile_black, car_profile_white], db=db) + await car.save(db=db) + await branch.rebase(db=db) + + car_profile_black_branch = await NodeManager.get_one(db=db, branch=branch, id=car_profile_black.id) + car_profile_black_branch.color.value = None + await car_profile_black_branch.save(db=db) + + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema, filters={"color__value": "#444444"}) + await 
query.execute(db=db) + assert query.get_node_ids() == [car_accord_main.id] + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema, filters={"color__value": "#000000"}) + await query.execute(db=db) + assert query.get_node_ids() == [] + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema, filters={"color__value": "#ffffff"}) + await query.execute(db=db) + assert set(query.get_node_ids()) == {car_camry_main.id, car_volt_main.id} + + +async def test_query_NodeGetListQuery_multiple_profiles_same_priority_filter_and_order( + db: InfrahubDatabase, + car_camry_main, + car_accord_main, + branch: Branch, + default_branch: Branch, +): + profile_schema = registry.schema.get("ProfileTestCar", branch=branch, duplicate=False) + profiles_group_1 = [] + expected_profile_1 = None + for i in range(10): + car_profile = await Node.init(db=db, schema=profile_schema) + await car_profile.new( + db=db, profile_name=f"car_profile_{i}", color=f"#{randint(100000, 499999)}", profile_priority=1000 + ) + await car_profile.save(db=db) + if not expected_profile_1 or car_profile.id < expected_profile_1.id: + expected_profile_1 = car_profile + profiles_group_1.append(car_profile) + profiles_group_2 = [] + expected_profile_2 = None + for i in range(10, 20): + car_profile = await Node.init(db=db, schema=profile_schema) + await car_profile.new( + db=db, profile_name=f"car_profile_{i}", color=f"#{randint(500000, 999999)}", profile_priority=1000 + ) + await car_profile.save(db=db) + if not expected_profile_2 or car_profile.id < expected_profile_2.id: + expected_profile_2 = car_profile + profiles_group_2.append(car_profile) + car_schema = registry.schema.get("TestCar", branch=branch, duplicate=False) + car_schema.order_by = ["color__value"] + car = await NodeManager.get_one(db=db, id=car_camry_main.id, branch=default_branch) + await car.profiles.update(data=profiles_group_1, db=db) + await car.save(db=db) + car = await NodeManager.get_one(db=db, id=car_accord_main.id, branch=default_branch) + await car.profiles.update(data=profiles_group_2, db=db) + await car.save(db=db) + + query = await NodeGetListQuery.init( + db=db, branch=branch, schema=car_schema, filters={"color__value": expected_profile_1.color.value} + ) + await query.execute(db=db) + assert query.get_node_ids() == [car_camry_main.id] + query = await NodeGetListQuery.init( + db=db, branch=branch, schema=car_schema, filters={"color__value": expected_profile_2.color.value} + ) + await query.execute(db=db) + assert query.get_node_ids() == [car_accord_main.id] + query = await NodeGetListQuery.init(db=db, branch=branch, schema=car_schema) + await query.execute(db=db) + assert query.get_node_ids() == [car_camry_main.id, car_accord_main.id] diff --git a/backend/tests/unit/core/test_node_manager_delete.py b/backend/tests/unit/core/test_node_manager_delete.py new file mode 100644 index 0000000000..16bf753715 --- /dev/null +++ b/backend/tests/unit/core/test_node_manager_delete.py @@ -0,0 +1,164 @@ +from typing import AsyncGenerator + +import pytest + +from infrahub.core import registry +from infrahub.core.branch import Branch +from infrahub.core.constants import BranchSupportType, RelationshipDeleteBehavior +from infrahub.core.manager import NodeManager +from infrahub.core.node import Node +from infrahub.core.schema.relationship_schema import RelationshipSchema +from infrahub.database import InfrahubDatabase +from infrahub.exceptions import ValidationError + + +async def test_delete_succeeds( + db: AsyncGenerator[InfrahubDatabase, 
None], + default_branch: Branch, + car_camry_main: Node, + car_accord_main: Node, + person_albert_main: Node, +): + deleted = await NodeManager.delete(db=db, branch=default_branch, nodes=[person_albert_main]) + + assert {d.id for d in deleted} == {person_albert_main.id} + node = await NodeManager.get_one(db=db, id=person_albert_main.id) + assert node is None + + +async def test_delete_prevented( + db, default_branch, car_camry_main, car_accord_main, person_albert_main, person_jane_main +): + with pytest.raises(ValidationError) as exc: + await NodeManager.delete(db=db, branch=default_branch, nodes=[person_jane_main]) + + assert f"Cannot delete TestPerson '{person_jane_main.id}'" in str(exc.value) + assert f"It is linked to mandatory relationship owner on node TestCar '{car_camry_main.id}'" in str(exc.value) + + retrieved_jane = await NodeManager.get_one(db=db, id=person_jane_main.id) + assert retrieved_jane.id == person_jane_main.id + + +async def test_one_sided_relationship( + db, + default_branch, + car_camry_main, + car_accord_main, + person_albert_main, + person_jane_main, + car_person_schema_unregistered, +): + schema_branch = registry.schema.get_schema_branch(name=default_branch.name) + person_schema = schema_branch.get(name="TestPerson", duplicate=False) + person_schema.relationships.append( + RelationshipSchema( + name="other_car", + peer="TestCar", + identifier="person__other_car", + optional=True, + cardinality="one", + branch=BranchSupportType.AWARE, + ) + ) + jane = await NodeManager.get_one(db=db, id=person_jane_main.id, branch=default_branch) + await jane.other_car.update(db=db, data=car_accord_main) + await jane.save(db=db) + + with pytest.raises(ValidationError) as exc: + await NodeManager.delete(db=db, branch=default_branch, nodes=[jane]) + + assert f"Cannot delete TestPerson '{person_jane_main.id}'" in str(exc.value) + assert f"It is linked to mandatory relationship owner on node TestCar '{car_camry_main.id}'" in str(exc.value) + + retrieved_jane = await NodeManager.get_one(db=db, id=person_jane_main.id) + assert retrieved_jane.id == person_jane_main.id + + +async def test_source_node_already_deleted( + db, default_branch, car_camry_main, car_accord_main, person_albert_main, person_jane_main +): + car = await NodeManager.get_one(db=db, id=car_camry_main.id) + await car.delete(db=db) + + deleted = await NodeManager.delete(db=db, branch=default_branch, nodes=[person_jane_main]) + + assert {d.id for d in deleted} == {person_jane_main.id} + node = await NodeManager.get_one(db=db, id=person_jane_main.id) + assert node is None + + +async def test_cascade_delete_not_prevented( + db: AsyncGenerator[InfrahubDatabase, None], + default_branch: Branch, + car_camry_main: Node, + car_accord_main: Node, + person_albert_main: Node, + person_jane_main: Node, +): + schema_branch = registry.schema.get_schema_branch(name=default_branch.name) + person_schema = schema_branch.get(name="TestPerson", duplicate=False) + person_schema.get_relationship("cars").on_delete = RelationshipDeleteBehavior.CASCADE + + deleted = await NodeManager.delete(db=db, branch=default_branch, nodes=[person_jane_main]) + + assert {d.id for d in deleted} == {person_jane_main.id, car_camry_main.id} + node_map = await NodeManager.get_many(db=db, ids=[person_jane_main.id, car_camry_main.id]) + assert node_map == {} + + +async def test_delete_with_cascade_on_many_relationship( + db, default_branch, car_camry_main, car_accord_main, car_prius_main, person_john_main, person_jane_main +): + schema_branch = 
registry.schema.get_schema_branch(name=default_branch.name) + person_schema = schema_branch.get(name="TestPerson", duplicate=False) + person_schema.get_relationship("cars").on_delete = RelationshipDeleteBehavior.CASCADE + + deleted = await NodeManager.delete(db=db, branch=default_branch, nodes=[person_john_main]) + + assert {d.id for d in deleted} == {person_john_main.id, car_accord_main.id, car_prius_main.id} + node_map = await NodeManager.get_many(db=db, ids=[person_john_main.id, car_accord_main.id, car_prius_main.id]) + assert node_map == {} + + +async def test_delete_with_cascade_on_one_relationship( + db, default_branch, car_camry_main, car_accord_main, person_john_main +): + schema_branch = registry.schema.get_schema_branch(name=default_branch.name) + car_schema = schema_branch.get(name="TestCar", duplicate=False) + car_schema.get_relationship("owner").on_delete = RelationshipDeleteBehavior.CASCADE + + deleted = await NodeManager.delete(db=db, branch=default_branch, nodes=[car_accord_main]) + + assert {d.id for d in deleted} == {person_john_main.id, car_accord_main.id} + node_map = await NodeManager.get_many(db=db, ids=[person_john_main.id, car_accord_main.id]) + assert node_map == {} + + +async def test_delete_with_cascade_multiple_input_nodes( + db, default_branch, car_camry_main, car_accord_main, car_prius_main, person_john_main, person_jane_main +): + schema_branch = registry.schema.get_schema_branch(name=default_branch.name) + car_schema = schema_branch.get(name="TestCar", duplicate=False) + car_schema.get_relationship("owner").on_delete = RelationshipDeleteBehavior.CASCADE + + deleted = await NodeManager.delete(db=db, branch=default_branch, nodes=[car_accord_main, car_prius_main]) + + assert {d.id for d in deleted} == {person_john_main.id, car_accord_main.id, car_prius_main.id} + node_map = await NodeManager.get_many(db=db, ids=[person_john_main.id, car_accord_main.id, car_prius_main.id]) + assert node_map == {} + + +async def test_delete_with_cascade_both_directions_succeeds( + db, default_branch, car_camry_main, car_accord_main, car_prius_main, person_john_main, person_jane_main +): + schema_branch = registry.schema.get_schema_branch(name=default_branch.name) + car_schema = schema_branch.get(name="TestCar", duplicate=False) + car_schema.get_relationship("owner").on_delete = RelationshipDeleteBehavior.CASCADE + person_schema = schema_branch.get(name="TestPerson", duplicate=False) + person_schema.get_relationship("cars").on_delete = RelationshipDeleteBehavior.CASCADE + + deleted = await NodeManager.delete(db=db, branch=default_branch, nodes=[car_accord_main]) + + assert {d.id for d in deleted} == {person_john_main.id, car_accord_main.id, car_prius_main.id} + node_map = await NodeManager.get_many(db=db, ids=[person_john_main.id, car_accord_main.id, car_prius_main.id]) + assert node_map == {} diff --git a/backend/tests/unit/core/test_node_query.py b/backend/tests/unit/core/test_node_query.py index 9b425a3ac7..a6f8dea9fd 100644 --- a/backend/tests/unit/core/test_node_query.py +++ b/backend/tests/unit/core/test_node_query.py @@ -1,9 +1,10 @@ -import time from typing import Dict -from infrahub.core import get_branch, registry from infrahub.core.branch import Branch -from infrahub.core.constants import InfrahubKind, RelationshipHierarchyDirection +from infrahub.core.constants import ( + InfrahubKind, + RelationshipHierarchyDirection, +) from infrahub.core.manager import NodeManager from infrahub.core.migrations.schema.node_attribute_remove import ( NodeAttributeRemoveMigration, @@ 
-19,21 +20,19 @@ NodeCreateAllQuery, NodeDeleteQuery, NodeGetHierarchyQuery, - NodeGetListQuery, NodeListGetAttributeQuery, NodeListGetInfoQuery, NodeListGetRelationshipsQuery, ) +from infrahub.core.registry import registry +from infrahub.core.utils import count_nodes, get_nodes from infrahub.database import InfrahubDatabase async def test_query_NodeCreateAllQuery(db: InfrahubDatabase, default_branch: Branch, car_person_schema, first_account): obj = await Node.init(db=db, schema="TestPerson", branch=default_branch) await obj.new(db=db, name="John", height=180) - - original_start_time = time.time_ns() await obj.save(db=db) - time.time_ns() - original_start_time car = await Node.init(db=db, schema="TestCar", branch=default_branch) await car.new( @@ -45,189 +44,125 @@ async def test_query_NodeCreateAllQuery(db: InfrahubDatabase, default_branch: Br owner={"id": obj.id, "_relation__source": first_account}, ) - new_start_time = time.time_ns() query = await NodeCreateAllQuery.init(db=db, node=car) await query.execute(db=db) - time.time_ns() - new_start_time assert query.get_self_ids() -async def test_query_NodeGetListQuery( - db: InfrahubDatabase, person_john_main, person_jim_main, person_albert_main, person_alfred_main, branch: Branch +async def test_query_NodeCreateAllQuery_iphost( + db: InfrahubDatabase, default_branch: Branch, all_attribute_types_schema ): - person_schema = registry.schema.get(name="TestPerson", branch=branch) - ids = [person_john_main.id, person_jim_main.id, person_albert_main.id, person_alfred_main.id] - query = await NodeGetListQuery.init(db=db, branch=branch, schema=person_schema) - await query.execute(db=db) - assert sorted(query.get_node_ids()) == sorted(ids) + obj = await Node.init(db=db, schema="TestAllAttributeTypes", branch=default_branch) + await obj.new(db=db, ipaddress="10.2.5.2/24") - -async def test_query_NodeGetListQuery_filter_id( - db: InfrahubDatabase, person_john_main, person_jim_main, person_albert_main, person_alfred_main, branch: Branch -): - person_schema = registry.schema.get(name="TestPerson", branch=branch) - query = await NodeGetListQuery.init(db=db, branch=branch, schema=person_schema, filters={"id": person_john_main.id}) + query = await NodeCreateAllQuery.init(db=db, node=obj) await query.execute(db=db) - assert len(query.get_node_ids()) == 1 - -async def test_query_NodeGetListQuery_filter_ids( - db: InfrahubDatabase, person_john_main, person_jim_main, person_albert_main, person_alfred_main, branch: Branch -): - person_schema = registry.schema.get(name="TestPerson", branch=branch) - person_schema.order_by = ["height__value"] - query = await NodeGetListQuery.init( - db=db, - branch=branch, - schema=person_schema, - filters={"ids": [person_jim_main.id, person_john_main.id, person_albert_main.id]}, - ) - await query.execute(db=db) - assert query.get_node_ids() == [person_albert_main.id, person_jim_main.id, person_john_main.id] + nodes = await get_nodes(db=db, label="AttributeIPHost") + assert len(nodes) == 1 + attribute = nodes[0] + assert attribute["value"] == "10.2.5.2/24" + assert attribute["version"] == 4 + assert attribute["binary_address"] == "00001010000000100000010100000010" + assert attribute["prefixlen"] == 24 -async def test_query_NodeGetListQuery_filter_height( - db: InfrahubDatabase, person_john_main, person_jim_main, person_albert_main, person_alfred_main, branch: Branch -): - schema = registry.schema.get(name="TestPerson", branch=branch) - query = await NodeGetListQuery.init(db=db, branch=branch, schema=schema, 
filters={"height__value": 160}) - await query.execute(db=db) - assert len(query.get_node_ids()) == 2 + assert await count_nodes(db=db, label="AttributeIPNetwork") == 0 -async def test_query_NodeGetListQuery_filter_owner( - db: InfrahubDatabase, default_branch: Branch, person_john_main: Node, first_account: Node, branch: Branch +async def test_query_NodeCreateAllQuery_ipnetwork( + db: InfrahubDatabase, default_branch: Branch, all_attribute_types_schema ): - person = await Node.init(db=db, schema="TestPerson", branch=branch) - await person.new(db=db, name={"value": "Diane", "owner": first_account.id}, height=165) - await person.save(db=db) - - schema = registry.schema.get(name="TestPerson", branch=branch) - query = await NodeGetListQuery.init( - db=db, branch=branch, schema=schema, filters={"any__owner__id": first_account.id} - ) - await query.execute(db=db) - assert len(query.get_node_ids()) == 1 - - schema = registry.schema.get(name="TestPerson", branch=branch) - query = await NodeGetListQuery.init( - db=db, branch=branch, schema=schema, filters={"name__owner__id": first_account.id} - ) - await query.execute(db=db) - assert len(query.get_node_ids()) == 1 - - schema = registry.schema.get(name="TestPerson", branch=branch) - query = await NodeGetListQuery.init( - db=db, branch=branch, schema=schema, filters={"height__owner__id": first_account.id} - ) - await query.execute(db=db) - assert len(query.get_node_ids()) == 0 - - -async def test_query_NodeGetListQuery_filter_boolean( - db: InfrahubDatabase, car_accord_main, car_camry_main, car_volt_main, car_yaris_main, branch: Branch -): - schema = registry.schema.get(name="TestCar", branch=branch) - query = await NodeGetListQuery.init(db=db, branch=branch, schema=schema, filters={"is_electric__value": False}) - await query.execute(db=db) - assert len(query.get_node_ids()) == 3 - - -async def test_query_NodeGetListQuery_deleted_node( - db: InfrahubDatabase, car_accord_main, car_camry_main: Node, car_volt_main, car_yaris_main, branch: Branch -): - node_to_delete = await NodeManager.get_one(id=car_camry_main.id, db=db, branch=branch) - await node_to_delete.delete(db=db) - - schema = registry.schema.get(name="TestCar", branch=branch) - schema.order_by = ["owner__name__value"] + obj = await Node.init(db=db, schema="TestAllAttributeTypes", branch=default_branch) + await obj.new(db=db, prefix="10.2.5.0/24") - query = await NodeGetListQuery.init(db=db, branch=branch, schema=schema, filters={"is_electric__value": False}) + query = await NodeCreateAllQuery.init(db=db, node=obj) await query.execute(db=db) - assert len(query.get_node_ids()) == 2 + nodes = await get_nodes(db=db, label="AttributeIPNetwork") + assert len(nodes) == 1 + prefix = nodes[0] -async def test_query_NodeGetListQuery_filter_relationship( - db: InfrahubDatabase, car_accord_main, car_camry_main, car_volt_main, car_yaris_main, branch: Branch -): - schema = registry.schema.get(name="TestCar", branch=branch) - query = await NodeGetListQuery.init(db=db, branch=branch, schema=schema, filters={"owner__name__value": "John"}) - await query.execute(db=db) - assert len(query.get_node_ids()) == 2 + assert prefix["value"] == "10.2.5.0/24" + assert prefix["version"] == 4 + assert prefix["binary_address"] == "00001010000000100000010100000000" + assert prefix["prefixlen"] == 24 + # assert prefix["num_addresses"] == 256 - -async def test_query_NodeGetListQuery_filter_relationship_ids( - db: InfrahubDatabase, - person_john_main, - car_accord_main, - car_camry_main, - car_volt_main, - car_yaris_main, - 
branch: Branch, -): - schema = registry.schema.get(name="TestCar", branch=branch) - query = await NodeGetListQuery.init( - db=db, branch=branch, schema=schema, filters={"owner__ids": [person_john_main.id]} - ) - await query.execute(db=db) - assert len(query.get_node_ids()) == 2 + assert await count_nodes(db=db, label="AttributeIPHost") == 0 -async def test_query_NodeGetListQuery_filter_and_sort( - db: InfrahubDatabase, car_accord_main, car_camry_main, car_volt_main, car_yaris_main, branch: Branch +async def test_query_NodeListGetInfoQuery( + db: InfrahubDatabase, person_john_main, person_jim_main, person_albert_main, person_alfred_main, branch: Branch ): - schema = registry.schema.get(name="TestCar", branch=branch) - schema.order_by = ["owner__name__value", "is_electric__value"] - - query = await NodeGetListQuery.init( - db=db, - branch=branch, - schema=schema, - filters={"owner__name__value": "John", "is_electric__value": False}, - ) + ids = [person_john_main.id, person_jim_main.id, person_albert_main.id] + query = await NodeListGetInfoQuery.init(db=db, branch=branch, ids=ids) await query.execute(db=db) - assert len(query.get_node_ids()) == 1 + assert len(list(query.get_results_group_by(("n", "uuid")))) == 3 -async def test_query_NodeGetListQuery_filter_and_sort_with_revision( - db: InfrahubDatabase, car_accord_main, car_camry_main, car_volt_main, car_yaris_main, branch: Branch +async def test_query_NodeListGetInfoQuery_with_profiles( + db: InfrahubDatabase, person_john_main, person_jim_main, person_albert_main, person_alfred_main, branch: Branch ): - node = await NodeManager.get_one(id=car_volt_main.id, db=db, branch=branch) - node.is_electric.value = False - await node.save(db=db) - - schema = registry.schema.get(name="TestCar", branch=branch) - schema.order_by = ["owner__name__value", "is_electric__value"] + profile_schema = registry.schema.get("ProfileTestPerson", branch=branch) + person_profile = await Node.init(db=db, schema=profile_schema) + await person_profile.new(db=db, profile_name="person_profile_1", height=172, profile_priority=1001) + await person_profile.save(db=db) + person_profile_2 = await Node.init(db=db, schema=profile_schema) + await person_profile_2.new(db=db, profile_name="person_profile_2", height=177, profile_priority=1002) + await person_profile_2.save(db=db) + person = await NodeManager.get_one(db=db, id=person_john_main.id, branch=branch) + await person.profiles.update(data=[person_profile, person_profile_2], db=db) + await person.save(db=db) - query = await NodeGetListQuery.init( - db=db, - branch=branch, - schema=schema, - filters={"owner__name__value": "John", "is_electric__value": False}, - ) + ids = [person_john_main.id, person_jim_main.id, person_albert_main.id] + query = await NodeListGetInfoQuery.init(db=db, branch=branch, ids=ids) await query.execute(db=db) - assert len(query.get_node_ids()) == 2 + async for node_to_process in query.get_nodes(duplicate=False): + if node_to_process.node_uuid != person_john_main.id: + assert node_to_process.profile_uuids == [] + else: + assert set(node_to_process.profile_uuids) == {person_profile.id, person_profile_2.id} -async def test_query_NodeGetListQuery_with_generics(db: InfrahubDatabase, group_group1_main, branch: Branch): - schema = registry.schema.get(name=InfrahubKind.GENERICGROUP, branch=branch) - query = await NodeGetListQuery.init( - db=db, - branch=branch, - schema=schema, - ) - await query.execute(db=db) - assert query.get_node_ids() == [group_group1_main.id] - -async def test_query_NodeListGetInfoQuery( 
+async def test_query_NodeListGetInfoQuery_with_profiles_some_deleted( db: InfrahubDatabase, person_john_main, person_jim_main, person_albert_main, person_alfred_main, branch: Branch ): - ids = [person_john_main.id, person_jim_main.id, person_albert_main.id] + profile_schema = registry.schema.get("ProfileTestPerson", branch=branch) + person_profile = await Node.init(db=db, schema=profile_schema) + await person_profile.new(db=db, profile_name="person_profile_1", height=172, profile_priority=1001) + await person_profile.save(db=db) + person_profile_2 = await Node.init(db=db, schema=profile_schema) + await person_profile_2.new(db=db, profile_name="person_profile_2", height=177, profile_priority=1002) + await person_profile_2.save(db=db) + for person_id in (person_albert_main.id, person_alfred_main.id, person_john_main.id): + person = await NodeManager.get_one(db=db, id=person_id, branch=branch) + await person.profiles.update(data=[person_profile, person_profile_2], db=db) + await person.save(db=db) + person_albert = await NodeManager.get_one(db=db, id=person_albert_main.id, branch=branch) + await person_albert.profiles.update(data=[person_profile_2], db=db) + await person_albert.save(db=db) + + ids = [person_john_main.id, person_jim_main.id, person_albert_main.id, person_alfred_main.id] query = await NodeListGetInfoQuery.init(db=db, branch=branch, ids=ids) await query.execute(db=db) - assert len(list(query.get_results_group_by(("n", "uuid")))) == 3 + + queried_nodes = [node async for node in query.get_nodes(duplicate=False)] + assert {qn.node_uuid for qn in queried_nodes} == { + person_john_main.id, + person_jim_main.id, + person_albert_main.id, + person_alfred_main.id, + } + for node_to_process in queried_nodes: + if node_to_process.node_uuid in (person_john_main.id, person_alfred_main.id): + assert set(node_to_process.profile_uuids) == {person_profile.id, person_profile_2.id} + elif node_to_process.node_uuid == person_albert_main.id: + assert node_to_process.profile_uuids == [person_profile_2.id] + elif node_to_process.node_uuid == person_jim_main.id: + assert node_to_process.profile_uuids == [] async def test_query_NodeListGetInfoQuery_renamed( @@ -261,8 +196,8 @@ async def test_query_NodeListGetInfoQuery_renamed( async def test_query_NodeListGetAttributeQuery_all_fields(db: InfrahubDatabase, base_dataset_02): - default_branch = await get_branch(db=db, branch="main") - branch1 = await get_branch(db=db, branch="branch1") + default_branch = await registry.get_branch(db=db, branch="main") + branch1 = await registry.get_branch(db=db, branch="branch1") # Query all the nodes in main but only c1 and c2 present # Expect 4 attributes per node(x2) = 8 attributes @@ -270,8 +205,8 @@ async def test_query_NodeListGetAttributeQuery_all_fields(db: InfrahubDatabase, await query.execute(db=db) assert sorted(query.get_attributes_group_by_node().keys()) == ["c1", "c2"] assert len(list(query.get_results())) == 8 - assert len(query.get_attributes_group_by_node()["c1"]["attrs"]) == 4 - assert len(query.get_attributes_group_by_node()["c2"]["attrs"]) == 4 + assert len(query.get_attributes_group_by_node()["c1"].attrs) == 4 + assert len(query.get_attributes_group_by_node()["c2"].attrs) == 4 # Query all the nodes in branch1, c1, c2 and c3 present # Expect 15 attributes because each node has 4 but c1at2 has a value both in Main and Branch1 @@ -279,9 +214,9 @@ async def test_query_NodeListGetAttributeQuery_all_fields(db: InfrahubDatabase, await query.execute(db=db) assert
sorted(query.get_attributes_group_by_node().keys()) == ["c1", "c2", "c3"] assert len(list(query.get_results())) == 15 - assert len(query.get_attributes_group_by_node()["c1"]["attrs"]) == 4 - assert len(query.get_attributes_group_by_node()["c2"]["attrs"]) == 4 - assert len(query.get_attributes_group_by_node()["c3"]["attrs"]) == 4 + assert len(query.get_attributes_group_by_node()["c1"].attrs) == 4 + assert len(query.get_attributes_group_by_node()["c2"].attrs) == 4 + assert len(query.get_attributes_group_by_node()["c3"].attrs) == 4 # Query all the nodes in branch1 in isolated mode, only c1 and c3 present # Expect 9 attributes because each node has 4 but c1at2 has a value both in Main and Branch1 @@ -290,8 +225,8 @@ async def test_query_NodeListGetAttributeQuery_all_fields(db: InfrahubDatabase, await query.execute(db=db) assert sorted(query.get_attributes_group_by_node().keys()) == ["c1", "c3"] assert len(list(query.get_results())) == 11 - assert len(query.get_attributes_group_by_node()["c1"]["attrs"]) == 4 - assert len(query.get_attributes_group_by_node()["c3"]["attrs"]) == 4 + assert len(query.get_attributes_group_by_node()["c1"].attrs) == 4 + assert len(query.get_attributes_group_by_node()["c3"].attrs) == 4 async def test_query_NodeListGetAttributeQuery_with_source( @@ -312,21 +247,27 @@ async def test_query_NodeListGetAttributeQuery_with_source( ) await obj2.save(db=db) - default_branch = await get_branch(db=db, branch="main") + default_branch = await registry.get_branch(db=db, branch="main") query = await NodeListGetAttributeQuery.init( db=db, ids=[obj1.id, obj2.id], branch=default_branch, include_source=True ) await query.execute(db=db) assert sorted(query.get_attributes_group_by_node().keys()) == sorted([obj1.id, obj2.id]) - assert query.get_attributes_group_by_node()[obj1.id]["attrs"]["name"].source_uuid == first_account.id - assert query.get_attributes_group_by_node()[obj2.id]["attrs"]["level"].source_uuid == second_account.id - assert query.get_attributes_group_by_node()[obj2.id]["attrs"]["name"].source_uuid == first_account.id + assert ( + query.get_attributes_group_by_node()[obj1.id].attrs["name"].node_properties["source"].uuid == first_account.id + ) + assert ( + query.get_attributes_group_by_node()[obj2.id].attrs["level"].node_properties["source"].uuid == second_account.id + ) + assert ( + query.get_attributes_group_by_node()[obj2.id].attrs["name"].node_properties["source"].uuid == first_account.id + ) async def test_query_NodeListGetAttributeQuery(db: InfrahubDatabase, base_dataset_02): - default_branch = await get_branch(db=db, branch="main") - branch1 = await get_branch(db=db, branch="branch1") + default_branch = await registry.get_branch(db=db, branch="main") + branch1 = await registry.get_branch(db=db, branch="branch1") # Query all the nodes in main but only c1 and c2 present # Expect 2 attributes per node(x2) = 4 attributes @@ -335,8 +276,8 @@ async def test_query_NodeListGetAttributeQuery(db: InfrahubDatabase, base_datase ) await query.execute(db=db) assert sorted(query.get_attributes_group_by_node().keys()) == ["c1", "c2"] - assert len(query.get_attributes_group_by_node()["c1"]["attrs"]) == 2 - assert len(query.get_attributes_group_by_node()["c2"]["attrs"]) == 2 + assert len(query.get_attributes_group_by_node()["c1"].attrs) == 2 + assert len(query.get_attributes_group_by_node()["c2"].attrs) == 2 assert len(list(query.get_results())) == 4 # Query all the nodes in branch1: c1, c2 and c3 present @@ -346,9 +287,9 @@ async def test_query_NodeListGetAttributeQuery(db: 
InfrahubDatabase, base_datase ) await query.execute(db=db) assert sorted(query.get_attributes_group_by_node().keys()) == ["c1", "c2", "c3"] - assert len(query.get_attributes_group_by_node()["c1"]["attrs"]) == 1 - assert len(query.get_attributes_group_by_node()["c2"]["attrs"]) == 1 - assert len(query.get_attributes_group_by_node()["c3"]["attrs"]) == 1 + assert len(query.get_attributes_group_by_node()["c1"].attrs) == 1 + assert len(query.get_attributes_group_by_node()["c2"].attrs) == 1 + assert len(query.get_attributes_group_by_node()["c3"].attrs) == 1 assert len(list(query.get_results())) == 6 # Query c1 in branch1 @@ -370,8 +311,8 @@ async def test_query_NodeListGetAttributeQuery(db: InfrahubDatabase, base_datase async def test_query_NodeListGetAttributeQuery_deleted(db: InfrahubDatabase, base_dataset_02): - default_branch = await get_branch(db=db, branch="main") - branch1 = await get_branch(db=db, branch="branch1") + default_branch = await registry.get_branch(db=db, branch="main") + branch1 = await registry.get_branch(db=db, branch="branch1") schema = registry.schema.get_schema_branch(name=branch1.name) car_schema = schema.get(name="TestCar") @@ -394,17 +335,17 @@ async def test_query_NodeListGetAttributeQuery_deleted(db: InfrahubDatabase, bas await query.execute(db=db) assert sorted(query.get_attributes_group_by_node().keys()) == ["c1", "c2"] - assert len(query.get_attributes_group_by_node()["c1"]["attrs"]) == 4 - assert len(query.get_attributes_group_by_node()["c2"]["attrs"]) == 4 + assert len(query.get_attributes_group_by_node()["c1"].attrs) == 4 + assert len(query.get_attributes_group_by_node()["c2"].attrs) == 4 # Query all the nodes in branch1: c1, c2 and c3 present # Expect 6 attributes because each node has 1 but c1at2 has its value and its protected flag defined both in Main and Branch1 query = await NodeListGetAttributeQuery.init(db=db, ids=["c1", "c2", "c3"], branch=branch1) await query.execute(db=db) assert sorted(query.get_attributes_group_by_node().keys()) == ["c1", "c2", "c3"] - assert len(query.get_attributes_group_by_node()["c1"]["attrs"]) == 3 - assert len(query.get_attributes_group_by_node()["c2"]["attrs"]) == 3 - assert len(query.get_attributes_group_by_node()["c3"]["attrs"]) == 3 + assert len(query.get_attributes_group_by_node()["c1"].attrs) == 3 + assert len(query.get_attributes_group_by_node()["c2"].attrs) == 3 + assert len(query.get_attributes_group_by_node()["c3"].attrs) == 3 # Query c1 in branch1 # Expect 4 attributes because c1at2 has its value and its protected flag defined both in Main and Branch1 @@ -413,11 +354,11 @@ async def test_query_NodeListGetAttributeQuery_deleted(db: InfrahubDatabase, bas ) await query.execute(db=db) assert sorted(query.get_attributes_group_by_node().keys()) == ["c1"] - assert len(query.get_attributes_group_by_node()["c1"]["attrs"]) == 1 + assert len(query.get_attributes_group_by_node()["c1"].attrs) == 1 async def test_query_NodeListGetRelationshipsQuery(db: InfrahubDatabase, default_branch: Branch, person_jack_tags_main): - default_branch = await get_branch(db=db, branch="main") + default_branch = await registry.get_branch(db=db, branch="main") query = await NodeListGetRelationshipsQuery.init( db=db, ids=[person_jack_tags_main.id], diff --git a/backend/tests/unit/core/test_query_branch.py b/backend/tests/unit/core/test_query_branch.py index bf7365fb9c..6512e05176 100644 --- a/backend/tests/unit/core/test_query_branch.py +++ b/backend/tests/unit/core/test_query_branch.py @@ -1,11 +1,11 @@ -from infrahub.core import get_branch from 
infrahub.core.branch import Branch from infrahub.core.query.branch import GetAllBranchInternalRelationshipQuery +from infrahub.core.registry import registry from infrahub.database import InfrahubDatabase async def test_GetAllBranchInternalRelationshipQuery(db: InfrahubDatabase, default_branch: Branch, base_dataset_02): - branch1 = await get_branch(branch="branch1", db=db) + branch1 = await registry.get_branch(branch="branch1", db=db) query = await GetAllBranchInternalRelationshipQuery.init(db=db, branch=branch1) await query.execute(db=db) diff --git a/backend/tests/unit/core/test_query_diff.py b/backend/tests/unit/core/test_query_diff.py index 2dcf63f5d5..a681273cfb 100644 --- a/backend/tests/unit/core/test_query_diff.py +++ b/backend/tests/unit/core/test_query_diff.py @@ -1,6 +1,5 @@ from collections import defaultdict -from infrahub.core import get_branch from infrahub.core.query.diff import ( DiffAttributeQuery, DiffNodePropertiesByIDSRangeQuery, @@ -9,6 +8,7 @@ DiffRelationshipPropertyQuery, DiffRelationshipQuery, ) +from infrahub.core.registry import registry from infrahub.core.timestamp import Timestamp from infrahub.database import InfrahubDatabase @@ -22,7 +22,7 @@ def group_results_per_node(results): async def test_diff_node_query(db: InfrahubDatabase, default_branch, base_dataset_02): - branch1 = await get_branch(branch="branch1", db=db) + branch1 = await registry.get_branch(branch="branch1", db=db) # Query all nodes from the creation of the first nodes (m60) to now query = await DiffNodeQuery.init( @@ -119,7 +119,7 @@ async def test_diff_node_query(db: InfrahubDatabase, default_branch, base_datase async def test_diff_attribute_query(db: InfrahubDatabase, default_branch, base_dataset_02): - branch1 = await get_branch(branch="branch1", db=db) + branch1 = await registry.get_branch(branch="branch1", db=db) # Query all attributes from the creation of the branch (m45) to now query = await DiffAttributeQuery.init( @@ -213,7 +213,7 @@ async def test_diff_attribute_query(db: InfrahubDatabase, default_branch, base_d async def test_diff_attribute_query_rebased_branch(db: InfrahubDatabase, default_branch, base_dataset_03): - branch2 = await get_branch(branch="branch2", db=db) + branch2 = await registry.get_branch(branch="branch2", db=db) # Query all attributes from the creation of the branch (m45) to now query = await DiffAttributeQuery.init( @@ -229,7 +229,7 @@ async def test_diff_attribute_query_rebased_branch(db: InfrahubDatabase, default async def test_diff_node_properties_ids_range_query(db: InfrahubDatabase, default_branch, base_dataset_02): - branch1 = await get_branch(branch="branch1", db=db) + branch1 = await registry.get_branch(branch="branch1", db=db) # Query all Nodes from the creation of the first nodes (m60) to now query = await DiffNodePropertiesByIDSRangeQuery.init( @@ -274,7 +274,7 @@ async def test_diff_node_properties_ids_range_query(db: InfrahubDatabase, defaul async def test_diff_relationship_properties_ids_range_query(db: InfrahubDatabase, default_branch, base_dataset_02): - branch1 = await get_branch(branch="branch1", db=db) + branch1 = await registry.get_branch(branch="branch1", db=db) # Query all Rels from the creation of the first nodes (m60) to now query = await DiffRelationshipPropertiesByIDSRangeQuery.init( @@ -318,7 +318,7 @@ async def test_diff_relationship_properties_ids_range_query(db: InfrahubDatabase async def test_DiffRelationshipQuery(db: InfrahubDatabase, base_dataset_02): - branch1 = await get_branch(branch="branch1", db=db) + branch1 = await 
registry.get_branch(branch="branch1", db=db) # Execute the query with default timestamp from the creation of the branch to now query = await DiffRelationshipQuery.init(db=db, branch=branch1) @@ -375,7 +375,7 @@ async def test_DiffRelationshipQuery(db: InfrahubDatabase, base_dataset_02): async def test_DiffRelationshipPropertyQuery(db: InfrahubDatabase, base_dataset_02): - branch1 = await get_branch(branch="branch1", db=db) + branch1 = await registry.get_branch(branch="branch1", db=db) # Execute the query with default timestamp from the creation of the branch to now # 4 changes are expected @@ -425,7 +425,7 @@ async def test_DiffRelationshipPropertyQuery(db: InfrahubDatabase, base_dataset_ async def test_DiffRelationshipPropertyQuery_both_branches(db: InfrahubDatabase, base_dataset_04): - branch1 = await get_branch(branch="branch1", db=db) + branch1 = await registry.get_branch(branch="branch1", db=db) # Execute the query with default timestamp from the creation of the branch to now # 4 changes are expected diff --git a/backend/tests/unit/core/test_query_subquery.py b/backend/tests/unit/core/test_query_subquery.py index e630d9691b..c97bbbfd86 100644 --- a/backend/tests/unit/core/test_query_subquery.py +++ b/backend/tests/unit/core/test_query_subquery.py @@ -25,10 +25,17 @@ async def test_build_subquery_filter_attribute_text( WITH n MATCH path = (n)-[:HAS_ATTRIBUTE]-(i:Attribute { name: $filter1_name })-[:HAS_VALUE]-(av:AttributeValue { value: $filter1_value }) WHERE all(r IN relationships(path) WHERE (PLACEHOLDER)) - WITH n, path, reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level) AS branch_level, %(froms_var)s AS froms - RETURN n as filter1 + WITH + n, + path, + reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level) AS branch_level, + %(froms_var)s AS froms, + all(r IN relationships(path) WHERE r.status = "active") AS is_active ORDER BY branch_level DESC, froms[-1] DESC, froms[-2] DESC - LIMIT 1 + WITH head(collect([is_active, n])) AS latest_node_details + WHERE latest_node_details[0] = TRUE + WITH latest_node_details[1] AS n + RETURN n AS filter1 """ % {"froms_var": db.render_list_comprehension(items="relationships(path)", item_name="from")} assert query == expected_query @@ -56,10 +63,17 @@ async def test_build_subquery_filter_attribute_int( WITH n MATCH path = (n)-[:HAS_ATTRIBUTE]-(i:Attribute { name: $filter2_name })-[:HAS_VALUE]-(av:AttributeValue { value: $filter2_value }) WHERE all(r IN relationships(path) WHERE (PLACEHOLDER)) - WITH n, path, reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level) AS branch_level, %(froms_var)s AS froms - RETURN n as filter2 + WITH + n, + path, + reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level) AS branch_level, + %(froms_var)s AS froms, + all(r IN relationships(path) WHERE r.status = "active") AS is_active ORDER BY branch_level DESC, froms[-1] DESC, froms[-2] DESC - LIMIT 1 + WITH head(collect([is_active, n])) AS latest_node_details + WHERE latest_node_details[0] = TRUE + WITH latest_node_details[1] AS n + RETURN n AS filter2 """ % {"froms_var": db.render_list_comprehension(items="relationships(path)", item_name="from")} assert query == expected_query @@ -87,10 +101,17 @@ async def test_build_subquery_filter_relationship(db: InfrahubDatabase, default_ WITH n MATCH path = (n)-[r1:IS_RELATED]->(rl:Relationship { name: $filter1_rel_name })-[r2:IS_RELATED]->(peer:Node)-[:HAS_ATTRIBUTE]-(i:Attribute { name: $filter1_name })-[:HAS_VALUE]-(av:AttributeValue { value: $filter1_value }) 
WHERE all(r IN relationships(path) WHERE (PLACEHOLDER)) - WITH n, path, reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level) AS branch_level, %(froms_var)s AS froms - RETURN n as filter1 + WITH + n, + path, + reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level) AS branch_level, + %(froms_var)s AS froms, + all(r IN relationships(path) WHERE r.status = "active") AS is_active ORDER BY branch_level DESC, froms[-1] DESC, froms[-2] DESC - LIMIT 1 + WITH head(collect([is_active, n])) AS latest_node_details + WHERE latest_node_details[0] = TRUE + WITH latest_node_details[1] AS n + RETURN n AS filter1 """ % {"froms_var": db.render_list_comprehension(items="relationships(path)", item_name="from")} assert query == expected_query @@ -122,10 +143,17 @@ async def test_build_subquery_filter_relationship_ids(db: InfrahubDatabase, defa WITH n MATCH path = (n)-[r1:IS_RELATED]->(rl:Relationship { name: $filter1_rel_name })-[r2:IS_RELATED]->(peer:Node) WHERE peer.uuid IN $filter1_peer_ids AND all(r IN relationships(path) WHERE (PLACEHOLDER)) - WITH n, path, reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level) AS branch_level, %(froms_var)s AS froms - RETURN n as filter1 + WITH + n, + path, + reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level) AS branch_level, + %(froms_var)s AS froms, + all(r IN relationships(path) WHERE r.status = "active") AS is_active ORDER BY branch_level DESC, froms[-1] DESC, froms[-2] DESC - LIMIT 1 + WITH head(collect([is_active, n])) AS latest_node_details + WHERE latest_node_details[0] = TRUE + WITH latest_node_details[1] AS n + RETURN n AS filter1 """ % {"froms_var": db.render_list_comprehension(items="relationships(path)", item_name="from")} assert query == expected_query @@ -149,12 +177,13 @@ async def test_build_subquery_order_relationship(db: InfrahubDatabase, default_b expected_query = """ WITH n - MATCH path = (n)-[:IS_RELATED]->(:Relationship { name: $order1_rel_name })-[:IS_RELATED]->(:Node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $order1_name })-[:HAS_VALUE]-(last:AttributeValue) + OPTIONAL MATCH path = (n)-[:IS_RELATED]->(:Relationship { name: $order1_rel_name })-[:IS_RELATED]->(:Node)-[:HAS_ATTRIBUTE]-(:Attribute { name: $order1_name })-[:HAS_VALUE]-(last:AttributeValue) WHERE all(r IN relationships(path) WHERE (PLACEHOLDER)) - WITH last, path, reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level) AS branch_level, %(froms_var)s AS froms - RETURN last.value as order1 + WITH last, reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level) AS branch_level, %(froms_var)s AS froms, all(r IN relationships(path) WHERE r.status = "active") AS is_active ORDER BY branch_level DESC, froms[-1] DESC, froms[-2] DESC - LIMIT 1 + WITH head(collect([is_active, last])) AS latest_node_details + WITH latest_node_details[0] AS is_active, latest_node_details[1] AS last + RETURN CASE WHEN is_active = TRUE THEN last.value ELSE NULL END AS order1 """ % {"froms_var": db.render_list_comprehension(items="relationships(path)", item_name="from")} assert query == expected_query @@ -182,10 +211,17 @@ async def test_build_subquery_filter_attribute_multiple_values( WITH n MATCH path = (n)-[:HAS_ATTRIBUTE]-(i:Attribute { name: $filter1_name })-[:HAS_VALUE]-(av:AttributeValue) WHERE av.value IN $filter1_value AND all(r IN relationships(path) WHERE (PLACEHOLDER)) - WITH n, path, reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level) AS branch_level, %(froms_var)s AS froms - RETURN n as filter1 + 
WITH + n, + path, + reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level) AS branch_level, + %(froms_var)s AS froms, + all(r IN relationships(path) WHERE r.status = "active") AS is_active ORDER BY branch_level DESC, froms[-1] DESC, froms[-2] DESC - LIMIT 1 + WITH head(collect([is_active, n])) AS latest_node_details + WHERE latest_node_details[0] = TRUE + WITH latest_node_details[1] AS n + RETURN n AS filter1 """ % {"froms_var": db.render_list_comprehension(items="relationships(path)", item_name="from")} assert query == expected_query @@ -215,10 +251,17 @@ async def test_build_subquery_filter_relationship_multiple_values( WITH n MATCH path = (n)-[r1:IS_RELATED]->(rl:Relationship { name: $filter1_rel_name })-[r2:IS_RELATED]->(peer:Node)-[:HAS_ATTRIBUTE]-(i:Attribute { name: $filter1_name })-[:HAS_VALUE]-(av:AttributeValue) WHERE av.value IN $filter1_value AND all(r IN relationships(path) WHERE (PLACEHOLDER)) - WITH n, path, reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level) AS branch_level, %(froms_var)s AS froms - RETURN n as filter1 + WITH + n, + path, + reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level) AS branch_level, + %(froms_var)s AS froms, + all(r IN relationships(path) WHERE r.status = "active") AS is_active ORDER BY branch_level DESC, froms[-1] DESC, froms[-2] DESC - LIMIT 1 + WITH head(collect([is_active, n])) AS latest_node_details + WHERE latest_node_details[0] = TRUE + WITH latest_node_details[1] AS n + RETURN n AS filter1 """ % {"froms_var": db.render_list_comprehension(items="relationships(path)", item_name="from")} assert query == expected_query diff --git a/backend/tests/unit/core/test_registry.py b/backend/tests/unit/core/test_registry.py index fe4f7e1c4f..f301c2d9ff 100644 --- a/backend/tests/unit/core/test_registry.py +++ b/backend/tests/unit/core/test_registry.py @@ -1,15 +1,15 @@ -from infrahub.core import get_branch, get_branch_from_registry, registry from infrahub.core.branch import Branch +from infrahub.core.registry import registry from infrahub.core.schema import SchemaRoot, internal_schema from infrahub.core.schema_manager import SchemaManager from infrahub.database import InfrahubDatabase async def test_get_branch_from_registry(db: InfrahubDatabase, default_branch: Branch): - br1 = get_branch_from_registry() + br1 = registry.get_branch_from_registry() assert br1.name == default_branch.name - br2 = get_branch_from_registry(default_branch.name) + br2 = registry.get_branch_from_registry(default_branch.name) assert br2.name == default_branch.name @@ -24,5 +24,5 @@ async def test_get_branch_not_in_registry(db: InfrahubDatabase, default_branch: branch1.update_schema_hash() await branch1.save(db=db) - br1 = await get_branch(branch=branch1.name, db=db) + br1 = await registry.get_branch(branch=branch1.name, db=db) assert br1.name == branch1.name diff --git a/backend/tests/unit/core/test_relationship_manager.py b/backend/tests/unit/core/test_relationship_manager.py index dd3c17145d..aa5398500c 100644 --- a/backend/tests/unit/core/test_relationship_manager.py +++ b/backend/tests/unit/core/test_relationship_manager.py @@ -246,3 +246,51 @@ async def test_many_update( db=db, source_id=tag_red_main.db_id, destination_id=person_jack_main.db_id, max_length=2 ) assert len(paths) == 2 + + +async def test_many_add( + db: InfrahubDatabase, tag_blue_main: Node, tag_red_main: Node, person_jack_main: Node, branch: Branch +): + person_schema = registry.schema.get(name="TestPerson") + rel_schema = 
person_schema.get_relationship("tags") + + relm = await RelationshipManager.init( + db=db, schema=rel_schema, branch=branch, at=Timestamp(), node=person_jack_main + ) + await relm.save(db=db) + + paths = await get_paths_between_nodes( + db=db, source_id=tag_blue_main.db_id, destination_id=person_jack_main.db_id, max_length=2 + ) + assert len(paths) == 1 + + paths = await get_paths_between_nodes( + db=db, source_id=tag_red_main.db_id, destination_id=person_jack_main.db_id, max_length=2 + ) + assert len(paths) == 1 + + await relm.add(db=db, data=tag_blue_main) + await relm.save(db=db) + + paths = await get_paths_between_nodes( + db=db, source_id=tag_blue_main.db_id, destination_id=person_jack_main.db_id, max_length=2 + ) + assert len(paths) == 2 + + paths = await get_paths_between_nodes( + db=db, source_id=tag_red_main.db_id, destination_id=person_jack_main.db_id, max_length=2 + ) + assert len(paths) == 1 + + await relm.add(db=db, data=tag_red_main) + await relm.save(db=db) + + paths = await get_paths_between_nodes( + db=db, source_id=tag_blue_main.db_id, destination_id=person_jack_main.db_id, max_length=2 + ) + assert len(paths) == 2 + + paths = await get_paths_between_nodes( + db=db, source_id=tag_red_main.db_id, destination_id=person_jack_main.db_id, max_length=2 + ) + assert len(paths) == 2 diff --git a/backend/tests/unit/core/test_relationship_query.py b/backend/tests/unit/core/test_relationship_query.py index 7616097c0e..c58883ae1c 100644 --- a/backend/tests/unit/core/test_relationship_query.py +++ b/backend/tests/unit/core/test_relationship_query.py @@ -575,7 +575,7 @@ async def test_query_RelationshipGetByIdentifierQuery( query = await RelationshipGetByIdentifierQuery.init( db=db, branch=branch, identifiers=[], excluded_namespaces=[] ) - assert "identifiers cannot be an empty list" in str(exc.value) + assert "identifiers or full_identifiers is required" in str(exc.value) query = await RelationshipGetByIdentifierQuery.init( db=db, branch=branch, identifiers=["testcar__testperson"], excluded_namespaces=[] diff --git a/backend/tests/unit/core/test_utils.py b/backend/tests/unit/core/test_utils.py new file mode 100644 index 0000000000..64b9069c18 --- /dev/null +++ b/backend/tests/unit/core/test_utils.py @@ -0,0 +1,17 @@ +import ipaddress + +import pytest + +from infrahub.core.utils import convert_ip_to_binary_str + + +@pytest.mark.parametrize( + "input,response", + [ + (ipaddress.ip_network("10.10.0.0/22"), "00001010000010100000000000000000"), + (ipaddress.ip_interface("10.10.22.23/22"), "00001010000010100001011000010111"), + (ipaddress.ip_interface("192.0.22.23/22"), "11000000000000000001011000010111"), + ], +) +def test_convert_ip_to_binary_str(input, response): + assert convert_ip_to_binary_str(obj=input) == response diff --git a/backend/tests/unit/git/conftest.py b/backend/tests/unit/git/conftest.py index b7015e2ef7..0c56b37fb8 100644 --- a/backend/tests/unit/git/conftest.py +++ b/backend/tests/unit/git/conftest.py @@ -8,7 +8,7 @@ import pytest import ujson from git import Repo -from infrahub_sdk import UUIDT, InfrahubClient, InfrahubNode +from infrahub_sdk import UUIDT, Config, InfrahubClient, InfrahubNode from infrahub_sdk import SchemaRoot as ClientSchemaRoot from infrahub_sdk.branch import BranchData from pytest_httpx import HTTPXMock @@ -22,7 +22,7 @@ @pytest.fixture async def client() -> InfrahubClient: - return await InfrahubClient.init(address="http://mock", insert_tracker=True) + return await InfrahubClient.init(config=Config(address="http://mock", insert_tracker=True)) 
@pytest.fixture @@ -79,7 +79,7 @@ def git_upstream_repo_01(git_sources_dir) -> Dict[str, str]: file.extractall(git_sources_dir) file.close() - return dict(name=name, path=str(os.path.join(git_sources_dir, name))) + return {"name": name, "path": str(os.path.join(git_sources_dir, name))} @pytest.fixture @@ -123,7 +123,7 @@ def git_upstream_repo_10(helper, git_sources_dir) -> Dict[str, str]: file.extractall(git_sources_dir) file.close() - return dict(name=name, path=str(os.path.join(git_sources_dir, name))) + return {"name": name, "path": str(os.path.join(git_sources_dir, name))} @pytest.fixture diff --git a/backend/tests/unit/git/test_git_repository.py b/backend/tests/unit/git/test_git_repository.py index aed4475a3c..3183441f46 100644 --- a/backend/tests/unit/git/test_git_repository.py +++ b/backend/tests/unit/git/test_git_repository.py @@ -86,7 +86,7 @@ async def test_new_wrong_location(git_upstream_repo_01, git_repos_dir, tmp_path) with pytest.raises(RepositoryError) as exc: await InfrahubRepository.new(id=UUIDT.new(), name=git_upstream_repo_01["name"], location=str(tmp_path)) - assert "An error occured with GitRepository" in str(exc.value) + assert "An error occurred with GitRepository" in str(exc.value) async def test_new_wrong_branch(git_upstream_repo_01, git_repos_dir, tmp_path): diff --git a/backend/tests/unit/git/test_git_rpc.py b/backend/tests/unit/git/test_git_rpc.py index f1a7179a85..e298e88f03 100644 --- a/backend/tests/unit/git/test_git_rpc.py +++ b/backend/tests/unit/git/test_git_rpc.py @@ -220,7 +220,9 @@ async def test_git_rpc_add_read_only_success(self, git_upstream_repo_01: Dict[st class TestPullReadOnly: def setup_method(self): self.client = AsyncMock(spec=InfrahubClient) + self.task_report = AsyncContextManagerMock() self.services = InfrahubServices(client=self.client) + self.services.task_report = self.task_report self.commit = str(UUIDT()) self.infrahub_branch_name = "read-only-branch" self.repo_id = str(UUIDT()) @@ -271,6 +273,7 @@ async def test_existing_repository(self): client=self.client, ref=self.ref, infrahub_branch_name=self.infrahub_branch_name, + task_report=self.task_report, ) self.mock_repo.import_objects_from_files.assert_awaited_once_with( branch_name=self.infrahub_branch_name, commit=self.commit @@ -290,6 +293,7 @@ async def test_new_repository(self): client=self.client, ref=self.ref, infrahub_branch_name=self.infrahub_branch_name, + task_report=self.task_report, ) self.mock_repo_class.new.assert_awaited_once_with( id=self.repo_id, @@ -298,6 +302,7 @@ async def test_new_repository(self): client=self.client, ref=self.ref, infrahub_branch_name=self.infrahub_branch_name, + task_report=self.task_report, ) self.mock_repo.import_objects_from_files.assert_awaited_once_with( branch_name=self.infrahub_branch_name, commit=self.commit diff --git a/backend/tests/unit/graphql/mutations/test_branch.py b/backend/tests/unit/graphql/mutations/test_branch.py index aa0bffa2e3..5e94511773 100644 --- a/backend/tests/unit/graphql/mutations/test_branch.py +++ b/backend/tests/unit/graphql/mutations/test_branch.py @@ -79,7 +79,7 @@ async def test_branch_create( assert result.errors is None assert result.data assert result.data["BranchCreate"]["ok"] is True - assert len(result.data["BranchCreate"]["object"]["id"]) == 36 # lenght of an UUID + assert len(result.data["BranchCreate"]["object"]["id"]) == 36 # length of an UUID assert result.data["BranchCreate"]["object"]["name"] == "branch2" assert not result.data["BranchCreate"]["object"]["description"] assert 
result.data["BranchCreate"]["object"]["sync_with_git"] is False @@ -142,7 +142,7 @@ async def test_branch_create( assert result.errors is None assert result.data assert result.data["BranchCreate"]["ok"] is True - assert len(result.data["BranchCreate"]["object"]["id"]) == 36 # lenght of an UUID + assert len(result.data["BranchCreate"]["object"]["id"]) == 36 # length of an UUID assert result.data["BranchCreate"]["object"]["name"] == "branch3" assert result.data["BranchCreate"]["object"]["description"] == "my description" assert result.data["BranchCreate"]["object"]["sync_with_git"] is True @@ -324,7 +324,7 @@ async def test_branch_create_with_repositories( assert result.errors is None assert result.data assert result.data["BranchCreate"]["ok"] is True - assert len(result.data["BranchCreate"]["object"]["id"]) == 36 # lenght of an UUID + assert len(result.data["BranchCreate"]["object"]["id"]) == 36 # length of an UUID assert await Branch.get_by_name(db=db, name="branch2") diff --git a/backend/tests/unit/graphql/mutations/test_ipam.py b/backend/tests/unit/graphql/mutations/test_ipam.py new file mode 100644 index 0000000000..bf9401d8c8 --- /dev/null +++ b/backend/tests/unit/graphql/mutations/test_ipam.py @@ -0,0 +1,1127 @@ +import ipaddress + +from graphql import graphql + +from infrahub.core import registry +from infrahub.core.branch import Branch +from infrahub.core.constants import InfrahubKind +from infrahub.core.node import Node +from infrahub.core.schema_manager import SchemaBranch +from infrahub.database import InfrahubDatabase +from infrahub.graphql import prepare_graphql_params + +CREATE_IPPREFIX = """ +mutation CreatePrefix($prefix: String!) { + IpamIPPrefixCreate( + data: { + prefix: { + value: $prefix + } + } + ) { + ok + object { + id + } + } +} +""" + +UPDATE_IPPREFIX = """ +mutation UpdatePrefix($id: String!, $prefix: String!, $description: String!) { + IpamIPPrefixUpdate( + data: { + id: $id + prefix: { + value: $prefix + } + description: { + value: $description + } + } + ) { + ok + object { + id + } + } +} +""" + +UPSERT_IPPREFIX = """ +mutation UpsertPrefix($id: String!, $prefix: String!, $description: String!) { + IpamIPPrefixUpsert( + data: { + id: $id + prefix: { + value: $prefix + } + description: { + value: $description + } + } + ) { + ok + object { + id + description { + value + } + } + } +} +""" + +DELETE_IPPREFIX = """ +mutation DeletePrefix($id: String!) { + IpamIPPrefixDelete( + data: { + id: $id + } + ) { + ok + } +} +""" + +GET_IPPREFIX = """ +query GetPrefix($prefix: String!) { + IpamIPPrefix(prefix__value: $prefix) { + edges { + node { + id + prefix { + value + } + is_top_level { + value + } + parent { + node { + id + prefix { + value + } + } + } + children { + edges { + node { + id + prefix { + value + } + } + } + } + ip_addresses { + edges { + node { + id + address { + value + } + } + } + } + } + } + } +} +""" + +CREATE_IPADDRESS = """ +mutation CreateAddress($address: String!) { + IpamIPAddressCreate( + data: { + address: { + value: $address + } + } + ) { + ok + object { + id + } + } +} +""" + +UPDATE_IPADDRESS = """ +mutation UpdateAddress($id: String!, $address: String!, $description: String!) { + IpamIPAddressUpdate( + data: { + id: $id + address: { + value: $address + } + description: { + value: $description + } + } + ) { + ok + object { + id + } + } +} +""" + +UPSERT_IPADDRESS = """ +mutation UpsertAddress($id: String!, $address: String!, $description: String!) 
{ + IpamIPAddressUpsert( + data: { + id: $id + address: { + value: $address + } + description: { + value: $description + } + } + ) { + ok + object { + id + description { + value + } + } + } +} +""" + +GET_IPADDRESS = """ +query GetAddress($address: String!) { + IpamIPAddress(address__value: $address) { + edges { + node { + id + address { + value + } + ip_prefix { + node { + id + prefix { + value + } + } + } + } + } + } +} +""" + +DELETE_IPNAMESPACE = """ +mutation NamespaceDelete($namespace_id: String!) { + IpamNamespaceDelete(data: {id: $namespace_id}) { + ok + } +} +""" + + +async def test_protected_default_ipnamespace(db: InfrahubDatabase, default_branch: Branch, default_ipnamespace: Node): + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=DELETE_IPNAMESPACE, + context_value=gql_params.context, + variable_values={"namespace_id": registry.default_ipnamespace}, + ) + + assert result.errors + assert result.errors[0].message == "Cannot delete default IPAM namespace" + + +async def test_delete_regular_ipnamespace(db: InfrahubDatabase, default_branch: Branch, default_ipnamespace: Node): + ns1 = await Node.init(db=db, schema=InfrahubKind.NAMESPACE) + await ns1.new(db=db, name="ns1") + await ns1.save(db=db) + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=DELETE_IPNAMESPACE, + context_value=gql_params.context, + variable_values={"namespace_id": ns1.id}, + ) + + assert not result.errors + assert result.data["IpamNamespaceDelete"]["ok"] + + +async def test_ipprefix_create( + db: InfrahubDatabase, + default_branch: Branch, + default_ipnamespace: Node, + register_core_models_schema: SchemaBranch, + register_ipam_schema: SchemaBranch, +): + """Make sure prefix can be created and parent/children relationships are set.""" + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + + supernet = ipaddress.ip_network("2001:db8::/32") + result = await graphql( + schema=gql_params.schema, + source=CREATE_IPPREFIX, + context_value=gql_params.context, + variable_values={"prefix": str(supernet)}, + ) + + assert not result.errors + assert result.data["IpamIPPrefixCreate"]["ok"] + assert result.data["IpamIPPrefixCreate"]["object"]["id"] + + ip_prefix = await registry.manager.get_one(id=result.data["IpamIPPrefixCreate"]["object"]["id"], db=db) + ip_namespace = await ip_prefix.ip_namespace.get_peer(db=db) + assert ip_namespace.id == registry.default_ipnamespace + + result = await graphql( + schema=gql_params.schema, + source=GET_IPPREFIX, + context_value=gql_params.context, + variable_values={"prefix": str(supernet)}, + ) + + assert not result.errors + assert len(result.data["IpamIPPrefix"]["edges"]) == 1 + assert not result.data["IpamIPPrefix"]["edges"][0]["node"]["parent"]["node"] + assert result.data["IpamIPPrefix"]["edges"][0]["node"]["prefix"]["value"] == str(supernet) + assert result.data["IpamIPPrefix"]["edges"][0]["node"]["is_top_level"]["value"] + + networks = list(supernet.subnets(new_prefix=36)) + for n in networks: + result = await graphql( + schema=gql_params.schema, + source=CREATE_IPPREFIX, + context_value=gql_params.context, + variable_values={"prefix": str(n)}, + ) + + assert not result.errors + assert result.data["IpamIPPrefixCreate"]["ok"] + + result = await graphql( + schema=gql_params.schema, + source=GET_IPPREFIX, + 
        context_value=gql_params.context,
+        variable_values={"prefix": str(networks[0])},
+    )
+
+    assert not result.errors
+    assert len(result.data["IpamIPPrefix"]["edges"]) == 1
+    assert result.data["IpamIPPrefix"]["edges"][0]["node"]["parent"]["node"]["prefix"]["value"] == str(supernet)
+    assert not result.data["IpamIPPrefix"]["edges"][0]["node"]["is_top_level"]["value"]
+
+
+async def test_ipprefix_create_with_ipnamespace(
+    db: InfrahubDatabase,
+    default_branch: Branch,
+    default_ipnamespace: Node,
+    register_core_models_schema: SchemaBranch,
+    register_ipam_schema: SchemaBranch,
+):
+    ns = await Node.init(db=db, schema=InfrahubKind.NAMESPACE, branch=default_branch)
+    await ns.new(db=db, name="ns1")
+    await ns.save(db=db)
+
+    query = """
+    mutation CreatePrefix($prefix: String!, $namespace: String!) {
+        IpamIPPrefixCreate(
+            data: {
+                prefix: {
+                    value: $prefix
+                }
+                ip_namespace: {
+                    id: $namespace
+                }
+            }
+        ) {
+            ok
+            object {
+                id
+            }
+        }
+    }
+    """
+
+    gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+
+    supernet = ipaddress.ip_network("2001:db8::/32")
+    result = await graphql(
+        schema=gql_params.schema,
+        source=query,
+        context_value=gql_params.context,
+        variable_values={"prefix": str(supernet), "namespace": ns.id},
+    )
+
+    assert not result.errors
+    assert result.data["IpamIPPrefixCreate"]["ok"]
+    assert result.data["IpamIPPrefixCreate"]["object"]["id"]
+
+    ip_prefix = await registry.manager.get_one(id=result.data["IpamIPPrefixCreate"]["object"]["id"], db=db)
+    ip_namespace = await ip_prefix.ip_namespace.get_peer(db=db)
+    assert ip_namespace.id == ns.id
+
+
+async def test_ipprefix_create_reverse(
+    db: InfrahubDatabase,
+    default_branch: Branch,
+    default_ipnamespace: Node,
+    register_core_models_schema: SchemaBranch,
+    register_ipam_schema: SchemaBranch,
+):
+    """Make sure parent/children relationships are set when creating a parent after a child."""
+    gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+
+    subnet = ipaddress.ip_network("2001:db8::/48")
+    result = await graphql(
+        schema=gql_params.schema,
+        source=CREATE_IPPREFIX,
+        context_value=gql_params.context,
+        variable_values={"prefix": str(subnet)},
+    )
+
+    assert not result.errors
+    assert result.data["IpamIPPrefixCreate"]["ok"]
+
+    supernet = ipaddress.ip_network("2001:db8::/32")
+    result = await graphql(
+        schema=gql_params.schema,
+        source=CREATE_IPPREFIX,
+        context_value=gql_params.context,
+        variable_values={"prefix": str(supernet)},
+    )
+
+    assert not result.errors
+    assert result.data["IpamIPPrefixCreate"]["ok"]
+
+    result = await graphql(
+        schema=gql_params.schema,
+        source=GET_IPPREFIX,
+        context_value=gql_params.context,
+        variable_values={"prefix": str(subnet)},
+    )
+
+    assert not result.errors
+    assert len(result.data["IpamIPPrefix"]["edges"]) == 1
+    assert result.data["IpamIPPrefix"]["edges"][0]["node"]["parent"]["node"]["prefix"]["value"] == str(supernet)
+    assert not result.data["IpamIPPrefix"]["edges"][0]["node"]["is_top_level"]["value"]
+
+
+async def test_ipprefix_update(
+    db: InfrahubDatabase,
+    default_branch: Branch,
+    default_ipnamespace: Node,
+    register_core_models_schema: SchemaBranch,
+    register_ipam_schema: SchemaBranch,
+):
+    """Make sure a prefix can be updated."""
+    gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+
+    subnet = ipaddress.ip_network("2001:db8::/48")
+    result = await graphql(
+        schema=gql_params.schema,
+        source=CREATE_IPPREFIX,
+
context_value=gql_params.context, + variable_values={"prefix": str(subnet)}, + ) + + assert not result.errors + assert result.data["IpamIPPrefixCreate"]["ok"] + + subnet_id = result.data["IpamIPPrefixCreate"]["object"]["id"] + result = await graphql( + schema=gql_params.schema, + source=UPDATE_IPPREFIX, + context_value=gql_params.context, + variable_values={"id": subnet_id, "prefix": str(subnet), "description": "RFC 3849"}, + ) + + assert not result.errors + assert result.data["IpamIPPrefixUpdate"]["ok"] + + +async def test_ipprefix_update_within_namespace( + db: InfrahubDatabase, + default_branch: Branch, + default_ipnamespace: Node, + register_core_models_schema: SchemaBranch, + register_ipam_schema: SchemaBranch, +): + """Make sure a prefix can be updated within a namespace.""" + test_ns = await Node.init(db=db, schema=InfrahubKind.NAMESPACE) + await test_ns.new(db=db, name="test") + await test_ns.save(db=db) + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + + subnet = ipaddress.ip_network("2001:db8::/48") + result = await graphql( + schema=gql_params.schema, + source=""" + mutation CreatePrefixInNamespace($prefix: String!, $namespace: String!) { + IpamIPPrefixCreate( + data: { + prefix: { + value: $prefix + } + ip_namespace: { + id: $namespace + } + } + ) { + ok + object { + id + ip_namespace { + node { + name { + value + } + } + } + } + } + } + """, + context_value=gql_params.context, + variable_values={"prefix": str(subnet), "namespace": test_ns.id}, + ) + + assert not result.errors + assert result.data["IpamIPPrefixCreate"]["ok"] + assert result.data["IpamIPPrefixCreate"]["object"]["ip_namespace"]["node"]["name"]["value"] == test_ns.name.value + + subnet_id = result.data["IpamIPPrefixCreate"]["object"]["id"] + result = await graphql( + schema=gql_params.schema, + source=""" + mutation UpdatePrefixInNamespace($id: String!) 
{ + IpamIPPrefixUpdate( + data: { + id: $id + description: { + value: "Do not change namespace" + } + } + ) { + ok + object { + id + ip_namespace { + node { + name { + value + } + } + } + } + } + } + """, + context_value=gql_params.context, + variable_values={"id": subnet_id, "namespace": test_ns.name.value}, + ) + + assert not result.errors + assert result.data["IpamIPPrefixUpdate"]["ok"] + assert result.data["IpamIPPrefixUpdate"]["object"]["ip_namespace"]["node"]["name"]["value"] == test_ns.name.value + + +async def test_ipprefix_upsert( + db: InfrahubDatabase, + default_branch: Branch, + default_ipnamespace: Node, + register_core_models_schema: SchemaBranch, + register_ipam_schema: SchemaBranch, +): + """Make sure a prefix can be upserted.""" + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + + subnet = ipaddress.ip_network("2001:db8::/48") + result = await graphql( + schema=gql_params.schema, + source=UPSERT_IPPREFIX, + context_value=gql_params.context, + variable_values={"id": "", "prefix": str(subnet), "description": ""}, + ) + + assert not result.errors + assert result.data["IpamIPPrefixUpsert"]["ok"] + assert not result.data["IpamIPPrefixUpsert"]["object"]["description"]["value"] + + subnet_id = result.data["IpamIPPrefixUpsert"]["object"]["id"] + result = await graphql( + schema=gql_params.schema, + source=UPSERT_IPPREFIX, + context_value=gql_params.context, + variable_values={"id": subnet_id, "prefix": str(subnet), "description": "RFC 3849"}, + ) + + assert not result.errors + assert result.data["IpamIPPrefixUpsert"]["ok"] + assert result.data["IpamIPPrefixUpsert"]["object"]["description"]["value"] == "RFC 3849" + + +async def test_ipprefix_delete( + db: InfrahubDatabase, + default_branch: Branch, + default_ipnamespace: Node, + register_core_models_schema: SchemaBranch, + register_ipam_schema: SchemaBranch, +): + """Make sure deleting a prefix relocates its children.""" + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + + networks = [ + ipaddress.ip_network("2001:db8::/32"), + ipaddress.ip_network("2001:db8::/48"), + ipaddress.ip_network("2001:db8::/56"), + ipaddress.ip_network("2001:db8::/64"), + ] + network_nodes = [] + for n in networks: + result = await graphql( + schema=gql_params.schema, + source=CREATE_IPPREFIX, + context_value=gql_params.context, + variable_values={"prefix": str(n)}, + ) + + assert not result.errors + assert result.data["IpamIPPrefixCreate"]["ok"] + network_nodes.append(result.data["IpamIPPrefixCreate"]["object"]["id"]) + + result = await graphql( + schema=gql_params.schema, + source=DELETE_IPPREFIX, + context_value=gql_params.context, + variable_values={"id": network_nodes[0]}, + ) + + assert not result.errors + assert result.data["IpamIPPrefixDelete"]["ok"] + + result = await graphql( + schema=gql_params.schema, + source=GET_IPPREFIX, + context_value=gql_params.context, + variable_values={"prefix": str(networks[1])}, + ) + + # Removing the parent prefix means this prefix' parent should now be null + assert not result.errors + assert len(result.data["IpamIPPrefix"]["edges"]) == 1 + assert not result.data["IpamIPPrefix"]["edges"][0]["node"]["parent"]["node"] + assert result.data["IpamIPPrefix"]["edges"][0]["node"]["is_top_level"]["value"] is True + + result = await graphql( + schema=gql_params.schema, + source=DELETE_IPPREFIX, + context_value=gql_params.context, + variable_values={"id": network_nodes[2]}, + ) + + assert not result.errors + assert 
result.data["IpamIPPrefixDelete"]["ok"] + + result = await graphql( + schema=gql_params.schema, + source=GET_IPPREFIX, + context_value=gql_params.context, + variable_values={"prefix": str(networks[3])}, + ) + + # Removing a node in the middle should relocate children prefixes to a new parent prefix + assert not result.errors + assert len(result.data["IpamIPPrefix"]["edges"]) == 1 + assert result.data["IpamIPPrefix"]["edges"][0]["node"]["parent"]["node"]["id"] == network_nodes[1] + + +async def test_ipaddress_create( + db: InfrahubDatabase, + default_branch: Branch, + default_ipnamespace: Node, + register_core_models_schema: SchemaBranch, + register_ipam_schema: SchemaBranch, +): + """Make sure IP address is properly created and nested under a subnet.""" + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + + # Single IP address, no IP prefix + address = ipaddress.ip_interface("192.0.2.1/24") + result = await graphql( + schema=gql_params.schema, + source=CREATE_IPADDRESS, + context_value=gql_params.context, + variable_values={"address": str(address)}, + ) + + assert not result.errors + assert result.data["IpamIPAddressCreate"]["ok"] + assert result.data["IpamIPAddressCreate"]["object"]["id"] + + ip = await registry.manager.get_one(id=result.data["IpamIPAddressCreate"]["object"]["id"], db=db) + ip_namespace = await ip.ip_namespace.get_peer(db=db) + assert ip_namespace.id == registry.default_ipnamespace + + result = await graphql( + schema=gql_params.schema, + source=GET_IPADDRESS, + context_value=gql_params.context, + variable_values={"address": str(address)}, + ) + + assert not result.errors + assert len(result.data["IpamIPAddress"]["edges"]) == 1 + assert not result.data["IpamIPAddress"]["edges"][0]["node"]["ip_prefix"]["node"] + assert result.data["IpamIPAddress"]["edges"][0]["node"]["address"]["value"] == str(address) + + # Single IP address under an IP prefix + supernet = ipaddress.ip_network("2001:db8::/48") + result = await graphql( + schema=gql_params.schema, + source=CREATE_IPPREFIX, + context_value=gql_params.context, + variable_values={"prefix": str(supernet)}, + ) + + assert not result.errors + assert result.data["IpamIPPrefixCreate"]["ok"] + + address = ipaddress.ip_interface("2001:db8::1/64") + result = await graphql( + schema=gql_params.schema, + source=CREATE_IPADDRESS, + context_value=gql_params.context, + variable_values={"address": str(address)}, + ) + + assert not result.errors + assert result.data["IpamIPAddressCreate"]["ok"] + + result = await graphql( + schema=gql_params.schema, + source=GET_IPADDRESS, + context_value=gql_params.context, + variable_values={"address": str(address)}, + ) + + assert not result.errors + assert len(result.data["IpamIPAddress"]["edges"]) == 1 + assert result.data["IpamIPAddress"]["edges"][0]["node"]["address"]["value"] == str(address) + assert result.data["IpamIPAddress"]["edges"][0]["node"]["ip_prefix"]["node"]["prefix"]["value"] == str(supernet) + + +async def test_ipaddress_update( + db: InfrahubDatabase, + default_branch: Branch, + default_ipnamespace: Node, + register_core_models_schema: SchemaBranch, + register_ipam_schema: SchemaBranch, +): + """Make sure an IP address can be updated.""" + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + + address = ipaddress.ip_interface("192.0.2.1/24") + result = await graphql( + schema=gql_params.schema, + source=CREATE_IPADDRESS, + context_value=gql_params.context, + variable_values={"address": 
str(address)}, + ) + + assert not result.errors + assert result.data["IpamIPAddressCreate"]["ok"] + + address_id = result.data["IpamIPAddressCreate"]["object"]["id"] + result = await graphql( + schema=gql_params.schema, + source=UPDATE_IPADDRESS, + context_value=gql_params.context, + variable_values={"id": address_id, "address": str(address), "description": "RFC 5735"}, + ) + + assert not result.errors + assert result.data["IpamIPAddressUpdate"]["ok"] + + +async def test_ipaddress_update_within_namespace( + db: InfrahubDatabase, + default_branch: Branch, + default_ipnamespace: Node, + register_core_models_schema: SchemaBranch, + register_ipam_schema: SchemaBranch, +): + """Make sure an IP address can be updated within a namespace.""" + test_ns = await Node.init(db=db, schema=InfrahubKind.NAMESPACE) + await test_ns.new(db=db, name="test") + await test_ns.save(db=db) + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + + address = ipaddress.ip_interface("192.0.2.1/24") + result = await graphql( + schema=gql_params.schema, + source=""" + mutation CreateAddressInNamespace($address: String!, $namespace: String!) { + IpamIPAddressCreate( + data: { + address: { + value: $address + } + ip_namespace: { + id: $namespace + } + } + ) { + ok + object { + id + ip_namespace { + node { + name { + value + } + } + } + } + } + } + """, + context_value=gql_params.context, + variable_values={"address": str(address), "namespace": test_ns.id}, + ) + + assert not result.errors + assert result.data["IpamIPAddressCreate"]["ok"] + assert result.data["IpamIPAddressCreate"]["object"]["ip_namespace"]["node"]["name"]["value"] == test_ns.name.value + + address_id = result.data["IpamIPAddressCreate"]["object"]["id"] + result = await graphql( + schema=gql_params.schema, + source=""" + mutation UpdateAddressInNamespace($id: String!) 
{
+            IpamIPAddressUpdate(
+                data: {
+                    id: $id
+                    description: {
+                        value: "Do not change namespace"
+                    }
+                }
+            ) {
+                ok
+                object {
+                    id
+                    ip_namespace {
+                        node {
+                            name {
+                                value
+                            }
+                        }
+                    }
+                }
+            }
+        }
+        """,
+        context_value=gql_params.context,
+        variable_values={"id": address_id, "namespace": test_ns.name.value},
+    )
+
+    assert not result.errors
+    assert result.data["IpamIPAddressUpdate"]["ok"]
+    assert result.data["IpamIPAddressUpdate"]["object"]["ip_namespace"]["node"]["name"]["value"] == test_ns.name.value
+
+
+async def test_ipaddress_upsert(
+    db: InfrahubDatabase,
+    default_branch: Branch,
+    default_ipnamespace: Node,
+    register_core_models_schema: SchemaBranch,
+    register_ipam_schema: SchemaBranch,
+):
+    """Make sure an IP address can be upserted."""
+    gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+
+    address = ipaddress.ip_interface("192.0.2.1/24")
+    result = await graphql(
+        schema=gql_params.schema,
+        source=UPSERT_IPADDRESS,
+        context_value=gql_params.context,
+        variable_values={"id": "", "address": str(address), "description": ""},
+    )
+
+    assert not result.errors
+    assert result.data["IpamIPAddressUpsert"]["ok"]
+    assert not result.data["IpamIPAddressUpsert"]["object"]["description"]["value"]
+
+    address_id = result.data["IpamIPAddressUpsert"]["object"]["id"]
+    result = await graphql(
+        schema=gql_params.schema,
+        source=UPSERT_IPADDRESS,
+        context_value=gql_params.context,
+        variable_values={"id": address_id, "address": str(address), "description": "RFC 5735"},
+    )
+
+    assert not result.errors
+    assert result.data["IpamIPAddressUpsert"]["ok"]
+    assert result.data["IpamIPAddressUpsert"]["object"]["description"]["value"] == "RFC 5735"
+
+
+async def test_ipaddress_change_ipprefix(
+    db: InfrahubDatabase,
+    default_branch: Branch,
+    default_ipnamespace: Node,
+    register_core_models_schema: SchemaBranch,
+    register_ipam_schema: SchemaBranch,
+):
+    """Make sure the relationship between an address and its prefix is properly managed."""
+    gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch)
+
+    address = ipaddress.ip_interface("2001:db8::1/64")
+    result = await graphql(
+        schema=gql_params.schema,
+        source=CREATE_IPADDRESS,
+        context_value=gql_params.context,
+        variable_values={"address": str(address)},
+    )
+
+    assert not result.errors
+    assert result.data["IpamIPAddressCreate"]["ok"]
+
+    # Creating a subnet which contains the previously created IP should set relationships
+    supernet = ipaddress.ip_network("2001:db8::/48")
+    result = await graphql(
+        schema=gql_params.schema,
+        source=CREATE_IPPREFIX,
+        context_value=gql_params.context,
+        variable_values={"prefix": str(supernet)},
+    )
+
+    assert not result.errors
+    assert result.data["IpamIPPrefixCreate"]["ok"]
+
+    result = await graphql(
+        schema=gql_params.schema,
+        source=GET_IPADDRESS,
+        context_value=gql_params.context,
+        variable_values={"address": str(address)},
+    )
+
+    assert not result.errors
+    assert len(result.data["IpamIPAddress"]["edges"]) == 1
+    assert result.data["IpamIPAddress"]["edges"][0]["node"]["ip_prefix"]["node"]["prefix"]["value"] == str(supernet)
+
+    # Check that the prefix now has an IP address
+    result = await graphql(
+        schema=gql_params.schema,
+        source=GET_IPPREFIX,
+        context_value=gql_params.context,
+        variable_values={"prefix": str(supernet)},
+    )
+
+    assert not result.errors
+    assert len(result.data["IpamIPPrefix"]["edges"]) == 1
+    assert len(result.data["IpamIPPrefix"]["edges"][0]["node"]["ip_addresses"]["edges"]) == 1
+    assert result.data["IpamIPPrefix"]["edges"][0]["node"]["ip_addresses"]["edges"][0]["node"]["address"][
+        "value"
+    ] == str(address)
+
+    # Create a subnet of the original prefix which contains the address; it should relocate the address
+    subnet = ipaddress.ip_network("2001:db8::/64")
+    result = await graphql(
+        schema=gql_params.schema,
+        source=CREATE_IPPREFIX,
+        context_value=gql_params.context,
+        variable_values={"prefix": str(subnet)},
+    )
+
+    result = await graphql(
+        schema=gql_params.schema,
+        source=GET_IPADDRESS,
+        context_value=gql_params.context,
+        variable_values={"address": str(address)},
+    )
+
+    assert not result.errors
+    assert len(result.data["IpamIPAddress"]["edges"]) == 1
+    assert result.data["IpamIPAddress"]["edges"][0]["node"]["ip_prefix"]["node"]["prefix"]["value"] == str(subnet)
+
+    # Check that the subnet has the IP address now
+    result = await graphql(
+        schema=gql_params.schema,
+        source=GET_IPPREFIX,
+        context_value=gql_params.context,
+        variable_values={"prefix": str(subnet)},
+    )
+
+    assert not result.errors
+    assert len(result.data["IpamIPPrefix"]["edges"]) == 1
+    assert result.data["IpamIPPrefix"]["edges"][0]["node"]["ip_addresses"]["edges"][0]["node"]["address"][
+        "value"
+    ] == str(address)
+
+    # Check that the supernet does not have an IP address anymore
+    result = await graphql(
+        schema=gql_params.schema,
+        source=GET_IPPREFIX,
+        context_value=gql_params.context,
+        variable_values={"prefix": str(supernet)},
+    )
+
+    assert not result.errors
+    assert len(result.data["IpamIPPrefix"]["edges"]) == 1
+    assert not result.data["IpamIPPrefix"]["edges"][0]["node"]["ip_addresses"]["edges"]
+
+    # Create a less specific subnet; the IP address should not be relocated
+    middle = ipaddress.ip_network("2001:db8::/56")
+    result = await graphql(
+        schema=gql_params.schema,
+        source=CREATE_IPPREFIX,
+        context_value=gql_params.context,
+        variable_values={"prefix": str(middle)},
+    )
+
+    result = await graphql(
+        schema=gql_params.schema,
+        source=GET_IPADDRESS,
+        context_value=gql_params.context,
+        variable_values={"address": str(address)},
+    )
+
+    assert not result.errors
+    assert len(result.data["IpamIPAddress"]["edges"]) == 1
+    assert result.data["IpamIPAddress"]["edges"][0]["node"]["ip_prefix"]["node"]["prefix"]["value"] == str(subnet)
+
+
+GET_PREFIX_HIERARCHY = """
+query GetPrefixHierarchy($prefix: String!)
{ + IpamIPPrefix(prefix__value: $prefix) { + edges { + node { + id + prefix { value } + ancestors { edges { node { id } } } + parent { node { id } } + children { edges { node { id } } } + descendants { edges { node { id } } } + } + } + } +} +""" + + +async def test_prefix_ancestors_descendants( + db: InfrahubDatabase, + default_branch: Branch, + default_ipnamespace: Node, + register_core_models_schema: SchemaBranch, + register_ipam_schema: SchemaBranch, +): + prefix_schema = registry.schema.get_node_schema(name="IpamIPPrefix", branch=default_branch) + + ns1 = await Node.init(db=db, schema=InfrahubKind.NAMESPACE) + await ns1.new(db=db, name="ns1") + await ns1.save(db=db) + net8 = await Node.init(db=db, schema=prefix_schema) + await net8.new(db=db, prefix="10.0.0.0/8", ip_namespace=ns1) + await net8.save(db=db) + net10 = await Node.init(db=db, schema=prefix_schema) + await net10.new(db=db, prefix="10.0.0.0/10", parent=net8, ip_namespace=ns1) + await net10.save(db=db) + net12 = await Node.init(db=db, schema=prefix_schema) + await net12.new(db=db, prefix="10.0.0.0/12", parent=net10, ip_namespace=ns1) + await net12.save(db=db) + net14 = await Node.init(db=db, schema=prefix_schema) + await net14.new(db=db, prefix="10.0.0.0/14", parent=net12, ip_namespace=ns1) + await net14.save(db=db) + net16 = await Node.init(db=db, schema=prefix_schema) + await net16.new(db=db, prefix="10.0.0.0/16", parent=net14, ip_namespace=ns1) + await net16.save(db=db) + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + check_before = await graphql( + schema=gql_params.schema, + source=GET_PREFIX_HIERARCHY, + context_value=gql_params.context, + variable_values={"prefix": str(net12.prefix.value)}, + ) + assert not check_before.errors + assert len(check_before.data["IpamIPPrefix"]["edges"]) == 1 + prefix_details = check_before.data["IpamIPPrefix"]["edges"][0]["node"] + assert prefix_details["id"] == net12.id + assert prefix_details["prefix"]["value"] == net12.prefix.value + ancestors = prefix_details["ancestors"]["edges"] + assert len(ancestors) == 2 + assert {"node": {"id": net8.id}} in ancestors + assert {"node": {"id": net10.id}} in ancestors + parent = prefix_details["parent"] + assert parent == {"node": {"id": net10.id}} + children = prefix_details["children"]["edges"] + assert len(children) == 1 + assert {"node": {"id": net14.id}} in children + descendants = prefix_details["descendants"]["edges"] + assert len(descendants) == 2 + assert {"node": {"id": net14.id}} in descendants + assert {"node": {"id": net16.id}} in descendants + + delete_middle = await graphql( + schema=gql_params.schema, + source=DELETE_IPPREFIX, + context_value=gql_params.context, + variable_values={"id": str(net12.id)}, + ) + assert not delete_middle.errors + assert delete_middle.data["IpamIPPrefixDelete"]["ok"] is True + + check_previous_parent = await graphql( + schema=gql_params.schema, + source=GET_PREFIX_HIERARCHY, + context_value=gql_params.context, + variable_values={"prefix": str(net10.prefix.value)}, + ) + + assert not check_previous_parent.errors + assert len(check_previous_parent.data["IpamIPPrefix"]["edges"]) == 1 + prefix_details = check_previous_parent.data["IpamIPPrefix"]["edges"][0]["node"] + assert prefix_details["id"] == net10.id + assert prefix_details["prefix"]["value"] == net10.prefix.value + ancestors = prefix_details["ancestors"]["edges"] + assert ancestors == [{"node": {"id": net8.id}}] + parent = prefix_details["parent"] + assert parent == {"node": {"id": net8.id}} + children = 
prefix_details["children"]["edges"] + assert children == [{"node": {"id": net14.id}}] + descendants = prefix_details["descendants"]["edges"] + assert len(descendants) == 2 + assert {"node": {"id": net14.id}} in descendants + assert {"node": {"id": net16.id}} in descendants + + check_previous_child = await graphql( + schema=gql_params.schema, + source=GET_PREFIX_HIERARCHY, + context_value=gql_params.context, + variable_values={"prefix": str(net14.prefix.value)}, + ) + + assert not check_previous_child.errors + assert len(check_previous_child.data["IpamIPPrefix"]["edges"]) == 1 + prefix_details = check_previous_child.data["IpamIPPrefix"]["edges"][0]["node"] + assert prefix_details["id"] == net14.id + assert prefix_details["prefix"]["value"] == net14.prefix.value + ancestors = prefix_details["ancestors"]["edges"] + assert len(ancestors) == 2 + assert {"node": {"id": net8.id}} in ancestors + assert {"node": {"id": net10.id}} in ancestors + parent = prefix_details["parent"] + assert parent == {"node": {"id": net10.id}} + children = prefix_details["children"]["edges"] + assert children == [{"node": {"id": net16.id}}] + descendants = prefix_details["descendants"]["edges"] + assert descendants == [{"node": {"id": net16.id}}] diff --git a/backend/tests/unit/graphql/mutations/test_proposed_change.py b/backend/tests/unit/graphql/mutations/test_proposed_change.py index fb86c9d014..136fd5c983 100644 --- a/backend/tests/unit/graphql/mutations/test_proposed_change.py +++ b/backend/tests/unit/graphql/mutations/test_proposed_change.py @@ -130,6 +130,8 @@ async def test_trigger_proposed_change(db: InfrahubDatabase, register_core_model all_result = await graphql_mutation( query=RUN_CHECK, db=db, variables={"proposed_change": proposed_change.id}, service=service ) + assert all_result.data + assert not all_result.errors artifact_recorder = BusRecorder() service = InfrahubServices(database=db, message_bus=artifact_recorder) @@ -153,8 +155,6 @@ async def test_trigger_proposed_change(db: InfrahubDatabase, register_core_model service=service, ) - assert all_result.data - assert not all_result.errors assert len(all_recorder.messages) == 1 assert isinstance(all_recorder.messages[0], messages.RequestProposedChangePipeline) message = all_recorder.messages[0] diff --git a/backend/tests/unit/graphql/profiles/__init__.py b/backend/tests/unit/graphql/profiles/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/tests/unit/graphql/profiles/test_mutation_create.py b/backend/tests/unit/graphql/profiles/test_mutation_create.py new file mode 100644 index 0000000000..8e936d7949 --- /dev/null +++ b/backend/tests/unit/graphql/profiles/test_mutation_create.py @@ -0,0 +1,39 @@ +from graphql import graphql + +from infrahub.core.manager import NodeManager +from infrahub.database import InfrahubDatabase +from infrahub.graphql import prepare_graphql_params + + +async def test_create_profile(db: InfrahubDatabase, default_branch, car_person_schema): + query = """ + mutation { + ProfileTestPersonCreate(data: { + profile_name: { value: "profile1" }, + profile_priority: { value: 1000 }, + height: { value: 182 }, + }) { + ok + object { + id + } + } + } + """ + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data["ProfileTestPersonCreate"]["ok"] is True + + person_id = 
result.data["ProfileTestPersonCreate"]["object"]["id"] + assert len(person_id) == 36 # length of an UUID + + profile = await NodeManager.get_one(db=db, id=person_id) + assert profile.height.value == 182 diff --git a/backend/tests/unit/graphql/profiles/test_query.py b/backend/tests/unit/graphql/profiles/test_query.py new file mode 100644 index 0000000000..22a4c3ceb8 --- /dev/null +++ b/backend/tests/unit/graphql/profiles/test_query.py @@ -0,0 +1,372 @@ +import pytest +from graphql import graphql + +from infrahub.core import registry +from infrahub.core.branch import Branch +from infrahub.core.constants import BranchSupportType +from infrahub.core.manager import NodeManager +from infrahub.core.node import Node +from infrahub.core.schema import NodeSchema +from infrahub.database import InfrahubDatabase +from infrahub.graphql import prepare_graphql_params + + +@pytest.fixture +def criticality_schema(default_branch: Branch, data_schema): + SCHEMA = { + "name": "Criticality", + "namespace": "Test", + "branch": BranchSupportType.AWARE.value, + "attributes": [ + {"name": "name", "kind": "Text", "unique": True}, + {"name": "level", "kind": "Number", "optional": True}, + {"name": "fancy", "kind": "Text", "optional": True}, + ], + } + + tmp_schema = NodeSchema(**SCHEMA) + registry.schema.set(name=tmp_schema.kind, schema=tmp_schema) + registry.schema.process_schema_branch(name=default_branch.name) + + +async def test_create_profile_in_schema(db: InfrahubDatabase, default_branch: Branch, criticality_schema): + profile = registry.schema.get("ProfileTestCriticality", branch=default_branch) + + obj1 = await Node.init(db=db, schema=profile) + await obj1.new(db=db, profile_name="prof1", level=8) + await obj1.save(db=db) + + query = """ + query { + ProfileTestCriticality { + edges { + node { + id + display_label + } + } + } + } + """ + gql_params = prepare_graphql_params( + db=db, include_mutation=False, include_subscription=False, branch=default_branch + ) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert len(result.data["ProfileTestCriticality"]["edges"]) == 1 + assert result.data["ProfileTestCriticality"]["edges"][0]["node"]["display_label"] == obj1.profile_name.value + + +async def test_upsert_profile_in_schema(db: InfrahubDatabase, default_branch: Branch, criticality_schema): + profile = registry.schema.get("ProfileTestCriticality", branch=default_branch) + + obj1 = await Node.init(db=db, schema=profile) + await obj1.new(db=db, profile_name="prof1", level=8) + await obj1.save(db=db) + + query = """ + mutation { + ProfileTestCriticalityUpsert( + data: { + profile_name: { value: "prof1"}, + level: { value: 10 } + profile_priority: { value: 1234 } + } + ) { + ok + object { + profile_name { value } + level { value } + profile_priority { value } + } + } + } + """ + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data["ProfileTestCriticalityUpsert"]["ok"] is True + gql_object = result.data["ProfileTestCriticalityUpsert"]["object"] + assert gql_object["profile_name"]["value"] == "prof1" + assert gql_object["level"]["value"] == 10 + assert gql_object["profile_priority"]["value"] == 1234 + retrieved_object = await 
NodeManager.get_one(db=db, id=obj1.id) + assert retrieved_object.profile_name.value == "prof1" + assert retrieved_object.level.value == 10 + assert retrieved_object.profile_priority.value == 1234 + + +async def test_profile_apply(db: InfrahubDatabase, default_branch: Branch, criticality_schema): + profile_schema = registry.schema.get("ProfileTestCriticality", branch=default_branch) + prof_1 = await Node.init(db=db, schema=profile_schema) + await prof_1.new(db=db, profile_name="prof1", profile_priority=1, level=8) + await prof_1.save(db=db) + prof_2 = await Node.init(db=db, schema=profile_schema) + await prof_2.new(db=db, profile_name="prof2", profile_priority=2, level=9) + await prof_2.save(db=db) + + crit_schema = registry.schema.get("TestCriticality", branch=default_branch) + crit_1 = await Node.init(db=db, schema=crit_schema) + await crit_1.new(db=db, name="crit_1") + crit_1.level.is_default = True + await crit_1.profiles.update(db=db, data=[prof_1]) + await crit_1.save(db=db) + crit_2 = await Node.init(db=db, schema=crit_schema) + await crit_2.new(db=db, name="crit_2") + crit_2.level.is_default = True + await crit_2.profiles.update(db=db, data=[prof_2]) + await crit_2.save(db=db) + + query = """ + query { + TestCriticality { + edges { + node { + name { value } + level { value } + id + profiles{ + edges { + node{ id } + } + } + } + } + } + } + """ + gql_params = prepare_graphql_params( + db=db, include_mutation=False, include_subscription=False, branch=default_branch + ) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + crits = result.data["TestCriticality"]["edges"] + assert len(crits) == 2 + assert { + "node": { + "name": {"value": "crit_1"}, + "level": {"value": 8}, + "id": crit_1.id, + "profiles": {"edges": [{"node": {"id": prof_1.id}}]}, + } + } in crits + assert { + "node": { + "name": {"value": "crit_2"}, + "level": {"value": 9}, + "id": crit_2.id, + "profiles": {"edges": [{"node": {"id": prof_2.id}}]}, + } + } in crits + + +async def test_is_from_profile_set_correctly(db: InfrahubDatabase, default_branch: Branch, criticality_schema): + profile_schema = registry.schema.get("ProfileTestCriticality", branch=default_branch) + prof_1 = await Node.init(db=db, schema=profile_schema) + await prof_1.new(db=db, profile_name="prof1", profile_priority=1, level=8) + await prof_1.save(db=db) + prof_2 = await Node.init(db=db, schema=profile_schema) + await prof_2.new(db=db, profile_name="prof2", profile_priority=2, level=9, fancy="sometimes") + await prof_2.save(db=db) + + crit_schema = registry.schema.get("TestCriticality", branch=default_branch) + crit_no_profile = await Node.init(db=db, schema=crit_schema) + await crit_no_profile.new(db=db, name="crit_no_profile", fancy="always") + await crit_no_profile.save(db=db) + + crit_1_profile = await Node.init(db=db, schema=crit_schema) + await crit_1_profile.new(db=db, name="crit_1_profile", fancy="never") + await crit_1_profile.profiles.update(db=db, data=[prof_1]) + await crit_1_profile.save(db=db) + + crit_2_profile = await Node.init(db=db, schema=crit_schema) + await crit_2_profile.new(db=db, name="crit_2_profile", level=7) + await crit_2_profile.profiles.update(db=db, data=[prof_1, prof_2]) + await crit_2_profile.save(db=db) + + query = """ + query { + TestCriticality { + edges { + node { + name { value, is_from_profile } + level { value, is_from_profile } + fancy { value, is_from_profile } + id + } + } + } + 
} + """ + gql_params = prepare_graphql_params( + db=db, include_mutation=False, include_subscription=False, branch=default_branch + ) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + crits = result.data["TestCriticality"]["edges"] + assert len(crits) == 3 + crits_by_id = {crit["node"]["id"]: crit["node"] for crit in crits} + assert crits_by_id[crit_no_profile.id] == { + "name": {"value": "crit_no_profile", "is_from_profile": False}, + "level": {"value": None, "is_from_profile": False}, + "fancy": {"value": "always", "is_from_profile": False}, + "id": crit_no_profile.id, + } + + assert crits_by_id[crit_1_profile.id] == { + "name": {"value": "crit_1_profile", "is_from_profile": False}, + "level": {"value": 8, "is_from_profile": True}, + "fancy": {"value": "never", "is_from_profile": False}, + "id": crit_1_profile.id, + } + + assert crits_by_id[crit_2_profile.id] == { + "name": {"value": "crit_2_profile", "is_from_profile": False}, + "level": {"value": 7, "is_from_profile": False}, + "fancy": {"value": "sometimes", "is_from_profile": True}, + "id": crit_2_profile.id, + } + + +async def test_is_profile_source_set_correctly(db: InfrahubDatabase, default_branch: Branch, criticality_schema): + profile_schema = registry.schema.get("ProfileTestCriticality", branch=default_branch) + prof_1 = await Node.init(db=db, schema=profile_schema) + await prof_1.new(db=db, profile_name="prof1", profile_priority=1, level=8) + await prof_1.save(db=db) + prof_2 = await Node.init(db=db, schema=profile_schema) + await prof_2.new(db=db, profile_name="prof2", profile_priority=2, level=9, fancy="sometimes") + await prof_2.save(db=db) + + crit_schema = registry.schema.get("TestCriticality", branch=default_branch) + crit_no_profile = await Node.init(db=db, schema=crit_schema) + await crit_no_profile.new(db=db, name="crit_no_profile", fancy="always") + await crit_no_profile.save(db=db) + + crit_1_profile = await Node.init(db=db, schema=crit_schema) + await crit_1_profile.new(db=db, name="crit_1_profile", fancy="never") + await crit_1_profile.profiles.update(db=db, data=[prof_1]) + await crit_1_profile.save(db=db) + + crit_2_profile = await Node.init(db=db, schema=crit_schema) + await crit_2_profile.new(db=db, name="crit_2_profile", level=7) + await crit_2_profile.profiles.update(db=db, data=[prof_1, prof_2]) + await crit_2_profile.save(db=db) + + query = """ + query { + TestCriticality { + edges { + node { + id + name { + value + is_from_profile + source { + id + display_label + __typename + } + } + level { + value + is_from_profile + source { + id + display_label + __typename + } + } + fancy { + value + is_from_profile + source { + id + display_label + __typename + } + } + } + } + } + } + """ + gql_params = prepare_graphql_params( + db=db, include_mutation=False, include_subscription=False, branch=default_branch + ) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + crits = result.data["TestCriticality"]["edges"] + assert len(crits) == 3 + crits_by_id = {crit["node"]["id"]: crit["node"] for crit in crits} + + assert crits_by_id[crit_no_profile.id] == { + "name": {"value": "crit_no_profile", "is_from_profile": False, "source": None}, + "level": {"value": None, "is_from_profile": False, "source": None}, + "fancy": {"value": "always", "is_from_profile": 
False, "source": None}, + "id": crit_no_profile.id, + } + + assert crits_by_id[crit_1_profile.id] == { + "name": {"value": "crit_1_profile", "is_from_profile": False, "source": None}, + "level": { + "value": 8, + "is_from_profile": True, + "source": { + "id": prof_1.id, + "display_label": await prof_1.render_display_label(db=db), + "__typename": "ProfileTestCriticality", + }, + }, + "fancy": {"value": "never", "is_from_profile": False, "source": None}, + "id": crit_1_profile.id, + } + + assert crits_by_id[crit_2_profile.id] == { + "name": {"value": "crit_2_profile", "is_from_profile": False, "source": None}, + "level": {"value": 7, "is_from_profile": False, "source": None}, + "fancy": { + "value": "sometimes", + "is_from_profile": True, + "source": { + "id": prof_2.id, + "display_label": await prof_2.render_display_label(db=db), + "__typename": "ProfileTestCriticality", + }, + }, + "id": crit_2_profile.id, + } diff --git a/backend/tests/unit/graphql/queries/test_status.py b/backend/tests/unit/graphql/queries/test_status.py new file mode 100644 index 0000000000..23971aab46 --- /dev/null +++ b/backend/tests/unit/graphql/queries/test_status.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from infrahub.components import ComponentType +from infrahub.core.registry import registry +from infrahub.services import InfrahubServices +from infrahub.worker import WORKER_IDENTITY +from tests.adapters.cache import MemoryCache +from tests.adapters.message_bus import BusRecorder +from tests.helpers.graphql import graphql_query + +if TYPE_CHECKING: + from infrahub.core.branch import Branch + from infrahub.database import InfrahubDatabase + + +async def test_status_query(db: InfrahubDatabase, default_branch: Branch, register_core_models_schema: None): + cache = MemoryCache() + bus = BusRecorder() + service = InfrahubServices(cache=cache, database=db, component_type=ComponentType.API_SERVER, message_bus=bus) + schema_branch = registry.schema.get_schema_branch(name=default_branch.name) + await service.component.initialize(service=service) + await service.component.refresh_heartbeat() + await service.component.refresh_schema_hash() + response = await graphql_query(query=STATUS_QUERY, db=db, branch=default_branch, service=service) + assert not response.errors + assert response.data + status = response.data["InfrahubStatus"] + assert status["summary"]["schema_hash_synced"] + nodes = status["workers"]["edges"] + assert len(nodes) == 1 + assert nodes[0]["node"]["active"] + assert nodes[0]["node"]["id"] == WORKER_IDENTITY + assert nodes[0]["node"]["schema_hash"] == schema_branch.get_hash() + + +STATUS_QUERY = """ +query InfrahubStatus { + InfrahubStatus { + workers { + edges { + node { + active + id + schema_hash + } + } + } + summary { + schema_hash_synced + } + } +} +""" diff --git a/backend/tests/unit/graphql/test_graphql_partial_match.py b/backend/tests/unit/graphql/test_graphql_partial_match.py index c992be49d3..fd5d2f5410 100644 --- a/backend/tests/unit/graphql/test_graphql_partial_match.py +++ b/backend/tests/unit/graphql/test_graphql_partial_match.py @@ -7,7 +7,7 @@ from tests.helpers.graphql import graphql_query -@pytest.mark.parametrize("filter_value", ["l", "o", "w", "low"]) +@pytest.mark.parametrize("filter_value", ["l", "o", "w", "low", "L", "LOW"]) async def test_query_filter_local_attrs_partial_match( db: InfrahubDatabase, default_branch: Branch, criticality_schema, filter_value ): diff --git a/backend/tests/unit/graphql/test_graphql_query.py 
b/backend/tests/unit/graphql/test_graphql_query.py index a3c87ed3d6..de5b4d85a2 100644 --- a/backend/tests/unit/graphql/test_graphql_query.py +++ b/backend/tests/unit/graphql/test_graphql_query.py @@ -275,6 +275,8 @@ async def test_all_attributes(db: InfrahubDatabase, default_branch: Branch, data myint=123, mylist=["1", 2, False], myjson={"key1": "bill"}, + ipaddress="10.5.0.1/27", + prefix="10.1.0.0/22", ) await obj1.save(db=db) @@ -293,6 +295,16 @@ async def test_all_attributes(db: InfrahubDatabase, default_branch: Branch, data myint { value } mylist { value } myjson { value } + ipaddress { + value + prefixlen + netmask + } + prefix { + value + prefixlen + netmask + } } } } @@ -319,12 +331,24 @@ async def test_all_attributes(db: InfrahubDatabase, default_branch: Branch, data assert results["obj1"]["myint"]["value"] == obj1.myint.value assert results["obj1"]["mylist"]["value"] == obj1.mylist.value assert results["obj1"]["myjson"]["value"] == obj1.myjson.value + assert results["obj1"]["ipaddress"]["value"] == obj1.ipaddress.value + assert results["obj1"]["ipaddress"]["netmask"] == obj1.ipaddress.netmask + assert results["obj1"]["ipaddress"]["prefixlen"] == obj1.ipaddress.prefixlen + assert results["obj1"]["prefix"]["value"] == obj1.prefix.value + assert results["obj1"]["prefix"]["netmask"] == obj1.prefix.netmask + assert results["obj1"]["prefix"]["prefixlen"] == obj1.prefix.prefixlen assert results["obj2"]["mystring"]["value"] == obj2.mystring.value assert results["obj2"]["mybool"]["value"] == obj2.mybool.value assert results["obj2"]["myint"]["value"] == obj2.myint.value assert results["obj2"]["mylist"]["value"] == obj2.mylist.value assert results["obj2"]["myjson"]["value"] == obj2.myjson.value + assert results["obj2"]["ipaddress"]["value"] == obj2.ipaddress.value + assert results["obj2"]["ipaddress"]["netmask"] is None + assert results["obj2"]["ipaddress"]["prefixlen"] is None + assert results["obj2"]["prefix"]["value"] == obj2.prefix.value + assert results["obj2"]["prefix"]["netmask"] is None + assert results["obj2"]["prefix"]["prefixlen"] is None async def test_nested_query(db: InfrahubDatabase, default_branch: Branch, car_person_schema): @@ -1974,9 +1998,7 @@ async def test_query_attribute_node_property_source( firstname { value source { - name { - value - } + id } } } @@ -1997,10 +2019,7 @@ async def test_query_attribute_node_property_source( assert result1.errors is None assert result1.data["TestPerson"]["edges"][0]["node"]["firstname"]["source"] - assert ( - result1.data["TestPerson"]["edges"][0]["node"]["firstname"]["source"]["name"]["value"] - == first_account.name.value - ) + assert result1.data["TestPerson"]["edges"][0]["node"]["firstname"]["source"]["id"] == first_account.id assert gql_params.context.related_node_ids == {p1.id, first_account.id} @@ -2020,10 +2039,10 @@ async def test_query_attribute_node_property_owner( firstname { value owner { - name { - value - } + id + display_label } + is_from_profile } } } @@ -2043,10 +2062,11 @@ async def test_query_attribute_node_property_owner( assert result1.errors is None assert result1.data["TestPerson"]["edges"][0]["node"]["firstname"]["owner"] - assert ( - result1.data["TestPerson"]["edges"][0]["node"]["firstname"]["owner"]["name"]["value"] - == first_account.name.value - ) + assert result1.data["TestPerson"]["edges"][0]["node"]["firstname"]["owner"]["id"] == first_account.id + assert result1.data["TestPerson"]["edges"][0]["node"]["firstname"]["owner"][ + "display_label" + ] == await first_account.render_display_label(db=db) + 
assert result1.data["TestPerson"]["edges"][0]["node"]["firstname"]["is_from_profile"] is False assert gql_params.context.related_node_ids == {p1.id, first_account.id} @@ -2665,7 +2685,7 @@ async def test_hierarchical_location_parent_filter( nodes = [node["node"]["name"]["value"] for node in result.data["LocationRack"]["edges"]] assert result.errors is None - assert nodes == ["paris-r1", "paris-r2", "london-r1", "london-r2"] + assert nodes == ["london-r1", "london-r2", "paris-r1", "paris-r2"] async def test_hierarchical_location_ancestors( diff --git a/backend/tests/unit/graphql/test_graphql_utils.py b/backend/tests/unit/graphql/test_graphql_utils.py index 86b4c0bd96..11ad5f7b4d 100644 --- a/backend/tests/unit/graphql/test_graphql_utils.py +++ b/backend/tests/unit/graphql/test_graphql_utils.py @@ -34,6 +34,7 @@ async def test_schema_models_generics( fields = await extract_fields(document.definitions[0].selection_set) expected_response = { + InfrahubKind.GENERATORGROUP, InfrahubKind.GRAPHQLQUERYGROUP, InfrahubKind.GENERICGROUP, InfrahubKind.STANDARDGROUP, diff --git a/backend/tests/unit/graphql/test_manager.py b/backend/tests/unit/graphql/test_manager.py index 82d4500e6a..1ab60a655d 100644 --- a/backend/tests/unit/graphql/test_manager.py +++ b/backend/tests/unit/graphql/test_manager.py @@ -123,6 +123,7 @@ async def test_generate_object_types(db: InfrahubDatabase, default_branch: Branc "name", "nbr_seats", "owner", + "profiles", "subscriber_of_groups", "transmission", ] @@ -141,6 +142,7 @@ async def test_generate_object_types(db: InfrahubDatabase, default_branch: Branc "id", "member_of_groups", "name", + "profiles", "subscriber_of_groups", ] assert sorted(list(edged_person._meta.fields.keys())) == ["node"] @@ -212,6 +214,25 @@ async def test_generate_filters(db: InfrahubDatabase, default_branch: Branch, da "name__source__id", "name__value", "name__values", + "profiles__height__is_protected", + "profiles__height__is_visible", + "profiles__height__owner__id", + "profiles__height__source__id", + "profiles__height__value", + "profiles__height__values", + "profiles__ids", + "profiles__profile_name__is_protected", + "profiles__profile_name__is_visible", + "profiles__profile_name__owner__id", + "profiles__profile_name__source__id", + "profiles__profile_name__value", + "profiles__profile_name__values", + "profiles__profile_priority__is_protected", + "profiles__profile_priority__is_visible", + "profiles__profile_priority__owner__id", + "profiles__profile_priority__source__id", + "profiles__profile_priority__value", + "profiles__profile_priority__values", "subscriber_of_groups__description__value", "subscriber_of_groups__description__values", "subscriber_of_groups__ids", diff --git a/backend/tests/unit/graphql/test_mutation_artifact_definition.py b/backend/tests/unit/graphql/test_mutation_artifact_definition.py index c8d94af856..25b07d8779 100644 --- a/backend/tests/unit/graphql/test_mutation_artifact_definition.py +++ b/backend/tests/unit/graphql/test_mutation_artifact_definition.py @@ -10,7 +10,8 @@ from infrahub.database import InfrahubDatabase from infrahub.graphql import prepare_graphql_params from infrahub.message_bus import messages -from infrahub.services import services +from infrahub.services import InfrahubServices +from tests.adapters.message_bus import BusRecorder @pytest.fixture @@ -54,7 +55,7 @@ async def definition1( transformation=transformation1, content_type="application/json", artifact_name="myartifact", - parameters='{"name": "name__value"}', + parameters={"value": {"name": 
"name__value"}}, ) await ad1.save(db=db) return ad1 @@ -90,7 +91,10 @@ async def test_create_artifact_definition( group1.id, transformation1.id, ) - gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=branch, service=services.service) + recorder = BusRecorder() + service = InfrahubServices(message_bus=recorder) + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=branch, service=service) result = await graphql( schema=gql_params.schema, source=query, @@ -109,7 +113,7 @@ async def test_create_artifact_definition( assert ( messages.RequestArtifactDefinitionGenerate(artifact_definition=ad_id, branch=branch.name, limit=[]) - in services.service.message_bus.messages + in service.message_bus.messages ) @@ -135,7 +139,10 @@ async def test_update_artifact_definition( } """ % (definition1.id) - gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=branch, service=services.service) + recorder = BusRecorder() + service = InfrahubServices(message_bus=recorder) + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=branch, service=service) result = await graphql( schema=gql_params.schema, source=query, @@ -155,5 +162,5 @@ async def test_update_artifact_definition( assert ( messages.RequestArtifactDefinitionGenerate(artifact_definition=definition1.id, branch=branch.name, limit=[]) - in services.service.message_bus.messages + in service.message_bus.messages ) diff --git a/backend/tests/unit/graphql/test_mutation_create.py b/backend/tests/unit/graphql/test_mutation_create.py index dc3da7b488..097e6cba25 100644 --- a/backend/tests/unit/graphql/test_mutation_create.py +++ b/backend/tests/unit/graphql/test_mutation_create.py @@ -33,7 +33,13 @@ async def test_create_simple_object(db: InfrahubDatabase, default_branch, car_pe assert result.errors is None assert result.data["TestPersonCreate"]["ok"] is True - assert len(result.data["TestPersonCreate"]["object"]["id"]) == 36 # lenght of an UUID + + person_id = result.data["TestPersonCreate"]["object"]["id"] + assert len(person_id) == 36 # length of an UUID + + person = await NodeManager.get_one(db=db, id=person_id) + assert person.name.is_default is False + assert person.height.is_default is False async def test_create_simple_object_with_ok_return(db: InfrahubDatabase, default_branch, car_person_schema): @@ -103,10 +109,7 @@ async def test_create_simple_object_with_enum( car_id = result.data["TestCarCreate"]["object"]["id"] database_car = await NodeManager.get_one(db=db, id=car_id) - if graphql_enums_on: - assert database_car.transmission.value.value == "manual" - else: - assert database_car.transmission.value == "manual" + assert database_car.transmission.value.value == "manual" async def test_create_enum_when_enums_off_fails( @@ -341,6 +344,8 @@ async def test_all_attributes(db: InfrahubDatabase, default_branch, all_attribut mybool: { value: false } myint: { value: 123 } mylist: { value: [ "1", 2, false ] } + ipaddress: { value: "10.3.4.254/24" } + prefix: { value: "10.3.4.0/24" } } ){ ok @@ -361,15 +366,86 @@ async def test_all_attributes(db: InfrahubDatabase, default_branch, all_attribut assert result.errors is None assert result.data["TestAllAttributeTypesCreate"]["ok"] is True - assert len(result.data["TestAllAttributeTypesCreate"]["object"]["id"]) == 36 # lenght of an UUID + assert len(result.data["TestAllAttributeTypesCreate"]["object"]["id"]) == 36 # length of an UUID objs = await NodeManager.query(db=db, schema="TestAllAttributeTypes") obj1 = 
objs[0] assert obj1.mystring.value == "abc" + assert obj1.mystring.is_default is False + assert obj1.mybool.value is False + assert obj1.mybool.is_default is False + assert obj1.myint.value == 123 + assert obj1.myint.is_default is False + assert obj1.mylist.value == ["1", 2, False] + assert obj1.mylist.is_default is False + assert obj1.ipaddress.value == "10.3.4.254/24" + assert obj1.ipaddress.is_default is False + assert obj1.prefix.value == "10.3.4.0/24" + assert obj1.prefix.is_default is False + + +async def test_all_attributes_default_value(db: InfrahubDatabase, default_branch, all_attribute_default_types_schema): + query = """ + mutation { + TestAllAttributeTypesCreate( + data: { + name: { value: "obj1" } + mystring: { value: "abc" } + mybool: { value: false } + myint: { value: 123 } + mylist: { value: [ "1", 2, false ] } + } + ){ + ok + object { + id + } + } + } + """ + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data["TestAllAttributeTypesCreate"]["ok"] is True + obj_id = result.data["TestAllAttributeTypesCreate"]["object"]["id"] + assert len(obj_id) == 36 # length of an UUID + + obj1 = await NodeManager.get_one(db=db, id=obj_id) + + assert obj1.mystring.value == "abc" + assert obj1.mystring.is_default is False assert obj1.mybool.value is False + assert obj1.mybool.is_default is False assert obj1.myint.value == 123 + assert obj1.myint.is_default is False assert obj1.mylist.value == ["1", 2, False] + assert obj1.mylist.is_default is False + + assert obj1.mystring_default.value == "a string" + assert obj1.mystring_default.is_default is True + assert obj1.mybool_default.value is False + assert obj1.mybool_default.is_default is True + assert obj1.myint_default.value == 10 + assert obj1.myint_default.is_default is True + assert obj1.mylist_default.value == [10, 11, 12] + assert obj1.mylist_default.is_default is True + + assert obj1.mystring_none.value is None + assert obj1.mystring_none.is_default is True + assert obj1.mybool_none.value is None + assert obj1.mybool_none.is_default is True + assert obj1.myint_none.value is None + assert obj1.myint_none.is_default is True + assert obj1.mylist_none.value is None + assert obj1.mylist_none.is_default is True async def test_create_object_with_flag_property(db: InfrahubDatabase, default_branch, car_person_schema): @@ -399,7 +475,7 @@ async def test_create_object_with_flag_property(db: InfrahubDatabase, default_br assert result.errors is None assert result.data["TestPersonCreate"]["ok"] is True - assert len(result.data["TestPersonCreate"]["object"]["id"]) == 36 # lenght of an UUID + assert len(result.data["TestPersonCreate"]["object"]["id"]) == 36 # length of an UUID # Query the newly created Node to ensure everything is as expected query = """ @@ -467,7 +543,7 @@ async def test_create_object_with_node_property( assert result.errors is None assert result.data["TestPersonCreate"]["ok"] is True - assert len(result.data["TestPersonCreate"]["object"]["id"]) == 36 # lenght of an UUID + assert len(result.data["TestPersonCreate"]["object"]["id"]) == 36 # length of an UUID # Query the newly created Node to ensure everything is as expected query = """ @@ -479,17 +555,15 @@ async def test_create_object_with_node_property( name { value source { - name { - value - } + id + display_label } } height { id owner { - 
name { - value - } + id + display_label } } } @@ -507,8 +581,14 @@ async def test_create_object_with_node_property( ) assert result1.errors is None - assert result1.data["TestPerson"]["edges"][0]["node"]["name"]["source"]["name"]["value"] == "First Account" - assert result1.data["TestPerson"]["edges"][0]["node"]["height"]["owner"]["name"]["value"] == "Second Account" + assert result1.data["TestPerson"]["edges"][0]["node"]["name"]["source"]["id"] == first_account.id + assert result1.data["TestPerson"]["edges"][0]["node"]["name"]["source"][ + "display_label" + ] == await first_account.render_display_label(db=db) + assert result1.data["TestPerson"]["edges"][0]["node"]["height"]["owner"]["id"] == second_account.id + assert result1.data["TestPerson"]["edges"][0]["node"]["height"]["owner"][ + "display_label" + ] == await second_account.render_display_label(db=db) async def test_create_object_with_single_relationship(db: InfrahubDatabase, default_branch, car_person_schema): @@ -544,7 +624,7 @@ async def test_create_object_with_single_relationship(db: InfrahubDatabase, defa assert result.errors is None assert result.data["TestCarCreate"]["ok"] is True - assert len(result.data["TestCarCreate"]["object"]["id"]) == 36 # lenght of an UUID + assert len(result.data["TestCarCreate"]["object"]["id"]) == 36 # length of an UUID async def test_create_object_with_single_relationship_flag_property( @@ -671,7 +751,7 @@ async def test_create_object_with_multiple_relationships(db: InfrahubDatabase, d assert result.errors is None assert result.data["GardenFruitCreate"]["ok"] is True - assert len(result.data["GardenFruitCreate"]["object"]["id"]) == 36 # lenght of an UUID + assert len(result.data["GardenFruitCreate"]["object"]["id"]) == 36 # length of an UUID fruit = await NodeManager.get_one(db=db, id=result.data["GardenFruitCreate"]["object"]["id"]) assert len(await fruit.tags.get(db=db)) == 3 @@ -725,7 +805,7 @@ async def test_create_object_with_multiple_relationships_with_node_property( assert result.errors is None assert result.data["GardenFruitCreate"]["ok"] is True - assert len(result.data["GardenFruitCreate"]["object"]["id"]) == 36 # lenght of an UUID + assert len(result.data["GardenFruitCreate"]["object"]["id"]) == 36 # length of an UUID fruit = await NodeManager.get_one( db=db, id=result.data["GardenFruitCreate"]["object"]["id"], include_owner=True, include_source=True @@ -795,7 +875,7 @@ async def test_create_object_with_multiple_relationships_flag_property( assert result.errors is None assert result.data["GardenFruitCreate"]["ok"] is True - assert len(result.data["GardenFruitCreate"]["object"]["id"]) == 36 # lenght of an UUID + assert len(result.data["GardenFruitCreate"]["object"]["id"]) == 36 # length of an UUID fruit = await NodeManager.get_one(db=db, id=result.data["GardenFruitCreate"]["object"]["id"]) rels = await fruit.tags.get(db=db) diff --git a/backend/tests/unit/graphql/test_mutation_delete.py b/backend/tests/unit/graphql/test_mutation_delete.py index 2fa4f2c21d..361b6934a7 100644 --- a/backend/tests/unit/graphql/test_mutation_delete.py +++ b/backend/tests/unit/graphql/test_mutation_delete.py @@ -40,3 +40,37 @@ async def test_delete_object(db: InfrahubDatabase, default_branch, car_person_sc assert result.data["TestPersonDelete"]["ok"] is True assert not await NodeManager.get_one(db=db, id=obj1.id) + + +async def test_delete_prevented( + db: InfrahubDatabase, default_branch, car_person_schema, car_camry_main, person_jane_main +): + query = ( + """ + mutation { + TestPersonDelete(data: {id: "%s"}) 
{ + ok + } + } + """ + % person_jane_main.id + ) + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors + assert len(result.errors) == 1 + assert f"Cannot delete TestPerson '{person_jane_main.id}'." in result.errors[0].message + assert ( + f"It is linked to mandatory relationship owner on node TestCar '{car_camry_main.id}'" + in result.errors[0].message + ) + assert result.data["TestPersonDelete"] is None + + assert await NodeManager.get_one(db=db, id=person_jane_main.id) is not None diff --git a/backend/tests/unit/graphql/test_mutation_graphqlquery.py b/backend/tests/unit/graphql/test_mutation_graphqlquery.py index 061cfac11b..f45c4e5900 100644 --- a/backend/tests/unit/graphql/test_mutation_graphqlquery.py +++ b/backend/tests/unit/graphql/test_mutation_graphqlquery.py @@ -56,7 +56,7 @@ async def test_create_query_no_vars(db: InfrahubDatabase, default_branch, regist assert result.errors is None assert result.data["CoreGraphQLQueryCreate"]["ok"] is True query_id = result.data["CoreGraphQLQueryCreate"]["object"]["id"] - assert len(query_id) == 36 # lenght of an UUID + assert len(query_id) == 36 # length of an UUID query1 = await registry.manager.get_one(id=query_id, db=db) assert query1.depth.value == 6 @@ -122,7 +122,7 @@ async def test_create_query_with_vars(db: InfrahubDatabase, default_branch, regi assert result.errors is None assert result.data["CoreGraphQLQueryCreate"]["ok"] is True query_id = result.data["CoreGraphQLQueryCreate"]["object"]["id"] - assert len(query_id) == 36 # lenght of an UUID + assert len(query_id) == 36 # length of an UUID query2 = await registry.manager.get_one(id=query_id, db=db) assert query2.depth.value == 8 diff --git a/backend/tests/unit/graphql/test_mutation_relationship.py b/backend/tests/unit/graphql/test_mutation_relationship.py index fed7c3e5c2..132e0d5549 100644 --- a/backend/tests/unit/graphql/test_mutation_relationship.py +++ b/backend/tests/unit/graphql/test_mutation_relationship.py @@ -622,3 +622,34 @@ async def test_relationship_groups_add_remove(db: InfrahubDatabase, default_bran group2 = await NodeManager.get_one(db=db, id=g2.id, branch=default_branch) members = await group2.members.get(db=db) assert len(members) == 1 + + +async def test_relationship_add_busy(db: InfrahubDatabase, default_branch: Branch, car_person_generics_data): + c1 = car_person_generics_data["c1"] + p2 = car_person_generics_data["p2"] + + query = """ + mutation { + RelationshipAdd(data: { + id: "%s", + name: "cars", + nodes: [{id: "%s"}], + }) { + ok + } + } + """ % ( + p2.id, + c1.id, + ) + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + assert result.errors + assert "'TestElectricCar' is already related to another peer on 'owner'" in str(result.errors[0]) diff --git a/backend/tests/unit/graphql/test_mutation_update.py b/backend/tests/unit/graphql/test_mutation_update.py index 82d19e44de..44ff4c5aa4 100644 --- a/backend/tests/unit/graphql/test_mutation_update.py +++ b/backend/tests/unit/graphql/test_mutation_update.py @@ -141,10 +141,7 @@ async def test_update_simple_object_with_enum( assert result.data["TestCarUpdate"]["object"]["transmission"]["value"] == 
response_value updated_car = await NodeManager.get_one(db=db, id=car_id) - if graphql_enums_on: - assert updated_car.transmission.value.value == "flintstone-feet" - else: - assert updated_car.transmission.value == "flintstone-feet" + assert updated_car.transmission.value.value == "flintstone-feet" async def test_update_check_unique(db: InfrahubDatabase, person_john_main: Node, person_jim_main: Node, branch: Branch): @@ -216,6 +213,71 @@ async def test_update_object_with_flag_property(db: InfrahubDatabase, person_joh assert obj1.height.is_visible is False +async def test_update_all_attributes(db: InfrahubDatabase, default_branch, all_attribute_types_schema): + obj1 = await Node.init(db=db, schema="TestAllAttributeTypes") + await obj1.new( + db=db, + name="obj1", + mystring="abc", + mybool=False, + myint=123, + mylist=["1", 2, False], + myjson={"key1": "bill"}, + ipaddress="10.5.0.1/27", + prefix="10.1.0.0/22", + ) + await obj1.save(db=db) + + query = ( + """ + mutation { + TestAllAttributeTypesUpdate( + data: { + id: "%s" + name: { value: "obj1" } + mystring: { value: "def" } + mybool: { value: true } + myint: { value: 456 } + mylist: { value: [ "2", "4", "6" ] } + ipaddress: { value: "10.3.4.254/24" } + prefix: { value: "10.3.4.0/24" } + } + ){ + ok + } + } + """ + % obj1.id + ) + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=default_branch) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={"id": obj1.id}, + ) + + assert result.errors is None + assert result.data["TestAllAttributeTypesUpdate"]["ok"] is True + + objs = await NodeManager.query(db=db, schema="TestAllAttributeTypes") + obj = objs[0] + + assert obj.mystring.value == "def" + assert obj.mybool.value is True + assert obj.mybool.is_default is False + assert obj.myint.value == 456 + assert obj.myint.is_default is False + assert obj.mylist.value == ["2", "4", "6"] + assert obj.mylist.is_default is False + assert obj.ipaddress.value == "10.3.4.254/24" + assert obj.ipaddress.is_default is False + assert obj.prefix.value == "10.3.4.0/24" + assert obj.prefix.is_default is False + + @pytest.fixture async def person_john_with_source_main( db: InfrahubDatabase, default_branch: Branch, car_person_schema, first_account @@ -363,6 +425,118 @@ async def test_update_single_relationship( assert car_peer.id == person_jim_main.id +async def test_update_default_value( + db: InfrahubDatabase, person_john_main: Node, person_jim_main: Node, car_accord_main: Node, branch: Branch +): + assert car_accord_main.color.is_default is True + + query = """ + mutation { + TestCarUpdate(data: {id: "%s", color: { value: "#333333" }}) { + ok + object { + id + color { + value + is_default + } + } + } + } + """ % (car_accord_main.id) + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=branch) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data["TestCarUpdate"]["ok"] is True + assert result.data["TestCarUpdate"]["object"]["color"]["is_default"] is False + + car = await NodeManager.get_one(db=db, id=car_accord_main.id, branch=branch) + assert car.color.value == "#333333" + assert car.color.is_default is False + + # Set the is_default flag with a non default value, flag should be ignored + query = """ + mutation { + TestCarUpdate(data: {id: "%s", color: { value: 
"#222222", is_default: true }}) { + ok + object { + id + color { + value + is_default + } + } + } + } + """ % (car_accord_main.id) + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=branch) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data["TestCarUpdate"]["ok"] is True + assert result.data["TestCarUpdate"]["object"]["color"]["is_default"] is False + + car = await NodeManager.get_one(db=db, id=car_accord_main.id, branch=branch) + assert car.color.value == "#222222" + assert car.color.is_default is False + + # Set the is_default flag to re-initialize the value + query = """ + mutation { + TestCarUpdate(data: {id: "%s", color: { is_default: true }, transmission: { is_default: true } }) { + ok + object { + id + color { + value + is_default + } + transmission { + value + is_default + } + } + } + } + """ % (car_accord_main.id) + + gql_params = prepare_graphql_params(db=db, include_subscription=False, branch=branch) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data["TestCarUpdate"]["ok"] is True + assert result.data["TestCarUpdate"]["object"]["color"]["is_default"] is True + assert result.data["TestCarUpdate"]["object"]["transmission"]["value"] is None + assert result.data["TestCarUpdate"]["object"]["transmission"]["is_default"] is True + + car = await NodeManager.get_one(db=db, id=car_accord_main.id, branch=branch) + assert car.color.value == "#444444" + assert car.color.is_default is True + + assert car.transmission.value is None + assert car.transmission.is_default is True + + async def test_update_new_single_relationship_flag_property( db: InfrahubDatabase, person_john_main: Node, person_jim_main: Node, car_accord_main: Node, branch: Branch ): diff --git a/backend/tests/unit/graphql/test_mutation_upsert.py b/backend/tests/unit/graphql/test_mutation_upsert.py index 76355c08d1..adf6cf1d0d 100644 --- a/backend/tests/unit/graphql/test_mutation_upsert.py +++ b/backend/tests/unit/graphql/test_mutation_upsert.py @@ -147,8 +147,8 @@ async def test_cannot_upsert_new_object_without_required_fields(db: InfrahubData variable_values={}, ) - expected_error = "Field 'TestPersonCreateInput.name' of required type 'TextAttributeInput!' was not provided." - assert any([expected_error in error.message for error in result.errors]) + expected_error = "Field 'TestPersonUpsertInput.name' of required type 'TextAttributeUpdate!' was not provided." 
+ assert any(expected_error in error.message for error in result.errors) assert await NodeManager.get_one(db=db, id=fresh_id, branch=branch) is None @@ -176,7 +176,7 @@ async def test_id_for_other_schema_raises_error( ) expected_error = f"Node with id {car_accord_main.id} exists, but it is a TestCar, not TestPerson" - assert any([expected_error in error.message for error in result.errors]) + assert any(expected_error in error.message for error in result.errors) async def test_update_by_id_to_nonunique_value_raises_error( @@ -202,4 +202,4 @@ async def test_update_by_id_to_nonunique_value_raises_error( ) expected_error = "An object already exist with this value: name: Jim at name" - assert any([expected_error in error.message for error in result.errors]) + assert any(expected_error in error.message for error in result.errors) diff --git a/backend/tests/unit/graphql/test_parser.py b/backend/tests/unit/graphql/test_parser.py new file mode 100644 index 0000000000..fe83a359b9 --- /dev/null +++ b/backend/tests/unit/graphql/test_parser.py @@ -0,0 +1,178 @@ +from graphql import graphql + +from infrahub.core.branch import Branch +from infrahub.core.node import Node +from infrahub.database import InfrahubDatabase +from infrahub.graphql import prepare_graphql_params + + +async def test_simple_directive(db: InfrahubDatabase, default_branch: Branch, criticality_schema): + obj1 = await Node.init(db=db, schema=criticality_schema) + await obj1.new(db=db, name="low", level=4) + await obj1.save(db=db) + obj2 = await Node.init(db=db, schema=criticality_schema) + await obj2.new(db=db, name="medium", level=3, description="My desc", color="#333333") + await obj2.save(db=db) + + query = """ + query { + TestCriticality { + count + edges { + node @expand { + id + level { + __typename + } + label { + value + } + } + } + } + } + """ + + gql_params = prepare_graphql_params( + db=db, include_mutation=False, include_subscription=False, branch=default_branch + ) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data + assert result.data["TestCriticality"]["count"] == 2 + assert len(result.data["TestCriticality"]["edges"]) == 2 + assert gql_params.context.related_node_ids == {obj1.id, obj2.id} + assert { + "node": { + "id": obj1.id, + "level": {"__typename": "NumberAttribute", "value": 4, "is_default": False, "is_from_profile": False}, + "label": {"value": "Low", "is_default": False, "is_from_profile": False}, + "__typename": "TestCriticality", + "name": {"value": "low", "is_default": False, "is_from_profile": False}, + "color": {"value": "#444444", "is_default": True, "is_from_profile": False}, + "mylist": {"value": ["one", "two"], "is_default": True, "is_from_profile": False}, + "is_true": {"value": True, "is_default": True, "is_from_profile": False}, + "is_false": {"value": False, "is_default": True, "is_from_profile": False}, + "json_no_default": {"value": None, "is_default": True, "is_from_profile": False}, + "json_default": {"value": {"value": "bob"}, "is_default": True, "is_from_profile": False}, + "description": {"value": None, "is_default": True, "is_from_profile": False}, + "status": {"value": None, "is_default": True, "is_from_profile": False}, + } + } in result.data["TestCriticality"]["edges"] + + +async def test_directive_exclude(db: InfrahubDatabase, default_branch: Branch, criticality_schema): + obj1 = await Node.init(db=db, schema=criticality_schema) + await 
obj1.new(db=db, name="low", level=4) + await obj1.save(db=db) + obj2 = await Node.init(db=db, schema=criticality_schema) + await obj2.new(db=db, name="medium", level=3, description="My desc", color="#333333") + await obj2.save(db=db) + + query = """ + query { + TestCriticality { + count + edges { + node @expand(exclude: ["color", "mylist"]) { + id + } + } + } + } + """ + + gql_params = prepare_graphql_params( + db=db, include_mutation=False, include_subscription=False, branch=default_branch + ) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + + assert result.errors is None + assert result.data + assert result.data["TestCriticality"]["count"] == 2 + assert len(result.data["TestCriticality"]["edges"]) == 2 + assert gql_params.context.related_node_ids == {obj1.id, obj2.id} + assert { + "node": { + "id": obj1.id, + "__typename": "TestCriticality", + "name": {"value": "low", "is_default": False, "is_from_profile": False}, + "label": {"value": "Low", "is_default": False, "is_from_profile": False}, + "level": {"value": 4, "is_default": False, "is_from_profile": False}, + "is_true": {"value": True, "is_default": True, "is_from_profile": False}, + "is_false": {"value": False, "is_default": True, "is_from_profile": False}, + "json_no_default": {"value": None, "is_default": True, "is_from_profile": False}, + "json_default": {"value": {"value": "bob"}, "is_default": True, "is_from_profile": False}, + "description": {"value": None, "is_default": True, "is_from_profile": False}, + "status": {"value": None, "is_default": True, "is_from_profile": False}, + } + } in result.data["TestCriticality"]["edges"] + + +async def test_directive_merge_fields( + db: InfrahubDatabase, default_branch: Branch, register_core_models_schema, person_tag_schema, first_account +): + """This test validates that the @expand directive doesn't override the source field under username.""" + p1 = await Node.init(db=db, schema="TestPerson") + await p1.new(db=db, firstname="John", lastname="Doe", _source=first_account) + await p1.save(db=db) + + query = """ + query { + TestPerson { + edges { + node @expand { + id + firstname { + source { + id + } + } + } + } + } + } + """ + gql_params = prepare_graphql_params( + db=db, include_mutation=False, include_subscription=False, branch=default_branch + ) + result = await graphql( + schema=gql_params.schema, + source=query, + context_value=gql_params.context, + root_value=None, + variable_values={}, + ) + assert result.errors is None + assert result.data + assert len(result.data["TestPerson"]["edges"]) == 1 + assert result.data["TestPerson"]["edges"][0] == { + "node": { + "id": p1.id, + "firstname": { + "source": {"id": first_account.id}, + "value": "John", + "is_default": False, + "is_from_profile": False, + }, + "__typename": "TestPerson", + "lastname": { + "value": "Doe", + "is_default": False, + "is_from_profile": False, + }, + } + } diff --git a/backend/tests/unit/graphql/test_query_analyzer.py b/backend/tests/unit/graphql/test_query_analyzer.py index e03bed0aa5..9adbbe6ca6 100644 --- a/backend/tests/unit/graphql/test_query_analyzer.py +++ b/backend/tests/unit/graphql/test_query_analyzer.py @@ -94,6 +94,7 @@ async def test_get_models_in_use( gqa = InfrahubGraphQLQueryAnalyzer(query=query_02, schema=gql_params.schema, branch=default_branch) assert await gqa.get_models_in_use(types=gql_params.context.types) == { + InfrahubKind.GENERATORGROUP, InfrahubKind.GRAPHQLQUERYGROUP, 
InfrahubKind.GENERICGROUP, InfrahubKind.STANDARDGROUP, diff --git a/backend/tests/unit/message_bus/operations/requests/test_graphql_query_group.py b/backend/tests/unit/message_bus/operations/requests/test_graphql_query_group.py index 21f21166c4..52960a4349 100644 --- a/backend/tests/unit/message_bus/operations/requests/test_graphql_query_group.py +++ b/backend/tests/unit/message_bus/operations/requests/test_graphql_query_group.py @@ -40,8 +40,10 @@ async def test_graphql_group_update(db: InfrahubDatabase, httpx_mock: HTTPXMock, subscribers={r1}, params={"name": "John"}, ) - config = Config(address="http://mock") - client = InfrahubClient(config=config, insert_tracker=True) + config = Config(address="http://mock", insert_tracker=True) + client = InfrahubClient( + config=config, + ) service = InfrahubServices(client=client) response1 = { diff --git a/backend/tests/unit/message_bus/operations/requests/test_proposed_change.py b/backend/tests/unit/message_bus/operations/requests/test_proposed_change.py index 77440c7c61..215020334d 100644 --- a/backend/tests/unit/message_bus/operations/requests/test_proposed_change.py +++ b/backend/tests/unit/message_bus/operations/requests/test_proposed_change.py @@ -21,8 +21,8 @@ @pytest.fixture def service_all(db: InfrahubDatabase, helper): - config = Config(address="http://mock") - client = InfrahubClient(config=config, insert_tracker=True) + config = Config(address="http://mock", insert_tracker=True) + client = InfrahubClient(config=config) bus_simulator = helper.get_message_bus_simulator() service = InfrahubServices(message_bus=bus_simulator, client=client, database=db) bus_simulator.service = service @@ -126,7 +126,7 @@ async def test_get_proposed_change_schema_integrity_constraints( constraints = await proposed_change._get_proposed_change_schema_integrity_constraints( message=schema_integrity_01, schema=schema ) - assert len(constraints) == 15 + assert len(constraints) == 17 dumped_constraints = [c.model_dump() for c in constraints] assert { "constraint_name": "relationship.optional.update", @@ -291,7 +291,7 @@ async def test_schema_integrity( checks = await registry.manager.query(db=db, schema=InfrahubKind.SCHEMACHECK) assert len(checks) == 1 check = checks[0] - assert check.conclusion.value == "failure" + assert check.conclusion.value.value == "failure" assert check.conflicts.value == [ { "branch": "placeholder", diff --git a/backend/tests/unit/test_cli.py b/backend/tests/unit/test_cli.py index 56deeada18..45517c5adb 100644 --- a/backend/tests/unit/test_cli.py +++ b/backend/tests/unit/test_cli.py @@ -27,9 +27,3 @@ def test_server_app(): result = runner.invoke(app, ["server", "--help"]) assert result.exit_code == 0 assert "[OPTIONS] COMMAND [ARGS]" in result.stdout - - -def test_generate_schema_app(): - result = runner.invoke(app, ["generate-schema", "--help"]) - assert result.exit_code == 0 - assert "[OPTIONS] COMMAND [ARGS]" in result.stdout diff --git a/development/Dockerfile b/development/Dockerfile index 4ef8783e43..ee72b64b7f 100644 --- a/development/Dockerfile +++ b/development/Dockerfile @@ -2,7 +2,7 @@ # STAGE : Base Python Image # **************************************************************** ARG PYTHON_VER=3.12 -FROM docker.io/python:${PYTHON_VER} AS base +FROM docker.io/python:${PYTHON_VER}-slim AS base ENV PYTHONUNBUFFERED 1 @@ -17,6 +17,8 @@ RUN apt-get update && \ apt-get upgrade -y && \ apt-get install --no-install-recommends -y curl git pkg-config build-essential ca-certificates && \ curl -sSL https://install.python-poetry.org | 
python3 - && \ + apt-get autoremove -y && \ + apt-get clean all && \ rm -rf /var/lib/apt/lists/* && \ rm -rf /var/lib/apt/lists/* && \ pip --no-cache-dir install --no-compile --upgrade pip wheel @@ -90,17 +92,11 @@ COPY --from=frontend /docs/build/ /opt/infrahub/docs/build # Copy in only pyproject.toml/poetry.lock to help with caching this layer if no updates to dependencies # -------------------------------------------- COPY poetry.lock pyproject.toml /source/ -RUN poetry install --no-interaction --no-ansi --no-root --no-directory +RUN poetry install --no-interaction --no-ansi --no-root --no-directory && \ + rm -rf /root/.cache # -------------------------------------------- # Copy in the rest of the source code and install the project # -------------------------------------------- COPY . ./ RUN poetry install --no-interaction --no-ansi - -# -------------------------------------------- -# Purge & Cleanup -# -------------------------------------------- -RUN apt-get autoremove -y && \ - apt-get clean all && \ - rm -rf /var/lib/apt/lists/* diff --git a/development/docker-compose-database-memgraph.yml b/development/docker-compose-database-memgraph.yml index 91d9222194..e6317c94e3 100644 --- a/development/docker-compose-database-memgraph.yml +++ b/development/docker-compose-database-memgraph.yml @@ -1,5 +1,4 @@ --- -version: "3.4" services: database: image: "${DATABASE_DOCKER_IMAGE:-memgraph/memgraph-platform:latest}" @@ -21,6 +20,8 @@ services: start_period: 3s labels: infrahub_role: "database" + com.github.run_id: "${GITHUB_RUN_ID:-unknown}" + com.github.job: "${JOB_NAME:-unknown}" volumes: diff --git a/development/docker-compose-database-neo4j.yml b/development/docker-compose-database-neo4j.yml index 29231a1e43..6cd0ba8729 100644 --- a/development/docker-compose-database-neo4j.yml +++ b/development/docker-compose-database-neo4j.yml @@ -1,5 +1,4 @@ --- -version: "3.4" services: database: image: "${DATABASE_DOCKER_IMAGE:-neo4j:enterprise}" @@ -24,6 +23,8 @@ services: start_period: 3s labels: infrahub_role: "database" + com.github.run_id: "${GITHUB_RUN_ID:-unknown}" + com.github.job: "${JOB_NAME:-unknown}" ports: - "${INFRAHUB_DB_BACKUP_PORT:-6362}:6362" diff --git a/development/docker-compose-deps.yml b/development/docker-compose-deps.yml index 18f4d6243f..0d53579d58 100644 --- a/development/docker-compose-deps.yml +++ b/development/docker-compose-deps.yml @@ -1,5 +1,4 @@ --- -version: "3.4" services: message-queue: image: "${MESSAGE_QUEUE_DOCKER_IMAGE:-rabbitmq:latest}" diff --git a/development/docker-compose-test-cache.yml b/development/docker-compose-test-cache.yml index 1731b048c1..8306345976 100644 --- a/development/docker-compose-test-cache.yml +++ b/development/docker-compose-test-cache.yml @@ -1,7 +1,5 @@ --- # yamllint disable rule:line-length - -version: "3.4" services: cache: deploy: diff --git a/development/docker-compose-test-database-memgraph.yml b/development/docker-compose-test-database-memgraph.yml index 4233303330..ba36a0c1f2 100644 --- a/development/docker-compose-test-database-memgraph.yml +++ b/development/docker-compose-test-database-memgraph.yml @@ -1,5 +1,4 @@ --- -version: "3.4" services: database: deploy: diff --git a/development/docker-compose-test-database-neo4j.yml b/development/docker-compose-test-database-neo4j.yml index d67b78ab06..b4dc16c057 100644 --- a/development/docker-compose-test-database-neo4j.yml +++ b/development/docker-compose-test-database-neo4j.yml @@ -1,5 +1,4 @@ --- -version: "3.4" services: database: deploy: diff --git 
a/development/docker-compose-test-message-queue.yml b/development/docker-compose-test-message-queue.yml index fb88620048..5465a9afbd 100644 --- a/development/docker-compose-test-message-queue.yml +++ b/development/docker-compose-test-message-queue.yml @@ -1,5 +1,4 @@ --- -version: "3.4" services: message-queue: deploy: diff --git a/development/docker-compose-test-metrics.yml b/development/docker-compose-test-metrics.yml new file mode 100644 index 0000000000..14132db734 --- /dev/null +++ b/development/docker-compose-test-metrics.yml @@ -0,0 +1,27 @@ +--- +# yamllint disable rule:line-length +services: + # memgraph metrics requires enterprise license + # memgraph-exporter: + # build: + # context: ./memgraph_exporter + # image: memgraph-exporter + # hostname: "memgraph_exp" + vmagent: + image: victoriametrics/vmagent:v1.99.0 + volumes: + - vmagentdata:/vmagentdata + - ./vmagent.yml:/etc/prometheus/prometheus.yml:ro + command: + - "--promscrape.config=/etc/prometheus/prometheus.yml" + - "--remoteWrite.sendTimeout=0s" # https://github.com/golang/go/issues/59017 + - "--remoteWrite.url=${METRICS_ENDPOINT:-http://127.0.0.1:8424}" + - "--remoteWrite.label=job=${JOB_NAME}" + - "--remoteWrite.label=run_id=${GITHUB_RUN_ID}" + - "--remoteWrite.label=run_number=${GITHUB_RUN_NUMBER}" + - "--remoteWrite.label=pr_number=${GITHUB_PR_NUMBER}" + ports: + - "${VMAGENT_PORT:-8429}:8429" + +volumes: + vmagentdata: diff --git a/development/docker-compose-test-scale.yml b/development/docker-compose-test-scale.yml index 1763c7e682..9997b688b3 100644 --- a/development/docker-compose-test-scale.yml +++ b/development/docker-compose-test-scale.yml @@ -1,7 +1,85 @@ --- # yamllint disable rule:line-length +x-infrahub-config: &infrahub_config + AWS_ACCESS_KEY_ID: + AWS_DEFAULT_ACL: + AWS_QUERYSTRING_AUTH: + AWS_S3_BUCKET_NAME: + AWS_S3_CUSTOM_DOMAIN: + AWS_S3_ENDPOINT_URL: + AWS_S3_USE_SSL: + AWS_SECRET_ACCESS_KEY: + DB_TYPE: + INFRAHUB_ADDRESS: + INFRAHUB_ALLOW_ANONYMOUS_ACCESS: + INFRAHUB_ANALYTICS_ADDRESS: + INFRAHUB_ANALYTICS_API_KEY: + INFRAHUB_ANALYTICS_ENABLE: + INFRAHUB_API_CORS_ALLOW_CREDENTIALS: + INFRAHUB_API_CORS_ALLOW_HEADERS: + INFRAHUB_API_CORS_ALLOW_METHODS: + INFRAHUB_API_CORS_ALLOW_ORIGINS: + INFRAHUB_BROKER_ADDRESS: + INFRAHUB_BROKER_ENABLE: + INFRAHUB_BROKER_MAXIMUM_CONCURRENT_MESSAGES: + INFRAHUB_BROKER_MAXIMUM_MESSAGE_RETRIES: + INFRAHUB_BROKER_NAMESPACE: + INFRAHUB_BROKER_PASSWORD: + INFRAHUB_BROKER_PORT: + INFRAHUB_BROKER_TLS_ENABLED: + INFRAHUB_BROKER_USERNAME: + INFRAHUB_BROKER_VIRTUALHOST: + INFRAHUB_CACHE_ADDRESS: + INFRAHUB_CACHE_DATABASE: + INFRAHUB_CACHE_ENABLE: + INFRAHUB_CACHE_PORT: + INFRAHUB_CONFIG: + INFRAHUB_DB_ADDRESS: + INFRAHUB_DB_DATABASE: + INFRAHUB_DB_MAX_DEPTH_SEARCH_HIERARCHY: + INFRAHUB_DB_PASSWORD: + INFRAHUB_DB_PORT: + INFRAHUB_DB_PROTOCOL: + INFRAHUB_DB_QUERY_SIZE_LIMIT: + INFRAHUB_DB_RETRY_LIMIT: + INFRAHUB_DB_TYPE: + INFRAHUB_DB_USERNAME: + INFRAHUB_DOCS_INDEX_PATH: + INFRAHUB_EXPERIMENTAL_GRAPHQL_ENUMS: + INFRAHUB_EXPERIMENTAL_PULL_REQUEST: + INFRAHUB_GIT_REPOSITORIES_DIRECTORY: + INFRAHUB_GIT_SYNC_INTERVAL: + INFRAHUB_INITIAL_DEFAULT_BRANCH: + INFRAHUB_INTERNAL_ADDRESS: + INFRAHUB_LOGGING_REMOTE_API_SERVER_DSN: + INFRAHUB_LOGGING_REMOTE_ENABLE: + INFRAHUB_LOGGING_REMOTE_FRONTEND_DSN: + INFRAHUB_LOGGING_REMOTE_GIT_AGENT_DSN: + INFRAHUB_LOG_LEVEL: + INFRAHUB_MISC_MAXIMUM_VALIDATOR_EXECUTION_TIME: + INFRAHUB_MISC_PRINT_QUERY_DETAILS: + INFRAHUB_MISC_START_BACKGROUND_RUNNER: + INFRAHUB_PRODUCTION: + INFRAHUB_SECURITY_ACCESS_TOKEN_LIFETIME: + 
INFRAHUB_SECURITY_INITIAL_ADMIN_PASSWORD: + INFRAHUB_SECURITY_INITIAL_ADMIN_TOKEN: + INFRAHUB_SECURITY_REFRESH_TOKEN_LIFETIME: + INFRAHUB_SECURITY_SECRET_KEY: + INFRAHUB_STORAGE_BUCKET_NAME: + INFRAHUB_STORAGE_CUSTOM_DOMAIN: + INFRAHUB_STORAGE_DEFAULT_ACL: + INFRAHUB_STORAGE_DRIVER: + INFRAHUB_STORAGE_ENDPOINT_URL: + INFRAHUB_STORAGE_LOCAL_PATH: + INFRAHUB_STORAGE_QUERYTSTRING_AUTH: + INFRAHUB_STORAGE_USE_SSL: + INFRAHUB_TRACE_ENABLE: + INFRAHUB_TRACE_EXPORTER_ENDPOINT: + INFRAHUB_TRACE_EXPORTER_PROTOCOL: + INFRAHUB_TRACE_EXPORTER_TYPE: + INFRAHUB_TRACE_INSECURE: + OTEL_RESOURCE_ATTRIBUTES: -version: "3.4" services: infrahub-server: build: @@ -21,19 +99,21 @@ services: ports: - "${INFRAHUB_SERVER_PORT:-8000}:8000" environment: - - "INFRAHUB_CONFIG=/source/development/infrahub.toml" - - "INFRAHUB_PRODUCTION=false" - - "INFRAHUB_LOG_LEVEL=INFO" - - "INFRAHUB_SECURITY_INITIAL_ADMIN_TOKEN=06438eb2-8019-4776-878c-0941b1f1d1ec" - - "INFRAHUB_SECURITY_SECRET_KEY=327f747f-efac-42be-9e73-999f08f86b92" - - "INFRAHUB_ALLOW_ANONYMOUS_ACCESS=true" - - "INFRAHUB_DB_TYPE=${INFRAHUB_DB_TYPE}" + <<: *infrahub_config + INFRAHUB_CONFIG: /source/development/infrahub.toml + INFRAHUB_PRODUCTION: false + INFRAHUB_LOG_LEVEL: INFO + INFRAHUB_SECURITY_INITIAL_ADMIN_TOKEN: 06438eb2-8019-4776-878c-0941b1f1d1ec + INFRAHUB_SECURITY_SECRET_KEY: 327f747f-efac-42be-9e73-999f08f86b92 volumes: - ../:/source - "storage_data:/opt/infrahub/storage" tty: true + labels: + com.github.run_id: "${GITHUB_RUN_ID:-unknown}" + com.github.job: "${JOB_NAME:-unknown}" healthcheck: - test: wget -O /dev/null http://localhost:8000/api/schema/summary || exit 1 + test: curl -s -f -o /dev/null http://localhost:8000/api/schema/summary || exit 1 interval: 5s timeout: 5s retries: 20 @@ -41,20 +121,6 @@ services: database: ports: - "${INFRAHUB_DB_PORT:-7687}:7687" - vmagent: - image: victoriametrics/vmagent:v1.97.1 - volumes: - - vmagentdata:/vmagentdata - - ./scale_test_vmagent.yml:/etc/prometheus/prometheus.yml:ro - command: - - "--promscrape.config=/etc/prometheus/prometheus.yml" - - "--remoteWrite.url=${METRICS_ENDPOINT:-http://127.0.0.1:8424}" - - "--remoteWrite.label=job=scale-test" - - "--remoteWrite.label=run_id=${GITHUB_RUN_ID}" - - "--remoteWrite.label=run_number=${GITHUB_RUN_NUMBER}" - ports: - - "${VMAGENT_PORT:-8429}:8429" volumes: storage_data: - vmagentdata: diff --git a/development/docker-compose-test.yml b/development/docker-compose-test.yml index 89b1d33d98..70f47b1322 100644 --- a/development/docker-compose-test.yml +++ b/development/docker-compose-test.yml @@ -1,7 +1,85 @@ --- # yamllint disable rule:line-length +x-infrahub-config: &infrahub_config + AWS_ACCESS_KEY_ID: + AWS_DEFAULT_ACL: + AWS_QUERYSTRING_AUTH: + AWS_S3_BUCKET_NAME: + AWS_S3_CUSTOM_DOMAIN: + AWS_S3_ENDPOINT_URL: + AWS_S3_USE_SSL: + AWS_SECRET_ACCESS_KEY: + DB_TYPE: + INFRAHUB_ADDRESS: + INFRAHUB_ALLOW_ANONYMOUS_ACCESS: + INFRAHUB_ANALYTICS_ADDRESS: + INFRAHUB_ANALYTICS_API_KEY: + INFRAHUB_ANALYTICS_ENABLE: + INFRAHUB_API_CORS_ALLOW_CREDENTIALS: + INFRAHUB_API_CORS_ALLOW_HEADERS: + INFRAHUB_API_CORS_ALLOW_METHODS: + INFRAHUB_API_CORS_ALLOW_ORIGINS: + INFRAHUB_BROKER_ADDRESS: + INFRAHUB_BROKER_ENABLE: + INFRAHUB_BROKER_MAXIMUM_CONCURRENT_MESSAGES: + INFRAHUB_BROKER_MAXIMUM_MESSAGE_RETRIES: + INFRAHUB_BROKER_NAMESPACE: + INFRAHUB_BROKER_PASSWORD: + INFRAHUB_BROKER_PORT: + INFRAHUB_BROKER_TLS_ENABLED: + INFRAHUB_BROKER_USERNAME: + INFRAHUB_BROKER_VIRTUALHOST: + INFRAHUB_CACHE_ADDRESS: + INFRAHUB_CACHE_DATABASE: + INFRAHUB_CACHE_ENABLE: + INFRAHUB_CACHE_PORT: + 
INFRAHUB_CONFIG: + INFRAHUB_DB_ADDRESS: + INFRAHUB_DB_DATABASE: + INFRAHUB_DB_MAX_DEPTH_SEARCH_HIERARCHY: + INFRAHUB_DB_PASSWORD: + INFRAHUB_DB_PORT: + INFRAHUB_DB_PROTOCOL: + INFRAHUB_DB_QUERY_SIZE_LIMIT: + INFRAHUB_DB_RETRY_LIMIT: + INFRAHUB_DB_TYPE: + INFRAHUB_DB_USERNAME: + INFRAHUB_DOCS_INDEX_PATH: + INFRAHUB_EXPERIMENTAL_GRAPHQL_ENUMS: + INFRAHUB_EXPERIMENTAL_PULL_REQUEST: + INFRAHUB_GIT_REPOSITORIES_DIRECTORY: + INFRAHUB_GIT_SYNC_INTERVAL: + INFRAHUB_INITIAL_DEFAULT_BRANCH: + INFRAHUB_INTERNAL_ADDRESS: + INFRAHUB_LOGGING_REMOTE_API_SERVER_DSN: + INFRAHUB_LOGGING_REMOTE_ENABLE: + INFRAHUB_LOGGING_REMOTE_FRONTEND_DSN: + INFRAHUB_LOGGING_REMOTE_GIT_AGENT_DSN: + INFRAHUB_LOG_LEVEL: + INFRAHUB_MISC_MAXIMUM_VALIDATOR_EXECUTION_TIME: + INFRAHUB_MISC_PRINT_QUERY_DETAILS: + INFRAHUB_MISC_START_BACKGROUND_RUNNER: + INFRAHUB_PRODUCTION: + INFRAHUB_SECURITY_ACCESS_TOKEN_LIFETIME: + INFRAHUB_SECURITY_INITIAL_ADMIN_PASSWORD: + INFRAHUB_SECURITY_INITIAL_ADMIN_TOKEN: + INFRAHUB_SECURITY_REFRESH_TOKEN_LIFETIME: + INFRAHUB_SECURITY_SECRET_KEY: + INFRAHUB_STORAGE_BUCKET_NAME: + INFRAHUB_STORAGE_CUSTOM_DOMAIN: + INFRAHUB_STORAGE_DEFAULT_ACL: + INFRAHUB_STORAGE_DRIVER: + INFRAHUB_STORAGE_ENDPOINT_URL: + INFRAHUB_STORAGE_LOCAL_PATH: + INFRAHUB_STORAGE_QUERYTSTRING_AUTH: + INFRAHUB_STORAGE_USE_SSL: + INFRAHUB_TRACE_ENABLE: + INFRAHUB_TRACE_EXPORTER_ENDPOINT: + INFRAHUB_TRACE_EXPORTER_PROTOCOL: + INFRAHUB_TRACE_EXPORTER_TYPE: + INFRAHUB_TRACE_INSECURE: + OTEL_RESOURCE_ATTRIBUTES: -version: "3.4" services: infrahub-test: build: @@ -10,12 +88,12 @@ services: target: backend image: "${IMAGE_NAME}:${IMAGE_VER}" environment: - - "INFRAHUB_BUILD_NAME=${INFRAHUB_BUILD_NAME}" - - "INFRAHUB_CONFIG=/source/development/infrahub.toml" - - "INFRAHUB_PRODUCTION=false" - - "INFRAHUB_LOG_LEVEL=CRITICAL" - - "INFRAHUB_TEST_IN_DOCKER=1" - - "INFRAHUB_DB_TYPE=${INFRAHUB_DB_TYPE}" + <<: *infrahub_config + INFRAHUB_BUILD_NAME: + INFRAHUB_CONFIG: /source/development/infrahub.toml + INFRAHUB_PRODUCTION: false + INFRAHUB_LOG_LEVEL: CRITICAL + INFRAHUB_TEST_IN_DOCKER: 1 volumes: - ../:/source tty: true diff --git a/development/docker-compose.default.yml b/development/docker-compose.default.yml index 93a25e798a..02281d6fe6 100644 --- a/development/docker-compose.default.yml +++ b/development/docker-compose.default.yml @@ -1,5 +1,4 @@ --- -version: "3.4" services: infrahub-server: ports: diff --git a/development/docker-compose.dev-override-benchmark.yml b/development/docker-compose.dev-override-benchmark.yml index 34e76ffc36..4479f30332 100644 --- a/development/docker-compose.dev-override-benchmark.yml +++ b/development/docker-compose.dev-override-benchmark.yml @@ -1,5 +1,4 @@ --- -version: "3.4" services: database: ports: diff --git a/development/docker-compose.dev-override.yml.tmp b/development/docker-compose.dev-override.yml.tmp index 23fe9f31a8..59348e84a8 100644 --- a/development/docker-compose.dev-override.yml.tmp +++ b/development/docker-compose.dev-override.yml.tmp @@ -1,5 +1,4 @@ --- -version: "3.4" services: database: ports: diff --git a/development/docker-compose.local-build.yml b/development/docker-compose.local-build.yml index d2e949aecf..928bc2f6dd 100644 --- a/development/docker-compose.local-build.yml +++ b/development/docker-compose.local-build.yml @@ -1,5 +1,4 @@ --- -version: "3.4" services: infrahub-server: volumes: diff --git a/development/docker-compose.override.yml.tmp b/development/docker-compose.override.yml.tmp index aacccba4a6..0967decb3d 100644 --- a/development/docker-compose.override.yml.tmp +++ 
b/development/docker-compose.override.yml.tmp @@ -1,9 +1,8 @@ --- -version: "3.4" services: # -------------------------------------------------------------------------------- # - Prometheus to collect all metrics endpoints - # - Tempo to receive traces + # - Tempo or Jaeger to receive traces # - Grafana to visualize these metrics # - Loki to receive logs from promtail # - Promtail to parse logs from different source @@ -43,6 +42,13 @@ services: ports: - "3200:3200" + # jaeger: + # image: jaegertracing/all-in-one:1.53 + # environment: + # COLLECTOR_ZIPKIN_HOST_PORT: ":9411" + # ports: + # - "16686:16686" + prometheus: image: prom/prometheus:latest volumes: diff --git a/development/docker-compose.yml b/development/docker-compose.yml index 492c5e862c..3589badb8c 100644 --- a/development/docker-compose.yml +++ b/development/docker-compose.yml @@ -1,7 +1,85 @@ --- # yamllint disable rule:line-length +x-infrahub-config: &infrahub_config + AWS_ACCESS_KEY_ID: + AWS_DEFAULT_ACL: + AWS_QUERYSTRING_AUTH: + AWS_S3_BUCKET_NAME: + AWS_S3_CUSTOM_DOMAIN: + AWS_S3_ENDPOINT_URL: + AWS_S3_USE_SSL: + AWS_SECRET_ACCESS_KEY: + DB_TYPE: + INFRAHUB_ADDRESS: + INFRAHUB_ALLOW_ANONYMOUS_ACCESS: + INFRAHUB_ANALYTICS_ADDRESS: + INFRAHUB_ANALYTICS_API_KEY: + INFRAHUB_ANALYTICS_ENABLE: + INFRAHUB_API_CORS_ALLOW_CREDENTIALS: + INFRAHUB_API_CORS_ALLOW_HEADERS: + INFRAHUB_API_CORS_ALLOW_METHODS: + INFRAHUB_API_CORS_ALLOW_ORIGINS: + INFRAHUB_BROKER_ADDRESS: + INFRAHUB_BROKER_ENABLE: + INFRAHUB_BROKER_MAXIMUM_CONCURRENT_MESSAGES: + INFRAHUB_BROKER_MAXIMUM_MESSAGE_RETRIES: + INFRAHUB_BROKER_NAMESPACE: + INFRAHUB_BROKER_PASSWORD: + INFRAHUB_BROKER_PORT: + INFRAHUB_BROKER_TLS_ENABLED: + INFRAHUB_BROKER_USERNAME: + INFRAHUB_BROKER_VIRTUALHOST: + INFRAHUB_CACHE_ADDRESS: + INFRAHUB_CACHE_DATABASE: + INFRAHUB_CACHE_ENABLE: + INFRAHUB_CACHE_PORT: + INFRAHUB_CONFIG: + INFRAHUB_DB_ADDRESS: + INFRAHUB_DB_DATABASE: + INFRAHUB_DB_MAX_DEPTH_SEARCH_HIERARCHY: + INFRAHUB_DB_PASSWORD: + INFRAHUB_DB_PORT: + INFRAHUB_DB_PROTOCOL: + INFRAHUB_DB_QUERY_SIZE_LIMIT: + INFRAHUB_DB_RETRY_LIMIT: + INFRAHUB_DB_TYPE: + INFRAHUB_DB_USERNAME: + INFRAHUB_DOCS_INDEX_PATH: + INFRAHUB_EXPERIMENTAL_GRAPHQL_ENUMS: + INFRAHUB_EXPERIMENTAL_PULL_REQUEST: + INFRAHUB_GIT_REPOSITORIES_DIRECTORY: + INFRAHUB_GIT_SYNC_INTERVAL: + INFRAHUB_INITIAL_DEFAULT_BRANCH: + INFRAHUB_INTERNAL_ADDRESS: + INFRAHUB_LOGGING_REMOTE_API_SERVER_DSN: + INFRAHUB_LOGGING_REMOTE_ENABLE: + INFRAHUB_LOGGING_REMOTE_FRONTEND_DSN: + INFRAHUB_LOGGING_REMOTE_GIT_AGENT_DSN: + INFRAHUB_LOG_LEVEL: + INFRAHUB_MISC_MAXIMUM_VALIDATOR_EXECUTION_TIME: + INFRAHUB_MISC_PRINT_QUERY_DETAILS: + INFRAHUB_MISC_START_BACKGROUND_RUNNER: + INFRAHUB_PRODUCTION: + INFRAHUB_SECURITY_ACCESS_TOKEN_LIFETIME: + INFRAHUB_SECURITY_INITIAL_ADMIN_PASSWORD: + INFRAHUB_SECURITY_INITIAL_ADMIN_TOKEN: + INFRAHUB_SECURITY_REFRESH_TOKEN_LIFETIME: + INFRAHUB_SECURITY_SECRET_KEY: + INFRAHUB_STORAGE_BUCKET_NAME: + INFRAHUB_STORAGE_CUSTOM_DOMAIN: + INFRAHUB_STORAGE_DEFAULT_ACL: + INFRAHUB_STORAGE_DRIVER: + INFRAHUB_STORAGE_ENDPOINT_URL: + INFRAHUB_STORAGE_LOCAL_PATH: + INFRAHUB_STORAGE_QUERYTSTRING_AUTH: + INFRAHUB_STORAGE_USE_SSL: + INFRAHUB_TRACE_ENABLE: + INFRAHUB_TRACE_EXPORTER_ENDPOINT: + INFRAHUB_TRACE_EXPORTER_PROTOCOL: + INFRAHUB_TRACE_EXPORTER_TYPE: + INFRAHUB_TRACE_INSECURE: + OTEL_RESOURCE_ATTRIBUTES: -version: "3.4" services: infrahub-server: build: @@ -20,18 +98,19 @@ services: cache: condition: service_healthy environment: - - "INFRAHUB_CONFIG=/source/development/infrahub.toml" - - "INFRAHUB_PRODUCTION=false" - - 
"INFRAHUB_LOG_LEVEL=INFO" - - "INFRAHUB_SECURITY_INITIAL_ADMIN_TOKEN=06438eb2-8019-4776-878c-0941b1f1d1ec" - - "INFRAHUB_SECURITY_SECRET_KEY=327f747f-efac-42be-9e73-999f08f86b92" - - "INFRAHUB_ALLOW_ANONYMOUS_ACCESS=true" - - "INFRAHUB_DB_TYPE=${INFRAHUB_DB_TYPE}" + <<: *infrahub_config + INFRAHUB_CONFIG: /source/development/infrahub.toml + INFRAHUB_PRODUCTION: false + INFRAHUB_SECURITY_INITIAL_ADMIN_TOKEN: 06438eb2-8019-4776-878c-0941b1f1d1ec + INFRAHUB_SECURITY_SECRET_KEY: 327f747f-efac-42be-9e73-999f08f86b92 volumes: - "storage_data:/opt/infrahub/storage" tty: true + labels: + com.github.run_id: "${GITHUB_RUN_ID:-unknown}" + com.github.job: "${JOB_NAME:-unknown}" healthcheck: - test: wget -O /dev/null http://localhost:8000/api/schema/summary || exit 1 + test: curl -s -f -o /dev/null http://localhost:8000/api/schema/summary || exit 1 interval: 5s timeout: 5s retries: 20 @@ -51,17 +130,20 @@ services: depends_on: - infrahub-server environment: - - "INFRAHUB_CONFIG=/source/development/infrahub.toml" - - "INFRAHUB_ADDRESS=http://infrahub-server:8000" - - "INFRAHUB_PRODUCTION=false" - - "INFRAHUB_LOG_LEVEL=DEBUG" - - "INFRAHUB_SDK_API_TOKEN=06438eb2-8019-4776-878c-0941b1f1d1ec" - - "INFRAHUB_SDK_TIMEOUT=20" - - "INFRAHUB_DB_TYPE=${INFRAHUB_DB_TYPE}" + <<: *infrahub_config + INFRAHUB_CONFIG: /source/development/infrahub.toml + INFRAHUB_ADDRESS: http://infrahub-server:8000 + INFRAHUB_PRODUCTION: false + INFRAHUB_LOG_LEVEL: DEBUG + INFRAHUB_SDK_API_TOKEN: 06438eb2-8019-4776-878c-0941b1f1d1ec + INFRAHUB_SDK_TIMEOUT: 20 volumes: - "git_data:/opt/infrahub/git" - "git_remote_data:/remote" tty: true + labels: + com.github.run_id: "${GITHUB_RUN_ID:-unknown}" + com.github.job: "${JOB_NAME:-unknown}" volumes: git_data: diff --git a/development/infrahub.toml b/development/infrahub.toml index 7aa6b1121f..3a22ebe91a 100644 --- a/development/infrahub.toml +++ b/development/infrahub.toml @@ -25,14 +25,5 @@ driver = "local" [storage.local] path = "/opt/infrahub/storage" -[trace] -enable = false -insecure = "True" -exporter_type = "otlp" -exporter_protocol = "grpc" -exporter_endpoint = "tempo" -exporter_port = 4317 - - # [experimental_features] # pull_request = true diff --git a/development/scale_test_vmagent.yml b/development/vmagent.yml similarity index 68% rename from development/scale_test_vmagent.yml rename to development/vmagent.yml index 375c6349b9..c09179cf08 100644 --- a/development/scale_test_vmagent.yml +++ b/development/vmagent.yml @@ -3,9 +3,10 @@ global: scrape_interval: 2s scrape_configs: - - job_name: "scale-test" + - job_name: "ci" static_configs: - targets: - "database:2004" + # - "memgraph-exporter:2004" # requires enterprise license - "infrahub-server:8000/metrics" - "infrahub-git:8000" diff --git a/docs/_templates/message-bus-events.j2 b/docs/_templates/message-bus-events.j2 new file mode 100644 index 0000000000..e9f573d8ae --- /dev/null +++ b/docs/_templates/message-bus-events.j2 @@ -0,0 +1,70 @@ +--- +title: Message Bus Events +--- + +# Message bus events + +This document provides detailed documentation for all events used in the Infrahub message bus system. + +:::info + +For more detailed explanations on how to use these events within Infrahub, see the [event handling](/topics/event-handling) topic. 
+ +::: + +## Messages events + +{% for primary, secondaries in message_classes.items() -%} +{% for secondary, events in secondaries.items() -%} + +### {{ primary | replace("_", " ") | title }} {{ secondary.split('.')[-1] | replace("_", " ") | title }} + + +{% for event in events -%} + +#### Event {{ event.event_name }} + +{% if event.description %} +**Description**: {{ event.description }} +{% endif %} +**Priority**: {{ event.priority }} + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +{% for field in event.fields -%} +{{ "| **{}** | {} | {} | {} |".format(field.name, field.description | replace("\n", " "), field.type , field.default if field.default else "None") }} +{% endfor -%} + +{% endfor %} +{% endfor %} +{% endfor %} + +## Responses events + +{% for primary, secondaries in message_classes.items() -%} +{% for secondary, events in secondaries.items() -%} + +### {{ primary | replace("_", " ") | title }} {{ secondary.split('.')[-1] | replace("_", " ") | title }} + + +{% for event in events -%} + +#### Event {{ event.event_name }} + +{% if event.description %} +**Description**: {{ event.description }} +{% endif %} +**Priority**: {{ event.priority }} + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +{% for field in event.fields -%} +{{ "| **{}** | {} | {} | {} |".format(field.name, field.description | replace("\n", " "), field.type , field.default if field.default else "None") }} +{% endfor -%} + +{% endfor %} +{% endfor %} +{% endfor %} diff --git a/docs/docs/development/backend.mdx b/docs/docs/development/backend.mdx index 509fa067ed..7d2356e4ea 100644 --- a/docs/docs/development/backend.mdx +++ b/docs/docs/development/backend.mdx @@ -33,13 +33,12 @@ export INFRAHUB_METRICS_PORT=8001 export INFRAHUB_DB_TYPE=memgraph # Accepts neo4j or memgraph export INFRAHUB_SECURITY_INITIAL_ADMIN_TOKEN="${ADMIN_TOKEN}" # Random string which can be generated using: openssl rand -hex 16 export INFRAHUB_STORAGE_LOCAL_PATH="${HOME}/Development/infrahub-storage" +export INFRAHUB_API_CORS_ALLOW_ORIGINS='["http://localhost:8080"]' # Allow frontend/backend communications without CORS issues export INFRAHUB_IMAGE_VER=local # Force building a local image instead of pulling one from the registry ``` The exported environment variables are very important and must be set before moving to another step. Without these, you will likely face some errors or issues later. -Setting `INFRAHUB_IMAGE_VER=local` is important because it tells Docker to do a fresh build and to bind mount the Infrahub repository inside the containers (this ensures containers are in sync with your local code). - ## Required services Infrahub uses several external services to work: @@ -64,13 +63,13 @@ poetry install Now it's time to bring the required services up, and for that we have demo commands: ```bash -invoke demo.destroy demo.dev-start +invoke dev.destroy dev.deps ``` This will actually pass two commands, one to destroy any remains of a previous run and one to start services. So this will effectively bring up clean services without leftovers. 
We can see which services are running by using: ```bash -poetry run invoke demo.status +poetry run invoke dev.status ``` This should yield a Docker output like the following: @@ -82,7 +81,7 @@ infrahub-database-1 memgraph/memgraph:2.13.0 "/usr/lib/memgraph/m…" infrahub-message-queue-1 rabbitmq:3.12-management "docker-entrypoint.s…" message-queue 2 hours ago Up 2 hours (healthy) 4369/tcp, 5671/tcp, 0.0.0.0:5672->5672/tcp, 15671/tcp, 15691-15692/tcp, 25672/tcp, 0.0.0.0:15672->15672/tcp ``` -When following a guide, like the [installation guide](/guides/installation.mdx), the command `demo.start` is mentioned. It is slightly different from the `demo.dev-start` that is mentioned here. The `demo.start` will bring up a demo environment as a whole including services and Infrahub while the `demo.dev-start` will only start the services as seen in the code block above. +When following a guide, like the [installation guide](/guides/installation.mdx), the command `demo.start` is mentioned. It is slightly different from the `dev.deps` that is mentioned here. The `demo.start` will bring up a demo environment as a whole including services and Infrahub while the `dev.deps` will only start the services as seen in the code block above. ## Running Infrahub test suite diff --git a/docs/docs/development/docs.mdx b/docs/docs/development/docs.mdx index 042e452521..53e4c51100 100644 --- a/docs/docs/development/docs.mdx +++ b/docs/docs/development/docs.mdx @@ -45,7 +45,7 @@ The preferred way to work on the documentation with Vale and markdownlint is dir You can disable Vale and markdownlint in-line with the following markdown comments: -```md +```markdown Ignored Specialized Phrase ignored by vale @@ -73,9 +73,9 @@ Add special case capitalization words to the `branded-terms-case-swap.yml` rule. ## Writing markdown -Pages are written in markdown or generated by the app source. For markdown pages, Retype supports [most standard markdown syntax](https://retype.com/guides/formatting/). +Pages are written in MDX, which is a superset of markdown or generated by the app source. -In addition, Retype has its own [markdown-inspired components](https://retype.com/components/). You'll often find reference links, panels, and snippets used throughout the Infrahub docs. +In addition, Docusaurus has its own [markdown-inspired components](https://docusaurus.io/docs/markdown-features). You'll often find reference links, panels, and snippets used throughout the Infrahub docs. ### Markdown tips @@ -83,9 +83,35 @@ In addition, Retype has its own [markdown-inspired components](https://retype.co Use two full returns between paragraphs (one empty line). This ensures a new paragraph is created. -#### Ensure proper h1 tags +#### Notification blocks + +When writing documentation, it's essential to guide the reader's attention to specific types of information. Notification blocks are a powerful tool to achieve this, allowing you to highlight information based on its nature and importance. Here are the types of notification blocks and how to use them: + +- **Info:** Use info blocks for additional, helpful information that isn't required to complete the task but offers more context or useful tips. + + ```markdown + > **Info**: This feature is available in version 2.1 and later. + ``` + +- **Success:** Use success blocks to highlight expected outcomes and "status checks" to ensure the reader is on track with the guide. These blocks can reinforce the reader's progress and provide positive feedback. 
+ + ```markdown + > **Success**: If you've followed the steps correctly, your installation should now be complete. + ``` + +- **Warning:** Warning blocks should be used to highlight common errors or mistakes that may occur during the process. They serve as preventive measures to help the reader avoid potential pitfalls. -Many pages include a *greymatter* or *metadata* block at the top, denoted by two sets of `---`. This is used by Retype to [configure the page](https://retype.com/faq/#what-is-page-metadata). If using the `title` attribute, also use a top-level heading `#` on the page. They do not need to be the same. If `title` is omitted, Retype will use the top-level heading (h1). + ```markdown + > **Warning**: Ensure you've backed up your files before proceeding with this step to prevent data loss. + ``` + +- **Danger:** Use danger blocks to highlight irreversible or breaking actions. These notifications are critical for steps that could significantly affect the system or data if mishandled. + + ```markdown + > **Danger**: This action will permanently delete your data and cannot be undone. + ``` + +Incorporating these blocks into your documentation makes it more interactive and user-friendly, guiding the reader through different stages of their learning or implementation process with visual cues that emphasize the significance of each piece of information. ## Organizing new pages @@ -214,7 +240,7 @@ Every page should have a top-level heading. Additional heading tiers can only ex **:x: Don't do this**: -```md +```markdown # Page title @@ -235,7 +261,7 @@ Capitalize the first letter of each list item. If an item is a complete sentence When listing items and descriptions, prefer the use of a colon (:) instead of a dash (-). -```md +```markdown - Not - this - Or - this @@ -249,7 +275,7 @@ When listing items and descriptions, prefer the use of a colon (:) instead of a Avoid extra spaces before a colon. -```md +```markdown Feature : Explanation of feature diff --git a/docs/docs/development/frontend/testing-guidelines.mdx b/docs/docs/development/frontend/testing-guidelines.mdx index 2e3fb3a6ec..e6f1e73c62 100644 --- a/docs/docs/development/frontend/testing-guidelines.mdx +++ b/docs/docs/development/frontend/testing-guidelines.mdx @@ -24,9 +24,7 @@ Infrahub uses [Playwright](https://playwright.dev/) for e2e testing. ### Folder structure -e2e tests are located in `/frontend/tests/e2e` and are structured based on routing. - -For example, a test linked to the route `/objects/:objectname/:objectid` will be found in the `/objects/[objectname]/[objectid]` folder. +E2E tests are located in `/frontend/tests/e2e`. ### Writing e2e tests diff --git a/docs/docs/development/readme.mdx b/docs/docs/development/readme.mdx index 6823a7f7fa..efa3578ef2 100644 --- a/docs/docs/development/readme.mdx +++ b/docs/docs/development/readme.mdx @@ -18,9 +18,9 @@ title: Development To help format the code correctly, the project is also recommending: -- **autoflake** to automatically remove all unused variables and all unused import +- **ruff** to automatically remove all unused variables and all unused import -> `invoke format` will run Ruff and autoflake together to ensure all files are as close as possible to the expected format. +> `invoke format` will run Ruff to ensure all files are as close as possible to the expected format. 
### Run tests diff --git a/docs/docs/guides/create-schema.mdx b/docs/docs/guides/create-schema.mdx index 2bc325538d..8b5171da01 100644 --- a/docs/docs/guides/create-schema.mdx +++ b/docs/docs/guides/create-schema.mdx @@ -39,7 +39,13 @@ The `NetworkDevice` node will have the following attributes: The `NetworkInterface` node will have the following attributes: - `name` (Text): the name of the interface, which is a required attribute -- `description` (Text): a description for the interface, which is a required +- `description` (Text): a description for the interface, which is a required attribute + +:::note + +We define a `default_filter` on the `hostname` attribute of the `NetworkDevice`. This way we can use the `hostname` as an alternative for the `id` in the queries and mutations in this guide. + +::: ```yaml --- @@ -47,6 +53,7 @@ version: "1.0" nodes: - name: Device namespace: Network + default_filter: hostname__value attributes: - name: hostname kind: Text @@ -66,7 +73,7 @@ nodes: Create a branch `network-device-schema` in Infrahub ```bash -infrahubctl branch create network-device-schema` +infrahubctl branch create network-device-schema ``` Load the schema into Infrahub in the `network-device-schema` branch @@ -115,6 +122,7 @@ version: "1.0" nodes: - name: Device namespace: Network + default_filter: hostname__value attributes: - name: hostname kind: Text @@ -138,6 +146,7 @@ nodes: - name: device cardinality: one peer: NetworkDevice + optional: false kind: Parent ``` @@ -165,7 +174,7 @@ mutation { id } } - NetworkInterfaceCreate(data: {name: {value: "Ethernet1"}, description: {value: "WAN interface"}}) { + NetworkInterfaceCreate(data: {name: {value: "Ethernet1"}, description: {value: "WAN interface"}, device: {id: "atl1-edge1"} }) { ok object { id @@ -174,21 +183,6 @@ mutation { } ``` -:::note - -The id's returned from the result of this mutation need to be used within the next mutation, they will be different then the id's we use here in the guide - -::: - -We can add interface `Ethernet1` as a related node for the interfaces relation on device `atl1-edge1`. - -```graphql -mutation { - NetworkDeviceUpdate(data: {id: "17bcf363-9cf6-5f9d-38aa-c513c290aa53", interfaces: [{id: "17bcf363-d53c-323e-38aa-c5183839b28d"}]}) { - ok - } -``` - In the Web UI we can now see that the device has a relation to the Ethernet1 interface. ## 3. 
Abstracting nodes into generics @@ -224,9 +218,11 @@ generics: cardinality: one peer: NetworkDevice kind: Parent + optional: false nodes: - name: Device namespace: Network + default_filter: hostname__value attributes: - name: hostname kind: Text @@ -275,34 +271,13 @@ mutation { id } } - NetworkPhysicalInterfaceCreate(data: {name: {value: "Ethernet1"}, description: {value: "WAN interface"}, speed: {value: 1000000000}}) { + NetworkPhysicalInterfaceCreate(data: {name: {value: "Ethernet1"}, description: {value: "WAN interface"}, speed: {value: 1000000000}, device: {id: "atl1-edge1"}}) { ok object { id } } - NetworkLogicalInterfaceCreate(data: {name: {value: "Vlan1"}, description: {value: "SVI for Vlan 1"}}) { - ok - object { - id - } - } -} -``` - -:::note - -The id's returned from the result of this mutation need to be used within the next mutation, they will be different then the id's we use here in the guide - -::: - -We can add `Ethernet1` and `Vlan1` interfaces as related nodes to the interfaces relation of device `atl1-edge1`: - -```graphql -mutation { - NetworkDeviceUpdate( - data: {id: "17bcf93a-2840-bfd2-329a-c515a0ee3c4d", interfaces: [{id: "17bcf93a-5af4-aa38-329f-c51355ef5d16"}, {id: "17bcf93a-6f6a-b583-3298-c51216de9ea8"}]} - ) { + NetworkLogicalInterfaceCreate(data: {name: {value: "Vlan1"}, description: {value: "SVI for Vlan 1"}, device: {id: "atl1-edge1"}}) { ok object { id @@ -315,7 +290,7 @@ In the detailed view of the device in the Web UI, we can now see that the device ## 4. Improving our schema -Although the schema is already a close representation of what we wanted to achieve, there is still a few improvements we would like to make. For this we are going to make use of the schema migrations feature in Infrahub. More details can be found in [the Schema topic](/topics/schema#schema-update--data-migrations). +Although the schema is already a close representation of what we wanted to achieve, there is still a few improvements we would like to make. For this we are going to make use of the schema migrations feature in Infrahub. More details can be found in [the Schema topic](/topics/schema#schema-update-and-data-migrations). 1. adding and `mtu` and `enabled` attribute on the generic `NetworkInterface` 4. deleting the `description` attribute of the generic `NetworkInterface` @@ -354,8 +329,8 @@ generics: label: MTU optional: false default_value: 1500 - - name: enabeld - label: Enabeld + - name: enabled + label: Enabled kind: Boolean optional: false default_value: false @@ -365,9 +340,11 @@ generics: cardinality: one peer: NetworkDevice kind: Parent + optional: false nodes: - name: Device namespace: Network + default_filter: hostname__value attributes: - name: hostname kind: Text @@ -431,7 +408,7 @@ diff: changed: attributes: added: - enabeld: null + enabled: null mtu: null changed: {} removed: @@ -442,7 +419,7 @@ diff: changed: attributes: added: - enabeld: null + enabled: null mtu: null changed: {} removed: {} @@ -453,7 +430,7 @@ diff: changed: attributes: added: - enabeld: null + enabled: null mtu: null changed: speed: diff --git a/docs/docs/guides/generator.mdx b/docs/docs/guides/generator.mdx new file mode 100644 index 0000000000..cc0b3ca524 --- /dev/null +++ b/docs/docs/guides/generator.mdx @@ -0,0 +1,191 @@ +--- +title: Creating a Generator +--- + +# Creating a generator in Infrahub + +Within Infrahub a generator is defined in an [external repository](/topics/repository). 
However, during development and troubleshooting it is easiest to start from your local computer and run the transform using [infrahubctl generator](/infrahubctl/infrahubctl-generator). + +The goal of this guide is to develop a Generator and add it to Infrahub, we will achieve this by following these steps. + +1. Identify the relevant data you want to extract from the database using a [GraphQL query](/topics/graphql), that can take an input parameter to filter the data +2. Write a Python script that uses the GraphQL query to read information from the system and generates new data based on the response +3. Create an entry for the generator within an .infrahub.yml file. +4. Create a Git repository +5. Test the generator with infrahubctl +6. Add the repository to Infrahub as an external repository +7. Validate that the generator works by triggering it through a proposed change + +## Preparations + +What your generator will look like will depend on what your schema looks like and the intended outcome. The generator described here will be very generic and also not useful in a real world scenario, it is only meant to describe how the generators work. + +As the default Infrahub schema doesn't have a lot of object types to use as a test, we will illustrate how this could work by adding two nodes to the schema. + +Load the following schema using the [infrahubctl schema](/infrahubctl/infrahubctl-schema) command. + +```yaml +# yaml-language-server: $schema=https://schema.infrahub.app/infrahub/schema/latest.json +--- +version: '1.0' + +nodes: + - name: Widget + namespace: Test + label: "Widget" + default_filter: name__value + display_labels: + - name__value + attributes: + - name: name + kind: Text + unique: true + - name: count + kind: Number + - name: Resource + namespace: Test + label: "Resource" + default_filter: name__value + display_labels: + - name__value + attributes: + - name: name + kind: Text + unique: true +``` + +Perform these steps in the frontend. + +1. Create two new widget objects + - One with the name `widget1` and count 1 + - One with the name `widget2` and count 2 +2. Create a Standard group called "widgets" +3. Add both of the created objects to the new group + +## 1. Identify the relevant data + +Here we define a GraphQL query that we will use to gather information. + +```graphql +query Widgets($name: String!) { + TestWidget(name__value: $name) { + edges { + node { + name { + value + } + count { + value + } + } + } + } +} +``` + +Create a local directory on your computer where we will store the generator files. + +```bash +mkdir example_generator +```` + +Within that directory store the above GraphQL query as widget_query.gql. + +## 2. Create a Python Generator + +The Generator class needs to implement a `generate` function that receives a `data` parameter that contains the response from the GraphQL query. + +The goal of this generator will be to create a number of resources that depends on the set count of the widgets. 
+ +```python +from infrahub_sdk.generator import InfrahubGenerator + + +class WidgetGenerator(InfrahubGenerator): + async def generate(self, data: dict) -> None: + widget = data["TestWidget"]["edges"][0]["node"] + widget_name: str = widget["name"]["value"] + widget_count: str = widget["count"]["value"] + + for count in range(1, widget_count + 1): + + payload = { + "name": f"{widget_name.lower()}-{count}", + } + obj = await self.client.create(kind="TestResource", data=payload) + await obj.save(allow_upsert=True) +``` + +Store this class within a new file called widget_generator.py. + +## 3. Create an .infrahub.yml file + +The [.infrahub.yml](/topics/infrahub-yml) file allows you to tie the different [components of a generator](/topics/generator) together into a working unit. + +```yaml +# yaml-language-server: $schema=https://schema.infrahub.app/python-sdk/repository-config/latest.json +--- +generator_definitions: + - name: widget_generator + file_path: "widget_generator.py" + targets: widgets + query: widget_query + class_name: WidgetGenerator + parameters: + name: "name__value" +``` + +This defines a generator definition with the following properties: + +- **name**: a unique name for the generator +- **file_path**: the relative file path to the file containing the generator as seen from within a git repository +- **targets**: the name of a group of which the members will be a target for this generator +- **query**: the name of the GraphQL query used within this generator +- **parameters**: the parameter to pass to the generator GraphQL query, in this case this we will pass the name of the object (widget) as the name parameter +- **query**: the name of the GraphQL query used within this generator + +## 4. Create a Git repository + +Within the `example_generator` folder you should now have 3 files: + +- `widget_query.gql`: Contains the GraphQL query +- `generator.py`: Contains the Python code for the generator +- `.infrahub.yml`: Contains the definition for the generator + +Before we can test our generator we must add the files to a local Git repository. + +```bash +git init --initial-branch=main +git add . +git commit -m "First commit" +``` + +## 5. Test the generator using infrahubctl + +Using infrahubctl you can first verify that the `.infrahub.yml` file is formatted correctly by listing available generators. + +```bash title="❯ infrahubctl generator --list" +Generators defined in repository: 1 +widget_generator (widget_generator.py::Generator) Target: widgets +``` + +:::note + +When running a generator with `infrahubctl` the [SDK tracking](/python-sdk/topics/tracking) feature isn't used. The reason for this is that internally Infrahub uses the ID of the generator_definition to control the tracking, this isn't available from the outside. For this reason it is recommended to create test branches when developing generators and validating the results. + +::: + +```bash +infrahubctl branch create test-branch1 +``` + +Then we can try to run the generator within this branch. + +```bash +infrahubctl generator widget_generator --branch=test-branch1 name=widget1 +infrahubctl generator widget_generator --branch=test-branch1 name=widget2 +``` + +Now you should see the tree TestResource objects within `test-branch1` one for the first widget and two for the second one. + +With this step completed you can [add your repository to Infrahub](/guides/repository) and then the generators will be executed as part of the proposed change pipeline. 
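Before adding the repository to Infrahub, it can be reassuring to confirm what the generator actually created. The snippet below is only a rough verification sketch under the assumptions used throughout this guide (Infrahub reachable on `http://localhost:8000`, the `test-branch1` branch, and the `TestResource` kind defined above); it is not part of the generator itself.

```python
import asyncio

from infrahub_sdk.client import InfrahubClient


async def main() -> None:
    # Assumes the local Infrahub instance used in this guide
    client = await InfrahubClient.init(address="http://localhost:8000")

    # Fetch every TestResource the generator created in the test branch
    resources = await client.all(kind="TestResource", branch="test-branch1")

    # Expecting three objects: widget1-1, widget2-1 and widget2-2
    for resource in resources:
        print(resource.name.value)


asyncio.run(main())
```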
diff --git a/docs/docs/guides/installation.mdx b/docs/docs/guides/installation.mdx index 400777070e..47056452bd 100644 --- a/docs/docs/guides/installation.mdx +++ b/docs/docs/guides/installation.mdx @@ -26,7 +26,7 @@ Systems on which you want to install Infrahub, have to meet the [hardware requir ## From Git repository -Create the base directory for the Infrahub installation. For this guide, we'll use /opt/infrahub. +Create the base directory for the Infrahub installation. For this guide, we'll use `/opt/infrahub`. ```bash sudo mkdir -p /opt/infrahub/ @@ -35,7 +35,7 @@ cd /opt/infrahub/ :::warning -Depending on your system configuration, you might have to give other users write permissions to the /opt/infrahub directory. +Depending on your system configuration, you might have to give other users write permissions to the `/opt/infrahub` directory. ::: @@ -156,3 +156,80 @@ The default devcontainer `.devcontainer/devcontainer.json` is the bare-bones one A first version of our K8S helm-chart is available in our repository. + +The following are required for production deployments using Helm: + +- data persistence must be enabled (except for the Infrahub API Server if using S3 storage) +- multiple replicas of the Infrahub API Server and Infrahub Git Agents should be deployed: you can make use of the `affinity` variable to define the affinity policy for the pods +- a shared storage should be available for use by the Git Agents (through a StorageClass that supports RWX accesses) +- S3 storage should be configured for the Infrahub API Server + +You can use the following values example: + +```yaml +global: + infrahubTag: stable + imagePullPolicy: Always + +infrahubServer: + replicas: 3 + affinity: + podAntiAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + - labelSelector: + matchExpressions: + - key: service + operator: In + values: + - infrahub-server + topologyKey: topology.kubernetes.io/zone + persistence: + enabled: false + ingress: + enabled: true + infrahubServer: + env: + INFRAHUB_ALLOW_ANONYMOUS_ACCESS: "true" + INFRAHUB_CACHE_PORT: 6379 + INFRAHUB_CONFIG: /config/infrahub.toml + INFRAHUB_DB_TYPE: neo4j + INFRAHUB_LOG_LEVEL: INFO + INFRAHUB_PRODUCTION: "true" + INFRAHUB_SECURITY_INITIAL_ADMIN_TOKEN: 06438eb2-8019-4776-878c-0941b1f1d1ec + INFRAHUB_SECURITY_SECRET_KEY: 327f747f-efac-42be-9e73-999f08f86b92 + INFRAHUB_STORAGE_DRIVER: s3 + AWS_ACCESS_KEY_ID: xxxx + AWS_SECRET_ACCESS_KEY: xxxx + AWS_S3_BUCKET_NAME: infrahub-data + AWS_S3_ENDPOINT_URL: https://s3 + +infrahubGit: + replicas: 3 + affinity: + podAntiAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + - labelSelector: + matchExpressions: + - key: service + operator: In + values: + - infrahub-git + topologyKey: topology.kubernetes.io/zone + persistence: + enabled: true + storageClassName: standard-rwx # using GCP Filestore + +database: + persistence: + data: + enabled: true + logs: + enabled: true + +nfs-server-provisioner: + enabled: false +``` + +```bash +helm install infrahub -f values.yml path/to/infrahub/chart +``` diff --git a/docs/docs/guides/jinja2-transform.mdx b/docs/docs/guides/jinja2-transform.mdx index bb25728e1c..5db92173ec 100644 --- a/docs/docs/guides/jinja2-transform.mdx +++ b/docs/docs/guides/jinja2-transform.mdx @@ -140,11 +140,11 @@ The next step is to create the actual Jinja Template file. 
Create a file called ```jinja2 {% if data.BuiltinTag.edges and data.BuiltinTag.edges is iterable %} -{% for tag in ["BuiltinTag"]["edges"][0]["node"] %} -{% set tag_name = tag.name.value %} -{% set tag_description = tag.description.value %} - {{ "{{ tag_name }}" }} - description: {{ "{{ tag_description }}" }} +{% for tag in data["BuiltinTag"]["edges"] %} +{% set tag_name = tag.node.name.value %} +{% set tag_description = tag.node.description.value %} +{{ tag_name }} + description: {{ tag_description }} {% endfor %} {% endif %} ``` diff --git a/docs/docs/guides/profiles.mdx b/docs/docs/guides/profiles.mdx new file mode 100644 index 0000000000..88ca38f140 --- /dev/null +++ b/docs/docs/guides/profiles.mdx @@ -0,0 +1,253 @@ +--- +title: Creating and assigning profiles +--- + +# Creating and assigning profiles + +In this tutorial we will be creating a Profile for a network device interface, and then create multiple interfaces using that profile. + +The profile will be used to create interfaces that will connect end-users. We want all the end-user interfaces in our network to have exactly the same configuration. + +The following configuration attributes need to be defined in the profile: + +- Profile: end-user-interface +- Status: active +- Enabled: true +- Auto negotiation: enabled +- MTU: 1500 bytes +- Mode: Access +- Untagged VLAN: 10 +- Role: end-user + +We will be assuming the following schema has been loaded into Infrahub, this is a requirement to be able to complete the rest of this guide. + +```yaml +--- +version: "1.0" +nodes: + - name: Interface + namespace: Infra + description: "Network Interface" + label: "Interface" + include_in_menu: true + display_labels: + - name__value + order_by: + - name__value + uniqueness_constraints: + attributes: + - name: name + kind: Text + optional: false + unique: true + - name: description + kind: Text + optional: true + - name: speed + kind: Number + optional: true + - name: auto_negotiation + kind: Boolean + default_value: false + - name: mtu + label: MTU + kind: Number + default_value: 1500 + - name: enabled + kind: Boolean + default_value: false + - name: untagged_vlan + label: Untagged VLAN + kind: Number + optional: true + - name: mode + kind: Dropdown + default_value: access + choices: + - name: access + label: Access + - name: trunk + label: Trunk + - name: status + kind: Dropdown + optional: true + choices: + - name: active + label: Active + description: "Fully operational and currently in service" + color: "#7fbf7f" + - name: provisioning + label: Provisioning + description: "In the process of being set up and configured" + color: "#ffff7f" + - name: maintenance + label: Maintenance + description: "Undergoing routine maintenance or repairs" + color: "#ffd27f" + - name: drained + label: Drained + description: "Temporarily taken out of service" + color: "#bfbfbf" + - name: role + kind: Dropdown + optional: true + choices: + - name: uplink + label: Uplink + description: "Interface connected to other network device" + color: "#9090de" + - name: end_user + label: End user + description: "Interface to connect end-user devices" + color: "#ffa07a" + - name: server + label: Server + description: "Interfaces to connect to servers" + color: "#98b2d1" +``` + +From this schema Infrahub will generate a `ProfileInfraInterface` schema. Infrahub will also generate a GraphQL query `ProfileInfraInterface` and the GraphQL mutations `ProfileInfraInterfaceCreate`, `ProfileInfraInterfaceDelete`, `ProfileInfraInterfaceUpdate` and `ProfileInfraInterfaceUpsert`. 
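Because the profile is exposed as a regular node kind, it can in principle also be created through the Python SDK instead of GraphQL. The snippet below is only an illustrative sketch, equivalent to the mutation shown in the next section; the address is an assumption and the attribute names simply mirror the schema above.

```python
import asyncio

from infrahub_sdk.client import InfrahubClient


async def main() -> None:
    client = await InfrahubClient.init(address="http://localhost:8000")  # assumed address

    # Create the end-user-interface profile with the same values as the GraphQL mutation below
    profile = await client.create(
        kind="ProfileInfraInterface",
        data={
            "profile_name": "end-user-interface",
            "status": "active",
            "enabled": True,
            "auto_negotiation": True,
            "mtu": 1500,
            "mode": "access",
            "untagged_vlan": 10,
            "role": "end_user",
        },
    )
    await profile.save()
    print(profile.id)


asyncio.run(main())
```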
+ +Profiles, at this stage, can be only be used from the GraphQL API or the Python SDK. In a future release we will add support for managing profiles using the web frontend. + +## Creating the profile + +We can create the profile by executing the following GraphQL mutation: + +```graphql +mutation { + ProfileInfraInterfaceCreate( + data: { + profile_name: {value: "end-user-interface"}, + status: {value: "active"}, + enabled: {value: true}, + auto_negotiation: {value: true}, + mtu: {value:1500}, + mode: {value: "access"}, + untagged_vlan: {value: 10}, + role: {value: "end_user"}, + } + ) { + ok + object { + id + } + } +} +``` + +## Retrieving profiles from Infrahub + +We can query Infrahub to retrieve the interface profile using the following GraphQL query + +```graphql +query { + ProfileInfraInterface(profile_name__value: "end-user-interface") { + edges { + node { + id + profile_name { + value + } + } + } + } +} +``` + +## Creating a node using a profile + +We can now create multiple interfaces that will inherited the attributes we defined in our `end-user-interface` profile. + +```graphql +mutation { + int0: InfraInterfaceCreate( + data: {name: {value: "GigabitEthernet0/0/0"}, profiles: [{id: "end-user-interface"}]} + ) { + ok + object { + id + } + } + int1: InfraInterfaceCreate( + data: {name: {value: "GigabitEthernet0/0/1"}, profiles: [{id: "end-user-interface"}]} + ) { + ok + object { + id + } + } + int2: InfraInterfaceCreate( + data: {name: {value: "GigabitEthernet0/0/2"}, profiles: [{id: "end-user-interface"}]} + ) { + ok + object { + id + } + } +} +``` + +We will now query for one of the interfaces we created and check the values of the name and status attribute and the metadata. + +```graphql +query { + InfraInterface(name__value: "GigabitEthernet0/0/0") { + edges { + node { + name { + value + is_from_profile + } + status { + value + is_default + is_from_profile + source { + id + display_label + } + } + } + } + } +} +``` + +On the name attribute we can see that the `is_from_profile` property is `False`, indicating the name attribute was not inherited from a profile. The status attribute has the `is_from_profile` property set to `True` and the `source__id` property is set to the id of the profile from which the value was inherited. + +The status attribute value is inherited from the profile, because we didn't specify a value for that attribute when we created the interface, hence the `is_default` property of the status attribute is `True`. + +## Overriding profile attribute values + +We will now create another interface `GigabitEthernet0/0/3` using the profile. But we want to override the value of the MTU attribute, which is defined in the profile. To do this we have to explicitly provide a value for the attribute. + +```graphql +mutation { + InfraInterfaceCreate( + data: {name: {value: "GigabitEthernet0/0/3"}, mtu: {value: 9000}, profiles: [{id: "end-user-interface"}]} + ) { + ok + object { + id + } + } +} +``` + +Using this query we can verify that the value for the MTU attribute and that it was not inherited from a profile. 
+ +```query +query { + InfraInterface(name__value: "GigabitEthernet0/0/3") { + edges { + node { + mtu { + value + is_from_profile + } + } + } + } +} +``` diff --git a/docs/docs/guides/python-transform.mdx b/docs/docs/guides/python-transform.mdx index b84addf708..d28ac773ba 100644 --- a/docs/docs/guides/python-transform.mdx +++ b/docs/docs/guides/python-transform.mdx @@ -260,7 +260,7 @@ tags_transform (tags_transform.py::TagsTransform) Trying to run the transform with just the name will produce an error. ```bash title="❯ infrahubctl transform tags_transform" -1 error(s) occured while executing the query +1 error(s) occurred while executing the query - Message: Variable '$tag' of required type 'String!' was not provided. Location: [{'line': 1, 'column': 17}] Aborted. diff --git a/docs/docs/guides/readme.mdx b/docs/docs/guides/readme.mdx deleted file mode 100644 index 5bfa2c0540..0000000000 --- a/docs/docs/guides/readme.mdx +++ /dev/null @@ -1,5 +0,0 @@ -# Guides - -Guides explain the steps to complete tasks within Infrahub. If you haven't already, start with the [getting started](/tutorials/getting-started/) tutorial. - -- [Installing Infrahub](installation.mdx) diff --git a/docs/docs/guides/repository.mdx b/docs/docs/guides/repository.mdx index 60f9c2fe58..be44e62719 100644 --- a/docs/docs/guides/repository.mdx +++ b/docs/docs/guides/repository.mdx @@ -19,9 +19,10 @@ Infrahub supports two different types of connections to external Git repositorie 1. Go to settings > Developer Settings > Personal access tokens [New GitHub token](https://github.com/settings/personal-access-tokens/new) 2. Select Fine-grained tokens 3. Limit the scope of the token in **Repository Access** > **Only Select Repositories** - 4. Grant the token permission - a. If you want to create a CoreRepository using this token, then you will need to give it `Read/Write` access for the **Content** of the repository - b. If you want to create a Read-only Repository using this token, then you will only need to give it `Read` access for the **Content** of the repository. + 4. Grant the token permission: + + - a. If you want to create a CoreRepository using this token, then you will need to give it `Read/Write` access for the **Content** of the repository. + - b. If you want to create a Read-only Repository using this token, then you will only need to give it `Read` access for the **Content** of the repository. ![Fine-Grained Token](../media/github_fined_grain_access_token_setup.png) @@ -58,7 +59,7 @@ You will need to submit an access token with your request to create a repository (Optional): A description or comment about the repository used for informational purposes. - The URL of the external repository, e.g. `https://github.com/opsmill/infrahub.git`. + The URL of the external repository, for example `https://github.com/opsmill/infrahub.git`. Your username on the external Git provider. 
@@ -93,47 +94,48 @@ If you are using GitHub as your Git Server, you need to have a [fine-grained per + ```GraphQL - # Endpoint: http://127.0.0.1:8000/graphql/main - mutation { - CoreRepositoryCreate( - data: { - name: { value: "YOUR_REPOSITORY_NAME" } - location: { value: "https://GIT_SERVER/YOUR_GIT_USERNAME/YOUR_REPOSITORY_NAME.git" } - username: { value: "YOUR_GIT_USERNAME" } - password: { value: "YOUR_PERSONAL_ACCESS_TOKEN" } - # default_branch: { value: "main" } <-- optional - } - ) { - ok - object { - id + # Endpoint: http://127.0.0.1:8000/graphql/main + mutation { + CoreRepositoryCreate( + data: { + name: { value: "YOUR_REPOSITORY_NAME" } + location: { value: "https://GIT_SERVER/YOUR_GIT_USERNAME/YOUR_REPOSITORY_NAME.git" } + username: { value: "YOUR_GIT_USERNAME" } + password: { value: "YOUR_PERSONAL_ACCESS_TOKEN" } + # default_branch: { value: "main" } <-- optional + } + ) { + ok + object { + id + } } } - } ``` **Make sure that you are on the correct Infrahub branch.** Unlike a CoreRepository, a Read-only Repository will only pull files into the Infrahub branch on which it was created. ```GraphQL - # Endpoint : http://127.0.0.1:8000/graphql/:branch - mutation { - CoreReadOnlyRepositoryCreate( - data: { - name: { value: "YOUR_REPOSITORY_NAME" } - location: { value: "https://GIT_SERVER/YOUR_GIT_USERNAME/YOUR_REPOSITORY_NAME.git" } - username: { value: "YOUR_GIT_USERNAME" } - password: { value: "YOUR_PERSONAL_ACCESS_TOKEN" } - ref: { value: "BRANCH/TAG/COMMIT TO TRACK" } - } - ) { - ok - object { - id + # Endpoint : http://127.0.0.1:8000/graphql/ + mutation { + CoreReadOnlyRepositoryCreate( + data: { + name: { value: "YOUR_REPOSITORY_NAME" } + location: { value: "https://GIT_SERVER/YOUR_GIT_USERNAME/YOUR_REPOSITORY_NAME.git" } + username: { value: "YOUR_GIT_USERNAME" } + password: { value: "YOUR_PERSONAL_ACCESS_TOKEN" } + ref: { value: "BRANCH/TAG/COMMIT TO TRACK" } + } + ) { + ok + object { + id + } } } - } ``` @@ -162,21 +164,21 @@ Infrahub does not automatically update Read-only Repositories with changes on th Example update mutation ```GraphQL - # Endpoint : http://127.0.0.1:8000/graphql/main - mutation { - CoreReadOnlyRepositoryUpdate( - data: { - id: "ID_OF_THE_REPOSITORY" - ref: { value: "BRANCH/TAG/COMMIT TO TRACK" } - commit: { value: "NEW COMMIT ON THE REF TO PULL" } - } - ) { - ok - object { - id + # Endpoint : http://127.0.0.1:8000/graphql/main + mutation { + CoreReadOnlyRepositoryUpdate( + data: { + id: "ID_OF_THE_REPOSITORY" + ref: { value: "BRANCH/TAG/COMMIT TO TRACK" } + commit: { value: "NEW COMMIT ON THE REF TO PULL" } + } + ) { + ok + object { + id + } } } - } ``` diff --git a/docs/docs/guides/schema.mdx b/docs/docs/guides/schema.mdx index 5c0934f313..99c574d91a 100644 --- a/docs/docs/guides/schema.mdx +++ b/docs/docs/guides/schema.mdx @@ -3,7 +3,7 @@ title: Schema file --- import CodeBlock from "@theme/CodeBlock"; -import infrastructureExtensionRackYaml from '!!raw-loader!../../../models/infrastructure_extension_rack.yml'; +import infrastructureExtensionRackYaml from '!!raw-loader!../../../models/examples/extension_rack.yml'; # Schema file @@ -25,7 +25,7 @@ extensions: ```
- Example of schema file that is defining new nodes and adding a relationship to an existing one + Example of a schema file that defines new nodes and adds a relationship to an existing one {infrastructureExtensionRackYaml}
diff --git a/docs/docs/infrahubctl/infrahubctl-branch.mdx b/docs/docs/infrahubctl/infrahubctl-branch.mdx index 7066a260a8..dfcebaebf9 100644 --- a/docs/docs/infrahubctl/infrahubctl-branch.mdx +++ b/docs/docs/infrahubctl/infrahubctl-branch.mdx @@ -44,7 +44,7 @@ $ infrahubctl branch create [OPTIONS] BRANCH_NAME * `--description TEXT`: Description of the branch * `--sync-with-git / --no-sync-with-git`: Extend the branch to Git and have Infrahub create the branch in connected repositories. [default: no-sync-with-git] * `--isolated / --no-isolated`: Set the branch to isolated mode [default: no-isolated] -* `--config-file PATH`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--help`: Show this message and exit. ## `infrahubctl branch delete` @@ -63,7 +63,7 @@ $ infrahubctl branch delete [OPTIONS] BRANCH_NAME **Options**: -* `--config-file PATH`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--help`: Show this message and exit. ## `infrahubctl branch list` @@ -78,7 +78,7 @@ $ infrahubctl branch list [OPTIONS] **Options**: -* `--config-file PATH`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--help`: Show this message and exit. ## `infrahubctl branch merge` @@ -97,7 +97,7 @@ $ infrahubctl branch merge [OPTIONS] BRANCH_NAME **Options**: -* `--config-file PATH`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--help`: Show this message and exit. ## `infrahubctl branch rebase` @@ -116,7 +116,7 @@ $ infrahubctl branch rebase [OPTIONS] BRANCH_NAME **Options**: -* `--config-file PATH`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--help`: Show this message and exit. ## `infrahubctl branch validate` @@ -135,5 +135,5 @@ $ infrahubctl branch validate [OPTIONS] BRANCH_NAME **Options**: -* `--config-file PATH`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--help`: Show this message and exit. diff --git a/docs/docs/infrahubctl/infrahubctl-generator.mdx b/docs/docs/infrahubctl/infrahubctl-generator.mdx new file mode 100644 index 0000000000..522e57ff42 --- /dev/null +++ b/docs/docs/infrahubctl/infrahubctl-generator.mdx @@ -0,0 +1,25 @@ +# `infrahubctl generator` + +Run a generator script. + +**Usage**: + +```console +$ infrahubctl generator [OPTIONS] [GENERATOR_NAME] [VARIABLES]... +``` + +**Arguments**: + +* `[GENERATOR_NAME]`: Name of the Generator +* `[VARIABLES]...`: Variables to pass along with the query. Format key=value key=value. + +**Options**: + +* `--branch TEXT` +* `--path TEXT`: Root directory [default: .] +* `--debug / --no-debug`: [default: no-debug] +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--list`: Show available Generators +* `--install-completion`: Install completion for the current shell. +* `--show-completion`: Show completion for the current shell, to copy it or customize the installation. +* `--help`: Show this message and exit. 
diff --git a/docs/docs/infrahubctl/infrahubctl-render.mdx b/docs/docs/infrahubctl/infrahubctl-render.mdx index 3be7ac95c0..81801ee6fb 100644 --- a/docs/docs/infrahubctl/infrahubctl-render.mdx +++ b/docs/docs/infrahubctl/infrahubctl-render.mdx @@ -5,12 +5,12 @@ Render a local Jinja2 Transform for debugging purpose. **Usage**: ```console -$ infrahubctl render [OPTIONS] TRANSFORM_NAME [VARIABLES]... +$ infrahubctl render [OPTIONS] [TRANSFORM_NAME] [VARIABLES]... ``` **Arguments**: -* `TRANSFORM_NAME`: [required] +* `[TRANSFORM_NAME]`: Name of the Python transformation * `[VARIABLES]...`: Variables to pass along with the query. Format key=value key=value. **Options**: @@ -18,6 +18,7 @@ $ infrahubctl render [OPTIONS] TRANSFORM_NAME [VARIABLES]... * `--branch TEXT`: Branch on which to render the transform. * `--debug / --no-debug`: [default: no-debug] * `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--list`: Show available transforms * `--out TEXT`: Path to a file to save the result. * `--install-completion`: Install completion for the current shell. * `--show-completion`: Show completion for the current shell, to copy it or customize the installation. diff --git a/docs/docs/infrahubctl/infrahubctl-schema.mdx b/docs/docs/infrahubctl/infrahubctl-schema.mdx index 6ee6140ae8..5b9cf0599c 100644 --- a/docs/docs/infrahubctl/infrahubctl-schema.mdx +++ b/docs/docs/infrahubctl/infrahubctl-schema.mdx @@ -58,5 +58,6 @@ $ infrahubctl schema load [OPTIONS] SCHEMAS... * `--debug / --no-debug`: [default: no-debug] * `--branch TEXT`: Branch on which to load the schema. [default: main] +* `--wait INTEGER`: Time in seconds to wait until the schema has converged across all workers [default: 0] * `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--help`: Show this message and exit. diff --git a/docs/docs/infrahubctl/infrahubctl-validate.mdx b/docs/docs/infrahubctl/infrahubctl-validate.mdx index 8814e52174..f96f59f140 100644 --- a/docs/docs/infrahubctl/infrahubctl-validate.mdx +++ b/docs/docs/infrahubctl/infrahubctl-validate.mdx @@ -38,7 +38,7 @@ $ infrahubctl validate graphql-query [OPTIONS] QUERY [VARIABLES]... * `--debug / --no-debug`: Display more troubleshooting information. [default: no-debug] * `--branch TEXT`: Branch on which to validate the GraphQL Query. -* `--config-file PATH`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--out TEXT`: Path to a file to save the result. * `--help`: Show this message and exit. @@ -58,5 +58,5 @@ $ infrahubctl validate schema [OPTIONS] SCHEMA **Options**: -* `--config-file PATH`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--help`: Show this message and exit. 
diff --git a/docs/docs/media/infrahub_sync_process.excalidraw.svg b/docs/docs/media/infrahub_sync_process.excalidraw.svg new file mode 100644 index 0000000000..61ac0cbec9 --- /dev/null +++ b/docs/docs/media/infrahub_sync_process.excalidraw.svg @@ -0,0 +1,21 @@ + + + + + + + + Infrahub SyncSoT & CMDBInfrahubFetchPush14UserWrite config.ymlGeneratepython files23 \ No newline at end of file diff --git a/docs/docs/media/release_notes/infrahub_0_13_0/filter_in_list.png b/docs/docs/media/release_notes/infrahub_0_13_0/filter_in_list.png new file mode 100644 index 0000000000..d25aed5e0a Binary files /dev/null and b/docs/docs/media/release_notes/infrahub_0_13_0/filter_in_list.png differ diff --git a/docs/docs/media/release_notes/infrahub_0_13_0/graphiql.png b/docs/docs/media/release_notes/infrahub_0_13_0/graphiql.png new file mode 100644 index 0000000000..ef4392b00f Binary files /dev/null and b/docs/docs/media/release_notes/infrahub_0_13_0/graphiql.png differ diff --git a/docs/docs/media/release_notes/infrahub_0_13_0/help_panel.png b/docs/docs/media/release_notes/infrahub_0_13_0/help_panel.png new file mode 100644 index 0000000000..acd65a2e5c Binary files /dev/null and b/docs/docs/media/release_notes/infrahub_0_13_0/help_panel.png differ diff --git a/docs/docs/media/release_notes/infrahub_0_13_0/ipam_01.png b/docs/docs/media/release_notes/infrahub_0_13_0/ipam_01.png new file mode 100644 index 0000000000..3d6d5f5d36 Binary files /dev/null and b/docs/docs/media/release_notes/infrahub_0_13_0/ipam_01.png differ diff --git a/docs/docs/media/release_notes/infrahub_0_13_0/ipam_02.png b/docs/docs/media/release_notes/infrahub_0_13_0/ipam_02.png new file mode 100644 index 0000000000..de1f59df85 Binary files /dev/null and b/docs/docs/media/release_notes/infrahub_0_13_0/ipam_02.png differ diff --git a/docs/docs/media/release_notes/infrahub_0_13_0/proposed_change_form.png b/docs/docs/media/release_notes/infrahub_0_13_0/proposed_change_form.png new file mode 100644 index 0000000000..ee53d5a5de Binary files /dev/null and b/docs/docs/media/release_notes/infrahub_0_13_0/proposed_change_form.png differ diff --git a/docs/docs/media/release_notes/infrahub_0_13_0/search_in_list.png b/docs/docs/media/release_notes/infrahub_0_13_0/search_in_list.png new file mode 100644 index 0000000000..cee9e63154 Binary files /dev/null and b/docs/docs/media/release_notes/infrahub_0_13_0/search_in_list.png differ diff --git a/docs/docs/media/release_notes/infrahub_0_13_0/search_schema.png b/docs/docs/media/release_notes/infrahub_0_13_0/search_schema.png new file mode 100644 index 0000000000..078479c39a Binary files /dev/null and b/docs/docs/media/release_notes/infrahub_0_13_0/search_schema.png differ diff --git a/docs/docs/media/topics/generator/generator_overview.excalidraw.svg b/docs/docs/media/topics/generator/generator_overview.excalidraw.svg new file mode 100644 index 0000000000..2f49a2b368 --- /dev/null +++ b/docs/docs/media/topics/generator/generator_overview.excalidraw.svg @@ -0,0 +1,21 @@ + + + 
eyJ2ZXJzaW9uIjoiMSIsImVuY29kaW5nIjoiYnN0cmluZyIsImNvbXByZXNzZWQiOnRydWUsImVuY29kZWQiOiJ4nO2daVPbyFx1MDAxNoa/51dQzNeg6X2Zqlu3MIvBXHUwMDE4zFx1MDAxYSC3pijZXHUwMDEytrxjy2xT+e9zWiFIliUvIDHK3HhSXHUwMDE50GK1Wuft5z3drc5fn9bW1v2nobv+x9q6+9iwu54zslx1MDAxZtY/m+337mjsXHL6sItcdTAwMDS/j1x1MDAwN5NRIziy5fvD8Vx1MDAxZr//3rNHXHUwMDFk11x1MDAxZnbthmvde+OJ3Vx1MDAxZPtcdTAwMTPHXHUwMDFiWI1B73fPd3vj/5q/j+ye+5/hoOf4Iyu8yIbreP5g9P1abtftuX1/XGbf/j/4fW3tr+DvSOlGbsO3+82uXHUwMDFinFx1MDAxMOxcblx1MDAwYsiFiG89XHUwMDFh9IPCYkJcdGaIUfJ6hDfehuv5rlx1MDAwM7tvocxuuMdcdC7V0s+Vrf32XHUwMDE5qvZP21x1MDAxYjfO1VXbXHUwMDBmL3vrdbtn/lM3KNZ4XHUwMDAwd1x1MDAxM+5cdTAwMWL7o0HHvfRcdTAwMWO/Za5cdTAwMWTb/nqWY49bbuS00WDSbPXd8XjqpMHQbnj+k9mG0OvW75Xwx1q45Vx1MDAxMX6jVFtcdTAwMWNcdTAwMGKBkFx1MDAxMlpwXHUwMDFkfos5n1NkIS1cdTAwMTXSTGMldaxcXFuDLjxcYijXbyj4hFx1MDAwNavbjU5cdTAwMTNK13dej/FHdn88tEfwuMLjXHUwMDFlXu5YXCJhMcopXHUwMDEzgjPClXo9ouV6zZZcdTAwMGaHMM0tTLliWGlcIrWkYWHc4JFgyaliXGKx8Fx1MDAxZUxcdTAwMTGG+05cdTAwMTBcdTAwMWV/hlx1MDAwZmJcdTAwMDSBtW9O6U+63Whl9p2XypzaUTc7duKRXHUwMDE2jbapiPPdx/BcdTAwMGUj4YFcdTAwMWX8int94FduqljdXGJ3g1x1MDAxZk3q66/HfXv5KSznZOjY/sutYUo5R4QhzF73d71+J17W7qDRXHRcdTAwMDM02Prtc7IypspcdTAwMTlcdTAwMTFFpGZjmlBcdTAwMWFzoSFcdTAwMTaWlkTyPSdLomU3WpORW1x1MDAwMFFwii3BXHUwMDE0pVx1MDAwMjQhlWIxUfCPXHUwMDExXHUwMDA1JcSCJ68kT1BcdTAwMDO22IxcdTAwMDBcdTAwMTiDXHUwMDEwXHUwMDExIJK8XHUwMDA1MCdGXHUwMDE1QtBqrFx1MDAxMKNhqVx1MDAwNn3/zHs2j4OKqa27ds/rPv1cdTAwMDDK9yA2wVx1MDAwYlx1MDAxNVh2++7IXHUwMDA2XHUwMDFlrG27t17f801cZk9cdTAwMWS12fWaJqzXXHUwMDFiUHp3NFx1MDAxNfG+XHUwMDA3VHk9wFx1MDAxZlxmw71ccriu7cGX7y/Ttlx1MDAwZkZe0+vb3fOlilx1MDAwNZXi7v14ktjCPFx1MDAxMlwiY9fsXHJa5rnqncs1ilV861x1MDAwZlxyXHUwMDAz1LhAmorwXHUwMDE5LVx1MDAxMnH5XHUwMDFhb4y/qDNa71xyaqd6XHUwMDA3tVt7j5lyLXbW+1x1MDAxNSxcdTAwMDBcdTAwMTZSYKwlXHUwMDE2XHUwMDA0yVxixM35glJcdTAwMGJcdKVcdTAwMTCQXHUwMDBmw1x1MDAxZlx1MDAxZSvXSlx1MDAxMv6t4TrMsVx1MDAxM+SLoVx1MDAxNVx1MDAwMV5BbVx1MDAxM0U4XHUwMDEzclbGVEiLXHUwMDEzTaiU0LhcdTAwMTJcdTAwMTTG/U9cdTAwMDI1h3y9+/LluY9cdTAwMWJXT/Xj7c6Q7e5cdTAwMWRcdTAwMTVcdTAwMTBqnKdcdFx1MDAwMmJEMlwiXHUwMDE040tcdTAwMGJcIvmmi041XHUwMDAwmcW5IExqqiSKxNp3TahcZjVBiKq7YlZcdTAwMTOYXGJzXHUwMDE1yjVNolx1MDAxYVx1MDAwNeDp6GdWXHUwMDBmjFx1MDAxMIFAXHUwMDExMm89zGVcdTAwMWOjkaK9jXFETW1dhnF5gG1B455cbrZcdTAwMTVpRsTbacZwqiOVSEpcdMFcdTAwMTRcdTAwMWWwSLu7Pq0+O4+b5/W9w/Z+t9Z9rvZcdTAwMGaKXHIzXHQ5mlx1MDAwMK+JXHUwMDExJkorXHUwMDFlhr45X4KkKEFMXHUwMDAxQaAhY1wiVq7VhHtr18HAzVxul9BcdTAwMDS5ikhcdTAwMWS8yJNgXHIuVEVcdTAwMWXIm+VcdTAwMTly6EeY0Jct37Km2JY8QO2L8y/nXHUwMDFil1cnx7eVs6/bj/1VKCYhRrHQUU3kQTEqWZpcdTAwMTIwNEmEaomWT86S7zpbjGUvXHUwMDA2XHUwMDA1XHUwMDA0QYJDyCM6K1x1MDAwNsazXHUwMDEzw9zkXGZAZFx1MDAxMcUhISZcdMogs71cdTAwMTPawJVIot+vjLeCa/UoTVx1MDAwNFx1MDAxN5raOlx1MDAwZlxcI3vYOqmunUzc0dNb4dXzXHUwMDFjJ8qFXHUwMDE4v1x1MDAxNrTnM/xKLtJSXGbD6lx1MDAxZFxmUyS+9YdyOXhgrpaXbe2iXFx3Lv3d8mGls3/Fb7xcdTAwMGI5vi04wVx1MDAxOLOYyceIlloqOS1aRbSlXHUwMDA0KENxbnSrYuVajWCKNDR2/99cYoZcdTAwMDe97tn27c3loF/b31x1MDAxZLdcdTAwMGafXGI5XpFgkktcdTAwMTJVRFx1MDAxZVx1MDAwNGMqtWeCSsiCNaJyaSUk33TRXHUwMDAxppFFhFx1MDAwNCsnXHUwMDEwm9VcdTAwMDJT2WlhXHUwMDAxwKhcdTAwMDXPXHUwMDFjsJSUiVx1MDAxNVx1MDAxNmArXHUwMDA26TtcdTAwMDH22o1XXHUwMDFkNL1GTlxiW9Cgp/ctxlxulTvEeKRcdTAwMGJrxn5CXFxcdTAwMTNcdTAwMDVcdTAwMTGytHpb22ftr1eT8+2z8Wn1Qt5d7ux09orNMfDflsKIacRcdFx1MDAwN5JNXHUwMDBmXGZcYsohT+OIaJAvQzRermy6XHUwMDE1wXTOypWKSH9N9iDLo+dww0ZlvXu4N2T76GhcdTAwMWLfNC/2/dNcIvZcdTAwMWPi9DFijDRWSpLlgz75rlx1MDAwYo4swJTFIN2ijMiEsGcou7CfP0osLUXgXHUwMDAySVx1MDAxOddcdTAwMTJdh1x1MDAwNVx1MDAwMNjqMZtcYrDlu1x1MDAwZc1ccubRbbig8Z5Nu6bKkXuXISepXYZcXFx0qjGOXGbqLtLsdalWu5Jf5PblY+nYrfBaV5V4wUGlhaU5o0xhhlx1MDAxMeeRwY1AslpbXHUwMDEykjJFXHUwMDAx2vy
9XHUwMDE5V1pn/78749qq7naaeqd+dzW5ZjbjXHUwMDE196TSLFwiv2hqz1x1MDAwM1ZcdTAwMTBcIoJcdTAwMTCyvFx1MDAxNpLvuvD8XHUwMDEyXHUwMDE2uDZJlVx1MDAxNGJWXHJcdTAwMTKr7NQwXHUwMDFmYMJcdTAwMDLpkeQpXHUwMDFkxUy5Mlwi1tIpV63ehoZ9bTOnXFxrQVtcdTAwMWXn1mxp8k+yOI5vfUVcdTAwMTfiTENcZodcdTAwMDG6SK5cdTAwMDfHXHUwMDA3zdaA0Isv3eqX4+OTXHUwMDFlu9i/LDa6INosQUEjlDGKmZhcdTAwMWWmlkpbmlx1MDAxMYyJ1EIrQWLl+kWuxeQ6PajR52qns7lxeXWzuVny9zb7RZyIyEU6uVxiXHUwMDA0XGLBnC8/Ozf5rotOLoUsqVxil1xmXHQ1I1x1MDAwNoVUdmL4XHUwMDA1rkzAVcpcdFxcXHUwMDBiWvJcdTAwMTRwlT5cdTAwMTBcXFwiMi84XHUwMDBlLkkgRFfoJmnd9W7Z8Z1s7PRH53dNxIfPz6Pic4ubuShEMki9dKxfX1x1MDAxMVx1MDAwYiHM4ShBOf2VcK1FXHUwMDAzbCrI0rDVn5Rag3716PByqztcdTAwMWVcZrZLt/aBLiC2hEg1cFJcdTAwMDWhoZenVvJN/1x1MDAwNNTCknFhplx1MDAxYqK4XHUwMDE0NFxiJSspLOwuXHUwMDA0XGJRljjX8Fx1MDAxN7VcIpzYyolaXHUwMDBi2vFcdTAwMTRqbX0gtSDNSDWZUlxiypBUy49I19uXeK//+LXVPO5slUtOZ4ydgnNcdTAwMGJcdTAwMTMkLcbA2UlcdTAwMGVpXHUwMDE3qGVKrZwxXHUwMDBiK6FcdTAwMTjSmpvp9Klyxa7571x1MDAxN7lmyWV3aY9cdTAwMWaUXHS95Vx1MDAxYved60qLlk+eI+T6nPy130/WR7WH6qj29YLXnjp2+9jZmZx0pq/y4/r2aDR4WIWIgmIuXCJvguRERJVORFxiOa3g6S8/+T65Mlx1MDAwYk5ETFx1MDAxNECPSjP7XHUwMDFlMDMjMkGzXHUwMDEz2VxcJiozhEaI+JmmLa5cdTAwMWWlmTBxXHUwMDEz51x1MDAwNMVcdTAwMDWQSOuDxNlS8XtjkaBWqlLVXG7PXHUwMDAxMSRcdTAwMTBafr79/OZrJbnGuzdylCvSxIKEVSqusEAqPuKtmcWQ4kxxoqEy3vWuzPxJWuCVpVx1MDAwZd7XYVx1MDAxYSmqwpbhVbQgaSZcdTAwMTjTTEqOQaihW3nRMFx1MDAxN1xmXHUwMDExjXHuXHUwMDEzQXKR8Ni3R37J6ztev1x1MDAxOT/F7Tvhnlxi0F+WXHUwMDA1WEZuQXPQmJjyI0sgIVxiXHUwMDEwicNDZyTsTzNVZlx1MDAwZoN23NJmXHUwMDFlKzedbuCWiJzxXGZde+xvXHJ6Pc+HXG44XHUwMDFleH0/XurgjjaN/FquPVPdcE/RfXGdXHUwMDBlzTdOe5Hwp7UwjoNfXn/+83Pi0enhZT5cdTAwMWIzkVx1MDAxNX7fp+j/32a8VWp3XHUwMDEx5I5cdTAwMTJSRyyXn1x1MDAxNF0mT5XR3mBywsf+Vc19et586jhFN96EWEJcdTAwMTElXHTTVKP4oCRcdTAwMDNSw3alOWVa/OoxWovG2FScpfnu8Vx1MDAxNTlcdTAwMTm17+tH6KFU2q53y6WHr41lffdZ/aREJjfH9duNc9Stc79avfMy893AXHKR+2RrjVPna1x1MDAxYXYpM5l2aYUlV2bhfbdcdTAwMTZcdTAwMTaRWFJtxk/iXHUwMDFhozQ7jf1cdTAwMWJtNzigj5ts/WPIXCIv271cdTAwMDBcdTAwMTFpIyhcdTAwMWZluzFNnbKGpVKYMK6Xz5Lnt17Ftd1cdTAwMDI8mJRcdTAwMTK4pMEgTC/bYN6cXHUwMDAwZyBcdTAwMTFcdTAwMDJiKvAu+dlugC+nUjMwe4KDXHUwMDE2wkfzKlpcdTAwMDRcdTAwMTm7xJyZIVx1MDAxZoHhIDIzXHUwMDA1u1x1MDAxOMZbXHUwMDEz9aZ31d9tvOdcdTAwMGIuYrw3sMWgqZFCS1xmlcrAe886b25hxM2YXHUwMDAyRULC4+c/t/NOjbDg9NnYytR6k/Qp7Vx1MDAwMoFrXHUwMDAw7ixvvdkp2d183jlnd1x1MDAwZqOrx5Pb9qTSxEW33phaXHUwMDA0Wlx1MDAxYbBhXGZSU1x1MDAxZZtXoYhlTIFcdTAwMTk8woKQX957LVx1MDAxYWRTgZbmvb2u7pSfJ1VbXHUwMDFlPjZcdTAwMGXuryvHh73Kst67/nS9Lyterd5cdTAwMDApf1x1MDAxZFx1MDAxMbt66Oxm570l4uEjzavPm6fjXHUwMDFj62BcdTAwMDWRXGLAXHUwMDE2aSy5Nlx1MDAwYm++XHUwMDE1mG8tXHUwMDE1NYv2zajMjFx1MDAwM2emsvnuW1macKlcdTAwMTJfXHUwMDFjKar5hlxuQVx1MDAxZt7nvZWX+V5cdTAwMDCJtIHgXHUwMDBmM98y9bVkQ2Jqond58z2/+Sqy+VZcdTAwMThDVVx1MDAxMMI5XHUwMDEz033eoFxii4AhQYAtxnTk9Zns+7yZRcC2XHUwMDAy9jSFQqFcdTAwMDTzXHKJOzFcdTAwMTP6iVIscEiFtN5cdTAwMDRsXHUwMDE0j1x1MDAwNvCHWe/5cotab2RcdCzAXlx1MDAxOEctlcDRLuCw05tgXHSZXHUwMDA0hVx1MDAxY1x1MDAxNNOf3Xmnhpf5zFx1MDAwNFaWvltH3o6eSfEhWnjUNCxqZC42mp2d8U6/UdtcdTAwMTje84v9XHUwMDBifH56XXTbXHUwMDFkrGiqNFx1MDAxNlx1MDAxOFx0radcdTAwMDfBobYtjagmjFx1MDAxMiRcdTAwMTlJT+8/YKaJZDxINDNYSvVcdTAwMDNNd6sqSjv1zn351N/aamO/wiS+W3FCiGRcIndzXGahkjr4o1x1MDAwNcFcdTAwMWNFX2hfOFk48a5cdTAwMGJvjoMlSiW4Y1x1MDAwMvldXFxcdTAwMGKUZaeFbHumwVx1MDAxZZhcdTAwMTFQmfuCpFlGaSbmeJPkZI5cdTAwMTc05WlcdTAwMTNCSLbmeD66WKpB1opcImgtV8hmVbfXPfRstH0tvWb9/Eg87jbT5oRcdTAwMTRcdTAwMDdd3DJhzyU4YJPRTunVvJaGuFBAeKrM0j3vccfvZZcyK+UgiTNYXCL1XHUwMDAz2aV4ZVCiztPO+I7tTrbKo537fW/Fjlx1MDAxZLO0bN7s0nNSRSiDJposj67kmy4+ujS4ZCSIefeMxrVg3krLTFx1MDAwYpmyiyouXHUwMDA1o+yf7ddZMUgzQVdug6pcdTAwMGKa8lx1MDAwZlx1MDAxYVSdiy6MXCLRMaNYszS7xGj5eVx1MDAxMM
Iuo/ve0+BcdTAwMDZX2nvDXHUwMDFhXHUwMDFl35UrsmjwMpNcdTAwMWUg2onkXGYrJKOTi17GP4jFJChHgpXCgk9cdTAwMGa0QmZcZntcdTAwMTVcdTAwMGVmhOnosHO2NGOWRExpgTVoQkfmw4Rw41x1MDAxNlx1MDAxMoLxoFx1MDAwM1x1MDAwZSM9+29eSLMusshA0Vx1MDAxZog6Z1x1MDAxN5J9dF7W3tHGae1gOHRJ2VlcdTAwMTF1Zj3o/NM0nvpyXGZB8ECwWOXlmOTbLlx1MDAxY+xcdTAwMTZqR3HLrJqCzeqNXHUwMDFjRUZKXHUwMDAz7UhkcUNFXHUwMDBlaZ183zL6mVx1MDAwZWpgSDZcdTAwMTVcdTAwMTP/aNq2atRmwr6tvNK2XHUwMDA1JEhcdTAwMWLTeGva9umlRVi3h8MzXHUwMDFmava10YJH6Tkv1Vx1MDAxM9bm+r3nPpSS1nhcdTAwMGY+pr1cdFx1MDAxYVx1MDAwNCM8N2hcdTAwMDG/ffr2N1x1MDAwMlNcdTAwMTn/In0= + + + + + Generator DefinitionGeneratorGraphQL QueryGenerator LogicGroupObject AObject BObject CObject A1Object B1Object C1Object A2Object B1Object C2 \ No newline at end of file diff --git a/docs/docs/media/tutorial_1_branch_creation.png b/docs/docs/media/tutorial_1_branch_creation.png index df3226ae3c..368c078072 100644 Binary files a/docs/docs/media/tutorial_1_branch_creation.png and b/docs/docs/media/tutorial_1_branch_creation.png differ diff --git a/docs/docs/media/tutorial_1_branch_details.png b/docs/docs/media/tutorial_1_branch_details.png index 9ad9a1b13b..a9869c7f19 100644 Binary files a/docs/docs/media/tutorial_1_branch_details.png and b/docs/docs/media/tutorial_1_branch_details.png differ diff --git a/docs/docs/media/tutorial_1_branch_diff.png b/docs/docs/media/tutorial_1_branch_diff.png index 0278a7274b..e33b43de33 100644 Binary files a/docs/docs/media/tutorial_1_branch_diff.png and b/docs/docs/media/tutorial_1_branch_diff.png differ diff --git a/docs/docs/media/tutorial_1_branch_list.png b/docs/docs/media/tutorial_1_branch_list.png index 13c8f7f8a9..236b837491 100644 Binary files a/docs/docs/media/tutorial_1_branch_list.png and b/docs/docs/media/tutorial_1_branch_list.png differ diff --git a/docs/docs/media/tutorial_1_organization_create.png b/docs/docs/media/tutorial_1_organization_create.png index 195e31d8e7..8173f98e41 100644 Binary files a/docs/docs/media/tutorial_1_organization_create.png and b/docs/docs/media/tutorial_1_organization_create.png differ diff --git a/docs/docs/media/tutorial_1_organization_details.png b/docs/docs/media/tutorial_1_organization_details.png index c0aa70d488..e0e072adce 100644 Binary files a/docs/docs/media/tutorial_1_organization_details.png and b/docs/docs/media/tutorial_1_organization_details.png differ diff --git a/docs/docs/media/tutorial_1_organization_edit.png b/docs/docs/media/tutorial_1_organization_edit.png index 3c02037c2b..508c3741a1 100644 Binary files a/docs/docs/media/tutorial_1_organization_edit.png and b/docs/docs/media/tutorial_1_organization_edit.png differ diff --git a/docs/docs/media/tutorial_1_organizations.png b/docs/docs/media/tutorial_1_organizations.png index cff2f18f1f..13f17fbefa 100644 Binary files a/docs/docs/media/tutorial_1_organizations.png and b/docs/docs/media/tutorial_1_organizations.png differ diff --git a/docs/docs/media/tutorial_2_historical.png b/docs/docs/media/tutorial_2_historical.png index 83d4cca7b5..4757e09cac 100644 Binary files a/docs/docs/media/tutorial_2_historical.png and b/docs/docs/media/tutorial_2_historical.png differ diff --git a/docs/docs/media/tutorial_3_schema.png b/docs/docs/media/tutorial_3_schema.png index 5e556df768..8affd9cbb6 100644 Binary files a/docs/docs/media/tutorial_3_schema.png and b/docs/docs/media/tutorial_3_schema.png differ diff --git a/docs/docs/media/tutorial_4_metadata.png b/docs/docs/media/tutorial_4_metadata.png index 5064f7b56f..a484cf6bb8 100644 Binary files a/docs/docs/media/tutorial_4_metadata.png and 
b/docs/docs/media/tutorial_4_metadata.png differ diff --git a/docs/docs/media/tutorial_4_metadata_edit.png b/docs/docs/media/tutorial_4_metadata_edit.png index d0cd891005..5e7dffebea 100644 Binary files a/docs/docs/media/tutorial_4_metadata_edit.png and b/docs/docs/media/tutorial_4_metadata_edit.png differ diff --git a/docs/docs/media/tutorial_6_branch_creation.png b/docs/docs/media/tutorial_6_branch_creation.png index dcb1c9ce51..2d9459cc87 100644 Binary files a/docs/docs/media/tutorial_6_branch_creation.png and b/docs/docs/media/tutorial_6_branch_creation.png differ diff --git a/docs/docs/media/tutorial_6_interface_update.png b/docs/docs/media/tutorial_6_interface_update.png index b173579ff3..fb3c96e8db 100644 Binary files a/docs/docs/media/tutorial_6_interface_update.png and b/docs/docs/media/tutorial_6_interface_update.png differ diff --git a/docs/docs/python-sdk/guides/tracking.mdx b/docs/docs/python-sdk/guides/tracking.mdx new file mode 100644 index 0000000000..73de152d3a --- /dev/null +++ b/docs/docs/python-sdk/guides/tracking.mdx @@ -0,0 +1,248 @@ +--- +title: Using the client tracking mode +--- +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +# Using the client tracking mode + +The Python SDK provides a feature known as *Tracking Mode*. This mode allows for the aggregation and tracking of operations performed during a session, enhancing efficiency and data management. + +## Overview + +Tracking mode is designed for scenarios where multiple operations or mutations are performed in sequence, and you want to consolidate or track these operations for efficiency or auditing purposes. It is ideal for scenarios requiring precise control and repeatability, such as idempotent scripting. +Under the hood, tracking mode leverages a `CoreStandardGroup` object to aggregate and track these operations. + +For a detailed exploration of tracking mode applications and how to use it for idempotent scripting, visit the [tracking topic](/python-sdk/topics/tracking). + +## Automatic tracking + +The Infrahub Python SDK offers a streamlined process for managing the tracking session lifecycle using the context manager. This method is available for both asynchronous and synchronous clients, ensuring all operations within the context are efficiently tracked without manual intervention. + +### Utilizing the context manager + +With the `start_tracking` method, you initiate tracking mode as you enter the context and automatically conclude the tracking session as you exit. This guarantees that all operations performed within the context are tracked under the specified session, simplifying tracking management. 
+ + + + + ```python + # Auto-manage tracking session with async context manager + async with client.start_tracking(identifier="my_tracking_session", params=params, delete_unused_nodes=True) as session: + # Tracked operations + node = await session.create(kind="MyNodeKind", data={"name": "Example"}) + await node.save() + + # Optionally, add related Nodes and Groups to the context + await session.group_context.add_related_nodes([another_node.id]) + await session.group_context.add_related_groups([group.id]) + ``` + + + + + ```python + # Auto-manage tracking session with sync context manager + with client.start_tracking(identifier="my_tracking_session", params=params, delete_unused_nodes=True) as session: + # Tracked operations + node = session.create(kind="MyNodeKind", data={"name": "Example"}) + node.save() + + # Optionally, add related Nodes and Groups to the context + session.group_context.add_related_nodes([another_node.id]) + session.group_context.add_related_groups([group.id]) + ``` + + + + +:::info + +The context manager feature elegantly handles the start and conclusion of tracking sessions, making your code cleaner and less prone to errors related to manual tracking management. + +::: + +## Manual tracking + +Manual tracking involves explicitly starting, managing, and concluding sessions, offering fine-grained control over the tracking process. + +### Setting up + +First, ensure the Python SDK is installed and configured to communicate with your Infrahub instance. Then, let's start by setting up our client and enabling tracking mode. + +Once tracking mode is enabled, it's important to note that only the objects that are being saved (created or updated) will be automatically tracked under the specified session identifier. Objects that are merely queried and not modified or saved will not be added to the tracking group. +This behavior ensures that the tracking group specifically reflects changes made during the session, providing a clear audit trail of modifications. + + + + + ```python + from infrahub_sdk.client import InfrahubClient + + client = await InfrahubClient.init(address="http://localhost:8000") + + await client.start_tracking(identifier="my_tracking_session") + node = await client.create(kind="MyNodeKind", data={"name": "Example"}) + await node.save() + ``` + + + + + ```python + from infrahub_sdk.client_sync import InfrahubClientSync + + client = InfrahubClientSync.init(address="http://localhost:8000") + + client.start_tracking(identifier="my_tracking_session") + node = client.create(kind="MyNodeKind", data={"name": "Example"}) + node.save() + ``` + + + + +#### Tracking parameters + +When enabling tracking mode with the `start_tracking` method, you can customize the tracking session with the following parameters: + +| Parameter | Description | +|------------------------|-----------------------------------------------------------------------------------------------------------------------------| +| **identifier** | Unique string to identify the session, used for correlating operations and logs. Defaults to "python-sdk" if not specified. | +| **params** | Optional dictionary for extra context, enabling fine-grained control over tracking. | +| **delete_unused_nodes**| Boolean indicating if nodes not referenced should be automatically deleted, helping maintain a clean state. | +| **group_type** | Type of group object for tracking, default is `CoreStandardGroup`, customizable for specific grouping logic. 
| + +These parameters provide flexibility, enabling detailed auditing, efficient data management, and support for idempotent operations. + +#### Advanced tracking with parameters + +In addition to the basic tracking mode functionalities, the Infrahub Python SDK allows for more granular control over the tracking sessions through parameters. These parameters can be beneficial for categorizing, filtering, or adding metadata to the groups created or updated during the tracking session. +When starting a tracking session with `start_tracking,` you can pass a dictionary of parameters to define further the context and characteristics of the group being tracked. This allows for dynamic grouping based on runtime data, user inputs, or other operational metrics. + + + + + ```python + params = { + "data_source": "external", + "analysis_type": "ipam_data" + } + + await client.start_tracking( + identifier="weekly_analysis", + params=params, + delete_unused_nodes=True + ) + ``` + + + + + ```python + params = { + "data_source": "external", + "analysis_type": "ipam_data" + } + + client.start_tracking( + identifier="weekly_analysis", + params=params, + delete_unused_nodes=True + ) + ``` + + + + +### Ending a tracking session and updating tracking information + +This step involves creating or updating (using upsert) the `CoreStandardGroup` used to store all the Nodes and Groups used during execution. + + + + + ```python + from infrahub_sdk.client import InfrahubClient + + client = await InfrahubClient.init(address="http://localhost:8000") + + await client.start_tracking(identifier="my_tracking_session") + node = await client.create(kind="MyNodeKind", data={"name": "Example"}) + await node.save() + + # Update tracking information for async client + await client.group_context.update_group() + ``` + + + + + ```python + from infrahub_sdk.client_sync import InfrahubClientSync + + client = InfrahubClientSync.init(address="http://localhost:8000") + + client.start_tracking(identifier="my_tracking_session") + node = client.create(kind="MyNodeKind", data={"name": "Example"}) + node.save() + + # Update tracking information for sync client + client.group_context.update_group() + ``` + + + + +## Retrieving and manipulating groups + +After setting up a tracking session with specific parameters, you may need to retrieve the group associated with this session for further manipulation. This involves fetching the group, accessing its members, and executing specific logic based on the member types. 
+ +Here's how to retrieve a group based on an identifier and parameters and interact with its members: + + + + +```python +# Set the context properties to match the tracking session +await client.set_context_properties(identifier="my_tracking_session", params=params) + +# Retrieve the group associated with the specified identifier and parameters +group = await client.group_context.get_group(store_peers=True) + +# Check if the group exists +if group: + # Access previous members of the group, if any + if client.group_context.previous_members: + for member in client.group_context.previous_members: + # Fetch the object from the store based on the member's type and ID + obj = client.store.get(kind=member._typename, key=member.id) + + # Perform operations based on the member's type + pass +``` + + + + +```python +# Set the context properties to match the tracking session +client.set_context_properties(identifier="my_tracking_session", params=params) + +# Retrieve the group associated with the specified identifier and parameters +group = client.group_context.get_group(store_peers=True) + +# Check if the group exists +if group: + # Access previous members of the group, if any + if client.group_context.previous_members: + for member in client.group_context.previous_members: + # Fetch the object from the store based on the member's type and ID + obj = client.store.get(kind=member._typename, key=member.id) + + # Perform operations based on the member's type + pass +``` + + + diff --git a/docs/docs/python-sdk/readme.mdx b/docs/docs/python-sdk/readme.mdx index 329528d8b6..80dd468b7e 100644 --- a/docs/docs/python-sdk/readme.mdx +++ b/docs/docs/python-sdk/readme.mdx @@ -13,8 +13,14 @@ The Infrahub Python SDK greatly simplifies how you can interact with Infrahub pr - [Installing infrahub-sdk](/python-sdk/guides/installation) - [Creating a client](/python-sdk/guides/client) - [Querying data in Infrahub](/python-sdk/guides/query_data) -- [Create, update and deleting nodes](/python-sdk/guides/create_update_delete) -- [Branch management](/python-sdk/guides/branches) +- [Managing nodes](/python-sdk/guides/create_update_delete) +- [Managing branches](/python-sdk/guides/branches) +- [Using the client store](/python-sdk/guides/store) +- [Using the client tracking mode](/python-sdk/guides/tracking) + +## Topics + +- [Understanding tracking in the Python SDK](/python-sdk/topics/tracking) ## Reference diff --git a/docs/docs/python-sdk/reference/config.mdx b/docs/docs/python-sdk/reference/config.mdx index c16b053b1a..0a5d148a60 100644 --- a/docs/docs/python-sdk/reference/config.mdx +++ b/docs/docs/python-sdk/reference/config.mdx @@ -78,15 +78,62 @@ The following settings can be defined in the Config class **Default value**: False
**Environment variable**: `INFRAHUB_SDK_DEFAULT_BRANCH_FROM_GIT`
+## identifier + +**Property**: identifier
+**Description**: Tracker identifier
+**Type**: `string`
+**Environment variable**: `INFRAHUB_SDK_IDENTIFIER`
+ +## insert_tracker + +**Property**: insert_tracker
+**Description**: Insert a tracker on queries to the server
+**Type**: `boolean`
+**Default value**: False
+**Environment variable**: `INFRAHUB_SDK_INSERT_TRACKER`
+ +## max_concurrent_execution + +**Property**: max_concurrent_execution
+**Description**: Max concurrent execution in batch mode
+**Type**: `integer`
+**Default value**: 5
+**Environment variable**: `INFRAHUB_SDK_MAX_CONCURRENT_EXECUTION`
+ ## mode **Property**: mode
**Description**: Default mode for the client
-**Type**: `enum`
+**Type**: `string`
**Default value**: default
**Choices**: default, tracking
**Environment variable**: `INFRAHUB_SDK_MODE`
+## pagination_size + +**Property**: pagination_size
+**Description**: Page size for queries to the server
+**Type**: `integer`
+**Default value**: 50
+**Environment variable**: `INFRAHUB_SDK_PAGINATION_SIZE`
+ +## retry_delay + +**Property**: retry_delay
+**Description**: Number of seconds to wait until attempting a retry
+**Type**: `integer`
+**Default value**: 5
+**Environment variable**: `INFRAHUB_SDK_RETRY_DELAY`
+ +## retry_on_failure + +**Property**: retry_on_failure
+**Description**: Retry operation in case of failure
+**Type**: `boolean`
+**Default value**: False
+**Environment variable**: `INFRAHUB_SDK_RETRY_ON_FAILURE`
+ ## timeout **Property**: timeout
@@ -99,11 +146,33 @@ The following settings can be defined in the Config class **Property**: transport
**Description**: Set an alternate transport using a predefined option
-**Type**: `enum`
+**Type**: `string`
**Default value**: httpx
**Choices**: httpx, json
**Environment variable**: `INFRAHUB_SDK_TRANSPORT`
+## proxy + +**Property**: proxy
+**Description**: Proxy address
+**Type**: `string`
+**Environment variable**: `INFRAHUB_SDK_PROXY`
+ +## proxy_mounts + +**Property**: proxy_mounts
+**Description**: Proxy mounts configuration
+**Type**: `object`
+**Environment variable**: `INFRAHUB_SDK_PROXY_MOUNTS`
+ +## update_group_context + +**Property**: update_group_context
+**Description**: Update GraphQL query groups
+**Type**: `boolean`
+**Default value**: False
+**Environment variable**: `INFRAHUB_SDK_UPDATE_GROUP_CONTEXT`
+ ## recorder **Property**: recorder
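The settings above are usually provided through environment variables or a configuration file, but they can also be set in code when the client is created. The snippet below is only an illustrative sketch; it assumes the `Config` class is importable from `infrahub_sdk` and accepts the documented property names as keyword arguments, and the values shown are arbitrary examples rather than recommendations.

```python
import asyncio

from infrahub_sdk import Config
from infrahub_sdk.client import InfrahubClient


async def main() -> None:
    # Field names mirror the properties documented above; values are examples only
    config = Config(
        address="http://localhost:8000",
        pagination_size=100,          # page size for queries to the server
        retry_on_failure=True,        # retry operations in case of failure
        retry_delay=10,               # seconds to wait between retries
        max_concurrent_execution=10,  # concurrency used in batch mode
    )

    client = await InfrahubClient.init(config=config)
    print(len(await client.all(kind="CoreRepository")))


asyncio.run(main())
```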
diff --git a/docs/docs/python-sdk/topics/tracking.mdx b/docs/docs/python-sdk/topics/tracking.mdx new file mode 100644 index 0000000000..59ebdd4da2 --- /dev/null +++ b/docs/docs/python-sdk/topics/tracking.mdx @@ -0,0 +1,60 @@ +--- +title: Understanding tracking in the Python SDK +--- + +# Understanding tracking in the Python SDK + +## Introduction + +Tracking mode in the Python SDK is an advanced functionality designed to optimize and streamline data operations. + +## The essence of tracking + +### What is tracking? + +Tracking mode in the Python SDK is aggregates operations, such as creations, updates, and deletions, performed during a session into a `CoreStandardGroup` object. +This aggregation facilitates efficient management and auditing of data mutations. + +### Why it matters? + +In complex workflows where multiple operations occur sequentially, tracking ensures that these actions are consolidated, leading to easier auditing. +This is particularly crucial in scenarios where operations must be repeatable without side effects — a core principle of idempotency. + +## Under the hood + +At the heart of the tracking feature lies the `CoreStandardGroup` object. This object acts as a container, grouping related operations to provide a unified view of the changes made during a session. This grouping is not just for organizational purposes; it plays a vital role in enabling idempotent operations. + +### Automatic vs. manual tracking + +The SDK offers two modes of operation for tracking: automatic and manual: + +- Automatic tracking is facilitated through the use of a context manager, simplifying session management and ensuring that all operations within the context are tracked. +- Manual tracking, on the other hand, provides granular control, allowing developers to start and end sessions explicitly and manage group memberships as needed. + +## Achieving idempotency + +### Idempotency explained + +Idempotency is a property of certain operations that ensures the operation can be performed multiple times without changing the result beyond the initial application. This concept is critical in ensuring that scripts and operations are safe to retry, leading to more robust and reliable systems. + +### Tracking in idempotency + +The tracking feature of the Python SDK shines in scenarios requiring idempotency. By grouping operations within a `CoreStandardGroup`, the SDK allows for operations to be rolled back or repeated without adverse effects, ensuring that the state of the system remains consistent. + +## Practical applications + +### Use cases + +- **Auditing:** Tracking provides a clear audit trail of operations performed during a session. +- **Error Handling:** In case of errors, tracking enables precise identification and rollback of changes. + +### Implementing an idempotent script + +An idempotent script using the Python SDK might involve the following steps: + +1. **Start a Tracking Session:** Begin with a unique identifier and any relevant parameters. +2. **Perform Operations:** Execute create, update, or delete operations as required. +3. **End the Session:** Conclude the tracking session, optionally updating the group context to reflect the changes made. +4. **Handle Errors:** In case of errors, use the tracking information to rollback or correct the state. + +For more detailed guidance on implementing tracking in your scripts, refer to the [tracking guide](/python-sdk/guides/tracking). 
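To make the four steps above more concrete, an idempotent script could be structured roughly as in the sketch below. The kind name, identifier and address are placeholders, and the context-manager form of `start_tracking` is the one described in the tracking guide; treat this as an outline rather than a ready-made implementation.

```python
import asyncio

from infrahub_sdk.client import InfrahubClient


async def main() -> None:
    client = await InfrahubClient.init(address="http://localhost:8000")  # placeholder address

    # 1. Start a tracking session with a stable identifier so reruns reuse the same group
    async with client.start_tracking(identifier="widget-sync", delete_unused_nodes=True) as session:
        # 2. Perform operations; upserting keeps repeated runs from creating duplicates
        node = await session.create(kind="TestWidget", data={"name": "widget1", "count": 1})
        await node.save(allow_upsert=True)
        # 3. The session, and the CoreStandardGroup behind it, is concluded automatically on exit

    # 4. On failure, rerunning the script with the same identifier converges to the same state


asyncio.run(main())
```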
diff --git a/docs/docs/reference/dotinfrahub.mdx b/docs/docs/reference/dotinfrahub.mdx index a89b519d3e..4e03ae3622 100644 --- a/docs/docs/reference/dotinfrahub.mdx +++ b/docs/docs/reference/dotinfrahub.mdx @@ -89,3 +89,22 @@ See [this topic](/topics/infrahub-yml) for more details on the available reposit | name | string | The name of the Transform | True | | file_path | string | The file within the repository with the transform code. | True | | class_name | string | The name of the transform class to run. | False | + + +## Generator Definitions + + +**Description**: Generator definitions
+**Key**: generator_definitions
+**Type**: array
+**Item type**: InfrahubGeneratorDefinitionConfig
+ +| Property | Type | Description | Mandatory | +| -------- | ---- | ----------- | --------- | +| name | string | The name of the Generator Definition | True | +| file_path | string | The file within the repository with the generator code. | True | +| query | string | The GraphQL query to use as input. | True | +| parameters | object | The input parameters required to run this check | False | +| targets | string | The group to target when running this generator | True | +| class_name | string | The name of the generator class to run. | False | +| convert_query_response | boolean | Decide if the generator should convert the result of the GraphQL query to SDK InfrahubNode objects. | False | diff --git a/docs/docs/reference/infrahub-cli/infrahub-db.mdx b/docs/docs/reference/infrahub-cli/infrahub-db.mdx index b23d6a2fa0..b45fb5e992 100644 --- a/docs/docs/reference/infrahub-cli/infrahub-db.mdx +++ b/docs/docs/reference/infrahub-cli/infrahub-db.mdx @@ -17,9 +17,11 @@ $ infrahub db [OPTIONS] COMMAND [ARGS]... **Commands**: * `constraint`: Manage Database Constraints +* `index`: Manage Database Indexes * `init`: Erase the content of the database and... * `load-test-data`: Load test data into the database from the... * `migrate`: Check the current format of the internal... +* `update-core-schema`: Check the current format of the internal... ## `infrahub db constraint` @@ -40,6 +42,25 @@ $ infrahub db constraint [OPTIONS] [ACTION]:[show|add|drop] [CONFIG_FILE] * `--help`: Show this message and exit. +## `infrahub db index` + +Manage Database Indexes + +**Usage**: + +```console +$ infrahub db index [OPTIONS] [ACTION]:[show|add|drop] [CONFIG_FILE] +``` + +**Arguments**: + +* `[ACTION]:[show|add|drop]`: [default: IndexAction.SHOW] +* `[CONFIG_FILE]`: [env var: INFRAHUB_CONFIG;default: infrahub.toml] + +**Options**: + +* `--help`: Show this message and exit. + ## `infrahub db init` Erase the content of the database and initialize it with the core schema. @@ -89,3 +110,22 @@ $ infrahub db migrate [OPTIONS] [CONFIG_FILE] * `--check / --no-check`: Check the state of the database without applying the migrations. [default: no-check] * `--help`: Show this message and exit. + +## `infrahub db update-core-schema` + +Check the current format of the internal graph and apply the necessary migrations + +**Usage**: + +```console +$ infrahub db update-core-schema [OPTIONS] [CONFIG_FILE] +``` + +**Arguments**: + +* `[CONFIG_FILE]`: [env var: INFRAHUB_CONFIG;default: infrahub.toml] + +**Options**: + +* `--debug / --no-debug`: Enable advanced logging and troubleshooting [default: no-debug] +* `--help`: Show this message and exit. diff --git a/docs/docs/reference/message-bus-events.mdx b/docs/docs/reference/message-bus-events.mdx new file mode 100644 index 0000000000..0f01aadc39 --- /dev/null +++ b/docs/docs/reference/message-bus-events.mdx @@ -0,0 +1,2293 @@ +--- +title: Message Bus Events +--- + +# Message bus events + +This document provides detailed documentation for all events used in the Infrahub message bus system. + +:::info + +For more detailed explanations on how to use these events within Infrahub, see the [event handling](/topics/event-handling) topic. + +::: + +## Messages events + + +### Check Artifact + + + +#### Event check.artifact.create + + +**Description**: Runs a check to verify the creation of an artifact. 
+
+**Priority**: 2
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+| **artifact_name** | Name of the artifact | string | None |
+| **artifact_definition** | The ID of the artifact definition | string | None |
+| **commit** | The commit to target | string | None |
+| **content_type** | Content type of the artifact | string | None |
+| **transform_type** | The type of transform associated with this artifact | string | None |
+| **transform_location** | The transform's location within the repository | string | None |
+| **repository_id** | The unique ID of the Repository | string | None |
+| **repository_name** | The name of the Repository | string | None |
+| **repository_kind** | The kind of the Repository | string | None |
+| **branch_name** | The branch where the check is run | string | None |
+| **target_id** | The ID of the target object for this artifact | string | None |
+| **target_name** | Name of the artifact target | string | None |
+| **artifact_id** | The id of the artifact if it previously existed | N/A | None |
+| **query** | The name of the query to use when collecting data | string | None |
+| **timeout** | Timeout for requests used to generate this artifact | integer | None |
+| **variables** | Input variables when generating the artifact | object | None |
+| **validator_id** | The ID of the validator | string | None |
+
+
+
+### Check Generator
+
+
+
+#### Event check.generator.run
+
+
+**Description**: A check that runs a generator.
+
+**Priority**: 3
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+| **generator_definition** | The Generator definition | N/A | None |
+| **generator_instance** | The id of the generator instance if it previously existed | N/A | None |
+| **commit** | The commit to target | string | None |
+| **repository_id** | The unique ID of the Repository | string | None |
+| **repository_name** | The name of the Repository | string | None |
+| **repository_kind** | The kind of the Repository | string | None |
+| **branch_name** | The branch where the check is run | string | None |
+| **target_id** | The ID of the target object for this generator | string | None |
+| **target_name** | Name of the generator target | string | None |
+| **query** | The name of the query to use when collecting data | string | None |
+| **variables** | Input variables when running the generator | object | None |
+| **validator_id** | The ID of the validator | string | None |
+
+
+
+### Check Repository
+
+
+
+#### Event check.repository.check_definition
+
+
+**Description**: Triggers user-defined checks to run based on a Check Definition.
+ +**Priority**: 2 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **check_definition_id** | The unique ID of the check definition | string | None | +| **commit** | The commit to target | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the Repository | string | None | +| **branch_name** | The branch where the check is run | string | None | +| **file_path** | The path and filename of the check | string | None | +| **class_name** | The name of the class containing the check | string | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **branch_diff** | The calculated diff between the two branches | N/A | None | + + +#### Event check.repository.merge_conflicts + + +**Description**: Runs a check to validate if there are merge conflicts for a proposed change between two branches. + +**Priority**: 2 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **validator_id** | The id of the validator associated with this check | string | None | +| **validator_execution_id** | The id of current execution of the associated validator | string | None | +| **check_execution_id** | The unique ID for the current execution of this check | string | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the Repository | string | None | +| **source_branch** | The source branch | string | None | +| **target_branch** | The target branch | string | None | + + +#### Event check.repository.user_check + + +**Description**: Runs a check as defined within a CoreCheckDefinition within a repository. + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **validator_id** | The id of the validator associated with this check | string | None | +| **validator_execution_id** | The id of current execution of the associated validator | string | None | +| **check_execution_id** | The unique ID for the current execution of this check | string | None | +| **check_definition_id** | The unique ID of the check definition | string | None | +| **commit** | The commit to target | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the Repository | string | None | +| **branch_name** | The branch where the check is run | string | None | +| **file_path** | The path and filename of the check | string | None | +| **class_name** | The name of the class containing the check | string | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **variables** | Input variables when running the check | object | None | +| **name** | The name of the check | string | None | +| **branch_diff** | The calculated diff between the two branches | N/A | None | + + + + +### Event Branch + + + +#### Event event.branch.create + + +**Description**: Sent a new branch is created. 
+ +**Priority**: 5 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch that was created | string | None | +| **branch_id** | The unique ID of the branch | string | None | +| **sync_with_git** | Indicates if Infrahub should extend this branch to git. | boolean | None | + + +#### Event event.branch.delete + + +**Description**: Sent when a branch has been deleted. + +**Priority**: 5 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch that was deleted | string | None | +| **branch_id** | The unique ID of the branch | string | None | +| **sync_with_git** | Indicates if the branch was extended to Git | boolean | None | + + +#### Event event.branch.merge + + +**Description**: Sent when a branch has been merged. + +**Priority**: 5 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **source_branch** | The source branch | string | None | +| **target_branch** | The target branch | string | None | +| **ipam_node_details** | Details for changed IP nodes | array | None | + + +#### Event event.branch.rebased + + +**Description**: Sent when a branch has been rebased. + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch that was rebased | string | None | +| **ipam_node_details** | Details for changed IP nodes | array | None | + + + +### Event Node + + + +#### Event event.node.mutated + + +**Description**: Sent when a node has been mutated + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch that was created | string | None | +| **kind** | The type of object modified | string | None | +| **node_id** | The ID of the mutated node | string | None | +| **action** | The action taken on the node | string | None | +| **data** | Data on modified object | object | None | + + + +### Event Schema + + + +#### Event event.schema.update + + +**Description**: Sent when the schema on a branch has been updated. + +**Priority**: 5 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch where the update occurred | string | None | + + + +### Event Worker + + + +#### Event event.worker.new_primary_api + + +**Description**: Sent on startup or when a new primary API worker is elected. + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **worker_id** | The worker ID that got elected | string | None | + + + + +### Finalize Validator + + + +#### Event finalize.validator.execution + + +**Description**: Update the status of a validator after all checks have been completed. 
+ +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **validator_id** | The id of the validator associated with this check | string | None | +| **validator_execution_id** | The id of current execution of the associated validator | string | None | +| **start_time** | Start time when the message was first created | string | None | +| **validator_type** | The type of validator to complete | string | None | + + + + +### Git Branch + + + +#### Event git.branch.create + + +**Description**: Create a branch in a Git repository. + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | Name of the branch to create | string | None | +| **branch_id** | The unique ID of the branch | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the Repository | string | None | + + + +### Git Diff + + + +#### Event git.diff.names_only + + +**Description**: Request a list of modified files between two commits. + +**Priority**: 4 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **repository_kind** | The kind of the repository | string | None | +| **first_commit** | The first commit | string | None | +| **second_commit** | The second commit | string | None | + + + +### Git File + + + +#### Event git.file.get + + +**Description**: Read a file from a Git repository. + +**Priority**: 4 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **commit** | The commit id to use to access the file | string | None | +| **file** | The path and filename within the repository | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **repository_kind** | The kind of the repository | string | None | + + + +### Git Repository + + + +#### Event git.repository.add + + +**Description**: Clone and sync an external repository after creation. + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **location** | The external URL of the repository | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **created_by** | The user ID of the user that created the repository | N/A | None | +| **default_branch_name** | Default branch for this repository | N/A | None | + + +#### Event git.repository.merge + + +**Description**: Merge one branch into another. 
+ +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **source_branch** | The source branch | string | None | +| **destination_branch** | The source branch | string | None | + + +#### Event git.repository.add_read_only + + +**Description**: Clone and sync an external repository after creation. + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **location** | The external URL of the repository | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **ref** | Ref to track on the external repository | string | None | +| **created_by** | The user ID of the user that created the repository | N/A | None | +| **infrahub_branch_name** | Infrahub branch on which to sync the remote repository | string | None | + + +#### Event git.repository.pull_read_only + + +**Description**: Update a read-only repository to the latest commit for its ref + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **location** | The external URL of the repository | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **ref** | Ref to track on the external repository | N/A | None | +| **commit** | Specific commit to pull | N/A | None | +| **infrahub_branch_name** | Infrahub branch on which to sync the remote repository | string | None | + + + + +### Schema Migration + + + +#### Event schema.migration.path + + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The name of the branch to target | N/A | None | +| **migration_name** | The name of the migration to run | string | None | +| **new_node_schema** | new Schema of Node or Generic to process | N/A | None | +| **previous_node_schema** | Previous Schema of Node or Generic to process | N/A | None | +| **schema_path** | SchemaPath to the element of the schema to migrate | N/A | None | + + + +### Schema Validator + + + +#### Event schema.validator.path + + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The name of the branch to target | N/A | None | +| **constraint_name** | The name of the constraint to validate | string | None | +| **node_schema** | Schema of Node or Generic to validate | N/A | None | +| **schema_path** | SchemaPath to the element of the schema to validate | N/A | None | + + + + +### Refresh Registry + + + +#### Event refresh.registry.branches + + +**Description**: Sent to indicate that the registry should be refreshed and new branch data loaded. 
+ +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | + + +#### Event refresh.registry.rebased_branch + + +**Description**: Sent to refresh a rebased branch within the local registry. + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch that was rebased | string | None | + + + +### Refresh Webhook + + + +#### Event refresh.webhook.configuration + + +**Description**: Sent to indicate that configuration in the cache for webhooks should be refreshed. + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | + + + + +### Request Artifact + + + +#### Event request.artifact.generate + + +**Description**: Runs to generate an artifact + +**Priority**: 2 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **artifact_name** | Name of the artifact | string | None | +| **artifact_definition** | The the ID of the artifact definition | string | None | +| **commit** | The commit to target | string | None | +| **content_type** | Content type of the artifact | string | None | +| **transform_type** | The type of transform associated with this artifact | string | None | +| **transform_location** | The transforms location within the repository | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the Repository | string | None | +| **repository_kind** | The kind of the Repository | string | None | +| **branch_name** | The branch where the check is run | string | None | +| **target_id** | The ID of the target object for this artifact | string | None | +| **target_name** | Name of the artifact target | string | None | +| **artifact_id** | The id of the artifact if it previously existed | N/A | None | +| **query** | The name of the query to use when collecting data | string | None | +| **timeout** | Timeout for requests used to generate this artifact | integer | None | +| **variables** | Input variables when generating the artifact | object | None | + + + +### Request Artifact Definition + + + +#### Event request.artifact_definition.check + + +**Description**: Sent to validate the generation of artifacts in relation to a proposed change. + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **artifact_definition** | The Artifact Definition | N/A | None | +| **branch_diff** | The calculated diff between the two branches | N/A | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **source_branch** | The source branch | string | None | +| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None | +| **destination_branch** | The target branch | string | None | + + +#### Event request.artifact_definition.generate + + +**Description**: Sent to trigger the generation of artifacts for a given branch. 
+ +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **artifact_definition** | The unique ID of the Artifact Definition | string | None | +| **branch** | The branch to target | string | None | +| **limit** | List of targets to limit the scope of the generation, if populated only the included artifacts will be regenerated | array | None | + + + +### Request Generator + + + +#### Event request.generator.run + + +**Description**: Runs a generator. + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **generator_definition** | The Generator definition | N/A | None | +| **generator_instance** | The id of the generator instance if it previously existed | N/A | None | +| **commit** | The commit to target | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the Repository | string | None | +| **repository_kind** | The kind of the Repository | string | None | +| **branch_name** | The branch where the check is run | string | None | +| **target_id** | The ID of the target object for this generator | string | None | +| **target_name** | Name of the generator target | string | None | +| **query** | The name of the query to use when collecting data | string | None | +| **variables** | Input variables when running the generator | object | None | + + + +### Request Generator Definition + + + +#### Event request.generator_definition.check + + +**Description**: Sent to trigger Generators to run for a proposed change. + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **generator_definition** | The Generator Definition | N/A | None | +| **branch_diff** | The calculated diff between the two branches | N/A | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **source_branch** | The source branch | string | None | +| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None | +| **destination_branch** | The target branch | string | None | + + +#### Event request.generator_definition.run + + +**Description**: Sent to trigger a Generator to run on a specific branch. + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **generator_definition** | The Generator Definition | N/A | None | +| **branch** | The branch to target | string | None | + + + +### Request Git + + + +#### Event request.git.create_branch + + +**Description**: Sent to trigger the creation of a branch in git repositories. + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch to target | string | None | +| **branch_id** | The unique ID of the branch | string | None | + + +#### Event request.git.sync + + +**Description**: Request remote repositories to be synced. 
+
+**Priority**: 4
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+
+
+
+### Request Graphql Query Group
+
+
+
+#### Event request.graphql_query_group.update
+
+
+**Description**: Sent to create or update a GraphQLQueryGroup associated with a given GraphQLQuery.
+
+**Priority**: 3
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+| **branch** | The branch to target | string | None |
+| **query_name** | The name of the GraphQLQuery that should be associated with the group | string | None |
+| **query_id** | The ID of the GraphQLQuery that should be associated with the group | string | None |
+| **related_node_ids** | List of nodes related to the GraphQLQuery | array | None |
+| **subscribers** | List of subscribers to add to the group | array | None |
+| **params** | Params sent with the query | object | None |
+
+
+
+### Request Proposed Change
+
+
+
+#### Event request.proposed_change.cancel
+
+
+**Description**: Cancel the proposed change
+
+**Priority**: 3
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+| **proposed_change** | The unique ID of the Proposed Change | string | None |
+
+
+#### Event request.proposed_change.data_integrity
+
+
+**Description**: Sent to trigger data integrity checks for a proposed change
+
+**Priority**: 3
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+| **proposed_change** | The unique ID of the Proposed Change | string | None |
+| **source_branch** | The source branch of the proposed change | string | None |
+| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None |
+| **destination_branch** | The destination branch of the proposed change | string | None |
+| **branch_diff** | The calculated diff between the two branches | N/A | None |
+
+
+#### Event request.proposed_change.pipeline
+
+
+**Description**: Sent to request the start of a pipeline connected to a proposed change.
+
+**Priority**: 5
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+| **proposed_change** | The unique ID of the proposed change | string | None |
+| **source_branch** | The source branch of the proposed change | string | None |
+| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None |
+| **destination_branch** | The destination branch of the proposed change | string | None |
+| **check_type** | Can be used to restrict the pipeline to a specific type of job | N/A | all |
+
+
+#### Event request.proposed_change.refresh_artifacts
+
+
+**Description**: Sent to trigger the refresh of artifacts that are impacted by the proposed change.
+
+**Priority**: 3
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+| **proposed_change** | The unique ID of the Proposed Change | string | None |
+| **source_branch** | The source branch of the proposed change | string | None |
+| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None |
+| **destination_branch** | The destination branch of the proposed change | string | None |
+| **branch_diff** | The calculated diff between the two branches | N/A | None |
+
+
+#### Event request.proposed_change.repository_checks
+
+
+**Description**: Sent when a proposed change is created to trigger additional checks
+
+**Priority**: 5
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+| **proposed_change** | The unique ID of the Proposed Change | string | None |
+| **source_branch** | The source branch of the proposed change | string | None |
+| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None |
+| **destination_branch** | The destination branch of the proposed change | string | None |
+| **branch_diff** | The calculated diff between the two branches | N/A | None |
+
+
+#### Event request.proposed_change.run_generators
+
+
+**Description**: Sent to trigger the generators that are impacted by the proposed change to run.
+
+**Priority**: 3
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+| **proposed_change** | The unique ID of the Proposed Change | string | None |
+| **source_branch** | The source branch of the proposed change | string | None |
+| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None |
+| **destination_branch** | The destination branch of the proposed change | string | None |
+| **branch_diff** | The calculated diff between the two branches | N/A | None |
+
+
+#### Event request.proposed_change.schema_integrity
+
+
+**Description**: Sent to trigger schema integrity checks for a proposed change
+
+**Priority**: 3
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+| **proposed_change** | The unique ID of the Proposed Change | string | None |
+| **source_branch** | The source branch of the proposed change | string | None |
+| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None |
+| **destination_branch** | The destination branch of the proposed change | string | None |
+| **branch_diff** | The calculated diff between the two branches | N/A | None |
+
+
+#### Event request.proposed_change.run_tests
+
+
+**Description**: Sent to trigger running tests (smoke, units, integrations) for a proposed change.
+
+**Priority**: 3
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+| **proposed_change** | The unique ID of the Proposed Change | string | None |
+| **source_branch** | The source branch of the proposed change | string | None |
+| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None |
+| **destination_branch** | The destination branch of the proposed change | string | None |
+| **branch_diff** | The calculated diff between the two branches | N/A | None |
+
+
+
+### Request Repository
+
+
+
+#### Event request.repository.checks
+
+
+**Description**: Sent to trigger the checks for a repository to be executed.
+
+**Priority**: 3
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+| **proposed_change** | The unique ID of the Proposed Change | string | None |
+| **repository** | The unique ID of the Repository | string | None |
+| **source_branch** | The source branch | string | None |
+| **target_branch** | The target branch | string | None |
+
+
+#### Event request.repository.user_checks
+
+
+**Description**: Sent to trigger the user-defined checks on a repository.
+
+**Priority**: 3
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+| **proposed_change** | The unique ID of the Proposed Change | string | None |
+| **repository** | The unique ID of the Repository | string | None |
+| **source_branch** | The source branch | string | None |
+| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None |
+| **target_branch** | The target branch | string | None |
+| **branch_diff** | The calculated diff between the two branches | N/A | None |
+
+
+
+### Send Echo
+
+
+
+#### Event send.echo.request
+
+
+**Description**: Sends an echo request, i.e., a ping message.
+
+**Priority**: 3
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+| **message** | The message to send | string | None |
+
+
+
+### Send Webhook
+
+
+
+#### Event send.webhook.event
+
+
+**Description**: Sends a webhook to an external source.
+
+**Priority**: 3
+
+
+| Key | Description | Type | Default Value |
+|-----|-------------|------|---------------|
+| **meta** | Meta properties for the message | N/A | None |
+| **webhook_id** | The unique ID of the webhook | string | None |
+| **event_type** | The event type | string | None |
+| **event_data** | The data tied to the event | object | None |
+
+
+
+
+### Transform Jinja
+
+
+
+#### Event transform.jinja.template
+
+
+**Description**: Sent to trigger the rendering of a Jinja template.
+ +**Priority**: 4 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **repository_kind** | The kind of the repository | string | None | +| **data** | Input data for the template | object | None | +| **branch** | The branch to target | string | None | +| **template_location** | Location of the template within the repository | string | None | +| **commit** | The commit id to use when rendering the template | string | None | + + + +### Transform Python + + + +#### Event transform.python.data + + +**Description**: Sent to run a Python transform. + +**Priority**: 4 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **repository_kind** | The kind of the repository | string | None | +| **data** | Input data for the template | object | None | +| **branch** | The branch to target | string | None | +| **transform_location** | Location of the transform within the repository | string | None | +| **commit** | The commit id to use when rendering the template | string | None | + + + + +### Trigger Artifact Definition + + + +#### Event trigger.artifact_definition.generate + + +**Description**: Sent after a branch has been merged to start the regeneration of artifacts + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The impacted branch | string | None | + + + +### Trigger Generator Definition + + + +#### Event trigger.generator_definition.run + + +**Description**: Triggers all Generators to run on the desired branch. 
+ +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch to run the Generators in | string | None | + + + +### Trigger Ipam + + + +#### Event trigger.ipam.reconciliation + + +**Description**: Sent after a branch has been merged/rebased to reconcile changed IP Prefix and Address nodes + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The updated branch | string | None | +| **ipam_node_details** | Details for changed IP nodes | array | None | + + + +### Trigger Proposed Change + + + +#### Event trigger.proposed_change.cancel + + +**Description**: Triggers request to cancel any open or closed proposed changes for a given branch + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The impacted branch | string | None | + + + +### Trigger Webhook + + + +#### Event trigger.webhook.actions + + +**Description**: Triggers webhooks to be sent for the given action + +**Priority**: 3 + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **event_type** | The event type | string | None | +| **event_data** | The webhook payload | object | None | + + + + + +## Responses events + + +### Check Artifact + + + +#### Event check.artifact.create + + +**Description**: Runs a check to verify the creation of an artifact. + +**Priority**: 2 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **artifact_name** | Name of the artifact | string | None | +| **artifact_definition** | The the ID of the artifact definition | string | None | +| **commit** | The commit to target | string | None | +| **content_type** | Content type of the artifact | string | None | +| **transform_type** | The type of transform associated with this artifact | string | None | +| **transform_location** | The transforms location within the repository | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the Repository | string | None | +| **repository_kind** | The kind of the Repository | string | None | +| **branch_name** | The branch where the check is run | string | None | +| **target_id** | The ID of the target object for this artifact | string | None | +| **target_name** | Name of the artifact target | string | None | +| **artifact_id** | The id of the artifact if it previously existed | N/A | None | +| **query** | The name of the query to use when collecting data | string | None | +| **timeout** | Timeout for requests used to generate this artifact | integer | None | +| **variables** | Input variables when generating the artifact | object | None | +| **validator_id** | The ID of the validator | string | None | + + + +### Check Generator + + + +#### Event check.generator.run + + +**Description**: A check that runs a generator. 
+ +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **generator_definition** | The Generator definition | N/A | None | +| **generator_instance** | The id of the generator instance if it previously existed | N/A | None | +| **commit** | The commit to target | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the Repository | string | None | +| **repository_kind** | The kind of the Repository | string | None | +| **branch_name** | The branch where the check is run | string | None | +| **target_id** | The ID of the target object for this generator | string | None | +| **target_name** | Name of the generator target | string | None | +| **query** | The name of the query to use when collecting data | string | None | +| **variables** | Input variables when running the generator | object | None | +| **validator_id** | The ID of the validator | string | None | + + + +### Check Repository + + + +#### Event check.repository.check_definition + + +**Description**: Triggers user defined checks to run based on a Check Definition. + +**Priority**: 2 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **check_definition_id** | The unique ID of the check definition | string | None | +| **commit** | The commit to target | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the Repository | string | None | +| **branch_name** | The branch where the check is run | string | None | +| **file_path** | The path and filename of the check | string | None | +| **class_name** | The name of the class containing the check | string | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **branch_diff** | The calculated diff between the two branches | N/A | None | + + +#### Event check.repository.merge_conflicts + + +**Description**: Runs a check to validate if there are merge conflicts for a proposed change between two branches. + +**Priority**: 2 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **validator_id** | The id of the validator associated with this check | string | None | +| **validator_execution_id** | The id of current execution of the associated validator | string | None | +| **check_execution_id** | The unique ID for the current execution of this check | string | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the Repository | string | None | +| **source_branch** | The source branch | string | None | +| **target_branch** | The target branch | string | None | + + +#### Event check.repository.user_check + + +**Description**: Runs a check as defined within a CoreCheckDefinition within a repository. 
+ +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **validator_id** | The id of the validator associated with this check | string | None | +| **validator_execution_id** | The id of current execution of the associated validator | string | None | +| **check_execution_id** | The unique ID for the current execution of this check | string | None | +| **check_definition_id** | The unique ID of the check definition | string | None | +| **commit** | The commit to target | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the Repository | string | None | +| **branch_name** | The branch where the check is run | string | None | +| **file_path** | The path and filename of the check | string | None | +| **class_name** | The name of the class containing the check | string | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **variables** | Input variables when running the check | object | None | +| **name** | The name of the check | string | None | +| **branch_diff** | The calculated diff between the two branches | N/A | None | + + + + +### Event Branch + + + +#### Event event.branch.create + + +**Description**: Sent a new branch is created. + +**Priority**: 5 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch that was created | string | None | +| **branch_id** | The unique ID of the branch | string | None | +| **sync_with_git** | Indicates if Infrahub should extend this branch to git. | boolean | None | + + +#### Event event.branch.delete + + +**Description**: Sent when a branch has been deleted. + +**Priority**: 5 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch that was deleted | string | None | +| **branch_id** | The unique ID of the branch | string | None | +| **sync_with_git** | Indicates if the branch was extended to Git | boolean | None | + + +#### Event event.branch.merge + + +**Description**: Sent when a branch has been merged. + +**Priority**: 5 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **source_branch** | The source branch | string | None | +| **target_branch** | The target branch | string | None | +| **ipam_node_details** | Details for changed IP nodes | array | None | + + +#### Event event.branch.rebased + + +**Description**: Sent when a branch has been rebased. 
+ +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch that was rebased | string | None | +| **ipam_node_details** | Details for changed IP nodes | array | None | + + + +### Event Node + + + +#### Event event.node.mutated + + +**Description**: Sent when a node has been mutated + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch that was created | string | None | +| **kind** | The type of object modified | string | None | +| **node_id** | The ID of the mutated node | string | None | +| **action** | The action taken on the node | string | None | +| **data** | Data on modified object | object | None | + + + +### Event Schema + + + +#### Event event.schema.update + + +**Description**: Sent when the schema on a branch has been updated. + +**Priority**: 5 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch where the update occurred | string | None | + + + +### Event Worker + + + +#### Event event.worker.new_primary_api + + +**Description**: Sent on startup or when a new primary API worker is elected. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **worker_id** | The worker ID that got elected | string | None | + + + + +### Finalize Validator + + + +#### Event finalize.validator.execution + + +**Description**: Update the status of a validator after all checks have been completed. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **validator_id** | The id of the validator associated with this check | string | None | +| **validator_execution_id** | The id of current execution of the associated validator | string | None | +| **start_time** | Start time when the message was first created | string | None | +| **validator_type** | The type of validator to complete | string | None | + + + + +### Git Branch + + + +#### Event git.branch.create + + +**Description**: Create a branch in a Git repository. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | Name of the branch to create | string | None | +| **branch_id** | The unique ID of the branch | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the Repository | string | None | + + + +### Git Diff + + + +#### Event git.diff.names_only + + +**Description**: Request a list of modified files between two commits. 
+ +**Priority**: 4 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **repository_kind** | The kind of the repository | string | None | +| **first_commit** | The first commit | string | None | +| **second_commit** | The second commit | string | None | + + + +### Git File + + + +#### Event git.file.get + + +**Description**: Read a file from a Git repository. + +**Priority**: 4 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **commit** | The commit id to use to access the file | string | None | +| **file** | The path and filename within the repository | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **repository_kind** | The kind of the repository | string | None | + + + +### Git Repository + + + +#### Event git.repository.add + + +**Description**: Clone and sync an external repository after creation. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **location** | The external URL of the repository | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **created_by** | The user ID of the user that created the repository | N/A | None | +| **default_branch_name** | Default branch for this repository | N/A | None | + + +#### Event git.repository.merge + + +**Description**: Merge one branch into another. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **source_branch** | The source branch | string | None | +| **destination_branch** | The source branch | string | None | + + +#### Event git.repository.add_read_only + + +**Description**: Clone and sync an external repository after creation. 
+ +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **location** | The external URL of the repository | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **ref** | Ref to track on the external repository | string | None | +| **created_by** | The user ID of the user that created the repository | N/A | None | +| **infrahub_branch_name** | Infrahub branch on which to sync the remote repository | string | None | + + +#### Event git.repository.pull_read_only + + +**Description**: Update a read-only repository to the latest commit for its ref + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **location** | The external URL of the repository | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **ref** | Ref to track on the external repository | N/A | None | +| **commit** | Specific commit to pull | N/A | None | +| **infrahub_branch_name** | Infrahub branch on which to sync the remote repository | string | None | + + + + +### Schema Migration + + + +#### Event schema.migration.path + + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The name of the branch to target | N/A | None | +| **migration_name** | The name of the migration to run | string | None | +| **new_node_schema** | new Schema of Node or Generic to process | N/A | None | +| **previous_node_schema** | Previous Schema of Node or Generic to process | N/A | None | +| **schema_path** | SchemaPath to the element of the schema to migrate | N/A | None | + + + +### Schema Validator + + + +#### Event schema.validator.path + + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The name of the branch to target | N/A | None | +| **constraint_name** | The name of the constraint to validate | string | None | +| **node_schema** | Schema of Node or Generic to validate | N/A | None | +| **schema_path** | SchemaPath to the element of the schema to validate | N/A | None | + + + + +### Refresh Registry + + + +#### Event refresh.registry.branches + + +**Description**: Sent to indicate that the registry should be refreshed and new branch data loaded. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | + + +#### Event refresh.registry.rebased_branch + + +**Description**: Sent to refresh a rebased branch within the local registry. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch that was rebased | string | None | + + + +### Refresh Webhook + + + +#### Event refresh.webhook.configuration + + +**Description**: Sent to indicate that configuration in the cache for webhooks should be refreshed. 
+ +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | + + + + +### Request Artifact + + + +#### Event request.artifact.generate + + +**Description**: Runs to generate an artifact + +**Priority**: 2 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **artifact_name** | Name of the artifact | string | None | +| **artifact_definition** | The the ID of the artifact definition | string | None | +| **commit** | The commit to target | string | None | +| **content_type** | Content type of the artifact | string | None | +| **transform_type** | The type of transform associated with this artifact | string | None | +| **transform_location** | The transforms location within the repository | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the Repository | string | None | +| **repository_kind** | The kind of the Repository | string | None | +| **branch_name** | The branch where the check is run | string | None | +| **target_id** | The ID of the target object for this artifact | string | None | +| **target_name** | Name of the artifact target | string | None | +| **artifact_id** | The id of the artifact if it previously existed | N/A | None | +| **query** | The name of the query to use when collecting data | string | None | +| **timeout** | Timeout for requests used to generate this artifact | integer | None | +| **variables** | Input variables when generating the artifact | object | None | + + + +### Request Artifact Definition + + + +#### Event request.artifact_definition.check + + +**Description**: Sent to validate the generation of artifacts in relation to a proposed change. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **artifact_definition** | The Artifact Definition | N/A | None | +| **branch_diff** | The calculated diff between the two branches | N/A | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **source_branch** | The source branch | string | None | +| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None | +| **destination_branch** | The target branch | string | None | + + +#### Event request.artifact_definition.generate + + +**Description**: Sent to trigger the generation of artifacts for a given branch. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **artifact_definition** | The unique ID of the Artifact Definition | string | None | +| **branch** | The branch to target | string | None | +| **limit** | List of targets to limit the scope of the generation, if populated only the included artifacts will be regenerated | array | None | + + + +### Request Generator + + + +#### Event request.generator.run + + +**Description**: Runs a generator. 
+ +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **generator_definition** | The Generator definition | N/A | None | +| **generator_instance** | The id of the generator instance if it previously existed | N/A | None | +| **commit** | The commit to target | string | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the Repository | string | None | +| **repository_kind** | The kind of the Repository | string | None | +| **branch_name** | The branch where the check is run | string | None | +| **target_id** | The ID of the target object for this generator | string | None | +| **target_name** | Name of the generator target | string | None | +| **query** | The name of the query to use when collecting data | string | None | +| **variables** | Input variables when running the generator | object | None | + + + +### Request Generator Definition + + + +#### Event request.generator_definition.check + + +**Description**: Sent to trigger Generators to run for a proposed change. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **generator_definition** | The Generator Definition | N/A | None | +| **branch_diff** | The calculated diff between the two branches | N/A | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **source_branch** | The source branch | string | None | +| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None | +| **destination_branch** | The target branch | string | None | + + +#### Event request.generator_definition.run + + +**Description**: Sent to trigger a Generator to run on a specific branch. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **generator_definition** | The Generator Definition | N/A | None | +| **branch** | The branch to target | string | None | + + + +### Request Git + + + +#### Event request.git.create_branch + + +**Description**: Sent to trigger the creation of a branch in git repositories. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch to target | string | None | +| **branch_id** | The unique ID of the branch | string | None | + + +#### Event request.git.sync + + +**Description**: Request remote repositories to be synced. + +**Priority**: 4 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | + + + +### Request Graphql Query Group + + + +#### Event request.graphql_query_group.update + + +**Description**: Sent to create or update a GraphQLQueryGroup associated with a given GraphQLQuery. 
+ +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch to target | string | None | +| **query_name** | The name of the GraphQLQuery that should be associated with the group | string | None | +| **query_id** | The ID of the GraphQLQuery that should be associated with the group | string | None | +| **related_node_ids** | List of nodes related to the GraphQLQuery | array | None | +| **subscribers** | List of subscribers to add to the group | array | None | +| **params** | Params sent with the query | object | None | + + + +### Request Proposed Change + + + +#### Event request.proposed_change.cancel + + +**Description**: Cancel the proposed change + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | + + +#### Event request.proposed_change.data_integrity + + +**Description**: Sent trigger data integrity checks for a proposed change + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **source_branch** | The source branch of the proposed change | string | None | +| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None | +| **destination_branch** | The destination branch of the proposed change | string | None | +| **branch_diff** | The calculated diff between the two branches | N/A | None | + + +#### Event request.proposed_change.pipeline + + +**Description**: Sent request the start of a pipeline connected to a proposed change. + +**Priority**: 5 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **proposed_change** | The unique ID of the proposed change | string | None | +| **source_branch** | The source branch of the proposed change | string | None | +| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None | +| **destination_branch** | The destination branch of the proposed change | string | None | +| **check_type** | Can be used to restrict the pipeline to a specific type of job | N/A | all | + + +#### Event request.proposed_change.refresh_artifacts + + +**Description**: Sent trigger the refresh of artifacts that are impacted by the proposed change. 
+ +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **source_branch** | The source branch of the proposed change | string | None | +| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None | +| **destination_branch** | The destination branch of the proposed change | string | None | +| **branch_diff** | The calculated diff between the two branches | N/A | None | + + +#### Event request.proposed_change.repository_checks + + +**Description**: Sent when a proposed change is created to trigger additional checks + +**Priority**: 5 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **source_branch** | The source branch of the proposed change | string | None | +| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None | +| **destination_branch** | The destination branch of the proposed change | string | None | +| **branch_diff** | The calculated diff between the two branches | N/A | None | + + +#### Event request.proposed_change.run_generators + + +**Description**: Sent trigger the generators that are impacted by the proposed change to run. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **source_branch** | The source branch of the proposed change | string | None | +| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None | +| **destination_branch** | The destination branch of the proposed change | string | None | +| **branch_diff** | The calculated diff between the two branches | N/A | None | + + +#### Event request.proposed_change.schema_integrity + + +**Description**: Sent trigger schema integrity checks for a proposed change + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **source_branch** | The source branch of the proposed change | string | None | +| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None | +| **destination_branch** | The destination branch of the proposed change | string | None | +| **branch_diff** | The calculated diff between the two branches | N/A | None | + + +#### Event request.proposed_change.run_tests + + +**Description**: Sent trigger to run tests (smoke, units, integrations) for a proposed change. 
+ +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **source_branch** | The source branch of the proposed change | string | None | +| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None | +| **destination_branch** | The destination branch of the proposed change | string | None | +| **branch_diff** | The calculated diff between the two branches | N/A | None | + + + +### Request Repository + + + +#### Event request.repository.checks + + +**Description**: Sent to trigger the checks for a repository to be executed. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **repository** | The unique ID of the Repository | string | None | +| **source_branch** | The source branch | string | None | +| **target_branch** | The target branch | string | None | + + +#### Event request.repository.user_checks + + +**Description**: Sent to trigger the user defined checks on a repository. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **proposed_change** | The unique ID of the Proposed Change | string | None | +| **repository** | The unique ID of the Repository | string | None | +| **source_branch** | The source branch | string | None | +| **source_branch_sync_with_git** | Indicates if the source branch should sync with git | boolean | None | +| **target_branch** | The target branch | string | None | +| **branch_diff** | The calculated diff between the two branches | N/A | None | + + + + +### Send Echo + + + +#### Event send.echo.request + + +**Description**: Sent a echo request, i.e., ping message. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **message** | The message to send | string | None | + + + +### Send Webhook + + + +#### Event send.webhook.event + + +**Description**: Sent a webhook to an external source. + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **webhook_id** | The unique ID of the webhook | string | None | +| **event_type** | The event type | string | None | +| **event_data** | The data tied to the event | object | None | + + + + +### Transform Jinja + + + +#### Event transform.jinja.template + + +**Description**: Sent to trigger the checks for a repository to be executed. 
+ +**Priority**: 4 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **repository_kind** | The kind of the repository | string | None | +| **data** | Input data for the template | object | None | +| **branch** | The branch to target | string | None | +| **template_location** | Location of the template within the repository | string | None | +| **commit** | The commit id to use when rendering the template | string | None | + + + +### Transform Python + + + +#### Event transform.python.data + + +**Description**: Sent to run a Python transform. + +**Priority**: 4 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **repository_id** | The unique ID of the Repository | string | None | +| **repository_name** | The name of the repository | string | None | +| **repository_kind** | The kind of the repository | string | None | +| **data** | Input data for the template | object | None | +| **branch** | The branch to target | string | None | +| **transform_location** | Location of the transform within the repository | string | None | +| **commit** | The commit id to use when rendering the template | string | None | + + + + +### Trigger Artifact Definition + + + +#### Event trigger.artifact_definition.generate + + +**Description**: Sent after a branch has been merged to start the regeneration of artifacts + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The impacted branch | string | None | + + + +### Trigger Generator Definition + + + +#### Event trigger.generator_definition.run + + +**Description**: Triggers all Generators to run on the desired branch. 
+ +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The branch to run the Generators in | string | None | + + + +### Trigger Ipam + + + +#### Event trigger.ipam.reconciliation + + +**Description**: Sent after a branch has been merged/rebased to reconcile changed IP Prefix and Address nodes + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The updated branch | string | None | +| **ipam_node_details** | Details for changed IP nodes | array | None | + + + +### Trigger Proposed Change + + + +#### Event trigger.proposed_change.cancel + + +**Description**: Triggers request to cancel any open or closed proposed changes for a given branch + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **branch** | The impacted branch | string | None | + + + +### Trigger Webhook + + + +#### Event trigger.webhook.actions + + +**Description**: Triggers webhooks to be sent for the given action + +**Priority**: 3 + + + +| Key | Description | Type | Default Value | +|-----|-------------|------|---------------| +| **meta** | Meta properties for the message | N/A | None | +| **event_type** | The event type | string | None | +| **event_data** | The webhook payload | object | None | + + + diff --git a/docs/docs/reference/schema/attribute.mdx b/docs/docs/reference/schema/attribute.mdx index 0cfb900b72..c74073914a 100644 --- a/docs/docs/reference/schema/attribute.mdx +++ b/docs/docs/reference/schema/attribute.mdx @@ -15,6 +15,7 @@ Below is the list of all available options to define an Attribute in the schema | Name | Type | Description | Mandatory | | ---- | ---- | ---- | --------- | +| [**allow_override**](#allow_override) | Attribute | Type of allowed override for the attribute. | False | | [**branch**](#branch) | Attribute | Type of branch support for the attribute, if not defined it will be inherited from the node. | False | | [**choices**](#choices) | Attribute | Define a list of valid choices for a dropdown attribute. | False | | [**default_value**](#default_value) | Attribute | Default value of the attribute. | False | @@ -26,9 +27,9 @@ Below is the list of all available options to define an Attribute in the schema | [**min_length**](#min_length) | Attribute | Set a minimum number of characters allowed for a given attribute. | False | | [**name**](#name) | Attribute | Attribute name, must be unique within a model and must be all lowercase. | True | | [**optional**](#optional) | Attribute | Indicate if this attribute is mandatory or optional. | False | -| [**order_weight**](#order_weight) | Attribute | Number used to order the attribute in the frontend (table and view). | False | +| [**order_weight**](#order_weight) | Attribute | Number used to order the attribute in the frontend (table and view). Lowest value will be ordered first. | False | | [**read_only**](#read_only) | Attribute | Set the attribute as Read-Only, users won't be able to change its value. Mainly relevant for internal object. | False | -| [**regex**](#regex) | Attribute | Regex uses to limit limit the characters allowed in for the attributes. 
| False | +| [**regex**](#regex) | Attribute | Regex uses to limit the characters allowed in for the attributes. | False | | [**state**](#state) | Attribute | Expected state of the attribute after loading the schema | False | | [**unique**](#unique) | Attribute | Indicate if the value of this attribute must be unique in the database for a given model. | False | @@ -55,6 +56,18 @@ extensions: ## Reference Guide +### allow_override + +| Key | Value | +| ---- | --------------- | +| **Name** | allow_override | +| **Kind** | `Text` | +| **Description** | Type of allowed override for the attribute. | +| **Optional** | True | +| **Default Value** | any | +| **Constraints** | | +| **Accepted Values** | `none` `any` | + ### branch | Key | Value | @@ -195,7 +208,7 @@ extensions: | ---- | --------------- | | **Name** | order_weight | | **Kind** | `Number` | -| **Description** | Number used to order the attribute in the frontend (table and view). | +| **Description** | Number used to order the attribute in the frontend (table and view). Lowest value will be ordered first. | | **Optional** | True | | **Default Value** | | | **Constraints** | | @@ -217,7 +230,7 @@ extensions: | ---- | --------------- | | **Name** | regex | | **Kind** | `Text` | -| **Description** | Regex uses to limit limit the characters allowed in for the attributes. | +| **Description** | Regex uses to limit the characters allowed in for the attributes. | | **Optional** | True | | **Default Value** | | | **Constraints** | | diff --git a/docs/docs/reference/schema/generic.mdx b/docs/docs/reference/schema/generic.mdx index 86cf366b15..44a36940b4 100644 --- a/docs/docs/reference/schema/generic.mdx +++ b/docs/docs/reference/schema/generic.mdx @@ -18,6 +18,7 @@ Below is the list of all available options to define a Generic in the schema | [**default_filter**](#default_filter) | Attribute | Default filter used to search for a node in addition to its ID. | False | | [**description**](#description) | Attribute | Short description of the model, will be visible in the frontend. | False | | [**display_labels**](#display_labels) | Attribute | List of attributes to use to generate the display label | False | +| [**documentation**](#documentation) | Attribute | Link to a documentation associated with this object, can be internal or external. | False | | [**hierarchical**](#hierarchical) | Attribute | Defines if the Generic support the hierarchical mode. | False | | [**icon**](#icon) | Attribute | Defines the icon to use in the menu. Must be a valid value from the MDI library https://icon-sets.iconify.design/mdi/ | False | | [**include_in_menu**](#include_in_menu) | Attribute | Defines if objects of this kind should be included in the menu. | False | @@ -79,6 +80,17 @@ Below is the list of all available options to define a Generic in the schema | **Default Value** | | | **Constraints** | | +### documentation + +| Key | Value | +| ---- | --------------- | +| **Name** | documentation | +| **Kind** | `URL` | +| **Description** | Link to a documentation associated with this object, can be internal or external. 
| +| **Optional** | True | +| **Default Value** | | +| **Constraints** | | + ### hierarchical | Key | Value | diff --git a/docs/docs/reference/schema/node.mdx b/docs/docs/reference/schema/node.mdx index bb1f51f353..619234e2c3 100644 --- a/docs/docs/reference/schema/node.mdx +++ b/docs/docs/reference/schema/node.mdx @@ -19,6 +19,7 @@ Below is the list of all available options to define a Node in the schema | [**default_filter**](#default_filter) | Attribute | Default filter used to search for a node in addition to its ID. | False | | [**description**](#description) | Attribute | Short description of the model, will be visible in the frontend. | False | | [**display_labels**](#display_labels) | Attribute | List of attributes to use to generate the display label | False | +| [**documentation**](#documentation) | Attribute | Link to a documentation associated with this object, can be internal or external. | False | | [**hierarchy**](#hierarchy) | Attribute | Internal value to track the name of the Hierarchy, must match the name of a Generic supporting hierarchical mode | False | | [**icon**](#icon) | Attribute | Defines the icon to use in the menu. Must be a valid value from the MDI library https://icon-sets.iconify.design/mdi/ | False | | [**include_in_menu**](#include_in_menu) | Attribute | Defines if objects of this kind should be included in the menu. | False | @@ -92,6 +93,17 @@ Below is the list of all available options to define a Node in the schema | **Default Value** | | | **Constraints** | | +### documentation + +| Key | Value | +| ---- | --------------- | +| **Name** | documentation | +| **Kind** | `URL` | +| **Description** | Link to a documentation associated with this object, can be internal or external. | +| **Optional** | True | +| **Default Value** | | +| **Constraints** | | + ### hierarchy | Key | Value | diff --git a/docs/docs/reference/schema/relationship.mdx b/docs/docs/reference/schema/relationship.mdx index 981ffeb90f..43b48312b4 100644 --- a/docs/docs/reference/schema/relationship.mdx +++ b/docs/docs/reference/schema/relationship.mdx @@ -15,6 +15,7 @@ Below is the list of all available options to define a Relationship in the schem | Name | Type | Description | Mandatory | | ---- | ---- | ---- | --------- | +| [**allow_override**](#allow_override) | Attribute | Type of allowed override for the relationship. | False | | [**branch**](#branch) | Attribute | Type of branch support for the relatioinship, if not defined it will be determine based both peers. | False | | [**cardinality**](#cardinality) | Attribute | Defines how many objects are expected on the other side of the relationship. | False | | [**description**](#description) | Attribute | Short description of the relationship. | False | @@ -26,13 +27,27 @@ Below is the list of all available options to define a Relationship in the schem | [**max_count**](#max_count) | Attribute | Defines the maximum objects allowed on the other side of the relationship. | False | | [**min_count**](#min_count) | Attribute | Defines the minimum objects allowed on the other side of the relationship. | False | | [**name**](#name) | Attribute | Relationship name, must be unique within a model and must be all lowercase. | True | +| [**on_delete**](#on_delete) | Attribute | Default is no-action. If cascade, related node(s) are deleted when this node is deleted. | False | | [**optional**](#optional) | Attribute | Indicate if this relationship is mandatory or optional. 
| False | -| [**order_weight**](#order_weight) | Attribute | Number used to order the relationship in the frontend (table and view). | False | +| [**order_weight**](#order_weight) | Attribute | Number used to order the relationship in the frontend (table and view). Lowest value will be ordered first. | False | | [**peer**](#peer) | Attribute | Type (kind) of objects supported on the other end of the relationship. | True | +| [**read_only**](#read_only) | Attribute | Set the relationship as read-only, users won't be able to change its value. | False | | [**state**](#state) | Attribute | Expected state of the relationship after loading the schema | False | ## Reference Guide +### allow_override + +| Key | Value | +| ---- | --------------- | +| **Name** | allow_override | +| **Kind** | `Text` | +| **Description** | Type of allowed override for the relationship. | +| **Optional** | True | +| **Default Value** | any | +| **Constraints** | | +| **Accepted Values** | `none` `any` | + ### branch | Key | Value | @@ -123,7 +138,7 @@ Below is the list of all available options to define a Relationship in the schem | **Optional** | False | | **Default Value** | Generic | | **Constraints** | | -| **Accepted Values** | `Generic` `Attribute` `Component` `Parent` `Group` `Hierarchy` | +| **Accepted Values** | `Generic` `Attribute` `Component` `Parent` `Group` `Hierarchy` `Profile` | ### label @@ -169,6 +184,18 @@ Below is the list of all available options to define a Relationship in the schem | **Default Value** | | | **Constraints** | Regex: `^[a-z0-9\_]+$`
Length: min 3, max 32 | +### on_delete + +| Key | Value | +| ---- | --------------- | +| **Name** | on_delete | +| **Kind** | `Text` | +| **Description** | Default is no-action. If cascade, related node(s) are deleted when this node is deleted. | +| **Optional** | True | +| **Default Value** | | +| **Constraints** | | +| **Accepted Values** | `no-action` `cascade` | + ### optional | Key | Value | @@ -186,7 +213,7 @@ Below is the list of all available options to define a Relationship in the schem | ---- | --------------- | | **Name** | order_weight | | **Kind** | `Number` | -| **Description** | Number used to order the relationship in the frontend (table and view). | +| **Description** | Number used to order the relationship in the frontend (table and view). Lowest value will be ordered first. | | **Optional** | True | | **Default Value** | | | **Constraints** | | @@ -200,7 +227,18 @@ Below is the list of all available options to define a Relationship in the schem | **Description** | Type (kind) of objects supported on the other end of the relationship. | | **Optional** | False | | **Default Value** | | -| **Constraints** | Regex: `^[A-Z][a-zA-Z0-9]+$`
Length: min 3, max 32 | +| **Constraints** | Regex: `^[A-Z][a-zA-Z0-9]+$` | + +### read_only + +| Key | Value | +| ---- | --------------- | +| **Name** | read_only | +| **Kind** | `Boolean` | +| **Description** | Set the relationship as read-only, users won't be able to change its value. | +| **Optional** | True | +| **Default Value** | False | +| **Constraints** | | ### state diff --git a/docs/docs/reference/schema/validator-migration.mdx b/docs/docs/reference/schema/validator-migration.mdx index 8e0ad76b0d..592b191b55 100644 --- a/docs/docs/reference/schema/validator-migration.mdx +++ b/docs/docs/reference/schema/validator-migration.mdx @@ -38,6 +38,7 @@ In this context, an element represent either a Node, a Generic, an Attribute or | **icon** | allowed | | **order_by** | allowed | | **uniqueness_constraints** | validate_constraint | +| **documentation** | allowed | | **inherit_from** | not_supported | | **hierarchy** | validate_constraint | | **parent** | validate_constraint | @@ -63,6 +64,7 @@ In this context, an element represent either a Node, a Generic, an Attribute or | **branch** | not_supported | | **order_weight** | allowed | | **default_value** | allowed | +| **allow_override** | allowed | ### Relationship @@ -83,6 +85,9 @@ In this context, an element represent either a Node, a Generic, an Attribute or | **branch** | not_supported | | **direction** | not_supported | | **hierarchical** | not_supported | +| **on_delete** | allowed | +| **allow_override** | allowed | +| **read_only** | allowed | ### Generic @@ -101,6 +106,7 @@ In this context, an element represent either a Node, a Generic, an Attribute or | **icon** | allowed | | **order_by** | allowed | | **uniqueness_constraints** | validate_constraint | +| **documentation** | allowed | | **hierarchical** | validate_constraint | diff --git a/docs/docs/release-notes/release-0_12.mdx b/docs/docs/release-notes/release-0_12.mdx index aee17d27f5..fee6937dbd 100644 --- a/docs/docs/release-notes/release-0_12.mdx +++ b/docs/docs/release-notes/release-0_12.mdx @@ -162,7 +162,7 @@ The processing of the schema internally has been significantly improved and as a #### Sync Engine -The Synchronization Engine has been improved to be able to run in standalone mode and it has been integrated with Dagster, to provide more visibility into the process and leverage its orchestration capabilitites. +The Synchronization Engine has been improved to be able to run in standalone mode and it has been integrated with Dagster, to provide more visibility into the process and leverage its orchestration capabilities. The project has been packaged as a dedicated Python package and it's now available on PyPI `infrahub-sync`. #### Documentation Update diff --git a/docs/docs/release-notes/release-0_13.mdx b/docs/docs/release-notes/release-0_13.mdx new file mode 100644 index 0000000000..f29de2895d --- /dev/null +++ b/docs/docs/release-notes/release-0_13.mdx @@ -0,0 +1,189 @@ +--- +title: Release 0.13 +--- + + + + + + + + + + + + + + + + + + + +
+Release Number: 0.13.0
+Release Date: May XX, 2024
+Release Codename: -
+Tag: -
+ +# Release 0.13.0 + +## Main Changes + +### Unified Storage + +#### IP Address Management + +Infrahub now includes builtin support for IP Prefixes and IP Addresses, both for IPv4 and IPv6. + +To keep things extensible, Infrahub provides a minimal schema to capture the relationships between IP Prefix, IP address and IP Namespace. +Infrahub will automatically maintain trees of IP prefixes and IP addresses being built based on the IP Namespace. +Building these hierarchies/trees allows Infrahub to determine how IP prefixes and IP addresses are nested as well as computing utilization of the recorded IP spaces. + +The following `Generic` models are provided by default and can be extended as needed to add your own attributes/relationships or constraints: + +- `BuiltinIPNamespace`: used to model a namespace to manage IP resources, this is a generic representation of what could be, for examples, a routing table, a routing instance or a VRF +- `BuiltinIPPrefix`: used to model a network, sometimes referred as supernet/subnet +- `BuiltinIPAddress`: used to model a single IP address + +More information about IPAM is available in the [Documentation](/topics/ipam). + +![prefix View](../media/release_notes/infrahub_0_13_0/ipam_01.png) + +![Prefix List](../media/release_notes/infrahub_0_13_0/ipam_02.png) + +#### Profiles + +A profile in Infrahub allow you to define a common set of attributes that should be applied to nodes. + +A node that has a profile assigned, will get the values of its attributes inherited from the assigned profile, if no value is defined for the attribute at the node, or if the default value is used. +The attribute values of a node that were inherited from a profile can be overridden, by defining them at the node. + +More information about Profiles is available in the [Documentation](/topics/profiles). + +#### Leverage Database Indexes to improve performance + +Infrahub is now leveraging database indexes to improve the overall performance of the database. +Indexes will be automatically applied during startup. + +A new command `infrahub db index` has been introduced to manage the indexes. + +```shell +infrahub db index --help + + Usage: infrahub db index [OPTIONS] [ACTION]:[show|add|drop] [CONFIG_FILE] + + Manage Database Indexes + +╭─ Arguments ─────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +│ action [ACTION]:[show|add|drop] [default: IndexAction.SHOW] │ +│ config_file [CONFIG_FILE] [env var: INFRAHUB_CONFIG] [default: infrahub.toml] │ +╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────────╮ +│ --help Show this message and exit. │ +╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +``` + + +### CI Pipeline + +#### Generators + +A Generator is a generic plugin that can be used apply your own logic to create new nodes and relationships. +Generator are expected to be idempotent and should be able to run multiple times and always produce the same result. + +One use case for the generators is to be able to manage technical objects derived from a higher level definition of a service. + +Generators are associated with some input data identified by a GraphQL query. 
+Similar to Transforms & Artifacts, Generators will be automatically executed as part of the CI Pipeline if the data associated with a given generator has changed.
+
+The Generator itself is a Python class that is based on the `InfrahubGenerator` class from the SDK. Just like transforms and checks, the Generators are user defined.
+
+More information about Generators is available in the [Documentation](/topics/generator).
+
+#### Redesigned proposed change creation form
+
+The form to create a proposed change has been redesigned to expand the description field.
+
+![Proposed Change Form](../media/release_notes/infrahub_0_13_0/proposed_change_form.png)
+
+### Schema
+
+#### Relationship of type Parent can't be optional
+
+The constraints around the relationships of kind `Parent` have been tightened and these relationships can't be optional anymore.
+All existing relationships will be automatically migrated when upgrading to this release and your schema will need to be updated.
+
+#### Improvement and modularization of demo schema
+
+The demo schemas located in the `models` directory have been updated to cover more use cases and to simplify how to use them.
+
+The main schema, previously stored in the file `infrastructure_base.yml`, has been broken down into multiple schemas now located in the `base` directory.
+The other schemas have been moved to the `examples` directory.
+
+#### Cascade node deletion
+
+It's now possible to define how related nodes should be handled when a node is being deleted. This feature is usually referred to as CASCADE DELETE.
+On each relationship, it's now possible to define `on_delete: CASCADE` to indicate that if this node is deleted, all nodes on the other side of this relationship must be deleted too.
+
+#### New options available in the schema
+
+The attribute `read_only` is now available on all relationships.
+If set to true, users won't be able to add or remove peers on this relationship (it is not present in GraphQL mutations and forms).
+
+The attribute `allow_override` is now available on all attributes and relationships.
+This new flag is meant to be used on a Generic node to protect an attribute or relationship from being overridden by a node inheriting from this generic.
+
+The attribute `documentation` is now available on all Nodes and Generics.
+This field is meant to store a URL where the documentation for this model is available.
+The link will be displayed in the help popup in the frontend.
+
+The attribute `on_delete` is now available on all relationships. See the section above.
+
+More information about the schema is available in the [Documentation](/reference/schema).
+
+### API / GraphQL
+
+#### `is_from_profile` and `is_default` properties on all attributes
+
+In GraphQL, it's now possible to query two new properties on all attributes to gather more information about the origin of the value:
+
+- `is_from_profile`: True if the value was inherited from a profile; the name of the profile is available under `source`
+- `is_default`: True if the value was set by the default value defined in the schema
+
+### Other
+
+#### Schema information included in the search anywhere bar
+
+The main search anywhere bar now includes information from the schema and
+can return existing schema pages as potential results for the search.
+
+![Search in Schema](../media/release_notes/infrahub_0_13_0/search_in_list.png)
+
+![Search in Schema](../media/release_notes/infrahub_0_13_0/filter_in_list.png)
+
+#### Search and filter on object list view
+
+The object list view has been updated to include a new search bar and a new panel to filter the elements of the list.
+
+![Search in Schema](../media/release_notes/infrahub_0_13_0/search_schema.png)
+
+#### Cleanup of invoke tasks and introduction of the `dev` namespace
+
+The invoke tasks have been cleaned up to clearly separate the tasks related to the demo environment from the ones for internal development.
+A new namespace `dev` has been introduced and some commands have been renamed, including `demo.dev-start`, which is now `dev.deps`.
+
+A new `demo.migrate` command has been introduced to apply the database & schema migrations.
+
+#### Update GraphiQL Application
+
+The GraphQL sandbox (GraphiQL) has been integrated into Infrahub directly to provide a better experience
+and remove the dependency on an external CDN.
+
+The standard menu is now available on the left of the GraphiQL application
+and it's possible to select the active branch directly with the standard dropdown.
+
+![Search in Schema](../media/release_notes/infrahub_0_13_0/graphiql.png)
+
+#### Help Panel in the list view
+
+A new help popup is now available in the top right corner of a list view for all objects.
+The help popup includes a link to the schema for this object and, if defined in the schema, a link to the external documentation.
+
+![help Panel](../media/release_notes/infrahub_0_13_0/help_panel.png)
diff --git a/docs/docs/sync/guides/creation.mdx b/docs/docs/sync/guides/creation.mdx
new file mode 100644
index 0000000000..fe60cca523
--- /dev/null
+++ b/docs/docs/sync/guides/creation.mdx
@@ -0,0 +1,63 @@
+---
+title: Creating a New Sync Instance
+---
+
+# Creating a new Sync instance
+
+This guide will walk you through the steps to create a new Sync Instance for Infrahub Sync, allowing you to synchronize data between your source and destination systems efficiently.
+
+## Step 1: Define your configuration
+
+Start by defining your synchronization requirements in a YAML configuration file.
+
+Here's an example configuration for syncing data from Nautobot to Infrahub:
+In this example, consider that `device_type` and `manufacturer` are Attributes of InfraDevice.
If you are using another object with a Relationship, you would need to first import those objects and then referenced them (like InfraDevice in InfraInterface) + +```yaml +--- +name: example-sync-task + +source: + name: nautobot + settings: + url: "https://nautobot.example.com" + token: "NAUTOBOT_API_TOKEN" # This can also be loaded from environment variables + +destination: + name: infrahub + settings: + url: "https://infrahub.example.com" + token: "INFRAHUB_API_TOKEN" # This can also be loaded from environment variables + + +order: + - "InfraDevice" + - "InfraInterface" + +schema_mapping: + - name: InfraDevice + mapping: "dcim.devices" + identifiers: ["name"] + fields: + - name: "name" + mapping: "name" + - name: "device_type" + mapping: "device_type.display_name" + - name: "manufacturer" + mapping: "device_type.manufacturer.name" + + - name: InfraInterface + mapping: "dcim.interfaces" + identifiers: ["device", "name"] + fields: + - name: "name" + mapping: "name" + - name: "interface_type" + static: "10gbe" + - name: "description" + mapping: "description" + - name: "device" + reference: "InfraDevice" +``` + +For more information on customizing your sync configuration and troubleshooting, see the [Sync Instance configuration reference](/sync/reference/config) \ No newline at end of file diff --git a/docs/docs/sync/guides/installation.mdx b/docs/docs/sync/guides/installation.mdx new file mode 100644 index 0000000000..2221fe70ed --- /dev/null +++ b/docs/docs/sync/guides/installation.mdx @@ -0,0 +1,12 @@ +--- +title: Installing infrahub-sync +--- +# Installing infrahub-sync + +The Infrahub Sync is available on [PyPI](https://pypi.org/project/infrahub-sync/) and can be installed using the pip package installer. It is recommended to install the Sync into a virtual environment. + +```bash +python3 -m venv .venv +source .venv/bin/activate +pip install infrahub-sync +``` diff --git a/docs/docs/sync/guides/run.mdx b/docs/docs/sync/guides/run.mdx new file mode 100644 index 0000000000..46975eaea4 --- /dev/null +++ b/docs/docs/sync/guides/run.mdx @@ -0,0 +1,76 @@ +--- +title: Running Sync Tasks +--- + +# Running sync tasks with Infrahub Sync + +Learn how to use Infrahub Sync's commands to generate sync adapters, calculate differences, and synchronize data between your source and destination systems. + +![Infrahub-Sync process](../../media/infrahub_sync_process.excalidraw.svg) + + +## Generating Sync Adapters and Models + + +Before you can run a synchronization task, you need to generate the necessary Python code for your sync adapters and models based on your configuration. +To create a new configuration, please refer to the guide [Creating a new Sync Instance](/sync/guides/creation) + +### Command + +```bash +infrahub-sync generate --name --directory +``` + +### Parameters + +- `--name`: The name of the sync project you want to generate code for. +- `--directory`: The directory where your sync configuration files are located. + +This command reads your configuration file and generates Python code for the sync adapters and models required for the synchronization task. + +## Calculating differences + +The `diff` command lets you see the differences between your source and destination before actually performing the synchronization. This is useful for verifying what will be synchronized. + +### Command + +```bash +infrahub-sync diff --name --directory +``` + +### Parameters + +- `--name`: Specifies the sync project for which you want to calculate differences. 
+- `--directory`: The directory where your sync configuration files are located. + +Running this command will output the differences detected based on the current state of your source and destination systems. + +## Synchronizing data + +Once you're ready to synchronize the data between your source and destination, you can use the `sync` command. + +### Command + +```bash +infrahub-sync sync --name --directory +``` + +### Parameters + +- `--name`: The name of the sync project you wish to run. +- `--directory`: The directory where your sync configuration files are located. + +This command performs the synchronization, applying the changes from the source to the destination based on the differences calculated by the `diff` command. + +### Progress and logging + +The `sync` command also supports additional flags for displaying progress and managing logging: + +- `--show-progress`: Displays a progress bar during synchronization. +- `--diff`: Print the differences between the source and the destination before syncing. + +For example: + +```bash +infrahub-sync sync --name my_project --directory configs --diff --show-progress +``` \ No newline at end of file diff --git a/docs/docs/sync/readme.mdx b/docs/docs/sync/readme.mdx new file mode 100644 index 0000000000..d396ecd18d --- /dev/null +++ b/docs/docs/sync/readme.mdx @@ -0,0 +1,20 @@ +--- +title: Sync Engine +--- +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + + +# Infrahub Sync + +Infrahub-Sync is a versatile Python package that synchronizes data between a source and a destination system.It builds on the robust capabilities of `diffsync` to offer flexible and efficient data synchronization across different platforms, including Netbox, Nautobot, and Infrahub. This package features a Typer-based CLI for ease of use, supporting operations such as listing available sync projects, generating diffs, and executing sync processes. + +## Guides + +- [Installing infrahub-sync](/sync/guides/installation) +- [Creating a new Sync Instance](/sync/guides/creation) +- [Run a Sync Instance](/sync/guides/run) + +## Reference + +- [Sync Instance configuration](/sync/reference/config) diff --git a/docs/docs/sync/reference/cli.mdx b/docs/docs/sync/reference/cli.mdx new file mode 100644 index 0000000000..19f1d0fc84 --- /dev/null +++ b/docs/docs/sync/reference/cli.mdx @@ -0,0 +1,91 @@ +# `infrahub-sync` + +**Usage**: + +```console +$ infrahub-sync [OPTIONS] COMMAND [ARGS]... +``` + +**Options**: + +* `--install-completion`: Install completion for the current shell. +* `--show-completion`: Show completion for the current shell, to copy it or customize the installation. +* `--help`: Show this message and exit. + +**Commands**: + +* `diff`: Calculate and print the differences... +* `generate`: Generate all the python files for a given... +* `list`: List all available SYNC projects. +* `sync`: Synchronize the data between source and... + +## `infrahub-sync diff` + +Calculate and print the differences between the source and the destination systems for a given project. + +**Usage**: + +```console +$ infrahub-sync diff [OPTIONS] +``` + +**Options**: + +* `--name TEXT`: Name of the sync to use +* `--config-file TEXT`: File path to the sync configuration YAML file +* `--directory TEXT`: Base directory to search for sync configurations +* `--branch TEXT`: Branch to use for the diff. +* `--show-progress / --no-show-progress`: Show a progress bar during diff [default: show-progress] +* `--help`: Show this message and exit. 
+ +## `infrahub-sync generate` + +Generate all the python files for a given sync based on the configuration. + +**Usage**: + +```console +$ infrahub-sync generate [OPTIONS] +``` + +**Options**: + +* `--name TEXT`: Name of the sync to use +* `--config-file TEXT`: File path to the sync configuration YAML file +* `--directory TEXT`: Base directory to search for sync configurations +* `--help`: Show this message and exit. + +## `infrahub-sync list` + +List all available SYNC projects. + +**Usage**: + +```console +$ infrahub-sync list [OPTIONS] +``` + +**Options**: + +* `--directory TEXT`: Base directory to search for sync configurations +* `--help`: Show this message and exit. + +## `infrahub-sync sync` + +Synchronize the data between source and the destination systems for a given project or configuration file. + +**Usage**: + +```console +$ infrahub-sync sync [OPTIONS] +``` + +**Options**: + +* `--name TEXT`: Name of the sync to use +* `--config-file TEXT`: File path to the sync configuration YAML file +* `--directory TEXT`: Base directory to search for sync configurations +* `--branch TEXT`: Branch to use for the sync. +* `--diff / --no-diff`: Print the differences between the source and the destination before syncing [default: diff] +* `--show-progress / --no-show-progress`: Show a progress bar during syncing [default: show-progress] +* `--help`: Show this message and exit. diff --git a/docs/docs/sync/reference/config.mdx b/docs/docs/sync/reference/config.mdx new file mode 100644 index 0000000000..a1d42f08f9 --- /dev/null +++ b/docs/docs/sync/reference/config.mdx @@ -0,0 +1,66 @@ +--- +title: Sync configuration file +--- +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +# Sync instance configuration file + +The configuration file allows you to define the ressources needs for the sync. +The file should be formatted as a Yaml file, have the filename `config.yml`. During the generation phase, the Sync adapters, and models will be generated in the same folder. + +The following settings can be defined: + +## Configuration fields + +Describes the overall synchronization configuration. + +| Property | Type | Description | Mandatory | +| -------- | ---- | ----------- | --------- | +| name | string | Unique identifier for the sync instance. | Yes | +| store | SyncStore | Configuration for the optional storage mechanism. | No | +| source | SyncAdapter | Configuration for the source adapter. | Yes | +| destination | SyncAdapter | Configuration for the destination adapter. | Yes | +| order | List of strings | Specifies the order in which objects should be synchronized. | Yes | +| schema_mapping | List of SchemaMappingModel | Defines how data is mapped from source to destination. | Yes | + +### Sync store + +Optional configuration for a storage mechanism used for stateful synchronization. + +| Property | Type | Description | Mandatory | +| -------- | ---- | ----------- | --------- | +| type | string | Type of the store (for example `redis`). | Yes | +| settings | Dictionary | Connection details and other settings for the store. | No | + + +### Source and Destination + +Configuration for source and destination adapters. + +| Property | Type | Description | Mandatory | +| -------- | ---- | ----------- | --------- | +| name | string | Identifier for the adapter. | Yes | +| settings | Dictionary | Adapter-specific settings like `url` and `token`. If not provided, values will be loaded from environment variables. 
| No | + +### Schema Mapping + +Defines the mappings from source to destination schemas. + +#### Mapping models + +| Property | Type | Description | Mandatory | +| -------- | ---- | ----------- | --------- | +| name | string | Infrahub model name to be mapped. | Yes | +| mapping | string | The source's schema mapping (API) path. | Yes | +| identifiers | List of strings | Fields used to uniquely identify an object. | Yes | +| fields | List of SchemaMappingField | Describes individual field mappings or transformations. | Yes | + +##### Mapping field + +| Property | Type | Description | Mandatory | +| -------- | ---- | ----------- | --------- | +| name | string | Name of the field. | Yes | +| mapping | string | How the field is mapped from source to destination. | No | +| static | Any | A static value to assign to the field, used if `mapping` is not provided. | No | +| reference | string | Reference to another object in the configuration, used if direct mapping is not applicable. | No | diff --git a/docs/docs/topics/event-handling.mdx b/docs/docs/topics/event-handling.mdx new file mode 100644 index 0000000000..9a0e7eede6 --- /dev/null +++ b/docs/docs/topics/event-handling.mdx @@ -0,0 +1,12 @@ +--- +title: Bus Event handling +--- + +# Bus event handling + +:::warning Under Construction + +This page is still under construction and is not available yet.
+Please reach out in Slack if you have any questions about the **Bus events**.
+
+:::
\ No newline at end of file
diff --git a/docs/docs/topics/generator.mdx b/docs/docs/topics/generator.mdx
new file mode 100644
index 0000000000..73f6768b31
--- /dev/null
+++ b/docs/docs/topics/generator.mdx
@@ -0,0 +1,31 @@
+---
+title: Generator
+---
+
+# Generator
+
+A `Generator` is a generic plugin that queries data and creates new nodes and relationships based on the result.
+
+:::success Examples
+
+- Within your [schema](schema) you could create an abstract service object that, through a Generator, creates other nodes.
+
+:::
+
+## High level design
+
+Generators are defined as a **generator definition** within an [.infrahub.yml](infrahub-yml) file. A Generator definition consists of a number of related objects:
+
+- Group of targets
+- Generator class
+- GraphQL Query
+
+![](../media/topics/generator/generator_overview.excalidraw.svg)
+
+Running a generator definition will create new nodes as defined by the generator, or remove old ones that are no longer required. The removal of obsolete objects is handled using the [SDK tracking feature](/python-sdk/topics/tracking).
+
+The targets point to a group that will consist of objects that are impacted by the generator. The members of this group can be any type of object within your schema: service objects, devices, contracts, or anything else you want the generator to act upon.
+
+The [GraphQL query](graphql) defines the data that will be collected when running the generator. Any object identified in this step is added as a member of a GraphQL query group. The membership in these groups is then used to determine which generators need to be executed as part of a proposed change during the pipeline run.
+
+The Generator itself is a Python class that is based on the `InfrahubGenerator` class from the SDK. Just like [transforms](transformation) and [checks](check), the Generators are user defined.
\ No newline at end of file
diff --git a/docs/docs/topics/graphql.mdx b/docs/docs/topics/graphql.mdx
index 6ada1b90e5..334d584716 100644
--- a/docs/docs/topics/graphql.mdx
+++ b/docs/docs/topics/graphql.mdx
@@ -6,8 +6,8 @@ title: GraphQL queries
 
 The GraphQL interface is the main interface to interact with Infrahub. The GraphQL schema is automatically generated based on the core models and the user-defined schema models.
 
-The endpoint to interact with the main branch is accessible at `https://:host/graphql`.
-To interact with a branch the URL must include the name of the branch, such as `https://:host/graphql/:branch_name`.
+The endpoint to interact with the main branch is accessible at `https://<host>/graphql`.
+To interact with a branch the URL must include the name of the branch, such as `https://<host>/graphql/<branch_name>`.
 ## Query & mutations
diff --git a/docs/docs/topics/hardware-requirements.mdx b/docs/docs/topics/hardware-requirements.mdx
index 5746f5a460..fe7fee1684 100644
--- a/docs/docs/topics/hardware-requirements.mdx
+++ b/docs/docs/topics/hardware-requirements.mdx
@@ -4,9 +4,9 @@ title: Hardware requirements
 
 The system on which you want to run Infrahub, has to meet the following hardware requirements:
 
-| Level       | CPU Cores | RAM  |
-|-------------|-----------|------|
-| Minimum     | 6         | 12GB |
-| Recommended | 8         | 16GB |
+| Level       | CPU Cores | RAM  | Storage / database (Neo4j) |
+|-------------|-----------|------|----------------------------|
+| Minimum     | 6         | 12GB | SSD and/or >= 5000 IOPS    |
+| Recommended | 8         | 16GB | SSD and/or >= 5000 IOPS    |
 
 If you only want to give Infrahub a try, or follow the [getting started tutorial](/tutorials/getting-started/), then you can use [GitHub Codespaces](/guides/installation#github-codespaces) as an alternative.
diff --git a/docs/docs/topics/ipam.mdx b/docs/docs/topics/ipam.mdx
new file mode 100644
index 0000000000..9a1b847b96
--- /dev/null
+++ b/docs/docs/topics/ipam.mdx
@@ -0,0 +1,101 @@
+---
+title: IP address management
+---
+
+# IP address management
+
+IP address management, also known as IPAM, is a critical part of any infrastructure involving IP network and address allocations. Depending on the scale of networks, it can be challenging to keep track of all IP resources, how they are used, and which ones remain available.
+
+:::info
+
+Building an IPAM serves a different purpose than using the `IPHost` or `IPNetwork` attribute kinds in other node definitions. In fact, the IPAM feature leverages those attribute kinds.
+
+:::
+
+## IPAM generics
+
+To keep things extensible, Infrahub provides generics that users can inherit from in their own schemas to build their own IPAM:
+
+- `BuiltinIPNamespace`: used to model a namespace to manage IP resources; this is a generic representation of what could be, for example, a routing table, a routing instance or a VRF
+- `BuiltinIPPrefix`: used to model a network, sometimes referred to as a supernet or subnet
+- `BuiltinIPAddress`: used to model a single IP address
+
+By default, Infrahub comes with a node inheriting from the `BuiltinIPNamespace` generic. This node is called `IpamNamespace`. An object of this kind, called "default", is also automatically created when starting Infrahub for the first time.
+
+## Building an IPAM
+
+As mentioned in the previous section, an IPAM namespace is already provided with Infrahub, so there is no need to redefine it unless more attributes are required. The default implementation is very minimal and its attributes only include a name and a description.
+
+The schema below defines two nodes: one for IP prefixes (`IpamIPPrefix`) and one for IP addresses (`IpamIPAddress`). Both of these nodes inherit from the built-in generics.
+
+```yaml title="Schema definition implementing IPAM generics"
+# yaml-language-server: $schema=https://schema.infrahub.app/develop/schema.schema.json
+---
+version: "1.0"
+nodes:
+  - name: "IPPrefix"
+    namespace: "Ipam"
+    inherit_from:
+      - "BuiltinIPPrefix"
+    description: "IPv4 or IPv6 network"
+    icon: "mdi:ip-network"
+    label: "IP Prefix"
+    menu_placement: "IpamNamespace"
+  - name: "IPAddress"
+    namespace: "Ipam"
+    inherit_from:
+      - "BuiltinIPAddress"
+    description: "IPv4 or IPv6 address"
+    icon: "mdi:ip"
+    label: "IP Address"
+    menu_placement: "IpamNamespace"
+```
+
+### How IPAM works
+
+IPAM generics and nodes that inherit from them have relationships and a hierarchy. This means that an IP prefix can be related to other prefixes (as parent or as child), an IP address can be related to an IP prefix, and, finally, both of these objects are related to an IP namespace.
+
+To simplify day-to-day usage and avoid many manual operations via the user interface or the GraphQL API, relationships for IP prefixes and IP addresses are automatically managed. This implies that when an IP prefix is created, relations with a parent prefix, children prefixes and IP addresses that belong to it will be discovered automatically. The same goes when an IP address is created: the most specific prefix it belongs to will be discovered automatically. This will result in trees of IP prefixes and IP addresses being built. Building these hierarchies/trees allows Infrahub to determine how IP prefixes and IP addresses are nested as well as computing the utilization of the recorded IP spaces.
+
+### Prefix utilization
+
+By default, IP prefixes have a read-only field called `utilization`. This field value is computed on the fly based on the value of the `member_type` field. Member type refers to the kind of objects that an IP prefix serves as a container of. It can be either other prefixes or IP addresses.
+
+When the member type value is set to "prefix", the utilization of the IP prefix will be computed using the children prefixes and their sizes. For example, if `192.0.2.0/24` has one subnet, `192.0.2.0/26`, its utilization will report 25%.
+
+When the member type value is set to "address", the utilization of the IP prefix will be computed using the number of IP addresses it contains. For example, subnet `192.0.2.0/26` can have up to 62 IP addresses once network and broadcast addresses are excluded. So if this subnet has 20 IP addresses, its utilization will report 32%. Broadcast and network addresses can be taken into account while computing utilization for IPv4 prefixes if the `is_pool` value is set to `true` (checkbox checked) or if the prefix's length is 31 as defined in [RFC 3021](https://datatracker.ietf.org/doc/html/rfc3021).
+
+### GraphQL queries and mutations
+
+Creating a schema inheriting the IPAM generics will generate GraphQL queries and mutations related to the defined schema. For example, the schema defined in a previous section will generate the following queries:
+
+- `IpamIPPrefix`
+- `IpamIPAddress`
+
+The following mutations will also be available after schema load:
+
+- `IpamIPPrefixCreate`
+- `IpamIPPrefixUpdate`
+- `IpamIPPrefixUpsert`
+- `IpamIPPrefixDelete`
+- `IpamIPAddressCreate`
+- `IpamIPAddressUpdate`
+- `IpamIPAddressUpsert`
+- `IpamIPAddressDelete`
+
+As mentioned previously, IP prefixes are organised as a hierarchy. Hierarchical nodes have special relationships called `parent`, `children`, `ancestors` and `descendants`. These relationships can be used in the queries. However, in the case of IPAM, they cannot be used in mutations because Infrahub manages them automatically.
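+
+As an illustration, below is a minimal, hedged sketch of a query that combines these hierarchical relationships with the read-only attributes listed below. It assumes the `IpamIPPrefix` kind from the schema above, that the prefix value lives in a `prefix` attribute, and the usual Infrahub `edges`/`node` wrapping; adapt the field names to your generated schema.
+
+```graphql
+# Hypothetical example for the IpamIPPrefix kind defined above. The `prefix`
+# attribute name and the edges/node wrapping follow Infrahub's usual
+# conventions and may differ in your generated schema.
+query {
+  IpamIPPrefix {
+    edges {
+      node {
+        prefix { value }
+        member_type { value }
+        utilization { value }
+        children {
+          edges {
+            node {
+              prefix { value }
+            }
+          }
+        }
+      }
+    }
+  }
+}
+```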
However, in the case of IPAM, they cannot be used in mutations because Infrahub manages them automatically. + +IP prefixes also have default read-only attributes which can be queried and provide details about a particular network. These attributes are: + +- `utilization`, utilization of an IP prefix in percent +- `netmask`, network mask of an IP prefix like `255.255.255.0` +- `hostmask`, host mask of an IP prefix like `0.0.0.255` +- `network_address`, network address of an IP prefix, usually the first IP address of the prefix +- `broadcast_address`, broadcast address of an IP prefix, usually the last IP address of the prefix + +## Known limitations + +This is a non-exhaustive list of known issues and limitations of Infrahub IPAM. These will be addressed in future releases. + +- [2955](https://github.com/opsmill/infrahub/issues/2955) There is no way to programmatically determine the next available IP prefix in a namespace or an IP prefix +- [2954](https://github.com/opsmill/infrahub/issues/2954) There is no way to programmatically determine the next available IP address in a namespace or an IP prefix \ No newline at end of file diff --git a/docs/docs/topics/local-demo-environment.mdx b/docs/docs/topics/local-demo-environment.mdx index 26d2b24203..7329353eef 100644 --- a/docs/docs/topics/local-demo-environment.mdx +++ b/docs/docs/topics/local-demo-environment.mdx @@ -11,13 +11,11 @@ It's designed to be controlled by `invoke` using a list of predefined commands. | Command | Description | | ------------------------ | ------------------------------------------------------------------------- | | `demo.build` | Build an image with the provided name and Python version. | -| `demo.init` | (deprecated) Initialize Infrahub database before using it the first time. | | `demo.start` | Start a local instance of Infrahub within docker compose. | | `demo.stop` | Stop the running instance of Infrahub. | | `demo.destroy` | Destroy all containers and volumes. | | `demo.cli-git` | Launch a bash shell inside the running Infrahub container. | | `demo.cli-server` | Launch a bash shell inside the running Infrahub container. | -| `demo.debug` | Start a local instance of Infrahub in debug mode. | | `demo.status` | Display the status of all containers. | | `demo.load-infra-schema` | Load the `infrastructure_base` schema into Infrahub. | | `demo.load-infra-data` | Generate some data representing a small network with 6 devices. | diff --git a/docs/docs/topics/object-storage.mdx b/docs/docs/topics/object-storage.mdx index 07fb99d663..53ed5eaefa 100644 --- a/docs/docs/topics/object-storage.mdx +++ b/docs/docs/topics/object-storage.mdx @@ -10,7 +10,7 @@ Currently, Infrahub only supports a local backend. The goal over time is to supp Currently, the main interface to interact with the object storage is the REST API. 3 methods are supported: -- GET /api/storage/object/:identifier +- GET /api/storage/object/\ - POST /api/storage/upload/content - POST /api/storage/upload/file diff --git a/docs/docs/topics/profiles.mdx b/docs/docs/topics/profiles.mdx new file mode 100644 index 0000000000..766ac6a70e --- /dev/null +++ b/docs/docs/topics/profiles.mdx @@ -0,0 +1,60 @@ +--- +title: Profiles +--- + +# Profiles + +A profile in Infrahub allows you to define a common set of attributes that should be applied to nodes. + +A node that has a profile assigned will inherit the attribute values of the assigned profile for any attribute that has no value defined at the node, or for which the default value is used.
+ +The attribute values of a node that were inherited from a profile can be overridden by defining them at the node. + +## Use cases + +Here are a couple of scenarios and use cases where profiles can be used: + +### Interface profiles + +A common scenario is that you want to reuse the same interface configuration for multiple interfaces. An example of this could be a user-facing switch port. All of these ports would need to be in access mode and have a particular VLAN assigned to them. + +In that case we could create an interface profile with the name "user-interface" that defines the mode of the interface and the untagged VLAN to be used. This profile can then be applied to all the interfaces in Infrahub that will be used to connect users. This ensures that all these interfaces will have the same value for the mode and untagged VLAN. + +### BGP session profiles + +Internal BGP sessions within a network typically contain multiple common configuration options. Profiles can be used to gather these common attributes. The profile can be applied to all the internal BGP sessions to ensure they use the same configuration settings. + +## Current limitations + +- Profiles can only be statically assigned to nodes. In a future release you will be able to dynamically assign profiles to nodes. +- Profiles can only define attributes; it is not yet possible to define relationships +- Profiles can only define attributes that are not part of a unique constraint in the Node schema +- Required attributes of a Node must have a default value defined in the schema if you want to define these attributes using profiles + +## Dynamic generation + +For every node that is defined in the schema, a profile schema will be dynamically generated every time a schema is loaded into Infrahub. Profile schemas cannot be created manually. + +This means that an instance of a profile is always specific to a kind of Node and cannot be used for another kind of Node. + +The kind of the generated profile is the node's kind prefixed with `Profile`. For example, the profile kind for a node kind `InfraInterface` will be `ProfileInfraInterface`. + +Infrahub will also generate the GraphQL query and mutations to be able to perform CRUD operations on the profile. For the same example of `InfraInterface` the following GraphQL query and mutations would be generated (a usage sketch is shown at the end of this topic): + +| Type | Name | |----------|-----------------------------| | query | ProfileInfraInterface | | mutation | ProfileInfraInterfaceCreate | | mutation | ProfileInfraInterfaceUpdate | | mutation | ProfileInfraInterfaceUpsert | | mutation | ProfileInfraInterfaceDelete | + +## Profile priority + +Profiles have a priority assigned to them through the `profile_priority` attribute. The priority determines the order of inheritance for attributes and relationships when a node has multiple profiles assigned to it. + +In a scenario where 2 profiles are assigned to a node, the value of an attribute will be inherited from the profile with the lowest priority value, if that profile defines a value for the attribute. If that profile does not define a value for the attribute, the profile with the next lowest priority value will be checked. + +## Data lineage + +When a node inherits the value of an attribute from a profile, the source property of that attribute will be set to the UUID of the profile it was inherited from. This allows us to identify where the value of an attribute of a node originated.
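As a rough illustration of the generated mutations listed under "Dynamic generation", here is a minimal sketch that creates a profile for a hypothetical `InfraInterface` node. The `profile_name` and `mode` attribute names, and the exact input and return shape, are assumptions based on the mutation conventions used elsewhere in this documentation; they will depend on the schema actually in use.

```graphql
# Hedged sketch: create a reusable interface profile and capture its id.
# profile_name and mode are assumed attribute names; adjust to the actual schema.
mutation {
  ProfileInfraInterfaceCreate(
    data: {
      profile_name: { value: "user-interface" }
      profile_priority: { value: 1000 }
      mode: { value: "access" }
    }
  ) {
    ok
    object {
      id
    }
  }
}
```

The returned `id` could then be used when assigning the profile to interfaces; how that assignment is expressed is not covered by this document.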
diff --git a/docs/docs/topics/readme.mdx b/docs/docs/topics/readme.mdx deleted file mode 100644 index 1423e4936d..0000000000 --- a/docs/docs/topics/readme.mdx +++ /dev/null @@ -1,19 +0,0 @@ ---- -title: Topics ---- - -# Topics - -Topics explain the concepts of Infrahub and how it works. - -- [Architecture](./architecture.mdx) -- [Artifact](./artifact.mdx) -- [Data lineage and metadata](./metadata.mdx) -- [Demo environment](./local-demo-environment.mdx) -- [GraphQL queries](./graphql.mdx) -- [Object storage](./object-storage.mdx) -- [Proposed change](./proposed-change.mdx) -- [Schema](./schema.mdx) -- [Transformation](./transformation.mdx) -- [User management and authentication](./auth.mdx) -- [Resources testing framework](./resources-testing-framework.mdx) \ No newline at end of file diff --git a/docs/docs/topics/repository.mdx b/docs/docs/topics/repository.mdx index 9457b911f1..08c6a49c5a 100644 --- a/docs/docs/topics/repository.mdx +++ b/docs/docs/topics/repository.mdx @@ -2,7 +2,9 @@ title: Repository --- -# Summary +# Repository + +## Summary Infrahub supports two different types of connections to external Git repositories diff --git a/docs/docs/topics/resources-testing-framework.mdx b/docs/docs/topics/resources-testing-framework.mdx index 143a64bb7d..3d9fc9295c 100644 --- a/docs/docs/topics/resources-testing-framework.mdx +++ b/docs/docs/topics/resources-testing-framework.mdx @@ -45,7 +45,7 @@ infrahub_tests: kind: TypeOfTest ``` -All availables keys and their values are listed in the tests configuration file [reference](/reference/infrahub-tests). +All available keys and their values are listed in the tests configuration file [reference](/reference/infrahub-tests). ## Real life example @@ -105,7 +105,7 @@ infrahub_tests: kind: python-transform-smoke ``` -To run these tests via a command line, we will simply use `pytest`, it is installed with the Infrahub SDK. +To run these tests via the command line, we will use `pytest`, which is installed with the Infrahub SDK. ```shell pytest tests # tests is the directory containing the files @@ -144,7 +144,7 @@ This means that the Infrahub Git agent will take care of running the `pytest` pr :::note -This section is intented for an audience with advanced knowledge of testing with pytest. It is not a mandatory to know this in order to use the testing framework. +This section is intended for an audience with advanced knowledge of testing with pytest. Knowing this is not required in order to use the testing framework. ::: diff --git a/docs/docs/topics/schema.mdx b/docs/docs/topics/schema.mdx index fa7ae1f4a2..921ba159c0 100644 --- a/docs/docs/topics/schema.mdx +++ b/docs/docs/topics/schema.mdx @@ -16,7 +16,7 @@ Unlike traditional databases that can only have one schema at a time, in Infrahu There is several way to [load a new schema](/guides/schema). -## Schema Definition +## Schema definition ### Namespace, Node, Attributes, Relationships & Generics @@ -408,14 +408,14 @@ Schema files can be loaded into Infrahub with the `infrahubctl` command or direc The `infrahubctl` command can be used to check & load individual schema files or multiple files as part of a directory. -The `infrahub schema check` command will validate if a given schema is valid and it will return a summary of the changes +The `infrahubctl schema check` command will validate that a given schema is valid and return a summary of the changes that would be applied if the schema was loaded.
```bash infrahubctl schema check [--branch ] ``` -The `infrahub schema load` command will load the schemas into infrahub into the specified branch. +The `infrahubctl schema load` command will load the schemas into the specified branch in Infrahub. ```bash infrahubctl schema load [--branch ] @@ -440,7 +440,7 @@ schemas: - schemas/demo_edge_fabric.yml ``` -## Schema Update & Data Migrations +## Schema update and data migrations Unlike most databases that support a single schema at a time, in Infrahub it is possible to have a different schema per branch. This is possible because the schema itself is stored in the database like any other object. @@ -461,16 +461,16 @@ the recommendation is to create a new branch and to integrate the changes into t ::: -### Isolated Mode +### Isolated mode When a new schema is loaded into a branch, the branch will automatically be converted into isolated mode in order to apply the required data migrations for this branch. A branch rebase will be required to bring the latest changes from main into the branch. -### State : Absent or Present +### State: absent or present The format of the schema is declarative and incremental to allow schema to be composed from multiple sources. -To remove an existing Node, Generic, Attribute or Relationship from the schema it is required to use the `state: absent` flag to explicitely indicate that this element must be deleted. +To remove an existing Node, Generic, Attribute or Relationship from the schema it is required to use the `state: absent` flag to explicitly indicate that this element must be deleted. Without this flag the element will be ignored but it will not be removed. ```yaml {3} showLineNumbers @@ -528,16 +528,16 @@ The internal identifier should be an UUID4, with 36 characters.
-### Restrictions & Limitations +### Restrictions and limitations Most changes to an existing schema are supported but there are a few properties that can't be updated in the current release: -- Properties that can't be updated on a Node or a Generic : `branch` -- Properties that can't be updated on an Attribute : `branch` -- Properties that can't be updated on an Relationship : `branch`, `direction`, `hierarchical` +- Properties that can't be updated on a Node or a Generic: `branch` +- Properties that can't be updated on an Attribute: `branch` +- Properties that can't be updated on a Relationship: `branch`, `direction`, `hierarchical` [A reference guide for validators and migrations is available](/reference/schema/validator-migration) -### Known Limitations +### Known limitations - When updating the identifier on a relationship, the existing relationships won't be impacted or updated even if it was the last relationship using a given identifier. [ISSUE-2472](https://github.com/opsmill/infrahub/issues/2472) diff --git a/docs/docs/topics/transformation.mdx b/docs/docs/topics/transformation.mdx index c2a3cff9de..7689cb2c1a 100644 --- a/docs/docs/topics/transformation.mdx +++ b/docs/docs/topics/transformation.mdx @@ -20,7 +20,7 @@ The output of a transformation can be either in JSON format or in plain text. A transformation is composed of 2 main components: -- A **GraphQL query** that will define what the input data. +- A **GraphQL query** that will define what the input data is. - A **Transformation logic** that will process the data and transform it. ![](../media/transformation.excalidraw.svg) @@ -67,7 +67,7 @@ A Jinja2 Transform can be rendered with 3 different methods: ### TransformPython (Python plugin) -A `TransformaPython` is a transformation plugin written in Python. It can generate any dataset in JSON format and must be composed of 1 main Python Class and 1 GraphQL Query. +A `TransformPython` is a transformation plugin written in Python. It can generate any dataset in JSON format and must be composed of 1 main Python Class and 1 GraphQL Query. #### Create a Python transform @@ -77,11 +77,11 @@ Please refer to the guide [Creating a Python transform](/guides/python-transform #### Render a TransformPython -An TransformPython can be rendered with 2 different methods: +A TransformPython can be rendered with 2 different methods: - On demand via the REST API - As part of an [Artifact](./artifact.mdx) -- In CLI for development and troubleshooting [infrahubctl transform](/infrahubctl/infrahubctl-transform) +- Via the CLI for development and troubleshooting: [infrahubctl transform](/infrahubctl/infrahubctl-transform) ## Unit testing for transformation diff --git a/docs/docs/topics/version-control.mdx b/docs/docs/topics/version-control.mdx index 5286af2ccf..93de3bfcf2 100644 --- a/docs/docs/topics/version-control.mdx +++ b/docs/docs/topics/version-control.mdx @@ -33,9 +33,9 @@ A branch provide a safe environment to prepare and validate a change before inte Branches are meant to be short lived, from a few seconds to a few weeks, and are hierarchical by nature which means that a branch can only be merged into the branch it was created from. > Currently only a single level of hierarchy is supported, meaning that all branches must be created from the default branch and be merged into the default branch. - + ### Sync and Isolated mode - + Unlike Git, by default, a branch in Infrahub will automatically stay in sync with the default branch, unless the branch is currently in isolated mode.
A branch in isolated mode will have a similar behavior as branches in Git and a rebase will be required to integrate the latest changes from the default branch. @@ -136,7 +136,7 @@ After the merge operation, all changes will be accessible in main at the time of :::info -It's recommended to use a Proposed Change to merge a branch to guarantee that all changes are valid and compliants. +Using a Proposed Change to merge a branch is recommended to guarantee that all changes are valid and compliant. ::: diff --git a/docs/docs/tutorials/getting-started/branches.mdx b/docs/docs/tutorials/getting-started/branches.mdx index 44170f39f7..c452e3cc67 100644 --- a/docs/docs/tutorials/getting-started/branches.mdx +++ b/docs/docs/tutorials/getting-started/branches.mdx @@ -162,3 +162,9 @@ Go back to the detailed page for the Organization `my-first-org`. **The object should now have the value previously defined in the branch. Try switching between the `main` branch and `cr1234`.** ::: + +:::info Proposed Change + +For an in-depth understanding of Infrahub's approach to handling differences between branches and merging them, please consult the [proposed change topic](/topics/proposed-change). + +::: \ No newline at end of file diff --git a/docs/docs/tutorials/getting-started/creating-an-object.mdx b/docs/docs/tutorials/getting-started/creating-an-object.mdx index 3f3fa03bda..973d14e222 100644 --- a/docs/docs/tutorials/getting-started/creating-an-object.mdx +++ b/docs/docs/tutorials/getting-started/creating-an-object.mdx @@ -2,16 +2,16 @@ label: Creating an object --- -# Create a new organization +# Create a new tenant We need to create an object in Infrahub that will be used later in the tutorial. -Navigate to the Organization object in the left sidebar of the frontend. +Navigate to the Tenant object in the left sidebar of the frontend. :::note The demo environment is configured to allow anonymous read-only access to the system. In order to modify data you have to sign in. Before you sign in you will notice that -the plus button in the Organization list is grayed out. After signing in with a user +the plus button in the Tenant list is grayed out. After signing in with a user that has at least write access you will be able to add objects. ::: @@ -19,7 +19,7 @@ that has at least write access you will be able to add objects. Click on the plus icon and create a new organization using the values below. ```yaml -Name: my-first-org +Name: my-first-tenant Description: Testing Infrahub ``` diff --git a/docs/docs/tutorials/getting-started/graphql-mutation.mdx b/docs/docs/tutorials/getting-started/graphql-mutation.mdx index 3870b9e034..612e6feacc 100644 --- a/docs/docs/tutorials/getting-started/graphql-mutation.mdx +++ b/docs/docs/tutorials/getting-started/graphql-mutation.mdx @@ -4,7 +4,7 @@ title: GraphQL mutation # Make changes using GraphQL -GraphQL Mutations are available to create, update or delete any objects in the database. In a REST API they are the equivalent of a the methods POST, PUT or DELETE. +GraphQL Mutations are available to create, update or delete any objects in the database. In a REST API they are the equivalent to the methods POST, PUT, or DELETE. 
:::info @@ -16,10 +16,10 @@ To execute any mutation you'll need to define a HTTP header in the GraphQL Explo ::: -```graphql # Create a new organization +```graphql # Create a new tenant # Endpoint : http://localhost:8000/graphql/main mutation { - CoreOrganizationCreate( + OrganizationTenantCreate( data: { name: { value: "Hooli" }, description: { value: "Transforming the world as we know it."} @@ -35,20 +35,20 @@ mutation { ## Add a new interface and a new IP address in the Graph -Add a new interface `Ethernet9` to the device `ord1-edge1`. +Add a new interface `Ethernet0` to the device `ord1-edge1`. ```graphql # Endpoint : http://127.0.0.1:8000/graphql/cr1234 mutation { InfraInterfaceL3Create( data: { - name: { value: "Ethernet9" } + name: { value: "Ethernet0" } enabled: { value: true } description: { value: "new interface in branch" } device: { id: "ord1-edge1" } - status: { id: "active" } + status: { value: "active" } speed: { value: 10000 } - role: { id: "spare" } + role: { value: "spare" } } ) { ok @@ -74,7 +74,7 @@ Add a new IP address connected to the new interface. mutation { InfraIPAddressCreate( data: { - interface: { id: "" }, + interface: { id: "" }, address: { value: "192.168.0.2/24" } } ) { diff --git a/docs/docs/tutorials/getting-started/graphql-query.mdx b/docs/docs/tutorials/getting-started/graphql-query.mdx index b854cb3939..2fe97c362d 100644 --- a/docs/docs/tutorials/getting-started/graphql-query.mdx +++ b/docs/docs/tutorials/getting-started/graphql-query.mdx @@ -18,7 +18,7 @@ Unlike a REST API, the format of the response is not fixed in GraphQL. It depend ## First query -The following query will return the name of the all the devices in the database. +The following query will return the name of all the devices in the database. ```graphql # First Query # Endpoint : http://localhost:8000/graphql/main diff --git a/docs/docs/tutorials/getting-started/readme.mdx b/docs/docs/tutorials/getting-started/readme.mdx index 1f64277a64..8b26d362b6 100644 --- a/docs/docs/tutorials/getting-started/readme.mdx +++ b/docs/docs/tutorials/getting-started/readme.mdx @@ -94,6 +94,10 @@ To follow the tutorial you should use the `admin` account but you can try the ot | David Palmer | `David Palmer` | `Password123` | read-write | | Jack Bauer | `Jack Bauer` | `Password123` | read-only | +## Access the Infrahub interfaces + +Confirm access to the Infrahub web interface and the GraphQL interface by visiting the following links: + diff --git a/docs/docs/tutorials/getting-started/schema.mdx b/docs/docs/tutorials/getting-started/schema.mdx index 1c58cd0467..96b7699db3 100644 --- a/docs/docs/tutorials/getting-started/schema.mdx +++ b/docs/docs/tutorials/getting-started/schema.mdx @@ -2,7 +2,12 @@ title: Extend the schema --- import CodeBlock from '@theme/CodeBlock'; -import infrastructureBaseYaml from '!!raw-loader!../../../../models/infrastructure_base.yml'; +import dcimYaml from '!!raw-loader!../../../../models/base/dcim.yml'; +import ipamYaml from '!!raw-loader!../../../../models/base/ipam.yml'; +import locationYaml from '!!raw-loader!../../../../models/base/location.yml'; +import organizationYaml from '!!raw-loader!../../../../models/base/organization.yml'; +import routingYaml from '!!raw-loader!../../../../models/base/routing.yml'; + import ReferenceLink from "../../../src/components/Card"; # Extend the schema @@ -27,11 +32,15 @@ You can explore the current schema by visiting the schema page, you can find it In order to model a network, we need to extend the current models to capture more 
information like: `Device`, `Interface`, `IPAddress`, `BGPSession`, `Location`, `Role`, `Status` etc. -A "base" schema with these types of models and more is available in the `models/` directory +A "base" schema with these types of models and more is available in the `models/base` directory
Infrastructure Base Schema - {infrastructureBaseYaml} + {dcimYaml} + {ipamYaml} + {locationYaml} + {organizationYaml} + {routingYaml}
Use the following command to load these new models into Infrahub @@ -45,7 +54,14 @@ invoke demo.load-infra-schema ```bash > invoke demo.load-infra-schema --- abbreviated --- - schema 'models/infrastructure_base.yml' loaded successfully in 12.352 sec! + schema 'models/base/dcim.yml' loaded successfully + schema 'models/base/ipam.yml' loaded successfully + schema 'models/base/location.yml' loaded successfully + schema 'models/base/organization.yml' loaded successfully + schema 'models/base/routing.yml' loaded successfully + 5 schemas processed in 26.640 seconds. + Waiting for schema to sync across all workers + Schema updated on all workers. --- abbreviated --- [+] Restarting 1/1 ✔ Container infrahub-infrahub-server-1 Started 1.5s @@ -80,7 +96,7 @@ invoke demo.load-infra-data [13:27:43] INFO - Replaced jfk1-edge1-Ethernet1 IP to 10.1.0.32/31 infrastructure_edge.py:678 INFO - Replaced jfk1-edge2-Ethernet1 IP to 10.1.0.33/31 infrastructure_edge.py:687 INFO Create a new Branch and Delete Colt Transit Circuit infrastructure_edge.py:694 - INFO - Creating branch: 'atl1-delete-transit' infrastructure_edge.py:699 + INFO - Creating branch: 'atl1-delete-upstream' infrastructure_edge.py:699 [13:27:47] INFO - Deleted Colt [DUFF-cf3a6ed2d959] infrastructure_edge.py:752 INFO - Deleted Colt [DUFF-4141a7be1f9a] infrastructure_edge.py:752 INFO Create a new Branch and introduce some conflicts infrastructure_edge.py:759 diff --git a/docs/package-lock.json b/docs/package-lock.json index 25ff73bd0e..ea56b46bd4 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -8,8 +8,8 @@ "name": "infrahub-docs", "version": "0.0.0", "dependencies": { - "@docusaurus/core": "3.1.1", - "@docusaurus/preset-classic": "3.1.1", + "@docusaurus/core": "^3.2.1", + "@docusaurus/preset-classic": "^3.2.1", "@easyops-cn/docusaurus-search-local": "^0.40.1", "@mdx-js/react": "^3.0.0", "clsx": "^2.0.0", @@ -19,10 +19,10 @@ "react-dom": "^18.0.0" }, "devDependencies": { - "@docusaurus/eslint-plugin": "^3.1.1", - "@docusaurus/module-type-aliases": "3.1.1", - "@docusaurus/tsconfig": "3.1.1", - "@docusaurus/types": "3.1.1", + "@docusaurus/eslint-plugin": "^3.2.1", + "@docusaurus/module-type-aliases": "^3.2.1", + "@docusaurus/tsconfig": "^3.2.1", + "@docusaurus/types": "^3.2.1", "@typescript-eslint/eslint-plugin": "^6.20.0", "@typescript-eslint/parser": "^6.20.0", "eslint": "^8.56.0", @@ -83,74 +83,74 @@ } }, "node_modules/@algolia/cache-browser-local-storage": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.22.1.tgz", - "integrity": "sha512-Sw6IAmOCvvP6QNgY9j+Hv09mvkvEIDKjYW8ow0UDDAxSXy664RBNQk3i/0nt7gvceOJ6jGmOTimaZoY1THmU7g==", + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.23.3.tgz", + "integrity": "sha512-vRHXYCpPlTDE7i6UOy2xE03zHF2C8MEFjPN2v7fRbqVpcOvAUQK81x3Kc21xyb5aSIpYCjWCZbYZuz8Glyzyyg==", "dependencies": { - "@algolia/cache-common": "4.22.1" + "@algolia/cache-common": "4.23.3" } }, "node_modules/@algolia/cache-common": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.22.1.tgz", - "integrity": "sha512-TJMBKqZNKYB9TptRRjSUtevJeQVXRmg6rk9qgFKWvOy8jhCPdyNZV1nB3SKGufzvTVbomAukFR8guu/8NRKBTA==" + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.23.3.tgz", + "integrity": 
"sha512-h9XcNI6lxYStaw32pHpB1TMm0RuxphF+Ik4o7tcQiodEdpKK+wKufY6QXtba7t3k8eseirEMVB83uFFF3Nu54A==" }, "node_modules/@algolia/cache-in-memory": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.22.1.tgz", - "integrity": "sha512-ve+6Ac2LhwpufuWavM/aHjLoNz/Z/sYSgNIXsinGofWOysPilQZPUetqLj8vbvi+DHZZaYSEP9H5SRVXnpsNNw==", + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.23.3.tgz", + "integrity": "sha512-yvpbuUXg/+0rbcagxNT7un0eo3czx2Uf0y4eiR4z4SD7SiptwYTpbuS0IHxcLHG3lq22ukx1T6Kjtk/rT+mqNg==", "dependencies": { - "@algolia/cache-common": "4.22.1" + "@algolia/cache-common": "4.23.3" } }, "node_modules/@algolia/client-account": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.22.1.tgz", - "integrity": "sha512-k8m+oegM2zlns/TwZyi4YgCtyToackkOpE+xCaKCYfBfDtdGOaVZCM5YvGPtK+HGaJMIN/DoTL8asbM3NzHonw==", + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.23.3.tgz", + "integrity": "sha512-hpa6S5d7iQmretHHF40QGq6hz0anWEHGlULcTIT9tbUssWUriN9AUXIFQ8Ei4w9azD0hc1rUok9/DeQQobhQMA==", "dependencies": { - "@algolia/client-common": "4.22.1", - "@algolia/client-search": "4.22.1", - "@algolia/transporter": "4.22.1" + "@algolia/client-common": "4.23.3", + "@algolia/client-search": "4.23.3", + "@algolia/transporter": "4.23.3" } }, "node_modules/@algolia/client-analytics": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.22.1.tgz", - "integrity": "sha512-1ssi9pyxyQNN4a7Ji9R50nSdISIumMFDwKNuwZipB6TkauJ8J7ha/uO60sPJFqQyqvvI+px7RSNRQT3Zrvzieg==", + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.23.3.tgz", + "integrity": "sha512-LBsEARGS9cj8VkTAVEZphjxTjMVCci+zIIiRhpFun9jGDUlS1XmhCW7CTrnaWeIuCQS/2iPyRqSy1nXPjcBLRA==", "dependencies": { - "@algolia/client-common": "4.22.1", - "@algolia/client-search": "4.22.1", - "@algolia/requester-common": "4.22.1", - "@algolia/transporter": "4.22.1" + "@algolia/client-common": "4.23.3", + "@algolia/client-search": "4.23.3", + "@algolia/requester-common": "4.23.3", + "@algolia/transporter": "4.23.3" } }, "node_modules/@algolia/client-common": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.22.1.tgz", - "integrity": "sha512-IvaL5v9mZtm4k4QHbBGDmU3wa/mKokmqNBqPj0K7lcR8ZDKzUorhcGp/u8PkPC/e0zoHSTvRh7TRkGX3Lm7iOQ==", + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.23.3.tgz", + "integrity": "sha512-l6EiPxdAlg8CYhroqS5ybfIczsGUIAC47slLPOMDeKSVXYG1n0qGiz4RjAHLw2aD0xzh2EXZ7aRguPfz7UKDKw==", "dependencies": { - "@algolia/requester-common": "4.22.1", - "@algolia/transporter": "4.22.1" + "@algolia/requester-common": "4.23.3", + "@algolia/transporter": "4.23.3" } }, "node_modules/@algolia/client-personalization": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.22.1.tgz", - "integrity": "sha512-sl+/klQJ93+4yaqZ7ezOttMQ/nczly/3GmgZXJ1xmoewP5jmdP/X/nV5U7EHHH3hCUEHeN7X1nsIhGPVt9E1cQ==", + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.23.3.tgz", + "integrity": "sha512-3E3yF3Ocr1tB/xOZiuC3doHQBQ2zu2MPTYZ0d4lpfWads2WTKG7ZzmGnsHmm63RflvDeLK/UVx7j2b3QuwKQ2g==", "dependencies": { - 
"@algolia/client-common": "4.22.1", - "@algolia/requester-common": "4.22.1", - "@algolia/transporter": "4.22.1" + "@algolia/client-common": "4.23.3", + "@algolia/requester-common": "4.23.3", + "@algolia/transporter": "4.23.3" } }, "node_modules/@algolia/client-search": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.22.1.tgz", - "integrity": "sha512-yb05NA4tNaOgx3+rOxAmFztgMTtGBi97X7PC3jyNeGiwkAjOZc2QrdZBYyIdcDLoI09N0gjtpClcackoTN0gPA==", + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.23.3.tgz", + "integrity": "sha512-P4VAKFHqU0wx9O+q29Q8YVuaowaZ5EM77rxfmGnkHUJggh28useXQdopokgwMeYw2XUht49WX5RcTQ40rZIabw==", "dependencies": { - "@algolia/client-common": "4.22.1", - "@algolia/requester-common": "4.22.1", - "@algolia/transporter": "4.22.1" + "@algolia/client-common": "4.23.3", + "@algolia/requester-common": "4.23.3", + "@algolia/transporter": "4.23.3" } }, "node_modules/@algolia/events": { @@ -159,47 +159,65 @@ "integrity": "sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ==" }, "node_modules/@algolia/logger-common": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.22.1.tgz", - "integrity": "sha512-OnTFymd2odHSO39r4DSWRFETkBufnY2iGUZNrMXpIhF5cmFE8pGoINNPzwg02QLBlGSaLqdKy0bM8S0GyqPLBg==" + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.23.3.tgz", + "integrity": "sha512-y9kBtmJwiZ9ZZ+1Ek66P0M68mHQzKRxkW5kAAXYN/rdzgDN0d2COsViEFufxJ0pb45K4FRcfC7+33YB4BLrZ+g==" }, "node_modules/@algolia/logger-console": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.22.1.tgz", - "integrity": "sha512-O99rcqpVPKN1RlpgD6H3khUWylU24OXlzkavUAMy6QZd1776QAcauE3oP8CmD43nbaTjBexZj2nGsBH9Tc0FVA==", + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.23.3.tgz", + "integrity": "sha512-8xoiseoWDKuCVnWP8jHthgaeobDLolh00KJAdMe9XPrWPuf1by732jSpgy2BlsLTaT9m32pHI8CRfrOqQzHv3A==", "dependencies": { - "@algolia/logger-common": "4.22.1" + "@algolia/logger-common": "4.23.3" + } + }, + "node_modules/@algolia/recommend": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-4.23.3.tgz", + "integrity": "sha512-9fK4nXZF0bFkdcLBRDexsnGzVmu4TSYZqxdpgBW2tEyfuSSY54D4qSRkLmNkrrz4YFvdh2GM1gA8vSsnZPR73w==", + "dependencies": { + "@algolia/cache-browser-local-storage": "4.23.3", + "@algolia/cache-common": "4.23.3", + "@algolia/cache-in-memory": "4.23.3", + "@algolia/client-common": "4.23.3", + "@algolia/client-search": "4.23.3", + "@algolia/logger-common": "4.23.3", + "@algolia/logger-console": "4.23.3", + "@algolia/requester-browser-xhr": "4.23.3", + "@algolia/requester-common": "4.23.3", + "@algolia/requester-node-http": "4.23.3", + "@algolia/transporter": "4.23.3" } }, "node_modules/@algolia/requester-browser-xhr": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.22.1.tgz", - "integrity": "sha512-dtQGYIg6MteqT1Uay3J/0NDqD+UciHy3QgRbk7bNddOJu+p3hzjTRYESqEnoX/DpEkaNYdRHUKNylsqMpgwaEw==", + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.23.3.tgz", + "integrity": "sha512-jDWGIQ96BhXbmONAQsasIpTYWslyjkiGu0Quydjlowe+ciqySpiDUrJHERIRfELE5+wFc7hc1Q5hqjGoV7yghw==", 
"dependencies": { - "@algolia/requester-common": "4.22.1" + "@algolia/requester-common": "4.23.3" } }, "node_modules/@algolia/requester-common": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.22.1.tgz", - "integrity": "sha512-dgvhSAtg2MJnR+BxrIFqlLtkLlVVhas9HgYKMk2Uxiy5m6/8HZBL40JVAMb2LovoPFs9I/EWIoFVjOrFwzn5Qg==" + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.23.3.tgz", + "integrity": "sha512-xloIdr/bedtYEGcXCiF2muajyvRhwop4cMZo+K2qzNht0CMzlRkm8YsDdj5IaBhshqfgmBb3rTg4sL4/PpvLYw==" }, "node_modules/@algolia/requester-node-http": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.22.1.tgz", - "integrity": "sha512-JfmZ3MVFQkAU+zug8H3s8rZ6h0ahHZL/SpMaSasTCGYR5EEJsCc8SI5UZ6raPN2tjxa5bxS13BRpGSBUens7EA==", + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.23.3.tgz", + "integrity": "sha512-zgu++8Uj03IWDEJM3fuNl34s746JnZOWn1Uz5taV1dFyJhVM/kTNw9Ik7YJWiUNHJQXcaD8IXD1eCb0nq/aByA==", "dependencies": { - "@algolia/requester-common": "4.22.1" + "@algolia/requester-common": "4.23.3" } }, "node_modules/@algolia/transporter": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.22.1.tgz", - "integrity": "sha512-kzWgc2c9IdxMa3YqA6TN0NW5VrKYYW/BELIn7vnLyn+U/RFdZ4lxxt9/8yq3DKV5snvoDzzO4ClyejZRdV3lMQ==", + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.23.3.tgz", + "integrity": "sha512-Wjl5gttqnf/gQKJA+dafnD0Y6Yw97yvfY8R9h0dQltX1GXTgNs1zWgvtWW0tHl1EgMdhAyw189uWiZMnL3QebQ==", "dependencies": { - "@algolia/cache-common": "4.22.1", - "@algolia/logger-common": "4.22.1", - "@algolia/requester-common": "4.22.1" + "@algolia/cache-common": "4.23.3", + "@algolia/logger-common": "4.23.3", + "@algolia/requester-common": "4.23.3" } }, "node_modules/@ampproject/remapping": { @@ -2143,18 +2161,18 @@ } }, "node_modules/@docsearch/css": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.5.2.tgz", - "integrity": "sha512-SPiDHaWKQZpwR2siD0KQUwlStvIAnEyK6tAE2h2Wuoq8ue9skzhlyVQ1ddzOxX6khULnAALDiR/isSF3bnuciA==" + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.6.0.tgz", + "integrity": "sha512-+sbxb71sWre+PwDK7X2T8+bhS6clcVMLwBPznX45Qu6opJcgRjAp7gYSDzVFp187J+feSj5dNBN1mJoi6ckkUQ==" }, "node_modules/@docsearch/react": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.5.2.tgz", - "integrity": "sha512-9Ahcrs5z2jq/DcAvYtvlqEBHImbm4YJI8M9y0x6Tqg598P40HTEkX7hsMcIuThI+hTFxRGZ9hll0Wygm2yEjng==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.6.0.tgz", + "integrity": "sha512-HUFut4ztcVNmqy9gp/wxNbC7pTOHhgVVkHVGCACTuLhUKUhKAF9KYHJtMiLUJxEqiFLQiuri1fWF8zqwM/cu1w==", "dependencies": { "@algolia/autocomplete-core": "1.9.3", "@algolia/autocomplete-preset-algolia": "1.9.3", - "@docsearch/css": "3.5.2", + "@docsearch/css": "3.6.0", "algoliasearch": "^4.19.1" }, "peerDependencies": { @@ -2179,9 +2197,9 @@ } }, "node_modules/@docusaurus/core": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.1.1.tgz", - "integrity": "sha512-2nQfKFcf+MLEM7JXsXwQxPOmQAR6ytKMZVSx7tVi9HEm9WtfwBH1fp6bn8Gj4zLUhjWKCLoysQ9/Wm+EZCQ4yQ==", + "version": "3.2.1", + "resolved": 
"https://registry.npmjs.org/@docusaurus/core/-/core-3.2.1.tgz", + "integrity": "sha512-ZeMAqNvy0eBv2dThEeMuNzzuu+4thqMQakhxsgT5s02A8LqRcdkg+rbcnuNqUIpekQ4GRx3+M5nj0ODJhBXo9w==", "dependencies": { "@babel/core": "^7.23.3", "@babel/generator": "^7.23.3", @@ -2193,14 +2211,13 @@ "@babel/runtime": "^7.22.6", "@babel/runtime-corejs3": "^7.22.6", "@babel/traverse": "^7.22.8", - "@docusaurus/cssnano-preset": "3.1.1", - "@docusaurus/logger": "3.1.1", - "@docusaurus/mdx-loader": "3.1.1", + "@docusaurus/cssnano-preset": "3.2.1", + "@docusaurus/logger": "3.2.1", + "@docusaurus/mdx-loader": "3.2.1", "@docusaurus/react-loadable": "5.5.2", - "@docusaurus/utils": "3.1.1", - "@docusaurus/utils-common": "3.1.1", - "@docusaurus/utils-validation": "3.1.1", - "@slorber/static-site-generator-webpack-plugin": "^4.0.7", + "@docusaurus/utils": "3.2.1", + "@docusaurus/utils-common": "3.2.1", + "@docusaurus/utils-validation": "3.2.1", "@svgr/webpack": "^6.5.1", "autoprefixer": "^10.4.14", "babel-loader": "^9.1.3", @@ -2221,6 +2238,7 @@ "detect-port": "^1.5.1", "escape-html": "^1.0.3", "eta": "^2.2.0", + "eval": "^0.1.8", "file-loader": "^6.2.0", "fs-extra": "^11.1.1", "html-minifier-terser": "^7.2.0", @@ -2229,6 +2247,7 @@ "leven": "^3.1.0", "lodash": "^4.17.21", "mini-css-extract-plugin": "^2.7.6", + "p-map": "^4.0.0", "postcss": "^8.4.26", "postcss-loader": "^7.3.3", "prompts": "^2.4.2", @@ -2265,9 +2284,9 @@ } }, "node_modules/@docusaurus/cssnano-preset": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.1.1.tgz", - "integrity": "sha512-LnoIDjJWbirdbVZDMq+4hwmrTl2yHDnBf9MLG9qyExeAE3ac35s4yUhJI8yyTCdixzNfKit4cbXblzzqMu4+8g==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.2.1.tgz", + "integrity": "sha512-wTL9KuSSbMJjKrfu385HZEzAoamUsbKqwscAQByZw4k6Ja/RWpqgVvt/CbAC+aYEH6inLzOt+MjuRwMOrD3VBA==", "dependencies": { "cssnano-preset-advanced": "^5.3.10", "postcss": "^8.4.26", @@ -2279,9 +2298,9 @@ } }, "node_modules/@docusaurus/eslint-plugin": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/eslint-plugin/-/eslint-plugin-3.1.1.tgz", - "integrity": "sha512-seOuNvqXVxM4hWs9NzUgLnlVvc0Rk9N+C5fZ5gVTk5hc5JlyJNZNWh85TU+XsLogbA5odjJSEDKOhv8SDGmWRg==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/eslint-plugin/-/eslint-plugin-3.2.1.tgz", + "integrity": "sha512-DIhptlFPvSFZm7ZyisxorEzWK6SF0SvyHTR5NzljnGSTn8lln9mlc18xYhm0zYI/TLoDRGN/fkwCMWPkxB8YVQ==", "dev": true, "dependencies": { "@typescript-eslint/utils": "^5.62.0", @@ -2395,9 +2414,9 @@ } }, "node_modules/@docusaurus/logger": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.1.1.tgz", - "integrity": "sha512-BjkNDpQzewcTnST8trx4idSoAla6zZ3w22NqM/UMcFtvYJgmoE4layuTzlfql3VFPNuivvj7BOExa/+21y4X2Q==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.2.1.tgz", + "integrity": "sha512-0voOKJCn9RaM3np6soqEfo7SsVvf2C+CDTWhW+H/1AyBhybASpExtDEz+7ECck9TwPzFQ5tt+I3zVugUJbJWDg==", "dependencies": { "chalk": "^4.1.2", "tslib": "^2.6.0" @@ -2407,15 +2426,13 @@ } }, "node_modules/@docusaurus/mdx-loader": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.1.1.tgz", - "integrity": "sha512-xN2IccH9+sv7TmxwsDJNS97BHdmlqWwho+kIVY4tcCXkp+k4QuzvWBeunIMzeayY4Fu13A6sAjHGv5qm72KyGA==", + "version": "3.2.1", + "resolved": 
"https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.2.1.tgz", + "integrity": "sha512-Fs8tXhXKZjNkdGaOy1xSLXSwfjCMT73J3Zfrju2U16uGedRFRjgK0ojpK5tiC7TnunsL3tOFgp1BSMBRflX9gw==", "dependencies": { - "@babel/parser": "^7.22.7", - "@babel/traverse": "^7.22.8", - "@docusaurus/logger": "3.1.1", - "@docusaurus/utils": "3.1.1", - "@docusaurus/utils-validation": "3.1.1", + "@docusaurus/logger": "3.2.1", + "@docusaurus/utils": "3.2.1", + "@docusaurus/utils-validation": "3.2.1", "@mdx-js/mdx": "^3.0.0", "@slorber/remark-comment": "^1.0.0", "escape-html": "^1.0.3", @@ -2447,12 +2464,12 @@ } }, "node_modules/@docusaurus/module-type-aliases": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.1.1.tgz", - "integrity": "sha512-xBJyx0TMfAfVZ9ZeIOb1awdXgR4YJMocIEzTps91rq+hJDFJgJaylDtmoRhUxkwuYmNK1GJpW95b7DLztSBJ3A==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.2.1.tgz", + "integrity": "sha512-FyViV5TqhL1vsM7eh29nJ5NtbRE6Ra6LP1PDcPvhwPSlA7eiWGRKAn3jWwMUcmjkos5SYY+sr0/feCdbM3eQHQ==", "dependencies": { "@docusaurus/react-loadable": "5.5.2", - "@docusaurus/types": "3.1.1", + "@docusaurus/types": "3.2.1", "@types/history": "^4.7.11", "@types/react": "*", "@types/react-router-config": "*", @@ -2466,17 +2483,17 @@ } }, "node_modules/@docusaurus/plugin-content-blog": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.1.1.tgz", - "integrity": "sha512-ew/3VtVoG3emoAKmoZl7oKe1zdFOsI0NbcHS26kIxt2Z8vcXKCUgK9jJJrz0TbOipyETPhqwq4nbitrY3baibg==", - "dependencies": { - "@docusaurus/core": "3.1.1", - "@docusaurus/logger": "3.1.1", - "@docusaurus/mdx-loader": "3.1.1", - "@docusaurus/types": "3.1.1", - "@docusaurus/utils": "3.1.1", - "@docusaurus/utils-common": "3.1.1", - "@docusaurus/utils-validation": "3.1.1", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.2.1.tgz", + "integrity": "sha512-lOx0JfhlGZoZu6pEJfeEpSISZR5dQbJGGvb42IP13G5YThNHhG9R9uoWuo4IOimPqBC7sHThdLA3VLevk61Fsw==", + "dependencies": { + "@docusaurus/core": "3.2.1", + "@docusaurus/logger": "3.2.1", + "@docusaurus/mdx-loader": "3.2.1", + "@docusaurus/types": "3.2.1", + "@docusaurus/utils": "3.2.1", + "@docusaurus/utils-common": "3.2.1", + "@docusaurus/utils-validation": "3.2.1", "cheerio": "^1.0.0-rc.12", "feed": "^4.2.2", "fs-extra": "^11.1.1", @@ -2497,17 +2514,18 @@ } }, "node_modules/@docusaurus/plugin-content-docs": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.1.1.tgz", - "integrity": "sha512-lhFq4E874zw0UOH7ujzxnCayOyAt0f9YPVYSb9ohxrdCM8B4szxitUw9rIX4V9JLLHVoqIJb6k+lJJ1jrcGJ0A==", - "dependencies": { - "@docusaurus/core": "3.1.1", - "@docusaurus/logger": "3.1.1", - "@docusaurus/mdx-loader": "3.1.1", - "@docusaurus/module-type-aliases": "3.1.1", - "@docusaurus/types": "3.1.1", - "@docusaurus/utils": "3.1.1", - "@docusaurus/utils-validation": "3.1.1", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.2.1.tgz", + "integrity": "sha512-GHe5b/lCskAR8QVbfWAfPAApvRZgqk7FN3sOHgjCtjzQACZxkHmq6QqyqZ8Jp45V7lVck4wt2Xw2IzBJ7Cz3bA==", + "dependencies": { + "@docusaurus/core": "3.2.1", + "@docusaurus/logger": "3.2.1", + "@docusaurus/mdx-loader": "3.2.1", + "@docusaurus/module-type-aliases": "3.2.1", + "@docusaurus/types": "3.2.1", 
+ "@docusaurus/utils": "3.2.1", + "@docusaurus/utils-common": "3.2.1", + "@docusaurus/utils-validation": "3.2.1", "@types/react-router-config": "^5.0.7", "combine-promises": "^1.1.0", "fs-extra": "^11.1.1", @@ -2526,15 +2544,15 @@ } }, "node_modules/@docusaurus/plugin-content-pages": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.1.1.tgz", - "integrity": "sha512-NQHncNRAJbyLtgTim9GlEnNYsFhuCxaCNkMwikuxLTiGIPH7r/jpb7O3f3jUMYMebZZZrDq5S7om9a6rvB/YCA==", - "dependencies": { - "@docusaurus/core": "3.1.1", - "@docusaurus/mdx-loader": "3.1.1", - "@docusaurus/types": "3.1.1", - "@docusaurus/utils": "3.1.1", - "@docusaurus/utils-validation": "3.1.1", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.2.1.tgz", + "integrity": "sha512-TOqVfMVTAHqWNEGM94Drz+PUpHDbwFy6ucHFgyTx9zJY7wPNSG5EN+rd/mU7OvAi26qpOn2o9xTdUmb28QLjEQ==", + "dependencies": { + "@docusaurus/core": "3.2.1", + "@docusaurus/mdx-loader": "3.2.1", + "@docusaurus/types": "3.2.1", + "@docusaurus/utils": "3.2.1", + "@docusaurus/utils-validation": "3.2.1", "fs-extra": "^11.1.1", "tslib": "^2.6.0", "webpack": "^5.88.1" @@ -2548,13 +2566,13 @@ } }, "node_modules/@docusaurus/plugin-debug": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.1.1.tgz", - "integrity": "sha512-xWeMkueM9wE/8LVvl4+Qf1WqwXmreMjI5Kgr7GYCDoJ8zu4kD+KaMhrh7py7MNM38IFvU1RfrGKacCEe2DRRfQ==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.2.1.tgz", + "integrity": "sha512-AMKq8NuUKf2sRpN1m/sIbqbRbnmk+rSA+8mNU1LNxEl9BW9F/Gng8m9HKlzeyMPrf5XidzS1jqkuTLDJ6KIrFw==", "dependencies": { - "@docusaurus/core": "3.1.1", - "@docusaurus/types": "3.1.1", - "@docusaurus/utils": "3.1.1", + "@docusaurus/core": "3.2.1", + "@docusaurus/types": "3.2.1", + "@docusaurus/utils": "3.2.1", "fs-extra": "^11.1.1", "react-json-view-lite": "^1.2.0", "tslib": "^2.6.0" @@ -2568,13 +2586,13 @@ } }, "node_modules/@docusaurus/plugin-google-analytics": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.1.1.tgz", - "integrity": "sha512-+q2UpWTqVi8GdlLoSlD5bS/YpxW+QMoBwrPrUH/NpvpuOi0Of7MTotsQf9JWd3hymZxl2uu1o3PIrbpxfeDFDQ==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.2.1.tgz", + "integrity": "sha512-/rJ+9u+Px0eTCiF4TNcNtj3kHf8cp6K1HCwOTdbsSlz6Xn21syZYcy+f1VM9wF6HrvUkXUcbM5TDCvg2IRL6bQ==", "dependencies": { - "@docusaurus/core": "3.1.1", - "@docusaurus/types": "3.1.1", - "@docusaurus/utils-validation": "3.1.1", + "@docusaurus/core": "3.2.1", + "@docusaurus/types": "3.2.1", + "@docusaurus/utils-validation": "3.2.1", "tslib": "^2.6.0" }, "engines": { @@ -2586,13 +2604,13 @@ } }, "node_modules/@docusaurus/plugin-google-gtag": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.1.1.tgz", - "integrity": "sha512-0mMPiBBlQ5LFHTtjxuvt/6yzh8v7OxLi3CbeEsxXZpUzcKO/GC7UA1VOWUoBeQzQL508J12HTAlR3IBU9OofSw==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.2.1.tgz", + "integrity": "sha512-XtuJnlMvYfppeVdUyKiDIJAa/gTJKCQU92z8CLZZ9ibJdgVjFOLS10s0hIC0eL5z0U2u2loJz2rZ63HOkNHbBA==", "dependencies": { - "@docusaurus/core": "3.1.1", - "@docusaurus/types": "3.1.1", - 
"@docusaurus/utils-validation": "3.1.1", + "@docusaurus/core": "3.2.1", + "@docusaurus/types": "3.2.1", + "@docusaurus/utils-validation": "3.2.1", "@types/gtag.js": "^0.0.12", "tslib": "^2.6.0" }, @@ -2605,13 +2623,13 @@ } }, "node_modules/@docusaurus/plugin-google-tag-manager": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.1.1.tgz", - "integrity": "sha512-d07bsrMLdDIryDtY17DgqYUbjkswZQr8cLWl4tzXrt5OR/T/zxC1SYKajzB3fd87zTu5W5klV5GmUwcNSMXQXA==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.2.1.tgz", + "integrity": "sha512-wiS/kE0Ny5pnjTxVCs8ljRnkL1RVMj59t6jmSsgEX7piDOoaXSMIUaoIt9ogS/v132uO0xEsxHstkRUZHQyPcQ==", "dependencies": { - "@docusaurus/core": "3.1.1", - "@docusaurus/types": "3.1.1", - "@docusaurus/utils-validation": "3.1.1", + "@docusaurus/core": "3.2.1", + "@docusaurus/types": "3.2.1", + "@docusaurus/utils-validation": "3.2.1", "tslib": "^2.6.0" }, "engines": { @@ -2623,16 +2641,16 @@ } }, "node_modules/@docusaurus/plugin-sitemap": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.1.1.tgz", - "integrity": "sha512-iJ4hCaMmDaUqRv131XJdt/C/jJQx8UreDWTRqZKtNydvZVh/o4yXGRRFOplea1D9b/zpwL1Y+ZDwX7xMhIOTmg==", - "dependencies": { - "@docusaurus/core": "3.1.1", - "@docusaurus/logger": "3.1.1", - "@docusaurus/types": "3.1.1", - "@docusaurus/utils": "3.1.1", - "@docusaurus/utils-common": "3.1.1", - "@docusaurus/utils-validation": "3.1.1", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.2.1.tgz", + "integrity": "sha512-uWZ7AxzdeaQSTCwD2yZtOiEm9zyKU+wqCmi/Sf25kQQqqFSBZUStXfaQ8OHP9cecnw893ZpZ811rPhB/wfujJw==", + "dependencies": { + "@docusaurus/core": "3.2.1", + "@docusaurus/logger": "3.2.1", + "@docusaurus/types": "3.2.1", + "@docusaurus/utils": "3.2.1", + "@docusaurus/utils-common": "3.2.1", + "@docusaurus/utils-validation": "3.2.1", "fs-extra": "^11.1.1", "sitemap": "^7.1.1", "tslib": "^2.6.0" @@ -2646,23 +2664,23 @@ } }, "node_modules/@docusaurus/preset-classic": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.1.1.tgz", - "integrity": "sha512-jG4ys/hWYf69iaN/xOmF+3kjs4Nnz1Ay3CjFLDtYa8KdxbmUhArA9HmP26ru5N0wbVWhY+6kmpYhTJpez5wTyg==", - "dependencies": { - "@docusaurus/core": "3.1.1", - "@docusaurus/plugin-content-blog": "3.1.1", - "@docusaurus/plugin-content-docs": "3.1.1", - "@docusaurus/plugin-content-pages": "3.1.1", - "@docusaurus/plugin-debug": "3.1.1", - "@docusaurus/plugin-google-analytics": "3.1.1", - "@docusaurus/plugin-google-gtag": "3.1.1", - "@docusaurus/plugin-google-tag-manager": "3.1.1", - "@docusaurus/plugin-sitemap": "3.1.1", - "@docusaurus/theme-classic": "3.1.1", - "@docusaurus/theme-common": "3.1.1", - "@docusaurus/theme-search-algolia": "3.1.1", - "@docusaurus/types": "3.1.1" + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.2.1.tgz", + "integrity": "sha512-E3OHSmttpEBcSMhfPBq3EJMBxZBM01W1rnaCUTXy9EHvkmB5AwgTfW1PwGAybPAX579ntE03R+2zmXdizWfKnQ==", + "dependencies": { + "@docusaurus/core": "3.2.1", + "@docusaurus/plugin-content-blog": "3.2.1", + "@docusaurus/plugin-content-docs": "3.2.1", + "@docusaurus/plugin-content-pages": "3.2.1", + "@docusaurus/plugin-debug": "3.2.1", + "@docusaurus/plugin-google-analytics": "3.2.1", + "@docusaurus/plugin-google-gtag": 
"3.2.1", + "@docusaurus/plugin-google-tag-manager": "3.2.1", + "@docusaurus/plugin-sitemap": "3.2.1", + "@docusaurus/theme-classic": "3.2.1", + "@docusaurus/theme-common": "3.2.1", + "@docusaurus/theme-search-algolia": "3.2.1", + "@docusaurus/types": "3.2.1" }, "engines": { "node": ">=18.0" @@ -2685,22 +2703,22 @@ } }, "node_modules/@docusaurus/theme-classic": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.1.1.tgz", - "integrity": "sha512-GiPE/jbWM8Qv1A14lk6s9fhc0LhPEQ00eIczRO4QL2nAQJZXkjPG6zaVx+1cZxPFWbAsqSjKe2lqkwF3fGkQ7Q==", - "dependencies": { - "@docusaurus/core": "3.1.1", - "@docusaurus/mdx-loader": "3.1.1", - "@docusaurus/module-type-aliases": "3.1.1", - "@docusaurus/plugin-content-blog": "3.1.1", - "@docusaurus/plugin-content-docs": "3.1.1", - "@docusaurus/plugin-content-pages": "3.1.1", - "@docusaurus/theme-common": "3.1.1", - "@docusaurus/theme-translations": "3.1.1", - "@docusaurus/types": "3.1.1", - "@docusaurus/utils": "3.1.1", - "@docusaurus/utils-common": "3.1.1", - "@docusaurus/utils-validation": "3.1.1", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.2.1.tgz", + "integrity": "sha512-+vSbnQyoWjc6vRZi4vJO2dBU02wqzynsai15KK+FANZudrYaBHtkbLZAQhgmxzBGVpxzi87gRohlMm+5D8f4tA==", + "dependencies": { + "@docusaurus/core": "3.2.1", + "@docusaurus/mdx-loader": "3.2.1", + "@docusaurus/module-type-aliases": "3.2.1", + "@docusaurus/plugin-content-blog": "3.2.1", + "@docusaurus/plugin-content-docs": "3.2.1", + "@docusaurus/plugin-content-pages": "3.2.1", + "@docusaurus/theme-common": "3.2.1", + "@docusaurus/theme-translations": "3.2.1", + "@docusaurus/types": "3.2.1", + "@docusaurus/utils": "3.2.1", + "@docusaurus/utils-common": "3.2.1", + "@docusaurus/utils-validation": "3.2.1", "@mdx-js/react": "^3.0.0", "clsx": "^2.0.0", "copy-text-to-clipboard": "^3.2.0", @@ -2724,17 +2742,17 @@ } }, "node_modules/@docusaurus/theme-common": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.1.1.tgz", - "integrity": "sha512-38urZfeMhN70YaXkwIGXmcUcv2CEYK/2l4b05GkJPrbEbgpsIZM3Xc+Js2ehBGGZmfZq8GjjQ5RNQYG+MYzCYg==", - "dependencies": { - "@docusaurus/mdx-loader": "3.1.1", - "@docusaurus/module-type-aliases": "3.1.1", - "@docusaurus/plugin-content-blog": "3.1.1", - "@docusaurus/plugin-content-docs": "3.1.1", - "@docusaurus/plugin-content-pages": "3.1.1", - "@docusaurus/utils": "3.1.1", - "@docusaurus/utils-common": "3.1.1", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.2.1.tgz", + "integrity": "sha512-d+adiD7L9xv6EvfaAwUqdKf4orsM3jqgeqAM+HAjgL/Ux0GkVVnfKr+tsoe+4ow4rHe6NUt+nkkW8/K8dKdilA==", + "dependencies": { + "@docusaurus/mdx-loader": "3.2.1", + "@docusaurus/module-type-aliases": "3.2.1", + "@docusaurus/plugin-content-blog": "3.2.1", + "@docusaurus/plugin-content-docs": "3.2.1", + "@docusaurus/plugin-content-pages": "3.2.1", + "@docusaurus/utils": "3.2.1", + "@docusaurus/utils-common": "3.2.1", "@types/history": "^4.7.11", "@types/react": "*", "@types/react-router-config": "*", @@ -2753,18 +2771,18 @@ } }, "node_modules/@docusaurus/theme-search-algolia": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.1.1.tgz", - "integrity": "sha512-tBH9VY5EpRctVdaAhT+b1BY8y5dyHVZGFXyCHgTrvcXQy5CV4q7serEX7U3SveNT9zksmchPyct6i1sFDC4Z5g==", + "version": "3.2.1", + "resolved": 
"https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.2.1.tgz", + "integrity": "sha512-bzhCrpyXBXzeydNUH83II2akvFEGfhsNTPPWsk5N7e+odgQCQwoHhcF+2qILbQXjaoZ6B3c48hrvkyCpeyqGHw==", "dependencies": { "@docsearch/react": "^3.5.2", - "@docusaurus/core": "3.1.1", - "@docusaurus/logger": "3.1.1", - "@docusaurus/plugin-content-docs": "3.1.1", - "@docusaurus/theme-common": "3.1.1", - "@docusaurus/theme-translations": "3.1.1", - "@docusaurus/utils": "3.1.1", - "@docusaurus/utils-validation": "3.1.1", + "@docusaurus/core": "3.2.1", + "@docusaurus/logger": "3.2.1", + "@docusaurus/plugin-content-docs": "3.2.1", + "@docusaurus/theme-common": "3.2.1", + "@docusaurus/theme-translations": "3.2.1", + "@docusaurus/utils": "3.2.1", + "@docusaurus/utils-validation": "3.2.1", "algoliasearch": "^4.18.0", "algoliasearch-helper": "^3.13.3", "clsx": "^2.0.0", @@ -2783,9 +2801,9 @@ } }, "node_modules/@docusaurus/theme-translations": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.1.1.tgz", - "integrity": "sha512-xvWQFwjxHphpJq5fgk37FXCDdAa2o+r7FX8IpMg+bGZBNXyWBu3MjZ+G4+eUVNpDhVinTc+j6ucL0Ain5KCGrg==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.2.1.tgz", + "integrity": "sha512-jAUMkIkFfY+OAhJhv6mV8zlwY6J4AQxJPTgLdR2l+Otof9+QdJjHNh/ifVEu9q0lp3oSPlJj9l05AaP7Ref+cg==", "dependencies": { "fs-extra": "^11.1.1", "tslib": "^2.6.0" @@ -2795,15 +2813,15 @@ } }, "node_modules/@docusaurus/tsconfig": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/tsconfig/-/tsconfig-3.1.1.tgz", - "integrity": "sha512-FTBuY3KvaHfMVBgvlPmDQ+KS9Q/bYtVftq2ugou3PgBDJoQmw2aUZ4Sg15HKqLGbfIkxoy9t6cqE4Yw1Ta8Q1A==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/tsconfig/-/tsconfig-3.2.1.tgz", + "integrity": "sha512-+biUwtsYW3oChLxYezzA+NIgS3Q9KDRl7add/YT54RXs9Q4rKInebxdHdG6JFs5BaTg45gyjDu0rvNVcGeHODg==", "dev": true }, "node_modules/@docusaurus/types": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.1.1.tgz", - "integrity": "sha512-grBqOLnubUecgKFXN9q3uit2HFbCxTWX4Fam3ZFbMN0sWX9wOcDoA7lwdX/8AmeL20Oc4kQvWVgNrsT8bKRvzg==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.2.1.tgz", + "integrity": "sha512-n/toxBzL2oxTtRTOFiGKsHypzn/Pm+sXyw+VSk1UbqbXQiHOwHwts55bpKwbcUgA530Is6kix3ELiFOv9GAMfw==", "dependencies": { "@mdx-js/mdx": "^3.0.0", "@types/history": "^4.7.11", @@ -2821,11 +2839,12 @@ } }, "node_modules/@docusaurus/utils": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.1.1.tgz", - "integrity": "sha512-ZJfJa5cJQtRYtqijsPEnAZoduW6sjAQ7ZCWSZavLcV10Fw0Z3gSaPKA/B4micvj2afRZ4gZxT7KfYqe5H8Cetg==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.2.1.tgz", + "integrity": "sha512-DPkIS/EPc+pGAV798PUXgNzJFM3HJouoQXgr0KDZuJVz1EkWbDLOcQwLIz8Qx7liI9ddfkN/TXTRQdsTPZNakw==", "dependencies": { - "@docusaurus/logger": "3.1.1", + "@docusaurus/logger": "3.2.1", + "@docusaurus/utils-common": "3.2.1", "@svgr/webpack": "^6.5.1", "escape-string-regexp": "^4.0.0", "file-loader": "^6.2.0", @@ -2837,6 +2856,7 @@ "js-yaml": "^4.1.0", "lodash": "^4.17.21", "micromatch": "^4.0.5", + "prompts": "^2.4.2", "resolve-pathname": "^3.0.0", "shelljs": "^0.8.5", "tslib": "^2.6.0", @@ -2856,9 +2876,9 @@ } }, "node_modules/@docusaurus/utils-common": { - "version": "3.1.1", - 
"resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.1.1.tgz", - "integrity": "sha512-eGne3olsIoNfPug5ixjepZAIxeYFzHHnor55Wb2P57jNbtVaFvij/T+MS8U0dtZRFi50QU+UPmRrXdVUM8uyMg==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.2.1.tgz", + "integrity": "sha512-N5vadULnRLiqX2QfTjVEU3u5vo6RG2EZTdyXvJdzDOdrLCGIZAfnf/VkssinFZ922sVfaFfQ4FnStdhn5TWdVg==", "dependencies": { "tslib": "^2.6.0" }, @@ -2875,12 +2895,13 @@ } }, "node_modules/@docusaurus/utils-validation": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.1.1.tgz", - "integrity": "sha512-KlY4P9YVDnwL+nExvlIpu79abfEv6ZCHuOX4ZQ+gtip+Wxj0daccdReIWWtqxM/Fb5Cz1nQvUCc7VEtT8IBUAA==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.2.1.tgz", + "integrity": "sha512-+x7IR9hNMXi62L1YAglwd0s95fR7+EtirjTxSN4kahYRWGqOi3jlQl1EV0az/yTEvKbxVvOPcdYicGu9dk4LJw==", "dependencies": { - "@docusaurus/logger": "3.1.1", - "@docusaurus/utils": "3.1.1", + "@docusaurus/logger": "3.2.1", + "@docusaurus/utils": "3.2.1", + "@docusaurus/utils-common": "3.2.1", "joi": "^17.9.2", "js-yaml": "^4.1.0", "tslib": "^2.6.0" @@ -3620,19 +3641,6 @@ "micromark-util-symbol": "^1.0.1" } }, - "node_modules/@slorber/static-site-generator-webpack-plugin": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/@slorber/static-site-generator-webpack-plugin/-/static-site-generator-webpack-plugin-4.0.7.tgz", - "integrity": "sha512-Ug7x6z5lwrz0WqdnNFOMYrDQNTPAprvHLSh6+/fmml3qUiz6l5eq+2MzLKWtn/q5K5NpSiFsZTP/fck/3vjSxA==", - "dependencies": { - "eval": "^0.1.8", - "p-map": "^4.0.0", - "webpack-sources": "^3.2.2" - }, - "engines": { - "node": ">=14" - } - }, "node_modules/@svgr/babel-plugin-add-jsx-attribute": { "version": "6.5.1", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-6.5.1.tgz", @@ -4751,30 +4759,31 @@ } }, "node_modules/algoliasearch": { - "version": "4.22.1", - "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.22.1.tgz", - "integrity": "sha512-jwydKFQJKIx9kIZ8Jm44SdpigFwRGPESaxZBaHSV0XWN2yBJAOT4mT7ppvlrpA4UGzz92pqFnVKr/kaZXrcreg==", - "dependencies": { - "@algolia/cache-browser-local-storage": "4.22.1", - "@algolia/cache-common": "4.22.1", - "@algolia/cache-in-memory": "4.22.1", - "@algolia/client-account": "4.22.1", - "@algolia/client-analytics": "4.22.1", - "@algolia/client-common": "4.22.1", - "@algolia/client-personalization": "4.22.1", - "@algolia/client-search": "4.22.1", - "@algolia/logger-common": "4.22.1", - "@algolia/logger-console": "4.22.1", - "@algolia/requester-browser-xhr": "4.22.1", - "@algolia/requester-common": "4.22.1", - "@algolia/requester-node-http": "4.22.1", - "@algolia/transporter": "4.22.1" + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.23.3.tgz", + "integrity": "sha512-Le/3YgNvjW9zxIQMRhUHuhiUjAlKY/zsdZpfq4dlLqg6mEm0nL6yk+7f2hDOtLpxsgE4jSzDmvHL7nXdBp5feg==", + "dependencies": { + "@algolia/cache-browser-local-storage": "4.23.3", + "@algolia/cache-common": "4.23.3", + "@algolia/cache-in-memory": "4.23.3", + "@algolia/client-account": "4.23.3", + "@algolia/client-analytics": "4.23.3", + "@algolia/client-common": "4.23.3", + "@algolia/client-personalization": "4.23.3", + "@algolia/client-search": "4.23.3", + "@algolia/logger-common": "4.23.3", + "@algolia/logger-console": "4.23.3", + 
"@algolia/recommend": "4.23.3", + "@algolia/requester-browser-xhr": "4.23.3", + "@algolia/requester-common": "4.23.3", + "@algolia/requester-node-http": "4.23.3", + "@algolia/transporter": "4.23.3" } }, "node_modules/algoliasearch-helper": { - "version": "3.16.2", - "resolved": "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.16.2.tgz", - "integrity": "sha512-Yl/Gu5Cq4Z5s/AJ0jR37OPI1H3+z7PHz657ibyaXgMOaWvPlZ3OACN13N+7HCLPUlB0BN+8BtmrG/CqTilowBA==", + "version": "3.18.0", + "resolved": "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.18.0.tgz", + "integrity": "sha512-ZXvA8r6VG46V343jnIE7Tei8Xr0/9N8YhD27joC0BKxeogQyvNu7O37i510wA7FnrDjoa/tFhK90WUaBlkaqnw==", "dependencies": { "@algolia/events": "^4.0.1" }, @@ -4893,9 +4902,9 @@ } }, "node_modules/autoprefixer": { - "version": "10.4.17", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.17.tgz", - "integrity": "sha512-/cpVNRLSfhOtcGflT13P2794gVSgmPgTR+erw5ifnMLZb0UnSlkK4tquLmkd3BhA+nLo5tX8Cu0upUsGKvKbmg==", + "version": "10.4.19", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.19.tgz", + "integrity": "sha512-BaENR2+zBZ8xXhM4pUaKUxlVdxZ0EZhjvbopwnXmxRUfqDmwSpC2lAi/QXvx7NRdPCo1WKEcEF6mV64si1z4Ew==", "funding": [ { "type": "opencollective", @@ -4911,8 +4920,8 @@ } ], "dependencies": { - "browserslist": "^4.22.2", - "caniuse-lite": "^1.0.30001578", + "browserslist": "^4.23.0", + "caniuse-lite": "^1.0.30001599", "fraction.js": "^4.3.7", "normalize-range": "^0.1.2", "picocolors": "^1.0.0", @@ -5032,12 +5041,12 @@ } }, "node_modules/body-parser": { - "version": "1.20.1", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", - "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", + "version": "1.20.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", + "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", "dependencies": { "bytes": "3.1.2", - "content-type": "~1.0.4", + "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", @@ -5045,7 +5054,7 @@ "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.11.0", - "raw-body": "2.5.1", + "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" }, @@ -5131,9 +5140,9 @@ } }, "node_modules/browserslist": { - "version": "4.22.3", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.22.3.tgz", - "integrity": "sha512-UAp55yfwNv0klWNapjs/ktHoguxuQNGnOzxYmfnXIS+8AsRDZkSDxg7R1AX3GKzn078SBI5dzwzj/Yx0Or0e3A==", + "version": "4.23.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz", + "integrity": "sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==", "funding": [ { "type": "opencollective", @@ -5149,8 +5158,8 @@ } ], "dependencies": { - "caniuse-lite": "^1.0.30001580", - "electron-to-chromium": "^1.4.648", + "caniuse-lite": "^1.0.30001587", + "electron-to-chromium": "^1.4.668", "node-releases": "^2.0.14", "update-browserslist-db": "^1.0.13" }, @@ -5211,13 +5220,18 @@ } }, "node_modules/call-bind": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", - "integrity": "sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": 
"sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.1", - "set-function-length": "^1.1.1" + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -5263,9 +5277,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001582", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001582.tgz", - "integrity": "sha512-vsJG3V5vgfduaQGVxL53uSX/HUzxyr2eA8xCo36OLal7sRcSZbibJtLeh0qja4sFOr/QQGt4opB4tOy+eOgAxg==", + "version": "1.0.30001612", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001612.tgz", + "integrity": "sha512-lFgnZ07UhaCcsSZgWW0K5j4e69dK1u/ltrL9lTUiFOwNHs12S3UMIEYgBV0Z6C6hRDev7iRnMzzYmKabYdXF9g==", "funding": [ { "type": "opencollective", @@ -5721,9 +5735,9 @@ "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" }, "node_modules/cookie": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", - "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", "engines": { "node": ">= 0.6" } @@ -6258,16 +6272,19 @@ } }, "node_modules/define-data-property": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", - "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "dependencies": { - "get-intrinsic": "^1.2.1", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/define-lazy-prop": { @@ -6539,9 +6556,9 @@ "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" }, "node_modules/electron-to-chromium": { - "version": "1.4.653", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.653.tgz", - "integrity": "sha512-wA2A2LQCqnEwQAvwADQq3KpMpNwgAUBnRmrFgRzHnPhbQUFArTR32Ab46f4p0MovDLcg4uqd4nCsN2hTltslpA==" + "version": "1.4.749", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.749.tgz", + "integrity": "sha512-LRMMrM9ITOvue0PoBrvNIraVmuDbJV5QC9ierz/z5VilMdPOVMjOtpICNld3PuXuTZ3CHH/UPxX9gHhAPwi+0Q==" }, "node_modules/emoji-regex": { "version": "9.2.2", @@ -6609,6 +6626,25 @@ "is-arrayish": "^0.2.1" } }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + 
"node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-module-lexer": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.4.1.tgz", @@ -7134,16 +7170,16 @@ } }, "node_modules/express": { - "version": "4.18.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", - "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", + "version": "4.19.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", + "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==", "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.1", + "body-parser": "1.20.2", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.5.0", + "cookie": "0.6.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", @@ -7492,9 +7528,9 @@ "devOptional": true }, "node_modules/follow-redirects": { - "version": "1.15.5", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz", - "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==", + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", + "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", "funding": [ { "type": "individual", @@ -7727,15 +7763,19 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", - "integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", "dependencies": { + "es-errors": "^1.3.0", "function-bind": "^1.1.2", "has-proto": "^1.0.1", "has-symbols": "^1.0.3", "hasown": "^2.0.0" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -7999,11 +8039,11 @@ } }, "node_modules/has-property-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", - "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dependencies": { - "get-intrinsic": "^1.2.2" + "es-define-property": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -12951,9 +12991,9 @@ } }, "node_modules/raw-body": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", - "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", + "version": "2.5.2", + "resolved": 
"https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", @@ -13231,9 +13271,9 @@ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, "node_modules/react-json-view-lite": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/react-json-view-lite/-/react-json-view-lite-1.2.1.tgz", - "integrity": "sha512-Itc0g86fytOmKZoIoJyGgvNqohWSbh3NXIKNgH6W6FT9PC1ck4xas1tT3Rr/b3UlFXyA9Jjaw9QSXdZy2JwGMQ==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/react-json-view-lite/-/react-json-view-lite-1.3.0.tgz", + "integrity": "sha512-aN1biKC5v4DQkmQBlZjuMFR09MKZGMPtIg+cut8zEeg2HXd6gl2gRy0n4HMacHf0dznQgo0SVXN7eT8zV3hEuQ==", "engines": { "node": ">=14" }, @@ -14132,15 +14172,16 @@ } }, "node_modules/set-function-length": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.0.tgz", - "integrity": "sha512-4DBHDoyHlM1IRPGYcoxexgh67y4ueR53FKV1yyxwFMY7aCqcN/38M1+SwZ/qJQ8iLv7+ck385ot4CcisOAPT9w==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", "dependencies": { - "define-data-property": "^1.1.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.2", + "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.1" + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -14211,13 +14252,17 @@ } }, "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -15572,9 +15617,9 @@ } }, "node_modules/webpack-dev-middleware": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz", - "integrity": "sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA==", + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz", + "integrity": "sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q==", "dependencies": { "colorette": "^2.0.10", "memfs": "^3.4.3", diff --git a/docs/package.json b/docs/package.json index 009cf947f6..e82a99814f 100644 --- a/docs/package.json +++ b/docs/package.json @@ -15,8 +15,8 @@ "typecheck": "tsc" }, "dependencies": { - "@docusaurus/core": "3.1.1", - "@docusaurus/preset-classic": "3.1.1", + "@docusaurus/core": "^3.2.1", + "@docusaurus/preset-classic": "^3.2.1", 
"@easyops-cn/docusaurus-search-local": "^0.40.1", "@mdx-js/react": "^3.0.0", "clsx": "^2.0.0", @@ -26,10 +26,10 @@ "react-dom": "^18.0.0" }, "devDependencies": { - "@docusaurus/eslint-plugin": "^3.1.1", - "@docusaurus/module-type-aliases": "3.1.1", - "@docusaurus/tsconfig": "3.1.1", - "@docusaurus/types": "3.1.1", + "@docusaurus/eslint-plugin": "^3.2.1", + "@docusaurus/module-type-aliases": "^3.2.1", + "@docusaurus/tsconfig": "^3.2.1", + "@docusaurus/types": "^3.2.1", "@typescript-eslint/eslint-plugin": "^6.20.0", "@typescript-eslint/parser": "^6.20.0", "eslint": "^8.56.0", diff --git a/docs/retype.yml b/docs/retype.yml deleted file mode 100644 index 08af10f3e5..0000000000 --- a/docs/retype.yml +++ /dev/null @@ -1,13 +0,0 @@ ---- -input: . -output: build -url: docs.infrahub.app -branding: - title: Infrahub - label: Docs -favicon: ./media/infrahub-favicon.png -links: - - text: Getting Started - link: https://retype.com/guides/getting-started/ -footer: - copyright: "© Copyright {{ year }}. All rights reserved." diff --git a/docs/sidebars.ts b/docs/sidebars.ts index 5d62b13bed..496ae131ce 100644 --- a/docs/sidebars.ts +++ b/docs/sidebars.ts @@ -38,29 +38,38 @@ const sidebars: SidebarsConfig = { { type: 'category', label: 'Guides', - link: {type: 'doc', id: 'guides/readme'}, + link: { + type: 'generated-index', + slug: 'guides' + }, items: [ 'guides/installation', 'guides/create-schema', + 'guides/generator', 'guides/repository', 'guides/jinja2-transform', 'guides/python-transform', 'guides/artifact', 'guides/database-backup', + 'guides/profiles', ], }, { type: 'category', label: 'Topics', - link: {type: 'doc', id: 'topics/readme'}, + link: { + type: 'generated-index', + slug: 'topics' + }, items: [ 'topics/infrahub-yml', 'topics/architecture', - 'topics/artifact', 'topics/check', 'topics/hardware-requirements', + 'topics/ipam', 'topics/local-demo-environment', + 'topics/generator', 'topics/graphql', 'topics/object-storage', 'topics/version-control', @@ -71,6 +80,7 @@ const sidebars: SidebarsConfig = { 'topics/auth', 'topics/database-backup', 'topics/resources-testing-framework', + 'topics/profiles', ], }, { @@ -112,6 +122,7 @@ const sidebars: SidebarsConfig = { }, 'reference/configuration', 'reference/git-agent', + 'reference/message-bus-events', 'reference/api-server', 'reference/dotinfrahub', 'reference/infrahub-tests' @@ -120,25 +131,63 @@ const sidebars: SidebarsConfig = { { type: 'category', label: 'Python SDK', - link: {type: 'doc', id: 'python-sdk/readme'}, + link: { + type: 'doc', + id: 'python-sdk/readme' + }, items: [ { type: 'category', label: 'Guides', items: [ - 'python-sdk/guides/installation', - 'python-sdk/guides/client', + 'python-sdk/guides/installation', + 'python-sdk/guides/client', 'python-sdk/guides/query_data', - 'python-sdk/guides/create_update_delete', - 'python-sdk/guides/branches', - 'python-sdk/guides/store' + 'python-sdk/guides/create_update_delete', + 'python-sdk/guides/branches', + 'python-sdk/guides/store', + 'python-sdk/guides/tracking' + ], + }, + { + type: 'category', + label: 'Topics', + items: [ + 'python-sdk/topics/tracking' + ], + }, + { + type: 'category', + label: 'Reference', + items: [ + 'python-sdk/reference/config' + ], + }, + ], + }, + { + type: 'category', + label: 'Infrahub Sync', + link: { + type: 'doc', + id: 'sync/readme' + }, + items: [ + { + type: 'category', + label: 'Guides', + items: [ + 'sync/guides/installation', + 'sync/guides/creation', + 'sync/guides/run', ], }, { type: 'category', label: 'Reference', items: [ - 
'python-sdk/reference/config' + 'sync/reference/config', + 'sync/reference/cli', ], }, ], @@ -170,6 +219,7 @@ const sidebars: SidebarsConfig = { }, { 'Release Notes': [ + 'release-notes/release-0_13', 'release-notes/release-0_12', 'release-notes/release-0_11', 'release-notes/release-0_10', diff --git a/frontend/.eslintrc b/frontend/.eslintrc index d9fc18b0b5..3fef9bb882 100644 --- a/frontend/.eslintrc +++ b/frontend/.eslintrc @@ -68,4 +68,4 @@ // } // ] } -} \ No newline at end of file +} diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 444bc16128..b96da846a8 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -8,97 +8,102 @@ "name": "frontend", "version": "0.1.0", "dependencies": { - "@apollo/client": "^3.7.7", - "@codemirror/commands": "^6.3.2", - "@codemirror/lang-markdown": "^6.2.3", - "@codemirror/state": "^6.3.2", - "@codemirror/view": "^6.22.1", - "@headlessui/react": "^1.7.10", - "@heroicons/react": "^2.0.15", + "@apollo/client": "^3.9.10", + "@codemirror/commands": "^6.3.3", + "@codemirror/lang-markdown": "^6.2.4", + "@codemirror/state": "^6.4.1", + "@codemirror/view": "^6.26.1", + "@graphiql/plugin-explorer": "^1.0.4", + "@headlessui/react": "^1.7.18", + "@heroicons/react": "^2.1.3", "@hookform/error-message": "^2.0.1", - "@iconify-icon/react": "^1.0.8", - "@iconify-json/mdi": "^1.1.55", + "@iconify-icon/react": "^2.0.1", + "@iconify-json/mdi": "^1.1.64", + "@loadable/component": "^5.16.3", "@popperjs/core": "^2.11.8", + "@radix-ui/react-dropdown-menu": "^2.0.6", + "@radix-ui/react-popover": "^1.0.7", + "@radix-ui/react-progress": "^1.0.3", "@radix-ui/react-tooltip": "^1.0.7", - "@sentry/react": "^7.45.0", - "@sentry/tracing": "^7.45.0", - "@tailwindcss/forms": "^0.5.3", - "@uiw/react-color": "^2.0.5", - "@vitejs/plugin-react": "^4.0.0", - "autoprefixer": "^10.4.14", + "@svgr/rollup": "^8.1.0", + "@tailwindcss/forms": "^0.5.7", + "@uiw/react-color": "^2.1.1", + "@vitejs/plugin-react": "^4.2.1", + "autoprefixer": "^10.4.19", "class-variance-authority": "^0.7.0", "clsx": "^2.1.0", "cm6-theme-basic-light": "^0.2.0", "cross-fetch": "^4.0.0", - "date-fns": "^2.29.3", - "handlebars": "^4.7.7", - "jotai": "^2.6.0", + "date-fns": "^3.6.0", + "graphiql": "^3.1.2", + "graphql": "^16.8.1", + "handlebars": "^4.7.8", + "jotai": "^2.7.2", "prismjs": "^1.29.0", - "query-string": "^7.1.3", - "ramda": "^0.28.0", + "query-string": "^9.0.0", + "ramda": "^0.29.1", "react": "^18.2.0", + "react-accessible-treeview": "^2.8.3", "react-datepicker": "^4.11.0", "react-diff-view": "^3.2.0", "react-dom": "^18.2.0", - "react-hook-form": "^7.43.5", + "react-hook-form": "^7.51.2", "react-loading": "^2.0.3", "react-markdown": "^9.0.1", "react-paginate": "^8.2.0", "react-popper": "^2.3.0", - "react-router-dom": "^6.21.1", + "react-router-dom": "^6.22.3", "react-simple-code-editor": "^0.13.1", - "react-toastify": "^9.1.2", - "recharts": "^2.12.2", + "react-toastify": "^9.1.3", + "recharts": "^2.12.3", "remark-gfm": "^4.0.0", "sha1": "^1.1.1", "subscriptions-transport-ws": "^0.11.0", - "tailwind-merge": "^2.2.1", + "tailwind-merge": "^2.2.2", "unidiff": "^1.0.4", "use-query-params": "^2.2.1", - "vite": "^4.3.8", - "vite-plugin-svgr": "^3.2.0", - "vite-tsconfig-paths": "^4.2.0", + "vite": "^5.2.8", + "vite-tsconfig-paths": "^4.3.2", "web-vitals": "^2.1.4" }, "devDependencies": { - "@graphql-codegen/cli": "^3.0.0", - "@graphql-codegen/typescript": "^3.0.0", - "@playwright/test": "^1.41.1", - "@testing-library/react": "^14.0.0", - "@types/node": "^16.18.12", - 
"@types/prismjs": "^1.26.0", - "@types/ramda": "^0.28.23", - "@types/react": "^18.0.28", + "@graphql-codegen/cli": "^5.0.2", + "@graphql-codegen/typescript": "^4.0.6", + "@playwright/test": "^1.42.1", + "@testing-library/react": "^14.2.2", + "@types/loadable__component": "^5.13.9", + "@types/node": "^20.12.3", + "@types/prismjs": "^1.26.3", + "@types/ramda": "^0.29.12", + "@types/react": "^18.2.74", "@types/react-datepicker": "^4.10.0", - "@types/react-dom": "^18.0.10", - "@types/react-test-renderer": "^18.0.0", - "@types/sha1": "^1.1.3", - "@typescript-eslint/eslint-plugin": "^5.55.0", - "@vitest/coverage-c8": "^0.31.1", - "c8": "^7.13.0", - "cypress": "^13.6.1", - "cypress-vite": "^1.5.0", - "eslint": "^8.36.0", - "eslint-config-prettier": "^8.8.0", - "eslint-config-standard-with-typescript": "^34.0.1", + "@types/react-dom": "^18.2.23", + "@types/react-test-renderer": "^18.0.7", + "@types/sha1": "^1.1.5", + "@typescript-eslint/eslint-plugin": "^7.5.0", + "@vitest/coverage-v8": "^1.4.0", + "cypress": "^13.7.2", + "eslint": "^8.57.0", + "eslint-config-prettier": "^9.1.0", "eslint-plugin-cypress": "^2.15.1", - "eslint-plugin-import": "^2.27.5", - "eslint-plugin-n": "^15.6.1", + "eslint-plugin-import": "^2.29.1", + "eslint-plugin-n": "^16.6.2", "eslint-plugin-promise": "^6.1.1", - "eslint-plugin-react": "^7.32.2", - "eslint-plugin-unused-imports": "^2.0.0", + "eslint-plugin-react": "^7.34.1", + "eslint-plugin-unused-imports": "^3.1.0", "husky": "^8.0.3", - "jsdom": "^21.1.2", + "jsdom": "^24.0.0", "lint-staged": "^13.2.0", "openapi-typescript": "^6.7.3", "postcss": "^8.4.23", "prettier": "2.8.8", "pretty-quick": "^3.1.3", "react-test-renderer": "^18.2.0", - "tailwindcss": "^3.3.2", - "ts-node": "^10.9.1", - "typescript": "^4.9.5", - "vitest": "^0.31.1" + "tailwindcss": "^3.4.3", + "ts-node": "^10.9.2", + "typescript": "^5.4.3", + "typescript-eslint": "^7.5.0", + "vitest": "^1.4.0" } }, "node_modules/@aashutoshrathi/word-wrap": { @@ -134,9 +139,9 @@ } }, "node_modules/@apollo/client": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/@apollo/client/-/client-3.9.0.tgz", - "integrity": "sha512-M6I7h9UF0MmW/eK1oTzuHewZRZmvNzvw4c7nXhvQnxIk0V0VeJWSmscRGQNtsNmk8WnBmgyV/1KOVNow2aOM8w==", + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/@apollo/client/-/client-3.9.10.tgz", + "integrity": "sha512-w8i/Lk1P0vvWZF0Xb00XPonn79/0rgRJ1vopBlVudVuy9QP29/NZXK0rI2xJIN6VrKuEqJZaVGJC+7k23I2sfA==", "dependencies": { "@graphql-typed-document-node/core": "^3.1.1", "@wry/caches": "^1.0.0", @@ -146,7 +151,7 @@ "hoist-non-react-statics": "^3.3.2", "optimism": "^0.18.0", "prop-types": "^15.7.2", - "rehackt": "0.0.3", + "rehackt": "0.0.6", "response-iterator": "^0.2.6", "symbol-observable": "^4.0.0", "ts-invariant": "^0.10.3", @@ -408,9 +413,9 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.23.5", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.23.5.tgz", - "integrity": "sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw==", + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.24.1.tgz", + "integrity": "sha512-Pc65opHDliVpRHuKfzI+gSA4zcgr65O4cl64fFJIWEEh8JoHIHh0Oez1Eo8Arz8zq/JhgKodQaxEwUPRtZylVA==", "engines": { "node": ">=6.9.0" } @@ -462,7 +467,6 @@ "version": "7.22.5", "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz", "integrity": 
"sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg==", - "dev": true, "dependencies": { "@babel/types": "^7.22.5" }, @@ -470,6 +474,17 @@ "node": ">=6.9.0" } }, + "node_modules/@babel/helper-builder-binary-assignment-operator-visitor": { + "version": "7.22.15", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.22.15.tgz", + "integrity": "sha512-QkBXwGgaoC2GtGZRoma6kv7Szfv06khvhFav67ZExau2RaXzy8MpHSMO2PNoP2XtmQphJQRHFfg77Bq731Yizw==", + "dependencies": { + "@babel/types": "^7.22.15" + }, + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/@babel/helper-compilation-targets": { "version": "7.23.6", "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.23.6.tgz", @@ -486,17 +501,16 @@ } }, "node_modules/@babel/helper-create-class-features-plugin": { - "version": "7.23.10", - "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.23.10.tgz", - "integrity": "sha512-2XpP2XhkXzgxecPNEEK8Vz8Asj9aRxt08oKOqtiZoqV2UGZ5T+EkyP9sXQ9nwMxBIG34a7jmasVqoMop7VdPUw==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.24.1.tgz", + "integrity": "sha512-1yJa9dX9g//V6fDebXoEfEsxkZHk3Hcbm+zLhyu6qVgYFLvmTALTeV+jNU9e5RnYtioBrGEOdoI2joMSNQ/+aA==", "dependencies": { "@babel/helper-annotate-as-pure": "^7.22.5", "@babel/helper-environment-visitor": "^7.22.20", "@babel/helper-function-name": "^7.23.0", "@babel/helper-member-expression-to-functions": "^7.23.0", "@babel/helper-optimise-call-expression": "^7.22.5", - "@babel/helper-replace-supers": "^7.22.20", + "@babel/helper-replace-supers": "^7.24.1", "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", "@babel/helper-split-export-declaration": "^7.22.6", "semver": "^6.3.1" @@ -508,6 +522,37 @@ "@babel/core": "^7.0.0" } }, + "node_modules/@babel/helper-create-regexp-features-plugin": { + "version": "7.22.15", + "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.22.15.tgz", + "integrity": "sha512-29FkPLFjn4TPEa3RE7GpW+qbE8tlsu3jntNYNfcGsc49LphF1PQIiD+vMZ1z1xVOKt+93khA9tc2JBs3kBjA7w==", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "regexpu-core": "^5.3.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-define-polyfill-provider": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.1.tgz", + "integrity": "sha512-o7SDgTJuvx5vLKD6SFvkydkSMBvahDKGiNJzG22IZYXhiqoe9efY7zocICBgzHV4IRg5wdgl2nEL/tulKIEIbA==", + "dependencies": { + "@babel/helper-compilation-targets": "^7.22.6", + "@babel/helper-plugin-utils": "^7.22.5", + "debug": "^4.1.1", + "lodash.debounce": "^4.0.8", + "resolve": "^1.14.2" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, "node_modules/@babel/helper-environment-visitor": { "version": "7.22.20", "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz", @@ -543,7 +588,6 @@ "version": "7.23.0", "resolved": 
"https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.23.0.tgz", "integrity": "sha512-6gfrPwh7OuT6gZyJZvd6WbTfrqAo7vm4xCzAXOusKqq/vWdKXphTpj5klHKNmRUU6/QRGlBsyU9mAIPaWHlqJA==", - "dev": true, "dependencies": { "@babel/types": "^7.23.0" }, @@ -552,11 +596,11 @@ } }, "node_modules/@babel/helper-module-imports": { - "version": "7.22.15", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz", - "integrity": "sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w==", + "version": "7.24.3", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.3.tgz", + "integrity": "sha512-viKb0F9f2s0BCS22QSF308z/+1YWKV/76mwt61NBzS5izMzDPwdq1pTrzf+Li3npBWX9KdQbkeCt1jSAM7lZqg==", "dependencies": { - "@babel/types": "^7.22.15" + "@babel/types": "^7.24.0" }, "engines": { "node": ">=6.9.0" @@ -584,7 +628,6 @@ "version": "7.22.5", "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz", "integrity": "sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw==", - "dev": true, "dependencies": { "@babel/types": "^7.22.5" }, @@ -593,21 +636,36 @@ } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz", - "integrity": "sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg==", + "version": "7.24.0", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.0.tgz", + "integrity": "sha512-9cUznXMG0+FxRuJfvL82QlTqIzhVW9sL0KjMPHhAOOvpQGL8QtdxnBKILjBqxlHyliz0yCa1G903ZXI/FuHy2w==", "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/helper-replace-supers": { + "node_modules/@babel/helper-remap-async-to-generator": { "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.22.20.tgz", - "integrity": "sha512-qsW0In3dbwQUbK8kejJ4R7IHVGwHJlV6lpG6UA7a9hSa2YEiAib+N1T2kr6PEeUT+Fl7najmSOS6SmAwCHK6Tw==", - "dev": true, + "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.22.20.tgz", + "integrity": "sha512-pBGyV4uBqOns+0UvhsTO8qgl8hO89PmiDYv+/COyp1aeMcmfrfruz+/nCMFiYyFF/Knn0yfrC85ZzNFjembFTw==", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-wrap-function": "^7.22.20" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-replace-supers": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.24.1.tgz", + "integrity": "sha512-QCR1UqC9BzG5vZl8BMicmZ28RuUBnHhAMddD8yHFHDRH9lLTZ9uUPehX8ctVPT8l0TKblJidqcgUUKGVrePleQ==", "dependencies": { "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-member-expression-to-functions": "^7.22.15", + "@babel/helper-member-expression-to-functions": "^7.23.0", "@babel/helper-optimise-call-expression": "^7.22.5" }, "engines": { @@ -632,7 +690,6 @@ "version": "7.22.5", "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.22.5.tgz", "integrity": 
"sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q==", - "dev": true, "dependencies": { "@babel/types": "^7.22.5" }, @@ -675,6 +732,19 @@ "node": ">=6.9.0" } }, + "node_modules/@babel/helper-wrap-function": { + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.22.20.tgz", + "integrity": "sha512-pms/UwkOpnQe/PDAEdV/d7dVCoBbB+R4FvYoHGZz+4VPcg7RtYy2KP7S2lbuWM6FCSgob5wshfGESbC/hzNXZw==", + "dependencies": { + "@babel/helper-function-name": "^7.22.5", + "@babel/template": "^7.22.15", + "@babel/types": "^7.22.19" + }, + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/@babel/helpers": { "version": "7.23.9", "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.23.9.tgz", @@ -766,9 +836,9 @@ } }, "node_modules/@babel/parser": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.9.tgz", - "integrity": "sha512-9tcKgqKbs3xGJ+NtKF2ndOBBLVwPjl1SHxPQkd36r3Dlirw3xWUeGaTbqr7uGZcTaxkVNwc+03SVP7aCdWrTlA==", + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.1.tgz", + "integrity": "sha512-Zo9c7N3xdOIQrNip7Lc9wvRPzlRtovHVE4lkz8WEDr7uYh/GMQhSiIgFxGIArRHYdJE5kxtZjAf8rT0xhdLCzg==", "bin": { "parser": "bin/babel-parser.js" }, @@ -776,6 +846,51 @@ "node": ">=6.0.0" } }, + "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.24.1.tgz", + "integrity": "sha512-y4HqEnkelJIOQGd+3g1bTeKsA5c6qM7eOn7VggGVbBc0y8MLSKHacwcIE2PplNlQSj0PqS9rrXL/nkPVK+kUNg==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.24.1.tgz", + "integrity": "sha512-Hj791Ii4ci8HqnaKHAlLNs+zaLXb0EzSDhiAWp5VNlyvCNymYfacs64pxTxbH1znW/NcArSmwpmG9IKE/TUVVQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", + "@babel/plugin-transform-optional-chaining": "^7.24.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.13.0" + } + }, + "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.24.1.tgz", + "integrity": "sha512-m9m/fXsXLiHfwdgydIFnpk+7jlVbnvlK5B2EKiPdLUb6WX654ZaaEWJUjk8TftRbZpK0XibovlLWX4KIZhV6jw==", + "dependencies": { + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, "node_modules/@babel/plugin-proposal-class-properties": { "version": "7.18.6", "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz", @@ -813,11 +928,32 @@ "@babel/core": "^7.0.0-0" } }, + 
"node_modules/@babel/plugin-proposal-private-property-in-object": { + "version": "7.21.0-placeholder-for-preset-env.2", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz", + "integrity": "sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w==", + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, "node_modules/@babel/plugin-syntax-class-properties": { "version": "7.12.13", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", - "dev": true, "dependencies": { "@babel/helper-plugin-utils": "^7.12.13" }, @@ -825,13 +961,12 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-syntax-flow": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.23.3.tgz", - "integrity": "sha512-YZiAIpkJAwQXBJLIQbRFayR5c+gJ35Vcz3bg954k7cd73zqjvhacJuL9RbrzPz8qPmZdgqP6EUKwy0PCNhaaPA==", - "dev": true, + "node_modules/@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.14.5" }, "engines": { "node": ">=6.9.0" @@ -840,28 +975,35 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-syntax-import-assertions": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.23.3.tgz", - "integrity": "sha512-lPgDSU+SJLK3xmFDTV2ZRQAiM7UuUjGidwBywFavObCiZc1BeAAcMtHJKUya92hPHO+at63JJPLygilZard8jw==", - "dev": true, + "node_modules/@babel/plugin-syntax-dynamic-import": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", + "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.8.0" }, - "engines": { - "node": ">=6.9.0" + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-export-namespace-from": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz", + "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.3" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, - 
"node_modules/@babel/plugin-syntax-jsx": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.23.3.tgz", - "integrity": "sha512-EB2MELswq55OHUoRZLGg/zC7QWUKfNLpE57m/S2yr1uEneIgsTgrSzXP3NXEsMkVn76OlaVVnzN+ugObuYGwhg==", + "node_modules/@babel/plugin-syntax-flow": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.24.1.tgz", + "integrity": "sha512-sxi2kLTI5DeW5vDtMUsk4mTPwvlUDbjOnoWayhynCwrw4QXRld4QEYwqzY8JmQXaJUtgUuCIurtSRH5sn4c7mA==", "dev": true, "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" }, "engines": { "node": ">=6.9.0" @@ -870,25 +1012,26 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-syntax-object-rest-spread": { - "version": "7.8.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", - "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", - "dev": true, + "node_modules/@babel/plugin-syntax-import-assertions": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.24.1.tgz", + "integrity": "sha512-IuwnI5XnuF189t91XbxmXeCDz3qs6iDRO7GJ++wcfgeXNs/8FmIlKcpDSXNVyuLQxlwvskmI3Ct73wUODkJBlQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.8.0" + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-arrow-functions": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.23.3.tgz", - "integrity": "sha512-NzQcQrzaQPkaEwoTm4Mhyl8jI1huEL/WWIEvudjTCMJ9aBZNpsJbMASx7EQECtQQPS/DcnFpo0FIh3LvEO9cxQ==", - "dev": true, + "node_modules/@babel/plugin-syntax-import-attributes": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.24.1.tgz", + "integrity": "sha512-zhQTMH0X2nVLnb04tz+s7AMuasX8U0FnpE+nHTOhSOINjWMnopoZTxtIKsd45n4GQ/HIZLyfIpoul8e2m0DnRA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" }, "engines": { "node": ">=6.9.0" @@ -897,28 +1040,34 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-block-scoped-functions": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.23.3.tgz", - "integrity": "sha512-vI+0sIaPIO6CNuM9Kk5VmXcMVRiOpDh7w2zZt9GXzmE/9KD70CUEVhvPR/etAeNK/FAEkhxQtXOzVF3EuRL41A==", - "dev": true, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.10.4" }, - "engines": { - "node": ">=6.9.0" + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + 
"integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-block-scoping": { - "version": "7.23.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.23.4.tgz", - "integrity": "sha512-0QqbP6B6HOh7/8iNR4CQU2Th/bbRtBp4KS9vcaZd1fZ0wSh5Fyssg0UCIHwxh+ka+pNDREbVLQnHCMHKZfPwfw==", - "dev": true, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.24.1.tgz", + "integrity": "sha512-2eCtxZXf+kbkMIsXS4poTvT4Yu5rXiRa+9xGVT56raghjmBTKMpFNc9R4IDiB4emao9eO22Ox7CxuJG7BgExqA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" }, "engines": { "node": ">=6.9.0" @@ -927,67 +1076,78 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-classes": { - "version": "7.23.8", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.23.8.tgz", - "integrity": "sha512-yAYslGsY1bX6Knmg46RjiCiNSwJKv2IUC8qOdYKqMMr0491SXFhcHqOdRDeCRohOOIzwN/90C6mQ9qAKgrP7dg==", - "dev": true, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", "dependencies": { - "@babel/helper-annotate-as-pure": "^7.22.5", - "@babel/helper-compilation-targets": "^7.23.6", - "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-function-name": "^7.23.0", - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-replace-supers": "^7.22.20", - "@babel/helper-split-export-declaration": "^7.22.6", - "globals": "^11.1.0" + "@babel/helper-plugin-utils": "^7.10.4" }, - "engines": { - "node": ">=6.9.0" + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-computed-properties": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.23.3.tgz", - "integrity": "sha512-dTj83UVTLw/+nbiHqQSFdwO9CbTtwq1DsDqm3CUEtDrZNET5rT5E6bIdTlOftDTDLMYxvxHNEYO4B9SLl8SLZw==", - "dev": true, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/template": "^7.22.15" + "@babel/helper-plugin-utils": "^7.10.4" }, - "engines": { - "node": ">=6.9.0" + 
"peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-destructuring": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.23.3.tgz", - "integrity": "sha512-n225npDqjDIr967cMScVKHXJs7rout1q+tt50inyBCPkyZ8KxeI6d+GIbSBTT/w/9WdlWDOej3V9HE5Lgk57gw==", - "dev": true, + "node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.8.0" }, - "engines": { - "node": ">=6.9.0" + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-flow-strip-types": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.23.3.tgz", - "integrity": "sha512-26/pQTf9nQSNVJCrLB1IkHUKyPxR+lMrH2QDPG89+Znu9rAMbtrybdbWeE9bb7gzjmE5iXHEY+e0HUwM6Co93Q==", - "dev": true, + "node_modules/@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/plugin-syntax-flow": "^7.23.3" + "@babel/helper-plugin-utils": "^7.14.5" }, "engines": { "node": ">=6.9.0" @@ -996,14 +1156,12 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-for-of": { - "version": "7.23.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.23.6.tgz", - "integrity": "sha512-aYH4ytZ0qSuBbpfhuofbg/e96oQ7U2w1Aw/UQmKT+1l39uEhUPoFS3fHevDc1G0OvewyDudfMKY1OulczHzWIw==", - "dev": true, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5" + "@babel/helper-plugin-utils": "^7.14.5" }, 
"engines": { "node": ">=6.9.0" @@ -1012,15 +1170,12 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-function-name": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.23.3.tgz", - "integrity": "sha512-I1QXp1LxIvt8yLaib49dRW5Okt7Q4oaxao6tFVKS/anCdEOMtYwWVKoiOA1p34GOWIZjUK0E+zCp7+l1pfQyiw==", - "dev": true, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.24.1.tgz", + "integrity": "sha512-Yhnmvy5HZEnHUty6i++gcfH1/l68AHnItFHnaCv6hn9dNh0hQvvQJsxpi4BMBFN5DLeHBuucT/0DgzXif/OyRw==", "dependencies": { - "@babel/helper-compilation-targets": "^7.22.15", - "@babel/helper-function-name": "^7.23.0", - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" }, "engines": { "node": ">=6.9.0" @@ -1029,28 +1184,27 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-literals": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.23.3.tgz", - "integrity": "sha512-wZ0PIXRxnwZvl9AYpqNUxpZ5BiTGrYt7kueGQ+N5FiQ7RCOD4cm8iShd6S6ggfVIWaJf2EMk8eRzAh52RfP4rQ==", - "dev": true, + "node_modules/@babel/plugin-syntax-unicode-sets-regex": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz", + "integrity": "sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-create-regexp-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" }, "engines": { "node": ">=6.9.0" }, "peerDependencies": { - "@babel/core": "^7.0.0-0" + "@babel/core": "^7.0.0" } }, - "node_modules/@babel/plugin-transform-member-expression-literals": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.23.3.tgz", - "integrity": "sha512-sC3LdDBDi5x96LA+Ytekz2ZPk8i/Ck+DEuDbRAll5rknJ5XRTSaPKEYwomLcs1AA8wg9b3KjIQRsnApj+q51Ag==", - "dev": true, + "node_modules/@babel/plugin-transform-arrow-functions": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.24.1.tgz", + "integrity": "sha512-ngT/3NkRhsaep9ck9uj2Xhv9+xB1zShY3tM3g6om4xxCELwCDN4g4Aq5dRn48+0hasAql7s2hdBOysCfNpr4fw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" }, "engines": { "node": ">=6.9.0" @@ -1059,15 +1213,15 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-modules-commonjs": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.23.3.tgz", - "integrity": "sha512-aVS0F65LKsdNOtcz6FRCpE4OgsP2OFnW46qNxNIX9h3wuzaNcSQsJysuMwqSibC98HPrf2vCgtxKNwS0DAlgcA==", - "dev": true, + "node_modules/@babel/plugin-transform-async-generator-functions": { + "version": "7.24.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.24.3.tgz", + "integrity": "sha512-Qe26CMYVjpQxJ8zxM1340JFNjZaF+ISWpr1Kt/jGo+ZTUzKkfw/pphEWbRCb+lmSM6k/TOgfYLvmbHkUQ0asIg==", "dependencies": { - 
"@babel/helper-module-transforms": "^7.23.3", - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-simple-access": "^7.22.5" + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-remap-async-to-generator": "^7.22.20", + "@babel/plugin-syntax-async-generators": "^7.8.4" }, "engines": { "node": ">=6.9.0" @@ -1076,14 +1230,14 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-object-super": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.23.3.tgz", - "integrity": "sha512-BwQ8q0x2JG+3lxCVFohg+KbQM7plfpBwThdW9A6TMtWwLsbDA01Ek2Zb/AgDN39BiZsExm4qrXxjk+P1/fzGrA==", - "dev": true, + "node_modules/@babel/plugin-transform-async-to-generator": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.24.1.tgz", + "integrity": "sha512-AawPptitRXp1y0n4ilKcGbRYWfbbzFWz2NqNu7dacYDtFtz0CMjG64b3LQsb3KIgnf4/obcUL78hfaOS7iCUfw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-replace-supers": "^7.22.20" + "@babel/helper-module-imports": "^7.24.1", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-remap-async-to-generator": "^7.22.20" }, "engines": { "node": ">=6.9.0" @@ -1092,13 +1246,12 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-parameters": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.23.3.tgz", - "integrity": "sha512-09lMt6UsUb3/34BbECKVbVwrT9bO6lILWln237z7sLaWnMsTi7Yc9fhX5DLpkJzAGfaReXI22wP41SZmnAA3Vw==", - "dev": true, + "node_modules/@babel/plugin-transform-block-scoped-functions": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.24.1.tgz", + "integrity": "sha512-TWWC18OShZutrv9C6mye1xwtam+uNi2bnTOCBUd5sZxyHOiWbU6ztSROofIMrK84uweEZC219POICK/sTYwfgg==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" }, "engines": { "node": ">=6.9.0" @@ -1107,13 +1260,12 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-property-literals": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.23.3.tgz", - "integrity": "sha512-jR3Jn3y7cZp4oEWPFAlRsSWjxKe4PZILGBSd4nis1TsC5qeSpb+nrtihJuDhNI7QHiVbUaiXa0X2RZY3/TI6Nw==", - "dev": true, + "node_modules/@babel/plugin-transform-block-scoping": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.24.1.tgz", + "integrity": "sha512-h71T2QQvDgM2SmT29UYU6ozjMlAt7s7CSs5Hvy8f8cf/GM/Z4a2zMfN+fjVGaieeCrXR3EdQl6C4gQG+OgmbKw==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" }, "engines": { "node": ">=6.9.0" @@ -1122,17 +1274,535 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/plugin-transform-react-display-name": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.23.3.tgz", - "integrity": "sha512-GnvhtVfA2OAtzdX58FJxU19rhoGeQzyVndw3GgtdECQvQFXPEZIOVULHVZGAYmOgmqjXpVpfocAbSjh99V/Fqw==", + "node_modules/@babel/plugin-transform-class-properties": { 
+ "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.24.1.tgz", + "integrity": "sha512-OMLCXi0NqvJfORTaPQBwqLXHhb93wkBKZ4aNwMl6WtehO7ar+cmp+89iPEQPqxAnxsOKTaMcs3POz3rKayJ72g==", + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.24.1", + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-class-static-block": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.24.1.tgz", + "integrity": "sha512-FUHlKCn6J3ERiu8Dv+4eoz7w8+kFLSyeVG4vDAikwADGjUCoHw/JHokyGtr8OR4UjpwPVivyF+h8Q5iv/JmrtA==", + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.24.1", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-class-static-block": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.12.0" + } + }, + "node_modules/@babel/plugin-transform-classes": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.24.1.tgz", + "integrity": "sha512-ZTIe3W7UejJd3/3R4p7ScyyOoafetUShSf4kCqV0O7F/RiHxVj/wRaRnQlrGwflvcehNA8M42HkAiEDYZu2F1Q==", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/helper-compilation-targets": "^7.23.6", + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-function-name": "^7.23.0", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-replace-supers": "^7.24.1", + "@babel/helper-split-export-declaration": "^7.22.6", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-computed-properties": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.24.1.tgz", + "integrity": "sha512-5pJGVIUfJpOS+pAqBQd+QMaTD2vCL/HcePooON6pDpHgRp4gNRmzyHTPIkXntwKsq3ayUFVfJaIKPw2pOkOcTw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/template": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-destructuring": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.24.1.tgz", + "integrity": "sha512-ow8jciWqNxR3RYbSNVuF4U2Jx130nwnBnhRw6N6h1bOejNkABmcI5X5oz29K4alWX7vf1C+o6gtKXikzRKkVdw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-dotall-regex": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.24.1.tgz", + "integrity": "sha512-p7uUxgSoZwZ2lPNMzUkqCts3xlp8n+o05ikjy7gbtFJSt9gdU88jAmtfmOxHM14noQXBxfgzf2yRWECiNVhTCw==", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.22.15", + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-duplicate-keys": { + "version": 
"7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.24.1.tgz", + "integrity": "sha512-msyzuUnvsjsaSaocV6L7ErfNsa5nDWL1XKNnDePLgmz+WdU4w/J8+AxBMrWfi9m4IxfL5sZQKUPQKDQeeAT6lA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-dynamic-import": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.24.1.tgz", + "integrity": "sha512-av2gdSTyXcJVdI+8aFZsCAtR29xJt0S5tas+Ef8NvBNmD1a+N/3ecMLeMBgfcK+xzsjdLDT6oHt+DFPyeqUbDA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-dynamic-import": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-exponentiation-operator": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.24.1.tgz", + "integrity": "sha512-U1yX13dVBSwS23DEAqU+Z/PkwE9/m7QQy8Y9/+Tdb8UWYaGNDYwTLi19wqIAiROr8sXVum9A/rtiH5H0boUcTw==", + "dependencies": { + "@babel/helper-builder-binary-assignment-operator-visitor": "^7.22.15", + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-export-namespace-from": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.24.1.tgz", + "integrity": "sha512-Ft38m/KFOyzKw2UaJFkWG9QnHPG/Q/2SkOrRk4pNBPg5IPZ+dOxcmkK5IyuBcxiNPyyYowPGUReyBvrvZs7IlQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-export-namespace-from": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-flow-strip-types": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.24.1.tgz", + "integrity": "sha512-iIYPIWt3dUmUKKE10s3W+jsQ3icFkw0JyRVyY1B7G4yK/nngAOHLVx8xlhA6b/Jzl/Y0nis8gjqhqKtRDQqHWQ==", "dev": true, "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-flow": "^7.24.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-for-of": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.24.1.tgz", + "integrity": "sha512-OxBdcnF04bpdQdR3i4giHZNZQn7cm8RQKcSwA17wAAqEELo1ZOwp5FFgeptWUQXFyT9kwHo10aqqauYkRZPCAg==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-function-name": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.24.1.tgz", + "integrity": "sha512-BXmDZpPlh7jwicKArQASrj8n22/w6iymRnvHYYd2zO30DbE277JO20/7yXJT3QxDPtiQiOxQBbZH4TpivNXIxA==", + "dependencies": { 
+ "@babel/helper-compilation-targets": "^7.23.6", + "@babel/helper-function-name": "^7.23.0", + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-json-strings": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.24.1.tgz", + "integrity": "sha512-U7RMFmRvoasscrIFy5xA4gIp8iWnWubnKkKuUGJjsuOH7GfbMkB+XZzeslx2kLdEGdOJDamEmCqOks6e8nv8DQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-json-strings": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-literals": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.24.1.tgz", + "integrity": "sha512-zn9pwz8U7nCqOYIiBaOxoQOtYmMODXTJnkxG4AtX8fPmnCRYWBOHD0qcpwS9e2VDSp1zNJYpdnFMIKb8jmwu6g==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-logical-assignment-operators": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.24.1.tgz", + "integrity": "sha512-OhN6J4Bpz+hIBqItTeWJujDOfNP+unqv/NJgyhlpSqgBTPm37KkMmZV6SYcOj+pnDbdcl1qRGV/ZiIjX9Iy34w==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-member-expression-literals": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.24.1.tgz", + "integrity": "sha512-4ojai0KysTWXzHseJKa1XPNXKRbuUrhkOPY4rEGeR+7ChlJVKxFa3H3Bz+7tWaGKgJAXUWKOGmltN+u9B3+CVg==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-amd": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.24.1.tgz", + "integrity": "sha512-lAxNHi4HVtjnHd5Rxg3D5t99Xm6H7b04hUS7EHIXcUl2EV4yl1gWdqZrNzXnSrHveL9qMdbODlLF55mvgjAfaQ==", + "dependencies": { + "@babel/helper-module-transforms": "^7.23.3", + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-commonjs": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.24.1.tgz", + "integrity": "sha512-szog8fFTUxBfw0b98gEWPaEqF42ZUD/T3bkynW/wtgx2p/XCP55WEsb+VosKceRSd6njipdZvNogqdtI4Q0chw==", + "dependencies": { + "@babel/helper-module-transforms": "^7.23.3", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-simple-access": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + 
"node_modules/@babel/plugin-transform-modules-systemjs": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.24.1.tgz", + "integrity": "sha512-mqQ3Zh9vFO1Tpmlt8QPnbwGHzNz3lpNEMxQb1kAemn/erstyqw1r9KeOlOfo3y6xAnFEcOv2tSyrXfmMk+/YZA==", + "dependencies": { + "@babel/helper-hoist-variables": "^7.22.5", + "@babel/helper-module-transforms": "^7.23.3", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-validator-identifier": "^7.22.20" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-umd": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.24.1.tgz", + "integrity": "sha512-tuA3lpPj+5ITfcCluy6nWonSL7RvaG0AOTeAuvXqEKS34lnLzXpDb0dcP6K8jD0zWZFNDVly90AGFJPnm4fOYg==", + "dependencies": { + "@babel/helper-module-transforms": "^7.23.3", + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.22.5.tgz", + "integrity": "sha512-YgLLKmS3aUBhHaxp5hi1WJTgOUb/NCuDHzGT9z9WTt3YG+CPRhJs6nprbStx6DnWM4dh6gt7SU3sZodbZ08adQ==", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.22.5", "@babel/helper-plugin-utils": "^7.22.5" }, "engines": { "node": ">=6.9.0" }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-new-target": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.24.1.tgz", + "integrity": "sha512-/rurytBM34hYy0HKZQyA0nHbQgQNFm4Q/BOc9Hflxi2X3twRof7NaE5W46j4kQitm7SvACVRXsa6N/tSZxvPug==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-nullish-coalescing-operator": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.24.1.tgz", + "integrity": "sha512-iQ+caew8wRrhCikO5DrUYx0mrmdhkaELgFa+7baMcVuhxIkN7oxt06CZ51D65ugIb1UWRQ8oQe+HXAVM6qHFjw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-numeric-separator": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.24.1.tgz", + "integrity": "sha512-7GAsGlK4cNL2OExJH1DzmDeKnRv/LXq0eLUSvudrehVA5Rgg4bIrqEUW29FbKMBRT0ztSqisv7kjP+XIC4ZMNw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-numeric-separator": "^7.10.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-object-rest-spread": { + "version": "7.24.1", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.24.1.tgz", + "integrity": "sha512-XjD5f0YqOtebto4HGISLNfiNMTTs6tbkFf2TOqJlYKYmbo+mN9Dnpl4SRoofiziuOWMIyq3sZEUqLo3hLITFEA==", + "dependencies": { + "@babel/helper-compilation-targets": "^7.23.6", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-transform-parameters": "^7.24.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-object-super": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.24.1.tgz", + "integrity": "sha512-oKJqR3TeI5hSLRxudMjFQ9re9fBVUU0GICqM3J1mi8MqlhVr6hC/ZN4ttAyMuQR6EZZIY6h/exe5swqGNNIkWQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-replace-supers": "^7.24.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-optional-catch-binding": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.24.1.tgz", + "integrity": "sha512-oBTH7oURV4Y+3EUrf6cWn1OHio3qG/PVwO5J03iSJmBg6m2EhKjkAu/xuaXaYwWW9miYtvbWv4LNf0AmR43LUA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-optional-chaining": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.24.1.tgz", + "integrity": "sha512-n03wmDt+987qXwAgcBlnUUivrZBPZ8z1plL0YvgQalLm+ZE5BMhGm94jhxXtA1wzv1Cu2aaOv1BM9vbVttrzSg==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", + "@babel/plugin-syntax-optional-chaining": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-parameters": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.24.1.tgz", + "integrity": "sha512-8Jl6V24g+Uw5OGPeWNKrKqXPDw2YDjLc53ojwfMcKwlEoETKU9rU0mHUtcg9JntWI/QYzGAXNWEcVHZ+fR+XXg==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-private-methods": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.24.1.tgz", + "integrity": "sha512-tGvisebwBO5em4PaYNqt4fkw56K2VALsAbAakY0FjTYqJp7gfdrgr7YX76Or8/cpik0W6+tj3rZ0uHU9Oil4tw==", + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.24.1", + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-private-property-in-object": { + "version": "7.24.1", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.24.1.tgz", + "integrity": "sha512-pTHxDVa0BpUbvAgX3Gat+7cSciXqUcY9j2VZKTbSB6+VQGpNgNO9ailxTGHSXlqOnX1Hcx1Enme2+yv7VqP9bg==", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/helper-create-class-features-plugin": "^7.24.1", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-property-literals": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.24.1.tgz", + "integrity": "sha512-LetvD7CrHmEx0G442gOomRr66d7q8HzzGGr4PMHGr+5YIm6++Yke+jxj246rpvsbyhJwCLxcTn6zW1P1BSenqA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-constant-elements": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.24.1.tgz", + "integrity": "sha512-QXp1U9x0R7tkiGB0FOk8o74jhnap0FlZ5gNkRIWdG3eP+SvMFg118e1zaWewDzgABb106QSKpVsD3Wgd8t6ifA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-display-name": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.24.1.tgz", + "integrity": "sha512-mvoQg2f9p2qlpDQRBC7M3c3XTr0k7cp/0+kFKKO/7Gtu0LSw16eKB+Fabe2bDT/UpsyasTBBkAnbdsLrkD5XMw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, "peerDependencies": { "@babel/core": "^7.0.0-0" } @@ -1141,7 +1811,6 @@ "version": "7.23.4", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.23.4.tgz", "integrity": "sha512-5xOpoPguCZCRbo/JeHlloSkTA8Bld1J/E1/kLfD1nsuiW1m8tduTA1ERCgIZokDflX/IBzKcqR3l7VlRgiIfHA==", - "dev": true, "dependencies": { "@babel/helper-annotate-as-pure": "^7.22.5", "@babel/helper-module-imports": "^7.22.15", @@ -1156,6 +1825,20 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/plugin-transform-react-jsx-development": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.22.5.tgz", + "integrity": "sha512-bDhuzwWMuInwCYeDeMzyi7TaBgRQei6DqxhbyniL7/VG4RSS7HtSL2QbY4eESy1KJqlWt8g3xeEBGPuo+XqC8A==", + "dependencies": { + "@babel/plugin-transform-react-jsx": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, "node_modules/@babel/plugin-transform-react-jsx-self": { "version": "7.23.3", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.23.3.tgz", @@ -1184,13 +1867,56 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/plugin-transform-react-pure-annotations": { + "version": "7.24.1", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.24.1.tgz", + "integrity": "sha512-+pWEAaDJvSm9aFvJNpLiM2+ktl2Sn2U5DdyiWdZBxmLc6+xGt88dvFqsHiAiDS+8WqUwbDfkKz9jRxK3M0k+kA==", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-regenerator": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.24.1.tgz", + "integrity": "sha512-sJwZBCzIBE4t+5Q4IGLaaun5ExVMRY0lYwos/jNecjMrVCygCdph3IKv0tkP5Fc87e/1+bebAmEAGBfnRD+cnw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "regenerator-transform": "^0.15.2" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-reserved-words": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.24.1.tgz", + "integrity": "sha512-JAclqStUfIwKN15HrsQADFgeZt+wexNQ0uLhuqvqAUFoqPMjEcFCYZBhq0LUdz6dZK/mD+rErhW71fbx8RYElg==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, "node_modules/@babel/plugin-transform-shorthand-properties": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.23.3.tgz", - "integrity": "sha512-ED2fgqZLmexWiN+YNFX26fx4gh5qHDhn1O2gvEhreLW2iI63Sqm4llRLCXALKrCnbN4Jy0VcMQZl/SAzqug/jg==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.24.1.tgz", + "integrity": "sha512-LyjVB1nsJ6gTTUKRjRWx9C1s9hE7dLfP/knKdrfeH9UPtAGjYGgxIbFfx7xyLIEWs7Xe1Gnf8EWiUqfjLhInZA==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" }, "engines": { "node": ">=6.9.0" @@ -1200,12 +1926,11 @@ } }, "node_modules/@babel/plugin-transform-spread": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.23.3.tgz", - "integrity": "sha512-VvfVYlrlBVu+77xVTOAoxQ6mZbnIq5FM0aGBSFEcIh03qHf+zNqA4DC/3XMUozTg7bZV3e3mZQ0i13VB6v5yUg==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.24.1.tgz", + "integrity": "sha512-KjmcIM+fxgY+KxPVbjelJC6hrH1CgtPmTvdXAfn3/a9CnWGSTY7nH4zm5+cjmWJybdcPSsD0++QssDsjcpe47g==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5", + "@babel/helper-plugin-utils": "^7.24.0", "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5" }, "engines": { @@ -1215,13 +1940,259 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/plugin-transform-sticky-regex": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.24.1.tgz", + "integrity": "sha512-9v0f1bRXgPVcPrngOQvLXeGNNVLc8UjMVfebo9ka0WF3/7+aVUHmaJVT3sa0XCzEFioPfPHZiOcYG9qOsH63cw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + 
}, "node_modules/@babel/plugin-transform-template-literals": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.23.3.tgz", - "integrity": "sha512-Flok06AYNp7GV2oJPZZcP9vZdszev6vPBkHLwxwSpaIqx75wn6mUd3UFWsSsA0l8nXAKkyCmL/sR02m8RYGeHg==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.24.1.tgz", + "integrity": "sha512-WRkhROsNzriarqECASCNu/nojeXCDTE/F2HmRgOzi7NGvyfYGq1NEjKBK3ckLfRgGc6/lPAqP0vDOSw3YtG34g==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-typeof-symbol": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.24.1.tgz", + "integrity": "sha512-CBfU4l/A+KruSUoW+vTQthwcAdwuqbpRNB8HQKlZABwHRhsdHZ9fezp4Sn18PeAlYxTNiLMlx4xUBV3AWfg1BA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-typescript": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.24.1.tgz", + "integrity": "sha512-liYSESjX2fZ7JyBFkYG78nfvHlMKE6IpNdTVnxmlYUR+j5ZLsitFbaAE+eJSK2zPPkNWNw4mXL51rQ8WrvdK0w==", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/helper-create-class-features-plugin": "^7.24.1", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-typescript": "^7.24.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-escapes": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.24.1.tgz", + "integrity": "sha512-RlkVIcWT4TLI96zM660S877E7beKlQw7Ig+wqkKBiWfj0zH5Q4h50q6er4wzZKRNSYpfo6ILJ+hrJAGSX2qcNw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-property-regex": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.24.1.tgz", + "integrity": "sha512-Ss4VvlfYV5huWApFsF8/Sq0oXnGO+jB+rijFEFugTd3cwSObUSnUi88djgR5528Csl0uKlrI331kRqe56Ov2Ng==", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.22.15", + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-regex": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.24.1.tgz", + "integrity": "sha512-2A/94wgZgxfTsiLaQ2E36XAOdcZmGAaEEgVmxQWwZXWkGhvoHbaqXcKnU8zny4ycpu3vNqg0L/PcCiYtHtA13g==", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.22.15", + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + 
"node_modules/@babel/plugin-transform-unicode-sets-regex": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.24.1.tgz", + "integrity": "sha512-fqj4WuzzS+ukpgerpAoOnMfQXwUHFxXUZUE84oL2Kao2N8uSlvcpnAidKASgsNgzZHBsHWvcm8s9FPWUhAb8fA==", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.22.15", + "@babel/helper-plugin-utils": "^7.24.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/preset-env": { + "version": "7.24.3", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.24.3.tgz", + "integrity": "sha512-fSk430k5c2ff8536JcPvPWK4tZDwehWLGlBp0wrsBUjZVdeQV6lePbwKWZaZfK2vnh/1kQX1PzAJWsnBmVgGJA==", + "dependencies": { + "@babel/compat-data": "^7.24.1", + "@babel/helper-compilation-targets": "^7.23.6", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-validator-option": "^7.23.5", + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.24.1", + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.24.1", + "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.24.1", + "@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2", + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-dynamic-import": "^7.8.3", + "@babel/plugin-syntax-export-namespace-from": "^7.8.3", + "@babel/plugin-syntax-import-assertions": "^7.24.1", + "@babel/plugin-syntax-import-attributes": "^7.24.1", + "@babel/plugin-syntax-import-meta": "^7.10.4", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5", + "@babel/plugin-syntax-unicode-sets-regex": "^7.18.6", + "@babel/plugin-transform-arrow-functions": "^7.24.1", + "@babel/plugin-transform-async-generator-functions": "^7.24.3", + "@babel/plugin-transform-async-to-generator": "^7.24.1", + "@babel/plugin-transform-block-scoped-functions": "^7.24.1", + "@babel/plugin-transform-block-scoping": "^7.24.1", + "@babel/plugin-transform-class-properties": "^7.24.1", + "@babel/plugin-transform-class-static-block": "^7.24.1", + "@babel/plugin-transform-classes": "^7.24.1", + "@babel/plugin-transform-computed-properties": "^7.24.1", + "@babel/plugin-transform-destructuring": "^7.24.1", + "@babel/plugin-transform-dotall-regex": "^7.24.1", + "@babel/plugin-transform-duplicate-keys": "^7.24.1", + "@babel/plugin-transform-dynamic-import": "^7.24.1", + "@babel/plugin-transform-exponentiation-operator": "^7.24.1", + "@babel/plugin-transform-export-namespace-from": "^7.24.1", + "@babel/plugin-transform-for-of": "^7.24.1", + "@babel/plugin-transform-function-name": "^7.24.1", + "@babel/plugin-transform-json-strings": "^7.24.1", + "@babel/plugin-transform-literals": "^7.24.1", + "@babel/plugin-transform-logical-assignment-operators": "^7.24.1", + 
"@babel/plugin-transform-member-expression-literals": "^7.24.1", + "@babel/plugin-transform-modules-amd": "^7.24.1", + "@babel/plugin-transform-modules-commonjs": "^7.24.1", + "@babel/plugin-transform-modules-systemjs": "^7.24.1", + "@babel/plugin-transform-modules-umd": "^7.24.1", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.22.5", + "@babel/plugin-transform-new-target": "^7.24.1", + "@babel/plugin-transform-nullish-coalescing-operator": "^7.24.1", + "@babel/plugin-transform-numeric-separator": "^7.24.1", + "@babel/plugin-transform-object-rest-spread": "^7.24.1", + "@babel/plugin-transform-object-super": "^7.24.1", + "@babel/plugin-transform-optional-catch-binding": "^7.24.1", + "@babel/plugin-transform-optional-chaining": "^7.24.1", + "@babel/plugin-transform-parameters": "^7.24.1", + "@babel/plugin-transform-private-methods": "^7.24.1", + "@babel/plugin-transform-private-property-in-object": "^7.24.1", + "@babel/plugin-transform-property-literals": "^7.24.1", + "@babel/plugin-transform-regenerator": "^7.24.1", + "@babel/plugin-transform-reserved-words": "^7.24.1", + "@babel/plugin-transform-shorthand-properties": "^7.24.1", + "@babel/plugin-transform-spread": "^7.24.1", + "@babel/plugin-transform-sticky-regex": "^7.24.1", + "@babel/plugin-transform-template-literals": "^7.24.1", + "@babel/plugin-transform-typeof-symbol": "^7.24.1", + "@babel/plugin-transform-unicode-escapes": "^7.24.1", + "@babel/plugin-transform-unicode-property-regex": "^7.24.1", + "@babel/plugin-transform-unicode-regex": "^7.24.1", + "@babel/plugin-transform-unicode-sets-regex": "^7.24.1", + "@babel/preset-modules": "0.1.6-no-external-plugins", + "babel-plugin-polyfill-corejs2": "^0.4.10", + "babel-plugin-polyfill-corejs3": "^0.10.4", + "babel-plugin-polyfill-regenerator": "^0.6.1", + "core-js-compat": "^3.31.0", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/preset-modules": { + "version": "0.1.6-no-external-plugins", + "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz", + "integrity": "sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/types": "^7.4.4", + "esutils": "^2.0.2" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/@babel/preset-react": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.24.1.tgz", + "integrity": "sha512-eFa8up2/8cZXLIpkafhaADTXSnl7IsUFCYenRWrARBz0/qZwcT0RBXpys0LJU4+WfPoF2ZG6ew6s2V6izMCwRA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-validator-option": "^7.23.5", + "@babel/plugin-transform-react-display-name": "^7.24.1", + "@babel/plugin-transform-react-jsx": "^7.23.4", + "@babel/plugin-transform-react-jsx-development": "^7.22.5", + "@babel/plugin-transform-react-pure-annotations": "^7.24.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/preset-typescript": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.24.1.tgz", + "integrity": "sha512-1DBaMmRDpuYQBPWD8Pf/WEwCrtgRHxsZnP4mIy9G/X+hFfbI47Q2G4t1Paakld84+qsk2fSsUPMKg71jkoOOaQ==", "dependencies": { - "@babel/helper-plugin-utils": "^7.22.5" + 
"@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-validator-option": "^7.23.5", + "@babel/plugin-syntax-jsx": "^7.24.1", + "@babel/plugin-transform-modules-commonjs": "^7.24.1", + "@babel/plugin-transform-typescript": "^7.24.1" }, "engines": { "node": ">=6.9.0" @@ -1230,10 +2201,15 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/regjsgen": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/@babel/regjsgen/-/regjsgen-0.8.0.tgz", + "integrity": "sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==" + }, "node_modules/@babel/runtime": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.9.tgz", - "integrity": "sha512-0CX6F+BI2s9dkUqr08KFrAIZgNFj75rdBU/DjCyYLIaV/quFjkk6T+EJ2LkZHyZTbEV4L5p97mNkUsHl2wLFAw==", + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.1.tgz", + "integrity": "sha512-+BIznRzyqBf+2wCTxcKE3wDjfGeCoVE61KSHGpkzqrLi8qxqFwBeUFyId2cxkTmm55fzDGnm0+yCxaxygrLUnQ==", "dependencies": { "regenerator-runtime": "^0.14.0" }, @@ -1242,13 +2218,13 @@ } }, "node_modules/@babel/template": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.23.9.tgz", - "integrity": "sha512-+xrD2BWLpvHKNmX2QbpdpsBaWnRxahMwJjO+KZk2JOElj5nSmKezyS1B4u+QbHMTX69t4ukm6hh9lsYQ7GHCKA==", + "version": "7.24.0", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.0.tgz", + "integrity": "sha512-Bkf2q8lMB0AFpX0NFEqSbx1OkTHf0f+0j82mkw+ZpzBnkk7e9Ql0891vlfgi+kHwOk8tQjiQHpqh4LaSa0fKEA==", "dependencies": { "@babel/code-frame": "^7.23.5", - "@babel/parser": "^7.23.9", - "@babel/types": "^7.23.9" + "@babel/parser": "^7.24.0", + "@babel/types": "^7.24.0" }, "engines": { "node": ">=6.9.0" @@ -1275,9 +2251,9 @@ } }, "node_modules/@babel/types": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.9.tgz", - "integrity": "sha512-dQjSq/7HaSjRM43FFGnv5keM2HsxpmyV1PfaSVm0nzzjwwTmjOe6J4bC8e3+pTEIgHaHj+1ZlLThRJ2auc/w1Q==", + "version": "7.24.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.0.tgz", + "integrity": "sha512-+j7a5c253RfKh8iABBhywc8NSfP5LURe7Uh4qpsh6jc+aLJguvmIUBdjSdEMQv2bENrCR5MfRdjGo7vzS/ob7w==", "dependencies": { "@babel/helper-string-parser": "^7.23.4", "@babel/helper-validator-identifier": "^7.22.20", @@ -1377,148 +2353,493 @@ "@lezer/markdown": "^1.0.0" } }, - "node_modules/@codemirror/language": { - "version": "6.10.0", - "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.10.0.tgz", - "integrity": "sha512-2vaNn9aPGCRFKWcHPFksctzJ8yS5p7YoaT+jHpc0UGKzNuAIx4qy6R5wiqbP+heEEdyaABA582mNqSHzSoYdmg==", - "dependencies": { - "@codemirror/state": "^6.0.0", - "@codemirror/view": "^6.23.0", - "@lezer/common": "^1.1.0", - "@lezer/highlight": "^1.0.0", - "@lezer/lr": "^1.0.0", - "style-mod": "^4.0.0" + "node_modules/@codemirror/language": { + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.10.0.tgz", + "integrity": "sha512-2vaNn9aPGCRFKWcHPFksctzJ8yS5p7YoaT+jHpc0UGKzNuAIx4qy6R5wiqbP+heEEdyaABA582mNqSHzSoYdmg==", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.23.0", + "@lezer/common": "^1.1.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0", + "style-mod": "^4.0.0" + } + }, + "node_modules/@codemirror/lint": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.5.0.tgz", + "integrity": 
"sha512-+5YyicIaaAZKU8K43IQi8TBy6mF6giGeWAH7N96Z5LC30Wm5JMjqxOYIE9mxwMG1NbhT2mA3l9hA4uuKUM3E5g==", + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "crelt": "^1.0.5" + } + }, + "node_modules/@codemirror/state": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.4.1.tgz", + "integrity": "sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A==" + }, + "node_modules/@codemirror/view": { + "version": "6.26.1", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.26.1.tgz", + "integrity": "sha512-wLw0t3R9AwOSQThdZ5Onw8QQtem5asE7+bPlnzc57eubPqiuJKIzwjMZ+C42vQett+iva+J8VgFV4RYWDBh5FA==", + "dependencies": { + "@codemirror/state": "^6.4.0", + "style-mod": "^4.1.0", + "w3c-keyname": "^2.2.4" + } + }, + "node_modules/@colors/colors": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", + "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", + "dev": true, + "optional": true, + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "devOptional": true, + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "devOptional": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@cypress/request": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@cypress/request/-/request-3.0.1.tgz", + "integrity": "sha512-TWivJlJi8ZDx2wGOw1dbLuHJKUYX7bWySw377nlnGOW3hP9/MUKIsEdXT/YngWxVdgNCHRBmFlBipE+5/2ZZlQ==", + "dev": true, + "dependencies": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "http-signature": "~1.3.6", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "performance-now": "^2.1.0", + "qs": "6.10.4", + "safe-buffer": "^5.1.2", + "tough-cookie": "^4.1.3", + "tunnel-agent": "^0.6.0", + "uuid": "^8.3.2" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@cypress/request/node_modules/form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dev": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/@cypress/xvfb": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz", + "integrity": "sha512-skbBzPggOVYCbnGgV+0dmBdW/s77ZkAOXIC1knS8NagwDjBrNC1LuXtQJeiN6l+m7lzmHtaoUw/ctJKdqkG57Q==", + "dev": true, + 
"dependencies": { + "debug": "^3.1.0", + "lodash.once": "^4.1.1" + } + }, + "node_modules/@cypress/xvfb/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/@emotion/is-prop-valid": { + "version": "0.8.8", + "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz", + "integrity": "sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==", + "optional": true, + "dependencies": { + "@emotion/memoize": "0.7.4" + } + }, + "node_modules/@emotion/memoize": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.4.tgz", + "integrity": "sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw==", + "optional": true + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.20.2.tgz", + "integrity": "sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.20.2.tgz", + "integrity": "sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.20.2.tgz", + "integrity": "sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.20.2.tgz", + "integrity": "sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.20.2.tgz", + "integrity": "sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.20.2.tgz", + "integrity": "sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.2.tgz", + "integrity": 
"sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.20.2.tgz", + "integrity": "sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.20.2.tgz", + "integrity": "sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.20.2.tgz", + "integrity": "sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.20.2.tgz", + "integrity": "sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.20.2.tgz", + "integrity": "sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==", + "cpu": [ + "loong64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" } }, - "node_modules/@codemirror/lint": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.5.0.tgz", - "integrity": "sha512-+5YyicIaaAZKU8K43IQi8TBy6mF6giGeWAH7N96Z5LC30Wm5JMjqxOYIE9mxwMG1NbhT2mA3l9hA4uuKUM3E5g==", - "dependencies": { - "@codemirror/state": "^6.0.0", - "@codemirror/view": "^6.0.0", - "crelt": "^1.0.5" + "node_modules/@esbuild/linux-mips64el": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.20.2.tgz", + "integrity": "sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==", + "cpu": [ + "mips64el" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" } }, - "node_modules/@codemirror/state": { - "version": "6.4.0", - "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.4.0.tgz", - "integrity": "sha512-hm8XshYj5Fo30Bb922QX9hXB/bxOAVH+qaqHBzw5TKa72vOeslyGwd4X8M0c1dJ9JqxlaMceOQ8RsL9tC7gU0A==" + "node_modules/@esbuild/linux-ppc64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.20.2.tgz", + "integrity": "sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } }, - "node_modules/@codemirror/view": { - "version": "6.23.1", - "resolved": 
"https://registry.npmjs.org/@codemirror/view/-/view-6.23.1.tgz", - "integrity": "sha512-J2Xnn5lFYT1ZN/5ewEoMBCmLlL71lZ3mBdb7cUEuHhX2ESoSrNEucpsDXpX22EuTGm9LOgC9v4Z0wx+Ez8QmGA==", - "dependencies": { - "@codemirror/state": "^6.4.0", - "style-mod": "^4.1.0", - "w3c-keyname": "^2.2.4" + "node_modules/@esbuild/linux-riscv64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.20.2.tgz", + "integrity": "sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==", + "cpu": [ + "riscv64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" } }, - "node_modules/@colors/colors": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", - "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==", - "dev": true, + "node_modules/@esbuild/linux-s390x": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.20.2.tgz", + "integrity": "sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==", + "cpu": [ + "s390x" + ], "optional": true, + "os": [ + "linux" + ], "engines": { - "node": ">=0.1.90" + "node": ">=12" } }, - "node_modules/@cspotcode/source-map-support": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", - "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", - "devOptional": true, - "dependencies": { - "@jridgewell/trace-mapping": "0.3.9" - }, + "node_modules/@esbuild/linux-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.20.2.tgz", + "integrity": "sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], "engines": { "node": ">=12" } }, - "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { - "version": "0.3.9", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", - "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", - "devOptional": true, - "dependencies": { - "@jridgewell/resolve-uri": "^3.0.3", - "@jridgewell/sourcemap-codec": "^1.4.10" + "node_modules/@esbuild/netbsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.20.2.tgz", + "integrity": "sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" } }, - "node_modules/@cypress/request": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@cypress/request/-/request-3.0.1.tgz", - "integrity": "sha512-TWivJlJi8ZDx2wGOw1dbLuHJKUYX7bWySw377nlnGOW3hP9/MUKIsEdXT/YngWxVdgNCHRBmFlBipE+5/2ZZlQ==", - "dev": true, - "dependencies": { - "aws-sign2": "~0.7.0", - "aws4": "^1.8.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.6", - "extend": "~3.0.2", - "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "http-signature": "~1.3.6", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.19", - "performance-now": "^2.1.0", - "qs": "6.10.4", - 
"safe-buffer": "^5.1.2", - "tough-cookie": "^4.1.3", - "tunnel-agent": "^0.6.0", - "uuid": "^8.3.2" - }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.2.tgz", + "integrity": "sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "openbsd" + ], "engines": { - "node": ">= 6" + "node": ">=12" } }, - "node_modules/@cypress/request/node_modules/form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "dev": true, - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.20.2.tgz", + "integrity": "sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "sunos" + ], "engines": { - "node": ">= 0.12" + "node": ">=12" } }, - "node_modules/@cypress/xvfb": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz", - "integrity": "sha512-skbBzPggOVYCbnGgV+0dmBdW/s77ZkAOXIC1knS8NagwDjBrNC1LuXtQJeiN6l+m7lzmHtaoUw/ctJKdqkG57Q==", - "dev": true, - "dependencies": { - "debug": "^3.1.0", - "lodash.once": "^4.1.1" + "node_modules/@esbuild/win32-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.20.2.tgz", + "integrity": "sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" } }, - "node_modules/@cypress/xvfb/node_modules/debug": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", - "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", - "dev": true, - "dependencies": { - "ms": "^2.1.1" + "node_modules/@esbuild/win32-ia32": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.20.2.tgz", + "integrity": "sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" } }, - "node_modules/@esbuild/darwin-arm64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz", - "integrity": "sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==", + "node_modules/@esbuild/win32-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.20.2.tgz", + "integrity": "sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==", "cpu": [ - "arm64" + "x64" ], "optional": true, "os": [ - "darwin" + "win32" ], "engines": { "node": ">=12" @@ -1599,9 +2920,9 @@ } }, "node_modules/@eslint/js": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.56.0.tgz", - "integrity": 
"sha512-gMsVel9D7f2HLkBma9VbtzZRehRogVRfbr++f06nL2vnCGCNlzOD+/MUov/F4p8myyAHspEhVobgjpX64q5m6A==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz", + "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -1624,60 +2945,186 @@ "@floating-ui/utils": "^0.2.1" } }, - "node_modules/@floating-ui/dom": { - "version": "1.6.3", - "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.3.tgz", - "integrity": "sha512-RnDthu3mzPlQ31Ss/BTwQ1zjzIhr3lk1gZB1OC56h/1vEtaXkESrOqL5fQVMfXpwGtRwX+YsZBdyHtJMQnkArw==", + "node_modules/@floating-ui/dom": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.3.tgz", + "integrity": "sha512-RnDthu3mzPlQ31Ss/BTwQ1zjzIhr3lk1gZB1OC56h/1vEtaXkESrOqL5fQVMfXpwGtRwX+YsZBdyHtJMQnkArw==", + "dependencies": { + "@floating-ui/core": "^1.0.0", + "@floating-ui/utils": "^0.2.0" + } + }, + "node_modules/@floating-ui/react-dom": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.0.8.tgz", + "integrity": "sha512-HOdqOt3R3OGeTKidaLvJKcgg75S6tibQ3Tif4eyd91QnIJWr0NLvoXFpJA/j8HqkFSL68GDca9AuyWEHlhyClw==", + "dependencies": { + "@floating-ui/dom": "^1.6.1" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@floating-ui/utils": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.1.tgz", + "integrity": "sha512-9TANp6GPoMtYzQdt54kfAyMmz1+osLlXdg2ENroU7zzrtflTLrrC/lgrIfaSe+Wu0b89GKccT7vxXA0MoAIO+Q==" + }, + "node_modules/@graphiql/plugin-explorer": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@graphiql/plugin-explorer/-/plugin-explorer-1.0.4.tgz", + "integrity": "sha512-Z0UDhHSX1u4PfiqtlOMrXVrSE11ifC0zycGwhzK+BeglS9z56hknEky7NwJvUb9qC7sTlTmXEgfGLsYb5DjKrg==", + "dependencies": { + "graphiql-explorer": "^0.9.0" + }, + "peerDependencies": { + "@graphiql/react": "^0.20.4", + "graphql": "^15.5.0 || ^16.0.0", + "react": "^16.8.0 || ^17 || ^18", + "react-dom": "^16.8.0 || ^17 || ^18" + } + }, + "node_modules/@graphiql/plugin-explorer/node_modules/graphiql-explorer": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/graphiql-explorer/-/graphiql-explorer-0.9.0.tgz", + "integrity": "sha512-fZC/wsuatqiQDO2otchxriFO0LaWIo/ovF/CQJ1yOudmY0P7pzDiP+l9CEHUiWbizk3e99x6DQG4XG1VxA+d6A==", + "peerDependencies": { + "graphql": "^0.6.0 || ^0.7.0 || ^0.8.0-b || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0", + "react": "^15.6.0 || ^16.0.0", + "react-dom": "^15.6.0 || ^16.0.0" + } + }, + "node_modules/@graphiql/react": { + "version": "0.20.4", + "resolved": "https://registry.npmjs.org/@graphiql/react/-/react-0.20.4.tgz", + "integrity": "sha512-LDgIlHa65pSngk8G2O0hvohNz4B41VUa7Yg6iPwifa1XreXxHIXjhV6FC1qi5oSjdCIRp4T8dkZnHA6iI5eElg==", + "dependencies": { + "@graphiql/toolkit": "^0.9.1", + "@headlessui/react": "^1.7.15", + "@radix-ui/react-dialog": "^1.0.4", + "@radix-ui/react-dropdown-menu": "^2.0.5", + "@radix-ui/react-tooltip": "^1.0.6", + "@radix-ui/react-visually-hidden": "^1.0.3", + "@types/codemirror": "^5.60.8", + "clsx": "^1.2.1", + "codemirror": "^5.65.3", + "codemirror-graphql": "^2.0.11", + "copy-to-clipboard": "^3.2.0", + "framer-motion": "^6.5.1", + "graphql-language-service": "^5.2.0", + "markdown-it": "^12.2.0", + "set-value": "^4.1.0" + }, + "peerDependencies": { + 
"graphql": "^15.5.0 || ^16.0.0", + "react": "^16.8.0 || ^17 || ^18", + "react-dom": "^16.8.0 || ^17 || ^18" + } + }, + "node_modules/@graphiql/react/node_modules/@codemirror/language": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.0.0.tgz", + "integrity": "sha512-rtjk5ifyMzOna1c7PBu7J1VCt0PvA5wy3o8eMVnxMKb7z8KA7JFecvD04dSn14vj/bBaAbqRsGed5OjtofEnLA==", + "peer": true, + "dependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.0.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0", + "style-mod": "^4.0.0" + } + }, + "node_modules/@graphiql/react/node_modules/clsx": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz", + "integrity": "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==", + "engines": { + "node": ">=6" + } + }, + "node_modules/@graphiql/react/node_modules/codemirror-graphql": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/codemirror-graphql/-/codemirror-graphql-2.0.11.tgz", + "integrity": "sha512-j1QDDXKVkpin2VsyS0ke2nAhKal6/N1UJtgnBGrPe3gj9ZSP6/K8Xytft94k0xW6giIU/JhZjvW0GwwERNzbFA==", + "dependencies": { + "@types/codemirror": "^0.0.90", + "graphql-language-service": "5.2.0" + }, + "peerDependencies": { + "@codemirror/language": "6.0.0", + "codemirror": "^5.65.3", + "graphql": "^15.5.0 || ^16.0.0" + } + }, + "node_modules/@graphiql/react/node_modules/codemirror-graphql/node_modules/@types/codemirror": { + "version": "0.0.90", + "resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-0.0.90.tgz", + "integrity": "sha512-8Z9+tSg27NPRGubbUPUCrt5DDG/OWzLph5BvcDykwR5D7RyZh5mhHG0uS1ePKV1YFCA+/cwc4Ey2AJAEFfV3IA==", "dependencies": { - "@floating-ui/core": "^1.0.0", - "@floating-ui/utils": "^0.2.0" + "@types/tern": "*" } }, - "node_modules/@floating-ui/react-dom": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.0.8.tgz", - "integrity": "sha512-HOdqOt3R3OGeTKidaLvJKcgg75S6tibQ3Tif4eyd91QnIJWr0NLvoXFpJA/j8HqkFSL68GDca9AuyWEHlhyClw==", + "node_modules/@graphiql/toolkit": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/@graphiql/toolkit/-/toolkit-0.9.1.tgz", + "integrity": "sha512-LVt9pdk0830so50ZnU2Znb2rclcoWznG8r8asqAENzV0U1FM1kuY0sdPpc/rBc9MmmNgnB6A+WZzDhq6dbhTHA==", "dependencies": { - "@floating-ui/dom": "^1.6.1" + "@n1ru4l/push-pull-async-iterable-iterator": "^3.1.0", + "meros": "^1.1.4" }, "peerDependencies": { - "react": ">=16.8.0", - "react-dom": ">=16.8.0" + "graphql": "^15.5.0 || ^16.0.0", + "graphql-ws": ">= 4.5.0" + }, + "peerDependenciesMeta": { + "graphql-ws": { + "optional": true + } } }, - "node_modules/@floating-ui/utils": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.1.tgz", - "integrity": "sha512-9TANp6GPoMtYzQdt54kfAyMmz1+osLlXdg2ENroU7zzrtflTLrrC/lgrIfaSe+Wu0b89GKccT7vxXA0MoAIO+Q==" + "node_modules/@graphql-codegen/add": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/add/-/add-5.0.2.tgz", + "integrity": "sha512-ouBkSvMFUhda5VoKumo/ZvsZM9P5ZTyDsI8LW18VxSNWOjrTeLXBWHG8Gfaai0HwhflPtCYVABbriEcOmrRShQ==", + "dev": true, + "dependencies": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "tslib": "~2.6.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } }, "node_modules/@graphql-codegen/cli": { - 
"version": "3.3.1", - "resolved": "https://registry.npmjs.org/@graphql-codegen/cli/-/cli-3.3.1.tgz", - "integrity": "sha512-4Es8Y9zFeT0Zx2qRL7L3qXDbbqvXK6aID+8v8lP6gaYD+uWx3Jd4Hsq5vxwVBR+6flm0BW/C85Qm0cvmT7O6LA==", + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/cli/-/cli-5.0.2.tgz", + "integrity": "sha512-MBIaFqDiLKuO4ojN6xxG9/xL9wmfD3ZjZ7RsPjwQnSHBCUXnEkdKvX+JVpx87Pq29Ycn8wTJUguXnTZ7Di0Mlw==", "dev": true, "dependencies": { "@babel/generator": "^7.18.13", "@babel/template": "^7.18.10", "@babel/types": "^7.18.13", - "@graphql-codegen/core": "^3.1.0", - "@graphql-codegen/plugin-helpers": "^4.2.0", - "@graphql-tools/apollo-engine-loader": "^7.3.6", - "@graphql-tools/code-file-loader": "^7.3.17", - "@graphql-tools/git-loader": "^7.2.13", - "@graphql-tools/github-loader": "^7.3.20", - "@graphql-tools/graphql-file-loader": "^7.5.0", - "@graphql-tools/json-file-loader": "^7.4.1", - "@graphql-tools/load": "^7.8.0", - "@graphql-tools/prisma-loader": "^7.2.49", - "@graphql-tools/url-loader": "^7.13.2", - "@graphql-tools/utils": "^9.0.0", - "@parcel/watcher": "^2.1.0", + "@graphql-codegen/client-preset": "^4.2.2", + "@graphql-codegen/core": "^4.0.2", + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-tools/apollo-engine-loader": "^8.0.0", + "@graphql-tools/code-file-loader": "^8.0.0", + "@graphql-tools/git-loader": "^8.0.0", + "@graphql-tools/github-loader": "^8.0.0", + "@graphql-tools/graphql-file-loader": "^8.0.0", + "@graphql-tools/json-file-loader": "^8.0.0", + "@graphql-tools/load": "^8.0.0", + "@graphql-tools/prisma-loader": "^8.0.0", + "@graphql-tools/url-loader": "^8.0.0", + "@graphql-tools/utils": "^10.0.0", "@whatwg-node/fetch": "^0.8.0", "chalk": "^4.1.0", - "cosmiconfig": "^7.0.0", + "cosmiconfig": "^8.1.3", "debounce": "^1.2.0", "detect-indent": "^6.0.0", - "graphql-config": "^4.5.0", + "graphql-config": "^5.0.2", "inquirer": "^8.0.0", "is-glob": "^4.0.1", "jiti": "^1.17.1", @@ -1689,7 +3136,7 @@ "string-env-interpolation": "^1.0.1", "ts-log": "^2.2.3", "tslib": "^2.4.0", - "yaml": "^1.10.0", + "yaml": "^2.3.1", "yargs": "^17.0.0" }, "bin": { @@ -1698,459 +3145,656 @@ "graphql-codegen": "cjs/bin.js", "graphql-codegen-esm": "esm/bin.js" }, + "peerDependencies": { + "@parcel/watcher": "^2.1.0", + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + }, + "peerDependenciesMeta": { + "@parcel/watcher": { + "optional": true + } + } + }, + "node_modules/@graphql-codegen/client-preset": { + "version": "4.2.5", + "resolved": "https://registry.npmjs.org/@graphql-codegen/client-preset/-/client-preset-4.2.5.tgz", + "integrity": "sha512-hAdB6HN8EDmkoBtr0bPUN/7NH6svzqbcTDMWBCRXPESXkl7y80po+IXrXUjsSrvhKG8xkNXgJNz/2mjwHzywcA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.20.2", + "@babel/template": "^7.20.7", + "@graphql-codegen/add": "^5.0.2", + "@graphql-codegen/gql-tag-operations": "4.0.6", + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-codegen/typed-document-node": "^5.0.6", + "@graphql-codegen/typescript": "^4.0.6", + "@graphql-codegen/typescript-operations": "^4.2.0", + "@graphql-codegen/visitor-plugin-common": "^5.1.0", + "@graphql-tools/documents": "^1.0.0", + "@graphql-tools/utils": "^10.0.0", + "@graphql-typed-document-node/core": "3.2.0", + "tslib": "~2.6.0" + }, "peerDependencies": { "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" } }, "node_modules/@graphql-codegen/core": { - "version": "3.1.0", 
- "resolved": "https://registry.npmjs.org/@graphql-codegen/core/-/core-3.1.0.tgz", - "integrity": "sha512-DH1/yaR7oJE6/B+c6ZF2Tbdh7LixF1K8L+8BoSubjNyQ8pNwR4a70mvc1sv6H7qgp6y1bPQ9tKE+aazRRshysw==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/core/-/core-4.0.2.tgz", + "integrity": "sha512-IZbpkhwVqgizcjNiaVzNAzm/xbWT6YnGgeOLwVjm4KbJn3V2jchVtuzHH09G5/WkkLSk2wgbXNdwjM41JxO6Eg==", "dev": true, "dependencies": { - "@graphql-codegen/plugin-helpers": "^4.1.0", - "@graphql-tools/schema": "^9.0.0", - "@graphql-tools/utils": "^9.1.1", - "tslib": "~2.5.0" + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-tools/schema": "^10.0.0", + "@graphql-tools/utils": "^10.0.0", + "tslib": "~2.6.0" }, "peerDependencies": { "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" } }, - "node_modules/@graphql-codegen/core/node_modules/tslib": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.3.tgz", - "integrity": "sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==", - "dev": true + "node_modules/@graphql-codegen/gql-tag-operations": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@graphql-codegen/gql-tag-operations/-/gql-tag-operations-4.0.6.tgz", + "integrity": "sha512-y6iXEDpDNjwNxJw3WZqX1/Znj0QHW7+y8O+t2V8qvbTT+3kb2lr9ntc8By7vCr6ctw9tXI4XKaJgpTstJDOwFA==", + "dev": true, + "dependencies": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-codegen/visitor-plugin-common": "5.1.0", + "@graphql-tools/utils": "^10.0.0", + "auto-bind": "~4.0.0", + "tslib": "~2.6.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } }, "node_modules/@graphql-codegen/plugin-helpers": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/@graphql-codegen/plugin-helpers/-/plugin-helpers-4.2.0.tgz", - "integrity": "sha512-THFTCfg+46PXlXobYJ/OoCX6pzjI+9woQqCjdyKtgoI0tn3Xq2HUUCiidndxUpEYVrXb5pRiRXb7b/ZbMQqD0A==", + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@graphql-codegen/plugin-helpers/-/plugin-helpers-5.0.3.tgz", + "integrity": "sha512-yZ1rpULIWKBZqCDlvGIJRSyj1B2utkEdGmXZTBT/GVayP4hyRYlkd36AJV/LfEsVD8dnsKL5rLz2VTYmRNlJ5Q==", "dev": true, "dependencies": { - "@graphql-tools/utils": "^9.0.0", + "@graphql-tools/utils": "^10.0.0", "change-case-all": "1.0.15", "common-tags": "1.8.2", "import-from": "4.0.0", "lodash": "~4.17.0", - "tslib": "~2.5.0" + "tslib": "~2.6.0" }, "peerDependencies": { "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" } }, - "node_modules/@graphql-codegen/plugin-helpers/node_modules/tslib": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.3.tgz", - "integrity": "sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==", - "dev": true - }, "node_modules/@graphql-codegen/schema-ast": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@graphql-codegen/schema-ast/-/schema-ast-3.0.1.tgz", - "integrity": "sha512-rTKTi4XiW4QFZnrEqetpiYEWVsOFNoiR/v3rY9mFSttXFbIwNXPme32EspTiGWmEEdHY8UuTDtZN3vEcs/31zw==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/schema-ast/-/schema-ast-4.0.2.tgz", + "integrity": "sha512-5mVAOQQK3Oz7EtMl/l3vOQdc2aYClUzVDHHkMvZlunc+KlGgl81j8TLa+X7ANIllqU4fUEsQU3lJmk4hXP6K7Q==", "dev": true, "dependencies": { - 
"@graphql-codegen/plugin-helpers": "^4.1.0", - "@graphql-tools/utils": "^9.0.0", - "tslib": "~2.5.0" + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-tools/utils": "^10.0.0", + "tslib": "~2.6.0" }, "peerDependencies": { "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" } }, - "node_modules/@graphql-codegen/schema-ast/node_modules/tslib": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.3.tgz", - "integrity": "sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==", - "dev": true + "node_modules/@graphql-codegen/typed-document-node": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/@graphql-codegen/typed-document-node/-/typed-document-node-5.0.6.tgz", + "integrity": "sha512-US0J95hOE2/W/h42w4oiY+DFKG7IetEN1mQMgXXeat1w6FAR5PlIz4JrRrEkiVfVetZ1g7K78SOwBD8/IJnDiA==", + "dev": true, + "dependencies": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-codegen/visitor-plugin-common": "5.1.0", + "auto-bind": "~4.0.0", + "change-case-all": "1.0.15", + "tslib": "~2.6.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } }, "node_modules/@graphql-codegen/typescript": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@graphql-codegen/typescript/-/typescript-3.0.4.tgz", - "integrity": "sha512-x4O47447DZrWNtE/l5CU9QzzW4m1RbmCEdijlA3s2flG/y1Ckqdemob4CWfilSm5/tZ3w1junVDY616RDTSvZw==", + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@graphql-codegen/typescript/-/typescript-4.0.6.tgz", + "integrity": "sha512-IBG4N+Blv7KAL27bseruIoLTjORFCT3r+QYyMC3g11uY3/9TPpaUyjSdF70yBe5GIQ6dAgDU+ENUC1v7EPi0rw==", "dev": true, "dependencies": { - "@graphql-codegen/plugin-helpers": "^4.2.0", - "@graphql-codegen/schema-ast": "^3.0.1", - "@graphql-codegen/visitor-plugin-common": "3.1.1", + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-codegen/schema-ast": "^4.0.2", + "@graphql-codegen/visitor-plugin-common": "5.1.0", "auto-bind": "~4.0.0", - "tslib": "~2.5.0" + "tslib": "~2.6.0" }, "peerDependencies": { "graphql": "^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" } }, - "node_modules/@graphql-codegen/typescript/node_modules/tslib": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.3.tgz", - "integrity": "sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==", - "dev": true + "node_modules/@graphql-codegen/typescript-operations": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@graphql-codegen/typescript-operations/-/typescript-operations-4.2.0.tgz", + "integrity": "sha512-lmuwYb03XC7LNRS8oo9M4/vlOrq/wOKmTLBHlltK2YJ1BO/4K/Q9Jdv/jDmJpNydHVR1fmeF4wAfsIp1f9JibA==", + "dev": true, + "dependencies": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-codegen/typescript": "^4.0.6", + "@graphql-codegen/visitor-plugin-common": "5.1.0", + "auto-bind": "~4.0.0", + "tslib": "~2.6.0" + }, + "peerDependencies": { + "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" + } }, "node_modules/@graphql-codegen/visitor-plugin-common": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@graphql-codegen/visitor-plugin-common/-/visitor-plugin-common-3.1.1.tgz", - "integrity": "sha512-uAfp+zu/009R3HUAuTK2AamR1bxIltM6rrYYI6EXSmkM3rFtFsLTuJhjUDj98HcUCszJZrADppz8KKLGRUVlNg==", + 
"version": "5.1.0", + "resolved": "https://registry.npmjs.org/@graphql-codegen/visitor-plugin-common/-/visitor-plugin-common-5.1.0.tgz", + "integrity": "sha512-eamQxtA9bjJqI2lU5eYoA1GbdMIRT2X8m8vhWYsVQVWD3qM7sx/IqJU0kx0J3Vd4/CSd36BzL6RKwksibytDIg==", "dev": true, "dependencies": { - "@graphql-codegen/plugin-helpers": "^4.2.0", - "@graphql-tools/optimize": "^1.3.0", - "@graphql-tools/relay-operation-optimizer": "^6.5.0", - "@graphql-tools/utils": "^9.0.0", + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-tools/optimize": "^2.0.0", + "@graphql-tools/relay-operation-optimizer": "^7.0.0", + "@graphql-tools/utils": "^10.0.0", "auto-bind": "~4.0.0", "change-case-all": "1.0.15", "dependency-graph": "^0.11.0", "graphql-tag": "^2.11.0", "parse-filepath": "^1.0.2", - "tslib": "~2.5.0" + "tslib": "~2.6.0" }, "peerDependencies": { "graphql": "^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0" } }, - "node_modules/@graphql-codegen/visitor-plugin-common/node_modules/tslib": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.3.tgz", - "integrity": "sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==", - "dev": true - }, "node_modules/@graphql-tools/apollo-engine-loader": { - "version": "7.3.26", - "resolved": "https://registry.npmjs.org/@graphql-tools/apollo-engine-loader/-/apollo-engine-loader-7.3.26.tgz", - "integrity": "sha512-h1vfhdJFjnCYn9b5EY1Z91JTF0KB3hHVJNQIsiUV2mpQXZdeOXQoaWeYEKaiI5R6kwBw5PP9B0fv3jfUIG8LyQ==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/apollo-engine-loader/-/apollo-engine-loader-8.0.1.tgz", + "integrity": "sha512-NaPeVjtrfbPXcl+MLQCJLWtqe2/E4bbAqcauEOQ+3sizw1Fc2CNmhHRF8a6W4D0ekvTRRXAMptXYgA2uConbrA==", "dev": true, "dependencies": { "@ardatan/sync-fetch": "^0.0.1", - "@graphql-tools/utils": "^9.2.1", - "@whatwg-node/fetch": "^0.8.0", + "@graphql-tools/utils": "^10.0.13", + "@whatwg-node/fetch": "^0.9.0", "tslib": "^2.4.0" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, + "node_modules/@graphql-tools/apollo-engine-loader/node_modules/@whatwg-node/events": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.1.1.tgz", + "integrity": "sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==", + "dev": true, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/apollo-engine-loader/node_modules/@whatwg-node/fetch": { + "version": "0.9.17", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.9.17.tgz", + "integrity": "sha512-TDYP3CpCrxwxpiNY0UMNf096H5Ihf67BK1iKGegQl5u9SlpEDYrvnV71gWBGJm+Xm31qOy8ATgma9rm8Pe7/5Q==", + "dev": true, + "dependencies": { + "@whatwg-node/node-fetch": "^0.5.7", + "urlpattern-polyfill": "^10.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/apollo-engine-loader/node_modules/@whatwg-node/node-fetch": { + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.5.10.tgz", + "integrity": "sha512-KIAHepie/T1PRkUfze4t+bPlyvpxlWiXTPtcGlbIZ0vWkBJMdRmCg4ZrJ2y4XaO1eTPo1HlWYUuj1WvoIpumqg==", + "dev": true, + "dependencies": { + "@kamilkisiela/fast-url-parser": "^1.1.4", + "@whatwg-node/events": "^0.1.0", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + "tslib": "^2.3.1" + }, + "engines": { + "node": ">=16.0.0" + } + }, + 
"node_modules/@graphql-tools/apollo-engine-loader/node_modules/urlpattern-polyfill": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz", + "integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==", + "dev": true + }, "node_modules/@graphql-tools/batch-execute": { - "version": "8.5.22", - "resolved": "https://registry.npmjs.org/@graphql-tools/batch-execute/-/batch-execute-8.5.22.tgz", - "integrity": "sha512-hcV1JaY6NJQFQEwCKrYhpfLK8frSXDbtNMoTur98u10Cmecy1zrqNKSqhEyGetpgHxaJRqszGzKeI3RuroDN6A==", + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/@graphql-tools/batch-execute/-/batch-execute-9.0.4.tgz", + "integrity": "sha512-kkebDLXgDrep5Y0gK1RN3DMUlLqNhg60OAz0lTCqrYeja6DshxLtLkj+zV4mVbBA4mQOEoBmw6g1LZs3dA84/w==", "dev": true, "dependencies": { - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.0.13", "dataloader": "^2.2.2", "tslib": "^2.4.0", "value-or-promise": "^1.0.12" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/@graphql-tools/code-file-loader": { - "version": "7.3.23", - "resolved": "https://registry.npmjs.org/@graphql-tools/code-file-loader/-/code-file-loader-7.3.23.tgz", - "integrity": "sha512-8Wt1rTtyTEs0p47uzsPJ1vAtfAx0jmxPifiNdmo9EOCuUPyQGEbMaik/YkqZ7QUFIEYEQu+Vgfo8tElwOPtx5Q==", + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/code-file-loader/-/code-file-loader-8.1.1.tgz", + "integrity": "sha512-q4KN25EPSUztc8rA8YUU3ufh721Yk12xXDbtUA+YstczWS7a1RJlghYMFEfR1HsHSYbF7cUqkbnTKSGM3o52bQ==", "dev": true, "dependencies": { - "@graphql-tools/graphql-tag-pluck": "7.5.2", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/graphql-tag-pluck": "8.3.0", + "@graphql-tools/utils": "^10.0.13", "globby": "^11.0.3", "tslib": "^2.4.0", "unixify": "^1.0.0" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/@graphql-tools/delegate": { - "version": "9.0.35", - "resolved": "https://registry.npmjs.org/@graphql-tools/delegate/-/delegate-9.0.35.tgz", - "integrity": "sha512-jwPu8NJbzRRMqi4Vp/5QX1vIUeUPpWmlQpOkXQD2r1X45YsVceyUUBnktCrlJlDB4jPRVy7JQGwmYo3KFiOBMA==", + "version": "10.0.4", + "resolved": "https://registry.npmjs.org/@graphql-tools/delegate/-/delegate-10.0.4.tgz", + "integrity": "sha512-WswZRbQZMh/ebhc8zSomK9DIh6Pd5KbuiMsyiKkKz37TWTrlCOe+4C/fyrBFez30ksq6oFyCeSKMwfrCbeGo0Q==", "dev": true, "dependencies": { - "@graphql-tools/batch-execute": "^8.5.22", - "@graphql-tools/executor": "^0.0.20", - "@graphql-tools/schema": "^9.0.19", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/batch-execute": "^9.0.4", + "@graphql-tools/executor": "^1.2.1", + "@graphql-tools/schema": "^10.0.3", + "@graphql-tools/utils": "^10.0.13", "dataloader": "^2.2.2", - "tslib": "^2.5.0", - "value-or-promise": "^1.0.12" + "tslib": "^2.5.0" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + } + }, + "node_modules/@graphql-tools/documents": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@graphql-tools/documents/-/documents-1.0.0.tgz", + "integrity": "sha512-rHGjX1vg/nZ2DKqRGfDPNC55CWZBMldEVcH+91BThRa6JeT80NqXknffLLEZLRUxyikCfkwMsk6xR3UNMqG0Rg==", + "dev": true, + "dependencies": { + "lodash.sortby": "^4.7.0", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" 
}, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/@graphql-tools/executor": { - "version": "0.0.20", - "resolved": "https://registry.npmjs.org/@graphql-tools/executor/-/executor-0.0.20.tgz", - "integrity": "sha512-GdvNc4vszmfeGvUqlcaH1FjBoguvMYzxAfT6tDd4/LgwymepHhinqLNA5otqwVLW+JETcDaK7xGENzFomuE6TA==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor/-/executor-1.2.5.tgz", + "integrity": "sha512-s7sW4K3BUNsk9sjq+vNicwb9KwcR3G55uS/CI8KZQ4x0ZdeYMIwpeU9MVeORCCpHuQyTaV+/VnO0hFrS/ygzsg==", "dev": true, "dependencies": { - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.1.1", "@graphql-typed-document-node/core": "3.2.0", "@repeaterjs/repeater": "^3.0.4", "tslib": "^2.4.0", "value-or-promise": "^1.0.12" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/@graphql-tools/executor-graphql-ws": { - "version": "0.0.14", - "resolved": "https://registry.npmjs.org/@graphql-tools/executor-graphql-ws/-/executor-graphql-ws-0.0.14.tgz", - "integrity": "sha512-P2nlkAsPZKLIXImFhj0YTtny5NQVGSsKnhi7PzXiaHSXc6KkzqbWZHKvikD4PObanqg+7IO58rKFpGXP7eeO+w==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-graphql-ws/-/executor-graphql-ws-1.1.2.tgz", + "integrity": "sha512-+9ZK0rychTH1LUv4iZqJ4ESbmULJMTsv3XlFooPUngpxZkk00q6LqHKJRrsLErmQrVaC7cwQCaRBJa0teK17Lg==", "dev": true, "dependencies": { - "@graphql-tools/utils": "^9.2.1", - "@repeaterjs/repeater": "3.0.4", + "@graphql-tools/utils": "^10.0.13", "@types/ws": "^8.0.0", - "graphql-ws": "5.12.1", - "isomorphic-ws": "5.0.0", + "graphql-ws": "^5.14.0", + "isomorphic-ws": "^5.0.0", "tslib": "^2.4.0", - "ws": "8.13.0" + "ws": "^8.13.0" }, - "peerDependencies": { - "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" - } - }, - "node_modules/@graphql-tools/executor-graphql-ws/node_modules/@repeaterjs/repeater": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@repeaterjs/repeater/-/repeater-3.0.4.tgz", - "integrity": "sha512-AW8PKd6iX3vAZ0vA43nOUOnbq/X5ihgU+mSXXqunMkeQADGiqw/PY0JNeYtD5sr0PAy51YPgAPbDoeapv9r8WA==", - "dev": true - }, - "node_modules/@graphql-tools/executor-graphql-ws/node_modules/ws": { - "version": "8.13.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", - "integrity": "sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==", - "dev": true, "engines": { - "node": ">=10.0.0" + "node": ">=16.0.0" }, "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/@graphql-tools/executor-http": { - "version": "0.1.10", - "resolved": "https://registry.npmjs.org/@graphql-tools/executor-http/-/executor-http-0.1.10.tgz", - "integrity": "sha512-hnAfbKv0/lb9s31LhWzawQ5hghBfHS+gYWtqxME6Rl0Aufq9GltiiLBcl7OVVOnkLF0KhwgbYP1mB5VKmgTGpg==", + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-http/-/executor-http-1.0.9.tgz", + "integrity": "sha512-+NXaZd2MWbbrWHqU4EhXcrDbogeiCDmEbrAN+rMn4Nu2okDjn2MTFDbTIab87oEubQCH4Te1wDkWPKrzXup7+Q==", "dev": true, "dependencies": { - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.0.13", "@repeaterjs/repeater": "^3.0.4", - "@whatwg-node/fetch": "^0.8.1", - "dset": "^3.1.2", + 
"@whatwg-node/fetch": "^0.9.0", "extract-files": "^11.0.0", "meros": "^1.2.1", "tslib": "^2.4.0", "value-or-promise": "^1.0.12" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, - "node_modules/@graphql-tools/executor-legacy-ws": { - "version": "0.0.11", - "resolved": "https://registry.npmjs.org/@graphql-tools/executor-legacy-ws/-/executor-legacy-ws-0.0.11.tgz", - "integrity": "sha512-4ai+NnxlNfvIQ4c70hWFvOZlSUN8lt7yc+ZsrwtNFbFPH/EroIzFMapAxM9zwyv9bH38AdO3TQxZ5zNxgBdvUw==", + "node_modules/@graphql-tools/executor-http/node_modules/@whatwg-node/events": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.1.1.tgz", + "integrity": "sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==", + "dev": true, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/executor-http/node_modules/@whatwg-node/fetch": { + "version": "0.9.17", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.9.17.tgz", + "integrity": "sha512-TDYP3CpCrxwxpiNY0UMNf096H5Ihf67BK1iKGegQl5u9SlpEDYrvnV71gWBGJm+Xm31qOy8ATgma9rm8Pe7/5Q==", "dev": true, "dependencies": { - "@graphql-tools/utils": "^9.2.1", - "@types/ws": "^8.0.0", - "isomorphic-ws": "5.0.0", - "tslib": "^2.4.0", - "ws": "8.13.0" + "@whatwg-node/node-fetch": "^0.5.7", + "urlpattern-polyfill": "^10.0.0" }, - "peerDependencies": { - "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" + "engines": { + "node": ">=16.0.0" } }, - "node_modules/@graphql-tools/executor-legacy-ws/node_modules/ws": { - "version": "8.13.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", - "integrity": "sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==", + "node_modules/@graphql-tools/executor-http/node_modules/@whatwg-node/node-fetch": { + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.5.10.tgz", + "integrity": "sha512-KIAHepie/T1PRkUfze4t+bPlyvpxlWiXTPtcGlbIZ0vWkBJMdRmCg4ZrJ2y4XaO1eTPo1HlWYUuj1WvoIpumqg==", "dev": true, + "dependencies": { + "@kamilkisiela/fast-url-parser": "^1.1.4", + "@whatwg-node/events": "^0.1.0", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + "tslib": "^2.3.1" + }, "engines": { - "node": ">=10.0.0" + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/executor-http/node_modules/urlpattern-polyfill": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz", + "integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==", + "dev": true + }, + "node_modules/@graphql-tools/executor-legacy-ws": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-legacy-ws/-/executor-legacy-ws-1.0.6.tgz", + "integrity": "sha512-lDSxz9VyyquOrvSuCCnld3256Hmd+QI2lkmkEv7d4mdzkxkK4ddAWW1geQiWrQvWmdsmcnGGlZ7gDGbhEExwqg==", + "dev": true, + "dependencies": { + "@graphql-tools/utils": "^10.0.13", + "@types/ws": "^8.0.0", + "isomorphic-ws": "^5.0.0", + "tslib": "^2.4.0", + "ws": "^8.15.0" }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" + "engines": { + "node": ">=16.0.0" }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } + "peerDependencies": { + "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, 
"node_modules/@graphql-tools/git-loader": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/@graphql-tools/git-loader/-/git-loader-7.3.0.tgz", - "integrity": "sha512-gcGAK+u16eHkwsMYqqghZbmDquh8QaO24Scsxq+cVR+vx1ekRlsEiXvu+yXVDbZdcJ6PBIbeLcQbEu+xhDLmvQ==", + "version": "8.0.5", + "resolved": "https://registry.npmjs.org/@graphql-tools/git-loader/-/git-loader-8.0.5.tgz", + "integrity": "sha512-P97/1mhruDiA6D5WUmx3n/aeGPLWj2+4dpzDOxFGGU+z9NcI/JdygMkeFpGZNHeJfw+kHfxgPcMPnxHcyhAoVA==", "dev": true, "dependencies": { - "@graphql-tools/graphql-tag-pluck": "7.5.2", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/graphql-tag-pluck": "8.3.0", + "@graphql-tools/utils": "^10.0.13", "is-glob": "4.0.3", "micromatch": "^4.0.4", "tslib": "^2.4.0", "unixify": "^1.0.0" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/@graphql-tools/github-loader": { - "version": "7.3.28", - "resolved": "https://registry.npmjs.org/@graphql-tools/github-loader/-/github-loader-7.3.28.tgz", - "integrity": "sha512-OK92Lf9pmxPQvjUNv05b3tnVhw0JRfPqOf15jZjyQ8BfdEUrJoP32b4dRQQem/wyRL24KY4wOfArJNqzpsbwCA==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/github-loader/-/github-loader-8.0.1.tgz", + "integrity": "sha512-W4dFLQJ5GtKGltvh/u1apWRFKBQOsDzFxO9cJkOYZj1VzHCpRF43uLST4VbCfWve+AwBqOuKr7YgkHoxpRMkcg==", "dev": true, "dependencies": { "@ardatan/sync-fetch": "^0.0.1", - "@graphql-tools/executor-http": "^0.1.9", - "@graphql-tools/graphql-tag-pluck": "^7.4.6", - "@graphql-tools/utils": "^9.2.1", - "@whatwg-node/fetch": "^0.8.0", + "@graphql-tools/executor-http": "^1.0.9", + "@graphql-tools/graphql-tag-pluck": "^8.0.0", + "@graphql-tools/utils": "^10.0.13", + "@whatwg-node/fetch": "^0.9.0", "tslib": "^2.4.0", "value-or-promise": "^1.0.12" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, + "node_modules/@graphql-tools/github-loader/node_modules/@whatwg-node/events": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.1.1.tgz", + "integrity": "sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==", + "dev": true, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/github-loader/node_modules/@whatwg-node/fetch": { + "version": "0.9.17", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.9.17.tgz", + "integrity": "sha512-TDYP3CpCrxwxpiNY0UMNf096H5Ihf67BK1iKGegQl5u9SlpEDYrvnV71gWBGJm+Xm31qOy8ATgma9rm8Pe7/5Q==", + "dev": true, + "dependencies": { + "@whatwg-node/node-fetch": "^0.5.7", + "urlpattern-polyfill": "^10.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/github-loader/node_modules/@whatwg-node/node-fetch": { + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.5.10.tgz", + "integrity": "sha512-KIAHepie/T1PRkUfze4t+bPlyvpxlWiXTPtcGlbIZ0vWkBJMdRmCg4ZrJ2y4XaO1eTPo1HlWYUuj1WvoIpumqg==", + "dev": true, + "dependencies": { + "@kamilkisiela/fast-url-parser": "^1.1.4", + "@whatwg-node/events": "^0.1.0", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + "tslib": "^2.3.1" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/github-loader/node_modules/urlpattern-polyfill": { + "version": "10.0.0", + "resolved": 
"https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz", + "integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==", + "dev": true + }, "node_modules/@graphql-tools/graphql-file-loader": { - "version": "7.5.17", - "resolved": "https://registry.npmjs.org/@graphql-tools/graphql-file-loader/-/graphql-file-loader-7.5.17.tgz", - "integrity": "sha512-hVwwxPf41zOYgm4gdaZILCYnKB9Zap7Ys9OhY1hbwuAuC4MMNY9GpUjoTU3CQc3zUiPoYStyRtUGkHSJZ3HxBw==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/graphql-file-loader/-/graphql-file-loader-8.0.1.tgz", + "integrity": "sha512-7gswMqWBabTSmqbaNyWSmRRpStWlcCkBc73E6NZNlh4YNuiyKOwbvSkOUYFOqFMfEL+cFsXgAvr87Vz4XrYSbA==", "dev": true, "dependencies": { - "@graphql-tools/import": "6.7.18", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/import": "7.0.1", + "@graphql-tools/utils": "^10.0.13", "globby": "^11.0.3", "tslib": "^2.4.0", "unixify": "^1.0.0" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/@graphql-tools/graphql-tag-pluck": { - "version": "7.5.2", - "resolved": "https://registry.npmjs.org/@graphql-tools/graphql-tag-pluck/-/graphql-tag-pluck-7.5.2.tgz", - "integrity": "sha512-RW+H8FqOOLQw0BPXaahYepVSRjuOHw+7IL8Opaa5G5uYGOBxoXR7DceyQ7BcpMgktAOOmpDNQ2WtcboChOJSRA==", + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/@graphql-tools/graphql-tag-pluck/-/graphql-tag-pluck-8.3.0.tgz", + "integrity": "sha512-gNqukC+s7iHC7vQZmx1SEJQmLnOguBq+aqE2zV2+o1hxkExvKqyFli1SY/9gmukFIKpKutCIj+8yLOM+jARutw==", "dev": true, "dependencies": { + "@babel/core": "^7.22.9", "@babel/parser": "^7.16.8", "@babel/plugin-syntax-import-assertions": "^7.20.0", "@babel/traverse": "^7.16.8", "@babel/types": "^7.16.8", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.0.13", "tslib": "^2.4.0" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/@graphql-tools/import": { - "version": "6.7.18", - "resolved": "https://registry.npmjs.org/@graphql-tools/import/-/import-6.7.18.tgz", - "integrity": "sha512-XQDdyZTp+FYmT7as3xRWH/x8dx0QZA2WZqfMF5EWb36a0PiH7WwlRQYIdyYXj8YCLpiWkeBXgBRHmMnwEYR8iQ==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/import/-/import-7.0.1.tgz", + "integrity": "sha512-935uAjAS8UAeXThqHfYVr4HEAp6nHJ2sximZKO1RzUTq5WoALMAhhGARl0+ecm6X+cqNUwIChJbjtaa6P/ML0w==", "dev": true, "dependencies": { - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.0.13", "resolve-from": "5.0.0", "tslib": "^2.4.0" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/@graphql-tools/json-file-loader": { - "version": "7.4.18", - "resolved": "https://registry.npmjs.org/@graphql-tools/json-file-loader/-/json-file-loader-7.4.18.tgz", - "integrity": "sha512-AJ1b6Y1wiVgkwsxT5dELXhIVUPs/u3VZ8/0/oOtpcoyO/vAeM5rOvvWegzicOOnQw8G45fgBRMkkRfeuwVt6+w==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/json-file-loader/-/json-file-loader-8.0.1.tgz", + "integrity": "sha512-lAy2VqxDAHjVyqeJonCP6TUemrpYdDuKt25a10X6zY2Yn3iFYGnuIDQ64cv3ytyGY6KPyPB+Kp+ZfOkNDG3FQA==", "dev": true, "dependencies": { - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.0.13", "globby": "^11.0.3", "tslib": "^2.4.0", "unixify": "^1.0.0" }, + "engines": { + 
"node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/@graphql-tools/load": { - "version": "7.8.14", - "resolved": "https://registry.npmjs.org/@graphql-tools/load/-/load-7.8.14.tgz", - "integrity": "sha512-ASQvP+snHMYm+FhIaLxxFgVdRaM0vrN9wW2BKInQpktwWTXVyk+yP5nQUCEGmn0RTdlPKrffBaigxepkEAJPrg==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@graphql-tools/load/-/load-8.0.2.tgz", + "integrity": "sha512-S+E/cmyVmJ3CuCNfDuNF2EyovTwdWfQScXv/2gmvJOti2rGD8jTt9GYVzXaxhblLivQR9sBUCNZu/w7j7aXUCA==", "dev": true, "dependencies": { - "@graphql-tools/schema": "^9.0.18", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/schema": "^10.0.3", + "@graphql-tools/utils": "^10.0.13", "p-limit": "3.1.0", "tslib": "^2.4.0" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/@graphql-tools/merge": { - "version": "8.4.2", - "resolved": "https://registry.npmjs.org/@graphql-tools/merge/-/merge-8.4.2.tgz", - "integrity": "sha512-XbrHAaj8yDuINph+sAfuq3QCZ/tKblrTLOpirK0+CAgNlZUCHs0Fa+xtMUURgwCVThLle1AF7svJCxFizygLsw==", + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/@graphql-tools/merge/-/merge-9.0.3.tgz", + "integrity": "sha512-FeKv9lKLMwqDu0pQjPpF59GY3HReUkWXKsMIuMuJQOKh9BETu7zPEFUELvcw8w+lwZkl4ileJsHXC9+AnsT2Lw==", "dev": true, "dependencies": { - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.0.13", "tslib": "^2.4.0" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/@graphql-tools/optimize": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@graphql-tools/optimize/-/optimize-1.4.0.tgz", - "integrity": "sha512-dJs/2XvZp+wgHH8T5J2TqptT9/6uVzIYvA6uFACha+ufvdMBedkfR4b4GbT8jAKLRARiqRTxy3dctnwkTM2tdw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@graphql-tools/optimize/-/optimize-2.0.0.tgz", + "integrity": "sha512-nhdT+CRGDZ+bk68ic+Jw1OZ99YCDIKYA5AlVAnBHJvMawSx9YQqQAIj4refNc1/LRieGiuWvhbG3jvPVYho0Dg==", "dev": true, "dependencies": { "tslib": "^2.4.0" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/@graphql-tools/prisma-loader": { - "version": "7.2.72", - "resolved": "https://registry.npmjs.org/@graphql-tools/prisma-loader/-/prisma-loader-7.2.72.tgz", - "integrity": "sha512-0a7uV7Fky6yDqd0tI9+XMuvgIo6GAqiVzzzFV4OSLry4AwiQlI3igYseBV7ZVOGhedOTqj/URxjpiv07hRcwag==", + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/@graphql-tools/prisma-loader/-/prisma-loader-8.0.3.tgz", + "integrity": "sha512-oZhxnMr3Jw2WAW1h9FIhF27xWzIB7bXWM8olz4W12oII4NiZl7VRkFw9IT50zME2Bqi9LGh9pkmMWkjvbOpl+Q==", "dev": true, "dependencies": { - "@graphql-tools/url-loader": "^7.17.18", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/url-loader": "^8.0.2", + "@graphql-tools/utils": "^10.0.13", "@types/js-yaml": "^4.0.0", "@types/json-stable-stringify": "^1.0.32", - "@whatwg-node/fetch": "^0.8.2", + "@whatwg-node/fetch": "^0.9.0", "chalk": "^4.1.0", "debug": "^4.3.1", "dotenv": "^16.0.0", "graphql-request": "^6.0.0", - "http-proxy-agent": "^6.0.0", - "https-proxy-agent": "^6.0.0", - "jose": "^4.11.4", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.0", + "jose": "^5.0.0", "js-yaml": "^4.0.0", "json-stable-stringify": "^1.0.1", "lodash": "^4.17.20", @@ -2158,88 +3802,196 @@ "tslib": "^2.4.0", 
"yaml-ast-parser": "^0.0.43" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, + "node_modules/@graphql-tools/prisma-loader/node_modules/@whatwg-node/events": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.1.1.tgz", + "integrity": "sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==", + "dev": true, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/prisma-loader/node_modules/@whatwg-node/fetch": { + "version": "0.9.17", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.9.17.tgz", + "integrity": "sha512-TDYP3CpCrxwxpiNY0UMNf096H5Ihf67BK1iKGegQl5u9SlpEDYrvnV71gWBGJm+Xm31qOy8ATgma9rm8Pe7/5Q==", + "dev": true, + "dependencies": { + "@whatwg-node/node-fetch": "^0.5.7", + "urlpattern-polyfill": "^10.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/prisma-loader/node_modules/@whatwg-node/node-fetch": { + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.5.10.tgz", + "integrity": "sha512-KIAHepie/T1PRkUfze4t+bPlyvpxlWiXTPtcGlbIZ0vWkBJMdRmCg4ZrJ2y4XaO1eTPo1HlWYUuj1WvoIpumqg==", + "dev": true, + "dependencies": { + "@kamilkisiela/fast-url-parser": "^1.1.4", + "@whatwg-node/events": "^0.1.0", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + "tslib": "^2.3.1" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/prisma-loader/node_modules/urlpattern-polyfill": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz", + "integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==", + "dev": true + }, "node_modules/@graphql-tools/relay-operation-optimizer": { - "version": "6.5.18", - "resolved": "https://registry.npmjs.org/@graphql-tools/relay-operation-optimizer/-/relay-operation-optimizer-6.5.18.tgz", - "integrity": "sha512-mc5VPyTeV+LwiM+DNvoDQfPqwQYhPV/cl5jOBjTgSniyaq8/86aODfMkrE2OduhQ5E00hqrkuL2Fdrgk0w1QJg==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/relay-operation-optimizer/-/relay-operation-optimizer-7.0.1.tgz", + "integrity": "sha512-y0ZrQ/iyqWZlsS/xrJfSir3TbVYJTYmMOu4TaSz6F4FRDTQ3ie43BlKkhf04rC28pnUOS4BO9pDcAo1D30l5+A==", "dev": true, "dependencies": { "@ardatan/relay-compiler": "12.0.0", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.0.13", "tslib": "^2.4.0" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/@graphql-tools/schema": { - "version": "9.0.19", - "resolved": "https://registry.npmjs.org/@graphql-tools/schema/-/schema-9.0.19.tgz", - "integrity": "sha512-oBRPoNBtCkk0zbUsyP4GaIzCt8C0aCI4ycIRUL67KK5pOHljKLBBtGT+Jr6hkzA74C8Gco8bpZPe7aWFjiaK2w==", + "version": "10.0.3", + "resolved": "https://registry.npmjs.org/@graphql-tools/schema/-/schema-10.0.3.tgz", + "integrity": "sha512-p28Oh9EcOna6i0yLaCFOnkcBDQECVf3SCexT6ktb86QNj9idnkhI+tCxnwZDh58Qvjd2nURdkbevvoZkvxzCog==", "dev": true, "dependencies": { - "@graphql-tools/merge": "^8.4.1", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/merge": "^9.0.3", + "@graphql-tools/utils": "^10.0.13", "tslib": "^2.4.0", "value-or-promise": "^1.0.12" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || 
^17.0.0" } }, "node_modules/@graphql-tools/url-loader": { - "version": "7.17.18", - "resolved": "https://registry.npmjs.org/@graphql-tools/url-loader/-/url-loader-7.17.18.tgz", - "integrity": "sha512-ear0CiyTj04jCVAxi7TvgbnGDIN2HgqzXzwsfcqiVg9cvjT40NcMlZ2P1lZDgqMkZ9oyLTV8Bw6j+SyG6A+xPw==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@graphql-tools/url-loader/-/url-loader-8.0.2.tgz", + "integrity": "sha512-1dKp2K8UuFn7DFo1qX5c1cyazQv2h2ICwA9esHblEqCYrgf69Nk8N7SODmsfWg94OEaI74IqMoM12t7eIGwFzQ==", "dev": true, "dependencies": { "@ardatan/sync-fetch": "^0.0.1", - "@graphql-tools/delegate": "^9.0.31", - "@graphql-tools/executor-graphql-ws": "^0.0.14", - "@graphql-tools/executor-http": "^0.1.7", - "@graphql-tools/executor-legacy-ws": "^0.0.11", - "@graphql-tools/utils": "^9.2.1", - "@graphql-tools/wrap": "^9.4.2", + "@graphql-tools/delegate": "^10.0.4", + "@graphql-tools/executor-graphql-ws": "^1.1.2", + "@graphql-tools/executor-http": "^1.0.9", + "@graphql-tools/executor-legacy-ws": "^1.0.6", + "@graphql-tools/utils": "^10.0.13", + "@graphql-tools/wrap": "^10.0.2", "@types/ws": "^8.0.0", - "@whatwg-node/fetch": "^0.8.0", + "@whatwg-node/fetch": "^0.9.0", "isomorphic-ws": "^5.0.0", "tslib": "^2.4.0", "value-or-promise": "^1.0.11", "ws": "^8.12.0" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, + "node_modules/@graphql-tools/url-loader/node_modules/@whatwg-node/events": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.1.1.tgz", + "integrity": "sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==", + "dev": true, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/url-loader/node_modules/@whatwg-node/fetch": { + "version": "0.9.17", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.9.17.tgz", + "integrity": "sha512-TDYP3CpCrxwxpiNY0UMNf096H5Ihf67BK1iKGegQl5u9SlpEDYrvnV71gWBGJm+Xm31qOy8ATgma9rm8Pe7/5Q==", + "dev": true, + "dependencies": { + "@whatwg-node/node-fetch": "^0.5.7", + "urlpattern-polyfill": "^10.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/url-loader/node_modules/@whatwg-node/node-fetch": { + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.5.10.tgz", + "integrity": "sha512-KIAHepie/T1PRkUfze4t+bPlyvpxlWiXTPtcGlbIZ0vWkBJMdRmCg4ZrJ2y4XaO1eTPo1HlWYUuj1WvoIpumqg==", + "dev": true, + "dependencies": { + "@kamilkisiela/fast-url-parser": "^1.1.4", + "@whatwg-node/events": "^0.1.0", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + "tslib": "^2.3.1" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@graphql-tools/url-loader/node_modules/urlpattern-polyfill": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz", + "integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==", + "dev": true + }, "node_modules/@graphql-tools/utils": { - "version": "9.2.1", - "resolved": "https://registry.npmjs.org/@graphql-tools/utils/-/utils-9.2.1.tgz", - "integrity": "sha512-WUw506Ql6xzmOORlriNrD6Ugx+HjVgYxt9KCXD9mHAak+eaXSwuGGPyE60hy9xaDEoXKBsG7SkG69ybitaVl6A==", + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/@graphql-tools/utils/-/utils-10.1.2.tgz", + "integrity": 
"sha512-fX13CYsDnX4yifIyNdiN0cVygz/muvkreWWem6BBw130+ODbRRgfiVveL0NizCEnKXkpvdeTy9Bxvo9LIKlhrw==", "dev": true, "dependencies": { "@graphql-typed-document-node/core": "^3.1.1", + "cross-inspect": "1.0.0", + "dset": "^3.1.2", "tslib": "^2.4.0" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } }, "node_modules/@graphql-tools/wrap": { - "version": "9.4.2", - "resolved": "https://registry.npmjs.org/@graphql-tools/wrap/-/wrap-9.4.2.tgz", - "integrity": "sha512-DFcd9r51lmcEKn0JW43CWkkI2D6T9XI1juW/Yo86i04v43O9w2/k4/nx2XTJv4Yv+iXwUw7Ok81PGltwGJSDSA==", + "version": "10.0.5", + "resolved": "https://registry.npmjs.org/@graphql-tools/wrap/-/wrap-10.0.5.tgz", + "integrity": "sha512-Cbr5aYjr3HkwdPvetZp1cpDWTGdD1Owgsb3z/ClzhmrboiK86EnQDxDvOJiQkDCPWE9lNBwj8Y4HfxroY0D9DQ==", "dev": true, "dependencies": { - "@graphql-tools/delegate": "^9.0.31", - "@graphql-tools/schema": "^9.0.18", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/delegate": "^10.0.4", + "@graphql-tools/schema": "^10.0.3", + "@graphql-tools/utils": "^10.1.1", "tslib": "^2.4.0", "value-or-promise": "^1.0.12" }, + "engines": { + "node": ">=16.0.0" + }, "peerDependencies": { "graphql": "^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0" } @@ -2269,9 +4021,9 @@ } }, "node_modules/@heroicons/react": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@heroicons/react/-/react-2.1.1.tgz", - "integrity": "sha512-JyyN9Lo66kirbCMuMMRPtJxtKJoIsXKS569ebHGGRKbl8s4CtUfLnyKJxteA+vIKySocO4s1SkTkGS4xtG/yEA==", + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@heroicons/react/-/react-2.1.3.tgz", + "integrity": "sha512-fEcPfo4oN345SoqdlCDdSa4ivjaKbk0jTd+oubcgNxnNgAfzysfwWfQUr+51wigiWHQQRiZNd1Ao0M5Y3M2EGg==", "peerDependencies": { "react": ">= 16" } @@ -2320,11 +4072,11 @@ "dev": true }, "node_modules/@iconify-icon/react": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@iconify-icon/react/-/react-1.0.8.tgz", - "integrity": "sha512-T8Hzz0cQ+08hboS1KFt5t/lBxwpZZuMNVxld2O7vfarPd6mZt9DRRQiM5RIYfB6hxVNddDuzyxbqqfGcMU27pA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@iconify-icon/react/-/react-2.0.1.tgz", + "integrity": "sha512-1m6L2yNsSJ25k5baQRqNqh2J0w+91PwOn1WdBIR6ZTwxePbsZC8k3NNVc6m9BJObsIQdUlMA1NGj8el4tfbsVg==", "dependencies": { - "iconify-icon": "^1.0.8" + "iconify-icon": "^2.0.0" }, "funding": { "url": "https://github.com/sponsors/cyberalien" @@ -2444,6 +4196,18 @@ "node": ">=8" } }, + "node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, "node_modules/@jridgewell/gen-mapping": { "version": "0.3.3", "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", @@ -2479,14 +4243,20 @@ "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==" }, "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.22", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.22.tgz", - "integrity": "sha512-Wf963MzWtA2sjrNt+g18IAln9lKnlRp+K2eH4jjIoF1wYeq3aMREpG09xhlhdzS0EjwU7qmUJYangWa+151vZw==", + "version": "0.3.25", + "resolved": 
"https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@kamilkisiela/fast-url-parser": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@kamilkisiela/fast-url-parser/-/fast-url-parser-1.1.4.tgz", + "integrity": "sha512-gbkePEBupNydxCelHCESvFSFM8XPh1Zs/OAVRW/rKpEqPAl5PbOM90Si8mv9bvnR53uPD2s/FiRxdvSejpRJew==", + "dev": true + }, "node_modules/@lezer/common": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.1.tgz", @@ -2547,6 +4317,92 @@ "@lezer/highlight": "^1.0.0" } }, + "node_modules/@loadable/component": { + "version": "5.16.3", + "resolved": "https://registry.npmjs.org/@loadable/component/-/component-5.16.3.tgz", + "integrity": "sha512-2mVvHs2988oVX2/zM0y6nYhJ4rTVHhkhRnpupBA0Rjl5tS8op9uSR4u5SLVfMLxzpspr2UiIBQD+wEuMsuq4Dg==", + "dependencies": { + "@babel/runtime": "^7.7.7", + "hoist-non-react-statics": "^3.3.1", + "react-is": "^16.12.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/gregberge" + }, + "peerDependencies": { + "react": "^16.3.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/@motionone/animation": { + "version": "10.17.0", + "resolved": "https://registry.npmjs.org/@motionone/animation/-/animation-10.17.0.tgz", + "integrity": "sha512-ANfIN9+iq1kGgsZxs+Nz96uiNcPLGTXwfNo2Xz/fcJXniPYpaz/Uyrfa+7I5BPLxCP82sh7quVDudf1GABqHbg==", + "dependencies": { + "@motionone/easing": "^10.17.0", + "@motionone/types": "^10.17.0", + "@motionone/utils": "^10.17.0", + "tslib": "^2.3.1" + } + }, + "node_modules/@motionone/dom": { + "version": "10.12.0", + "resolved": "https://registry.npmjs.org/@motionone/dom/-/dom-10.12.0.tgz", + "integrity": "sha512-UdPTtLMAktHiqV0atOczNYyDd/d8Cf5fFsd1tua03PqTwwCe/6lwhLSQ8a7TbnQ5SN0gm44N1slBfj+ORIhrqw==", + "dependencies": { + "@motionone/animation": "^10.12.0", + "@motionone/generators": "^10.12.0", + "@motionone/types": "^10.12.0", + "@motionone/utils": "^10.12.0", + "hey-listen": "^1.0.8", + "tslib": "^2.3.1" + } + }, + "node_modules/@motionone/easing": { + "version": "10.17.0", + "resolved": "https://registry.npmjs.org/@motionone/easing/-/easing-10.17.0.tgz", + "integrity": "sha512-Bxe2wSuLu/qxqW4rBFS5m9tMLOw+QBh8v5A7Z5k4Ul4sTj5jAOfZG5R0bn5ywmk+Fs92Ij1feZ5pmC4TeXA8Tg==", + "dependencies": { + "@motionone/utils": "^10.17.0", + "tslib": "^2.3.1" + } + }, + "node_modules/@motionone/generators": { + "version": "10.17.0", + "resolved": "https://registry.npmjs.org/@motionone/generators/-/generators-10.17.0.tgz", + "integrity": "sha512-T6Uo5bDHrZWhIfxG/2Aut7qyWQyJIWehk6OB4qNvr/jwA/SRmixwbd7SOrxZi1z5rH3LIeFFBKK1xHnSbGPZSQ==", + "dependencies": { + "@motionone/types": "^10.17.0", + "@motionone/utils": "^10.17.0", + "tslib": "^2.3.1" + } + }, + "node_modules/@motionone/types": { + "version": "10.17.0", + "resolved": "https://registry.npmjs.org/@motionone/types/-/types-10.17.0.tgz", + "integrity": "sha512-EgeeqOZVdRUTEHq95Z3t8Rsirc7chN5xFAPMYFobx8TPubkEfRSm5xihmMUkbaR2ErKJTUw3347QDPTHIW12IA==" + }, + "node_modules/@motionone/utils": { + "version": "10.17.0", + "resolved": "https://registry.npmjs.org/@motionone/utils/-/utils-10.17.0.tgz", + "integrity": "sha512-bGwrki4896apMWIj9yp5rAS2m0xyhxblg6gTB/leWDPt+pb410W8lYWsxyurX+DH+gO1zsQsfx2su/c1/LtTpg==", + "dependencies": { + "@motionone/types": 
"^10.17.0", + "hey-listen": "^1.0.8", + "tslib": "^2.3.1" + } + }, + "node_modules/@n1ru4l/push-pull-async-iterable-iterator": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@n1ru4l/push-pull-async-iterable-iterator/-/push-pull-async-iterable-iterator-3.2.0.tgz", + "integrity": "sha512-3fkKj25kEjsfObL6IlKPAlHYPq/oYwUkkQ03zsTTiDjD7vg/RxjdiLeCydqtxHZP0JgsXL3D/X5oAkMGzuUp/Q==", + "engines": { + "node": ">=12" + } + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -2585,6 +4441,8 @@ "integrity": "sha512-XJLGVL0DEclX5pcWa2N9SX1jCGTDd8l972biNooLFtjneuGqodupPQh6XseXIBBeVIMaaJ7bTcs3qGvXwsp4vg==", "dev": true, "hasInstallScript": true, + "optional": true, + "peer": true, "dependencies": { "detect-libc": "^1.0.3", "is-glob": "^4.0.3", @@ -2625,6 +4483,7 @@ "os": [ "darwin" ], + "peer": true, "engines": { "node": ">= 10.0.0" }, @@ -2657,16 +4516,16 @@ } }, "node_modules/@peculiar/webcrypto": { - "version": "1.4.5", - "resolved": "https://registry.npmjs.org/@peculiar/webcrypto/-/webcrypto-1.4.5.tgz", - "integrity": "sha512-oDk93QCDGdxFRM8382Zdminzs44dg3M2+E5Np+JWkpqLDyJC9DviMh8F8mEJkYuUcUOGA5jHO5AJJ10MFWdbZw==", + "version": "1.4.6", + "resolved": "https://registry.npmjs.org/@peculiar/webcrypto/-/webcrypto-1.4.6.tgz", + "integrity": "sha512-YBcMfqNSwn3SujUJvAaySy5tlYbYm6tVt9SKoXu8BaTdKGROiJDgPR3TXpZdAKUfklzm3lRapJEAltiMQtBgZg==", "dev": true, "dependencies": { "@peculiar/asn1-schema": "^2.3.8", "@peculiar/json-schema": "^1.1.12", "pvtsutils": "^1.3.5", "tslib": "^2.6.2", - "webcrypto-core": "^1.7.8" + "webcrypto-core": "^1.7.9" }, "engines": { "node": ">=10.12.0" @@ -2681,45 +4540,185 @@ "node": ">=14" } }, - "node_modules/@playwright/test": { - "version": "1.41.1", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.41.1.tgz", - "integrity": "sha512-9g8EWTjiQ9yFBXc6HjCWe41msLpxEX0KhmfmPl9RPLJdfzL4F0lg2BdJ91O9azFdl11y1pmpwdjBiSxvqc+btw==", - "dev": true, + "node_modules/@playwright/test": { + "version": "1.42.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.42.1.tgz", + "integrity": "sha512-Gq9rmS54mjBL/7/MvBaNOBwbfnh7beHvS6oS4srqXFcQHpQCV1+c8JXWE8VLPyRDhgS3H8x8A7hztqI9VnwrAQ==", + "dev": true, + "dependencies": { + "playwright": "1.42.1" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/@popperjs/core": { + "version": "2.11.8", + "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", + "integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/popperjs" + } + }, + "node_modules/@radix-ui/primitive": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.0.1.tgz", + "integrity": "sha512-yQ8oGX2GVsEYMWGxcovu1uGWPCxV5BFfeeYxqPmuAzUyLT9qmaMXSAhXpb0WrspIeqYzdJpkh2vHModJPgRIaw==", + "dependencies": { + "@babel/runtime": "^7.13.10" + } + }, + "node_modules/@radix-ui/react-arrow": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.0.3.tgz", + "integrity": "sha512-wSP+pHsB/jQRaL6voubsQ/ZlrGBHHrOjmBnr19hxYgtS0WvAFwZhK2WP/YY5yF9uKECCEEDGxuLxq1NBK51wFA==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-primitive": "1.0.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || 
^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collection": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.0.3.tgz", + "integrity": "sha512-3SzW+0PW7yBBoQlT8wNcGtaxaD0XSu0uLUFgrtHY08Acx05TaHaOmVLR73c0j/cqpDy53KBMO7s0dx2wmOIDIA==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-slot": "1.0.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-compose-refs": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.1.tgz", + "integrity": "sha512-fDSBgd44FKHa1FRMU59qBMPFcl2PZE+2nmqunj+BWFyYYjnhIDWL2ItDs3rrbJDQOtzt5nIebLCQc4QRfz6LJw==", + "dependencies": { + "@babel/runtime": "^7.13.10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-context": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.0.1.tgz", + "integrity": "sha512-ebbrdFoYTcuZ0v4wG5tedGnp9tzcV8awzsxYph7gXUyvnNLuTIcCk1q17JEbnVhXAKG9oX3KtchwiMIAYp9NLg==", + "dependencies": { + "@babel/runtime": "^7.13.10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dialog": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.0.5.tgz", + "integrity": "sha512-GjWJX/AUpB703eEBanuBnIWdIXg6NvJFCXcNlSZk4xdszCdhrJgBoUd1cGk67vFO+WdA2pfI/plOpqz/5GUP6Q==", "dependencies": { - "playwright": "1.41.1" + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.1", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-dismissable-layer": "1.0.5", + "@radix-ui/react-focus-guards": "1.0.1", + "@radix-ui/react-focus-scope": "1.0.4", + "@radix-ui/react-id": "1.0.1", + "@radix-ui/react-portal": "1.0.4", + "@radix-ui/react-presence": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-slot": "1.0.2", + "@radix-ui/react-use-controllable-state": "1.0.1", + "aria-hidden": "^1.1.1", + "react-remove-scroll": "2.5.5" }, - "bin": { - "playwright": "cli.js" + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" }, - "engines": { - "node": ">=16" - } - }, - "node_modules/@popperjs/core": { - "version": "2.11.8", - "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", - "integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/popperjs" + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": 
true + } } }, - "node_modules/@radix-ui/primitive": { + "node_modules/@radix-ui/react-direction": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.0.1.tgz", - "integrity": "sha512-yQ8oGX2GVsEYMWGxcovu1uGWPCxV5BFfeeYxqPmuAzUyLT9qmaMXSAhXpb0WrspIeqYzdJpkh2vHModJPgRIaw==", + "resolved": "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.0.1.tgz", + "integrity": "sha512-RXcvnXgyvYvBEOhCBuddKecVkoMiI10Jcm5cTI7abJRAHYfFxeu+FBQs/DvdxSYucxR5mna0dNsL6QFlds5TMA==", "dependencies": { "@babel/runtime": "^7.13.10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } } }, - "node_modules/@radix-ui/react-arrow": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.0.3.tgz", - "integrity": "sha512-wSP+pHsB/jQRaL6voubsQ/ZlrGBHHrOjmBnr19hxYgtS0WvAFwZhK2WP/YY5yF9uKECCEEDGxuLxq1NBK51wFA==", + "node_modules/@radix-ui/react-dismissable-layer": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.5.tgz", + "integrity": "sha512-aJeDjQhywg9LBu2t/At58hCvr7pEm0o2Ke1x33B+MhjNmmZ17sy4KImo0KPLgsnc/zN7GPdce8Cnn0SWvwZO7g==", "dependencies": { "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "1.0.3" + "@radix-ui/primitive": "1.0.1", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-use-callback-ref": "1.0.1", + "@radix-ui/react-use-escape-keydown": "1.0.3" }, "peerDependencies": { "@types/react": "*", @@ -2736,27 +4735,39 @@ } } }, - "node_modules/@radix-ui/react-compose-refs": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.1.tgz", - "integrity": "sha512-fDSBgd44FKHa1FRMU59qBMPFcl2PZE+2nmqunj+BWFyYYjnhIDWL2ItDs3rrbJDQOtzt5nIebLCQc4QRfz6LJw==", + "node_modules/@radix-ui/react-dropdown-menu": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.0.6.tgz", + "integrity": "sha512-i6TuFOoWmLWq+M/eCLGd/bQ2HfAX1RJgvrBQ6AQLmzfvsLdefxbWu8G9zczcPFfcSPehz9GcpF6K9QYreFV8hA==", "dependencies": { - "@babel/runtime": "^7.13.10" + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.1", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-id": "1.0.1", + "@radix-ui/react-menu": "2.0.6", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-use-controllable-state": "1.0.1" }, "peerDependencies": { "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0" + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" }, "peerDependenciesMeta": { "@types/react": { "optional": true + }, + "@types/react-dom": { + "optional": true } } }, - "node_modules/@radix-ui/react-context": { + "node_modules/@radix-ui/react-focus-guards": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.0.1.tgz", - "integrity": "sha512-ebbrdFoYTcuZ0v4wG5tedGnp9tzcV8awzsxYph7gXUyvnNLuTIcCk1q17JEbnVhXAKG9oX3KtchwiMIAYp9NLg==", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.0.1.tgz", + "integrity": "sha512-Rect2dWbQ8waGzhMavsIbmSVCgYxkXLxxR3ZvCX79JOglzdEy4JXMb98lq4hPxUbLr77nP0UOGf4rcMU+s1pUA==", "dependencies": { "@babel/runtime": "^7.13.10" }, @@ -2770,17 +4781,15 @@ 
} } }, - "node_modules/@radix-ui/react-dismissable-layer": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.5.tgz", - "integrity": "sha512-aJeDjQhywg9LBu2t/At58hCvr7pEm0o2Ke1x33B+MhjNmmZ17sy4KImo0KPLgsnc/zN7GPdce8Cnn0SWvwZO7g==", + "node_modules/@radix-ui/react-focus-scope": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.4.tgz", + "integrity": "sha512-sL04Mgvf+FmyvZeYfNu1EPAaaxD+aw7cYeIB9L9Fvq8+urhltTRaEo5ysKOpHuKPclsZcSUMKlN05x4u+CINpA==", "dependencies": { "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "1.0.1", "@radix-ui/react-compose-refs": "1.0.1", "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-use-callback-ref": "1.0.1", - "@radix-ui/react-use-escape-keydown": "1.0.3" + "@radix-ui/react-use-callback-ref": "1.0.1" }, "peerDependencies": { "@types/react": "*", @@ -2815,6 +4824,83 @@ } } }, + "node_modules/@radix-ui/react-menu": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.0.6.tgz", + "integrity": "sha512-BVkFLS+bUC8HcImkRKPSiVumA1VPOOEC5WBMiT+QAVsPzW1FJzI9KnqgGxVDPBcql5xXrHkD3JOVoXWEXD8SYA==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.1", + "@radix-ui/react-collection": "1.0.3", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-direction": "1.0.1", + "@radix-ui/react-dismissable-layer": "1.0.5", + "@radix-ui/react-focus-guards": "1.0.1", + "@radix-ui/react-focus-scope": "1.0.4", + "@radix-ui/react-id": "1.0.1", + "@radix-ui/react-popper": "1.1.3", + "@radix-ui/react-portal": "1.0.4", + "@radix-ui/react-presence": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-roving-focus": "1.0.4", + "@radix-ui/react-slot": "1.0.2", + "@radix-ui/react-use-callback-ref": "1.0.1", + "aria-hidden": "^1.1.1", + "react-remove-scroll": "2.5.5" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popover": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.0.7.tgz", + "integrity": "sha512-shtvVnlsxT6faMnK/a7n0wptwBD23xc1Z5mdrtKLwVEfsEMXodS0r5s0/g5P0hX//EKYZS2sxUjqfzlg52ZSnQ==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.1", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-dismissable-layer": "1.0.5", + "@radix-ui/react-focus-guards": "1.0.1", + "@radix-ui/react-focus-scope": "1.0.4", + "@radix-ui/react-id": "1.0.1", + "@radix-ui/react-popper": "1.1.3", + "@radix-ui/react-portal": "1.0.4", + "@radix-ui/react-presence": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-slot": "1.0.2", + "@radix-ui/react-use-controllable-state": "1.0.1", + "aria-hidden": "^1.1.1", + "react-remove-scroll": "2.5.5" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, "node_modules/@radix-ui/react-popper": { "version": "1.1.3", "resolved": 
"https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.1.3.tgz", @@ -2917,6 +5003,61 @@ } } }, + "node_modules/@radix-ui/react-progress": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-progress/-/react-progress-1.0.3.tgz", + "integrity": "sha512-5G6Om/tYSxjSeEdrb1VfKkfZfn/1IlPWd731h2RfPuSbIfNUgfqAwbKfJCg/PP6nuUCTrYzalwHSpSinoWoCag==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-primitive": "1.0.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-roving-focus": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.0.4.tgz", + "integrity": "sha512-2mUg5Mgcu001VkGy+FfzZyzbmuUWzgWkj3rvv4yu+mLw03+mTzbxZHvfcGyFp2b8EkQeMkpRQ5FiA2Vr2O6TeQ==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.1", + "@radix-ui/react-collection": "1.0.3", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-direction": "1.0.1", + "@radix-ui/react-id": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-use-callback-ref": "1.0.1", + "@radix-ui/react-use-controllable-state": "1.0.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, "node_modules/@radix-ui/react-slot": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.0.2.tgz", @@ -3107,9 +5248,9 @@ } }, "node_modules/@remix-run/router": { - "version": "1.14.2", - "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.14.2.tgz", - "integrity": "sha512-ACXpdMM9hmKZww21yEqWwiLws/UPLhNKvimN8RrYSqPSvB3ov7sLvAcfvaxePeLvccTQKGdkDIhLYApZVDFuKg==", + "version": "1.15.3", + "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.15.3.tgz", + "integrity": "sha512-Oy8rmScVrVxWZVOpEF57ovlnhpZ8CCPlnIIumVcV9nFdiSIrus99+Lw78ekXyGvVDlIsFJbSfmSovJUhCWYV3w==", "engines": { "node": ">=14.0.0" } @@ -3141,136 +5282,191 @@ } } }, - "node_modules/@sentry-internal/feedback": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry-internal/feedback/-/feedback-7.99.0.tgz", - "integrity": "sha512-exIO1o+bE0MW4z30FxC0cYzJ4ZHSMlDPMHCBDPzU+MWGQc/fb8s58QUrx5Dnm6HTh9G3H+YlroCxIo9u0GSwGQ==", - "dependencies": { - "@sentry/core": "7.99.0", - "@sentry/types": "7.99.0", - "@sentry/utils": "7.99.0" - }, - "engines": { - "node": ">=12" - } + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.14.0.tgz", + "integrity": "sha512-jwXtxYbRt1V+CdQSy6Z+uZti7JF5irRKF8hlKfEnF/xJpcNGuuiZMBvuoYM+x9sr9iWGnzrlM0+9hvQ1kgkf1w==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "android" + ] }, - "node_modules/@sentry-internal/replay-canvas": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry-internal/replay-canvas/-/replay-canvas-7.99.0.tgz", - "integrity": 
"sha512-PoIkfusToDq0snfl2M6HJx/1KJYtXxYhQplrn11kYadO04SdG0XGXf4h7wBTMEQ7LDEAtQyvsOu4nEQtTO3YjQ==", - "dependencies": { - "@sentry/core": "7.99.0", - "@sentry/replay": "7.99.0", - "@sentry/types": "7.99.0", - "@sentry/utils": "7.99.0" - }, - "engines": { - "node": ">=12" - } + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.14.0.tgz", + "integrity": "sha512-fI9nduZhCccjzlsA/OuAwtFGWocxA4gqXGTLvOyiF8d+8o0fZUeSztixkYjcGq1fGZY3Tkq4yRvHPFxU+jdZ9Q==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.14.0.tgz", + "integrity": "sha512-BcnSPRM76/cD2gQC+rQNGBN6GStBs2pl/FpweW8JYuz5J/IEa0Fr4AtrPv766DB/6b2MZ/AfSIOSGw3nEIP8SA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.14.0.tgz", + "integrity": "sha512-LDyFB9GRolGN7XI6955aFeI3wCdCUszFWumWU0deHA8VpR3nWRrjG6GtGjBrQxQKFevnUTHKCfPR4IvrW3kCgQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.14.0.tgz", + "integrity": "sha512-ygrGVhQP47mRh0AAD0zl6QqCbNsf0eTo+vgwkY6LunBcg0f2Jv365GXlDUECIyoXp1kKwL5WW6rsO429DBY/bA==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.14.0.tgz", + "integrity": "sha512-x+uJ6MAYRlHGe9wi4HQjxpaKHPM3d3JjqqCkeC5gpnnI6OWovLdXTpfa8trjxPLnWKyBsSi5kne+146GAxFt4A==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.14.0.tgz", + "integrity": "sha512-nrRw8ZTQKg6+Lttwqo6a2VxR9tOroa2m91XbdQ2sUUzHoedXlsyvY1fN4xWdqz8PKmf4orDwejxXHjh7YBGUCA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ] }, - "node_modules/@sentry-internal/tracing": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry-internal/tracing/-/tracing-7.99.0.tgz", - "integrity": "sha512-z3JQhHjoM1KdM20qrHwRClKJrNLr2CcKtCluq7xevLtXHJWNAQQbafnWD+Aoj85EWXBzKt9yJMv2ltcXJ+at+w==", - "dependencies": { - "@sentry/core": "7.99.0", - "@sentry/types": "7.99.0", - "@sentry/utils": "7.99.0" - }, - "engines": { - "node": ">=8" - } + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.14.0.tgz", + "integrity": "sha512-xV0d5jDb4aFu84XKr+lcUJ9y3qpIWhttO3Qev97z8DKLXR62LC3cXT/bMZXrjLF9X+P5oSmJTzAhqwUbY96PnA==", + "cpu": [ + "ppc64le" + ], + "optional": true, + "os": [ + "linux" + ] }, - "node_modules/@sentry/browser": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry/browser/-/browser-7.99.0.tgz", - "integrity": 
"sha512-bgfoUv3wkwwLgN5YUOe0ibB3y268ZCnamZh6nLFqnY/UBKC1+FXWFdvzVON/XKUm62LF8wlpCybOf08ebNj2yg==", - "dependencies": { - "@sentry-internal/feedback": "7.99.0", - "@sentry-internal/replay-canvas": "7.99.0", - "@sentry-internal/tracing": "7.99.0", - "@sentry/core": "7.99.0", - "@sentry/replay": "7.99.0", - "@sentry/types": "7.99.0", - "@sentry/utils": "7.99.0" - }, - "engines": { - "node": ">=8" - } + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.14.0.tgz", + "integrity": "sha512-SDDhBQwZX6LPRoPYjAZWyL27LbcBo7WdBFWJi5PI9RPCzU8ijzkQn7tt8NXiXRiFMJCVpkuMkBf4OxSxVMizAw==", + "cpu": [ + "riscv64" + ], + "optional": true, + "os": [ + "linux" + ] }, - "node_modules/@sentry/core": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.99.0.tgz", - "integrity": "sha512-vOAtzcAXEUtS/oW7wi3wMkZ3hsb5Ch96gKyrrj/mXdOp2zrcwdNV6N9/pawq2E9P/7Pw8AXw4CeDZztZrjQLuA==", - "dependencies": { - "@sentry/types": "7.99.0", - "@sentry/utils": "7.99.0" - }, - "engines": { - "node": ">=8" - } + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.14.0.tgz", + "integrity": "sha512-RxB/qez8zIDshNJDufYlTT0ZTVut5eCpAZ3bdXDU9yTxBzui3KhbGjROK2OYTTor7alM7XBhssgoO3CZ0XD3qA==", + "cpu": [ + "s390x" + ], + "optional": true, + "os": [ + "linux" + ] }, - "node_modules/@sentry/react": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry/react/-/react-7.99.0.tgz", - "integrity": "sha512-RtHwgzMHJhzJfSQpVG0SDPQYMTGDX3Q37/YWI59S4ALMbSW4/F6n/eQAvGVYZKbh2UCSqgFuRWaXOYkSZT17wA==", - "dependencies": { - "@sentry/browser": "7.99.0", - "@sentry/core": "7.99.0", - "@sentry/types": "7.99.0", - "@sentry/utils": "7.99.0", - "hoist-non-react-statics": "^3.3.2" - }, - "engines": { - "node": ">=8" - }, - "peerDependencies": { - "react": "15.x || 16.x || 17.x || 18.x" - } + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.14.0.tgz", + "integrity": "sha512-C6y6z2eCNCfhZxT9u+jAM2Fup89ZjiG5pIzZIDycs1IwESviLxwkQcFRGLjnDrP+PT+v5i4YFvlcfAs+LnreXg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ] }, - "node_modules/@sentry/replay": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry/replay/-/replay-7.99.0.tgz", - "integrity": "sha512-gyN/I2WpQrLAZDT+rScB/0jnFL2knEVBo8U8/OVt8gNP20Pq8T/rDZKO/TG0cBfvULDUbJj2P4CJryn2p/O2rA==", - "dependencies": { - "@sentry-internal/tracing": "7.99.0", - "@sentry/core": "7.99.0", - "@sentry/types": "7.99.0", - "@sentry/utils": "7.99.0" - }, - "engines": { - "node": ">=12" - } + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.14.0.tgz", + "integrity": "sha512-i0QwbHYfnOMYsBEyjxcwGu5SMIi9sImDVjDg087hpzXqhBSosxkE7gyIYFHgfFl4mr7RrXksIBZ4DoLoP4FhJg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ] }, - "node_modules/@sentry/tracing": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry/tracing/-/tracing-7.99.0.tgz", - "integrity": "sha512-Cf622gSeamiSsi0JEj3PTXnq019OymaCrGf91x1d6OPyJ5jAXdlNuhw7NkqCEw8euIhhULuS81l5nGfBrgjj9Q==", - "dependencies": { - "@sentry-internal/tracing": "7.99.0" - }, - "engines": { - 
"node": ">=8" - } + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.14.0.tgz", + "integrity": "sha512-Fq52EYb0riNHLBTAcL0cun+rRwyZ10S9vKzhGKKgeD+XbwunszSY0rVMco5KbOsTlwovP2rTOkiII/fQ4ih/zQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "win32" + ] }, - "node_modules/@sentry/types": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.99.0.tgz", - "integrity": "sha512-94qwOw4w40sAs5mCmzcGyj8ZUu/KhnWnuMZARRq96k+SjRW/tHFAOlIdnFSrt3BLPvSOK7R3bVAskZQ0N4FTmA==", - "engines": { - "node": ">=8" - } + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.14.0.tgz", + "integrity": "sha512-e/PBHxPdJ00O9p5Ui43+vixSgVf4NlLsmV6QneGERJ3lnjIua/kim6PRFe3iDueT1rQcgSkYP8ZBBXa/h4iPvw==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "win32" + ] }, - "node_modules/@sentry/utils": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.99.0.tgz", - "integrity": "sha512-cYZy5WNTkWs5GgggGnjfGqC44CWir0pAv4GVVSx0fsup4D4pMKBJPrtub15f9uC+QkUf3vVkqwpBqeFxtmJQTQ==", - "dependencies": { - "@sentry/types": "7.99.0" - }, - "engines": { - "node": ">=8" - } + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.14.0.tgz", + "integrity": "sha512-aGg7iToJjdklmxlUlJh/PaPNa4PmqHfyRMLunbL3eaMO0gp656+q1zOKkpJ/CVe9CryJv6tAN1HDoR8cNGzkag==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true }, "node_modules/@svgr/babel-plugin-add-jsx-attribute": { "version": "8.0.0", @@ -3436,31 +5632,6 @@ "url": "https://github.com/sponsors/gregberge" } }, - "node_modules/@svgr/core/node_modules/cosmiconfig": { - "version": "8.3.6", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", - "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", - "dependencies": { - "import-fresh": "^3.3.0", - "js-yaml": "^4.1.0", - "parse-json": "^5.2.0", - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/d-fischer" - }, - "peerDependencies": { - "typescript": ">=4.9.5" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, "node_modules/@svgr/hast-util-to-babel-ast": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-8.0.0.tgz", @@ -3498,6 +5669,49 @@ "@svgr/core": "*" } }, + "node_modules/@svgr/plugin-svgo": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-8.1.0.tgz", + "integrity": "sha512-Ywtl837OGO9pTLIN/onoWLmDQ4zFUycI1g76vuKGEz6evR/ZTJlJuz3G/fIkb6OVBJ2g0o6CGJzaEjfmEo3AHA==", + "dependencies": { + "cosmiconfig": "^8.1.3", + "deepmerge": "^4.3.1", + "svgo": "^3.0.2" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/gregberge" + }, + "peerDependencies": { + "@svgr/core": 
"*" + } + }, + "node_modules/@svgr/rollup": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@svgr/rollup/-/rollup-8.1.0.tgz", + "integrity": "sha512-0XR1poYvPQoPpmfDYLEqUGu5ePAQ4pdgN3VFsZBNAeze7qubVpsIY1o1R6PZpKep/DKu33GSm2NhwpCLkMs2Cw==", + "dependencies": { + "@babel/core": "^7.21.3", + "@babel/plugin-transform-react-constant-elements": "^7.21.3", + "@babel/preset-env": "^7.20.2", + "@babel/preset-react": "^7.18.6", + "@babel/preset-typescript": "^7.21.0", + "@rollup/pluginutils": "^5.0.2", + "@svgr/core": "8.1.0", + "@svgr/plugin-jsx": "8.1.0", + "@svgr/plugin-svgo": "8.1.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/gregberge" + } + }, "node_modules/@tailwindcss/forms": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.5.7.tgz", @@ -3554,9 +5768,9 @@ } }, "node_modules/@testing-library/react": { - "version": "14.2.0", - "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-14.2.0.tgz", - "integrity": "sha512-7uBnPHyOG6nDGCzv8SLeJbSa33ZoYw7swYpSLIgJvBALdq7l9zPNk33om4USrxy1lKTxXaVfufzLmq83WNfWIw==", + "version": "14.2.2", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-14.2.2.tgz", + "integrity": "sha512-SOUuM2ysCvjUWBXTNfQ/ztmnKDmqaiPV3SvoIuyxMUca45rbSWWAT/qB8CUs/JQ/ux/8JFs9DNdFQ3f6jH3crA==", "dev": true, "dependencies": { "@babel/runtime": "^7.12.5", @@ -3571,13 +5785,12 @@ "react-dom": "^18.0.0" } }, - "node_modules/@tootallnate/once": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", - "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", - "dev": true, + "node_modules/@trysound/sax": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz", + "integrity": "sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==", "engines": { - "node": ">= 10" + "node": ">=10.13.0" } }, "node_modules/@tsconfig/node10": { @@ -3647,19 +5860,12 @@ "@babel/types": "^7.20.7" } }, - "node_modules/@types/chai": { - "version": "4.3.11", - "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.11.tgz", - "integrity": "sha512-qQR1dr2rGIHYlJulmr8Ioq3De0Le9E4MJ5AiaeAETJJpndT1uUNHsGFK3L/UIu+rbkQSdj8J/w2bCsBZc/Y5fQ==", - "dev": true - }, - "node_modules/@types/chai-subset": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/@types/chai-subset/-/chai-subset-1.3.5.tgz", - "integrity": "sha512-c2mPnw+xHtXDoHmdtcCXGwyLMiauiAyxWMzhGpqHC4nqI/Y5G2XhTampslK2rb59kpcuHon03UH8W6iYUzw88A==", - "dev": true, + "node_modules/@types/codemirror": { + "version": "5.60.15", + "resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-5.60.15.tgz", + "integrity": "sha512-dTOvwEQ+ouKJ/rE9LT1Ue2hmP6H1mZv5+CCnNWu2qtiOe2LQa9lCprEY20HxiDmV/Bxh+dXjywmy5aKvoGjULA==", "dependencies": { - "@types/chai": "*" + "@types/tern": "*" } }, "node_modules/@types/d3-array": { @@ -3775,6 +5981,15 @@ "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", "dev": true }, + "node_modules/@types/loadable__component": { + "version": "5.13.9", + "resolved": "https://registry.npmjs.org/@types/loadable__component/-/loadable__component-5.13.9.tgz", + "integrity": "sha512-QWOtIkwZqHNdQj3nixQ8oyihQiTMKZLk/DNuvNxMSbTfxf47w+kqcbnxlUeBgAxdOtW0Dh48dTAIp83iJKtnrQ==", + "dev": true, + "dependencies": { + "@types/react": 
"*" + } + }, "node_modules/@types/mdast": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.3.tgz", @@ -3789,16 +6004,13 @@ "integrity": "sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==" }, "node_modules/@types/node": { - "version": "16.18.77", - "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.77.tgz", - "integrity": "sha512-zwqAbRkHjGlxH9PBv8i9dmeaDpBRgfQDSFuREMF2Z+WUi8uc13gfRquMV/8LxBqwm+7jBz+doTVkEEA1CIWOnQ==", - "devOptional": true - }, - "node_modules/@types/parse-json": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.2.tgz", - "integrity": "sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==", - "dev": true + "version": "20.12.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.3.tgz", + "integrity": "sha512-sD+ia2ubTeWrOu+YMF+MTAB7E+O7qsMqAbMfW7DG3K1URwhZ5hN1pLlRVGbf4wDFzSfikL05M17EyorS86jShw==", + "devOptional": true, + "dependencies": { + "undici-types": "~5.26.4" + } }, "node_modules/@types/prismjs": { "version": "1.26.3", @@ -3812,21 +6024,20 @@ "integrity": "sha512-ga8y9v9uyeiLdpKddhxYQkxNDrfvuPrlFb0N1qnZZByvcElJaXthF1UhvCh9TLWJBEHeNtdnbysW7Y6Uq8CVng==" }, "node_modules/@types/ramda": { - "version": "0.28.25", - "resolved": "https://registry.npmjs.org/@types/ramda/-/ramda-0.28.25.tgz", - "integrity": "sha512-HrQNqQAGcITpn9HAJFamDxm7iZeeXiP/95pN5OMbNniDjzCCeOHbBKNGmUy8NRi0fhYS+/cXeo91MFC+06gbow==", + "version": "0.29.12", + "resolved": "https://registry.npmjs.org/@types/ramda/-/ramda-0.29.12.tgz", + "integrity": "sha512-sgIEjpJhdQPB52gDF4aphs9nl0xe54CR22DPdWqT8gQHjZYmVApgA0R3/CpMbl0Y8az2TEZrPNL2zy0EvjbkLA==", "dev": true, "dependencies": { - "ts-toolbelt": "^6.15.1" + "types-ramda": "^0.29.10" } }, "node_modules/@types/react": { - "version": "18.2.48", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.48.tgz", - "integrity": "sha512-qboRCl6Ie70DQQG9hhNREz81jqC1cs9EVNcjQ1AU+jH6NFfSAhVVbrrY/+nSF+Bsk4AOwm9Qa61InvMCyV+H3w==", + "version": "18.2.74", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.74.tgz", + "integrity": "sha512-9AEqNZZyBx8OdZpxzQlaFEVCSFUM2YXJH46yPOiOpm078k6ZLOCcuAzGum/zK8YBwY+dbahVNbHrbgrAwIRlqw==", "dependencies": { "@types/prop-types": "*", - "@types/scheduler": "*", "csstype": "^3.0.2" } }, @@ -3842,10 +6053,26 @@ "react-popper": "^2.2.5" } }, + "node_modules/@types/react-datepicker/node_modules/date-fns": { + "version": "2.30.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz", + "integrity": "sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==", + "dev": true, + "dependencies": { + "@babel/runtime": "^7.21.0" + }, + "engines": { + "node": ">=0.11" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/date-fns" + } + }, "node_modules/@types/react-dom": { - "version": "18.2.18", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.18.tgz", - "integrity": "sha512-TJxDm6OfAX2KJWJdMEVTwWke5Sc/E/RlnPGvGfS0W7+6ocy2xhDVQVh/KvC2Uf7kACs+gDytdusDSdWfWkaNzw==", + "version": "18.2.23", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.23.tgz", + "integrity": "sha512-ZQ71wgGOTmDYpnav2knkjr3qXdAFu0vsk8Ci5w3pGAIdj7/kKAyn+VsQDhXsmzzzepAiI9leWMmubXz690AI/A==", "devOptional": true, "dependencies": { "@types/react": "*" @@ -3860,11 +6087,6 @@ "@types/react": "*" } }, - 
"node_modules/@types/scheduler": { - "version": "0.16.8", - "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.8.tgz", - "integrity": "sha512-WZLiwShhwLRmeV6zH+GkbOFT6Z6VklCItrDioxUnv+u4Ll+8vKeFySoFyK/0ctcRpOmwAicELfmys1sDc/Rw+A==" - }, "node_modules/@types/semver": { "version": "7.5.6", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.6.tgz", @@ -3892,6 +6114,14 @@ "integrity": "sha512-0vWLNK2D5MT9dg0iOo8GlKguPAU02QjmZitPEsXRuJXU/OGIOt9vT9Fc26wtYuavLxtO45v9PGleoL9Z0k1LHg==", "dev": true }, + "node_modules/@types/tern": { + "version": "0.23.9", + "resolved": "https://registry.npmjs.org/@types/tern/-/tern-0.23.9.tgz", + "integrity": "sha512-ypzHFE/wBzh+BlH6rrBgS5I/Z7RD21pGhZ2rltb/+ZrVM1awdZwjx7hE5XfuYgHWk9uvV5HLZN3SloevCAp3Bw==", + "dependencies": { + "@types/estree": "*" + } + }, "node_modules/@types/unist": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz", @@ -3917,32 +6147,33 @@ } }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.62.0.tgz", - "integrity": "sha512-TiZzBSJja/LbhNPvk6yc0JrX9XqhQ0hdh6M2svYfsHGejaKFIAGd9MQ+ERIMzLGlN/kZoYIgdxFV0PuljTKXag==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.5.0.tgz", + "integrity": "sha512-HpqNTH8Du34nLxbKgVMGljZMG0rJd2O9ecvr2QLYp+7512ty1j42KnsFwspPXg1Vh8an9YImf6CokUBltisZFQ==", "dev": true, "dependencies": { - "@eslint-community/regexpp": "^4.4.0", - "@typescript-eslint/scope-manager": "5.62.0", - "@typescript-eslint/type-utils": "5.62.0", - "@typescript-eslint/utils": "5.62.0", + "@eslint-community/regexpp": "^4.5.1", + "@typescript-eslint/scope-manager": "7.5.0", + "@typescript-eslint/type-utils": "7.5.0", + "@typescript-eslint/utils": "7.5.0", + "@typescript-eslint/visitor-keys": "7.5.0", "debug": "^4.3.4", "graphemer": "^1.4.0", - "ignore": "^5.2.0", - "natural-compare-lite": "^1.4.0", - "semver": "^7.3.7", - "tsutils": "^3.21.0" + "ignore": "^5.2.4", + "natural-compare": "^1.4.0", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^5.0.0", - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + "@typescript-eslint/parser": "^7.0.0", + "eslint": "^8.56.0" }, "peerDependenciesMeta": { "typescript": { @@ -3984,25 +6215,26 @@ "dev": true }, "node_modules/@typescript-eslint/parser": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.62.0.tgz", - "integrity": "sha512-VlJEV0fOQ7BExOsHYAGrgbEiZoi8D+Bl2+f6V2RrXerRSylnp+ZBHmPvaIa8cz0Ajx7WO7Z5RqfgYg7ED1nRhA==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.5.0.tgz", + "integrity": "sha512-cj+XGhNujfD2/wzR1tabNsidnYRaFfEkcULdcIyVBYcXjBvBKOes+mpMBP7hMpOyk+gBcfXsrg4NBGAStQyxjQ==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "5.62.0", - "@typescript-eslint/types": "5.62.0", - "@typescript-eslint/typescript-estree": "5.62.0", + "@typescript-eslint/scope-manager": "7.5.0", + "@typescript-eslint/types": "7.5.0", + "@typescript-eslint/typescript-estree": "7.5.0", + "@typescript-eslint/visitor-keys": "7.5.0", "debug": "^4.3.4" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || 
>=16.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + "eslint": "^8.56.0" }, "peerDependenciesMeta": { "typescript": { @@ -4011,16 +6243,16 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.62.0.tgz", - "integrity": "sha512-VXuvVvZeQCQb5Zgf4HAxc04q5j+WrNAtNh9OwCsCgpKqESMTu3tF/jhZ3xG6T4NZwWl65Bg8KuS2uEvhSfLl0w==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.5.0.tgz", + "integrity": "sha512-Z1r7uJY0MDeUlql9XJ6kRVgk/sP11sr3HKXn268HZyqL7i4cEfrdFuSSY/0tUqT37l5zT0tJOsuDP16kio85iA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "5.62.0", - "@typescript-eslint/visitor-keys": "5.62.0" + "@typescript-eslint/types": "7.5.0", + "@typescript-eslint/visitor-keys": "7.5.0" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -4028,25 +6260,25 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.62.0.tgz", - "integrity": "sha512-xsSQreu+VnfbqQpW5vnCJdq1Z3Q0U31qiWmRhr98ONQmcp/yhiPJFPq8MXiJVLiksmOKSjIldZzkebzHuCGzew==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.5.0.tgz", + "integrity": "sha512-A021Rj33+G8mx2Dqh0nMO9GyjjIBK3MqgVgZ2qlKf6CJy51wY/lkkFqq3TqqnH34XyAHUkq27IjlUkWlQRpLHw==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "5.62.0", - "@typescript-eslint/utils": "5.62.0", + "@typescript-eslint/typescript-estree": "7.5.0", + "@typescript-eslint/utils": "7.5.0", "debug": "^4.3.4", - "tsutils": "^3.21.0" + "ts-api-utils": "^1.0.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "*" + "eslint": "^8.56.0" }, "peerDependenciesMeta": { "typescript": { @@ -4055,12 +6287,12 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.62.0.tgz", - "integrity": "sha512-87NVngcbVXUahrRTqIK27gD2t5Cu1yuCXxbLcFtCzZGlfyVWWh8mLHkoxzjsB6DDNnvdL+fW8MiwPEJyGJQDgQ==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.5.0.tgz", + "integrity": "sha512-tv5B4IHeAdhR7uS4+bf8Ov3k793VEVHd45viRRkehIUZxm0WF82VPiLgHzA/Xl4TGPg1ZD49vfxBKFPecD5/mg==", "dev": true, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -4068,21 +6300,22 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.62.0.tgz", - "integrity": "sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.5.0.tgz", + "integrity": "sha512-YklQQfe0Rv2PZEueLTUffiQGKQneiIEKKnfIqPIOxgM9lKSZFCjT5Ad4VqRKj/U4+kQE3fa8YQpskViL7WjdPQ==", "dev": true, 
"dependencies": { - "@typescript-eslint/types": "5.62.0", - "@typescript-eslint/visitor-keys": "5.62.0", + "@typescript-eslint/types": "7.5.0", + "@typescript-eslint/visitor-keys": "7.5.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" + "minimatch": "9.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -4094,6 +6327,15 @@ } } }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, "node_modules/@typescript-eslint/typescript-estree/node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -4106,10 +6348,25 @@ "node": ">=10" } }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "dev": true, "dependencies": { "lru-cache": "^6.0.0" @@ -4128,29 +6385,28 @@ "dev": true }, "node_modules/@typescript-eslint/utils": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.62.0.tgz", - "integrity": "sha512-n8oxjeb5aIbPFEtmQxQYOLI0i9n5ySBEY/ZEHHZqKQSFnxio1rv6dthascc9dLuwrL0RC5mPCxB7vnAVGAYWAQ==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.5.0.tgz", + "integrity": "sha512-3vZl9u0R+/FLQcpy2EHyRGNqAS/ofJ3Ji8aebilfJe+fobK8+LbIFmrHciLVDxjDoONmufDcnVSF38KwMEOjzw==", "dev": true, "dependencies": { - "@eslint-community/eslint-utils": "^4.2.0", - "@types/json-schema": "^7.0.9", - "@types/semver": "^7.3.12", - "@typescript-eslint/scope-manager": "5.62.0", - "@typescript-eslint/types": "5.62.0", - "@typescript-eslint/typescript-estree": "5.62.0", - "eslint-scope": "^5.1.1", - "semver": "^7.3.7" + "@eslint-community/eslint-utils": "^4.4.0", + "@types/json-schema": "^7.0.12", + "@types/semver": "^7.5.0", + "@typescript-eslint/scope-manager": "7.5.0", + "@typescript-eslint/types": "7.5.0", + "@typescript-eslint/typescript-estree": "7.5.0", + "semver": "^7.5.4" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + "eslint": 
"^8.56.0" } }, "node_modules/@typescript-eslint/utils/node_modules/lru-cache": { @@ -4166,9 +6422,9 @@ } }, "node_modules/@typescript-eslint/utils/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "dev": true, "dependencies": { "lru-cache": "^6.0.0" @@ -4187,16 +6443,16 @@ "dev": true }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.62.0.tgz", - "integrity": "sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.5.0.tgz", + "integrity": "sha512-mcuHM/QircmA6O7fy6nn2w/3ditQkj+SgtOc8DW3uQ10Yfj42amm2i+6F2K4YAOPNNTmE6iM1ynM6lrSwdendA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "5.62.0", - "eslint-visitor-keys": "^3.3.0" + "@typescript-eslint/types": "7.5.0", + "eslint-visitor-keys": "^3.4.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^18.18.0 || >=20.0.0" }, "funding": { "type": "opencollective", @@ -4204,9 +6460,9 @@ } }, "node_modules/@uiw/color-convert": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/color-convert/-/color-convert-2.0.6.tgz", - "integrity": "sha512-LDu9r8geEjUgGzsuTkUu6rV5SCqR9r2liVYQxH42tZ5NkFx87Oswz/lpwthCoulhgjSWA+QEjjE3XB8Z9ove1g==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/color-convert/-/color-convert-2.1.1.tgz", + "integrity": "sha512-L421mBAT2NRsmYv7BQvofOEwV0iKee1upPVxMjo2NnkJWyIu4I+H1RxK9m3uT8yvcOlStZhv7BQBsFyJCGmIMg==", "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" }, @@ -4215,29 +6471,30 @@ } }, "node_modules/@uiw/react-color": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color/-/react-color-2.0.6.tgz", - "integrity": "sha512-0QdRfGFgIUtIMnoUzopVApORPXP8kvUqmk9iWJFF+VcKVyrNCTc8x05HLi/8HwPeawS/tQk4LdeGiwasba8/MA==", - "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-alpha": "2.0.6", - "@uiw/react-color-block": "2.0.6", - "@uiw/react-color-chrome": "2.0.6", - "@uiw/react-color-circle": "2.0.6", - "@uiw/react-color-colorful": "2.0.6", - "@uiw/react-color-compact": "2.0.6", - "@uiw/react-color-editable-input": "2.0.6", - "@uiw/react-color-editable-input-hsla": "2.0.6", - "@uiw/react-color-editable-input-rgba": "2.0.6", - "@uiw/react-color-github": "2.0.6", - "@uiw/react-color-hue": "2.0.6", - "@uiw/react-color-material": "2.0.6", - "@uiw/react-color-saturation": "2.0.6", - "@uiw/react-color-shade-slider": "2.0.6", - "@uiw/react-color-sketch": "2.0.6", - "@uiw/react-color-slider": "2.0.6", - "@uiw/react-color-swatch": "2.0.6", - "@uiw/react-color-wheel": "2.0.6" + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color/-/react-color-2.1.1.tgz", + "integrity": "sha512-RE95rGzlOej848nK0onqxk2N+asrHpp3LEH2h7VJkcdJLOK54jccnGKdCc2seNue3zpCIcwPcR38hOeHhfJLJg==", + "dependencies": { + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-alpha": "2.1.1", + "@uiw/react-color-block": "2.1.1", + "@uiw/react-color-chrome": "2.1.1", + 
"@uiw/react-color-circle": "2.1.1", + "@uiw/react-color-colorful": "2.1.1", + "@uiw/react-color-compact": "2.1.1", + "@uiw/react-color-editable-input": "2.1.1", + "@uiw/react-color-editable-input-hsla": "2.1.1", + "@uiw/react-color-editable-input-rgba": "2.1.1", + "@uiw/react-color-github": "2.1.1", + "@uiw/react-color-hue": "2.1.1", + "@uiw/react-color-material": "2.1.1", + "@uiw/react-color-name": "2.1.1", + "@uiw/react-color-saturation": "2.1.1", + "@uiw/react-color-shade-slider": "2.1.1", + "@uiw/react-color-sketch": "2.1.1", + "@uiw/react-color-slider": "2.1.1", + "@uiw/react-color-swatch": "2.1.1", + "@uiw/react-color-wheel": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4249,12 +6506,12 @@ } }, "node_modules/@uiw/react-color-alpha": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-alpha/-/react-color-alpha-2.0.6.tgz", - "integrity": "sha512-EWV54kU6cWT+cpq6QTqJwtSgh3Hjdu/1umVrEC7v6vp3abHgKpmLBMznybY6zoeIh6+TnLnNLoyc8M+Jm0Aj7g==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-alpha/-/react-color-alpha-2.1.1.tgz", + "integrity": "sha512-6wvWLn4Dgb3jIaveLdjhSg2RJIWKJbRU/uHSFtEd8rvXebRt9P7NFr5YsnkHDBUitx9KFxRL6kaI/GQCYU+8nA==", "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-drag-event-interactive": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-drag-event-interactive": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4266,13 +6523,13 @@ } }, "node_modules/@uiw/react-color-block": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-block/-/react-color-block-2.0.6.tgz", - "integrity": "sha512-GV3c7OCElnNq+BwdOD4HfbL3gGQlc1d4kod9XlzjaR6oMlWZ5PMEXeirf8Pn0c7xPUV+ltwAbgCmLkwaSxwTew==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-block/-/react-color-block-2.1.1.tgz", + "integrity": "sha512-c4xA42z7aLou8jBjxwLgUYZ+SiaZbVMADPLE/CcBi8EY/NcvvvtrL2wJGqE0g2Aqfey5RjB7nFxUeqSG1N00aA==", "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-editable-input": "2.0.6", - "@uiw/react-color-swatch": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-editable-input": "2.1.1", + "@uiw/react-color-swatch": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4284,18 +6541,18 @@ } }, "node_modules/@uiw/react-color-chrome": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-chrome/-/react-color-chrome-2.0.6.tgz", - "integrity": "sha512-lcT9/r+IjYSCHJf6EH1Gb4XAlJUIkDy/1XJe7SYYmySig55daIs0LjenTw9U8mWQlqgGfMItGp9RFEQy9aJelQ==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-chrome/-/react-color-chrome-2.1.1.tgz", + "integrity": "sha512-tv51lG6Wol8skiclLXXc8yf5nAVig5OjYtuNxsnFr165GP1YJ/mdnS7OIprYF/wP5mz66W7K0iz/8hAIof5/ug==", "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-alpha": "2.0.6", - "@uiw/react-color-editable-input": "2.0.6", - "@uiw/react-color-editable-input-hsla": "2.0.6", - "@uiw/react-color-editable-input-rgba": "2.0.6", - "@uiw/react-color-github": "2.0.6", - "@uiw/react-color-hue": "2.0.6", - "@uiw/react-color-saturation": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-alpha": "2.1.1", + "@uiw/react-color-editable-input": "2.1.1", + "@uiw/react-color-editable-input-hsla": "2.1.1", + "@uiw/react-color-editable-input-rgba": "2.1.1", + "@uiw/react-color-github": "2.1.1", + "@uiw/react-color-hue": "2.1.1", + "@uiw/react-color-saturation": 
"2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4307,12 +6564,12 @@ } }, "node_modules/@uiw/react-color-circle": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-circle/-/react-color-circle-2.0.6.tgz", - "integrity": "sha512-jCwzlKXC0YVQPP6zhogYjFifNxS8J6VBx2ADKv8t50We0lc3gcicrT2Db/EAC0WGp8yigp/J+4OD5BROiST2rw==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-circle/-/react-color-circle-2.1.1.tgz", + "integrity": "sha512-t/Wr6eT9GLOzywaTmJclOZ3NuirJlMk8eVxoLKM7eRePbN5WowIEMwug/towSU3YrBrEbSSWjZRhfVUqdh7kMw==", "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-swatch": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-swatch": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4324,14 +6581,14 @@ } }, "node_modules/@uiw/react-color-colorful": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-colorful/-/react-color-colorful-2.0.6.tgz", - "integrity": "sha512-ZCUFeuviRJcJiu1MB1n1MbouzmrUMYYgLcWU3ZvGQOT0NkJ3PgCRJrKR/9p6X3zkDBfQ4QCUG88c9X7XiEb+SQ==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-colorful/-/react-color-colorful-2.1.1.tgz", + "integrity": "sha512-7cGRtYv+llXO7Tmpfska9HxjQAbkqBP5P63wned6JD/0lOM4KXELxhWI3044nO/Osi5r3FDsGh0HRqiLgUK75Q==", "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-alpha": "2.0.6", - "@uiw/react-color-hue": "2.0.6", - "@uiw/react-color-saturation": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-alpha": "2.1.1", + "@uiw/react-color-hue": "2.1.1", + "@uiw/react-color-saturation": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4343,14 +6600,14 @@ } }, "node_modules/@uiw/react-color-compact": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-compact/-/react-color-compact-2.0.6.tgz", - "integrity": "sha512-NB3vDw6aNDzKFH4z9lnSMwd4QCc2SmAwAhglns38CwhoD75iwciMsv9iNLMiW7CCZhuJtWpIiSR36Vb9eIRm8Q==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-compact/-/react-color-compact-2.1.1.tgz", + "integrity": "sha512-Af9skc0Bx3lot2zg58SlpMoQEKqyMbKvr7y2O541Hodc89ykgEDJZXqLrKiKnacBWMZxSAcAHoSFf70RLeoTlw==", "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-editable-input": "2.0.6", - "@uiw/react-color-editable-input-rgba": "2.0.6", - "@uiw/react-color-swatch": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-editable-input": "2.1.1", + "@uiw/react-color-editable-input-rgba": "2.1.1", + "@uiw/react-color-swatch": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4362,9 +6619,9 @@ } }, "node_modules/@uiw/react-color-editable-input": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-editable-input/-/react-color-editable-input-2.0.6.tgz", - "integrity": "sha512-m2aGFZU0nOtP6XYRaa4mRKv7654tG4YYnGtYXMkR32j4eCOAUEpAmMYbKtSFiB+vUPQKyA3tcEoKNa6alNp0vw==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-editable-input/-/react-color-editable-input-2.1.1.tgz", + "integrity": "sha512-mEohydHWV49iQ3RuH/3My20T7wDtOPzzGEBOMJeHIxMnN6FMwl9U1bAAgDb2ovnt5Ws0PaCWcBjNKHARpVSZ1Q==", "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" }, @@ -4375,12 +6632,12 @@ } }, "node_modules/@uiw/react-color-editable-input-hsla": { - "version": "2.0.6", - "resolved": 
"https://registry.npmjs.org/@uiw/react-color-editable-input-hsla/-/react-color-editable-input-hsla-2.0.6.tgz", - "integrity": "sha512-ME3WcgP5piU8BED6pcSTe0LryIecr0GGLWTw2p9rfZgUlAaUIfdDHOTCXTCzxh9EWEQcclBlCotj1Nckcsjplg==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-editable-input-hsla/-/react-color-editable-input-hsla-2.1.1.tgz", + "integrity": "sha512-2Eqcd0hUa5qLVxT062Vf9vsxS2/6X+AJ6f6Wfs6/UAM6iUWqWPkJxajRmEtFfB6Zv5bcaYjhSZNGgeEf7azAvQ==", "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-editable-input-rgba": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-editable-input-rgba": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4392,12 +6649,12 @@ } }, "node_modules/@uiw/react-color-editable-input-rgba": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-editable-input-rgba/-/react-color-editable-input-rgba-2.0.6.tgz", - "integrity": "sha512-EcEcjHkiQX8Ecuv6nYK2DWmR5oSp5d3VMifZovNCr8Q41C2p08AeFA16CgPnmlsWlghRYqe5ekY6bkwdIC5Q2Q==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-editable-input-rgba/-/react-color-editable-input-rgba-2.1.1.tgz", + "integrity": "sha512-6YtDaBWTXu27MK6s3HZty0qg3mYb4GN/8dI8T39R/qEiMX/SButMfC09pnygN74InyuG8MzobUg2GowfTRUG5A==", "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-editable-input": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-editable-input": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4409,12 +6666,29 @@ } }, "node_modules/@uiw/react-color-github": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-github/-/react-color-github-2.0.6.tgz", - "integrity": "sha512-oDLRId+r+YEMr9vCDBzAZbrCW3qNGcgtzhWEgCupMZfIpglTMwR2iJmWQjylq66kXhwuBqLJe5r+uvXljqo4VA==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-github/-/react-color-github-2.1.1.tgz", + "integrity": "sha512-5balACHzjVqrkdEsGXI2Ir4iXQrTAHQ7uRzqY+op41uuciIb8yGI1PecJd2qIjUJhk/kZ4nmp6KAjQEAK2iVIQ==", + "dependencies": { + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-swatch": "2.1.1" + }, + "funding": { + "url": "https://jaywcjlove.github.io/#/sponsor" + }, + "peerDependencies": { + "@babel/runtime": ">=7.19.0", + "react": ">=16.9.0", + "react-dom": ">=16.9.0" + } + }, + "node_modules/@uiw/react-color-hue": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-hue/-/react-color-hue-2.1.1.tgz", + "integrity": "sha512-IvN2acCV35yRfmbscUQbNfwjKF+g51kMONv9j0zxDlTct2R0x4gatsVjA1tTpLv5UCIkFvhw80xg04QATrJ4Nw==", "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-swatch": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-alpha": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4425,13 +6699,14 @@ "react-dom": ">=16.9.0" } }, - "node_modules/@uiw/react-color-hue": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-hue/-/react-color-hue-2.0.6.tgz", - "integrity": "sha512-ytn3yH2SDM761IwoyXPBLKFSGpGxBHKopE3bgyw/5nuDq12J/oFy2uOLjBNAUy0qfZC2FARt6yCYqQcON9aChQ==", + "node_modules/@uiw/react-color-material": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-material/-/react-color-material-2.1.1.tgz", + "integrity": "sha512-Pcp/kpBnTGYXqP0up3rqTdJWKtnD2XdiA5Zdh5bdwqssI+qHo0cVELXOnpwU3LiSCZykTDauvGoOqTWprvox4g==", "dependencies": { - "@uiw/color-convert": "2.0.6", - 
"@uiw/react-color-alpha": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-editable-input": "2.1.1", + "@uiw/react-color-editable-input-rgba": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4442,31 +6717,28 @@ "react-dom": ">=16.9.0" } }, - "node_modules/@uiw/react-color-material": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-material/-/react-color-material-2.0.6.tgz", - "integrity": "sha512-HylgLq4bArp8NFx/Ub78/Rt28+xnqhk72hK5sjd5PJyoD+1wuca+Hi1JhnRKCb4X4jNytMRol2Ny5Up/YTyiAg==", + "node_modules/@uiw/react-color-name": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-name/-/react-color-name-2.1.1.tgz", + "integrity": "sha512-k+19NgPHPZ88oqzCfAcVd7fT4F6XywkeZkX3DDyRG3Skc8zuGdIS2xT7Ne7ZSQb31UT0+UfuOiwOST+r+kGnFA==", "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-editable-input": "2.0.6", - "@uiw/react-color-editable-input-rgba": "2.0.6" + "colors-named": "^1.0.1", + "colors-named-hex": "^1.0.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" }, "peerDependencies": { - "@babel/runtime": ">=7.19.0", - "react": ">=16.9.0", - "react-dom": ">=16.9.0" + "@babel/runtime": ">=7.19.0" } }, "node_modules/@uiw/react-color-saturation": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-saturation/-/react-color-saturation-2.0.6.tgz", - "integrity": "sha512-wnM1GlxKjvFuEHhSOA/rxho2lqZyywcwscTPzgAxg59hrQ6ddUdDaiAWJiujJ+mkmGqf4xO6llAvf9epJC3AZg==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-saturation/-/react-color-saturation-2.1.1.tgz", + "integrity": "sha512-lg3ElCNuiHt7wsfR9FQpgFcg9zht+GAuVhemvgLq6twR62ZUgFd58in42T1F8l2ZpimXu8SgLGEtvc7XB2i8CQ==", "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-drag-event-interactive": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-drag-event-interactive": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4478,12 +6750,12 @@ } }, "node_modules/@uiw/react-color-shade-slider": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-shade-slider/-/react-color-shade-slider-2.0.6.tgz", - "integrity": "sha512-N9BmJ4HoeYiHKvOXWRNp9if4o+gNN4VK7/OPo4IjWiyZUKqskYIi0qqvlafIQix1SmZPa8C4Ip9e4dPq80qrFQ==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-shade-slider/-/react-color-shade-slider-2.1.1.tgz", + "integrity": "sha512-7DO2d53GGFR6fXPS7g3hUNTlQCwNjKSQum90h4HeJa5jxIQiuSpeMVPo6IoG8EAuP88Au5C55SYV+qjNgLvG8Q==", "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-alpha": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-alpha": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4495,17 +6767,17 @@ } }, "node_modules/@uiw/react-color-sketch": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-sketch/-/react-color-sketch-2.0.6.tgz", - "integrity": "sha512-tSN/R0JsvLJMuIN3nptfuYhkD7brY9BH6nJOPLaydPCLzj5jMftIcTyqY5H/bDMd8ZNwlU71TCLkPS4CNdHJSQ==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-sketch/-/react-color-sketch-2.1.1.tgz", + "integrity": "sha512-hiHhwSJnMzRs9mUfOiqekPosQQ60mhzRN1LDfD/z4sW7GHxWgV9sl9jdoBeN3RRS0O4i/qHjNrJqTad6D4rV7g==", "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-alpha": "2.0.6", - "@uiw/react-color-editable-input": "2.0.6", - "@uiw/react-color-editable-input-rgba": "2.0.6", - 
"@uiw/react-color-hue": "2.0.6", - "@uiw/react-color-saturation": "2.0.6", - "@uiw/react-color-swatch": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-alpha": "2.1.1", + "@uiw/react-color-editable-input": "2.1.1", + "@uiw/react-color-editable-input-rgba": "2.1.1", + "@uiw/react-color-hue": "2.1.1", + "@uiw/react-color-saturation": "2.1.1", + "@uiw/react-color-swatch": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4517,12 +6789,12 @@ } }, "node_modules/@uiw/react-color-slider": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-slider/-/react-color-slider-2.0.6.tgz", - "integrity": "sha512-cImrIBaqU04RmzJxkwS4QQATtF5OD2jj+wJCsPkRhcT1Zc+WD4JSe3CXgliVsyXQux4DHbND6NcCJjXyj5W3ig==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-slider/-/react-color-slider-2.1.1.tgz", + "integrity": "sha512-J0nqSpiJS4lZCUudAFo8sFMTZgiPgQ0iR4ADx1Hc5vGJr5KfpGwOVq68cUvqiAqXplUQZPVcjwoBhxl4M4fCzg==", "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-alpha": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-alpha": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4534,11 +6806,11 @@ } }, "node_modules/@uiw/react-color-swatch": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-swatch/-/react-color-swatch-2.0.6.tgz", - "integrity": "sha512-fDS5oo25tJEW+Xzm8ZM+0t6WjTE7poOQhxbw9ePxVdlcg12SGIvOCiY0kQrox3QjCAgyjTx33poYNEa3uO6BAg==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-swatch/-/react-color-swatch-2.1.1.tgz", + "integrity": "sha512-soKsfVgflcKSBx47PaBocKZ0beIXfk9ruE4r9778mGnDDpxc2RC5zPNfvzQkSLKW+siXIS0cscuvb/8s1zK5jw==", "dependencies": { - "@uiw/color-convert": "2.0.6" + "@uiw/color-convert": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4550,12 +6822,12 @@ } }, "node_modules/@uiw/react-color-wheel": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-wheel/-/react-color-wheel-2.0.6.tgz", - "integrity": "sha512-7EfZ3db+NUd+2AQ7Nm7lsXmmVXCD65cZFhpTQuM3PIF8VgzwVTumXKBV2WyfCdo+alQowvN8DQCxdcmmjIozFg==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-wheel/-/react-color-wheel-2.1.1.tgz", + "integrity": "sha512-88TYL9GCStBNULEkZ6qlQt8z/jnAf1ZSJwpbVK1JGUwogPJaMAJt8FRSUfzTNpIwYA8ymgK9Y1seng6Z8YkS9Q==", "dependencies": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-drag-event-interactive": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-drag-event-interactive": "2.1.1" }, "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" @@ -4567,9 +6839,9 @@ } }, "node_modules/@uiw/react-drag-event-interactive": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-drag-event-interactive/-/react-drag-event-interactive-2.0.6.tgz", - "integrity": "sha512-LO3Q5x7NMAiIiZMwd6Yulpim+bYafs8ZYOhp+uOgY3Isvbke/me2Ix62qM+gQimEQjOYuH/9GZx5aC+uOcYo2w==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-drag-event-interactive/-/react-drag-event-interactive-2.1.1.tgz", + "integrity": "sha512-hJjoJg9ZASzhY6HFwZSnNhx+BJ5rfqqUnpTm6ZtfjCO5DKRZW3CDios0cmMu2Ojvdu0a9qE9x8CURCUuoCzqxw==", "funding": { "url": "https://jaywcjlove.github.io/#/sponsor" }, @@ -4602,65 +6874,72 @@ "vite": "^4.2.0 || ^5.0.0" } }, - "node_modules/@vitest/coverage-c8": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/coverage-c8/-/coverage-c8-0.31.4.tgz", - 
"integrity": "sha512-VPx368m4DTcpA/P0v3YdVxl4QOSh1DbUcXURLRvDShrIB5KxOgfzw4Bn2R8AhAe/GyiWW/FIsJ/OJdYXCCiC1w==", - "deprecated": "v8 coverage is moved to @vitest/coverage-v8 package", + "node_modules/@vitest/coverage-v8": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-1.4.0.tgz", + "integrity": "sha512-4hDGyH1SvKpgZnIByr9LhGgCEuF9DKM34IBLCC/fVfy24Z3+PZ+Ii9hsVBsHvY1umM1aGPEjceRkzxCfcQ10wg==", "dev": true, "dependencies": { "@ampproject/remapping": "^2.2.1", - "c8": "^7.13.0", - "magic-string": "^0.30.0", + "@bcoe/v8-coverage": "^0.2.3", + "debug": "^4.3.4", + "istanbul-lib-coverage": "^3.2.2", + "istanbul-lib-report": "^3.0.1", + "istanbul-lib-source-maps": "^5.0.4", + "istanbul-reports": "^3.1.6", + "magic-string": "^0.30.5", + "magicast": "^0.3.3", "picocolors": "^1.0.0", - "std-env": "^3.3.2" + "std-env": "^3.5.0", + "strip-literal": "^2.0.0", + "test-exclude": "^6.0.0", + "v8-to-istanbul": "^9.2.0" }, "funding": { "url": "https://opencollective.com/vitest" }, "peerDependencies": { - "vitest": ">=0.30.0 <1" + "vitest": "1.4.0" } }, "node_modules/@vitest/expect": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-0.31.4.tgz", - "integrity": "sha512-tibyx8o7GUyGHZGyPgzwiaPaLDQ9MMuCOrc03BYT0nryUuhLbL7NV2r/q98iv5STlwMgaKuFJkgBW/8iPKwlSg==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-1.4.0.tgz", + "integrity": "sha512-Jths0sWCJZ8BxjKe+p+eKsoqev1/T8lYcrjavEaz8auEJ4jAVY0GwW3JKmdVU4mmNPLPHixh4GNXP7GFtAiDHA==", "dev": true, "dependencies": { - "@vitest/spy": "0.31.4", - "@vitest/utils": "0.31.4", - "chai": "^4.3.7" + "@vitest/spy": "1.4.0", + "@vitest/utils": "1.4.0", + "chai": "^4.3.10" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/runner": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-0.31.4.tgz", - "integrity": "sha512-Wgm6UER+gwq6zkyrm5/wbpXGF+g+UBB78asJlFkIOwyse0pz8lZoiC6SW5i4gPnls/zUcPLWS7Zog0LVepXnpg==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-1.4.0.tgz", + "integrity": "sha512-EDYVSmesqlQ4RD2VvWo3hQgTJ7ZrFQ2VSJdfiJiArkCerDAGeyF1i6dHkmySqk573jLp6d/cfqCN+7wUB5tLgg==", "dev": true, "dependencies": { - "@vitest/utils": "0.31.4", - "concordance": "^5.0.4", - "p-limit": "^4.0.0", - "pathe": "^1.1.0" + "@vitest/utils": "1.4.0", + "p-limit": "^5.0.0", + "pathe": "^1.1.1" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/runner/node_modules/p-limit": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz", - "integrity": "sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-5.0.0.tgz", + "integrity": "sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==", "dev": true, "dependencies": { "yocto-queue": "^1.0.0" }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -4679,45 +6958,119 @@ } }, "node_modules/@vitest/snapshot": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-0.31.4.tgz", - "integrity": "sha512-LemvNumL3NdWSmfVAMpXILGyaXPkZbG5tyl6+RQSdcHnTj6hvA49UAI8jzez9oQyE/FWLKRSNqTGzsHuk89LRA==", + "version": "1.4.0", + "resolved": 
"https://registry.npmjs.org/@vitest/snapshot/-/snapshot-1.4.0.tgz", + "integrity": "sha512-saAFnt5pPIA5qDGxOHxJ/XxhMFKkUSBJmVt5VgDsAqPTX6JP326r5C/c9UuCMPoXNzuudTPsYDZCoJ5ilpqG2A==", "dev": true, "dependencies": { - "magic-string": "^0.30.0", - "pathe": "^1.1.0", - "pretty-format": "^27.5.1" + "magic-string": "^0.30.5", + "pathe": "^1.1.1", + "pretty-format": "^29.7.0" }, "funding": { "url": "https://opencollective.com/vitest" } }, + "node_modules/@vitest/snapshot/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@vitest/snapshot/node_modules/pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@vitest/snapshot/node_modules/react-is": { + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", + "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==", + "dev": true + }, "node_modules/@vitest/spy": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-0.31.4.tgz", - "integrity": "sha512-3ei5ZH1s3aqbEyftPAzSuunGICRuhE+IXOmpURFdkm5ybUADk+viyQfejNk6q8M5QGX8/EVKw+QWMEP3DTJDag==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-1.4.0.tgz", + "integrity": "sha512-Ywau/Qs1DzM/8Uc+yA77CwSegizMlcgTJuYGAi0jujOteJOUf1ujunHThYo243KG9nAyWT3L9ifPYZ5+As/+6Q==", "dev": true, "dependencies": { - "tinyspy": "^2.1.0" + "tinyspy": "^2.2.0" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/utils": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-0.31.4.tgz", - "integrity": "sha512-DobZbHacWznoGUfYU8XDPY78UubJxXfMNY1+SUdOp1NsI34eopSA6aZMeaGu10waSOeYwE8lxrd/pLfT0RMxjQ==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-1.4.0.tgz", + "integrity": "sha512-mx3Yd1/6e2Vt/PUC98DcqTirtfxUyAZ32uK82r8rZzbtBeBo+nqgnjx/LvqQdWsrvNtm14VmurNgcf4nqY5gJg==", "dev": true, "dependencies": { - "concordance": "^5.0.4", - "loupe": "^2.3.6", - "pretty-format": "^27.5.1" + "diff-sequences": "^29.6.3", + "estree-walker": "^3.0.3", + "loupe": "^2.3.7", + "pretty-format": "^29.7.0" }, "funding": { "url": "https://opencollective.com/vitest" } }, + "node_modules/@vitest/utils/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@vitest/utils/node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": 
"sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/@vitest/utils/node_modules/pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@vitest/utils/node_modules/react-is": { + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", + "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==", + "dev": true + }, "node_modules/@whatwg-node/events": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.0.3.tgz", @@ -4794,13 +7147,6 @@ "node": ">=8" } }, - "node_modules/abab": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz", - "integrity": "sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==", - "deprecated": "Use your platform's native atob() and btoa() methods instead", - "dev": true - }, "node_modules/acorn": { "version": "8.11.3", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", @@ -4813,16 +7159,6 @@ "node": ">=0.4.0" } }, - "node_modules/acorn-globals": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-7.0.1.tgz", - "integrity": "sha512-umOSDSDrfHbTNPuNpC2NSnnA3LUrqpevPb4T9jRx4MagXNS0rs+gwiTcAvqCRmsD6utzsrzNt+ebm00SNWiC3Q==", - "dev": true, - "dependencies": { - "acorn": "^8.1.0", - "acorn-walk": "^8.0.2" - } - }, "node_modules/acorn-jsx": { "version": "5.3.2", "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", @@ -4842,9 +7178,9 @@ } }, "node_modules/agent-base": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz", - "integrity": "sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", + "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", "dev": true, "dependencies": { "debug": "^4.3.4" @@ -4975,6 +7311,17 @@ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, + "node_modules/aria-hidden": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.4.tgz", + "integrity": "sha512-y+CcFFwelSXpLZk/7fMB2mUbGtX9lKycf1MWJ7CaTIERyitVlyQx6C+sxcROU2BAJ24OiZyK+8wj2i8AlBoS3A==", + "dependencies": { + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/aria-query": { "version": "5.1.3", "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.1.3.tgz", @@ -4985,28 +7332,32 @@ } }, "node_modules/array-buffer-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", - "integrity": 
"sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", + "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "is-array-buffer": "^3.0.1" + "call-bind": "^1.0.5", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/array-includes": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.7.tgz", - "integrity": "sha512-dlcsNBIiWhPkHdOEEKnehA+RNUWDc4UqFtnIXU4uuYDPtA4LDkr7qip2p0VvFAEXNDr0yWZ9PJyIRiGjRLQzwQ==", + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.8.tgz", + "integrity": "sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1", + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.4", "is-string": "^1.0.7" }, "engines": { @@ -5025,17 +7376,38 @@ "node": ">=8" } }, + "node_modules/array.prototype.findlast": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.findlast/-/array.prototype.findlast-1.2.5.tgz", + "integrity": "sha512-CVvd6FHg1Z3POpBLxO6E6zr+rSKEQ9L6rZHAaY7lLfhKsWYUBBOuMs0e9o24oopj6H+geRCX0YJ+TJLBK2eHyQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/array.prototype.findlastindex": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz", - "integrity": "sha512-LzLoiOMAxvy+Gd3BAq3B7VeIgPdo+Q8hthvKtXybMvRV0jrXfJM/t8mw7nNlpEcVlVUnCnM2KSX4XU5HmpodOA==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.5.tgz", + "integrity": "sha512-zfETvRFA8o7EiNn++N5f/kaCw221hrpGsDmcpndVupkPzEc1Wuf3VgC0qby1BbHs7f5DVYjgtEU2LLh5bqeGfQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.2.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -5080,31 +7452,44 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array.prototype.tosorted": { + "node_modules/array.prototype.toreversed": { "version": "1.1.2", - "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.2.tgz", - "integrity": "sha512-HuQCHOlk1Weat5jzStICBCd83NxiIMwqDg/dHEsoefabn/hJRj5pVdWcPUSpRrwhwxZOsQassMpgN/xRYFBMIg==", + "resolved": "https://registry.npmjs.org/array.prototype.toreversed/-/array.prototype.toreversed-1.1.2.tgz", + "integrity": 
"sha512-wwDCoT4Ck4Cz7sLtgUmzR5UV3YF5mFHUlbChCzZBQZ+0m2cl/DH3tKgvphv1nKgFsJ48oCSg6p91q2Vm0I/ZMA==", "dev": true, "dependencies": { "call-bind": "^1.0.2", "define-properties": "^1.2.0", "es-abstract": "^1.22.1", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.2.1" + "es-shim-unscopables": "^1.0.0" + } + }, + "node_modules/array.prototype.tosorted": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.3.tgz", + "integrity": "sha512-/DdH4TiTmOKzyQbp/eadcCVexiCb36xJg7HshYOYJnNZFDj33GEv0P7GxsynpShhq4OLYJzbGcBDkLsDt7MnNg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.1.0", + "es-shim-unscopables": "^1.0.2" } }, "node_modules/arraybuffer.prototype.slice": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.2.tgz", - "integrity": "sha512-yMBKppFur/fbHu9/6USUe03bZ4knMYiwFBcyiaXB8Go0qNehwX6inYPzK9U0NeQvGxKthcmHcaR8P5MStSRBAw==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", + "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", "dev": true, "dependencies": { - "array-buffer-byte-length": "^1.0.0", - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1", - "is-array-buffer": "^3.0.2", + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.2.1", + "get-intrinsic": "^1.2.3", + "is-array-buffer": "^3.0.4", "is-shared-array-buffer": "^1.0.2" }, "engines": { @@ -5176,15 +7561,6 @@ "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==", "dev": true }, - "node_modules/asynciterator.prototype": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/asynciterator.prototype/-/asynciterator.prototype-1.0.0.tgz", - "integrity": "sha512-wwHYEIS0Q80f5mosx3L/dfG5t5rjEa9Ft51GTaNt862EnpyGHpgz2RkZvLPp1oF5TnAiTohkEKVEu8pQPJI7Vg==", - "dev": true, - "dependencies": { - "has-symbols": "^1.0.3" - } - }, "node_modules/asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -5213,9 +7589,9 @@ } }, "node_modules/autoprefixer": { - "version": "10.4.17", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.17.tgz", - "integrity": "sha512-/cpVNRLSfhOtcGflT13P2794gVSgmPgTR+erw5ifnMLZb0UnSlkK4tquLmkd3BhA+nLo5tX8Cu0upUsGKvKbmg==", + "version": "10.4.19", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.19.tgz", + "integrity": "sha512-BaENR2+zBZ8xXhM4pUaKUxlVdxZ0EZhjvbopwnXmxRUfqDmwSpC2lAi/QXvx7NRdPCo1WKEcEF6mV64si1z4Ew==", "funding": [ { "type": "opencollective", @@ -5231,8 +7607,8 @@ } ], "dependencies": { - "browserslist": "^4.22.2", - "caniuse-lite": "^1.0.30001578", + "browserslist": "^4.23.0", + "caniuse-lite": "^1.0.30001599", "fraction.js": "^4.3.7", "normalize-range": "^0.1.2", "picocolors": "^1.0.0", @@ -5249,10 +7625,13 @@ } }, "node_modules/available-typed-arrays": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", - "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", + "version": 
"1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", "dev": true, + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, "engines": { "node": ">= 0.4" }, @@ -5275,6 +7654,42 @@ "integrity": "sha512-NmWvPnx0F1SfrQbYwOi7OeaNGokp9XhzNioJ/CSBs8Qa4vxug81mhJEAVZwxXuBmYB5KDRfMq/F3RR0BIU7sWg==", "dev": true }, + "node_modules/babel-plugin-polyfill-corejs2": { + "version": "0.4.10", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.10.tgz", + "integrity": "sha512-rpIuu//y5OX6jVU+a5BCn1R5RSZYWAl2Nar76iwaOdycqb6JPxediskWFMMl7stfwNJR4b7eiQvh5fB5TEQJTQ==", + "dependencies": { + "@babel/compat-data": "^7.22.6", + "@babel/helper-define-polyfill-provider": "^0.6.1", + "semver": "^6.3.1" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-plugin-polyfill-corejs3": { + "version": "0.10.4", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.10.4.tgz", + "integrity": "sha512-25J6I8NGfa5YkCDogHRID3fVCadIR8/pGl1/spvCkzb6lVn6SR3ojpx9nOn9iEBcUsjY24AmdKm5khcfKdylcg==", + "dependencies": { + "@babel/helper-define-polyfill-provider": "^0.6.1", + "core-js-compat": "^3.36.1" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-plugin-polyfill-regenerator": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.6.1.tgz", + "integrity": "sha512-JfTApdE++cgcTWjsiCQlLyFBMbTUft9ja17saCc93lgV33h4tuCVj7tlvu//qpLwaG+3yEz7/KhahGrUMkVq9g==", + "dependencies": { + "@babel/helper-define-polyfill-provider": "^0.6.1" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, "node_modules/babel-plugin-syntax-trailing-function-commas": { "version": "7.0.0-beta.0", "resolved": "https://registry.npmjs.org/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-7.0.0-beta.0.tgz", @@ -5398,11 +7813,10 @@ "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", "dev": true }, - "node_modules/blueimp-md5": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/blueimp-md5/-/blueimp-md5-2.19.0.tgz", - "integrity": "sha512-DRQrD6gJyy8FbiE4s+bDoXS9hiW3Vbx5uCdwvcCf3zLHL+Iv7LtGHLpr+GZV8rHG8tK766FGYBwRbu8pELTt+w==", - "dev": true + "node_modules/boolbase": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==" }, "node_modules/brace-expansion": { "version": "1.1.11", @@ -5426,9 +7840,9 @@ } }, "node_modules/browserslist": { - "version": "4.22.3", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.22.3.tgz", - "integrity": "sha512-UAp55yfwNv0klWNapjs/ktHoguxuQNGnOzxYmfnXIS+8AsRDZkSDxg7R1AX3GKzn078SBI5dzwzj/Yx0Or0e3A==", + "version": "4.23.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz", + "integrity": "sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==", "funding": [ { "type": "opencollective", @@ -5444,8 +7858,8 @@ } ], "dependencies": { - "caniuse-lite": "^1.0.30001580", - 
"electron-to-chromium": "^1.4.648", + "caniuse-lite": "^1.0.30001587", + "electron-to-chromium": "^1.4.668", "node-releases": "^2.0.14", "update-browserslist-db": "^1.0.13" }, @@ -5498,6 +7912,18 @@ "node": "*" } }, + "node_modules/builtin-modules": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", + "integrity": "sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==", + "dev": true, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/builtins": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.0.1.tgz", @@ -5520,108 +7946,36 @@ } }, "node_modules/builtins/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "dev": true, "dependencies": { "lru-cache": "^6.0.0" }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/builtins/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, - "node_modules/busboy": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", - "integrity": "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==", - "dev": true, - "dependencies": { - "streamsearch": "^1.1.0" - }, - "engines": { - "node": ">=10.16.0" - } - }, - "node_modules/c8": { - "version": "7.14.0", - "resolved": "https://registry.npmjs.org/c8/-/c8-7.14.0.tgz", - "integrity": "sha512-i04rtkkcNcCf7zsQcSv/T9EbUn4RXQ6mropeMcjFOsQXQ0iGLAr/xT6TImQg4+U9hmNpN9XdvPkjUL1IzbgxJw==", - "dev": true, - "dependencies": { - "@bcoe/v8-coverage": "^0.2.3", - "@istanbuljs/schema": "^0.1.3", - "find-up": "^5.0.0", - "foreground-child": "^2.0.0", - "istanbul-lib-coverage": "^3.2.0", - "istanbul-lib-report": "^3.0.0", - "istanbul-reports": "^3.1.4", - "rimraf": "^3.0.2", - "test-exclude": "^6.0.0", - "v8-to-istanbul": "^9.0.0", - "yargs": "^16.2.0", - "yargs-parser": "^20.2.9" - }, - "bin": { - "c8": "bin/c8.js" - }, - "engines": { - "node": ">=10.12.0" - } - }, - "node_modules/c8/node_modules/cliui": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", - "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", - "dev": true, - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", - "wrap-ansi": "^7.0.0" - } - }, - "node_modules/c8/node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" + "bin": { + "semver": "bin/semver.js" }, "engines": { "node": ">=10" - }, - "funding": { - "url": 
"https://github.com/chalk/wrap-ansi?sponsor=1" } }, - "node_modules/c8/node_modules/yargs": { - "version": "16.2.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", - "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "node_modules/builtins/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/busboy": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", + "integrity": "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==", "dev": true, "dependencies": { - "cliui": "^7.0.2", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.0", - "y18n": "^5.0.5", - "yargs-parser": "^20.2.2" + "streamsearch": "^1.1.0" }, "engines": { - "node": ">=10" + "node": ">=10.16.0" } }, "node_modules/cac": { @@ -5643,14 +7997,19 @@ } }, "node_modules/call-bind": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", - "integrity": "sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "dev": true, "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.1", - "set-function-length": "^1.1.1" + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -5694,9 +8053,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001581", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001581.tgz", - "integrity": "sha512-whlTkwhqV2tUmP3oYhtNfaWGYHDdS3JYFQBKXxcUR9qqPWsRhFHhoISO2Xnl/g0xyKzht9mI1LZpiNWfMzHixQ==", + "version": "1.0.30001605", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001605.tgz", + "integrity": "sha512-nXwGlFWo34uliI9z3n6Qc0wZaf7zaZWA1CPZ169La5mV3I/gem7bst0vr5XQH5TJXZIMfDeZyOrZnSlVzKxxHQ==", "funding": [ { "type": "opencollective", @@ -6083,6 +8442,11 @@ "@lezer/highlight": "^1.0.0" } }, + "node_modules/codemirror": { + "version": "5.65.16", + "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-5.65.16.tgz", + "integrity": "sha512-br21LjYmSlVL0vFCPWPfhzUCT34FM/pAdK7rRIZwa0rrtrIdotvP4Oh4GUHsu2E3IrQMCfRkL/fN3ytMNxVQvg==" + }, "node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -6105,6 +8469,28 @@ "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", "dev": true }, + "node_modules/colors-named": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/colors-named/-/colors-named-1.0.2.tgz", + "integrity": "sha512-2ANq2r393PV9njYUD66UdfBcxR1slMqRA3QRTWgCx49JoCJ+kOhyfbQYxKJbPZQIhZUcNjVOs5AlyY1WwXec3w==", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://jaywcjlove.github.io/#/sponsor" + } + }, + "node_modules/colors-named-hex": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/colors-named-hex/-/colors-named-hex-1.0.2.tgz", + "integrity": "sha512-k6kq1e1pUCQvSVwIaGFq2l0LrkAPQZWyeuZn1Z8nOiYSEZiKoFj4qx690h2Kd34DFl9Me0gKS6MUwAMBJj8nuA==", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://jaywcjlove.github.io/#/sponsor" + } + }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -6150,58 +8536,6 @@ "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", "dev": true }, - "node_modules/concordance": { - "version": "5.0.4", - "resolved": "https://registry.npmjs.org/concordance/-/concordance-5.0.4.tgz", - "integrity": "sha512-OAcsnTEYu1ARJqWVGwf4zh4JDfHZEaSNlNccFmt8YjB2l/n19/PF2viLINHc57vO4FKIAFl2FWASIGZZWZ2Kxw==", - "dev": true, - "dependencies": { - "date-time": "^3.1.0", - "esutils": "^2.0.3", - "fast-diff": "^1.2.0", - "js-string-escape": "^1.0.1", - "lodash": "^4.17.15", - "md5-hex": "^3.0.1", - "semver": "^7.3.2", - "well-known-symbols": "^2.0.0" - }, - "engines": { - "node": ">=10.18.0 <11 || >=12.14.0 <13 || >=14" - } - }, - "node_modules/concordance/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/concordance/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/concordance/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/constant-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/constant-case/-/constant-case-3.0.4.tgz", @@ -6218,6 +8552,26 @@ "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" }, + "node_modules/copy-to-clipboard": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.3.tgz", + "integrity": "sha512-2KV8NhB5JqC3ky0r9PMCAZKbUHSwtEo4CwCs0KXgruG43gX5PMqDEBbVU4OUzw2MuAWUfsuFmWvEKG5QRfSnJA==", + "dependencies": { + "toggle-selection": "^1.0.6" + } + }, + "node_modules/core-js-compat": { + "version": "3.36.1", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.36.1.tgz", + "integrity": "sha512-Dk997v9ZCt3X/npqzyGdTlq6t7lDBhZwGvV94PKzDArjp7BTRm7WlDAXYd/OWdeFHO8OChQYRJNJvUCqCbrtKA==", + "dependencies": { + "browserslist": "^4.23.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, "node_modules/core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", @@ -6225,19 +8579,28 @@ "dev": true }, "node_modules/cosmiconfig": { - "version": "7.1.0", - 
"resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.1.0.tgz", - "integrity": "sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==", - "dev": true, + "version": "8.3.6", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", + "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", "dependencies": { - "@types/parse-json": "^4.0.0", - "import-fresh": "^3.2.1", - "parse-json": "^5.0.0", - "path-type": "^4.0.0", - "yaml": "^1.10.0" + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0", + "path-type": "^4.0.0" }, "engines": { - "node": ">=10" + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } } }, "node_modules/create-require": { @@ -6259,6 +8622,18 @@ "node-fetch": "^2.6.12" } }, + "node_modules/cross-inspect": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/cross-inspect/-/cross-inspect-1.0.0.tgz", + "integrity": "sha512-4PFfn4b5ZN6FMNGSZlyb7wUhuN8wvj8t/VQHZdM4JsDcruGJ8L2kf9zao98QIrBPFCpdk27qst/AGTl7pL3ypQ==", + "dev": true, + "dependencies": { + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", @@ -6280,6 +8655,44 @@ "node": "*" } }, + "node_modules/css-select": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz", + "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==", + "dependencies": { + "boolbase": "^1.0.0", + "css-what": "^6.1.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", + "nth-check": "^2.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/css-tree": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz", + "integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==", + "dependencies": { + "mdn-data": "2.0.30", + "source-map-js": "^1.0.1" + }, + "engines": { + "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0" + } + }, + "node_modules/css-what": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", + "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==", + "engines": { + "node": ">= 6" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, "node_modules/cssesc": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", @@ -6291,16 +8704,46 @@ "node": ">=4" } }, + "node_modules/csso": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/csso/-/csso-5.0.5.tgz", + "integrity": "sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ==", + "dependencies": { + "css-tree": "~2.2.0" + }, + "engines": { + "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0", + "npm": ">=7.0.0" + } + }, + "node_modules/csso/node_modules/css-tree": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.2.1.tgz", + "integrity": "sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA==", + "dependencies": { + "mdn-data": 
"2.0.28", + "source-map-js": "^1.0.1" + }, + "engines": { + "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0", + "npm": ">=7.0.0" + } + }, + "node_modules/csso/node_modules/mdn-data": { + "version": "2.0.28", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.28.tgz", + "integrity": "sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==" + }, "node_modules/cssstyle": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-3.0.0.tgz", - "integrity": "sha512-N4u2ABATi3Qplzf0hWbVCdjenim8F3ojEXpBDF5hBpjzW182MjNGLqfmQ0SkSPeQ+V86ZXgeH8aXj6kayd4jgg==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-4.0.1.tgz", + "integrity": "sha512-8ZYiJ3A/3OkDd093CBT/0UKDWry7ak4BdPTFP2+QEP7cmhouyq/Up709ASSj2cK02BbZiMgk7kYjZNS4QP5qrQ==", "dev": true, "dependencies": { "rrweb-cssom": "^0.6.0" }, "engines": { - "node": ">=14" + "node": ">=18" } }, "node_modules/csstype": { @@ -6309,9 +8752,9 @@ "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==" }, "node_modules/cypress": { - "version": "13.6.4", - "resolved": "https://registry.npmjs.org/cypress/-/cypress-13.6.4.tgz", - "integrity": "sha512-pYJjCfDYB+hoOoZuhysbbYhEmNW7DEDsqn+ToCLwuVowxUXppIWRr7qk4TVRIU471ksfzyZcH+mkoF0CQUKnpw==", + "version": "13.7.3", + "resolved": "https://registry.npmjs.org/cypress/-/cypress-13.7.3.tgz", + "integrity": "sha512-uoecY6FTCAuIEqLUYkTrxamDBjMHTYak/1O7jtgwboHiTnS1NaMOoR08KcTrbRZFCBvYOiS4tEkQRmsV+xcrag==", "dev": true, "hasInstallScript": true, "dependencies": { @@ -6322,7 +8765,7 @@ "arch": "^2.2.0", "blob-util": "^2.0.2", "bluebird": "^3.7.2", - "buffer": "^5.6.0", + "buffer": "^5.7.1", "cachedir": "^2.3.0", "chalk": "^4.1.0", "check-more-types": "^2.24.0", @@ -6340,7 +8783,7 @@ "figures": "^3.2.0", "fs-extra": "^9.1.0", "getos": "^3.2.1", - "is-ci": "^3.0.0", + "is-ci": "^3.0.1", "is-installed-globally": "~0.4.0", "lazy-ass": "^1.6.0", "listr2": "^3.8.3", @@ -6365,19 +8808,6 @@ "node": "^16.0.0 || ^18.0.0 || >=20.0.0" } }, - "node_modules/cypress-vite": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/cypress-vite/-/cypress-vite-1.5.0.tgz", - "integrity": "sha512-vvTMqJZgI3sN2ylQTi4OQh8LRRjSrfrIdkQD5fOj+EC/e9oHkxS96lif1SyDF1PwailG1tnpJE+VpN6+AwO/rg==", - "dev": true, - "dependencies": { - "chokidar": "^3.5.3", - "debug": "^4.3.4" - }, - "peerDependencies": { - "vite": "^2.9.0 || ^3.0.0 || ^4.0.0 || ^5.0.0" - } - }, "node_modules/cypress/node_modules/listr2": { "version": "3.14.0", "resolved": "https://registry.npmjs.org/listr2/-/listr2-3.14.0.tgz", @@ -6599,50 +9029,82 @@ } }, "node_modules/data-urls": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-4.0.0.tgz", - "integrity": "sha512-/mMTei/JXPqvFqQtfyTowxmJVwr2PVAeCcDxyFf6LhoOu/09TX2OX3kb2wzi4DMXcfj4OItwDOnhl5oziPnT6g==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-5.0.0.tgz", + "integrity": "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==", "dev": true, "dependencies": { - "abab": "^2.0.6", - "whatwg-mimetype": "^3.0.0", - "whatwg-url": "^12.0.0" + "whatwg-mimetype": "^4.0.0", + "whatwg-url": "^14.0.0" }, "engines": { - "node": ">=14" + "node": ">=18" } }, - "node_modules/dataloader": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/dataloader/-/dataloader-2.2.2.tgz", - "integrity": 
"sha512-8YnDaaf7N3k/q5HnTJVuzSyLETjoZjVmHc4AeKAzOvKHEFQKcn64OKBfzHYtE9zGjctNM7V9I0MfnUVLpi7M5g==", - "dev": true + "node_modules/data-view-buffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", + "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, - "node_modules/date-fns": { - "version": "2.30.0", - "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz", - "integrity": "sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==", + "node_modules/data-view-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", + "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", + "dev": true, "dependencies": { - "@babel/runtime": "^7.21.0" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" }, "engines": { - "node": ">=0.11" + "node": ">= 0.4" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/date-fns" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/date-time": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/date-time/-/date-time-3.1.0.tgz", - "integrity": "sha512-uqCUKXE5q1PNBXjPqvwhwJf9SwMoAHBgWJ6DcrnS5o+W2JOiIILl0JEdVD8SGujrNS02GGxgwAg2PN2zONgtjg==", + "node_modules/data-view-byte-offset": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", + "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", "dev": true, "dependencies": { - "time-zone": "^1.0.0" + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" }, "engines": { - "node": ">=6" + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/dataloader": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/dataloader/-/dataloader-2.2.2.tgz", + "integrity": "sha512-8YnDaaf7N3k/q5HnTJVuzSyLETjoZjVmHc4AeKAzOvKHEFQKcn64OKBfzHYtE9zGjctNM7V9I0MfnUVLpi7M5g==", + "dev": true + }, + "node_modules/date-fns": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-3.6.0.tgz", + "integrity": "sha512-fRHTG8g/Gif+kSh50gaGEdToemgfj74aRX3swtiouboip5JDLAyDE9F11nHMIcvOaXeOC6D7SpNhi7uFyB7Uww==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/kossnocorp" } }, "node_modules/dayjs": { @@ -6706,11 +9168,11 @@ } }, "node_modules/decode-uri-component": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz", - "integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==", + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.4.1.tgz", + "integrity": "sha512-+8VxcR21HhTy8nOt6jf20w0c9CADrw1O8d+VZ/YzzCt4bJ3uBjw+D1q2osAB8RnpwwaeYBxy0HyKQxD5JBMuuQ==", "engines": { - "node": ">=0.10" + "node": ">=14.16" } }, "node_modules/deep-eql": { @@ -6763,6 +9225,14 @@ "integrity": 
"sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", "dev": true }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/defaults": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.4.tgz", @@ -6776,17 +9246,20 @@ } }, "node_modules/define-data-property": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", - "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.1", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/define-properties": { @@ -6846,6 +9319,8 @@ "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==", "dev": true, + "optional": true, + "peer": true, "bin": { "detect-libc": "bin/detect-libc.js" }, @@ -6853,6 +9328,11 @@ "node": ">=0.10" } }, + "node_modules/detect-node-es": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", + "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==" + }, "node_modules/devlop": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", @@ -6884,6 +9364,15 @@ "resolved": "https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.5.tgz", "integrity": "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==" }, + "node_modules/diff-sequences": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", + "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", + "dev": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, "node_modules/dir-glob": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", @@ -6928,17 +9417,55 @@ "csstype": "^3.0.2" } }, - "node_modules/domexception": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/domexception/-/domexception-4.0.0.tgz", - "integrity": "sha512-A2is4PLG+eeSfoTMA95/s4pvAoSo2mKtiM5jlHkAVewmiO8ISFTFKZjH7UAM1Atli/OT/7JHOrJRJiMKUZKYBw==", - "deprecated": "Use your platform's native DOMException instead", - "dev": true, + "node_modules/dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + }, + 
"funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + } + }, + "node_modules/domelementtype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", + "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ] + }, + "node_modules/domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "dependencies": { + "domelementtype": "^2.3.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" + } + }, + "node_modules/domutils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", + "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==", "dependencies": { - "webidl-conversions": "^7.0.0" + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" }, - "engines": { - "node": ">=12" + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" } }, "node_modules/dot-case": { @@ -6951,15 +9478,15 @@ } }, "node_modules/dotenv": { - "version": "16.4.1", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.1.tgz", - "integrity": "sha512-CjA3y+Dr3FyFDOAMnxZEGtnW9KBR2M0JvvUtXNW+dYJL5ROWxP9DUHCwgFqpMk0OXCc0ljhaNTr2w/kutYIcHQ==", + "version": "16.4.5", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz", + "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==", "dev": true, "engines": { "node": ">=12" }, "funding": { - "url": "https://github.com/motdotla/dotenv?sponsor=1" + "url": "https://dotenvx.com" } }, "node_modules/dset": { @@ -6987,9 +9514,9 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.4.651", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.651.tgz", - "integrity": "sha512-jjks7Xx+4I7dslwsbaFocSwqBbGHQmuXBJUK9QBZTIrzPq3pzn6Uf2szFSP728FtLYE3ldiccmlkOM/zhGKCpA==" + "version": "1.4.724", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.724.tgz", + "integrity": "sha512-RTRvkmRkGhNBPPpdrgtDKvmOEYTrPlXDfc0J/Nfq5s29tEahAwhiX4mmhNzj6febWMleulxVYPh7QwCSL/EldA==" }, "node_modules/emoji-regex": { "version": "8.0.0", @@ -7038,50 +9565,57 @@ } }, "node_modules/es-abstract": { - "version": "1.22.3", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.3.tgz", - "integrity": "sha512-eiiY8HQeYfYH2Con2berK+To6GrK2RxbPawDkGq4UiCQQfZHb6wX9qQqkbpPqaxQFcl8d9QzZqo0tGE0VcrdwA==", - "dev": true, - "dependencies": { - "array-buffer-byte-length": "^1.0.0", - "arraybuffer.prototype.slice": "^1.0.2", - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.5", - "es-set-tostringtag": "^2.0.1", + "version": "1.23.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", + "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", + "dev": true, + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "arraybuffer.prototype.slice": "^1.0.3", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "data-view-buffer": "^1.0.1", + "data-view-byte-length": 
"^1.0.1", + "data-view-byte-offset": "^1.0.0", + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-set-tostringtag": "^2.0.3", "es-to-primitive": "^1.2.1", "function.prototype.name": "^1.1.6", - "get-intrinsic": "^1.2.2", - "get-symbol-description": "^1.0.0", + "get-intrinsic": "^1.2.4", + "get-symbol-description": "^1.0.2", "globalthis": "^1.0.3", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0", - "has-proto": "^1.0.1", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", "has-symbols": "^1.0.3", - "hasown": "^2.0.0", - "internal-slot": "^1.0.5", - "is-array-buffer": "^3.0.2", + "hasown": "^2.0.2", + "internal-slot": "^1.0.7", + "is-array-buffer": "^3.0.4", "is-callable": "^1.2.7", - "is-negative-zero": "^2.0.2", + "is-data-view": "^1.0.1", + "is-negative-zero": "^2.0.3", "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", + "is-shared-array-buffer": "^1.0.3", "is-string": "^1.0.7", - "is-typed-array": "^1.1.12", + "is-typed-array": "^1.1.13", "is-weakref": "^1.0.2", "object-inspect": "^1.13.1", "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.5.1", - "safe-array-concat": "^1.0.1", - "safe-regex-test": "^1.0.0", - "string.prototype.trim": "^1.2.8", - "string.prototype.trimend": "^1.0.7", - "string.prototype.trimstart": "^1.0.7", - "typed-array-buffer": "^1.0.0", - "typed-array-byte-length": "^1.0.0", - "typed-array-byte-offset": "^1.0.0", - "typed-array-length": "^1.0.4", + "object.assign": "^4.1.5", + "regexp.prototype.flags": "^1.5.2", + "safe-array-concat": "^1.1.2", + "safe-regex-test": "^1.0.3", + "string.prototype.trim": "^1.2.9", + "string.prototype.trimend": "^1.0.8", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.2", + "typed-array-byte-length": "^1.0.1", + "typed-array-byte-offset": "^1.0.2", + "typed-array-length": "^1.0.6", "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.13" + "which-typed-array": "^1.1.15" }, "engines": { "node": ">= 0.4" @@ -7090,6 +9624,27 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-get-iterator": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.3.tgz", @@ -7111,36 +9666,51 @@ } }, "node_modules/es-iterator-helpers": { - "version": "1.0.15", - "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.15.tgz", - "integrity": "sha512-GhoY8uYqd6iwUl2kgjTm4CZAf6oo5mHK7BPqx3rKgx893YSsy0LGHV6gfqqQvZt/8xM8xeOnfXBCfqclMKkJ5g==", + "version": "1.0.18", + "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.18.tgz", + "integrity": "sha512-scxAJaewsahbqTYrGKJihhViaM6DDZDDoucfvzNbK0pOren1g/daDQ3IAhzn+1G14rBG7w+i5N+qul60++zlKA==", "dev": true, "dependencies": { - "asynciterator.prototype": "^1.0.0", - "call-bind": "^1.0.2", + 
"call-bind": "^1.0.7", "define-properties": "^1.2.1", - "es-abstract": "^1.22.1", - "es-set-tostringtag": "^2.0.1", - "function-bind": "^1.1.1", - "get-intrinsic": "^1.2.1", + "es-abstract": "^1.23.0", + "es-errors": "^1.3.0", + "es-set-tostringtag": "^2.0.3", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", "globalthis": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-proto": "^1.0.1", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", "has-symbols": "^1.0.3", - "internal-slot": "^1.0.5", + "internal-slot": "^1.0.7", "iterator.prototype": "^1.1.2", - "safe-array-concat": "^1.0.1" + "safe-array-concat": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", + "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" } }, "node_modules/es-set-tostringtag": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz", - "integrity": "sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", + "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.2", - "has-tostringtag": "^1.0.0", - "hasown": "^2.0.0" + "get-intrinsic": "^1.2.4", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.1" }, "engines": { "node": ">= 0.4" @@ -7173,9 +9743,9 @@ } }, "node_modules/esbuild": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.18.20.tgz", - "integrity": "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.2.tgz", + "integrity": "sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==", "hasInstallScript": true, "bin": { "esbuild": "bin/esbuild" @@ -7184,28 +9754,29 @@ "node": ">=12" }, "optionalDependencies": { - "@esbuild/android-arm": "0.18.20", - "@esbuild/android-arm64": "0.18.20", - "@esbuild/android-x64": "0.18.20", - "@esbuild/darwin-arm64": "0.18.20", - "@esbuild/darwin-x64": "0.18.20", - "@esbuild/freebsd-arm64": "0.18.20", - "@esbuild/freebsd-x64": "0.18.20", - "@esbuild/linux-arm": "0.18.20", - "@esbuild/linux-arm64": "0.18.20", - "@esbuild/linux-ia32": "0.18.20", - "@esbuild/linux-loong64": "0.18.20", - "@esbuild/linux-mips64el": "0.18.20", - "@esbuild/linux-ppc64": "0.18.20", - "@esbuild/linux-riscv64": "0.18.20", - "@esbuild/linux-s390x": "0.18.20", - "@esbuild/linux-x64": "0.18.20", - "@esbuild/netbsd-x64": "0.18.20", - "@esbuild/openbsd-x64": "0.18.20", - "@esbuild/sunos-x64": "0.18.20", - "@esbuild/win32-arm64": "0.18.20", - "@esbuild/win32-ia32": "0.18.20", - "@esbuild/win32-x64": "0.18.20" + "@esbuild/aix-ppc64": "0.20.2", + "@esbuild/android-arm": "0.20.2", + "@esbuild/android-arm64": "0.20.2", + "@esbuild/android-x64": "0.20.2", + "@esbuild/darwin-arm64": "0.20.2", + "@esbuild/darwin-x64": "0.20.2", + "@esbuild/freebsd-arm64": "0.20.2", + "@esbuild/freebsd-x64": "0.20.2", + "@esbuild/linux-arm": "0.20.2", + "@esbuild/linux-arm64": 
"0.20.2", + "@esbuild/linux-ia32": "0.20.2", + "@esbuild/linux-loong64": "0.20.2", + "@esbuild/linux-mips64el": "0.20.2", + "@esbuild/linux-ppc64": "0.20.2", + "@esbuild/linux-riscv64": "0.20.2", + "@esbuild/linux-s390x": "0.20.2", + "@esbuild/linux-x64": "0.20.2", + "@esbuild/netbsd-x64": "0.20.2", + "@esbuild/openbsd-x64": "0.20.2", + "@esbuild/sunos-x64": "0.20.2", + "@esbuild/win32-arm64": "0.20.2", + "@esbuild/win32-ia32": "0.20.2", + "@esbuild/win32-x64": "0.20.2" } }, "node_modules/escalade": { @@ -7228,38 +9799,17 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/escodegen": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", - "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", - "dev": true, - "dependencies": { - "esprima": "^4.0.1", - "estraverse": "^5.2.0", - "esutils": "^2.0.2" - }, - "bin": { - "escodegen": "bin/escodegen.js", - "esgenerate": "bin/esgenerate.js" - }, - "engines": { - "node": ">=6.0" - }, - "optionalDependencies": { - "source-map": "~0.6.1" - } - }, "node_modules/eslint": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.56.0.tgz", - "integrity": "sha512-Go19xM6T9puCOWntie1/P997aXxFsOi37JIHRWI514Hc6ZnaHGKY9xFhrU65RT6CcBEzZoGG1e6Nq+DT04ZtZQ==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", + "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.4", - "@eslint/js": "8.56.0", - "@humanwhocodes/config-array": "^0.11.13", + "@eslint/js": "8.57.0", + "@humanwhocodes/config-array": "^0.11.14", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "@ungap/structured-clone": "^1.2.0", @@ -7304,60 +9854,64 @@ "url": "https://opencollective.com/eslint" } }, - "node_modules/eslint-config-prettier": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.10.0.tgz", - "integrity": "sha512-SM8AMJdeQqRYT9O9zguiruQZaN7+z+E4eAP9oiLNGKMtomwaB1E9dcgUD6ZAn/eQAb52USbvezbiljfZUhbJcg==", + "node_modules/eslint-compat-utils": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/eslint-compat-utils/-/eslint-compat-utils-0.5.0.tgz", + "integrity": "sha512-dc6Y8tzEcSYZMHa+CMPLi/hyo1FzNeonbhJL7Ol0ccuKQkwopJcJBA9YL/xmMTLU1eKigXo9vj9nALElWYSowg==", "dev": true, - "bin": { - "eslint-config-prettier": "bin/cli.js" + "dependencies": { + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12" }, "peerDependencies": { - "eslint": ">=7.0.0" + "eslint": ">=6.0.0" } }, - "node_modules/eslint-config-standard": { - "version": "17.0.0", - "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-17.0.0.tgz", - "integrity": "sha512-/2ks1GKyqSOkH7JFvXJicu0iMpoojkwB+f5Du/1SC0PtBL+s8v30k9njRZ21pm2drKYm2342jFnGWzttxPmZVg==", + "node_modules/eslint-compat-utils/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": 
"consulting", - "url": "https://feross.org/support" - } - ], - "peerDependencies": { - "eslint": "^8.0.1", - "eslint-plugin-import": "^2.25.2", - "eslint-plugin-n": "^15.0.0", - "eslint-plugin-promise": "^6.0.0" + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" } }, - "node_modules/eslint-config-standard-with-typescript": { - "version": "34.0.1", - "resolved": "https://registry.npmjs.org/eslint-config-standard-with-typescript/-/eslint-config-standard-with-typescript-34.0.1.tgz", - "integrity": "sha512-J7WvZeLtd0Vr9F+v4dZbqJCLD16cbIy4U+alJMq4MiXdpipdBM3U5NkXaGUjePc4sb1ZE01U9g6VuTBpHHz1fg==", + "node_modules/eslint-compat-utils/node_modules/semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "dev": true, "dependencies": { - "@typescript-eslint/parser": "^5.43.0", - "eslint-config-standard": "17.0.0" + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint-compat-utils/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/eslint-config-prettier": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", + "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", + "dev": true, + "bin": { + "eslint-config-prettier": "bin/cli.js" }, "peerDependencies": { - "@typescript-eslint/eslint-plugin": "^5.43.0", - "eslint": "^8.0.1", - "eslint-plugin-import": "^2.25.2", - "eslint-plugin-n": "^15.0.0", - "eslint-plugin-promise": "^6.0.0", - "typescript": "*" + "eslint": ">=7.0.0" } }, "node_modules/eslint-import-resolver-node": { @@ -7381,9 +9935,9 @@ } }, "node_modules/eslint-module-utils": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz", - "integrity": "sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw==", + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.1.tgz", + "integrity": "sha512-rXDXR3h7cs7dy9RNpUlQf80nX31XWJEyGq1tRMo+6GsO5VmTe4UTwtmonAD4ZkAsrfMVDA2wlGJ3790Ys+D49Q==", "dev": true, "dependencies": { "debug": "^3.2.7" @@ -7445,47 +9999,24 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/eslint-plugin-es": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-4.1.0.tgz", - "integrity": "sha512-GILhQTnjYE2WorX5Jyi5i4dz5ALWxBIdQECVQavL6s7cI76IZTDWleTHkxz/QT3kvcs2QlGHvKLYsSlPOlPXnQ==", + "node_modules/eslint-plugin-es-x": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-es-x/-/eslint-plugin-es-x-7.6.0.tgz", + "integrity": "sha512-I0AmeNgevgaTR7y2lrVCJmGYF0rjoznpDvqV/kIkZSZbZ8Rw3eu4cGlvBBULScfkSOCzqKbff5LR4CNrV7mZHA==", "dev": true, "dependencies": { - "eslint-utils": "^2.0.0", - "regexpp": "^3.0.0" + "@eslint-community/eslint-utils": "^4.1.2", + "@eslint-community/regexpp": "^4.6.0", + "eslint-compat-utils": "^0.5.0" }, "engines": { - "node": ">=8.10.0" + "node": "^14.18.0 || >=16.0.0" }, "funding": { - "url": 
"https://github.com/sponsors/mysticatea" + "url": "https://github.com/sponsors/ota-meshi" }, "peerDependencies": { - "eslint": ">=4.19.1" - } - }, - "node_modules/eslint-plugin-es/node_modules/eslint-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", - "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", - "dev": true, - "dependencies": { - "eslint-visitor-keys": "^1.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - } - }, - "node_modules/eslint-plugin-es/node_modules/eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "dev": true, - "engines": { - "node": ">=4" + "eslint": ">=8" } }, "node_modules/eslint-plugin-import": { @@ -7541,22 +10072,25 @@ } }, "node_modules/eslint-plugin-n": { - "version": "15.7.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-15.7.0.tgz", - "integrity": "sha512-jDex9s7D/Qial8AGVIHq4W7NswpUD5DPDL2RH8Lzd9EloWUuvUkHfv4FRLMipH5q2UtyurorBkPeNi1wVWNh3Q==", + "version": "16.6.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-16.6.2.tgz", + "integrity": "sha512-6TyDmZ1HXoFQXnhCTUjVFULReoBPOAjpuiKELMkeP40yffI/1ZRO+d9ug/VC6fqISo2WkuIBk3cvuRPALaWlOQ==", "dev": true, "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", "builtins": "^5.0.1", - "eslint-plugin-es": "^4.1.0", - "eslint-utils": "^3.0.0", - "ignore": "^5.1.1", - "is-core-module": "^2.11.0", + "eslint-plugin-es-x": "^7.5.0", + "get-tsconfig": "^4.7.0", + "globals": "^13.24.0", + "ignore": "^5.2.4", + "is-builtin-module": "^3.2.1", + "is-core-module": "^2.12.1", "minimatch": "^3.1.2", - "resolve": "^1.22.1", - "semver": "^7.3.8" + "resolve": "^1.22.2", + "semver": "^7.5.3" }, "engines": { - "node": ">=12.22.0" + "node": ">=16.0.0" }, "funding": { "url": "https://github.com/sponsors/mysticatea" @@ -7565,6 +10099,21 @@ "eslint": ">=7.0.0" } }, + "node_modules/eslint-plugin-n/node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/eslint-plugin-n/node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -7578,9 +10127,9 @@ } }, "node_modules/eslint-plugin-n/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "dev": true, "dependencies": { "lru-cache": "^6.0.0" @@ -7592,6 +10141,18 @@ "node": ">=10" } }, + "node_modules/eslint-plugin-n/node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", 
+ "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/eslint-plugin-n/node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", @@ -7611,27 +10172,29 @@ } }, "node_modules/eslint-plugin-react": { - "version": "7.33.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.33.2.tgz", - "integrity": "sha512-73QQMKALArI8/7xGLNI/3LylrEYrlKZSb5C9+q3OtOewTnMQi5cT+aE9E41sLCmli3I9PGGmD1yiZydyo4FEPw==", + "version": "7.34.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.34.1.tgz", + "integrity": "sha512-N97CxlouPT1AHt8Jn0mhhN2RrADlUAsk1/atcT2KyA/l9Q/E6ll7OIGwNumFmWfZ9skV3XXccYS19h80rHtgkw==", "dev": true, "dependencies": { - "array-includes": "^3.1.6", - "array.prototype.flatmap": "^1.3.1", - "array.prototype.tosorted": "^1.1.1", + "array-includes": "^3.1.7", + "array.prototype.findlast": "^1.2.4", + "array.prototype.flatmap": "^1.3.2", + "array.prototype.toreversed": "^1.1.2", + "array.prototype.tosorted": "^1.1.3", "doctrine": "^2.1.0", - "es-iterator-helpers": "^1.0.12", + "es-iterator-helpers": "^1.0.17", "estraverse": "^5.3.0", "jsx-ast-utils": "^2.4.1 || ^3.0.0", "minimatch": "^3.1.2", - "object.entries": "^1.1.6", - "object.fromentries": "^2.0.6", - "object.hasown": "^1.1.2", - "object.values": "^1.1.6", + "object.entries": "^1.1.7", + "object.fromentries": "^2.0.7", + "object.hasown": "^1.1.3", + "object.values": "^1.1.7", "prop-types": "^15.8.1", - "resolve": "^2.0.0-next.4", + "resolve": "^2.0.0-next.5", "semver": "^6.3.1", - "string.prototype.matchall": "^4.0.8" + "string.prototype.matchall": "^4.0.10" }, "engines": { "node": ">=4" @@ -7670,9 +10233,9 @@ } }, "node_modules/eslint-plugin-unused-imports": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-2.0.0.tgz", - "integrity": "sha512-3APeS/tQlTrFa167ThtP0Zm0vctjr4M44HMpeg1P4bK6wItarumq0Ma82xorMKdFsWpphQBlRPzw/pxiVELX1A==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-3.1.0.tgz", + "integrity": "sha512-9l1YFCzXKkw1qtAru1RWUtG2EVDZY0a0eChKXcL+EZ5jitG7qxdctu4RnvhOJHv4xfmUf7h+JJPINlVpGhZMrw==", "dev": true, "dependencies": { "eslint-rule-composer": "^0.3.0" @@ -7681,8 +10244,8 @@ "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, "peerDependencies": { - "@typescript-eslint/eslint-plugin": "^5.0.0", - "eslint": "^8.0.0" + "@typescript-eslint/eslint-plugin": "6 - 7", + "eslint": "8" }, "peerDependenciesMeta": { "@typescript-eslint/eslint-plugin": { @@ -7699,55 +10262,6 @@ "node": ">=4.0.0" } }, - "node_modules/eslint-scope": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", - "dev": true, - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/eslint-scope/node_modules/estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "dev": true, - "engines": { 
- "node": ">=4.0" - } - }, - "node_modules/eslint-utils": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", - "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", - "dev": true, - "dependencies": { - "eslint-visitor-keys": "^2.0.0" - }, - "engines": { - "node": "^10.0.0 || ^12.0.0 || >= 14.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - }, - "peerDependencies": { - "eslint": ">=5" - } - }, - "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", - "dev": true, - "engines": { - "node": ">=10" - } - }, "node_modules/eslint-visitor-keys": { "version": "3.4.3", "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", @@ -7832,19 +10346,6 @@ "url": "https://opencollective.com/eslint" } }, - "node_modules/esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true, - "bin": { - "esparse": "bin/esparse.js", - "esvalidate": "bin/esvalidate.js" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/esquery": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", @@ -7896,7 +10397,6 @@ "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true, "engines": { "node": ">=0.10.0" } @@ -8032,12 +10532,6 @@ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", "dev": true }, - "node_modules/fast-diff": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.3.0.tgz", - "integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==", - "dev": true - }, "node_modules/fast-equals": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/fast-equals/-/fast-equals-5.0.1.tgz", @@ -8195,11 +10689,14 @@ } }, "node_modules/filter-obj": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-1.1.0.tgz", - "integrity": "sha512-8rXg1ZnX7xzy2NGDVkBVaAy+lSlPNwad13BtgSlLuxfIslyt5Vg64U7tFcCt4WS1R0hvtnQybT/IyCkGZ3DpXQ==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-5.1.0.tgz", + "integrity": "sha512-qWeTREPoT7I0bifpPUXtxkZJ1XJzxWtfoWWkdVGqa+eCr3SHW/Ocp89o8vLvbUuQnadybJpjOKu4V+RwO6sGng==", "engines": { - "node": ">=0.10.0" + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/find-up": { @@ -8247,19 +10744,6 @@ "is-callable": "^1.1.3" } }, - "node_modules/foreground-child": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", - "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==", - "dev": true, - "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^3.0.2" - }, - "engines": { - "node": ">=8.0.0" - } - }, "node_modules/forever-agent": { 
"version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", @@ -8295,6 +10779,34 @@ "url": "https://github.com/sponsors/rawify" } }, + "node_modules/framer-motion": { + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-6.5.1.tgz", + "integrity": "sha512-o1BGqqposwi7cgDrtg0dNONhkmPsUFDaLcKXigzuTFC5x58mE8iyTazxSudFzmT6MEyJKfjjU8ItoMe3W+3fiw==", + "dependencies": { + "@motionone/dom": "10.12.0", + "framesync": "6.0.1", + "hey-listen": "^1.0.8", + "popmotion": "11.0.3", + "style-value-types": "5.0.0", + "tslib": "^2.1.0" + }, + "optionalDependencies": { + "@emotion/is-prop-valid": "^0.8.2" + }, + "peerDependencies": { + "react": ">=16.8 || ^17.0.0 || ^18.0.0", + "react-dom": ">=16.8 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/framesync": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/framesync/-/framesync-6.0.1.tgz", + "integrity": "sha512-fUY88kXvGiIItgNC7wcTOl0SNRCVXMKSWW2Yzfmn7EKNc+MpCzcz9DhdHcdjbrtN3c6R4H5dTY2jiCpPdysEjA==", + "dependencies": { + "tslib": "^2.1.0" + } + }, "node_modules/fs-extra": { "version": "9.1.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", @@ -8391,20 +10903,32 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", - "integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", "dev": true, "dependencies": { + "es-errors": "^1.3.0", "function-bind": "^1.1.2", "has-proto": "^1.0.1", "has-symbols": "^1.0.3", "hasown": "^2.0.0" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/get-nonce": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz", + "integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==", + "engines": { + "node": ">=6" + } + }, "node_modules/get-stream": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", @@ -8421,13 +10945,14 @@ } }, "node_modules/get-symbol-description": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", - "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", + "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" + "call-bind": "^1.0.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4" }, "engines": { "node": ">= 0.4" @@ -8436,6 +10961,18 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/get-tsconfig": { + "version": "4.7.3", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.3.tgz", + "integrity": "sha512-ZvkrzoUA0PQZM6fy6+/Hce561s+faD1rsNwhnO5FelNjyy7EMGJ3Rz1AQ8GYDWjhRs/7dBLOEJvhK8MiEJOAFg==", + "dev": true, + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": 
"https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, "node_modules/getos": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/getos/-/getos-3.2.1.tgz", @@ -8577,35 +11114,50 @@ "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", "dev": true }, + "node_modules/graphiql": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/graphiql/-/graphiql-3.1.2.tgz", + "integrity": "sha512-k3p2k+7ZgARdLnqMDV192VL47cTmPNn02n5ullULnBE1nv1dtJfUve+AJxaU+kU8JcbwCxSxu3qlIxuu1N3mDQ==", + "dependencies": { + "@graphiql/react": "^0.20.4", + "@graphiql/toolkit": "^0.9.1", + "graphql-language-service": "^5.2.0", + "markdown-it": "^12.2.0" + }, + "peerDependencies": { + "graphql": "^15.5.0 || ^16.0.0", + "react": "^16.8.0 || ^17 || ^18", + "react-dom": "^16.8.0 || ^17 || ^18" + } + }, "node_modules/graphql": { "version": "16.8.1", "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.8.1.tgz", "integrity": "sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw==", - "peer": true, "engines": { "node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0" } }, "node_modules/graphql-config": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/graphql-config/-/graphql-config-4.5.0.tgz", - "integrity": "sha512-x6D0/cftpLUJ0Ch1e5sj1TZn6Wcxx4oMfmhaG9shM0DKajA9iR+j1z86GSTQ19fShbGvrSSvbIQsHku6aQ6BBw==", - "dev": true, - "dependencies": { - "@graphql-tools/graphql-file-loader": "^7.3.7", - "@graphql-tools/json-file-loader": "^7.3.7", - "@graphql-tools/load": "^7.5.5", - "@graphql-tools/merge": "^8.2.6", - "@graphql-tools/url-loader": "^7.9.7", - "@graphql-tools/utils": "^9.0.0", - "cosmiconfig": "8.0.0", - "jiti": "1.17.1", - "minimatch": "4.2.3", - "string-env-interpolation": "1.0.1", + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/graphql-config/-/graphql-config-5.0.3.tgz", + "integrity": "sha512-BNGZaoxIBkv9yy6Y7omvsaBUHOzfFcII3UN++tpH8MGOKFPFkCPZuwx09ggANMt8FgyWP1Od8SWPmrUEZca4NQ==", + "dev": true, + "dependencies": { + "@graphql-tools/graphql-file-loader": "^8.0.0", + "@graphql-tools/json-file-loader": "^8.0.0", + "@graphql-tools/load": "^8.0.0", + "@graphql-tools/merge": "^9.0.0", + "@graphql-tools/url-loader": "^8.0.0", + "@graphql-tools/utils": "^10.0.0", + "cosmiconfig": "^8.1.0", + "jiti": "^1.18.2", + "minimatch": "^4.2.3", + "string-env-interpolation": "^1.0.1", "tslib": "^2.4.0" }, "engines": { - "node": ">= 10.0.0" + "node": ">= 16.0.0" }, "peerDependencies": { "cosmiconfig-toml-loader": "^1.0.0", @@ -8617,30 +11169,6 @@ } } }, - "node_modules/graphql-config/node_modules/cosmiconfig": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.0.0.tgz", - "integrity": "sha512-da1EafcpH6b/TD8vDRaWV7xFINlHlF6zKsGwS1TsuVJTZRkquaS5HTMq7uq6h31619QjbsYl21gVDOm32KM1vQ==", - "dev": true, - "dependencies": { - "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "parse-json": "^5.0.0", - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/graphql-config/node_modules/jiti": { - "version": "1.17.1", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.17.1.tgz", - "integrity": "sha512-NZIITw8uZQFuzQimqjUxIrIcEdxYDFIe/0xYfIlVXTkiBjjyBEvgasj5bb0/cHtPRD/NziPbT312sFrkI5ALpw==", - "dev": true, - "bin": { - "jiti": "bin/jiti.js" - } - }, "node_modules/graphql-config/node_modules/minimatch": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-4.2.3.tgz", @@ -8653,6 
+11181,21 @@ "node": ">=10" } }, + "node_modules/graphql-language-service": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/graphql-language-service/-/graphql-language-service-5.2.0.tgz", + "integrity": "sha512-o/ZgTS0pBxWm3hSF4+6GwiV1//DxzoLWEbS38+jqpzzy1d/QXBidwQuVYTOksclbtOJZ3KR/tZ8fi/tI6VpVMg==", + "dependencies": { + "nullthrows": "^1.0.0", + "vscode-languageserver-types": "^3.17.1" + }, + "bin": { + "graphql": "dist/temp-bin.js" + }, + "peerDependencies": { + "graphql": "^15.5.0 || ^16.0.0" + } + }, "node_modules/graphql-request": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/graphql-request/-/graphql-request-6.1.0.tgz", @@ -8690,9 +11233,9 @@ } }, "node_modules/graphql-ws": { - "version": "5.12.1", - "resolved": "https://registry.npmjs.org/graphql-ws/-/graphql-ws-5.12.1.tgz", - "integrity": "sha512-umt4f5NnMK46ChM2coO36PTFhHouBrK9stWWBczERguwYrGnPNxJ9dimU6IyOBfOkC6Izhkg4H8+F51W/8CYDg==", + "version": "5.16.0", + "resolved": "https://registry.npmjs.org/graphql-ws/-/graphql-ws-5.16.0.tgz", + "integrity": "sha512-Ju2RCU2dQMgSKtArPbEtsK5gNLnsQyTNIo/T7cZNp96niC1x0KdJNZV0TIoilceBPQwfb5itrGl8pkFeOUMl4A==", "devOptional": true, "engines": { "node": ">=10" @@ -8740,21 +11283,21 @@ } }, "node_modules/has-property-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", - "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.2" + "es-define-property": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/has-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", - "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", + "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", "dev": true, "engines": { "node": ">= 0.4" @@ -8776,12 +11319,12 @@ } }, "node_modules/has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "dev": true, "dependencies": { - "has-symbols": "^1.0.2" + "has-symbols": "^1.0.3" }, "engines": { "node": ">= 0.4" @@ -8791,9 +11334,9 @@ } }, "node_modules/hasown": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", - "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", "dependencies": { "function-bind": "^1.1.2" 
}, @@ -8849,6 +11392,11 @@ "tslib": "^2.0.3" } }, + "node_modules/hey-listen": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/hey-listen/-/hey-listen-1.0.8.tgz", + "integrity": "sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q==" + }, "node_modules/hoist-non-react-statics": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", @@ -8858,15 +11406,15 @@ } }, "node_modules/html-encoding-sniffer": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-3.0.0.tgz", - "integrity": "sha512-oWv4T4yJ52iKrufjnyZPkrN0CH3QnrUqdB6In1g5Fe1mia8GmF36gnfNySxoZtxD5+NmYw1EElVXiBk93UeskA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz", + "integrity": "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==", "dev": true, "dependencies": { - "whatwg-encoding": "^2.0.0" + "whatwg-encoding": "^3.1.1" }, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/html-escaper": { @@ -8885,9 +11433,9 @@ } }, "node_modules/http-proxy-agent": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-6.1.1.tgz", - "integrity": "sha512-JRCz+4Whs6yrrIoIlrH+ZTmhrRwtMnmOHsHn8GFEn9O2sVfSE+DAZ3oyyGIKF8tjJEeSJmP89j7aTjVsSqsU0g==", + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", "dev": true, "dependencies": { "agent-base": "^7.1.0", @@ -8912,9 +11460,9 @@ } }, "node_modules/https-proxy-agent": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-6.2.1.tgz", - "integrity": "sha512-ONsE3+yfZF2caH5+bJlcddtWqNI3Gvs5A38+ngvljxaBiRXRswym2c7yf8UAeFpRFKjFNHIFEHqR/OLAWJzyiA==", + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", + "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", "dev": true, "dependencies": { "agent-base": "^7.0.2", @@ -8949,9 +11497,9 @@ } }, "node_modules/iconify-icon": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/iconify-icon/-/iconify-icon-1.0.8.tgz", - "integrity": "sha512-jvbUKHXf8EnGGArmhlP2IG8VqQLFFyTvTqb9LVL2TKTh7/eCCD1o2HHE9thpbJJb6B8hzhcFb6rOKhvo7reNKA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/iconify-icon/-/iconify-icon-2.0.0.tgz", + "integrity": "sha512-38ArOkxmyD9oDbJBkxaFpE6eZ0K3F9Sk+3x4mWGfjMJaxi3EKrix9Du4iWhgBFT3imKC4FJJE34ur2Rc7Xm+Uw==", "dependencies": { "@iconify/types": "^2.0.0" }, @@ -9119,12 +11667,12 @@ } }, "node_modules/internal-slot": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.6.tgz", - "integrity": "sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", + "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.2", + "es-errors": "^1.3.0", "hasown": "^2.0.0", "side-channel": "^1.0.4" }, @@ -9144,7 +11692,6 @@ "version": "2.2.4", 
"resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", - "dev": true, "dependencies": { "loose-envify": "^1.0.0" } @@ -9201,14 +11748,16 @@ } }, "node_modules/is-array-buffer": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", - "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", + "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.0", - "is-typed-array": "^1.1.10" + "get-intrinsic": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -9273,6 +11822,21 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-builtin-module": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.2.1.tgz", + "integrity": "sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==", + "dev": true, + "dependencies": { + "builtin-modules": "^3.3.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-callable": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", @@ -9308,6 +11872,21 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-data-view": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", + "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", + "dev": true, + "dependencies": { + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-date-object": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", @@ -9439,9 +12018,9 @@ } }, "node_modules/is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", "dev": true, "engines": { "node": ">= 0.4" @@ -9493,12 +12072,31 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "dependencies": { + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/is-potential-custom-element-name": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", "integrity": 
"sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", "dev": true }, + "node_modules/is-primitive": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/is-primitive/-/is-primitive-3.0.1.tgz", + "integrity": "sha512-GljRxhWvlCNRfZyORiH77FwdFwGcMO620o37EOYC0ORWdq+WYNVqW0w2Juzew4M+L81l6/QS3t5gkkihyRqv9w==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -9537,12 +12135,15 @@ } }, "node_modules/is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", + "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2" + "call-bind": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -9591,12 +12192,12 @@ } }, "node_modules/is-typed-array": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.12.tgz", - "integrity": "sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", + "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", "dev": true, "dependencies": { - "which-typed-array": "^1.1.11" + "which-typed-array": "^1.1.14" }, "engines": { "node": ">= 0.4" @@ -9698,6 +12299,14 @@ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" }, + "node_modules/isobject": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/isomorphic-ws": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-5.0.0.tgz", @@ -9736,10 +12345,24 @@ "node": ">=10" } }, + "node_modules/istanbul-lib-source-maps": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.4.tgz", + "integrity": "sha512-wHOoEsNJTVltaJp8eVkm8w+GVkVNHT2YDYo53YdzQEL2gWm1hBX5cGFR9hQJtuGLebidVX7et3+dmDZrmclduw==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.23", + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/istanbul-reports": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.6.tgz", - "integrity": "sha512-TLgnMkKg3iTDsQ9PbPTdpfAK2DzjF9mqUG7RMgcQl8oFjad8ob4laGxv5XV5U9MAfx8D6tSJiUyuAwzLicaxlg==", + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", "dev": true, "dependencies": { 
"html-escaper": "^2.0.0", @@ -9793,18 +12416,18 @@ } }, "node_modules/jose": { - "version": "4.15.5", - "resolved": "https://registry.npmjs.org/jose/-/jose-4.15.5.tgz", - "integrity": "sha512-jc7BFxgKPKi94uOvEmzlSWFFe2+vASyXaKUpdQKatWAESU2MWjDfFf0fdfc83CDKcA5QecabZeNLyfhe3yKNkg==", + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/jose/-/jose-5.2.3.tgz", + "integrity": "sha512-KUXdbctm1uHVL8BYhnyHkgp3zDX5KW8ZhAKVFEfUbU2P8Alpzjb+48hHvjOdQIyPshoblhzsuqOwEEAbtHVirA==", "dev": true, "funding": { "url": "https://github.com/sponsors/panva" } }, "node_modules/jotai": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/jotai/-/jotai-2.6.3.tgz", - "integrity": "sha512-0htSJ2d6426ZdSEYHncJHXY6Lkgde1Hc2HE/ADIRi9d2L3hQL+jLKY1LkWBMeCNyOSlKH8+1u/Gc33Ox0uq21Q==", + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/jotai/-/jotai-2.7.2.tgz", + "integrity": "sha512-6Ft5kpNu8p93Ssf1Faoza3hYQZRIYp7rioK8MwTTFnbQKwUyZElwquPwl1h6U0uo9hC0jr+ghO3gcSjc6P35/Q==", "engines": { "node": ">=12.20.0" }, @@ -9821,15 +12444,6 @@ } } }, - "node_modules/js-string-escape": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/js-string-escape/-/js-string-escape-1.0.1.tgz", - "integrity": "sha512-Smw4xcfIQ5LVjAOuJCvN/zIodzA/BBSsluuoSykP+lUvScIi4U6RJLfwHet5cxFnCswUjISV8oAXaqaJDY3chg==", - "dev": true, - "engines": { - "node": ">= 0.8" - } - }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -9853,43 +12467,38 @@ "dev": true }, "node_modules/jsdom": { - "version": "21.1.2", - "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-21.1.2.tgz", - "integrity": "sha512-sCpFmK2jv+1sjff4u7fzft+pUh2KSUbUrEHYHyfSIbGTIcmnjyp83qg6qLwdJ/I3LpTXx33ACxeRL7Lsyc6lGQ==", + "version": "24.0.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-24.0.0.tgz", + "integrity": "sha512-UDS2NayCvmXSXVP6mpTj+73JnNQadZlr9N68189xib2tx5Mls7swlTNao26IoHv46BZJFvXygyRtyXd1feAk1A==", "dev": true, "dependencies": { - "abab": "^2.0.6", - "acorn": "^8.8.2", - "acorn-globals": "^7.0.0", - "cssstyle": "^3.0.0", - "data-urls": "^4.0.0", + "cssstyle": "^4.0.1", + "data-urls": "^5.0.0", "decimal.js": "^10.4.3", - "domexception": "^4.0.0", - "escodegen": "^2.0.0", "form-data": "^4.0.0", - "html-encoding-sniffer": "^3.0.0", - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.1", + "html-encoding-sniffer": "^4.0.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.2", "is-potential-custom-element-name": "^1.0.1", - "nwsapi": "^2.2.4", + "nwsapi": "^2.2.7", "parse5": "^7.1.2", "rrweb-cssom": "^0.6.0", "saxes": "^6.0.0", "symbol-tree": "^3.2.4", - "tough-cookie": "^4.1.2", - "w3c-xmlserializer": "^4.0.0", + "tough-cookie": "^4.1.3", + "w3c-xmlserializer": "^5.0.0", "webidl-conversions": "^7.0.0", - "whatwg-encoding": "^2.0.0", - "whatwg-mimetype": "^3.0.0", - "whatwg-url": "^12.0.1", - "ws": "^8.13.0", - "xml-name-validator": "^4.0.0" + "whatwg-encoding": "^3.1.1", + "whatwg-mimetype": "^4.0.0", + "whatwg-url": "^14.0.0", + "ws": "^8.16.0", + "xml-name-validator": "^5.0.0" }, "engines": { - "node": ">=14" + "node": ">=18" }, "peerDependencies": { - "canvas": "^2.5.0" + "canvas": "^2.11.2" }, "peerDependenciesMeta": { "canvas": { @@ -9897,45 +12506,6 @@ } } }, - "node_modules/jsdom/node_modules/agent-base": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", - "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", - 
"dev": true, - "dependencies": { - "debug": "4" - }, - "engines": { - "node": ">= 6.0.0" - } - }, - "node_modules/jsdom/node_modules/http-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", - "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", - "dev": true, - "dependencies": { - "@tootallnate/once": "2", - "agent-base": "6", - "debug": "4" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/jsdom/node_modules/https-proxy-agent": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", - "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", - "dev": true, - "dependencies": { - "agent-base": "6", - "debug": "4" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/jsesc": { "version": "2.5.2", "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", @@ -10125,6 +12695,14 @@ "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" }, + "node_modules/linkify-it": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz", + "integrity": "sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==", + "dependencies": { + "uc.micro": "^1.0.1" + } + }, "node_modules/lint-staged": { "version": "13.3.0", "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-13.3.0.tgz", @@ -10598,10 +13176,14 @@ } }, "node_modules/local-pkg": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.4.3.tgz", - "integrity": "sha512-SFppqq5p42fe2qcZQqqEOiVRXl+WCP1MdT6k7BDEW1j++sp5fIY+/fdRQitvKgB5BrBcmrs5m/L0v2FrU5MY1g==", + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.5.0.tgz", + "integrity": "sha512-ok6z3qlYyCDS4ZEU27HaU6x/xZa9Whf8jD4ptH5UZTQYZVYeb9bnZ3ojVhiJNLiXK1Hfc0GNbLXcmZ5plLDDBg==", "dev": true, + "dependencies": { + "mlly": "^1.4.2", + "pkg-types": "^1.0.3" + }, "engines": { "node": ">=14" }, @@ -10629,6 +13211,11 @@ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, + "node_modules/lodash.debounce": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", + "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==" + }, "node_modules/lodash.merge": { "version": "4.6.2", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", @@ -10641,6 +13228,12 @@ "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==", "dev": true }, + "node_modules/lodash.sortby": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", + "integrity": "sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==", + "dev": true + }, "node_modules/log-symbols": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", @@ -10756,9 +13349,9 @@ } }, "node_modules/magic-string": { - "version": "0.30.5", - "resolved": 
"https://registry.npmjs.org/magic-string/-/magic-string-0.30.5.tgz", - "integrity": "sha512-7xlpfBaQaP/T6Vh8MO/EqXSW5En6INHEvEXQiuff7Gku0PWjU3uf6w/j9o7O+SpB5fOAkrI5HeoNgwjEO0pFsA==", + "version": "0.30.8", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.8.tgz", + "integrity": "sha512-ISQTe55T2ao7XtlAStud6qwYPZjE4GK1S/BeVPus4jrq6JuOnQ00YKQC581RWhR122W7msZV263KzVeLoqidyQ==", "dev": true, "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" @@ -10767,6 +13360,17 @@ "node": ">=12" } }, + "node_modules/magicast": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.3.3.tgz", + "integrity": "sha512-ZbrP1Qxnpoes8sz47AM0z08U+jW6TyRgZzcWy3Ma3vDhJttwMwAFDMMQFobwdBxByBD46JYmxRzeF7w2+wJEuw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.23.6", + "@babel/types": "^7.23.6", + "source-map-js": "^1.0.2" + } + }, "node_modules/make-dir": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", @@ -10795,9 +13399,9 @@ } }, "node_modules/make-dir/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "dev": true, "dependencies": { "lru-cache": "^6.0.0" @@ -10830,6 +13434,29 @@ "node": ">=0.10.0" } }, + "node_modules/markdown-it": { + "version": "12.3.2", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz", + "integrity": "sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==", + "dependencies": { + "argparse": "^2.0.1", + "entities": "~2.1.0", + "linkify-it": "^3.0.1", + "mdurl": "^1.0.1", + "uc.micro": "^1.0.5" + }, + "bin": { + "markdown-it": "bin/markdown-it.js" + } + }, + "node_modules/markdown-it/node_modules/entities": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", + "integrity": "sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==", + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, "node_modules/markdown-table": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.3.tgz", @@ -10839,18 +13466,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/md5-hex": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/md5-hex/-/md5-hex-3.0.1.tgz", - "integrity": "sha512-BUiRtTtV39LIJwinWBjqVsU9xhdnz7/i889V859IBFpuqGAj6LuOvHv5XLbgZ2R7ptJoJaEcxkv88/h25T7Ciw==", - "dev": true, - "dependencies": { - "blueimp-md5": "^2.10.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/mdast-util-find-and-replace": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.1.tgz", @@ -11117,6 +13732,16 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/mdn-data": { + "version": "2.0.30", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz", + "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==" + }, + "node_modules/mdurl": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==" + }, "node_modules/merge-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", @@ -11135,7 +13760,6 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/meros/-/meros-1.3.0.tgz", "integrity": "sha512-2BNGOimxEz5hmjUG2FwoxCt5HN7BXdaWyFqEwxPTrJzVdABtrL4TiHTcsWSFAxPQ/tOnEaQEJh3qWq71QRMY+w==", - "dev": true, "engines": { "node": ">=13" }, @@ -11762,9 +14386,9 @@ } }, "node_modules/mlly": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.5.0.tgz", - "integrity": "sha512-NPVQvAY1xr1QoVeG0cy8yUYC7FQcOx6evl/RjT1wL5FvzPnzOysoqB/jmx/DhssT2dYa8nxECLAaFI/+gVLhDQ==", + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.6.1.tgz", + "integrity": "sha512-vLgaHvaeunuOXHSmEbZ9izxPx3USsk8KCQ8iC+aTlp5sKRSoZvwhHh5L9VbKSaVC6sJDqbyohIS76E2VmHIPAA==", "dev": true, "dependencies": { "acorn": "^8.11.3", @@ -11826,12 +14450,6 @@ "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", "dev": true }, - "node_modules/natural-compare-lite": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz", - "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==", - "dev": true - }, "node_modules/neo-async": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", @@ -11851,6 +14469,8 @@ "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.0.tgz", "integrity": "sha512-mNcltoe1R8o7STTegSOHdnJNN7s5EUvhoS7ShnTHDyOSd+8H+UdWODq6qSv67PjC8Zc5JRT8+oLAMCr0SIXw7g==", "dev": true, + "optional": true, + "peer": true, "engines": { "node": "^16 || ^18 || >= 20" } @@ -11932,11 +14552,21 @@ "node": ">=8" } }, + "node_modules/nth-check": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", + "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", + "dependencies": { + "boolbase": "^1.0.0" + }, + "funding": { + "url": "https://github.com/fb55/nth-check?sponsor=1" + } + }, "node_modules/nullthrows": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/nullthrows/-/nullthrows-1.1.1.tgz", - "integrity": "sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw==", - "dev": true + "integrity": "sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw==" }, "node_modules/nwsapi": { "version": "2.2.7", @@ -12013,28 +14643,29 @@ } }, "node_modules/object.entries": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.7.tgz", - "integrity": "sha512-jCBs/0plmPsOnrKAfFQXRG2NFjlhZgjjcBLSmTnEhU8U6vVTsVe8ANeQJCHTl3gSsI4J+0emOoCgoKlmQPMgmA==", + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.8.tgz", + "integrity": "sha512-cmopxi8VwRIAw/fkijJohSfpef5PdN0pMQJN6VC/ZKvn0LIknWD8KtgY6KlQdEc4tIjcQ3HxSMmnvtzIscdaYQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" } }, 
"node_modules/object.fromentries": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.7.tgz", - "integrity": "sha512-UPbPHML6sL8PI/mOqPwsH4G6iyXcCGzLin8KvEPenOZN5lpCNBZZQ+V62vdjB1mQHrmqGQt5/OJzemUA+KJmEA==", + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", + "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -12044,39 +14675,45 @@ } }, "node_modules/object.groupby": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.1.tgz", - "integrity": "sha512-HqaQtqLnp/8Bn4GL16cj+CUYbnpe1bh0TtEaWvybszDG4tgxCJuRpV8VGuvNaI1fAnI4lUJzDG55MXcOH4JZcQ==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz", + "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2" + }, + "engines": { + "node": ">= 0.4" } }, "node_modules/object.hasown": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.3.tgz", - "integrity": "sha512-fFI4VcYpRHvSLXxP7yiZOMAd331cPfd2p7PFDVbgUsYOfCT3tICVqXWngbjr4m49OvsBwUBQ6O2uQoJvy3RexA==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.4.tgz", + "integrity": "sha512-FZ9LZt9/RHzGySlBARE3VF+gE26TxR38SdmqOqliuTnl9wrKulaQs+4dee1V+Io8VfxqzAfHu6YuRgUy8OHoTg==", "dev": true, "dependencies": { - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/object.values": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.7.tgz", - "integrity": "sha512-aU6xnDFYT3x17e/f0IiiwlGPTy2jzMySGfUB4fq6z7CV8l85CWHDk5ErhyhpfDHhrOMwGFhSQkhMGHaIotA6Ng==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.0.tgz", + "integrity": "sha512-yBYjY9QX2hnRmZHAjG/f13MzmBzxzYgQhFrke06TTyKY5zSTEqkOeukBzIdVA3j3ulu8Qa3MbVFShV7T2RmGtQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -12552,12 +15189,12 @@ } }, "node_modules/playwright": { - "version": "1.41.1", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.41.1.tgz", - "integrity": "sha512-gdZAWG97oUnbBdRL3GuBvX3nDDmUOuqzV/D24dytqlKt+eI5KbwusluZRGljx1YoJKZ2NRPaeWiFTeGZO7SosQ==", + "version": "1.42.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.42.1.tgz", + "integrity": "sha512-PgwB03s2DZBcNRoW+1w9E+VkLBxweib6KTXM0M3tkiT4jVxKSi6PmVJ591J+0u10LUrgxB7dLRbiJqO5s2QPMg==", "dev": true, 
"dependencies": { - "playwright-core": "1.41.1" + "playwright-core": "1.42.1" }, "bin": { "playwright": "cli.js" @@ -12570,9 +15207,9 @@ } }, "node_modules/playwright-core": { - "version": "1.41.1", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.41.1.tgz", - "integrity": "sha512-/KPO5DzXSMlxSX77wy+HihKGOunh3hqndhqeo/nMxfigiKzogn8kfL0ZBDu0L1RKgan5XHCPmn6zXd2NUJgjhg==", + "version": "1.42.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.42.1.tgz", + "integrity": "sha512-mxz6zclokgrke9p1vtdy/COWBH+eOZgYUVVU34C73M+4j4HLlQJHtfcqiqqxpP0o8HhMkflvfbquLX5dg6wlfA==", "dev": true, "bin": { "playwright-core": "cli.js" @@ -12595,10 +15232,30 @@ "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, + "node_modules/popmotion": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/popmotion/-/popmotion-11.0.3.tgz", + "integrity": "sha512-Y55FLdj3UxkR7Vl3s7Qr4e9m0onSnP8W7d/xQLsoJM40vs6UKHFdygs6SWryasTZYqugMjm3BepCF4CWXDiHgA==", + "dependencies": { + "framesync": "6.0.1", + "hey-listen": "^1.0.8", + "style-value-types": "5.0.0", + "tslib": "^2.1.0" + } + }, + "node_modules/possible-typed-array-names": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", + "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/postcss": { - "version": "8.4.33", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.33.tgz", - "integrity": "sha512-Kkpbhhdjw2qQs2O2DGX+8m5OVqEcbB9HRBvuYM9pgrjEFUg30A9LmXNlTAUj4S9kgtGyrMbTzVjH7E+s5Re2yg==", + "version": "8.4.38", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz", + "integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==", "funding": [ { "type": "opencollective", @@ -12616,7 +15273,7 @@ "dependencies": { "nanoid": "^3.3.7", "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" + "source-map-js": "^1.2.0" }, "engines": { "node": "^10 || ^12 || >=14" @@ -12698,14 +15355,6 @@ "node": ">=14" } }, - "node_modules/postcss-load-config/node_modules/yaml": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz", - "integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==", - "engines": { - "node": ">= 14" - } - }, "node_modules/postcss-nested": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.0.1.tgz", @@ -12998,17 +15647,16 @@ } }, "node_modules/query-string": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/query-string/-/query-string-7.1.3.tgz", - "integrity": "sha512-hh2WYhq4fi8+b+/2Kg9CEge4fDPvHS534aOOvOZeQ3+Vf2mCFsaFBYj0i+iXcAq6I9Vzp5fjMFBlONvayDC1qg==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/query-string/-/query-string-9.0.0.tgz", + "integrity": "sha512-4EWwcRGsO2H+yzq6ddHcVqkCQ2EFUSfDMEjF8ryp8ReymyZhIuaFRGLomeOQLkrzacMHoyky2HW0Qe30UbzkKw==", "dependencies": { - "decode-uri-component": "^0.2.2", - "filter-obj": "^1.1.0", - "split-on-first": "^1.0.0", - "strict-uri-encode": "^2.0.0" + "decode-uri-component": "^0.4.1", + "filter-obj": "^5.1.0", + "split-on-first": "^3.0.0" }, "engines": { - "node": ">=6" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -13040,9 +15688,9 @@ ] }, "node_modules/ramda": { - "version": 
"0.28.0", - "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.28.0.tgz", - "integrity": "sha512-9QnLuG/kPVgWvMQ4aODhsBUFKOUmnbUnsSXACv+NCQZcHbeb+v8Lodp8OVxtRULN1/xOyYLLaL6npE6dMq5QTA==", + "version": "0.29.1", + "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.29.1.tgz", + "integrity": "sha512-OfxIeWzd4xdUNxlWhgFazxsA/nl3mS4/jGZI5n00uWOoSSFRhC1b6gl6xvmzUamgmqELraWp0J/qqVlXYPDPyA==", "funding": { "type": "opencollective", "url": "https://opencollective.com/ramda" @@ -13059,6 +15707,17 @@ "node": ">=0.10.0" } }, + "node_modules/react-accessible-treeview": { + "version": "2.8.3", + "resolved": "https://registry.npmjs.org/react-accessible-treeview/-/react-accessible-treeview-2.8.3.tgz", + "integrity": "sha512-taDTIYZ6p96/zIhJBUKvyGTXcInudatP/9fwKG0BW+VRf1PmU5hOT2FkDovDKzSwj2VSOj1PRx+E6ojhOA+2xA==", + "peerDependencies": { + "classnames": "^2.2.6", + "prop-types": "^15.7.2", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, "node_modules/react-datepicker": { "version": "4.25.0", "resolved": "https://registry.npmjs.org/react-datepicker/-/react-datepicker-4.25.0.tgz", @@ -13076,6 +15735,21 @@ "react-dom": "^16.9.0 || ^17 || ^18" } }, + "node_modules/react-datepicker/node_modules/date-fns": { + "version": "2.30.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz", + "integrity": "sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==", + "dependencies": { + "@babel/runtime": "^7.21.0" + }, + "engines": { + "node": ">=0.11" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/date-fns" + } + }, "node_modules/react-diff-view": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/react-diff-view/-/react-diff-view-3.2.0.tgz", @@ -13110,12 +15784,11 @@ "integrity": "sha512-nsO+KSNgo1SbJqJEYRE9ERzo7YtYbou/OqjSQKxV7jcKox7+usiUVZOAC+XnDOABXggQTno0Y1CpVnuWEc1boQ==" }, "node_modules/react-hook-form": { - "version": "7.49.3", - "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.49.3.tgz", - "integrity": "sha512-foD6r3juidAT1cOZzpmD/gOKt7fRsDhXXZ0y28+Al1CHgX+AY1qIN9VSIIItXRq1dN68QrRwl1ORFlwjBaAqeQ==", + "version": "7.51.2", + "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.51.2.tgz", + "integrity": "sha512-y++lwaWjtzDt/XNnyGDQy6goHskFualmDlf+jzEZvjvz6KWDf7EboL7pUvRCzPTJd0EOPpdekYaQLEvvG6m6HA==", "engines": { - "node": ">=18", - "pnpm": "8" + "node": ">=12.22.0" }, "funding": { "type": "opencollective", @@ -13210,12 +15883,57 @@ "node": ">=0.10.0" } }, + "node_modules/react-remove-scroll": { + "version": "2.5.5", + "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.5.tgz", + "integrity": "sha512-ImKhrzJJsyXJfBZ4bzu8Bwpka14c/fQt0k+cyFp/PBhTfyDnU5hjOtM4AG/0AMyy8oKzOTR0lDgJIM7pYXI0kw==", + "dependencies": { + "react-remove-scroll-bar": "^2.3.3", + "react-style-singleton": "^2.2.1", + "tslib": "^2.1.0", + "use-callback-ref": "^1.3.0", + "use-sidecar": "^1.1.2" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/react-remove-scroll-bar": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.6.tgz", + "integrity": 
"sha512-DtSYaao4mBmX+HDo5YWYdBWQwYIQQshUV/dVxFxK+KM26Wjwp1gZ6rv6OC3oujI6Bfu6Xyg3TwK533AQutsn/g==", + "dependencies": { + "react-style-singleton": "^2.2.1", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, "node_modules/react-router": { - "version": "6.21.3", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.21.3.tgz", - "integrity": "sha512-a0H638ZXULv1OdkmiK6s6itNhoy33ywxmUFT/xtSoVyf9VnC7n7+VT4LjVzdIHSaF5TIh9ylUgxMXksHTgGrKg==", + "version": "6.22.3", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.22.3.tgz", + "integrity": "sha512-dr2eb3Mj5zK2YISHK++foM9w4eBnO23eKnZEDs7c880P6oKbrjz/Svg9+nxqtHQK+oMW4OtjZca0RqPglXxguQ==", "dependencies": { - "@remix-run/router": "1.14.2" + "@remix-run/router": "1.15.3" }, "engines": { "node": ">=14.0.0" @@ -13225,12 +15943,12 @@ } }, "node_modules/react-router-dom": { - "version": "6.21.3", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.21.3.tgz", - "integrity": "sha512-kNzubk7n4YHSrErzjLK72j0B5i969GsuCGazRl3G6j1zqZBLjuSlYBdVdkDOgzGdPIffUOc9nmgiadTEVoq91g==", + "version": "6.22.3", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.22.3.tgz", + "integrity": "sha512-7ZILI7HjcE+p31oQvwbokjk6OA/bnFxrhJ19n82Ex9Ph8fNAq+Hm/7KchpMGlTgWhUxRHMMCut+vEtNpWpowKw==", "dependencies": { - "@remix-run/router": "1.14.2", - "react-router": "6.21.3" + "@remix-run/router": "1.15.3", + "react-router": "6.22.3" }, "engines": { "node": ">=14.0.0" @@ -13276,6 +15994,28 @@ "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" } }, + "node_modules/react-style-singleton": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.1.tgz", + "integrity": "sha512-ZWj0fHEMyWkHzKYUr2Bs/4zU6XLmq9HsgBURm7g5pAVfyn49DgUiNgY2d4lXRlYSiCif9YBGpQleewkcqddc7g==", + "dependencies": { + "get-nonce": "^1.0.0", + "invariant": "^2.2.4", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, "node_modules/react-test-renderer": { "version": "18.2.0", "resolved": "https://registry.npmjs.org/react-test-renderer/-/react-test-renderer-18.2.0.tgz", @@ -13365,9 +16105,9 @@ } }, "node_modules/recharts": { - "version": "2.12.2", - "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.12.2.tgz", - "integrity": "sha512-9bpxjXSF5g81YsKkTSlaX7mM4b6oYI1mIYck6YkUcWuL3tomADccI51/6thY4LmvhYuRTwpfrOvE80Zc3oBRfQ==", + "version": "2.12.3", + "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.12.3.tgz", + "integrity": "sha512-vE/F7wTlokf5mtCqVDJlVKelCjliLSJ+DJxj79XlMREm7gpV7ljwbrwE3CfeaoDlOaLX+6iwHaVRn9587YkwIg==", "dependencies": { "clsx": "^2.0.0", "eventemitter3": "^4.0.1", @@ -13400,15 +16140,16 @@ "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==" }, "node_modules/reflect.getprototypeof": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.4.tgz", - "integrity": "sha512-ECkTw8TmJwW60lOTR+ZkODISW6RQ8+2CL3COqtiJKLd6MmB45hN51HprHFziKLGkAuTGQhBb91V8cy+KHlaCjw==", + "version": "1.0.6", + "resolved": 
"https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.6.tgz", + "integrity": "sha512-fmfw4XgoDke3kdI6h4xcUz1dG8uaiv5q9gcEwLS4Pnth2kxT+GZ7YehS1JTMGBQmtV7Y4GFGbs2re2NqhdozUg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1", + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.1", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", "globalthis": "^1.0.3", "which-builtin-type": "^1.1.3" }, @@ -13419,20 +16160,45 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/regenerate": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", + "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==" + }, + "node_modules/regenerate-unicode-properties": { + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.1.tgz", + "integrity": "sha512-X007RyZLsCJVVrjgEFVpLUTZwyOZk3oiL75ZcuYjlIWd6rNJtOjkBwQc5AsRrpbKVkxN6sklw/k/9m2jJYOf8Q==", + "dependencies": { + "regenerate": "^1.4.2" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/regenerator-runtime": { "version": "0.14.1", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==" }, + "node_modules/regenerator-transform": { + "version": "0.15.2", + "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.2.tgz", + "integrity": "sha512-hfMp2BoF0qOk3uc5V20ALGDS2ddjQaLrdl7xrGXvAIow7qeWRM2VA2HuCHkUKk9slq3VwEwLNK3DFBqDfPGYtg==", + "dependencies": { + "@babel/runtime": "^7.8.4" + } + }, "node_modules/regexp.prototype.flags": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz", - "integrity": "sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==", + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", + "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "set-function-name": "^2.0.0" + "call-bind": "^1.0.6", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "set-function-name": "^2.0.1" }, "engines": { "node": ">= 0.4" @@ -13441,22 +16207,45 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/regexpp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", - "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", - "dev": true, + "node_modules/regexpu-core": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-5.3.2.tgz", + "integrity": "sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ==", + "dependencies": { + "@babel/regjsgen": "^0.8.0", + "regenerate": "^1.4.2", + "regenerate-unicode-properties": "^10.1.0", + "regjsparser": "^0.9.1", + "unicode-match-property-ecmascript": "^2.0.0", + "unicode-match-property-value-ecmascript": "^2.1.0" + }, "engines": { - "node": ">=8" + "node": ">=4" + } + }, + 
"node_modules/regjsparser": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.9.1.tgz", + "integrity": "sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ==", + "dependencies": { + "jsesc": "~0.5.0" }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" + "bin": { + "regjsparser": "bin/parser" + } + }, + "node_modules/regjsparser/node_modules/jsesc": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", + "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==", + "bin": { + "jsesc": "bin/jsesc" } }, "node_modules/rehackt": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/rehackt/-/rehackt-0.0.3.tgz", - "integrity": "sha512-aBRHudKhOWwsTvCbSoinzq+Lej/7R8e8UoPvLZo5HirZIIBLGAgdG7SL9QpdcBoQ7+3QYPi3lRLknAzXBlhZ7g==", + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/rehackt/-/rehackt-0.0.6.tgz", + "integrity": "sha512-l3WEzkt4ntlEc/IB3/mF6SRgNHA6zfQR7BlGOgBTOmx7IJJXojDASav+NsgXHFjHn+6RmwqsGPFgZpabWpeOdw==", "peerDependencies": { "@types/react": "*", "react": "*" @@ -13619,6 +16408,15 @@ "node": ">=8" } }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, "node_modules/response-iterator": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/response-iterator/-/response-iterator-0.2.6.tgz", @@ -13671,17 +16469,35 @@ } }, "node_modules/rollup": { - "version": "3.29.4", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.29.4.tgz", - "integrity": "sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==", + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.14.0.tgz", + "integrity": "sha512-Qe7w62TyawbDzB4yt32R0+AbIo6m1/sqO7UPzFS8Z/ksL5mrfhA0v4CavfdmFav3D+ub4QeAgsGEe84DoWe/nQ==", + "dependencies": { + "@types/estree": "1.0.5" + }, "bin": { "rollup": "dist/bin/rollup" }, "engines": { - "node": ">=14.18.0", + "node": ">=18.0.0", "npm": ">=8.0.0" }, "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.14.0", + "@rollup/rollup-android-arm64": "4.14.0", + "@rollup/rollup-darwin-arm64": "4.14.0", + "@rollup/rollup-darwin-x64": "4.14.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.14.0", + "@rollup/rollup-linux-arm64-gnu": "4.14.0", + "@rollup/rollup-linux-arm64-musl": "4.14.0", + "@rollup/rollup-linux-powerpc64le-gnu": "4.14.0", + "@rollup/rollup-linux-riscv64-gnu": "4.14.0", + "@rollup/rollup-linux-s390x-gnu": "4.14.0", + "@rollup/rollup-linux-x64-gnu": "4.14.0", + "@rollup/rollup-linux-x64-musl": "4.14.0", + "@rollup/rollup-win32-arm64-msvc": "4.14.0", + "@rollup/rollup-win32-ia32-msvc": "4.14.0", + "@rollup/rollup-win32-x64-msvc": "4.14.0", "fsevents": "~2.3.2" } }, @@ -13732,13 +16548,13 @@ } }, "node_modules/safe-array-concat": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.0.tgz", - "integrity": "sha512-ZdQ0Jeb9Ofti4hbt5lX3T2JcAamT9hfzYU1MNB+z/jaEbB6wfFfPIR/zEORmZqobkCCJhSjodobH6WHNmJ97dg==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", + 
"integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", "dev": true, "dependencies": { - "call-bind": "^1.0.5", - "get-intrinsic": "^1.2.2", + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4", "has-symbols": "^1.0.3", "isarray": "^2.0.5" }, @@ -13770,13 +16586,13 @@ ] }, "node_modules/safe-regex-test": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.2.tgz", - "integrity": "sha512-83S9w6eFq12BBIJYvjMux6/dkirb8+4zJRA9cxNBVb7Wq5fJBW+Xze48WqR8pxua7bDuAaaAxtVVd4Idjp1dBQ==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", + "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", "dev": true, "dependencies": { - "call-bind": "^1.0.5", - "get-intrinsic": "^1.2.2", + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", "is-regex": "^1.1.4" }, "engines": { @@ -13849,35 +16665,54 @@ "dev": true }, "node_modules/set-function-length": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.0.tgz", - "integrity": "sha512-4DBHDoyHlM1IRPGYcoxexgh67y4ueR53FKV1yyxwFMY7aCqcN/38M1+SwZ/qJQ8iLv7+ck385ot4CcisOAPT9w==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", "dev": true, "dependencies": { - "define-data-property": "^1.1.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.2", + "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.1" + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" } }, "node_modules/set-function-name": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", - "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", "dev": true, "dependencies": { - "define-data-property": "^1.0.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", "functions-have-names": "^1.2.3", - "has-property-descriptors": "^1.0.0" + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" } }, + "node_modules/set-value": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/set-value/-/set-value-4.1.0.tgz", + "integrity": "sha512-zTEg4HL0RwVrqcWs3ztF+x1vkxfm0lP+MQQFPiMJTKVceBwEV0A569Ou8l9IYQG8jOZdMVI1hGsc0tmeD2o/Lw==", + "funding": [ + "https://github.com/sponsors/jonschlinkert", + "https://paypal.me/jonathanschlinkert", + "https://jonschlinkert.dev/sponsor" + ], + "dependencies": { + "is-plain-object": "^2.0.4", + "is-primitive": "^3.0.1" + }, + "engines": { + "node": ">=11.0" + } + }, "node_modules/setimmediate": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", @@ -13930,14 +16765,18 @@ } }, "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": 
"sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", "dev": true, "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -14002,9 +16841,9 @@ } }, "node_modules/source-map-js": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", - "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", + "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", "engines": { "node": ">=0.10.0" } @@ -14019,11 +16858,14 @@ } }, "node_modules/split-on-first": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-1.1.0.tgz", - "integrity": "sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-3.0.0.tgz", + "integrity": "sha512-qxQJTx2ryR0Dw0ITYyekNQWpz6f8dGd7vffGNflQQ3Iqj9NJ6qiZ7ELpZsJ/QBhIVAiDfXdag3+Gp8RvWa62AA==", "engines": { - "node": ">=6" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/sponge-case": { @@ -14093,14 +16935,6 @@ "node": ">=10.0.0" } }, - "node_modules/strict-uri-encode": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz", - "integrity": "sha512-QwiXZgpRcKkhTj2Scnn++4PKtWsH0kpzZ62L2R6c/LUVYv7hVnZqcg2+sMuT6R7Jusu1vviK/MFsu6kNJfWlEQ==", - "engines": { - "node": ">=4" - } - }, "node_modules/string_decoder": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", @@ -14153,34 +16987,41 @@ } }, "node_modules/string.prototype.matchall": { - "version": "4.0.10", - "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.10.tgz", - "integrity": "sha512-rGXbGmOEosIQi6Qva94HUjgPs9vKW+dkG7Y8Q5O2OYkWL6wFaTRZO8zM4mhP94uX55wgyrXzfS2aGtGzUL7EJQ==", + "version": "4.0.11", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.11.tgz", + "integrity": "sha512-NUdh0aDavY2og7IbBPenWqR9exH+E26Sv8e0/eTe1tltDGZL+GtBkDAnnyBtmekfK6/Dq3MkcGtzXFEd1LQrtg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1", + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", "has-symbols": "^1.0.3", - "internal-slot": "^1.0.5", - "regexp.prototype.flags": "^1.5.0", - "set-function-name": "^2.0.0", - "side-channel": "^1.0.4" + "internal-slot": "^1.0.7", + "regexp.prototype.flags": "^1.5.2", + "set-function-name": "^2.0.2", + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">= 0.4" }, 
"funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/string.prototype.trim": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz", - "integrity": "sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ==", + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", + "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.0", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -14190,28 +17031,31 @@ } }, "node_modules/string.prototype.trimend": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.7.tgz", - "integrity": "sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", + "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/string.prototype.trimstart": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz", - "integrity": "sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -14284,17 +17128,23 @@ } }, "node_modules/strip-literal": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-1.3.0.tgz", - "integrity": "sha512-PugKzOsyXpArk0yWmUwqOZecSO0GH0bPoctLcqNDH9J04pVW3lflYE0ujElBGTloevcxF5MofAOZ7C5l2b+wLg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-2.1.0.tgz", + "integrity": "sha512-Op+UycaUt/8FbN/Z2TWPBLge3jWrP3xj10f3fnYxf052bKuS3EKs1ZQcVGjnEMdsNVAM+plXRdmjrZ/KgG3Skw==", "dev": true, "dependencies": { - "acorn": "^8.10.0" + "js-tokens": "^9.0.0" }, "funding": { "url": "https://github.com/sponsors/antfu" } }, + "node_modules/strip-literal/node_modules/js-tokens": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.0.tgz", + "integrity": "sha512-WriZw1luRMlmV3LGJaR6QOJjWwgLUTf89OwT2lUOyjX2dJGBwgmIkbcz+7WFZjrZM635JOIR517++e/67CP9dQ==", + "dev": true + }, "node_modules/style-mod": { "version": "4.1.0", "resolved": 
"https://registry.npmjs.org/style-mod/-/style-mod-4.1.0.tgz", @@ -14308,6 +17158,15 @@ "inline-style-parser": "0.2.2" } }, + "node_modules/style-value-types": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/style-value-types/-/style-value-types-5.0.0.tgz", + "integrity": "sha512-08yq36Ikn4kx4YU6RD7jWEv27v4V+PUsOGa4n/as8Et3CuODMJQ00ENeAVXAeydX4Z2j1XHZF1K2sX4mGl18fA==", + "dependencies": { + "hey-listen": "^1.0.8", + "tslib": "^2.1.0" + } + }, "node_modules/subscriptions-transport-ws": { "version": "0.11.0", "resolved": "https://registry.npmjs.org/subscriptions-transport-ws/-/subscriptions-transport-ws-0.11.0.tgz", @@ -14483,6 +17342,38 @@ "resolved": "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz", "integrity": "sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==" }, + "node_modules/svgo": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/svgo/-/svgo-3.2.0.tgz", + "integrity": "sha512-4PP6CMW/V7l/GmKRKzsLR8xxjdHTV4IMvhTnpuHwwBazSIlw5W/5SmPjN8Dwyt7lKbSJrRDgp4t9ph0HgChFBQ==", + "dependencies": { + "@trysound/sax": "0.2.0", + "commander": "^7.2.0", + "css-select": "^5.1.0", + "css-tree": "^2.3.1", + "css-what": "^6.1.0", + "csso": "^5.0.5", + "picocolors": "^1.0.0" + }, + "bin": { + "svgo": "bin/svgo" + }, + "engines": { + "node": ">=14.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/svgo" + } + }, + "node_modules/svgo/node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "engines": { + "node": ">= 10" + } + }, "node_modules/swap-case": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/swap-case/-/swap-case-2.0.2.tgz", @@ -14507,11 +17398,11 @@ "dev": true }, "node_modules/tailwind-merge": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-2.2.1.tgz", - "integrity": "sha512-o+2GTLkthfa5YUt4JxPfzMIpQzZ3adD1vLVkvKE1Twl9UAhGsEbIZhHHZVRttyW177S8PDJI3bTQNaebyofK3Q==", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-2.2.2.tgz", + "integrity": "sha512-tWANXsnmJzgw6mQ07nE3aCDkCK4QdT3ThPMCzawoYA2Pws7vSTCvz3Vrjg61jVUGfFZPJzxEP+NimbcW+EdaDw==", "dependencies": { - "@babel/runtime": "^7.23.7" + "@babel/runtime": "^7.24.0" }, "funding": { "type": "github", @@ -14519,9 +17410,9 @@ } }, "node_modules/tailwindcss": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.1.tgz", - "integrity": "sha512-qAYmXRfk3ENzuPBakNK0SRrUDipP8NQnEY6772uDhflcQz5EhRdD7JNZxyrFHVQNCwULPBn6FNPp9brpO7ctcA==", + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.3.tgz", + "integrity": "sha512-U7sxQk/n397Bmx4JHbJx/iSOOv5G+II3f1kpLpY2QeUv5DcPdcTsYLlusZfq1NthHS1c1cZoyFmmkex1rzke0A==", "dependencies": { "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", @@ -14531,7 +17422,7 @@ "fast-glob": "^3.3.0", "glob-parent": "^6.0.2", "is-glob": "^4.0.3", - "jiti": "^1.19.1", + "jiti": "^1.21.0", "lilconfig": "^2.1.0", "micromatch": "^4.0.5", "normalize-path": "^3.0.0", @@ -14614,19 +17505,10 @@ } }, "node_modules/through": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", - "dev": true - }, 
- "node_modules/time-zone": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/time-zone/-/time-zone-1.0.0.tgz", - "integrity": "sha512-TIsDdtKo6+XrPtiTm1ssmMngN1sAhyKnTO2kunQWqNPWIVvCm15Wmw4SWInwTVgJ5u/Tr04+8Ei9TNcw4x4ONA==", - "dev": true, - "engines": { - "node": ">=4" - } + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", + "dev": true }, "node_modules/tiny-invariant": { "version": "1.3.3", @@ -14640,18 +17522,18 @@ "dev": true }, "node_modules/tinypool": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-0.5.0.tgz", - "integrity": "sha512-paHQtnrlS1QZYKF/GnLoOM/DN9fqaGOFbCbxzAhwniySnzl9Ebk8w73/dd34DAhe/obUbPAOldTyYXQZxnPBPQ==", + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-0.8.3.tgz", + "integrity": "sha512-Ud7uepAklqRH1bvwy22ynrliC7Dljz7Tm8M/0RBUW+YRa4YHhZ6e4PpgE+fu1zr/WqB1kbeuVrdfeuyIBpy4tw==", "dev": true, "engines": { "node": ">=14.0.0" } }, "node_modules/tinyspy": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-2.2.0.tgz", - "integrity": "sha512-d2eda04AN/cPOR89F7Xv5bK/jrQEhmcLFe6HFldoeO9AJtps+fqEnh486vnT/8y4bw38pSyxDcTCAq+Ks2aJTg==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-2.2.1.tgz", + "integrity": "sha512-KYad6Vy5VDWV4GH3fjpseMQ/XU2BhIYP7Vzd0LG44qRWm/Yt2WCOTicFdvmgo6gWaqooMQCawTtILVQJupKu7A==", "dev": true, "engines": { "node": ">=14.0.0" @@ -14697,6 +17579,11 @@ "node": ">=8.0" } }, + "node_modules/toggle-selection": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/toggle-selection/-/toggle-selection-1.0.6.tgz", + "integrity": "sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ==" + }, "node_modules/tough-cookie": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz", @@ -14731,15 +17618,15 @@ } }, "node_modules/tr46": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-4.1.1.tgz", - "integrity": "sha512-2lv/66T7e5yNyhAAC4NaKe5nVavzuGJQVVtRYLyQ2OI8tsJ61PMLlelehb0wi2Hx6+hT/OJUWZcw8MjlSRnxvw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.0.0.tgz", + "integrity": "sha512-tk2G5R2KRwBd+ZN0zaEXpmzdKyOYksXwywulIX95MBODjSzMIuQnQ3m8JxgbhnL1LeVo7lqQKsYa1O3Htl7K5g==", "dev": true, "dependencies": { - "punycode": "^2.3.0" + "punycode": "^2.3.1" }, "engines": { - "node": ">=14" + "node": ">=18" } }, "node_modules/tr46/node_modules/punycode": { @@ -14769,6 +17656,18 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/ts-api-utils": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz", + "integrity": "sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==", + "dev": true, + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "typescript": ">=4.2.0" + } + }, "node_modules/ts-interface-checker": { "version": "0.1.13", "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", @@ -14841,11 +17740,30 @@ "devOptional": true }, "node_modules/ts-toolbelt": { - "version": "6.15.5", - "resolved": "https://registry.npmjs.org/ts-toolbelt/-/ts-toolbelt-6.15.5.tgz", - "integrity": 
"sha512-FZIXf1ksVyLcfr7M317jbB67XFJhOO1YqdTcuGaq9q5jLUoTikukZ+98TPjKiP2jC5CgmYdWWYs0s2nLSU0/1A==", + "version": "9.6.0", + "resolved": "https://registry.npmjs.org/ts-toolbelt/-/ts-toolbelt-9.6.0.tgz", + "integrity": "sha512-nsZd8ZeNUzukXPlJmTBwUAuABDe/9qtVDelJeT/qW0ow3ZS3BsQJtNkan1802aM9Uf68/Y8ljw86Hu0h5IUW3w==", "dev": true }, + "node_modules/tsconfck": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/tsconfck/-/tsconfck-3.0.3.tgz", + "integrity": "sha512-4t0noZX9t6GcPTfBAbIbbIU4pfpCwh0ueq3S4O/5qXI1VwK1outmxhe9dOiEWqMz3MW2LKgDTpqWV+37IWuVbA==", + "bin": { + "tsconfck": "bin/tsconfck.js" + }, + "engines": { + "node": "^18 || >=20" + }, + "peerDependencies": { + "typescript": "^5.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, "node_modules/tsconfig-paths": { "version": "3.15.0", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", @@ -14875,27 +17793,6 @@ "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" }, - "node_modules/tsutils": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", - "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", - "dev": true, - "dependencies": { - "tslib": "^1.8.1" - }, - "engines": { - "node": ">= 6" - }, - "peerDependencies": { - "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" - } - }, - "node_modules/tsutils/node_modules/tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", - "dev": true - }, "node_modules/tunnel-agent": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", @@ -14948,29 +17845,30 @@ } }, "node_modules/typed-array-buffer": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz", - "integrity": "sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", + "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.1", - "is-typed-array": "^1.1.10" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" } }, "node_modules/typed-array-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz", - "integrity": "sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", + "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "has-proto": "^1.0.1", - "is-typed-array": "^1.1.10" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + 
"is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" @@ -14980,16 +17878,17 @@ } }, "node_modules/typed-array-byte-offset": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz", - "integrity": "sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", + "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", "dev": true, "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "has-proto": "^1.0.1", - "is-typed-array": "^1.1.10" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" @@ -14999,30 +17898,71 @@ } }, "node_modules/typed-array-length": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz", - "integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", + "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "is-typed-array": "^1.1.9" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/types-ramda": { + "version": "0.29.10", + "resolved": "https://registry.npmjs.org/types-ramda/-/types-ramda-0.29.10.tgz", + "integrity": "sha512-5PJiW/eiTPyXXBYGZOYGezMl6qj7keBiZheRwfjJZY26QPHsNrjfJnz0mru6oeqqoTHOni893Jfd6zyUXfQRWg==", + "dev": true, + "dependencies": { + "ts-toolbelt": "^9.6.0" + } + }, "node_modules/typescript": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", - "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==", + "version": "5.4.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.3.tgz", + "integrity": "sha512-KrPd3PKaCLr78MalgiwJnA25Nm8HAmdwN3mYUYZgG/wizIo9EainNVQI9/yDavtVFRN2h3k8uf3GLHuhDMgEHg==", "devOptional": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" }, "engines": { - "node": ">=4.2.0" + "node": ">=14.17" + } + }, + "node_modules/typescript-eslint": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-7.5.0.tgz", + "integrity": "sha512-eKhF39LRi2xYvvXh3h3S+mCxC01dZTIZBlka25o39i81VeQG+OZyfC4i2GEDspNclMRdXkg9uGhmvWMhjph2XQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/eslint-plugin": "7.5.0", + "@typescript-eslint/parser": "7.5.0", + "@typescript-eslint/utils": "7.5.0" + }, + "engines": { + "node": "^18.18.0 || >=20.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.56.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } } }, "node_modules/ua-parser-js": { @@ -15048,10 +17988,15 @@ "node": "*" } }, + 
"node_modules/uc.micro": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", + "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==" + }, "node_modules/ufo": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.3.2.tgz", - "integrity": "sha512-o+ORpgGwaYQXgqGDwd+hkS4PuZ3QnmqMMxRuajK/a38L6fTpcE5GPIfrf+L/KemFzfUpeUQc1rRS1iDBozvnFA==", + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.5.3.tgz", + "integrity": "sha512-Y7HYmWaFwPUmkoQCUIAYpKqkOf+SbVj/2fJJZ4RJMCfZp0rTGwRbzQD+HghfnhKOjL9E01okqz+ncJskGYfBNw==", "dev": true }, "node_modules/uglify-js": { @@ -15091,9 +18036,9 @@ } }, "node_modules/undici": { - "version": "5.28.3", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz", - "integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==", + "version": "5.28.4", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", + "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", "dev": true, "dependencies": { "@fastify/busboy": "^2.0.0" @@ -15102,6 +18047,48 @@ "node": ">=14.0" } }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "devOptional": true + }, + "node_modules/unicode-canonical-property-names-ecmascript": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz", + "integrity": "sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-match-property-ecmascript": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", + "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", + "dependencies": { + "unicode-canonical-property-names-ecmascript": "^2.0.0", + "unicode-property-aliases-ecmascript": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-match-property-value-ecmascript": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz", + "integrity": "sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-property-aliases-ecmascript": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz", + "integrity": "sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==", + "engines": { + "node": ">=4" + } + }, "node_modules/unidiff": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/unidiff/-/unidiff-1.0.4.tgz", @@ -15335,6 +18322,26 @@ "integrity": "sha512-Qp95D4TPJl1kC9SKigDcqgyM2VDVO4RiJc2d4qe5GrYm+zbIQCWWKAFaJNQ4BhdFeDGwBmAxqJBwWSJDb9T3BQ==", "dev": true }, + "node_modules/use-callback-ref": { + "version": "1.3.2", + "resolved": 
"https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.2.tgz", + "integrity": "sha512-elOQwe6Q8gqZgDA8mrh44qRTQqpIHDcZ3hXTLjBe1i4ph8XpNJnO+aQf3NaG+lriLopI4HMx9VjQLfPQ6vhnoA==", + "dependencies": { + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, "node_modules/use-query-params": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/use-query-params/-/use-query-params-2.2.1.tgz", @@ -15357,6 +18364,27 @@ } } }, + "node_modules/use-sidecar": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.2.tgz", + "integrity": "sha512-epTbsLuzZ7lPClpz2TyryBfztm7m+28DlEv2ZCQ3MDr5ssiwyOwGH/e5F9CkfWjJ1t4clvI58yF822/GUkjjhw==", + "dependencies": { + "detect-node-es": "^1.1.0", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "^16.9.0 || ^17.0.0 || ^18.0.0", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", @@ -15463,28 +18491,28 @@ } }, "node_modules/vite": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.2.tgz", - "integrity": "sha512-tBCZBNSBbHQkaGyhGCDUGqeo2ph8Fstyp6FMSvTtsXeZSPpSMGlviAOav2hxVTqFcx8Hj/twtWKsMJXNY0xI8w==", + "version": "5.2.8", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.2.8.tgz", + "integrity": "sha512-OyZR+c1CE8yeHw5V5t59aXsUPPVTHMDjEZz8MgguLL/Q7NblxhZUlTu9xSPqlsUO/y+X7dlU05jdhvyycD55DA==", "dependencies": { - "esbuild": "^0.18.10", - "postcss": "^8.4.27", - "rollup": "^3.27.1" + "esbuild": "^0.20.1", + "postcss": "^8.4.38", + "rollup": "^4.13.0" }, "bin": { "vite": "bin/vite.js" }, "engines": { - "node": "^14.18.0 || >=16.0.0" + "node": "^18.0.0 || >=20.0.0" }, "funding": { "url": "https://github.com/vitejs/vite?sponsor=1" }, "optionalDependencies": { - "fsevents": "~2.3.2" + "fsevents": "~2.3.3" }, "peerDependencies": { - "@types/node": ">= 14", + "@types/node": "^18.0.0 || >=20.0.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", @@ -15517,49 +18545,35 @@ } }, "node_modules/vite-node": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-0.31.4.tgz", - "integrity": "sha512-uzL377GjJtTbuc5KQxVbDu2xfU/x0wVjUtXQR2ihS21q/NK6ROr4oG0rsSkBBddZUVCwzfx22in76/0ZZHXgkQ==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-1.4.0.tgz", + "integrity": "sha512-VZDAseqjrHgNd4Kh8icYHWzTKSCZMhia7GyHfhtzLW33fZlG9SwsB6CEhgyVOWkJfJ2pFLrp/Gj1FSfAiqH9Lw==", "dev": true, "dependencies": { "cac": "^6.7.14", "debug": "^4.3.4", - "mlly": "^1.2.0", - "pathe": "^1.1.0", + "pathe": "^1.1.1", "picocolors": "^1.0.0", - "vite": "^3.0.0 || ^4.0.0" + "vite": "^5.0.0" }, "bin": { "vite-node": "vite-node.mjs" }, "engines": { - "node": ">=v14.18.0" + "node": "^18.0.0 || >=20.0.0" }, "funding": { "url": "https://opencollective.com/vitest" } }, - "node_modules/vite-plugin-svgr": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/vite-plugin-svgr/-/vite-plugin-svgr-3.3.0.tgz", - "integrity": "sha512-vWZMCcGNdPqgziYFKQ3Y95XP0d0YGp28+MM3Dp9cTa/px5CKcHHrIoPl2Jw81rgVm6/ZUNONzjXbZQZ7Kw66og==", - "dependencies": { - "@rollup/pluginutils": "^5.0.4", - 
"@svgr/core": "^8.1.0", - "@svgr/plugin-jsx": "^8.1.0" - }, - "peerDependencies": { - "vite": "^2.6.0 || 3 || 4" - } - }, "node_modules/vite-tsconfig-paths": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/vite-tsconfig-paths/-/vite-tsconfig-paths-4.3.1.tgz", - "integrity": "sha512-cfgJwcGOsIxXOLU/nELPny2/LUD/lcf1IbfyeKTv2bsupVbTH/xpFtdQlBmIP1GEK2CjjLxYhFfB+QODFAx5aw==", + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/vite-tsconfig-paths/-/vite-tsconfig-paths-4.3.2.tgz", + "integrity": "sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==", "dependencies": { "debug": "^4.1.1", "globrex": "^0.1.2", - "tsconfck": "^3.0.1" + "tsconfck": "^3.0.3" }, "peerDependencies": { "vite": "*" @@ -15570,132 +18584,225 @@ } } }, - "node_modules/vite-tsconfig-paths/node_modules/tsconfck": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/tsconfck/-/tsconfck-3.0.1.tgz", - "integrity": "sha512-7ppiBlF3UEddCLeI1JRx5m2Ryq+xk4JrZuq4EuYXykipebaq1dV0Fhgr1hb7CkmHt32QSgOZlcqVLEtHBG4/mg==", + "node_modules/vitest": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-1.4.0.tgz", + "integrity": "sha512-gujzn0g7fmwf83/WzrDTnncZt2UiXP41mHuFYFrdwaLRVQ6JYQEiME2IfEjU3vcFL3VKa75XhI3lFgn+hfVsQw==", + "dev": true, + "dependencies": { + "@vitest/expect": "1.4.0", + "@vitest/runner": "1.4.0", + "@vitest/snapshot": "1.4.0", + "@vitest/spy": "1.4.0", + "@vitest/utils": "1.4.0", + "acorn-walk": "^8.3.2", + "chai": "^4.3.10", + "debug": "^4.3.4", + "execa": "^8.0.1", + "local-pkg": "^0.5.0", + "magic-string": "^0.30.5", + "pathe": "^1.1.1", + "picocolors": "^1.0.0", + "std-env": "^3.5.0", + "strip-literal": "^2.0.0", + "tinybench": "^2.5.1", + "tinypool": "^0.8.2", + "vite": "^5.0.0", + "vite-node": "1.4.0", + "why-is-node-running": "^2.2.2" + }, "bin": { - "tsconfck": "bin/tsconfck.js" + "vitest": "vitest.mjs" }, "engines": { - "node": "^18 || >=20" + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" }, "peerDependencies": { - "typescript": "^5.0.0" + "@edge-runtime/vm": "*", + "@types/node": "^18.0.0 || >=20.0.0", + "@vitest/browser": "1.4.0", + "@vitest/ui": "1.4.0", + "happy-dom": "*", + "jsdom": "*" }, "peerDependenciesMeta": { - "typescript": { + "@edge-runtime/vm": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { "optional": true } } }, - "node_modules/vite-tsconfig-paths/node_modules/typescript": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", - "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", - "optional": true, - "peer": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" + "node_modules/vitest/node_modules/execa": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", + "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^8.0.1", + "human-signals": "^5.0.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^4.1.0", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": ">=16.17" + }, + "funding": { + "url": 
"https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/vitest/node_modules/get-stream": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", + "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", + "dev": true, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/vitest/node_modules/human-signals": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", + "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", + "dev": true, + "engines": { + "node": ">=16.17.0" + } + }, + "node_modules/vitest/node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/vitest/node_modules/mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/vitest/node_modules/npm-run-path": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", + "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "dev": true, + "dependencies": { + "path-key": "^4.0.0" }, "engines": { - "node": ">=14.17" + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/vitest": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-0.31.4.tgz", - "integrity": "sha512-GoV0VQPmWrUFOZSg3RpQAPN+LPmHg2/gxlMNJlyxJihkz6qReHDV6b0pPDcqFLNEPya4tWJ1pgwUNP9MLmUfvQ==", + "node_modules/vitest/node_modules/onetime": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", "dev": true, "dependencies": { - "@types/chai": "^4.3.5", - "@types/chai-subset": "^1.3.3", - "@types/node": "*", - "@vitest/expect": "0.31.4", - "@vitest/runner": "0.31.4", - "@vitest/snapshot": "0.31.4", - "@vitest/spy": "0.31.4", - "@vitest/utils": "0.31.4", - "acorn": "^8.8.2", - "acorn-walk": "^8.2.0", - "cac": "^6.7.14", - "chai": "^4.3.7", - "concordance": "^5.0.4", - "debug": "^4.3.4", - "local-pkg": "^0.4.3", - "magic-string": "^0.30.0", - "pathe": "^1.1.0", - "picocolors": "^1.0.0", - "std-env": "^3.3.2", - "strip-literal": "^1.0.1", - "tinybench": "^2.5.0", - "tinypool": "^0.5.0", - "vite": "^3.0.0 || ^4.0.0", - "vite-node": "0.31.4", - "why-is-node-running": "^2.2.2" + "mimic-fn": "^4.0.0" }, - "bin": { - "vitest": "vitest.mjs" + "engines": { + "node": ">=12" }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/vitest/node_modules/path-key": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true, "engines": { - "node": ">=v14.18.0" + "node": ">=12" }, "funding": { - "url": "https://opencollective.com/vitest" + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/vitest/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" }, - "peerDependencies": { - "@edge-runtime/vm": "*", - "@vitest/browser": "*", - "@vitest/ui": "*", - "happy-dom": "*", - "jsdom": "*", - "playwright": "*", - "safaridriver": "*", - "webdriverio": "*" + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/vitest/node_modules/strip-final-newline": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "dev": true, + "engines": { + "node": ">=12" }, - "peerDependenciesMeta": { - "@edge-runtime/vm": { - "optional": true - }, - "@vitest/browser": { - "optional": true - }, - "@vitest/ui": { - "optional": true - }, - "happy-dom": { - "optional": true - }, - "jsdom": { - "optional": true - }, - "playwright": { - "optional": true - }, - "safaridriver": { - "optional": true - }, - "webdriverio": { - "optional": true - } + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/vscode-languageserver-types": { + "version": "3.17.5", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", + "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==" + }, "node_modules/w3c-keyname": { "version": "2.2.8", "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==" }, "node_modules/w3c-xmlserializer": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-4.0.0.tgz", - "integrity": "sha512-d+BFHzbiCx6zGfz0HyQ6Rg69w9k19nviJspaj4yNscGjrHu94sVP+aRm75yEbCh+r2/yR+7q6hux9LVtbuTGBw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz", + "integrity": "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==", "dev": true, "dependencies": { - "xml-name-validator": "^4.0.0" + "xml-name-validator": "^5.0.0" }, "engines": { - "node": ">=14" + "node": ">=18" } }, "node_modules/warning": { @@ -15716,9 +18823,9 @@ } }, "node_modules/web-streams-polyfill": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.2.tgz", - "integrity": "sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", + "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", "dev": true, "engines": { "node": ">= 8" @@ -15730,9 +18837,9 @@ 
"integrity": "sha512-sVWcwhU5mX6crfI5Vd2dC4qchyTqxV8URinzt25XqVh+bHEPGH4C3NPrNionCP7Obx59wrYEbNlw4Z8sjALzZg==" }, "node_modules/webcrypto-core": { - "version": "1.7.8", - "resolved": "https://registry.npmjs.org/webcrypto-core/-/webcrypto-core-1.7.8.tgz", - "integrity": "sha512-eBR98r9nQXTqXt/yDRtInszPMjTaSAMJAFDg2AHsgrnczawT1asx9YNBX6k5p+MekbPF4+s/UJJrr88zsTqkSg==", + "version": "1.7.9", + "resolved": "https://registry.npmjs.org/webcrypto-core/-/webcrypto-core-1.7.9.tgz", + "integrity": "sha512-FE+a4PPkOmBbgNDIyRmcHhgXn+2ClRl3JzJdDu/P4+B8y81LqKe6RAsI9b3lAOHe1T1BMkSjsRHTYRikImZnVA==", "dev": true, "dependencies": { "@peculiar/asn1-schema": "^2.3.8", @@ -15751,25 +18858,16 @@ "node": ">=12" } }, - "node_modules/well-known-symbols": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/well-known-symbols/-/well-known-symbols-2.0.0.tgz", - "integrity": "sha512-ZMjC3ho+KXo0BfJb7JgtQ5IBuvnShdlACNkKkdsqBmYw3bPAaJfPeYUo6tLUaT5tG/Gkh7xkpBhKRQ9e7pyg9Q==", - "dev": true, - "engines": { - "node": ">=6" - } - }, "node_modules/whatwg-encoding": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-2.0.0.tgz", - "integrity": "sha512-p41ogyeMUrw3jWclHWTQg1k05DSVXPLcVxRTYsXUk+ZooOCZLcoYgPZ/HL/D/N+uQPOtcp1me1WhBEaX02mhWg==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz", + "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==", "dev": true, "dependencies": { "iconv-lite": "0.6.3" }, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/whatwg-encoding/node_modules/iconv-lite": { @@ -15785,25 +18883,25 @@ } }, "node_modules/whatwg-mimetype": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz", - "integrity": "sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz", + "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==", "dev": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/whatwg-url": { - "version": "12.0.1", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-12.0.1.tgz", - "integrity": "sha512-Ed/LrqB8EPlGxjS+TrsXcpUond1mhccS3pchLhzSgPCnTimUCKj3IZE75pAs5m6heB2U2TMerKFUXheyHY+VDQ==", + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.0.0.tgz", + "integrity": "sha512-1lfMEm2IEr7RIV+f4lUNPOqfFL+pO+Xw3fJSqmjX9AbXcXcYOkCe1P6+9VBZB6n94af16NfZf+sSk0JCBZC9aw==", "dev": true, "dependencies": { - "tr46": "^4.1.1", + "tr46": "^5.0.0", "webidl-conversions": "^7.0.0" }, "engines": { - "node": ">=14" + "node": ">=18" } }, "node_modules/which": { @@ -15884,16 +18982,16 @@ "dev": true }, "node_modules/which-typed-array": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.13.tgz", - "integrity": "sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow==", + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", + "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", "dev": true, "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.4", + "available-typed-arrays": 
"^1.0.7", + "call-bind": "^1.0.7", "for-each": "^0.3.3", "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0" + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -15982,12 +19080,12 @@ } }, "node_modules/xml-name-validator": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-4.0.0.tgz", - "integrity": "sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz", + "integrity": "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==", "dev": true, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/xmlchars": { @@ -16011,12 +19109,14 @@ "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" }, "node_modules/yaml": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", - "dev": true, + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.1.tgz", + "integrity": "sha512-pIXzoImaqmfOrL7teGUBt/T7ZDnyeGBWyXQBvOVhLkWLN37GXv8NMLK406UY6dS51JfcQHsmcW5cJ441bHg6Lg==", + "bin": { + "yaml": "bin.mjs" + }, "engines": { - "node": ">= 6" + "node": ">= 14" } }, "node_modules/yaml-ast-parser": { @@ -16043,15 +19143,6 @@ "node": ">=12" } }, - "node_modules/yargs-parser": { - "version": "20.2.9", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", - "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", - "dev": true, - "engines": { - "node": ">=10" - } - }, "node_modules/yargs/node_modules/yargs-parser": { "version": "21.1.1", "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", @@ -16137,9 +19228,9 @@ } }, "@apollo/client": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/@apollo/client/-/client-3.9.0.tgz", - "integrity": "sha512-M6I7h9UF0MmW/eK1oTzuHewZRZmvNzvw4c7nXhvQnxIk0V0VeJWSmscRGQNtsNmk8WnBmgyV/1KOVNow2aOM8w==", + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/@apollo/client/-/client-3.9.10.tgz", + "integrity": "sha512-w8i/Lk1P0vvWZF0Xb00XPonn79/0rgRJ1vopBlVudVuy9QP29/NZXK0rI2xJIN6VrKuEqJZaVGJC+7k23I2sfA==", "requires": { "@graphql-typed-document-node/core": "^3.1.1", "@wry/caches": "^1.0.0", @@ -16149,7 +19240,7 @@ "hoist-non-react-statics": "^3.3.2", "optimism": "^0.18.0", "prop-types": "^15.7.2", - "rehackt": "0.0.3", + "rehackt": "0.0.6", "response-iterator": "^0.2.6", "symbol-observable": "^4.0.0", "ts-invariant": "^0.10.3", @@ -16343,9 +19434,9 @@ } }, "@babel/compat-data": { - "version": "7.23.5", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.23.5.tgz", - "integrity": "sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw==" + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.24.1.tgz", + "integrity": "sha512-Pc65opHDliVpRHuKfzI+gSA4zcgr65O4cl64fFJIWEEh8JoHIHh0Oez1Eo8Arz8zq/JhgKodQaxEwUPRtZylVA==" }, "@babel/core": { "version": "7.23.9", @@ -16384,11 +19475,18 @@ "version": "7.22.5", "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz", "integrity": 
"sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg==", - "dev": true, "requires": { "@babel/types": "^7.22.5" } }, + "@babel/helper-builder-binary-assignment-operator-visitor": { + "version": "7.22.15", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.22.15.tgz", + "integrity": "sha512-QkBXwGgaoC2GtGZRoma6kv7Szfv06khvhFav67ZExau2RaXzy8MpHSMO2PNoP2XtmQphJQRHFfg77Bq731Yizw==", + "requires": { + "@babel/types": "^7.22.15" + } + }, "@babel/helper-compilation-targets": { "version": "7.23.6", "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.23.6.tgz", @@ -16402,22 +19500,43 @@ } }, "@babel/helper-create-class-features-plugin": { - "version": "7.23.10", - "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.23.10.tgz", - "integrity": "sha512-2XpP2XhkXzgxecPNEEK8Vz8Asj9aRxt08oKOqtiZoqV2UGZ5T+EkyP9sXQ9nwMxBIG34a7jmasVqoMop7VdPUw==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.24.1.tgz", + "integrity": "sha512-1yJa9dX9g//V6fDebXoEfEsxkZHk3Hcbm+zLhyu6qVgYFLvmTALTeV+jNU9e5RnYtioBrGEOdoI2joMSNQ/+aA==", "requires": { "@babel/helper-annotate-as-pure": "^7.22.5", "@babel/helper-environment-visitor": "^7.22.20", "@babel/helper-function-name": "^7.23.0", "@babel/helper-member-expression-to-functions": "^7.23.0", "@babel/helper-optimise-call-expression": "^7.22.5", - "@babel/helper-replace-supers": "^7.22.20", + "@babel/helper-replace-supers": "^7.24.1", "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", "@babel/helper-split-export-declaration": "^7.22.6", "semver": "^6.3.1" } }, + "@babel/helper-create-regexp-features-plugin": { + "version": "7.22.15", + "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.22.15.tgz", + "integrity": "sha512-29FkPLFjn4TPEa3RE7GpW+qbE8tlsu3jntNYNfcGsc49LphF1PQIiD+vMZ1z1xVOKt+93khA9tc2JBs3kBjA7w==", + "requires": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "regexpu-core": "^5.3.1", + "semver": "^6.3.1" + } + }, + "@babel/helper-define-polyfill-provider": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.1.tgz", + "integrity": "sha512-o7SDgTJuvx5vLKD6SFvkydkSMBvahDKGiNJzG22IZYXhiqoe9efY7zocICBgzHV4IRg5wdgl2nEL/tulKIEIbA==", + "requires": { + "@babel/helper-compilation-targets": "^7.22.6", + "@babel/helper-plugin-utils": "^7.22.5", + "debug": "^4.1.1", + "lodash.debounce": "^4.0.8", + "resolve": "^1.14.2" + } + }, "@babel/helper-environment-visitor": { "version": "7.22.20", "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz", @@ -16444,17 +19563,16 @@ "version": "7.23.0", "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.23.0.tgz", "integrity": "sha512-6gfrPwh7OuT6gZyJZvd6WbTfrqAo7vm4xCzAXOusKqq/vWdKXphTpj5klHKNmRUU6/QRGlBsyU9mAIPaWHlqJA==", - "dev": true, "requires": { "@babel/types": "^7.23.0" } }, "@babel/helper-module-imports": { - "version": "7.22.15", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz", - 
"integrity": "sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w==", + "version": "7.24.3", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.3.tgz", + "integrity": "sha512-viKb0F9f2s0BCS22QSF308z/+1YWKV/76mwt61NBzS5izMzDPwdq1pTrzf+Li3npBWX9KdQbkeCt1jSAM7lZqg==", "requires": { - "@babel/types": "^7.22.15" + "@babel/types": "^7.24.0" } }, "@babel/helper-module-transforms": { @@ -16473,24 +19591,32 @@ "version": "7.22.5", "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz", "integrity": "sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw==", - "dev": true, "requires": { "@babel/types": "^7.22.5" } }, "@babel/helper-plugin-utils": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz", - "integrity": "sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg==" + "version": "7.24.0", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.0.tgz", + "integrity": "sha512-9cUznXMG0+FxRuJfvL82QlTqIzhVW9sL0KjMPHhAOOvpQGL8QtdxnBKILjBqxlHyliz0yCa1G903ZXI/FuHy2w==" }, - "@babel/helper-replace-supers": { + "@babel/helper-remap-async-to-generator": { "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.22.20.tgz", - "integrity": "sha512-qsW0In3dbwQUbK8kejJ4R7IHVGwHJlV6lpG6UA7a9hSa2YEiAib+N1T2kr6PEeUT+Fl7najmSOS6SmAwCHK6Tw==", - "dev": true, + "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.22.20.tgz", + "integrity": "sha512-pBGyV4uBqOns+0UvhsTO8qgl8hO89PmiDYv+/COyp1aeMcmfrfruz+/nCMFiYyFF/Knn0yfrC85ZzNFjembFTw==", + "requires": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-wrap-function": "^7.22.20" + } + }, + "@babel/helper-replace-supers": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.24.1.tgz", + "integrity": "sha512-QCR1UqC9BzG5vZl8BMicmZ28RuUBnHhAMddD8yHFHDRH9lLTZ9uUPehX8ctVPT8l0TKblJidqcgUUKGVrePleQ==", "requires": { "@babel/helper-environment-visitor": "^7.22.20", - "@babel/helper-member-expression-to-functions": "^7.22.15", + "@babel/helper-member-expression-to-functions": "^7.23.0", "@babel/helper-optimise-call-expression": "^7.22.5" } }, @@ -16506,7 +19632,6 @@ "version": "7.22.5", "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.22.5.tgz", "integrity": "sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q==", - "dev": true, "requires": { "@babel/types": "^7.22.5" } @@ -16534,6 +19659,16 @@ "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.23.5.tgz", "integrity": "sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw==" }, + "@babel/helper-wrap-function": { + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.22.20.tgz", + "integrity": "sha512-pms/UwkOpnQe/PDAEdV/d7dVCoBbB+R4FvYoHGZz+4VPcg7RtYy2KP7S2lbuWM6FCSgob5wshfGESbC/hzNXZw==", + "requires": { + "@babel/helper-function-name": 
"^7.22.5", + "@babel/template": "^7.22.15", + "@babel/types": "^7.22.19" + } + }, "@babel/helpers": { "version": "7.23.9", "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.23.9.tgz", @@ -16606,9 +19741,36 @@ } }, "@babel/parser": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.9.tgz", - "integrity": "sha512-9tcKgqKbs3xGJ+NtKF2ndOBBLVwPjl1SHxPQkd36r3Dlirw3xWUeGaTbqr7uGZcTaxkVNwc+03SVP7aCdWrTlA==" + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.1.tgz", + "integrity": "sha512-Zo9c7N3xdOIQrNip7Lc9wvRPzlRtovHVE4lkz8WEDr7uYh/GMQhSiIgFxGIArRHYdJE5kxtZjAf8rT0xhdLCzg==" + }, + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.24.1.tgz", + "integrity": "sha512-y4HqEnkelJIOQGd+3g1bTeKsA5c6qM7eOn7VggGVbBc0y8MLSKHacwcIE2PplNlQSj0PqS9rrXL/nkPVK+kUNg==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.24.1.tgz", + "integrity": "sha512-Hj791Ii4ci8HqnaKHAlLNs+zaLXb0EzSDhiAWp5VNlyvCNymYfacs64pxTxbH1znW/NcArSmwpmG9IKE/TUVVQ==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", + "@babel/plugin-transform-optional-chaining": "^7.24.1" + } + }, + "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.24.1.tgz", + "integrity": "sha512-m9m/fXsXLiHfwdgydIFnpk+7jlVbnvlK5B2EKiPdLUb6WX654ZaaEWJUjk8TftRbZpK0XibovlLWX4KIZhV6jw==", + "requires": { + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-plugin-utils": "^7.24.0" + } }, "@babel/plugin-proposal-class-properties": { "version": "7.18.6", @@ -16633,215 +19795,554 @@ "@babel/plugin-transform-parameters": "^7.20.7" } }, + "@babel/plugin-proposal-private-property-in-object": { + "version": "7.21.0-placeholder-for-preset-env.2", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz", + "integrity": "sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w==", + "requires": {} + }, + "@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, "@babel/plugin-syntax-class-properties": { "version": "7.12.13", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", - "dev": true, "requires": { "@babel/helper-plugin-utils": 
"^7.12.13" } }, + "@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", + "requires": { + "@babel/helper-plugin-utils": "^7.14.5" + } + }, + "@babel/plugin-syntax-dynamic-import": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", + "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-export-namespace-from": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz", + "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==", + "requires": { + "@babel/helper-plugin-utils": "^7.8.3" + } + }, "@babel/plugin-syntax-flow": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.23.3.tgz", - "integrity": "sha512-YZiAIpkJAwQXBJLIQbRFayR5c+gJ35Vcz3bg954k7cd73zqjvhacJuL9RbrzPz8qPmZdgqP6EUKwy0PCNhaaPA==", + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.24.1.tgz", + "integrity": "sha512-sxi2kLTI5DeW5vDtMUsk4mTPwvlUDbjOnoWayhynCwrw4QXRld4QEYwqzY8JmQXaJUtgUuCIurtSRH5sn4c7mA==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" } }, "@babel/plugin-syntax-import-assertions": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.23.3.tgz", - "integrity": "sha512-lPgDSU+SJLK3xmFDTV2ZRQAiM7UuUjGidwBywFavObCiZc1BeAAcMtHJKUya92hPHO+at63JJPLygilZard8jw==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.24.1.tgz", + "integrity": "sha512-IuwnI5XnuF189t91XbxmXeCDz3qs6iDRO7GJ++wcfgeXNs/8FmIlKcpDSXNVyuLQxlwvskmI3Ct73wUODkJBlQ==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-syntax-import-attributes": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.24.1.tgz", + "integrity": "sha512-zhQTMH0X2nVLnb04tz+s7AMuasX8U0FnpE+nHTOhSOINjWMnopoZTxtIKsd45n4GQ/HIZLyfIpoul8e2m0DnRA==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "requires": { + "@babel/helper-plugin-utils": "^7.10.4" + } + }, + "@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "requires": { + "@babel/helper-plugin-utils": 
"^7.8.0" } }, "@babel/plugin-syntax-jsx": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.23.3.tgz", - "integrity": "sha512-EB2MELswq55OHUoRZLGg/zC7QWUKfNLpE57m/S2yr1uEneIgsTgrSzXP3NXEsMkVn76OlaVVnzN+ugObuYGwhg==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.24.1.tgz", + "integrity": "sha512-2eCtxZXf+kbkMIsXS4poTvT4Yu5rXiRa+9xGVT56raghjmBTKMpFNc9R4IDiB4emao9eO22Ox7CxuJG7BgExqA==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "requires": { + "@babel/helper-plugin-utils": "^7.10.4" + } + }, + "@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "requires": { + "@babel/helper-plugin-utils": "^7.10.4" } }, "@babel/plugin-syntax-object-rest-spread": { "version": "7.8.3", "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", - "dev": true, "requires": { "@babel/helper-plugin-utils": "^7.8.0" } }, + "@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", + "requires": { + "@babel/helper-plugin-utils": "^7.14.5" + } + }, + "@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "requires": { + "@babel/helper-plugin-utils": "^7.14.5" + } + }, + "@babel/plugin-syntax-typescript": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.24.1.tgz", + "integrity": "sha512-Yhnmvy5HZEnHUty6i++gcfH1/l68AHnItFHnaCv6hn9dNh0hQvvQJsxpi4BMBFN5DLeHBuucT/0DgzXif/OyRw==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-syntax-unicode-sets-regex": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz", + "integrity": "sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg==", + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" + } + }, "@babel/plugin-transform-arrow-functions": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.23.3.tgz", - "integrity": "sha512-NzQcQrzaQPkaEwoTm4Mhyl8jI1huEL/WWIEvudjTCMJ9aBZNpsJbMASx7EQECtQQPS/DcnFpo0FIh3LvEO9cxQ==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.24.1.tgz", + "integrity": "sha512-ngT/3NkRhsaep9ck9uj2Xhv9+xB1zShY3tM3g6om4xxCELwCDN4g4Aq5dRn48+0hasAql7s2hdBOysCfNpr4fw==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-async-generator-functions": { + "version": "7.24.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.24.3.tgz", + "integrity": "sha512-Qe26CMYVjpQxJ8zxM1340JFNjZaF+ISWpr1Kt/jGo+ZTUzKkfw/pphEWbRCb+lmSM6k/TOgfYLvmbHkUQ0asIg==", + "requires": { + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-remap-async-to-generator": "^7.22.20", + "@babel/plugin-syntax-async-generators": "^7.8.4" + } + }, + "@babel/plugin-transform-async-to-generator": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.24.1.tgz", + "integrity": "sha512-AawPptitRXp1y0n4ilKcGbRYWfbbzFWz2NqNu7dacYDtFtz0CMjG64b3LQsb3KIgnf4/obcUL78hfaOS7iCUfw==", + "requires": { + "@babel/helper-module-imports": "^7.24.1", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-remap-async-to-generator": "^7.22.20" } }, "@babel/plugin-transform-block-scoped-functions": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.23.3.tgz", - "integrity": "sha512-vI+0sIaPIO6CNuM9Kk5VmXcMVRiOpDh7w2zZt9GXzmE/9KD70CUEVhvPR/etAeNK/FAEkhxQtXOzVF3EuRL41A==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.24.1.tgz", + "integrity": "sha512-TWWC18OShZutrv9C6mye1xwtam+uNi2bnTOCBUd5sZxyHOiWbU6ztSROofIMrK84uweEZC219POICK/sTYwfgg==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5" + 
"@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-block-scoping": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.24.1.tgz", + "integrity": "sha512-h71T2QQvDgM2SmT29UYU6ozjMlAt7s7CSs5Hvy8f8cf/GM/Z4a2zMfN+fjVGaieeCrXR3EdQl6C4gQG+OgmbKw==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-class-properties": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.24.1.tgz", + "integrity": "sha512-OMLCXi0NqvJfORTaPQBwqLXHhb93wkBKZ4aNwMl6WtehO7ar+cmp+89iPEQPqxAnxsOKTaMcs3POz3rKayJ72g==", + "requires": { + "@babel/helper-create-class-features-plugin": "^7.24.1", + "@babel/helper-plugin-utils": "^7.24.0" } }, - "@babel/plugin-transform-block-scoping": { - "version": "7.23.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.23.4.tgz", - "integrity": "sha512-0QqbP6B6HOh7/8iNR4CQU2Th/bbRtBp4KS9vcaZd1fZ0wSh5Fyssg0UCIHwxh+ka+pNDREbVLQnHCMHKZfPwfw==", - "dev": true, + "@babel/plugin-transform-class-static-block": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.24.1.tgz", + "integrity": "sha512-FUHlKCn6J3ERiu8Dv+4eoz7w8+kFLSyeVG4vDAikwADGjUCoHw/JHokyGtr8OR4UjpwPVivyF+h8Q5iv/JmrtA==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-create-class-features-plugin": "^7.24.1", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-class-static-block": "^7.14.5" } }, "@babel/plugin-transform-classes": { - "version": "7.23.8", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.23.8.tgz", - "integrity": "sha512-yAYslGsY1bX6Knmg46RjiCiNSwJKv2IUC8qOdYKqMMr0491SXFhcHqOdRDeCRohOOIzwN/90C6mQ9qAKgrP7dg==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.24.1.tgz", + "integrity": "sha512-ZTIe3W7UejJd3/3R4p7ScyyOoafetUShSf4kCqV0O7F/RiHxVj/wRaRnQlrGwflvcehNA8M42HkAiEDYZu2F1Q==", "requires": { "@babel/helper-annotate-as-pure": "^7.22.5", "@babel/helper-compilation-targets": "^7.23.6", "@babel/helper-environment-visitor": "^7.22.20", "@babel/helper-function-name": "^7.23.0", - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-replace-supers": "^7.22.20", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-replace-supers": "^7.24.1", "@babel/helper-split-export-declaration": "^7.22.6", "globals": "^11.1.0" } }, "@babel/plugin-transform-computed-properties": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.23.3.tgz", - "integrity": "sha512-dTj83UVTLw/+nbiHqQSFdwO9CbTtwq1DsDqm3CUEtDrZNET5rT5E6bIdTlOftDTDLMYxvxHNEYO4B9SLl8SLZw==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.24.1.tgz", + "integrity": "sha512-5pJGVIUfJpOS+pAqBQd+QMaTD2vCL/HcePooON6pDpHgRp4gNRmzyHTPIkXntwKsq3ayUFVfJaIKPw2pOkOcTw==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/template": "^7.22.15" + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/template": "^7.24.0" } }, 
"@babel/plugin-transform-destructuring": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.23.3.tgz", - "integrity": "sha512-n225npDqjDIr967cMScVKHXJs7rout1q+tt50inyBCPkyZ8KxeI6d+GIbSBTT/w/9WdlWDOej3V9HE5Lgk57gw==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.24.1.tgz", + "integrity": "sha512-ow8jciWqNxR3RYbSNVuF4U2Jx130nwnBnhRw6N6h1bOejNkABmcI5X5oz29K4alWX7vf1C+o6gtKXikzRKkVdw==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-dotall-regex": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.24.1.tgz", + "integrity": "sha512-p7uUxgSoZwZ2lPNMzUkqCts3xlp8n+o05ikjy7gbtFJSt9gdU88jAmtfmOxHM14noQXBxfgzf2yRWECiNVhTCw==", + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.22.15", + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-duplicate-keys": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.24.1.tgz", + "integrity": "sha512-msyzuUnvsjsaSaocV6L7ErfNsa5nDWL1XKNnDePLgmz+WdU4w/J8+AxBMrWfi9m4IxfL5sZQKUPQKDQeeAT6lA==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-dynamic-import": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.24.1.tgz", + "integrity": "sha512-av2gdSTyXcJVdI+8aFZsCAtR29xJt0S5tas+Ef8NvBNmD1a+N/3ecMLeMBgfcK+xzsjdLDT6oHt+DFPyeqUbDA==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-dynamic-import": "^7.8.3" + } + }, + "@babel/plugin-transform-exponentiation-operator": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.24.1.tgz", + "integrity": "sha512-U1yX13dVBSwS23DEAqU+Z/PkwE9/m7QQy8Y9/+Tdb8UWYaGNDYwTLi19wqIAiROr8sXVum9A/rtiH5H0boUcTw==", + "requires": { + "@babel/helper-builder-binary-assignment-operator-visitor": "^7.22.15", + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-export-namespace-from": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.24.1.tgz", + "integrity": "sha512-Ft38m/KFOyzKw2UaJFkWG9QnHPG/Q/2SkOrRk4pNBPg5IPZ+dOxcmkK5IyuBcxiNPyyYowPGUReyBvrvZs7IlQ==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-export-namespace-from": "^7.8.3" } }, "@babel/plugin-transform-flow-strip-types": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.23.3.tgz", - "integrity": "sha512-26/pQTf9nQSNVJCrLB1IkHUKyPxR+lMrH2QDPG89+Znu9rAMbtrybdbWeE9bb7gzjmE5iXHEY+e0HUwM6Co93Q==", + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.24.1.tgz", + "integrity": "sha512-iIYPIWt3dUmUKKE10s3W+jsQ3icFkw0JyRVyY1B7G4yK/nngAOHLVx8xlhA6b/Jzl/Y0nis8gjqhqKtRDQqHWQ==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/plugin-syntax-flow": 
"^7.23.3" + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-flow": "^7.24.1" } }, "@babel/plugin-transform-for-of": { - "version": "7.23.6", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.23.6.tgz", - "integrity": "sha512-aYH4ytZ0qSuBbpfhuofbg/e96oQ7U2w1Aw/UQmKT+1l39uEhUPoFS3fHevDc1G0OvewyDudfMKY1OulczHzWIw==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.24.1.tgz", + "integrity": "sha512-OxBdcnF04bpdQdR3i4giHZNZQn7cm8RQKcSwA17wAAqEELo1ZOwp5FFgeptWUQXFyT9kwHo10aqqauYkRZPCAg==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5", + "@babel/helper-plugin-utils": "^7.24.0", "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5" } }, "@babel/plugin-transform-function-name": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.23.3.tgz", - "integrity": "sha512-I1QXp1LxIvt8yLaib49dRW5Okt7Q4oaxao6tFVKS/anCdEOMtYwWVKoiOA1p34GOWIZjUK0E+zCp7+l1pfQyiw==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.24.1.tgz", + "integrity": "sha512-BXmDZpPlh7jwicKArQASrj8n22/w6iymRnvHYYd2zO30DbE277JO20/7yXJT3QxDPtiQiOxQBbZH4TpivNXIxA==", "requires": { - "@babel/helper-compilation-targets": "^7.22.15", + "@babel/helper-compilation-targets": "^7.23.6", "@babel/helper-function-name": "^7.23.0", - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-json-strings": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.24.1.tgz", + "integrity": "sha512-U7RMFmRvoasscrIFy5xA4gIp8iWnWubnKkKuUGJjsuOH7GfbMkB+XZzeslx2kLdEGdOJDamEmCqOks6e8nv8DQ==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-json-strings": "^7.8.3" } }, "@babel/plugin-transform-literals": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.23.3.tgz", - "integrity": "sha512-wZ0PIXRxnwZvl9AYpqNUxpZ5BiTGrYt7kueGQ+N5FiQ7RCOD4cm8iShd6S6ggfVIWaJf2EMk8eRzAh52RfP4rQ==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.24.1.tgz", + "integrity": "sha512-zn9pwz8U7nCqOYIiBaOxoQOtYmMODXTJnkxG4AtX8fPmnCRYWBOHD0qcpwS9e2VDSp1zNJYpdnFMIKb8jmwu6g==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-logical-assignment-operators": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.24.1.tgz", + "integrity": "sha512-OhN6J4Bpz+hIBqItTeWJujDOfNP+unqv/NJgyhlpSqgBTPm37KkMmZV6SYcOj+pnDbdcl1qRGV/ZiIjX9Iy34w==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" } }, "@babel/plugin-transform-member-expression-literals": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.23.3.tgz", - "integrity": 
"sha512-sC3LdDBDi5x96LA+Ytekz2ZPk8i/Ck+DEuDbRAll5rknJ5XRTSaPKEYwomLcs1AA8wg9b3KjIQRsnApj+q51Ag==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.24.1.tgz", + "integrity": "sha512-4ojai0KysTWXzHseJKa1XPNXKRbuUrhkOPY4rEGeR+7ChlJVKxFa3H3Bz+7tWaGKgJAXUWKOGmltN+u9B3+CVg==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-modules-amd": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.24.1.tgz", + "integrity": "sha512-lAxNHi4HVtjnHd5Rxg3D5t99Xm6H7b04hUS7EHIXcUl2EV4yl1gWdqZrNzXnSrHveL9qMdbODlLF55mvgjAfaQ==", + "requires": { + "@babel/helper-module-transforms": "^7.23.3", + "@babel/helper-plugin-utils": "^7.24.0" } }, "@babel/plugin-transform-modules-commonjs": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.23.3.tgz", - "integrity": "sha512-aVS0F65LKsdNOtcz6FRCpE4OgsP2OFnW46qNxNIX9h3wuzaNcSQsJysuMwqSibC98HPrf2vCgtxKNwS0DAlgcA==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.24.1.tgz", + "integrity": "sha512-szog8fFTUxBfw0b98gEWPaEqF42ZUD/T3bkynW/wtgx2p/XCP55WEsb+VosKceRSd6njipdZvNogqdtI4Q0chw==", "requires": { "@babel/helper-module-transforms": "^7.23.3", - "@babel/helper-plugin-utils": "^7.22.5", + "@babel/helper-plugin-utils": "^7.24.0", "@babel/helper-simple-access": "^7.22.5" } }, + "@babel/plugin-transform-modules-systemjs": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.24.1.tgz", + "integrity": "sha512-mqQ3Zh9vFO1Tpmlt8QPnbwGHzNz3lpNEMxQb1kAemn/erstyqw1r9KeOlOfo3y6xAnFEcOv2tSyrXfmMk+/YZA==", + "requires": { + "@babel/helper-hoist-variables": "^7.22.5", + "@babel/helper-module-transforms": "^7.23.3", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-validator-identifier": "^7.22.20" + } + }, + "@babel/plugin-transform-modules-umd": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.24.1.tgz", + "integrity": "sha512-tuA3lpPj+5ITfcCluy6nWonSL7RvaG0AOTeAuvXqEKS34lnLzXpDb0dcP6K8jD0zWZFNDVly90AGFJPnm4fOYg==", + "requires": { + "@babel/helper-module-transforms": "^7.23.3", + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-named-capturing-groups-regex": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.22.5.tgz", + "integrity": "sha512-YgLLKmS3aUBhHaxp5hi1WJTgOUb/NCuDHzGT9z9WTt3YG+CPRhJs6nprbStx6DnWM4dh6gt7SU3sZodbZ08adQ==", + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.22.5", + "@babel/helper-plugin-utils": "^7.22.5" + } + }, + "@babel/plugin-transform-new-target": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.24.1.tgz", + "integrity": "sha512-/rurytBM34hYy0HKZQyA0nHbQgQNFm4Q/BOc9Hflxi2X3twRof7NaE5W46j4kQitm7SvACVRXsa6N/tSZxvPug==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + 
"@babel/plugin-transform-nullish-coalescing-operator": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.24.1.tgz", + "integrity": "sha512-iQ+caew8wRrhCikO5DrUYx0mrmdhkaELgFa+7baMcVuhxIkN7oxt06CZ51D65ugIb1UWRQ8oQe+HXAVM6qHFjw==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" + } + }, + "@babel/plugin-transform-numeric-separator": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.24.1.tgz", + "integrity": "sha512-7GAsGlK4cNL2OExJH1DzmDeKnRv/LXq0eLUSvudrehVA5Rgg4bIrqEUW29FbKMBRT0ztSqisv7kjP+XIC4ZMNw==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-numeric-separator": "^7.10.4" + } + }, + "@babel/plugin-transform-object-rest-spread": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.24.1.tgz", + "integrity": "sha512-XjD5f0YqOtebto4HGISLNfiNMTTs6tbkFf2TOqJlYKYmbo+mN9Dnpl4SRoofiziuOWMIyq3sZEUqLo3hLITFEA==", + "requires": { + "@babel/helper-compilation-targets": "^7.23.6", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-transform-parameters": "^7.24.1" + } + }, "@babel/plugin-transform-object-super": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.23.3.tgz", - "integrity": "sha512-BwQ8q0x2JG+3lxCVFohg+KbQM7plfpBwThdW9A6TMtWwLsbDA01Ek2Zb/AgDN39BiZsExm4qrXxjk+P1/fzGrA==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.24.1.tgz", + "integrity": "sha512-oKJqR3TeI5hSLRxudMjFQ9re9fBVUU0GICqM3J1mi8MqlhVr6hC/ZN4ttAyMuQR6EZZIY6h/exe5swqGNNIkWQ==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5", - "@babel/helper-replace-supers": "^7.22.20" + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-replace-supers": "^7.24.1" + } + }, + "@babel/plugin-transform-optional-catch-binding": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.24.1.tgz", + "integrity": "sha512-oBTH7oURV4Y+3EUrf6cWn1OHio3qG/PVwO5J03iSJmBg6m2EhKjkAu/xuaXaYwWW9miYtvbWv4LNf0AmR43LUA==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" + } + }, + "@babel/plugin-transform-optional-chaining": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.24.1.tgz", + "integrity": "sha512-n03wmDt+987qXwAgcBlnUUivrZBPZ8z1plL0YvgQalLm+ZE5BMhGm94jhxXtA1wzv1Cu2aaOv1BM9vbVttrzSg==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5", + "@babel/plugin-syntax-optional-chaining": "^7.8.3" } }, "@babel/plugin-transform-parameters": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.23.3.tgz", - "integrity": "sha512-09lMt6UsUb3/34BbECKVbVwrT9bO6lILWln237z7sLaWnMsTi7Yc9fhX5DLpkJzAGfaReXI22wP41SZmnAA3Vw==", - "dev": true, + "version": "7.24.1", + 
"resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.24.1.tgz", + "integrity": "sha512-8Jl6V24g+Uw5OGPeWNKrKqXPDw2YDjLc53ojwfMcKwlEoETKU9rU0mHUtcg9JntWI/QYzGAXNWEcVHZ+fR+XXg==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-private-methods": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.24.1.tgz", + "integrity": "sha512-tGvisebwBO5em4PaYNqt4fkw56K2VALsAbAakY0FjTYqJp7gfdrgr7YX76Or8/cpik0W6+tj3rZ0uHU9Oil4tw==", + "requires": { + "@babel/helper-create-class-features-plugin": "^7.24.1", + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-private-property-in-object": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.24.1.tgz", + "integrity": "sha512-pTHxDVa0BpUbvAgX3Gat+7cSciXqUcY9j2VZKTbSB6+VQGpNgNO9ailxTGHSXlqOnX1Hcx1Enme2+yv7VqP9bg==", + "requires": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/helper-create-class-features-plugin": "^7.24.1", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5" } }, "@babel/plugin-transform-property-literals": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.23.3.tgz", - "integrity": "sha512-jR3Jn3y7cZp4oEWPFAlRsSWjxKe4PZILGBSd4nis1TsC5qeSpb+nrtihJuDhNI7QHiVbUaiXa0X2RZY3/TI6Nw==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.24.1.tgz", + "integrity": "sha512-LetvD7CrHmEx0G442gOomRr66d7q8HzzGGr4PMHGr+5YIm6++Yke+jxj246rpvsbyhJwCLxcTn6zW1P1BSenqA==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-react-constant-elements": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.24.1.tgz", + "integrity": "sha512-QXp1U9x0R7tkiGB0FOk8o74jhnap0FlZ5gNkRIWdG3eP+SvMFg118e1zaWewDzgABb106QSKpVsD3Wgd8t6ifA==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0" } }, "@babel/plugin-transform-react-display-name": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.23.3.tgz", - "integrity": "sha512-GnvhtVfA2OAtzdX58FJxU19rhoGeQzyVndw3GgtdECQvQFXPEZIOVULHVZGAYmOgmqjXpVpfocAbSjh99V/Fqw==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.24.1.tgz", + "integrity": "sha512-mvoQg2f9p2qlpDQRBC7M3c3XTr0k7cp/0+kFKKO/7Gtu0LSw16eKB+Fabe2bDT/UpsyasTBBkAnbdsLrkD5XMw==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" } }, "@babel/plugin-transform-react-jsx": { "version": "7.23.4", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.23.4.tgz", "integrity": "sha512-5xOpoPguCZCRbo/JeHlloSkTA8Bld1J/E1/kLfD1nsuiW1m8tduTA1ERCgIZokDflX/IBzKcqR3l7VlRgiIfHA==", - "dev": true, "requires": { "@babel/helper-annotate-as-pure": "^7.22.5", 
"@babel/helper-module-imports": "^7.22.15", @@ -16850,6 +20351,14 @@ "@babel/types": "^7.23.4" } }, + "@babel/plugin-transform-react-jsx-development": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.22.5.tgz", + "integrity": "sha512-bDhuzwWMuInwCYeDeMzyi7TaBgRQei6DqxhbyniL7/VG4RSS7HtSL2QbY4eESy1KJqlWt8g3xeEBGPuo+XqC8A==", + "requires": { + "@babel/plugin-transform-react-jsx": "^7.22.5" + } + }, "@babel/plugin-transform-react-jsx-self": { "version": "7.23.3", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.23.3.tgz", @@ -16866,50 +20375,262 @@ "@babel/helper-plugin-utils": "^7.22.5" } }, + "@babel/plugin-transform-react-pure-annotations": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.24.1.tgz", + "integrity": "sha512-+pWEAaDJvSm9aFvJNpLiM2+ktl2Sn2U5DdyiWdZBxmLc6+xGt88dvFqsHiAiDS+8WqUwbDfkKz9jRxK3M0k+kA==", + "requires": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-regenerator": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.24.1.tgz", + "integrity": "sha512-sJwZBCzIBE4t+5Q4IGLaaun5ExVMRY0lYwos/jNecjMrVCygCdph3IKv0tkP5Fc87e/1+bebAmEAGBfnRD+cnw==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0", + "regenerator-transform": "^0.15.2" + } + }, + "@babel/plugin-transform-reserved-words": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.24.1.tgz", + "integrity": "sha512-JAclqStUfIwKN15HrsQADFgeZt+wexNQ0uLhuqvqAUFoqPMjEcFCYZBhq0LUdz6dZK/mD+rErhW71fbx8RYElg==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0" + } + }, "@babel/plugin-transform-shorthand-properties": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.23.3.tgz", - "integrity": "sha512-ED2fgqZLmexWiN+YNFX26fx4gh5qHDhn1O2gvEhreLW2iI63Sqm4llRLCXALKrCnbN4Jy0VcMQZl/SAzqug/jg==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.24.1.tgz", + "integrity": "sha512-LyjVB1nsJ6gTTUKRjRWx9C1s9hE7dLfP/knKdrfeH9UPtAGjYGgxIbFfx7xyLIEWs7Xe1Gnf8EWiUqfjLhInZA==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" } }, "@babel/plugin-transform-spread": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.23.3.tgz", - "integrity": "sha512-VvfVYlrlBVu+77xVTOAoxQ6mZbnIq5FM0aGBSFEcIh03qHf+zNqA4DC/3XMUozTg7bZV3e3mZQ0i13VB6v5yUg==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.24.1.tgz", + "integrity": "sha512-KjmcIM+fxgY+KxPVbjelJC6hrH1CgtPmTvdXAfn3/a9CnWGSTY7nH4zm5+cjmWJybdcPSsD0++QssDsjcpe47g==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5", + "@babel/helper-plugin-utils": "^7.24.0", "@babel/helper-skip-transparent-expression-wrappers": "^7.22.5" } }, + "@babel/plugin-transform-sticky-regex": { + "version": "7.24.1", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.24.1.tgz", + "integrity": "sha512-9v0f1bRXgPVcPrngOQvLXeGNNVLc8UjMVfebo9ka0WF3/7+aVUHmaJVT3sa0XCzEFioPfPHZiOcYG9qOsH63cw==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0" + } + }, "@babel/plugin-transform-template-literals": { - "version": "7.23.3", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.23.3.tgz", - "integrity": "sha512-Flok06AYNp7GV2oJPZZcP9vZdszev6vPBkHLwxwSpaIqx75wn6mUd3UFWsSsA0l8nXAKkyCmL/sR02m8RYGeHg==", - "dev": true, + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.24.1.tgz", + "integrity": "sha512-WRkhROsNzriarqECASCNu/nojeXCDTE/F2HmRgOzi7NGvyfYGq1NEjKBK3ckLfRgGc6/lPAqP0vDOSw3YtG34g==", "requires": { - "@babel/helper-plugin-utils": "^7.22.5" + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-typeof-symbol": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.24.1.tgz", + "integrity": "sha512-CBfU4l/A+KruSUoW+vTQthwcAdwuqbpRNB8HQKlZABwHRhsdHZ9fezp4Sn18PeAlYxTNiLMlx4xUBV3AWfg1BA==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-typescript": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.24.1.tgz", + "integrity": "sha512-liYSESjX2fZ7JyBFkYG78nfvHlMKE6IpNdTVnxmlYUR+j5ZLsitFbaAE+eJSK2zPPkNWNw4mXL51rQ8WrvdK0w==", + "requires": { + "@babel/helper-annotate-as-pure": "^7.22.5", + "@babel/helper-create-class-features-plugin": "^7.24.1", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/plugin-syntax-typescript": "^7.24.1" + } + }, + "@babel/plugin-transform-unicode-escapes": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.24.1.tgz", + "integrity": "sha512-RlkVIcWT4TLI96zM660S877E7beKlQw7Ig+wqkKBiWfj0zH5Q4h50q6er4wzZKRNSYpfo6ILJ+hrJAGSX2qcNw==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-unicode-property-regex": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.24.1.tgz", + "integrity": "sha512-Ss4VvlfYV5huWApFsF8/Sq0oXnGO+jB+rijFEFugTd3cwSObUSnUi88djgR5528Csl0uKlrI331kRqe56Ov2Ng==", + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.22.15", + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-unicode-regex": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.24.1.tgz", + "integrity": "sha512-2A/94wgZgxfTsiLaQ2E36XAOdcZmGAaEEgVmxQWwZXWkGhvoHbaqXcKnU8zny4ycpu3vNqg0L/PcCiYtHtA13g==", + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.22.15", + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/plugin-transform-unicode-sets-regex": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.24.1.tgz", + "integrity": "sha512-fqj4WuzzS+ukpgerpAoOnMfQXwUHFxXUZUE84oL2Kao2N8uSlvcpnAidKASgsNgzZHBsHWvcm8s9FPWUhAb8fA==", + "requires": { + 
"@babel/helper-create-regexp-features-plugin": "^7.22.15", + "@babel/helper-plugin-utils": "^7.24.0" + } + }, + "@babel/preset-env": { + "version": "7.24.3", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.24.3.tgz", + "integrity": "sha512-fSk430k5c2ff8536JcPvPWK4tZDwehWLGlBp0wrsBUjZVdeQV6lePbwKWZaZfK2vnh/1kQX1PzAJWsnBmVgGJA==", + "requires": { + "@babel/compat-data": "^7.24.1", + "@babel/helper-compilation-targets": "^7.23.6", + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-validator-option": "^7.23.5", + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.24.1", + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.24.1", + "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.24.1", + "@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2", + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-dynamic-import": "^7.8.3", + "@babel/plugin-syntax-export-namespace-from": "^7.8.3", + "@babel/plugin-syntax-import-assertions": "^7.24.1", + "@babel/plugin-syntax-import-attributes": "^7.24.1", + "@babel/plugin-syntax-import-meta": "^7.10.4", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5", + "@babel/plugin-syntax-unicode-sets-regex": "^7.18.6", + "@babel/plugin-transform-arrow-functions": "^7.24.1", + "@babel/plugin-transform-async-generator-functions": "^7.24.3", + "@babel/plugin-transform-async-to-generator": "^7.24.1", + "@babel/plugin-transform-block-scoped-functions": "^7.24.1", + "@babel/plugin-transform-block-scoping": "^7.24.1", + "@babel/plugin-transform-class-properties": "^7.24.1", + "@babel/plugin-transform-class-static-block": "^7.24.1", + "@babel/plugin-transform-classes": "^7.24.1", + "@babel/plugin-transform-computed-properties": "^7.24.1", + "@babel/plugin-transform-destructuring": "^7.24.1", + "@babel/plugin-transform-dotall-regex": "^7.24.1", + "@babel/plugin-transform-duplicate-keys": "^7.24.1", + "@babel/plugin-transform-dynamic-import": "^7.24.1", + "@babel/plugin-transform-exponentiation-operator": "^7.24.1", + "@babel/plugin-transform-export-namespace-from": "^7.24.1", + "@babel/plugin-transform-for-of": "^7.24.1", + "@babel/plugin-transform-function-name": "^7.24.1", + "@babel/plugin-transform-json-strings": "^7.24.1", + "@babel/plugin-transform-literals": "^7.24.1", + "@babel/plugin-transform-logical-assignment-operators": "^7.24.1", + "@babel/plugin-transform-member-expression-literals": "^7.24.1", + "@babel/plugin-transform-modules-amd": "^7.24.1", + "@babel/plugin-transform-modules-commonjs": "^7.24.1", + "@babel/plugin-transform-modules-systemjs": "^7.24.1", + "@babel/plugin-transform-modules-umd": "^7.24.1", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.22.5", + "@babel/plugin-transform-new-target": "^7.24.1", + "@babel/plugin-transform-nullish-coalescing-operator": "^7.24.1", + 
"@babel/plugin-transform-numeric-separator": "^7.24.1", + "@babel/plugin-transform-object-rest-spread": "^7.24.1", + "@babel/plugin-transform-object-super": "^7.24.1", + "@babel/plugin-transform-optional-catch-binding": "^7.24.1", + "@babel/plugin-transform-optional-chaining": "^7.24.1", + "@babel/plugin-transform-parameters": "^7.24.1", + "@babel/plugin-transform-private-methods": "^7.24.1", + "@babel/plugin-transform-private-property-in-object": "^7.24.1", + "@babel/plugin-transform-property-literals": "^7.24.1", + "@babel/plugin-transform-regenerator": "^7.24.1", + "@babel/plugin-transform-reserved-words": "^7.24.1", + "@babel/plugin-transform-shorthand-properties": "^7.24.1", + "@babel/plugin-transform-spread": "^7.24.1", + "@babel/plugin-transform-sticky-regex": "^7.24.1", + "@babel/plugin-transform-template-literals": "^7.24.1", + "@babel/plugin-transform-typeof-symbol": "^7.24.1", + "@babel/plugin-transform-unicode-escapes": "^7.24.1", + "@babel/plugin-transform-unicode-property-regex": "^7.24.1", + "@babel/plugin-transform-unicode-regex": "^7.24.1", + "@babel/plugin-transform-unicode-sets-regex": "^7.24.1", + "@babel/preset-modules": "0.1.6-no-external-plugins", + "babel-plugin-polyfill-corejs2": "^0.4.10", + "babel-plugin-polyfill-corejs3": "^0.10.4", + "babel-plugin-polyfill-regenerator": "^0.6.1", + "core-js-compat": "^3.31.0", + "semver": "^6.3.1" + } + }, + "@babel/preset-modules": { + "version": "0.1.6-no-external-plugins", + "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz", + "integrity": "sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA==", + "requires": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/types": "^7.4.4", + "esutils": "^2.0.2" + } + }, + "@babel/preset-react": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.24.1.tgz", + "integrity": "sha512-eFa8up2/8cZXLIpkafhaADTXSnl7IsUFCYenRWrARBz0/qZwcT0RBXpys0LJU4+WfPoF2ZG6ew6s2V6izMCwRA==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-validator-option": "^7.23.5", + "@babel/plugin-transform-react-display-name": "^7.24.1", + "@babel/plugin-transform-react-jsx": "^7.23.4", + "@babel/plugin-transform-react-jsx-development": "^7.22.5", + "@babel/plugin-transform-react-pure-annotations": "^7.24.1" + } + }, + "@babel/preset-typescript": { + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.24.1.tgz", + "integrity": "sha512-1DBaMmRDpuYQBPWD8Pf/WEwCrtgRHxsZnP4mIy9G/X+hFfbI47Q2G4t1Paakld84+qsk2fSsUPMKg71jkoOOaQ==", + "requires": { + "@babel/helper-plugin-utils": "^7.24.0", + "@babel/helper-validator-option": "^7.23.5", + "@babel/plugin-syntax-jsx": "^7.24.1", + "@babel/plugin-transform-modules-commonjs": "^7.24.1", + "@babel/plugin-transform-typescript": "^7.24.1" } }, + "@babel/regjsgen": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/@babel/regjsgen/-/regjsgen-0.8.0.tgz", + "integrity": "sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==" + }, "@babel/runtime": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.9.tgz", - "integrity": "sha512-0CX6F+BI2s9dkUqr08KFrAIZgNFj75rdBU/DjCyYLIaV/quFjkk6T+EJ2LkZHyZTbEV4L5p97mNkUsHl2wLFAw==", + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.1.tgz", + "integrity": 
"sha512-+BIznRzyqBf+2wCTxcKE3wDjfGeCoVE61KSHGpkzqrLi8qxqFwBeUFyId2cxkTmm55fzDGnm0+yCxaxygrLUnQ==", "requires": { "regenerator-runtime": "^0.14.0" } }, "@babel/template": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.23.9.tgz", - "integrity": "sha512-+xrD2BWLpvHKNmX2QbpdpsBaWnRxahMwJjO+KZk2JOElj5nSmKezyS1B4u+QbHMTX69t4ukm6hh9lsYQ7GHCKA==", + "version": "7.24.0", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.0.tgz", + "integrity": "sha512-Bkf2q8lMB0AFpX0NFEqSbx1OkTHf0f+0j82mkw+ZpzBnkk7e9Ql0891vlfgi+kHwOk8tQjiQHpqh4LaSa0fKEA==", "requires": { "@babel/code-frame": "^7.23.5", - "@babel/parser": "^7.23.9", - "@babel/types": "^7.23.9" + "@babel/parser": "^7.24.0", + "@babel/types": "^7.24.0" } }, "@babel/traverse": { @@ -16930,9 +20651,9 @@ } }, "@babel/types": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.9.tgz", - "integrity": "sha512-dQjSq/7HaSjRM43FFGnv5keM2HsxpmyV1PfaSVm0nzzjwwTmjOe6J4bC8e3+pTEIgHaHj+1ZlLThRJ2auc/w1Q==", + "version": "7.24.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.0.tgz", + "integrity": "sha512-+j7a5c253RfKh8iABBhywc8NSfP5LURe7Uh4qpsh6jc+aLJguvmIUBdjSdEMQv2bENrCR5MfRdjGo7vzS/ob7w==", "requires": { "@babel/helper-string-parser": "^7.23.4", "@babel/helper-validator-identifier": "^7.22.20", @@ -17047,14 +20768,14 @@ } }, "@codemirror/state": { - "version": "6.4.0", - "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.4.0.tgz", - "integrity": "sha512-hm8XshYj5Fo30Bb922QX9hXB/bxOAVH+qaqHBzw5TKa72vOeslyGwd4X8M0c1dJ9JqxlaMceOQ8RsL9tC7gU0A==" + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.4.1.tgz", + "integrity": "sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A==" }, "@codemirror/view": { - "version": "6.23.1", - "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.23.1.tgz", - "integrity": "sha512-J2Xnn5lFYT1ZN/5ewEoMBCmLlL71lZ3mBdb7cUEuHhX2ESoSrNEucpsDXpX22EuTGm9LOgC9v4Z0wx+Ez8QmGA==", + "version": "6.26.1", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.26.1.tgz", + "integrity": "sha512-wLw0t3R9AwOSQThdZ5Onw8QQtem5asE7+bPlnzc57eubPqiuJKIzwjMZ+C42vQett+iva+J8VgFV4RYWDBh5FA==", "requires": { "@codemirror/state": "^6.4.0", "style-mod": "^4.1.0", @@ -17149,10 +20870,157 @@ } } }, - "@esbuild/darwin-arm64": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz", - "integrity": "sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==", + "@emotion/is-prop-valid": { + "version": "0.8.8", + "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-0.8.8.tgz", + "integrity": "sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==", + "optional": true, + "requires": { + "@emotion/memoize": "0.7.4" + } + }, + "@emotion/memoize": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.7.4.tgz", + "integrity": "sha512-Ja/Vfqe3HpuzRsG1oBtWTHk2PGZ7GR+2Vz5iYGelAw8dx32K0y7PjVuxK6z1nMpZOqAFsRUPCkK1YjJ56qJlgw==", + "optional": true + }, + "@esbuild/aix-ppc64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.20.2.tgz", + "integrity": "sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==", + "optional": true + }, + 
"@esbuild/android-arm": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.20.2.tgz", + "integrity": "sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==", + "optional": true + }, + "@esbuild/android-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.20.2.tgz", + "integrity": "sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==", + "optional": true + }, + "@esbuild/android-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.20.2.tgz", + "integrity": "sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==", + "optional": true + }, + "@esbuild/darwin-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.20.2.tgz", + "integrity": "sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==", + "optional": true + }, + "@esbuild/darwin-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.20.2.tgz", + "integrity": "sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==", + "optional": true + }, + "@esbuild/freebsd-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.2.tgz", + "integrity": "sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==", + "optional": true + }, + "@esbuild/freebsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.20.2.tgz", + "integrity": "sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==", + "optional": true + }, + "@esbuild/linux-arm": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.20.2.tgz", + "integrity": "sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==", + "optional": true + }, + "@esbuild/linux-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.20.2.tgz", + "integrity": "sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==", + "optional": true + }, + "@esbuild/linux-ia32": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.20.2.tgz", + "integrity": "sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==", + "optional": true + }, + "@esbuild/linux-loong64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.20.2.tgz", + "integrity": "sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==", + "optional": true + }, + "@esbuild/linux-mips64el": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.20.2.tgz", + "integrity": "sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==", + "optional": true + }, + "@esbuild/linux-ppc64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.20.2.tgz", + "integrity": 
"sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==", + "optional": true + }, + "@esbuild/linux-riscv64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.20.2.tgz", + "integrity": "sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==", + "optional": true + }, + "@esbuild/linux-s390x": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.20.2.tgz", + "integrity": "sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==", + "optional": true + }, + "@esbuild/linux-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.20.2.tgz", + "integrity": "sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==", + "optional": true + }, + "@esbuild/netbsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.20.2.tgz", + "integrity": "sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==", + "optional": true + }, + "@esbuild/openbsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.2.tgz", + "integrity": "sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==", + "optional": true + }, + "@esbuild/sunos-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.20.2.tgz", + "integrity": "sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==", + "optional": true + }, + "@esbuild/win32-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.20.2.tgz", + "integrity": "sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==", + "optional": true + }, + "@esbuild/win32-ia32": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.20.2.tgz", + "integrity": "sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==", + "optional": true + }, + "@esbuild/win32-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.20.2.tgz", + "integrity": "sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==", "optional": true }, "@eslint-community/eslint-utils": { @@ -17205,9 +21073,9 @@ } }, "@eslint/js": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.56.0.tgz", - "integrity": "sha512-gMsVel9D7f2HLkBma9VbtzZRehRogVRfbr++f06nL2vnCGCNlzOD+/MUov/F4p8myyAHspEhVobgjpX64q5m6A==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz", + "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==", "dev": true }, "@fastify/busboy": { @@ -17246,34 +21114,131 @@ "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.1.tgz", "integrity": "sha512-9TANp6GPoMtYzQdt54kfAyMmz1+osLlXdg2ENroU7zzrtflTLrrC/lgrIfaSe+Wu0b89GKccT7vxXA0MoAIO+Q==" }, + "@graphiql/plugin-explorer": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@graphiql/plugin-explorer/-/plugin-explorer-1.0.4.tgz", + "integrity": 
"sha512-Z0UDhHSX1u4PfiqtlOMrXVrSE11ifC0zycGwhzK+BeglS9z56hknEky7NwJvUb9qC7sTlTmXEgfGLsYb5DjKrg==", + "requires": { + "graphiql-explorer": "^0.9.0" + }, + "dependencies": { + "graphiql-explorer": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/graphiql-explorer/-/graphiql-explorer-0.9.0.tgz", + "integrity": "sha512-fZC/wsuatqiQDO2otchxriFO0LaWIo/ovF/CQJ1yOudmY0P7pzDiP+l9CEHUiWbizk3e99x6DQG4XG1VxA+d6A==", + "requires": {} + } + } + }, + "@graphiql/react": { + "version": "0.20.4", + "resolved": "https://registry.npmjs.org/@graphiql/react/-/react-0.20.4.tgz", + "integrity": "sha512-LDgIlHa65pSngk8G2O0hvohNz4B41VUa7Yg6iPwifa1XreXxHIXjhV6FC1qi5oSjdCIRp4T8dkZnHA6iI5eElg==", + "requires": { + "@graphiql/toolkit": "^0.9.1", + "@headlessui/react": "^1.7.15", + "@radix-ui/react-dialog": "^1.0.4", + "@radix-ui/react-dropdown-menu": "^2.0.5", + "@radix-ui/react-tooltip": "^1.0.6", + "@radix-ui/react-visually-hidden": "^1.0.3", + "@types/codemirror": "^5.60.8", + "clsx": "^1.2.1", + "codemirror": "^5.65.3", + "codemirror-graphql": "^2.0.11", + "copy-to-clipboard": "^3.2.0", + "framer-motion": "^6.5.1", + "graphql-language-service": "^5.2.0", + "markdown-it": "^12.2.0", + "set-value": "^4.1.0" + }, + "dependencies": { + "@codemirror/language": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.0.0.tgz", + "integrity": "sha512-rtjk5ifyMzOna1c7PBu7J1VCt0PvA5wy3o8eMVnxMKb7z8KA7JFecvD04dSn14vj/bBaAbqRsGed5OjtofEnLA==", + "peer": true, + "requires": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0", + "@lezer/common": "^1.0.0", + "@lezer/highlight": "^1.0.0", + "@lezer/lr": "^1.0.0", + "style-mod": "^4.0.0" + } + }, + "clsx": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz", + "integrity": "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==" + }, + "codemirror-graphql": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/codemirror-graphql/-/codemirror-graphql-2.0.11.tgz", + "integrity": "sha512-j1QDDXKVkpin2VsyS0ke2nAhKal6/N1UJtgnBGrPe3gj9ZSP6/K8Xytft94k0xW6giIU/JhZjvW0GwwERNzbFA==", + "requires": { + "@types/codemirror": "^0.0.90", + "graphql-language-service": "5.2.0" + }, + "dependencies": { + "@types/codemirror": { + "version": "0.0.90", + "resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-0.0.90.tgz", + "integrity": "sha512-8Z9+tSg27NPRGubbUPUCrt5DDG/OWzLph5BvcDykwR5D7RyZh5mhHG0uS1ePKV1YFCA+/cwc4Ey2AJAEFfV3IA==", + "requires": { + "@types/tern": "*" + } + } + } + } + } + }, + "@graphiql/toolkit": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/@graphiql/toolkit/-/toolkit-0.9.1.tgz", + "integrity": "sha512-LVt9pdk0830so50ZnU2Znb2rclcoWznG8r8asqAENzV0U1FM1kuY0sdPpc/rBc9MmmNgnB6A+WZzDhq6dbhTHA==", + "requires": { + "@n1ru4l/push-pull-async-iterable-iterator": "^3.1.0", + "meros": "^1.1.4" + } + }, + "@graphql-codegen/add": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/add/-/add-5.0.2.tgz", + "integrity": "sha512-ouBkSvMFUhda5VoKumo/ZvsZM9P5ZTyDsI8LW18VxSNWOjrTeLXBWHG8Gfaai0HwhflPtCYVABbriEcOmrRShQ==", + "dev": true, + "requires": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "tslib": "~2.6.0" + } + }, "@graphql-codegen/cli": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/@graphql-codegen/cli/-/cli-3.3.1.tgz", - "integrity": "sha512-4Es8Y9zFeT0Zx2qRL7L3qXDbbqvXK6aID+8v8lP6gaYD+uWx3Jd4Hsq5vxwVBR+6flm0BW/C85Qm0cvmT7O6LA==", + 
"version": "5.0.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/cli/-/cli-5.0.2.tgz", + "integrity": "sha512-MBIaFqDiLKuO4ojN6xxG9/xL9wmfD3ZjZ7RsPjwQnSHBCUXnEkdKvX+JVpx87Pq29Ycn8wTJUguXnTZ7Di0Mlw==", "dev": true, "requires": { "@babel/generator": "^7.18.13", "@babel/template": "^7.18.10", "@babel/types": "^7.18.13", - "@graphql-codegen/core": "^3.1.0", - "@graphql-codegen/plugin-helpers": "^4.2.0", - "@graphql-tools/apollo-engine-loader": "^7.3.6", - "@graphql-tools/code-file-loader": "^7.3.17", - "@graphql-tools/git-loader": "^7.2.13", - "@graphql-tools/github-loader": "^7.3.20", - "@graphql-tools/graphql-file-loader": "^7.5.0", - "@graphql-tools/json-file-loader": "^7.4.1", - "@graphql-tools/load": "^7.8.0", - "@graphql-tools/prisma-loader": "^7.2.49", - "@graphql-tools/url-loader": "^7.13.2", - "@graphql-tools/utils": "^9.0.0", - "@parcel/watcher": "^2.1.0", + "@graphql-codegen/client-preset": "^4.2.2", + "@graphql-codegen/core": "^4.0.2", + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-tools/apollo-engine-loader": "^8.0.0", + "@graphql-tools/code-file-loader": "^8.0.0", + "@graphql-tools/git-loader": "^8.0.0", + "@graphql-tools/github-loader": "^8.0.0", + "@graphql-tools/graphql-file-loader": "^8.0.0", + "@graphql-tools/json-file-loader": "^8.0.0", + "@graphql-tools/load": "^8.0.0", + "@graphql-tools/prisma-loader": "^8.0.0", + "@graphql-tools/url-loader": "^8.0.0", + "@graphql-tools/utils": "^10.0.0", "@whatwg-node/fetch": "^0.8.0", "chalk": "^4.1.0", - "cosmiconfig": "^7.0.0", + "cosmiconfig": "^8.1.3", "debounce": "^1.2.0", "detect-indent": "^6.0.0", - "graphql-config": "^4.5.0", + "graphql-config": "^5.0.2", "inquirer": "^8.0.0", "is-glob": "^4.0.1", "jiti": "^1.17.1", @@ -17285,177 +21250,243 @@ "string-env-interpolation": "^1.0.1", "ts-log": "^2.2.3", "tslib": "^2.4.0", - "yaml": "^1.10.0", + "yaml": "^2.3.1", "yargs": "^17.0.0" } }, + "@graphql-codegen/client-preset": { + "version": "4.2.5", + "resolved": "https://registry.npmjs.org/@graphql-codegen/client-preset/-/client-preset-4.2.5.tgz", + "integrity": "sha512-hAdB6HN8EDmkoBtr0bPUN/7NH6svzqbcTDMWBCRXPESXkl7y80po+IXrXUjsSrvhKG8xkNXgJNz/2mjwHzywcA==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.20.2", + "@babel/template": "^7.20.7", + "@graphql-codegen/add": "^5.0.2", + "@graphql-codegen/gql-tag-operations": "4.0.6", + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-codegen/typed-document-node": "^5.0.6", + "@graphql-codegen/typescript": "^4.0.6", + "@graphql-codegen/typescript-operations": "^4.2.0", + "@graphql-codegen/visitor-plugin-common": "^5.1.0", + "@graphql-tools/documents": "^1.0.0", + "@graphql-tools/utils": "^10.0.0", + "@graphql-typed-document-node/core": "3.2.0", + "tslib": "~2.6.0" + } + }, "@graphql-codegen/core": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@graphql-codegen/core/-/core-3.1.0.tgz", - "integrity": "sha512-DH1/yaR7oJE6/B+c6ZF2Tbdh7LixF1K8L+8BoSubjNyQ8pNwR4a70mvc1sv6H7qgp6y1bPQ9tKE+aazRRshysw==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/core/-/core-4.0.2.tgz", + "integrity": "sha512-IZbpkhwVqgizcjNiaVzNAzm/xbWT6YnGgeOLwVjm4KbJn3V2jchVtuzHH09G5/WkkLSk2wgbXNdwjM41JxO6Eg==", "dev": true, "requires": { - "@graphql-codegen/plugin-helpers": "^4.1.0", - "@graphql-tools/schema": "^9.0.0", - "@graphql-tools/utils": "^9.1.1", - "tslib": "~2.5.0" - }, - "dependencies": { - "tslib": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.3.tgz", - "integrity": 
"sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==", - "dev": true - } + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-tools/schema": "^10.0.0", + "@graphql-tools/utils": "^10.0.0", + "tslib": "~2.6.0" + } + }, + "@graphql-codegen/gql-tag-operations": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@graphql-codegen/gql-tag-operations/-/gql-tag-operations-4.0.6.tgz", + "integrity": "sha512-y6iXEDpDNjwNxJw3WZqX1/Znj0QHW7+y8O+t2V8qvbTT+3kb2lr9ntc8By7vCr6ctw9tXI4XKaJgpTstJDOwFA==", + "dev": true, + "requires": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-codegen/visitor-plugin-common": "5.1.0", + "@graphql-tools/utils": "^10.0.0", + "auto-bind": "~4.0.0", + "tslib": "~2.6.0" } }, "@graphql-codegen/plugin-helpers": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/@graphql-codegen/plugin-helpers/-/plugin-helpers-4.2.0.tgz", - "integrity": "sha512-THFTCfg+46PXlXobYJ/OoCX6pzjI+9woQqCjdyKtgoI0tn3Xq2HUUCiidndxUpEYVrXb5pRiRXb7b/ZbMQqD0A==", + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/@graphql-codegen/plugin-helpers/-/plugin-helpers-5.0.3.tgz", + "integrity": "sha512-yZ1rpULIWKBZqCDlvGIJRSyj1B2utkEdGmXZTBT/GVayP4hyRYlkd36AJV/LfEsVD8dnsKL5rLz2VTYmRNlJ5Q==", "dev": true, "requires": { - "@graphql-tools/utils": "^9.0.0", + "@graphql-tools/utils": "^10.0.0", "change-case-all": "1.0.15", "common-tags": "1.8.2", "import-from": "4.0.0", "lodash": "~4.17.0", - "tslib": "~2.5.0" - }, - "dependencies": { - "tslib": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.3.tgz", - "integrity": "sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==", - "dev": true - } + "tslib": "~2.6.0" } }, "@graphql-codegen/schema-ast": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@graphql-codegen/schema-ast/-/schema-ast-3.0.1.tgz", - "integrity": "sha512-rTKTi4XiW4QFZnrEqetpiYEWVsOFNoiR/v3rY9mFSttXFbIwNXPme32EspTiGWmEEdHY8UuTDtZN3vEcs/31zw==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@graphql-codegen/schema-ast/-/schema-ast-4.0.2.tgz", + "integrity": "sha512-5mVAOQQK3Oz7EtMl/l3vOQdc2aYClUzVDHHkMvZlunc+KlGgl81j8TLa+X7ANIllqU4fUEsQU3lJmk4hXP6K7Q==", "dev": true, "requires": { - "@graphql-codegen/plugin-helpers": "^4.1.0", - "@graphql-tools/utils": "^9.0.0", - "tslib": "~2.5.0" - }, - "dependencies": { - "tslib": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.3.tgz", - "integrity": "sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==", - "dev": true - } + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-tools/utils": "^10.0.0", + "tslib": "~2.6.0" + } + }, + "@graphql-codegen/typed-document-node": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/@graphql-codegen/typed-document-node/-/typed-document-node-5.0.6.tgz", + "integrity": "sha512-US0J95hOE2/W/h42w4oiY+DFKG7IetEN1mQMgXXeat1w6FAR5PlIz4JrRrEkiVfVetZ1g7K78SOwBD8/IJnDiA==", + "dev": true, + "requires": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-codegen/visitor-plugin-common": "5.1.0", + "auto-bind": "~4.0.0", + "change-case-all": "1.0.15", + "tslib": "~2.6.0" } }, "@graphql-codegen/typescript": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@graphql-codegen/typescript/-/typescript-3.0.4.tgz", - "integrity": 
"sha512-x4O47447DZrWNtE/l5CU9QzzW4m1RbmCEdijlA3s2flG/y1Ckqdemob4CWfilSm5/tZ3w1junVDY616RDTSvZw==", + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@graphql-codegen/typescript/-/typescript-4.0.6.tgz", + "integrity": "sha512-IBG4N+Blv7KAL27bseruIoLTjORFCT3r+QYyMC3g11uY3/9TPpaUyjSdF70yBe5GIQ6dAgDU+ENUC1v7EPi0rw==", "dev": true, "requires": { - "@graphql-codegen/plugin-helpers": "^4.2.0", - "@graphql-codegen/schema-ast": "^3.0.1", - "@graphql-codegen/visitor-plugin-common": "3.1.1", + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-codegen/schema-ast": "^4.0.2", + "@graphql-codegen/visitor-plugin-common": "5.1.0", "auto-bind": "~4.0.0", - "tslib": "~2.5.0" - }, - "dependencies": { - "tslib": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.3.tgz", - "integrity": "sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==", - "dev": true - } + "tslib": "~2.6.0" + } + }, + "@graphql-codegen/typescript-operations": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@graphql-codegen/typescript-operations/-/typescript-operations-4.2.0.tgz", + "integrity": "sha512-lmuwYb03XC7LNRS8oo9M4/vlOrq/wOKmTLBHlltK2YJ1BO/4K/Q9Jdv/jDmJpNydHVR1fmeF4wAfsIp1f9JibA==", + "dev": true, + "requires": { + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-codegen/typescript": "^4.0.6", + "@graphql-codegen/visitor-plugin-common": "5.1.0", + "auto-bind": "~4.0.0", + "tslib": "~2.6.0" } }, "@graphql-codegen/visitor-plugin-common": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@graphql-codegen/visitor-plugin-common/-/visitor-plugin-common-3.1.1.tgz", - "integrity": "sha512-uAfp+zu/009R3HUAuTK2AamR1bxIltM6rrYYI6EXSmkM3rFtFsLTuJhjUDj98HcUCszJZrADppz8KKLGRUVlNg==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@graphql-codegen/visitor-plugin-common/-/visitor-plugin-common-5.1.0.tgz", + "integrity": "sha512-eamQxtA9bjJqI2lU5eYoA1GbdMIRT2X8m8vhWYsVQVWD3qM7sx/IqJU0kx0J3Vd4/CSd36BzL6RKwksibytDIg==", "dev": true, "requires": { - "@graphql-codegen/plugin-helpers": "^4.2.0", - "@graphql-tools/optimize": "^1.3.0", - "@graphql-tools/relay-operation-optimizer": "^6.5.0", - "@graphql-tools/utils": "^9.0.0", + "@graphql-codegen/plugin-helpers": "^5.0.3", + "@graphql-tools/optimize": "^2.0.0", + "@graphql-tools/relay-operation-optimizer": "^7.0.0", + "@graphql-tools/utils": "^10.0.0", "auto-bind": "~4.0.0", "change-case-all": "1.0.15", "dependency-graph": "^0.11.0", "graphql-tag": "^2.11.0", "parse-filepath": "^1.0.2", - "tslib": "~2.5.0" - }, - "dependencies": { - "tslib": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.3.tgz", - "integrity": "sha512-mSxlJJwl3BMEQCUNnxXBU9jP4JBktcEGhURcPR6VQVlnP0FdDEsIaz0C35dXNGLyRfrATNofF0F5p2KPxQgB+w==", - "dev": true - } + "tslib": "~2.6.0" } }, "@graphql-tools/apollo-engine-loader": { - "version": "7.3.26", - "resolved": "https://registry.npmjs.org/@graphql-tools/apollo-engine-loader/-/apollo-engine-loader-7.3.26.tgz", - "integrity": "sha512-h1vfhdJFjnCYn9b5EY1Z91JTF0KB3hHVJNQIsiUV2mpQXZdeOXQoaWeYEKaiI5R6kwBw5PP9B0fv3jfUIG8LyQ==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/apollo-engine-loader/-/apollo-engine-loader-8.0.1.tgz", + "integrity": "sha512-NaPeVjtrfbPXcl+MLQCJLWtqe2/E4bbAqcauEOQ+3sizw1Fc2CNmhHRF8a6W4D0ekvTRRXAMptXYgA2uConbrA==", "dev": true, "requires": { "@ardatan/sync-fetch": "^0.0.1", - "@graphql-tools/utils": "^9.2.1", - "@whatwg-node/fetch": "^0.8.0", + 
"@graphql-tools/utils": "^10.0.13", + "@whatwg-node/fetch": "^0.9.0", "tslib": "^2.4.0" + }, + "dependencies": { + "@whatwg-node/events": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.1.1.tgz", + "integrity": "sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==", + "dev": true + }, + "@whatwg-node/fetch": { + "version": "0.9.17", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.9.17.tgz", + "integrity": "sha512-TDYP3CpCrxwxpiNY0UMNf096H5Ihf67BK1iKGegQl5u9SlpEDYrvnV71gWBGJm+Xm31qOy8ATgma9rm8Pe7/5Q==", + "dev": true, + "requires": { + "@whatwg-node/node-fetch": "^0.5.7", + "urlpattern-polyfill": "^10.0.0" + } + }, + "@whatwg-node/node-fetch": { + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.5.10.tgz", + "integrity": "sha512-KIAHepie/T1PRkUfze4t+bPlyvpxlWiXTPtcGlbIZ0vWkBJMdRmCg4ZrJ2y4XaO1eTPo1HlWYUuj1WvoIpumqg==", + "dev": true, + "requires": { + "@kamilkisiela/fast-url-parser": "^1.1.4", + "@whatwg-node/events": "^0.1.0", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + "tslib": "^2.3.1" + } + }, + "urlpattern-polyfill": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz", + "integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==", + "dev": true + } } }, "@graphql-tools/batch-execute": { - "version": "8.5.22", - "resolved": "https://registry.npmjs.org/@graphql-tools/batch-execute/-/batch-execute-8.5.22.tgz", - "integrity": "sha512-hcV1JaY6NJQFQEwCKrYhpfLK8frSXDbtNMoTur98u10Cmecy1zrqNKSqhEyGetpgHxaJRqszGzKeI3RuroDN6A==", + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/@graphql-tools/batch-execute/-/batch-execute-9.0.4.tgz", + "integrity": "sha512-kkebDLXgDrep5Y0gK1RN3DMUlLqNhg60OAz0lTCqrYeja6DshxLtLkj+zV4mVbBA4mQOEoBmw6g1LZs3dA84/w==", "dev": true, "requires": { - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.0.13", "dataloader": "^2.2.2", "tslib": "^2.4.0", "value-or-promise": "^1.0.12" } }, "@graphql-tools/code-file-loader": { - "version": "7.3.23", - "resolved": "https://registry.npmjs.org/@graphql-tools/code-file-loader/-/code-file-loader-7.3.23.tgz", - "integrity": "sha512-8Wt1rTtyTEs0p47uzsPJ1vAtfAx0jmxPifiNdmo9EOCuUPyQGEbMaik/YkqZ7QUFIEYEQu+Vgfo8tElwOPtx5Q==", + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/code-file-loader/-/code-file-loader-8.1.1.tgz", + "integrity": "sha512-q4KN25EPSUztc8rA8YUU3ufh721Yk12xXDbtUA+YstczWS7a1RJlghYMFEfR1HsHSYbF7cUqkbnTKSGM3o52bQ==", "dev": true, "requires": { - "@graphql-tools/graphql-tag-pluck": "7.5.2", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/graphql-tag-pluck": "8.3.0", + "@graphql-tools/utils": "^10.0.13", "globby": "^11.0.3", "tslib": "^2.4.0", "unixify": "^1.0.0" } }, "@graphql-tools/delegate": { - "version": "9.0.35", - "resolved": "https://registry.npmjs.org/@graphql-tools/delegate/-/delegate-9.0.35.tgz", - "integrity": "sha512-jwPu8NJbzRRMqi4Vp/5QX1vIUeUPpWmlQpOkXQD2r1X45YsVceyUUBnktCrlJlDB4jPRVy7JQGwmYo3KFiOBMA==", + "version": "10.0.4", + "resolved": "https://registry.npmjs.org/@graphql-tools/delegate/-/delegate-10.0.4.tgz", + "integrity": "sha512-WswZRbQZMh/ebhc8zSomK9DIh6Pd5KbuiMsyiKkKz37TWTrlCOe+4C/fyrBFez30ksq6oFyCeSKMwfrCbeGo0Q==", "dev": true, "requires": { - "@graphql-tools/batch-execute": "^8.5.22", - "@graphql-tools/executor": "^0.0.20", - 
"@graphql-tools/schema": "^9.0.19", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/batch-execute": "^9.0.4", + "@graphql-tools/executor": "^1.2.1", + "@graphql-tools/schema": "^10.0.3", + "@graphql-tools/utils": "^10.0.13", "dataloader": "^2.2.2", - "tslib": "^2.5.0", - "value-or-promise": "^1.0.12" + "tslib": "^2.5.0" + } + }, + "@graphql-tools/documents": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@graphql-tools/documents/-/documents-1.0.0.tgz", + "integrity": "sha512-rHGjX1vg/nZ2DKqRGfDPNC55CWZBMldEVcH+91BThRa6JeT80NqXknffLLEZLRUxyikCfkwMsk6xR3UNMqG0Rg==", + "dev": true, + "requires": { + "lodash.sortby": "^4.7.0", + "tslib": "^2.4.0" } }, "@graphql-tools/executor": { - "version": "0.0.20", - "resolved": "https://registry.npmjs.org/@graphql-tools/executor/-/executor-0.0.20.tgz", - "integrity": "sha512-GdvNc4vszmfeGvUqlcaH1FjBoguvMYzxAfT6tDd4/LgwymepHhinqLNA5otqwVLW+JETcDaK7xGENzFomuE6TA==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor/-/executor-1.2.5.tgz", + "integrity": "sha512-s7sW4K3BUNsk9sjq+vNicwb9KwcR3G55uS/CI8KZQ4x0ZdeYMIwpeU9MVeORCCpHuQyTaV+/VnO0hFrS/ygzsg==", "dev": true, "requires": { - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.1.1", "@graphql-typed-document-node/core": "3.2.0", "@repeaterjs/repeater": "^3.0.4", "tslib": "^2.4.0", @@ -17463,81 +21494,92 @@ } }, "@graphql-tools/executor-graphql-ws": { - "version": "0.0.14", - "resolved": "https://registry.npmjs.org/@graphql-tools/executor-graphql-ws/-/executor-graphql-ws-0.0.14.tgz", - "integrity": "sha512-P2nlkAsPZKLIXImFhj0YTtny5NQVGSsKnhi7PzXiaHSXc6KkzqbWZHKvikD4PObanqg+7IO58rKFpGXP7eeO+w==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-graphql-ws/-/executor-graphql-ws-1.1.2.tgz", + "integrity": "sha512-+9ZK0rychTH1LUv4iZqJ4ESbmULJMTsv3XlFooPUngpxZkk00q6LqHKJRrsLErmQrVaC7cwQCaRBJa0teK17Lg==", "dev": true, "requires": { - "@graphql-tools/utils": "^9.2.1", - "@repeaterjs/repeater": "3.0.4", + "@graphql-tools/utils": "^10.0.13", "@types/ws": "^8.0.0", - "graphql-ws": "5.12.1", - "isomorphic-ws": "5.0.0", + "graphql-ws": "^5.14.0", + "isomorphic-ws": "^5.0.0", "tslib": "^2.4.0", - "ws": "8.13.0" - }, - "dependencies": { - "@repeaterjs/repeater": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@repeaterjs/repeater/-/repeater-3.0.4.tgz", - "integrity": "sha512-AW8PKd6iX3vAZ0vA43nOUOnbq/X5ihgU+mSXXqunMkeQADGiqw/PY0JNeYtD5sr0PAy51YPgAPbDoeapv9r8WA==", - "dev": true - }, - "ws": { - "version": "8.13.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", - "integrity": "sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==", - "dev": true, - "requires": {} - } + "ws": "^8.13.0" } }, "@graphql-tools/executor-http": { - "version": "0.1.10", - "resolved": "https://registry.npmjs.org/@graphql-tools/executor-http/-/executor-http-0.1.10.tgz", - "integrity": "sha512-hnAfbKv0/lb9s31LhWzawQ5hghBfHS+gYWtqxME6Rl0Aufq9GltiiLBcl7OVVOnkLF0KhwgbYP1mB5VKmgTGpg==", + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-http/-/executor-http-1.0.9.tgz", + "integrity": "sha512-+NXaZd2MWbbrWHqU4EhXcrDbogeiCDmEbrAN+rMn4Nu2okDjn2MTFDbTIab87oEubQCH4Te1wDkWPKrzXup7+Q==", "dev": true, "requires": { - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.0.13", "@repeaterjs/repeater": "^3.0.4", - "@whatwg-node/fetch": "^0.8.1", - "dset": "^3.1.2", + "@whatwg-node/fetch": "^0.9.0", "extract-files": "^11.0.0", 
"meros": "^1.2.1", "tslib": "^2.4.0", "value-or-promise": "^1.0.12" + }, + "dependencies": { + "@whatwg-node/events": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.1.1.tgz", + "integrity": "sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==", + "dev": true + }, + "@whatwg-node/fetch": { + "version": "0.9.17", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.9.17.tgz", + "integrity": "sha512-TDYP3CpCrxwxpiNY0UMNf096H5Ihf67BK1iKGegQl5u9SlpEDYrvnV71gWBGJm+Xm31qOy8ATgma9rm8Pe7/5Q==", + "dev": true, + "requires": { + "@whatwg-node/node-fetch": "^0.5.7", + "urlpattern-polyfill": "^10.0.0" + } + }, + "@whatwg-node/node-fetch": { + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.5.10.tgz", + "integrity": "sha512-KIAHepie/T1PRkUfze4t+bPlyvpxlWiXTPtcGlbIZ0vWkBJMdRmCg4ZrJ2y4XaO1eTPo1HlWYUuj1WvoIpumqg==", + "dev": true, + "requires": { + "@kamilkisiela/fast-url-parser": "^1.1.4", + "@whatwg-node/events": "^0.1.0", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + "tslib": "^2.3.1" + } + }, + "urlpattern-polyfill": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz", + "integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==", + "dev": true + } } }, "@graphql-tools/executor-legacy-ws": { - "version": "0.0.11", - "resolved": "https://registry.npmjs.org/@graphql-tools/executor-legacy-ws/-/executor-legacy-ws-0.0.11.tgz", - "integrity": "sha512-4ai+NnxlNfvIQ4c70hWFvOZlSUN8lt7yc+ZsrwtNFbFPH/EroIzFMapAxM9zwyv9bH38AdO3TQxZ5zNxgBdvUw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@graphql-tools/executor-legacy-ws/-/executor-legacy-ws-1.0.6.tgz", + "integrity": "sha512-lDSxz9VyyquOrvSuCCnld3256Hmd+QI2lkmkEv7d4mdzkxkK4ddAWW1geQiWrQvWmdsmcnGGlZ7gDGbhEExwqg==", "dev": true, "requires": { - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.0.13", "@types/ws": "^8.0.0", - "isomorphic-ws": "5.0.0", + "isomorphic-ws": "^5.0.0", "tslib": "^2.4.0", - "ws": "8.13.0" - }, - "dependencies": { - "ws": { - "version": "8.13.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", - "integrity": "sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==", - "dev": true, - "requires": {} - } + "ws": "^8.15.0" } }, "@graphql-tools/git-loader": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/@graphql-tools/git-loader/-/git-loader-7.3.0.tgz", - "integrity": "sha512-gcGAK+u16eHkwsMYqqghZbmDquh8QaO24Scsxq+cVR+vx1ekRlsEiXvu+yXVDbZdcJ6PBIbeLcQbEu+xhDLmvQ==", + "version": "8.0.5", + "resolved": "https://registry.npmjs.org/@graphql-tools/git-loader/-/git-loader-8.0.5.tgz", + "integrity": "sha512-P97/1mhruDiA6D5WUmx3n/aeGPLWj2+4dpzDOxFGGU+z9NcI/JdygMkeFpGZNHeJfw+kHfxgPcMPnxHcyhAoVA==", "dev": true, "requires": { - "@graphql-tools/graphql-tag-pluck": "7.5.2", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/graphql-tag-pluck": "8.3.0", + "@graphql-tools/utils": "^10.0.13", "is-glob": "4.0.3", "micromatch": "^4.0.4", "tslib": "^2.4.0", @@ -17545,190 +21587,304 @@ } }, "@graphql-tools/github-loader": { - "version": "7.3.28", - "resolved": "https://registry.npmjs.org/@graphql-tools/github-loader/-/github-loader-7.3.28.tgz", - "integrity": "sha512-OK92Lf9pmxPQvjUNv05b3tnVhw0JRfPqOf15jZjyQ8BfdEUrJoP32b4dRQQem/wyRL24KY4wOfArJNqzpsbwCA==", + 
"version": "8.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/github-loader/-/github-loader-8.0.1.tgz", + "integrity": "sha512-W4dFLQJ5GtKGltvh/u1apWRFKBQOsDzFxO9cJkOYZj1VzHCpRF43uLST4VbCfWve+AwBqOuKr7YgkHoxpRMkcg==", "dev": true, "requires": { "@ardatan/sync-fetch": "^0.0.1", - "@graphql-tools/executor-http": "^0.1.9", - "@graphql-tools/graphql-tag-pluck": "^7.4.6", - "@graphql-tools/utils": "^9.2.1", - "@whatwg-node/fetch": "^0.8.0", + "@graphql-tools/executor-http": "^1.0.9", + "@graphql-tools/graphql-tag-pluck": "^8.0.0", + "@graphql-tools/utils": "^10.0.13", + "@whatwg-node/fetch": "^0.9.0", "tslib": "^2.4.0", "value-or-promise": "^1.0.12" + }, + "dependencies": { + "@whatwg-node/events": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.1.1.tgz", + "integrity": "sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==", + "dev": true + }, + "@whatwg-node/fetch": { + "version": "0.9.17", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.9.17.tgz", + "integrity": "sha512-TDYP3CpCrxwxpiNY0UMNf096H5Ihf67BK1iKGegQl5u9SlpEDYrvnV71gWBGJm+Xm31qOy8ATgma9rm8Pe7/5Q==", + "dev": true, + "requires": { + "@whatwg-node/node-fetch": "^0.5.7", + "urlpattern-polyfill": "^10.0.0" + } + }, + "@whatwg-node/node-fetch": { + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.5.10.tgz", + "integrity": "sha512-KIAHepie/T1PRkUfze4t+bPlyvpxlWiXTPtcGlbIZ0vWkBJMdRmCg4ZrJ2y4XaO1eTPo1HlWYUuj1WvoIpumqg==", + "dev": true, + "requires": { + "@kamilkisiela/fast-url-parser": "^1.1.4", + "@whatwg-node/events": "^0.1.0", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + "tslib": "^2.3.1" + } + }, + "urlpattern-polyfill": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz", + "integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==", + "dev": true + } } }, "@graphql-tools/graphql-file-loader": { - "version": "7.5.17", - "resolved": "https://registry.npmjs.org/@graphql-tools/graphql-file-loader/-/graphql-file-loader-7.5.17.tgz", - "integrity": "sha512-hVwwxPf41zOYgm4gdaZILCYnKB9Zap7Ys9OhY1hbwuAuC4MMNY9GpUjoTU3CQc3zUiPoYStyRtUGkHSJZ3HxBw==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/graphql-file-loader/-/graphql-file-loader-8.0.1.tgz", + "integrity": "sha512-7gswMqWBabTSmqbaNyWSmRRpStWlcCkBc73E6NZNlh4YNuiyKOwbvSkOUYFOqFMfEL+cFsXgAvr87Vz4XrYSbA==", "dev": true, "requires": { - "@graphql-tools/import": "6.7.18", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/import": "7.0.1", + "@graphql-tools/utils": "^10.0.13", "globby": "^11.0.3", "tslib": "^2.4.0", "unixify": "^1.0.0" } }, "@graphql-tools/graphql-tag-pluck": { - "version": "7.5.2", - "resolved": "https://registry.npmjs.org/@graphql-tools/graphql-tag-pluck/-/graphql-tag-pluck-7.5.2.tgz", - "integrity": "sha512-RW+H8FqOOLQw0BPXaahYepVSRjuOHw+7IL8Opaa5G5uYGOBxoXR7DceyQ7BcpMgktAOOmpDNQ2WtcboChOJSRA==", + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/@graphql-tools/graphql-tag-pluck/-/graphql-tag-pluck-8.3.0.tgz", + "integrity": "sha512-gNqukC+s7iHC7vQZmx1SEJQmLnOguBq+aqE2zV2+o1hxkExvKqyFli1SY/9gmukFIKpKutCIj+8yLOM+jARutw==", "dev": true, "requires": { + "@babel/core": "^7.22.9", "@babel/parser": "^7.16.8", "@babel/plugin-syntax-import-assertions": "^7.20.0", "@babel/traverse": "^7.16.8", "@babel/types": "^7.16.8", - 
"@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.0.13", "tslib": "^2.4.0" } }, "@graphql-tools/import": { - "version": "6.7.18", - "resolved": "https://registry.npmjs.org/@graphql-tools/import/-/import-6.7.18.tgz", - "integrity": "sha512-XQDdyZTp+FYmT7as3xRWH/x8dx0QZA2WZqfMF5EWb36a0PiH7WwlRQYIdyYXj8YCLpiWkeBXgBRHmMnwEYR8iQ==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/import/-/import-7.0.1.tgz", + "integrity": "sha512-935uAjAS8UAeXThqHfYVr4HEAp6nHJ2sximZKO1RzUTq5WoALMAhhGARl0+ecm6X+cqNUwIChJbjtaa6P/ML0w==", "dev": true, "requires": { - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.0.13", "resolve-from": "5.0.0", "tslib": "^2.4.0" } }, "@graphql-tools/json-file-loader": { - "version": "7.4.18", - "resolved": "https://registry.npmjs.org/@graphql-tools/json-file-loader/-/json-file-loader-7.4.18.tgz", - "integrity": "sha512-AJ1b6Y1wiVgkwsxT5dELXhIVUPs/u3VZ8/0/oOtpcoyO/vAeM5rOvvWegzicOOnQw8G45fgBRMkkRfeuwVt6+w==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/json-file-loader/-/json-file-loader-8.0.1.tgz", + "integrity": "sha512-lAy2VqxDAHjVyqeJonCP6TUemrpYdDuKt25a10X6zY2Yn3iFYGnuIDQ64cv3ytyGY6KPyPB+Kp+ZfOkNDG3FQA==", "dev": true, "requires": { - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.0.13", "globby": "^11.0.3", "tslib": "^2.4.0", "unixify": "^1.0.0" } }, "@graphql-tools/load": { - "version": "7.8.14", - "resolved": "https://registry.npmjs.org/@graphql-tools/load/-/load-7.8.14.tgz", - "integrity": "sha512-ASQvP+snHMYm+FhIaLxxFgVdRaM0vrN9wW2BKInQpktwWTXVyk+yP5nQUCEGmn0RTdlPKrffBaigxepkEAJPrg==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@graphql-tools/load/-/load-8.0.2.tgz", + "integrity": "sha512-S+E/cmyVmJ3CuCNfDuNF2EyovTwdWfQScXv/2gmvJOti2rGD8jTt9GYVzXaxhblLivQR9sBUCNZu/w7j7aXUCA==", "dev": true, "requires": { - "@graphql-tools/schema": "^9.0.18", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/schema": "^10.0.3", + "@graphql-tools/utils": "^10.0.13", "p-limit": "3.1.0", "tslib": "^2.4.0" } }, "@graphql-tools/merge": { - "version": "8.4.2", - "resolved": "https://registry.npmjs.org/@graphql-tools/merge/-/merge-8.4.2.tgz", - "integrity": "sha512-XbrHAaj8yDuINph+sAfuq3QCZ/tKblrTLOpirK0+CAgNlZUCHs0Fa+xtMUURgwCVThLle1AF7svJCxFizygLsw==", + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/@graphql-tools/merge/-/merge-9.0.3.tgz", + "integrity": "sha512-FeKv9lKLMwqDu0pQjPpF59GY3HReUkWXKsMIuMuJQOKh9BETu7zPEFUELvcw8w+lwZkl4ileJsHXC9+AnsT2Lw==", "dev": true, "requires": { - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.0.13", "tslib": "^2.4.0" } }, "@graphql-tools/optimize": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@graphql-tools/optimize/-/optimize-1.4.0.tgz", - "integrity": "sha512-dJs/2XvZp+wgHH8T5J2TqptT9/6uVzIYvA6uFACha+ufvdMBedkfR4b4GbT8jAKLRARiqRTxy3dctnwkTM2tdw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@graphql-tools/optimize/-/optimize-2.0.0.tgz", + "integrity": "sha512-nhdT+CRGDZ+bk68ic+Jw1OZ99YCDIKYA5AlVAnBHJvMawSx9YQqQAIj4refNc1/LRieGiuWvhbG3jvPVYho0Dg==", "dev": true, "requires": { "tslib": "^2.4.0" } }, "@graphql-tools/prisma-loader": { - "version": "7.2.72", - "resolved": "https://registry.npmjs.org/@graphql-tools/prisma-loader/-/prisma-loader-7.2.72.tgz", - "integrity": "sha512-0a7uV7Fky6yDqd0tI9+XMuvgIo6GAqiVzzzFV4OSLry4AwiQlI3igYseBV7ZVOGhedOTqj/URxjpiv07hRcwag==", + "version": "8.0.3", + "resolved": 
"https://registry.npmjs.org/@graphql-tools/prisma-loader/-/prisma-loader-8.0.3.tgz", + "integrity": "sha512-oZhxnMr3Jw2WAW1h9FIhF27xWzIB7bXWM8olz4W12oII4NiZl7VRkFw9IT50zME2Bqi9LGh9pkmMWkjvbOpl+Q==", "dev": true, "requires": { - "@graphql-tools/url-loader": "^7.17.18", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/url-loader": "^8.0.2", + "@graphql-tools/utils": "^10.0.13", "@types/js-yaml": "^4.0.0", "@types/json-stable-stringify": "^1.0.32", - "@whatwg-node/fetch": "^0.8.2", + "@whatwg-node/fetch": "^0.9.0", "chalk": "^4.1.0", "debug": "^4.3.1", "dotenv": "^16.0.0", "graphql-request": "^6.0.0", - "http-proxy-agent": "^6.0.0", - "https-proxy-agent": "^6.0.0", - "jose": "^4.11.4", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.0", + "jose": "^5.0.0", "js-yaml": "^4.0.0", "json-stable-stringify": "^1.0.1", "lodash": "^4.17.20", "scuid": "^1.1.0", "tslib": "^2.4.0", "yaml-ast-parser": "^0.0.43" + }, + "dependencies": { + "@whatwg-node/events": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.1.1.tgz", + "integrity": "sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==", + "dev": true + }, + "@whatwg-node/fetch": { + "version": "0.9.17", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.9.17.tgz", + "integrity": "sha512-TDYP3CpCrxwxpiNY0UMNf096H5Ihf67BK1iKGegQl5u9SlpEDYrvnV71gWBGJm+Xm31qOy8ATgma9rm8Pe7/5Q==", + "dev": true, + "requires": { + "@whatwg-node/node-fetch": "^0.5.7", + "urlpattern-polyfill": "^10.0.0" + } + }, + "@whatwg-node/node-fetch": { + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.5.10.tgz", + "integrity": "sha512-KIAHepie/T1PRkUfze4t+bPlyvpxlWiXTPtcGlbIZ0vWkBJMdRmCg4ZrJ2y4XaO1eTPo1HlWYUuj1WvoIpumqg==", + "dev": true, + "requires": { + "@kamilkisiela/fast-url-parser": "^1.1.4", + "@whatwg-node/events": "^0.1.0", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + "tslib": "^2.3.1" + } + }, + "urlpattern-polyfill": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz", + "integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==", + "dev": true + } } }, "@graphql-tools/relay-operation-optimizer": { - "version": "6.5.18", - "resolved": "https://registry.npmjs.org/@graphql-tools/relay-operation-optimizer/-/relay-operation-optimizer-6.5.18.tgz", - "integrity": "sha512-mc5VPyTeV+LwiM+DNvoDQfPqwQYhPV/cl5jOBjTgSniyaq8/86aODfMkrE2OduhQ5E00hqrkuL2Fdrgk0w1QJg==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/@graphql-tools/relay-operation-optimizer/-/relay-operation-optimizer-7.0.1.tgz", + "integrity": "sha512-y0ZrQ/iyqWZlsS/xrJfSir3TbVYJTYmMOu4TaSz6F4FRDTQ3ie43BlKkhf04rC28pnUOS4BO9pDcAo1D30l5+A==", "dev": true, "requires": { "@ardatan/relay-compiler": "12.0.0", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/utils": "^10.0.13", "tslib": "^2.4.0" } }, "@graphql-tools/schema": { - "version": "9.0.19", - "resolved": "https://registry.npmjs.org/@graphql-tools/schema/-/schema-9.0.19.tgz", - "integrity": "sha512-oBRPoNBtCkk0zbUsyP4GaIzCt8C0aCI4ycIRUL67KK5pOHljKLBBtGT+Jr6hkzA74C8Gco8bpZPe7aWFjiaK2w==", + "version": "10.0.3", + "resolved": "https://registry.npmjs.org/@graphql-tools/schema/-/schema-10.0.3.tgz", + "integrity": "sha512-p28Oh9EcOna6i0yLaCFOnkcBDQECVf3SCexT6ktb86QNj9idnkhI+tCxnwZDh58Qvjd2nURdkbevvoZkvxzCog==", "dev": true, "requires": { - 
"@graphql-tools/merge": "^8.4.1", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/merge": "^9.0.3", + "@graphql-tools/utils": "^10.0.13", "tslib": "^2.4.0", "value-or-promise": "^1.0.12" } }, "@graphql-tools/url-loader": { - "version": "7.17.18", - "resolved": "https://registry.npmjs.org/@graphql-tools/url-loader/-/url-loader-7.17.18.tgz", - "integrity": "sha512-ear0CiyTj04jCVAxi7TvgbnGDIN2HgqzXzwsfcqiVg9cvjT40NcMlZ2P1lZDgqMkZ9oyLTV8Bw6j+SyG6A+xPw==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@graphql-tools/url-loader/-/url-loader-8.0.2.tgz", + "integrity": "sha512-1dKp2K8UuFn7DFo1qX5c1cyazQv2h2ICwA9esHblEqCYrgf69Nk8N7SODmsfWg94OEaI74IqMoM12t7eIGwFzQ==", "dev": true, "requires": { "@ardatan/sync-fetch": "^0.0.1", - "@graphql-tools/delegate": "^9.0.31", - "@graphql-tools/executor-graphql-ws": "^0.0.14", - "@graphql-tools/executor-http": "^0.1.7", - "@graphql-tools/executor-legacy-ws": "^0.0.11", - "@graphql-tools/utils": "^9.2.1", - "@graphql-tools/wrap": "^9.4.2", + "@graphql-tools/delegate": "^10.0.4", + "@graphql-tools/executor-graphql-ws": "^1.1.2", + "@graphql-tools/executor-http": "^1.0.9", + "@graphql-tools/executor-legacy-ws": "^1.0.6", + "@graphql-tools/utils": "^10.0.13", + "@graphql-tools/wrap": "^10.0.2", "@types/ws": "^8.0.0", - "@whatwg-node/fetch": "^0.8.0", + "@whatwg-node/fetch": "^0.9.0", "isomorphic-ws": "^5.0.0", "tslib": "^2.4.0", "value-or-promise": "^1.0.11", "ws": "^8.12.0" + }, + "dependencies": { + "@whatwg-node/events": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@whatwg-node/events/-/events-0.1.1.tgz", + "integrity": "sha512-AyQEn5hIPV7Ze+xFoXVU3QTHXVbWPrzaOkxtENMPMuNL6VVHrp4hHfDt9nrQpjO7BgvuM95dMtkycX5M/DZR3w==", + "dev": true + }, + "@whatwg-node/fetch": { + "version": "0.9.17", + "resolved": "https://registry.npmjs.org/@whatwg-node/fetch/-/fetch-0.9.17.tgz", + "integrity": "sha512-TDYP3CpCrxwxpiNY0UMNf096H5Ihf67BK1iKGegQl5u9SlpEDYrvnV71gWBGJm+Xm31qOy8ATgma9rm8Pe7/5Q==", + "dev": true, + "requires": { + "@whatwg-node/node-fetch": "^0.5.7", + "urlpattern-polyfill": "^10.0.0" + } + }, + "@whatwg-node/node-fetch": { + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@whatwg-node/node-fetch/-/node-fetch-0.5.10.tgz", + "integrity": "sha512-KIAHepie/T1PRkUfze4t+bPlyvpxlWiXTPtcGlbIZ0vWkBJMdRmCg4ZrJ2y4XaO1eTPo1HlWYUuj1WvoIpumqg==", + "dev": true, + "requires": { + "@kamilkisiela/fast-url-parser": "^1.1.4", + "@whatwg-node/events": "^0.1.0", + "busboy": "^1.6.0", + "fast-querystring": "^1.1.1", + "tslib": "^2.3.1" + } + }, + "urlpattern-polyfill": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/urlpattern-polyfill/-/urlpattern-polyfill-10.0.0.tgz", + "integrity": "sha512-H/A06tKD7sS1O1X2SshBVeA5FLycRpjqiBeqGKmBwBDBy28EnRjORxTNe269KSSr5un5qyWi1iL61wLxpd+ZOg==", + "dev": true + } } }, "@graphql-tools/utils": { - "version": "9.2.1", - "resolved": "https://registry.npmjs.org/@graphql-tools/utils/-/utils-9.2.1.tgz", - "integrity": "sha512-WUw506Ql6xzmOORlriNrD6Ugx+HjVgYxt9KCXD9mHAak+eaXSwuGGPyE60hy9xaDEoXKBsG7SkG69ybitaVl6A==", + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/@graphql-tools/utils/-/utils-10.1.2.tgz", + "integrity": "sha512-fX13CYsDnX4yifIyNdiN0cVygz/muvkreWWem6BBw130+ODbRRgfiVveL0NizCEnKXkpvdeTy9Bxvo9LIKlhrw==", "dev": true, "requires": { "@graphql-typed-document-node/core": "^3.1.1", + "cross-inspect": "1.0.0", + "dset": "^3.1.2", "tslib": "^2.4.0" } }, "@graphql-tools/wrap": { - "version": "9.4.2", - "resolved": 
"https://registry.npmjs.org/@graphql-tools/wrap/-/wrap-9.4.2.tgz", - "integrity": "sha512-DFcd9r51lmcEKn0JW43CWkkI2D6T9XI1juW/Yo86i04v43O9w2/k4/nx2XTJv4Yv+iXwUw7Ok81PGltwGJSDSA==", + "version": "10.0.5", + "resolved": "https://registry.npmjs.org/@graphql-tools/wrap/-/wrap-10.0.5.tgz", + "integrity": "sha512-Cbr5aYjr3HkwdPvetZp1cpDWTGdD1Owgsb3z/ClzhmrboiK86EnQDxDvOJiQkDCPWE9lNBwj8Y4HfxroY0D9DQ==", "dev": true, "requires": { - "@graphql-tools/delegate": "^9.0.31", - "@graphql-tools/schema": "^9.0.18", - "@graphql-tools/utils": "^9.2.1", + "@graphql-tools/delegate": "^10.0.4", + "@graphql-tools/schema": "^10.0.3", + "@graphql-tools/utils": "^10.1.1", "tslib": "^2.4.0", "value-or-promise": "^1.0.12" } @@ -17749,9 +21905,9 @@ } }, "@heroicons/react": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@heroicons/react/-/react-2.1.1.tgz", - "integrity": "sha512-JyyN9Lo66kirbCMuMMRPtJxtKJoIsXKS569ebHGGRKbl8s4CtUfLnyKJxteA+vIKySocO4s1SkTkGS4xtG/yEA==", + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@heroicons/react/-/react-2.1.3.tgz", + "integrity": "sha512-fEcPfo4oN345SoqdlCDdSa4ivjaKbk0jTd+oubcgNxnNgAfzysfwWfQUr+51wigiWHQQRiZNd1Ao0M5Y3M2EGg==", "requires": {} }, "@hookform/error-message": { @@ -17784,11 +21940,11 @@ "dev": true }, "@iconify-icon/react": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@iconify-icon/react/-/react-1.0.8.tgz", - "integrity": "sha512-T8Hzz0cQ+08hboS1KFt5t/lBxwpZZuMNVxld2O7vfarPd6mZt9DRRQiM5RIYfB6hxVNddDuzyxbqqfGcMU27pA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@iconify-icon/react/-/react-2.0.1.tgz", + "integrity": "sha512-1m6L2yNsSJ25k5baQRqNqh2J0w+91PwOn1WdBIR6ZTwxePbsZC8k3NNVc6m9BJObsIQdUlMA1NGj8el4tfbsVg==", "requires": { - "iconify-icon": "^1.0.8" + "iconify-icon": "^2.0.0" } }, "@iconify-json/mdi": { @@ -17868,6 +22024,15 @@ "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", "dev": true }, + "@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "requires": { + "@sinclair/typebox": "^0.27.8" + } + }, "@jridgewell/gen-mapping": { "version": "0.3.3", "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", @@ -17894,14 +22059,20 @@ "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==" }, "@jridgewell/trace-mapping": { - "version": "0.3.22", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.22.tgz", - "integrity": "sha512-Wf963MzWtA2sjrNt+g18IAln9lKnlRp+K2eH4jjIoF1wYeq3aMREpG09xhlhdzS0EjwU7qmUJYangWa+151vZw==", + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", "requires": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "@kamilkisiela/fast-url-parser": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@kamilkisiela/fast-url-parser/-/fast-url-parser-1.1.4.tgz", + "integrity": "sha512-gbkePEBupNydxCelHCESvFSFM8XPh1Zs/OAVRW/rKpEqPAl5PbOM90Si8mv9bvnR53uPD2s/FiRxdvSejpRJew==", + "dev": true + }, "@lezer/common": { "version": "1.2.1", "resolved": 
"https://registry.npmjs.org/@lezer/common/-/common-1.2.1.tgz", @@ -17940,28 +22111,101 @@ "resolved": "https://registry.npmjs.org/@lezer/javascript/-/javascript-1.4.13.tgz", "integrity": "sha512-5IBr8LIO3xJdJH1e9aj/ZNLE4LSbdsx25wFmGRAZsj2zSmwAYjx26JyU/BYOCpRQlu1jcv1z3vy4NB9+UkfRow==", "requires": { - "@lezer/common": "^1.2.0", - "@lezer/highlight": "^1.1.3", - "@lezer/lr": "^1.3.0" + "@lezer/common": "^1.2.0", + "@lezer/highlight": "^1.1.3", + "@lezer/lr": "^1.3.0" + } + }, + "@lezer/lr": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.0.tgz", + "integrity": "sha512-Wst46p51km8gH0ZUmeNrtpRYmdlRHUpN1DQd3GFAyKANi8WVz8c2jHYTf1CVScFaCjQw1iO3ZZdqGDxQPRErTg==", + "requires": { + "@lezer/common": "^1.0.0" + } + }, + "@lezer/markdown": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@lezer/markdown/-/markdown-1.2.0.tgz", + "integrity": "sha512-d7MwsfAukZJo1GpPrcPGa3MxaFFOqNp0gbqF+3F7pTeNDOgeJN1muXzx1XXDPt+Ac+/voCzsH7qXqnn+xReG/g==", + "requires": { + "@lezer/common": "^1.0.0", + "@lezer/highlight": "^1.0.0" + } + }, + "@loadable/component": { + "version": "5.16.3", + "resolved": "https://registry.npmjs.org/@loadable/component/-/component-5.16.3.tgz", + "integrity": "sha512-2mVvHs2988oVX2/zM0y6nYhJ4rTVHhkhRnpupBA0Rjl5tS8op9uSR4u5SLVfMLxzpspr2UiIBQD+wEuMsuq4Dg==", + "requires": { + "@babel/runtime": "^7.7.7", + "hoist-non-react-statics": "^3.3.1", + "react-is": "^16.12.0" + } + }, + "@motionone/animation": { + "version": "10.17.0", + "resolved": "https://registry.npmjs.org/@motionone/animation/-/animation-10.17.0.tgz", + "integrity": "sha512-ANfIN9+iq1kGgsZxs+Nz96uiNcPLGTXwfNo2Xz/fcJXniPYpaz/Uyrfa+7I5BPLxCP82sh7quVDudf1GABqHbg==", + "requires": { + "@motionone/easing": "^10.17.0", + "@motionone/types": "^10.17.0", + "@motionone/utils": "^10.17.0", + "tslib": "^2.3.1" + } + }, + "@motionone/dom": { + "version": "10.12.0", + "resolved": "https://registry.npmjs.org/@motionone/dom/-/dom-10.12.0.tgz", + "integrity": "sha512-UdPTtLMAktHiqV0atOczNYyDd/d8Cf5fFsd1tua03PqTwwCe/6lwhLSQ8a7TbnQ5SN0gm44N1slBfj+ORIhrqw==", + "requires": { + "@motionone/animation": "^10.12.0", + "@motionone/generators": "^10.12.0", + "@motionone/types": "^10.12.0", + "@motionone/utils": "^10.12.0", + "hey-listen": "^1.0.8", + "tslib": "^2.3.1" + } + }, + "@motionone/easing": { + "version": "10.17.0", + "resolved": "https://registry.npmjs.org/@motionone/easing/-/easing-10.17.0.tgz", + "integrity": "sha512-Bxe2wSuLu/qxqW4rBFS5m9tMLOw+QBh8v5A7Z5k4Ul4sTj5jAOfZG5R0bn5ywmk+Fs92Ij1feZ5pmC4TeXA8Tg==", + "requires": { + "@motionone/utils": "^10.17.0", + "tslib": "^2.3.1" } }, - "@lezer/lr": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.0.tgz", - "integrity": "sha512-Wst46p51km8gH0ZUmeNrtpRYmdlRHUpN1DQd3GFAyKANi8WVz8c2jHYTf1CVScFaCjQw1iO3ZZdqGDxQPRErTg==", + "@motionone/generators": { + "version": "10.17.0", + "resolved": "https://registry.npmjs.org/@motionone/generators/-/generators-10.17.0.tgz", + "integrity": "sha512-T6Uo5bDHrZWhIfxG/2Aut7qyWQyJIWehk6OB4qNvr/jwA/SRmixwbd7SOrxZi1z5rH3LIeFFBKK1xHnSbGPZSQ==", "requires": { - "@lezer/common": "^1.0.0" + "@motionone/types": "^10.17.0", + "@motionone/utils": "^10.17.0", + "tslib": "^2.3.1" } }, - "@lezer/markdown": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@lezer/markdown/-/markdown-1.2.0.tgz", - "integrity": "sha512-d7MwsfAukZJo1GpPrcPGa3MxaFFOqNp0gbqF+3F7pTeNDOgeJN1muXzx1XXDPt+Ac+/voCzsH7qXqnn+xReG/g==", + "@motionone/types": { + "version": "10.17.0", + "resolved": 
"https://registry.npmjs.org/@motionone/types/-/types-10.17.0.tgz", + "integrity": "sha512-EgeeqOZVdRUTEHq95Z3t8Rsirc7chN5xFAPMYFobx8TPubkEfRSm5xihmMUkbaR2ErKJTUw3347QDPTHIW12IA==" + }, + "@motionone/utils": { + "version": "10.17.0", + "resolved": "https://registry.npmjs.org/@motionone/utils/-/utils-10.17.0.tgz", + "integrity": "sha512-bGwrki4896apMWIj9yp5rAS2m0xyhxblg6gTB/leWDPt+pb410W8lYWsxyurX+DH+gO1zsQsfx2su/c1/LtTpg==", "requires": { - "@lezer/common": "^1.0.0", - "@lezer/highlight": "^1.0.0" + "@motionone/types": "^10.17.0", + "hey-listen": "^1.0.8", + "tslib": "^2.3.1" } }, + "@n1ru4l/push-pull-async-iterable-iterator": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@n1ru4l/push-pull-async-iterable-iterator/-/push-pull-async-iterable-iterator-3.2.0.tgz", + "integrity": "sha512-3fkKj25kEjsfObL6IlKPAlHYPq/oYwUkkQ03zsTTiDjD7vg/RxjdiLeCydqtxHZP0JgsXL3D/X5oAkMGzuUp/Q==" + }, "@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -17990,6 +22234,8 @@ "resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.4.0.tgz", "integrity": "sha512-XJLGVL0DEclX5pcWa2N9SX1jCGTDd8l972biNooLFtjneuGqodupPQh6XseXIBBeVIMaaJ7bTcs3qGvXwsp4vg==", "dev": true, + "optional": true, + "peer": true, "requires": { "@parcel/watcher-android-arm64": "2.4.0", "@parcel/watcher-darwin-arm64": "2.4.0", @@ -18014,7 +22260,8 @@ "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.4.0.tgz", "integrity": "sha512-T/At5pansFuQ8VJLRx0C6C87cgfqIYhW2N/kBfLCUvDhCah0EnLLwaD/6MW3ux+rpgkpQAnMELOCTKlbwncwiA==", "dev": true, - "optional": true + "optional": true, + "peer": true }, "@peculiar/asn1-schema": { "version": "2.3.8", @@ -18037,16 +22284,16 @@ } }, "@peculiar/webcrypto": { - "version": "1.4.5", - "resolved": "https://registry.npmjs.org/@peculiar/webcrypto/-/webcrypto-1.4.5.tgz", - "integrity": "sha512-oDk93QCDGdxFRM8382Zdminzs44dg3M2+E5Np+JWkpqLDyJC9DviMh8F8mEJkYuUcUOGA5jHO5AJJ10MFWdbZw==", + "version": "1.4.6", + "resolved": "https://registry.npmjs.org/@peculiar/webcrypto/-/webcrypto-1.4.6.tgz", + "integrity": "sha512-YBcMfqNSwn3SujUJvAaySy5tlYbYm6tVt9SKoXu8BaTdKGROiJDgPR3TXpZdAKUfklzm3lRapJEAltiMQtBgZg==", "dev": true, "requires": { "@peculiar/asn1-schema": "^2.3.8", "@peculiar/json-schema": "^1.1.12", "pvtsutils": "^1.3.5", "tslib": "^2.6.2", - "webcrypto-core": "^1.7.8" + "webcrypto-core": "^1.7.9" } }, "@pkgjs/parseargs": { @@ -18056,12 +22303,12 @@ "optional": true }, "@playwright/test": { - "version": "1.41.1", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.41.1.tgz", - "integrity": "sha512-9g8EWTjiQ9yFBXc6HjCWe41msLpxEX0KhmfmPl9RPLJdfzL4F0lg2BdJ91O9azFdl11y1pmpwdjBiSxvqc+btw==", + "version": "1.42.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.42.1.tgz", + "integrity": "sha512-Gq9rmS54mjBL/7/MvBaNOBwbfnh7beHvS6oS4srqXFcQHpQCV1+c8JXWE8VLPyRDhgS3H8x8A7hztqI9VnwrAQ==", "dev": true, "requires": { - "playwright": "1.41.1" + "playwright": "1.42.1" } }, "@popperjs/core": { @@ -18086,6 +22333,18 @@ "@radix-ui/react-primitive": "1.0.3" } }, + "@radix-ui/react-collection": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.0.3.tgz", + "integrity": "sha512-3SzW+0PW7yBBoQlT8wNcGtaxaD0XSu0uLUFgrtHY08Acx05TaHaOmVLR73c0j/cqpDy53KBMO7s0dx2wmOIDIA==", + "requires": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + 
"@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-slot": "1.0.2" + } + }, "@radix-ui/react-compose-refs": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.1.tgz", @@ -18102,6 +22361,36 @@ "@babel/runtime": "^7.13.10" } }, + "@radix-ui/react-dialog": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.0.5.tgz", + "integrity": "sha512-GjWJX/AUpB703eEBanuBnIWdIXg6NvJFCXcNlSZk4xdszCdhrJgBoUd1cGk67vFO+WdA2pfI/plOpqz/5GUP6Q==", + "requires": { + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.1", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-dismissable-layer": "1.0.5", + "@radix-ui/react-focus-guards": "1.0.1", + "@radix-ui/react-focus-scope": "1.0.4", + "@radix-ui/react-id": "1.0.1", + "@radix-ui/react-portal": "1.0.4", + "@radix-ui/react-presence": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-slot": "1.0.2", + "@radix-ui/react-use-controllable-state": "1.0.1", + "aria-hidden": "^1.1.1", + "react-remove-scroll": "2.5.5" + } + }, + "@radix-ui/react-direction": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.0.1.tgz", + "integrity": "sha512-RXcvnXgyvYvBEOhCBuddKecVkoMiI10Jcm5cTI7abJRAHYfFxeu+FBQs/DvdxSYucxR5mna0dNsL6QFlds5TMA==", + "requires": { + "@babel/runtime": "^7.13.10" + } + }, "@radix-ui/react-dismissable-layer": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.5.tgz", @@ -18115,6 +22404,40 @@ "@radix-ui/react-use-escape-keydown": "1.0.3" } }, + "@radix-ui/react-dropdown-menu": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.0.6.tgz", + "integrity": "sha512-i6TuFOoWmLWq+M/eCLGd/bQ2HfAX1RJgvrBQ6AQLmzfvsLdefxbWu8G9zczcPFfcSPehz9GcpF6K9QYreFV8hA==", + "requires": { + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.1", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-id": "1.0.1", + "@radix-ui/react-menu": "2.0.6", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-use-controllable-state": "1.0.1" + } + }, + "@radix-ui/react-focus-guards": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.0.1.tgz", + "integrity": "sha512-Rect2dWbQ8waGzhMavsIbmSVCgYxkXLxxR3ZvCX79JOglzdEy4JXMb98lq4hPxUbLr77nP0UOGf4rcMU+s1pUA==", + "requires": { + "@babel/runtime": "^7.13.10" + } + }, + "@radix-ui/react-focus-scope": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.4.tgz", + "integrity": "sha512-sL04Mgvf+FmyvZeYfNu1EPAaaxD+aw7cYeIB9L9Fvq8+urhltTRaEo5ysKOpHuKPclsZcSUMKlN05x4u+CINpA==", + "requires": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-use-callback-ref": "1.0.1" + } + }, "@radix-ui/react-id": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.0.1.tgz", @@ -18124,6 +22447,55 @@ "@radix-ui/react-use-layout-effect": "1.0.1" } }, + "@radix-ui/react-menu": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.0.6.tgz", + "integrity": 
"sha512-BVkFLS+bUC8HcImkRKPSiVumA1VPOOEC5WBMiT+QAVsPzW1FJzI9KnqgGxVDPBcql5xXrHkD3JOVoXWEXD8SYA==", + "requires": { + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.1", + "@radix-ui/react-collection": "1.0.3", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-direction": "1.0.1", + "@radix-ui/react-dismissable-layer": "1.0.5", + "@radix-ui/react-focus-guards": "1.0.1", + "@radix-ui/react-focus-scope": "1.0.4", + "@radix-ui/react-id": "1.0.1", + "@radix-ui/react-popper": "1.1.3", + "@radix-ui/react-portal": "1.0.4", + "@radix-ui/react-presence": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-roving-focus": "1.0.4", + "@radix-ui/react-slot": "1.0.2", + "@radix-ui/react-use-callback-ref": "1.0.1", + "aria-hidden": "^1.1.1", + "react-remove-scroll": "2.5.5" + } + }, + "@radix-ui/react-popover": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.0.7.tgz", + "integrity": "sha512-shtvVnlsxT6faMnK/a7n0wptwBD23xc1Z5mdrtKLwVEfsEMXodS0r5s0/g5P0hX//EKYZS2sxUjqfzlg52ZSnQ==", + "requires": { + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.1", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-dismissable-layer": "1.0.5", + "@radix-ui/react-focus-guards": "1.0.1", + "@radix-ui/react-focus-scope": "1.0.4", + "@radix-ui/react-id": "1.0.1", + "@radix-ui/react-popper": "1.1.3", + "@radix-ui/react-portal": "1.0.4", + "@radix-ui/react-presence": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-slot": "1.0.2", + "@radix-ui/react-use-controllable-state": "1.0.1", + "aria-hidden": "^1.1.1", + "react-remove-scroll": "2.5.5" + } + }, "@radix-ui/react-popper": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.1.3.tgz", @@ -18170,6 +22542,33 @@ "@radix-ui/react-slot": "1.0.2" } }, + "@radix-ui/react-progress": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-progress/-/react-progress-1.0.3.tgz", + "integrity": "sha512-5G6Om/tYSxjSeEdrb1VfKkfZfn/1IlPWd731h2RfPuSbIfNUgfqAwbKfJCg/PP6nuUCTrYzalwHSpSinoWoCag==", + "requires": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-primitive": "1.0.3" + } + }, + "@radix-ui/react-roving-focus": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.0.4.tgz", + "integrity": "sha512-2mUg5Mgcu001VkGy+FfzZyzbmuUWzgWkj3rvv4yu+mLw03+mTzbxZHvfcGyFp2b8EkQeMkpRQ5FiA2Vr2O6TeQ==", + "requires": { + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.1", + "@radix-ui/react-collection": "1.0.3", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-direction": "1.0.1", + "@radix-ui/react-id": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-use-callback-ref": "1.0.1", + "@radix-ui/react-use-controllable-state": "1.0.1" + } + }, "@radix-ui/react-slot": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.0.2.tgz", @@ -18269,9 +22668,9 @@ } }, "@remix-run/router": { - "version": "1.14.2", - "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.14.2.tgz", - "integrity": "sha512-ACXpdMM9hmKZww21yEqWwiLws/UPLhNKvimN8RrYSqPSvB3ov7sLvAcfvaxePeLvccTQKGdkDIhLYApZVDFuKg==" + "version": "1.15.3", + "resolved": 
"https://registry.npmjs.org/@remix-run/router/-/router-1.15.3.tgz", + "integrity": "sha512-Oy8rmScVrVxWZVOpEF57ovlnhpZ8CCPlnIIumVcV9nFdiSIrus99+Lw78ekXyGvVDlIsFJbSfmSovJUhCWYV3w==" }, "@repeaterjs/repeater": { "version": "3.0.5", @@ -18289,103 +22688,101 @@ "picomatch": "^2.3.1" } }, - "@sentry-internal/feedback": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry-internal/feedback/-/feedback-7.99.0.tgz", - "integrity": "sha512-exIO1o+bE0MW4z30FxC0cYzJ4ZHSMlDPMHCBDPzU+MWGQc/fb8s58QUrx5Dnm6HTh9G3H+YlroCxIo9u0GSwGQ==", - "requires": { - "@sentry/core": "7.99.0", - "@sentry/types": "7.99.0", - "@sentry/utils": "7.99.0" - } + "@rollup/rollup-android-arm-eabi": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.14.0.tgz", + "integrity": "sha512-jwXtxYbRt1V+CdQSy6Z+uZti7JF5irRKF8hlKfEnF/xJpcNGuuiZMBvuoYM+x9sr9iWGnzrlM0+9hvQ1kgkf1w==", + "optional": true }, - "@sentry-internal/replay-canvas": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry-internal/replay-canvas/-/replay-canvas-7.99.0.tgz", - "integrity": "sha512-PoIkfusToDq0snfl2M6HJx/1KJYtXxYhQplrn11kYadO04SdG0XGXf4h7wBTMEQ7LDEAtQyvsOu4nEQtTO3YjQ==", - "requires": { - "@sentry/core": "7.99.0", - "@sentry/replay": "7.99.0", - "@sentry/types": "7.99.0", - "@sentry/utils": "7.99.0" - } + "@rollup/rollup-android-arm64": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.14.0.tgz", + "integrity": "sha512-fI9nduZhCccjzlsA/OuAwtFGWocxA4gqXGTLvOyiF8d+8o0fZUeSztixkYjcGq1fGZY3Tkq4yRvHPFxU+jdZ9Q==", + "optional": true }, - "@sentry-internal/tracing": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry-internal/tracing/-/tracing-7.99.0.tgz", - "integrity": "sha512-z3JQhHjoM1KdM20qrHwRClKJrNLr2CcKtCluq7xevLtXHJWNAQQbafnWD+Aoj85EWXBzKt9yJMv2ltcXJ+at+w==", - "requires": { - "@sentry/core": "7.99.0", - "@sentry/types": "7.99.0", - "@sentry/utils": "7.99.0" - } + "@rollup/rollup-darwin-arm64": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.14.0.tgz", + "integrity": "sha512-BcnSPRM76/cD2gQC+rQNGBN6GStBs2pl/FpweW8JYuz5J/IEa0Fr4AtrPv766DB/6b2MZ/AfSIOSGw3nEIP8SA==", + "optional": true }, - "@sentry/browser": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry/browser/-/browser-7.99.0.tgz", - "integrity": "sha512-bgfoUv3wkwwLgN5YUOe0ibB3y268ZCnamZh6nLFqnY/UBKC1+FXWFdvzVON/XKUm62LF8wlpCybOf08ebNj2yg==", - "requires": { - "@sentry-internal/feedback": "7.99.0", - "@sentry-internal/replay-canvas": "7.99.0", - "@sentry-internal/tracing": "7.99.0", - "@sentry/core": "7.99.0", - "@sentry/replay": "7.99.0", - "@sentry/types": "7.99.0", - "@sentry/utils": "7.99.0" - } + "@rollup/rollup-darwin-x64": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.14.0.tgz", + "integrity": "sha512-LDyFB9GRolGN7XI6955aFeI3wCdCUszFWumWU0deHA8VpR3nWRrjG6GtGjBrQxQKFevnUTHKCfPR4IvrW3kCgQ==", + "optional": true }, - "@sentry/core": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.99.0.tgz", - "integrity": "sha512-vOAtzcAXEUtS/oW7wi3wMkZ3hsb5Ch96gKyrrj/mXdOp2zrcwdNV6N9/pawq2E9P/7Pw8AXw4CeDZztZrjQLuA==", - "requires": { - "@sentry/types": "7.99.0", - "@sentry/utils": "7.99.0" - } + "@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.14.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.14.0.tgz", + "integrity": "sha512-ygrGVhQP47mRh0AAD0zl6QqCbNsf0eTo+vgwkY6LunBcg0f2Jv365GXlDUECIyoXp1kKwL5WW6rsO429DBY/bA==", + "optional": true }, - "@sentry/react": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry/react/-/react-7.99.0.tgz", - "integrity": "sha512-RtHwgzMHJhzJfSQpVG0SDPQYMTGDX3Q37/YWI59S4ALMbSW4/F6n/eQAvGVYZKbh2UCSqgFuRWaXOYkSZT17wA==", - "requires": { - "@sentry/browser": "7.99.0", - "@sentry/core": "7.99.0", - "@sentry/types": "7.99.0", - "@sentry/utils": "7.99.0", - "hoist-non-react-statics": "^3.3.2" - } + "@rollup/rollup-linux-arm64-gnu": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.14.0.tgz", + "integrity": "sha512-x+uJ6MAYRlHGe9wi4HQjxpaKHPM3d3JjqqCkeC5gpnnI6OWovLdXTpfa8trjxPLnWKyBsSi5kne+146GAxFt4A==", + "optional": true }, - "@sentry/replay": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry/replay/-/replay-7.99.0.tgz", - "integrity": "sha512-gyN/I2WpQrLAZDT+rScB/0jnFL2knEVBo8U8/OVt8gNP20Pq8T/rDZKO/TG0cBfvULDUbJj2P4CJryn2p/O2rA==", - "requires": { - "@sentry-internal/tracing": "7.99.0", - "@sentry/core": "7.99.0", - "@sentry/types": "7.99.0", - "@sentry/utils": "7.99.0" - } + "@rollup/rollup-linux-arm64-musl": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.14.0.tgz", + "integrity": "sha512-nrRw8ZTQKg6+Lttwqo6a2VxR9tOroa2m91XbdQ2sUUzHoedXlsyvY1fN4xWdqz8PKmf4orDwejxXHjh7YBGUCA==", + "optional": true }, - "@sentry/tracing": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry/tracing/-/tracing-7.99.0.tgz", - "integrity": "sha512-Cf622gSeamiSsi0JEj3PTXnq019OymaCrGf91x1d6OPyJ5jAXdlNuhw7NkqCEw8euIhhULuS81l5nGfBrgjj9Q==", - "requires": { - "@sentry-internal/tracing": "7.99.0" - } + "@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.14.0.tgz", + "integrity": "sha512-xV0d5jDb4aFu84XKr+lcUJ9y3qpIWhttO3Qev97z8DKLXR62LC3cXT/bMZXrjLF9X+P5oSmJTzAhqwUbY96PnA==", + "optional": true }, - "@sentry/types": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.99.0.tgz", - "integrity": "sha512-94qwOw4w40sAs5mCmzcGyj8ZUu/KhnWnuMZARRq96k+SjRW/tHFAOlIdnFSrt3BLPvSOK7R3bVAskZQ0N4FTmA==" + "@rollup/rollup-linux-riscv64-gnu": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.14.0.tgz", + "integrity": "sha512-SDDhBQwZX6LPRoPYjAZWyL27LbcBo7WdBFWJi5PI9RPCzU8ijzkQn7tt8NXiXRiFMJCVpkuMkBf4OxSxVMizAw==", + "optional": true }, - "@sentry/utils": { - "version": "7.99.0", - "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.99.0.tgz", - "integrity": "sha512-cYZy5WNTkWs5GgggGnjfGqC44CWir0pAv4GVVSx0fsup4D4pMKBJPrtub15f9uC+QkUf3vVkqwpBqeFxtmJQTQ==", - "requires": { - "@sentry/types": "7.99.0" - } + "@rollup/rollup-linux-s390x-gnu": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.14.0.tgz", + "integrity": "sha512-RxB/qez8zIDshNJDufYlTT0ZTVut5eCpAZ3bdXDU9yTxBzui3KhbGjROK2OYTTor7alM7XBhssgoO3CZ0XD3qA==", + "optional": true + }, + "@rollup/rollup-linux-x64-gnu": { + "version": "4.14.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.14.0.tgz", + "integrity": "sha512-C6y6z2eCNCfhZxT9u+jAM2Fup89ZjiG5pIzZIDycs1IwESviLxwkQcFRGLjnDrP+PT+v5i4YFvlcfAs+LnreXg==", + "optional": true + }, + "@rollup/rollup-linux-x64-musl": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.14.0.tgz", + "integrity": "sha512-i0QwbHYfnOMYsBEyjxcwGu5SMIi9sImDVjDg087hpzXqhBSosxkE7gyIYFHgfFl4mr7RrXksIBZ4DoLoP4FhJg==", + "optional": true + }, + "@rollup/rollup-win32-arm64-msvc": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.14.0.tgz", + "integrity": "sha512-Fq52EYb0riNHLBTAcL0cun+rRwyZ10S9vKzhGKKgeD+XbwunszSY0rVMco5KbOsTlwovP2rTOkiII/fQ4ih/zQ==", + "optional": true + }, + "@rollup/rollup-win32-ia32-msvc": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.14.0.tgz", + "integrity": "sha512-e/PBHxPdJ00O9p5Ui43+vixSgVf4NlLsmV6QneGERJ3lnjIua/kim6PRFe3iDueT1rQcgSkYP8ZBBXa/h4iPvw==", + "optional": true + }, + "@rollup/rollup-win32-x64-msvc": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.14.0.tgz", + "integrity": "sha512-aGg7iToJjdklmxlUlJh/PaPNa4PmqHfyRMLunbL3eaMO0gp656+q1zOKkpJ/CVe9CryJv6tAN1HDoR8cNGzkag==", + "optional": true + }, + "@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true }, "@svgr/babel-plugin-add-jsx-attribute": { "version": "8.0.0", @@ -18460,19 +22857,6 @@ "camelcase": "^6.2.0", "cosmiconfig": "^8.1.3", "snake-case": "^3.0.4" - }, - "dependencies": { - "cosmiconfig": { - "version": "8.3.6", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", - "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", - "requires": { - "import-fresh": "^3.3.0", - "js-yaml": "^4.1.0", - "parse-json": "^5.2.0", - "path-type": "^4.0.0" - } - } } }, "@svgr/hast-util-to-babel-ast": { @@ -18495,6 +22879,32 @@ "svg-parser": "^2.0.4" } }, + "@svgr/plugin-svgo": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-8.1.0.tgz", + "integrity": "sha512-Ywtl837OGO9pTLIN/onoWLmDQ4zFUycI1g76vuKGEz6evR/ZTJlJuz3G/fIkb6OVBJ2g0o6CGJzaEjfmEo3AHA==", + "requires": { + "cosmiconfig": "^8.1.3", + "deepmerge": "^4.3.1", + "svgo": "^3.0.2" + } + }, + "@svgr/rollup": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@svgr/rollup/-/rollup-8.1.0.tgz", + "integrity": "sha512-0XR1poYvPQoPpmfDYLEqUGu5ePAQ4pdgN3VFsZBNAeze7qubVpsIY1o1R6PZpKep/DKu33GSm2NhwpCLkMs2Cw==", + "requires": { + "@babel/core": "^7.21.3", + "@babel/plugin-transform-react-constant-elements": "^7.21.3", + "@babel/preset-env": "^7.20.2", + "@babel/preset-react": "^7.18.6", + "@babel/preset-typescript": "^7.21.0", + "@rollup/pluginutils": "^5.0.2", + "@svgr/core": "8.1.0", + "@svgr/plugin-jsx": "8.1.0", + "@svgr/plugin-svgo": "8.1.0" + } + }, "@tailwindcss/forms": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.5.7.tgz", @@ -18533,9 +22943,9 @@ } }, "@testing-library/react": { - "version": "14.2.0", - "resolved": 
"https://registry.npmjs.org/@testing-library/react/-/react-14.2.0.tgz", - "integrity": "sha512-7uBnPHyOG6nDGCzv8SLeJbSa33ZoYw7swYpSLIgJvBALdq7l9zPNk33om4USrxy1lKTxXaVfufzLmq83WNfWIw==", + "version": "14.2.2", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-14.2.2.tgz", + "integrity": "sha512-SOUuM2ysCvjUWBXTNfQ/ztmnKDmqaiPV3SvoIuyxMUca45rbSWWAT/qB8CUs/JQ/ux/8JFs9DNdFQ3f6jH3crA==", "dev": true, "requires": { "@babel/runtime": "^7.12.5", @@ -18543,11 +22953,10 @@ "@types/react-dom": "^18.0.0" } }, - "@tootallnate/once": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", - "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", - "dev": true + "@trysound/sax": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz", + "integrity": "sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==" }, "@tsconfig/node10": { "version": "1.0.9", @@ -18616,19 +23025,12 @@ "@babel/types": "^7.20.7" } }, - "@types/chai": { - "version": "4.3.11", - "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.11.tgz", - "integrity": "sha512-qQR1dr2rGIHYlJulmr8Ioq3De0Le9E4MJ5AiaeAETJJpndT1uUNHsGFK3L/UIu+rbkQSdj8J/w2bCsBZc/Y5fQ==", - "dev": true - }, - "@types/chai-subset": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/@types/chai-subset/-/chai-subset-1.3.5.tgz", - "integrity": "sha512-c2mPnw+xHtXDoHmdtcCXGwyLMiauiAyxWMzhGpqHC4nqI/Y5G2XhTampslK2rb59kpcuHon03UH8W6iYUzw88A==", - "dev": true, + "@types/codemirror": { + "version": "5.60.15", + "resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-5.60.15.tgz", + "integrity": "sha512-dTOvwEQ+ouKJ/rE9LT1Ue2hmP6H1mZv5+CCnNWu2qtiOe2LQa9lCprEY20HxiDmV/Bxh+dXjywmy5aKvoGjULA==", "requires": { - "@types/chai": "*" + "@types/tern": "*" } }, "@types/d3-array": { @@ -18744,6 +23146,15 @@ "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", "dev": true }, + "@types/loadable__component": { + "version": "5.13.9", + "resolved": "https://registry.npmjs.org/@types/loadable__component/-/loadable__component-5.13.9.tgz", + "integrity": "sha512-QWOtIkwZqHNdQj3nixQ8oyihQiTMKZLk/DNuvNxMSbTfxf47w+kqcbnxlUeBgAxdOtW0Dh48dTAIp83iJKtnrQ==", + "dev": true, + "requires": { + "@types/react": "*" + } + }, "@types/mdast": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.3.tgz", @@ -18758,16 +23169,13 @@ "integrity": "sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==" }, "@types/node": { - "version": "16.18.77", - "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.77.tgz", - "integrity": "sha512-zwqAbRkHjGlxH9PBv8i9dmeaDpBRgfQDSFuREMF2Z+WUi8uc13gfRquMV/8LxBqwm+7jBz+doTVkEEA1CIWOnQ==", - "devOptional": true - }, - "@types/parse-json": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.2.tgz", - "integrity": "sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==", - "dev": true + "version": "20.12.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.3.tgz", + "integrity": "sha512-sD+ia2ubTeWrOu+YMF+MTAB7E+O7qsMqAbMfW7DG3K1URwhZ5hN1pLlRVGbf4wDFzSfikL05M17EyorS86jShw==", + "devOptional": true, + "requires": { + "undici-types": "~5.26.4" + } }, "@types/prismjs": { "version": "1.26.3", @@ 
-18781,21 +23189,20 @@ "integrity": "sha512-ga8y9v9uyeiLdpKddhxYQkxNDrfvuPrlFb0N1qnZZByvcElJaXthF1UhvCh9TLWJBEHeNtdnbysW7Y6Uq8CVng==" }, "@types/ramda": { - "version": "0.28.25", - "resolved": "https://registry.npmjs.org/@types/ramda/-/ramda-0.28.25.tgz", - "integrity": "sha512-HrQNqQAGcITpn9HAJFamDxm7iZeeXiP/95pN5OMbNniDjzCCeOHbBKNGmUy8NRi0fhYS+/cXeo91MFC+06gbow==", + "version": "0.29.12", + "resolved": "https://registry.npmjs.org/@types/ramda/-/ramda-0.29.12.tgz", + "integrity": "sha512-sgIEjpJhdQPB52gDF4aphs9nl0xe54CR22DPdWqT8gQHjZYmVApgA0R3/CpMbl0Y8az2TEZrPNL2zy0EvjbkLA==", "dev": true, "requires": { - "ts-toolbelt": "^6.15.1" + "types-ramda": "^0.29.10" } }, "@types/react": { - "version": "18.2.48", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.48.tgz", - "integrity": "sha512-qboRCl6Ie70DQQG9hhNREz81jqC1cs9EVNcjQ1AU+jH6NFfSAhVVbrrY/+nSF+Bsk4AOwm9Qa61InvMCyV+H3w==", + "version": "18.2.74", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.74.tgz", + "integrity": "sha512-9AEqNZZyBx8OdZpxzQlaFEVCSFUM2YXJH46yPOiOpm078k6ZLOCcuAzGum/zK8YBwY+dbahVNbHrbgrAwIRlqw==", "requires": { "@types/prop-types": "*", - "@types/scheduler": "*", "csstype": "^3.0.2" } }, @@ -18809,12 +23216,23 @@ "@types/react": "*", "date-fns": "^2.0.1", "react-popper": "^2.2.5" + }, + "dependencies": { + "date-fns": { + "version": "2.30.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz", + "integrity": "sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==", + "dev": true, + "requires": { + "@babel/runtime": "^7.21.0" + } + } } }, "@types/react-dom": { - "version": "18.2.18", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.18.tgz", - "integrity": "sha512-TJxDm6OfAX2KJWJdMEVTwWke5Sc/E/RlnPGvGfS0W7+6ocy2xhDVQVh/KvC2Uf7kACs+gDytdusDSdWfWkaNzw==", + "version": "18.2.23", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.23.tgz", + "integrity": "sha512-ZQ71wgGOTmDYpnav2knkjr3qXdAFu0vsk8Ci5w3pGAIdj7/kKAyn+VsQDhXsmzzzepAiI9leWMmubXz690AI/A==", "devOptional": true, "requires": { "@types/react": "*" @@ -18829,11 +23247,6 @@ "@types/react": "*" } }, - "@types/scheduler": { - "version": "0.16.8", - "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.8.tgz", - "integrity": "sha512-WZLiwShhwLRmeV6zH+GkbOFT6Z6VklCItrDioxUnv+u4Ll+8vKeFySoFyK/0ctcRpOmwAicELfmys1sDc/Rw+A==" - }, "@types/semver": { "version": "7.5.6", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.6.tgz", @@ -18861,6 +23274,14 @@ "integrity": "sha512-0vWLNK2D5MT9dg0iOo8GlKguPAU02QjmZitPEsXRuJXU/OGIOt9vT9Fc26wtYuavLxtO45v9PGleoL9Z0k1LHg==", "dev": true }, + "@types/tern": { + "version": "0.23.9", + "resolved": "https://registry.npmjs.org/@types/tern/-/tern-0.23.9.tgz", + "integrity": "sha512-ypzHFE/wBzh+BlH6rrBgS5I/Z7RD21pGhZ2rltb/+ZrVM1awdZwjx7hE5XfuYgHWk9uvV5HLZN3SloevCAp3Bw==", + "requires": { + "@types/estree": "*" + } + }, "@types/unist": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.2.tgz", @@ -18886,21 +23307,22 @@ } }, "@typescript-eslint/eslint-plugin": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.62.0.tgz", - "integrity": "sha512-TiZzBSJja/LbhNPvk6yc0JrX9XqhQ0hdh6M2svYfsHGejaKFIAGd9MQ+ERIMzLGlN/kZoYIgdxFV0PuljTKXag==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.5.0.tgz", + 
"integrity": "sha512-HpqNTH8Du34nLxbKgVMGljZMG0rJd2O9ecvr2QLYp+7512ty1j42KnsFwspPXg1Vh8an9YImf6CokUBltisZFQ==", "dev": true, "requires": { - "@eslint-community/regexpp": "^4.4.0", - "@typescript-eslint/scope-manager": "5.62.0", - "@typescript-eslint/type-utils": "5.62.0", - "@typescript-eslint/utils": "5.62.0", + "@eslint-community/regexpp": "^4.5.1", + "@typescript-eslint/scope-manager": "7.5.0", + "@typescript-eslint/type-utils": "7.5.0", + "@typescript-eslint/utils": "7.5.0", + "@typescript-eslint/visitor-keys": "7.5.0", "debug": "^4.3.4", "graphemer": "^1.4.0", - "ignore": "^5.2.0", - "natural-compare-lite": "^1.4.0", - "semver": "^7.3.7", - "tsutils": "^3.21.0" + "ignore": "^5.2.4", + "natural-compare": "^1.4.0", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" }, "dependencies": { "lru-cache": { @@ -18930,60 +23352,71 @@ } }, "@typescript-eslint/parser": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.62.0.tgz", - "integrity": "sha512-VlJEV0fOQ7BExOsHYAGrgbEiZoi8D+Bl2+f6V2RrXerRSylnp+ZBHmPvaIa8cz0Ajx7WO7Z5RqfgYg7ED1nRhA==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.5.0.tgz", + "integrity": "sha512-cj+XGhNujfD2/wzR1tabNsidnYRaFfEkcULdcIyVBYcXjBvBKOes+mpMBP7hMpOyk+gBcfXsrg4NBGAStQyxjQ==", "dev": true, "requires": { - "@typescript-eslint/scope-manager": "5.62.0", - "@typescript-eslint/types": "5.62.0", - "@typescript-eslint/typescript-estree": "5.62.0", + "@typescript-eslint/scope-manager": "7.5.0", + "@typescript-eslint/types": "7.5.0", + "@typescript-eslint/typescript-estree": "7.5.0", + "@typescript-eslint/visitor-keys": "7.5.0", "debug": "^4.3.4" } }, "@typescript-eslint/scope-manager": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.62.0.tgz", - "integrity": "sha512-VXuvVvZeQCQb5Zgf4HAxc04q5j+WrNAtNh9OwCsCgpKqESMTu3tF/jhZ3xG6T4NZwWl65Bg8KuS2uEvhSfLl0w==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.5.0.tgz", + "integrity": "sha512-Z1r7uJY0MDeUlql9XJ6kRVgk/sP11sr3HKXn268HZyqL7i4cEfrdFuSSY/0tUqT37l5zT0tJOsuDP16kio85iA==", "dev": true, "requires": { - "@typescript-eslint/types": "5.62.0", - "@typescript-eslint/visitor-keys": "5.62.0" + "@typescript-eslint/types": "7.5.0", + "@typescript-eslint/visitor-keys": "7.5.0" } }, "@typescript-eslint/type-utils": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.62.0.tgz", - "integrity": "sha512-xsSQreu+VnfbqQpW5vnCJdq1Z3Q0U31qiWmRhr98ONQmcp/yhiPJFPq8MXiJVLiksmOKSjIldZzkebzHuCGzew==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.5.0.tgz", + "integrity": "sha512-A021Rj33+G8mx2Dqh0nMO9GyjjIBK3MqgVgZ2qlKf6CJy51wY/lkkFqq3TqqnH34XyAHUkq27IjlUkWlQRpLHw==", "dev": true, "requires": { - "@typescript-eslint/typescript-estree": "5.62.0", - "@typescript-eslint/utils": "5.62.0", + "@typescript-eslint/typescript-estree": "7.5.0", + "@typescript-eslint/utils": "7.5.0", "debug": "^4.3.4", - "tsutils": "^3.21.0" + "ts-api-utils": "^1.0.1" } }, "@typescript-eslint/types": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.62.0.tgz", - "integrity": "sha512-87NVngcbVXUahrRTqIK27gD2t5Cu1yuCXxbLcFtCzZGlfyVWWh8mLHkoxzjsB6DDNnvdL+fW8MiwPEJyGJQDgQ==", + "version": "7.5.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/types/-/types-7.5.0.tgz", + "integrity": "sha512-tv5B4IHeAdhR7uS4+bf8Ov3k793VEVHd45viRRkehIUZxm0WF82VPiLgHzA/Xl4TGPg1ZD49vfxBKFPecD5/mg==", "dev": true }, "@typescript-eslint/typescript-estree": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.62.0.tgz", - "integrity": "sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.5.0.tgz", + "integrity": "sha512-YklQQfe0Rv2PZEueLTUffiQGKQneiIEKKnfIqPIOxgM9lKSZFCjT5Ad4VqRKj/U4+kQE3fa8YQpskViL7WjdPQ==", "dev": true, "requires": { - "@typescript-eslint/types": "5.62.0", - "@typescript-eslint/visitor-keys": "5.62.0", + "@typescript-eslint/types": "7.5.0", + "@typescript-eslint/visitor-keys": "7.5.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" + "minimatch": "9.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" }, "dependencies": { + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0" + } + }, "lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -18993,10 +23426,19 @@ "yallist": "^4.0.0" } }, + "minimatch": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "dev": true, + "requires": { + "brace-expansion": "^2.0.1" + } + }, "semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "dev": true, "requires": { "lru-cache": "^6.0.0" @@ -19011,19 +23453,18 @@ } }, "@typescript-eslint/utils": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.62.0.tgz", - "integrity": "sha512-n8oxjeb5aIbPFEtmQxQYOLI0i9n5ySBEY/ZEHHZqKQSFnxio1rv6dthascc9dLuwrL0RC5mPCxB7vnAVGAYWAQ==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.5.0.tgz", + "integrity": "sha512-3vZl9u0R+/FLQcpy2EHyRGNqAS/ofJ3Ji8aebilfJe+fobK8+LbIFmrHciLVDxjDoONmufDcnVSF38KwMEOjzw==", "dev": true, "requires": { - "@eslint-community/eslint-utils": "^4.2.0", - "@types/json-schema": "^7.0.9", - "@types/semver": "^7.3.12", - "@typescript-eslint/scope-manager": "5.62.0", - "@typescript-eslint/types": "5.62.0", - "@typescript-eslint/typescript-estree": "5.62.0", - "eslint-scope": "^5.1.1", - "semver": "^7.3.7" + "@eslint-community/eslint-utils": "^4.4.0", + "@types/json-schema": "^7.0.12", + "@types/semver": "^7.5.0", + "@typescript-eslint/scope-manager": "7.5.0", + "@typescript-eslint/types": "7.5.0", + "@typescript-eslint/typescript-estree": "7.5.0", + "semver": "^7.5.4" }, "dependencies": { "lru-cache": { @@ -19036,9 +23477,9 @@ } }, 
"semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "dev": true, "requires": { "lru-cache": "^6.0.0" @@ -19053,226 +23494,236 @@ } }, "@typescript-eslint/visitor-keys": { - "version": "5.62.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.62.0.tgz", - "integrity": "sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.5.0.tgz", + "integrity": "sha512-mcuHM/QircmA6O7fy6nn2w/3ditQkj+SgtOc8DW3uQ10Yfj42amm2i+6F2K4YAOPNNTmE6iM1ynM6lrSwdendA==", "dev": true, "requires": { - "@typescript-eslint/types": "5.62.0", - "eslint-visitor-keys": "^3.3.0" + "@typescript-eslint/types": "7.5.0", + "eslint-visitor-keys": "^3.4.1" } }, "@uiw/color-convert": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/color-convert/-/color-convert-2.0.6.tgz", - "integrity": "sha512-LDu9r8geEjUgGzsuTkUu6rV5SCqR9r2liVYQxH42tZ5NkFx87Oswz/lpwthCoulhgjSWA+QEjjE3XB8Z9ove1g==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/color-convert/-/color-convert-2.1.1.tgz", + "integrity": "sha512-L421mBAT2NRsmYv7BQvofOEwV0iKee1upPVxMjo2NnkJWyIu4I+H1RxK9m3uT8yvcOlStZhv7BQBsFyJCGmIMg==", "requires": {} }, "@uiw/react-color": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color/-/react-color-2.0.6.tgz", - "integrity": "sha512-0QdRfGFgIUtIMnoUzopVApORPXP8kvUqmk9iWJFF+VcKVyrNCTc8x05HLi/8HwPeawS/tQk4LdeGiwasba8/MA==", - "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-alpha": "2.0.6", - "@uiw/react-color-block": "2.0.6", - "@uiw/react-color-chrome": "2.0.6", - "@uiw/react-color-circle": "2.0.6", - "@uiw/react-color-colorful": "2.0.6", - "@uiw/react-color-compact": "2.0.6", - "@uiw/react-color-editable-input": "2.0.6", - "@uiw/react-color-editable-input-hsla": "2.0.6", - "@uiw/react-color-editable-input-rgba": "2.0.6", - "@uiw/react-color-github": "2.0.6", - "@uiw/react-color-hue": "2.0.6", - "@uiw/react-color-material": "2.0.6", - "@uiw/react-color-saturation": "2.0.6", - "@uiw/react-color-shade-slider": "2.0.6", - "@uiw/react-color-sketch": "2.0.6", - "@uiw/react-color-slider": "2.0.6", - "@uiw/react-color-swatch": "2.0.6", - "@uiw/react-color-wheel": "2.0.6" + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color/-/react-color-2.1.1.tgz", + "integrity": "sha512-RE95rGzlOej848nK0onqxk2N+asrHpp3LEH2h7VJkcdJLOK54jccnGKdCc2seNue3zpCIcwPcR38hOeHhfJLJg==", + "requires": { + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-alpha": "2.1.1", + "@uiw/react-color-block": "2.1.1", + "@uiw/react-color-chrome": "2.1.1", + "@uiw/react-color-circle": "2.1.1", + "@uiw/react-color-colorful": "2.1.1", + "@uiw/react-color-compact": "2.1.1", + "@uiw/react-color-editable-input": "2.1.1", + "@uiw/react-color-editable-input-hsla": "2.1.1", + "@uiw/react-color-editable-input-rgba": "2.1.1", + "@uiw/react-color-github": "2.1.1", + "@uiw/react-color-hue": "2.1.1", + "@uiw/react-color-material": "2.1.1", + "@uiw/react-color-name": "2.1.1", + "@uiw/react-color-saturation": "2.1.1", + 
"@uiw/react-color-shade-slider": "2.1.1", + "@uiw/react-color-sketch": "2.1.1", + "@uiw/react-color-slider": "2.1.1", + "@uiw/react-color-swatch": "2.1.1", + "@uiw/react-color-wheel": "2.1.1" } }, "@uiw/react-color-alpha": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-alpha/-/react-color-alpha-2.0.6.tgz", - "integrity": "sha512-EWV54kU6cWT+cpq6QTqJwtSgh3Hjdu/1umVrEC7v6vp3abHgKpmLBMznybY6zoeIh6+TnLnNLoyc8M+Jm0Aj7g==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-alpha/-/react-color-alpha-2.1.1.tgz", + "integrity": "sha512-6wvWLn4Dgb3jIaveLdjhSg2RJIWKJbRU/uHSFtEd8rvXebRt9P7NFr5YsnkHDBUitx9KFxRL6kaI/GQCYU+8nA==", "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-drag-event-interactive": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-drag-event-interactive": "2.1.1" } }, "@uiw/react-color-block": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-block/-/react-color-block-2.0.6.tgz", - "integrity": "sha512-GV3c7OCElnNq+BwdOD4HfbL3gGQlc1d4kod9XlzjaR6oMlWZ5PMEXeirf8Pn0c7xPUV+ltwAbgCmLkwaSxwTew==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-block/-/react-color-block-2.1.1.tgz", + "integrity": "sha512-c4xA42z7aLou8jBjxwLgUYZ+SiaZbVMADPLE/CcBi8EY/NcvvvtrL2wJGqE0g2Aqfey5RjB7nFxUeqSG1N00aA==", "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-editable-input": "2.0.6", - "@uiw/react-color-swatch": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-editable-input": "2.1.1", + "@uiw/react-color-swatch": "2.1.1" } }, "@uiw/react-color-chrome": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-chrome/-/react-color-chrome-2.0.6.tgz", - "integrity": "sha512-lcT9/r+IjYSCHJf6EH1Gb4XAlJUIkDy/1XJe7SYYmySig55daIs0LjenTw9U8mWQlqgGfMItGp9RFEQy9aJelQ==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-chrome/-/react-color-chrome-2.1.1.tgz", + "integrity": "sha512-tv51lG6Wol8skiclLXXc8yf5nAVig5OjYtuNxsnFr165GP1YJ/mdnS7OIprYF/wP5mz66W7K0iz/8hAIof5/ug==", "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-alpha": "2.0.6", - "@uiw/react-color-editable-input": "2.0.6", - "@uiw/react-color-editable-input-hsla": "2.0.6", - "@uiw/react-color-editable-input-rgba": "2.0.6", - "@uiw/react-color-github": "2.0.6", - "@uiw/react-color-hue": "2.0.6", - "@uiw/react-color-saturation": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-alpha": "2.1.1", + "@uiw/react-color-editable-input": "2.1.1", + "@uiw/react-color-editable-input-hsla": "2.1.1", + "@uiw/react-color-editable-input-rgba": "2.1.1", + "@uiw/react-color-github": "2.1.1", + "@uiw/react-color-hue": "2.1.1", + "@uiw/react-color-saturation": "2.1.1" } }, "@uiw/react-color-circle": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-circle/-/react-color-circle-2.0.6.tgz", - "integrity": "sha512-jCwzlKXC0YVQPP6zhogYjFifNxS8J6VBx2ADKv8t50We0lc3gcicrT2Db/EAC0WGp8yigp/J+4OD5BROiST2rw==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-circle/-/react-color-circle-2.1.1.tgz", + "integrity": "sha512-t/Wr6eT9GLOzywaTmJclOZ3NuirJlMk8eVxoLKM7eRePbN5WowIEMwug/towSU3YrBrEbSSWjZRhfVUqdh7kMw==", "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-swatch": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-swatch": "2.1.1" } }, "@uiw/react-color-colorful": { - "version": "2.0.6", - "resolved": 
"https://registry.npmjs.org/@uiw/react-color-colorful/-/react-color-colorful-2.0.6.tgz", - "integrity": "sha512-ZCUFeuviRJcJiu1MB1n1MbouzmrUMYYgLcWU3ZvGQOT0NkJ3PgCRJrKR/9p6X3zkDBfQ4QCUG88c9X7XiEb+SQ==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-colorful/-/react-color-colorful-2.1.1.tgz", + "integrity": "sha512-7cGRtYv+llXO7Tmpfska9HxjQAbkqBP5P63wned6JD/0lOM4KXELxhWI3044nO/Osi5r3FDsGh0HRqiLgUK75Q==", "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-alpha": "2.0.6", - "@uiw/react-color-hue": "2.0.6", - "@uiw/react-color-saturation": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-alpha": "2.1.1", + "@uiw/react-color-hue": "2.1.1", + "@uiw/react-color-saturation": "2.1.1" } }, "@uiw/react-color-compact": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-compact/-/react-color-compact-2.0.6.tgz", - "integrity": "sha512-NB3vDw6aNDzKFH4z9lnSMwd4QCc2SmAwAhglns38CwhoD75iwciMsv9iNLMiW7CCZhuJtWpIiSR36Vb9eIRm8Q==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-compact/-/react-color-compact-2.1.1.tgz", + "integrity": "sha512-Af9skc0Bx3lot2zg58SlpMoQEKqyMbKvr7y2O541Hodc89ykgEDJZXqLrKiKnacBWMZxSAcAHoSFf70RLeoTlw==", "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-editable-input": "2.0.6", - "@uiw/react-color-editable-input-rgba": "2.0.6", - "@uiw/react-color-swatch": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-editable-input": "2.1.1", + "@uiw/react-color-editable-input-rgba": "2.1.1", + "@uiw/react-color-swatch": "2.1.1" } }, "@uiw/react-color-editable-input": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-editable-input/-/react-color-editable-input-2.0.6.tgz", - "integrity": "sha512-m2aGFZU0nOtP6XYRaa4mRKv7654tG4YYnGtYXMkR32j4eCOAUEpAmMYbKtSFiB+vUPQKyA3tcEoKNa6alNp0vw==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-editable-input/-/react-color-editable-input-2.1.1.tgz", + "integrity": "sha512-mEohydHWV49iQ3RuH/3My20T7wDtOPzzGEBOMJeHIxMnN6FMwl9U1bAAgDb2ovnt5Ws0PaCWcBjNKHARpVSZ1Q==", "requires": {} }, "@uiw/react-color-editable-input-hsla": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-editable-input-hsla/-/react-color-editable-input-hsla-2.0.6.tgz", - "integrity": "sha512-ME3WcgP5piU8BED6pcSTe0LryIecr0GGLWTw2p9rfZgUlAaUIfdDHOTCXTCzxh9EWEQcclBlCotj1Nckcsjplg==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-editable-input-hsla/-/react-color-editable-input-hsla-2.1.1.tgz", + "integrity": "sha512-2Eqcd0hUa5qLVxT062Vf9vsxS2/6X+AJ6f6Wfs6/UAM6iUWqWPkJxajRmEtFfB6Zv5bcaYjhSZNGgeEf7azAvQ==", "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-editable-input-rgba": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-editable-input-rgba": "2.1.1" } }, "@uiw/react-color-editable-input-rgba": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-editable-input-rgba/-/react-color-editable-input-rgba-2.0.6.tgz", - "integrity": "sha512-EcEcjHkiQX8Ecuv6nYK2DWmR5oSp5d3VMifZovNCr8Q41C2p08AeFA16CgPnmlsWlghRYqe5ekY6bkwdIC5Q2Q==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-editable-input-rgba/-/react-color-editable-input-rgba-2.1.1.tgz", + "integrity": "sha512-6YtDaBWTXu27MK6s3HZty0qg3mYb4GN/8dI8T39R/qEiMX/SButMfC09pnygN74InyuG8MzobUg2GowfTRUG5A==", "requires": { - "@uiw/color-convert": "2.0.6", - 
"@uiw/react-color-editable-input": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-editable-input": "2.1.1" } }, "@uiw/react-color-github": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-github/-/react-color-github-2.0.6.tgz", - "integrity": "sha512-oDLRId+r+YEMr9vCDBzAZbrCW3qNGcgtzhWEgCupMZfIpglTMwR2iJmWQjylq66kXhwuBqLJe5r+uvXljqo4VA==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-github/-/react-color-github-2.1.1.tgz", + "integrity": "sha512-5balACHzjVqrkdEsGXI2Ir4iXQrTAHQ7uRzqY+op41uuciIb8yGI1PecJd2qIjUJhk/kZ4nmp6KAjQEAK2iVIQ==", "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-swatch": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-swatch": "2.1.1" } }, "@uiw/react-color-hue": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-hue/-/react-color-hue-2.0.6.tgz", - "integrity": "sha512-ytn3yH2SDM761IwoyXPBLKFSGpGxBHKopE3bgyw/5nuDq12J/oFy2uOLjBNAUy0qfZC2FARt6yCYqQcON9aChQ==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-hue/-/react-color-hue-2.1.1.tgz", + "integrity": "sha512-IvN2acCV35yRfmbscUQbNfwjKF+g51kMONv9j0zxDlTct2R0x4gatsVjA1tTpLv5UCIkFvhw80xg04QATrJ4Nw==", "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-alpha": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-alpha": "2.1.1" } }, "@uiw/react-color-material": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-material/-/react-color-material-2.0.6.tgz", - "integrity": "sha512-HylgLq4bArp8NFx/Ub78/Rt28+xnqhk72hK5sjd5PJyoD+1wuca+Hi1JhnRKCb4X4jNytMRol2Ny5Up/YTyiAg==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-material/-/react-color-material-2.1.1.tgz", + "integrity": "sha512-Pcp/kpBnTGYXqP0up3rqTdJWKtnD2XdiA5Zdh5bdwqssI+qHo0cVELXOnpwU3LiSCZykTDauvGoOqTWprvox4g==", + "requires": { + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-editable-input": "2.1.1", + "@uiw/react-color-editable-input-rgba": "2.1.1" + } + }, + "@uiw/react-color-name": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-name/-/react-color-name-2.1.1.tgz", + "integrity": "sha512-k+19NgPHPZ88oqzCfAcVd7fT4F6XywkeZkX3DDyRG3Skc8zuGdIS2xT7Ne7ZSQb31UT0+UfuOiwOST+r+kGnFA==", "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-editable-input": "2.0.6", - "@uiw/react-color-editable-input-rgba": "2.0.6" + "colors-named": "^1.0.1", + "colors-named-hex": "^1.0.1" } }, "@uiw/react-color-saturation": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-saturation/-/react-color-saturation-2.0.6.tgz", - "integrity": "sha512-wnM1GlxKjvFuEHhSOA/rxho2lqZyywcwscTPzgAxg59hrQ6ddUdDaiAWJiujJ+mkmGqf4xO6llAvf9epJC3AZg==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-saturation/-/react-color-saturation-2.1.1.tgz", + "integrity": "sha512-lg3ElCNuiHt7wsfR9FQpgFcg9zht+GAuVhemvgLq6twR62ZUgFd58in42T1F8l2ZpimXu8SgLGEtvc7XB2i8CQ==", "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-drag-event-interactive": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-drag-event-interactive": "2.1.1" } }, "@uiw/react-color-shade-slider": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-shade-slider/-/react-color-shade-slider-2.0.6.tgz", - "integrity": "sha512-N9BmJ4HoeYiHKvOXWRNp9if4o+gNN4VK7/OPo4IjWiyZUKqskYIi0qqvlafIQix1SmZPa8C4Ip9e4dPq80qrFQ==", + "version": 
"2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-shade-slider/-/react-color-shade-slider-2.1.1.tgz", + "integrity": "sha512-7DO2d53GGFR6fXPS7g3hUNTlQCwNjKSQum90h4HeJa5jxIQiuSpeMVPo6IoG8EAuP88Au5C55SYV+qjNgLvG8Q==", "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-alpha": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-alpha": "2.1.1" } }, "@uiw/react-color-sketch": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-sketch/-/react-color-sketch-2.0.6.tgz", - "integrity": "sha512-tSN/R0JsvLJMuIN3nptfuYhkD7brY9BH6nJOPLaydPCLzj5jMftIcTyqY5H/bDMd8ZNwlU71TCLkPS4CNdHJSQ==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-sketch/-/react-color-sketch-2.1.1.tgz", + "integrity": "sha512-hiHhwSJnMzRs9mUfOiqekPosQQ60mhzRN1LDfD/z4sW7GHxWgV9sl9jdoBeN3RRS0O4i/qHjNrJqTad6D4rV7g==", "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-alpha": "2.0.6", - "@uiw/react-color-editable-input": "2.0.6", - "@uiw/react-color-editable-input-rgba": "2.0.6", - "@uiw/react-color-hue": "2.0.6", - "@uiw/react-color-saturation": "2.0.6", - "@uiw/react-color-swatch": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-alpha": "2.1.1", + "@uiw/react-color-editable-input": "2.1.1", + "@uiw/react-color-editable-input-rgba": "2.1.1", + "@uiw/react-color-hue": "2.1.1", + "@uiw/react-color-saturation": "2.1.1", + "@uiw/react-color-swatch": "2.1.1" } }, "@uiw/react-color-slider": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-slider/-/react-color-slider-2.0.6.tgz", - "integrity": "sha512-cImrIBaqU04RmzJxkwS4QQATtF5OD2jj+wJCsPkRhcT1Zc+WD4JSe3CXgliVsyXQux4DHbND6NcCJjXyj5W3ig==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-slider/-/react-color-slider-2.1.1.tgz", + "integrity": "sha512-J0nqSpiJS4lZCUudAFo8sFMTZgiPgQ0iR4ADx1Hc5vGJr5KfpGwOVq68cUvqiAqXplUQZPVcjwoBhxl4M4fCzg==", "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-color-alpha": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-color-alpha": "2.1.1" } }, "@uiw/react-color-swatch": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-swatch/-/react-color-swatch-2.0.6.tgz", - "integrity": "sha512-fDS5oo25tJEW+Xzm8ZM+0t6WjTE7poOQhxbw9ePxVdlcg12SGIvOCiY0kQrox3QjCAgyjTx33poYNEa3uO6BAg==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-swatch/-/react-color-swatch-2.1.1.tgz", + "integrity": "sha512-soKsfVgflcKSBx47PaBocKZ0beIXfk9ruE4r9778mGnDDpxc2RC5zPNfvzQkSLKW+siXIS0cscuvb/8s1zK5jw==", "requires": { - "@uiw/color-convert": "2.0.6" + "@uiw/color-convert": "2.1.1" } }, "@uiw/react-color-wheel": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/@uiw/react-color-wheel/-/react-color-wheel-2.0.6.tgz", - "integrity": "sha512-7EfZ3db+NUd+2AQ7Nm7lsXmmVXCD65cZFhpTQuM3PIF8VgzwVTumXKBV2WyfCdo+alQowvN8DQCxdcmmjIozFg==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-color-wheel/-/react-color-wheel-2.1.1.tgz", + "integrity": "sha512-88TYL9GCStBNULEkZ6qlQt8z/jnAf1ZSJwpbVK1JGUwogPJaMAJt8FRSUfzTNpIwYA8ymgK9Y1seng6Z8YkS9Q==", "requires": { - "@uiw/color-convert": "2.0.6", - "@uiw/react-drag-event-interactive": "2.0.6" + "@uiw/color-convert": "2.1.1", + "@uiw/react-drag-event-interactive": "2.1.1" } }, "@uiw/react-drag-event-interactive": { - "version": "2.0.6", - "resolved": 
"https://registry.npmjs.org/@uiw/react-drag-event-interactive/-/react-drag-event-interactive-2.0.6.tgz", - "integrity": "sha512-LO3Q5x7NMAiIiZMwd6Yulpim+bYafs8ZYOhp+uOgY3Isvbke/me2Ix62qM+gQimEQjOYuH/9GZx5aC+uOcYo2w==", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@uiw/react-drag-event-interactive/-/react-drag-event-interactive-2.1.1.tgz", + "integrity": "sha512-hJjoJg9ZASzhY6HFwZSnNhx+BJ5rfqqUnpTm6ZtfjCO5DKRZW3CDios0cmMu2Ojvdu0a9qE9x8CURCUuoCzqxw==", "requires": {} }, "@ungap/structured-clone": { @@ -19292,46 +23743,54 @@ "react-refresh": "^0.14.0" } }, - "@vitest/coverage-c8": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/coverage-c8/-/coverage-c8-0.31.4.tgz", - "integrity": "sha512-VPx368m4DTcpA/P0v3YdVxl4QOSh1DbUcXURLRvDShrIB5KxOgfzw4Bn2R8AhAe/GyiWW/FIsJ/OJdYXCCiC1w==", + "@vitest/coverage-v8": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-1.4.0.tgz", + "integrity": "sha512-4hDGyH1SvKpgZnIByr9LhGgCEuF9DKM34IBLCC/fVfy24Z3+PZ+Ii9hsVBsHvY1umM1aGPEjceRkzxCfcQ10wg==", "dev": true, "requires": { "@ampproject/remapping": "^2.2.1", - "c8": "^7.13.0", - "magic-string": "^0.30.0", + "@bcoe/v8-coverage": "^0.2.3", + "debug": "^4.3.4", + "istanbul-lib-coverage": "^3.2.2", + "istanbul-lib-report": "^3.0.1", + "istanbul-lib-source-maps": "^5.0.4", + "istanbul-reports": "^3.1.6", + "magic-string": "^0.30.5", + "magicast": "^0.3.3", "picocolors": "^1.0.0", - "std-env": "^3.3.2" + "std-env": "^3.5.0", + "strip-literal": "^2.0.0", + "test-exclude": "^6.0.0", + "v8-to-istanbul": "^9.2.0" } }, "@vitest/expect": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-0.31.4.tgz", - "integrity": "sha512-tibyx8o7GUyGHZGyPgzwiaPaLDQ9MMuCOrc03BYT0nryUuhLbL7NV2r/q98iv5STlwMgaKuFJkgBW/8iPKwlSg==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-1.4.0.tgz", + "integrity": "sha512-Jths0sWCJZ8BxjKe+p+eKsoqev1/T8lYcrjavEaz8auEJ4jAVY0GwW3JKmdVU4mmNPLPHixh4GNXP7GFtAiDHA==", "dev": true, "requires": { - "@vitest/spy": "0.31.4", - "@vitest/utils": "0.31.4", - "chai": "^4.3.7" + "@vitest/spy": "1.4.0", + "@vitest/utils": "1.4.0", + "chai": "^4.3.10" } }, "@vitest/runner": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-0.31.4.tgz", - "integrity": "sha512-Wgm6UER+gwq6zkyrm5/wbpXGF+g+UBB78asJlFkIOwyse0pz8lZoiC6SW5i4gPnls/zUcPLWS7Zog0LVepXnpg==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-1.4.0.tgz", + "integrity": "sha512-EDYVSmesqlQ4RD2VvWo3hQgTJ7ZrFQ2VSJdfiJiArkCerDAGeyF1i6dHkmySqk573jLp6d/cfqCN+7wUB5tLgg==", "dev": true, "requires": { - "@vitest/utils": "0.31.4", - "concordance": "^5.0.4", - "p-limit": "^4.0.0", - "pathe": "^1.1.0" + "@vitest/utils": "1.4.0", + "p-limit": "^5.0.0", + "pathe": "^1.1.1" }, "dependencies": { "p-limit": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz", - "integrity": "sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-5.0.0.tgz", + "integrity": "sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==", "dev": true, "requires": { "yocto-queue": "^1.0.0" @@ -19346,34 +23805,94 @@ } }, "@vitest/snapshot": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-0.31.4.tgz", - 
"integrity": "sha512-LemvNumL3NdWSmfVAMpXILGyaXPkZbG5tyl6+RQSdcHnTj6hvA49UAI8jzez9oQyE/FWLKRSNqTGzsHuk89LRA==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-1.4.0.tgz", + "integrity": "sha512-saAFnt5pPIA5qDGxOHxJ/XxhMFKkUSBJmVt5VgDsAqPTX6JP326r5C/c9UuCMPoXNzuudTPsYDZCoJ5ilpqG2A==", "dev": true, "requires": { - "magic-string": "^0.30.0", - "pathe": "^1.1.0", - "pretty-format": "^27.5.1" + "magic-string": "^0.30.5", + "pathe": "^1.1.1", + "pretty-format": "^29.7.0" + }, + "dependencies": { + "ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true + }, + "pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "requires": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + } + }, + "react-is": { + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", + "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==", + "dev": true + } } }, "@vitest/spy": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-0.31.4.tgz", - "integrity": "sha512-3ei5ZH1s3aqbEyftPAzSuunGICRuhE+IXOmpURFdkm5ybUADk+viyQfejNk6q8M5QGX8/EVKw+QWMEP3DTJDag==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-1.4.0.tgz", + "integrity": "sha512-Ywau/Qs1DzM/8Uc+yA77CwSegizMlcgTJuYGAi0jujOteJOUf1ujunHThYo243KG9nAyWT3L9ifPYZ5+As/+6Q==", "dev": true, "requires": { - "tinyspy": "^2.1.0" + "tinyspy": "^2.2.0" } }, "@vitest/utils": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-0.31.4.tgz", - "integrity": "sha512-DobZbHacWznoGUfYU8XDPY78UubJxXfMNY1+SUdOp1NsI34eopSA6aZMeaGu10waSOeYwE8lxrd/pLfT0RMxjQ==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-1.4.0.tgz", + "integrity": "sha512-mx3Yd1/6e2Vt/PUC98DcqTirtfxUyAZ32uK82r8rZzbtBeBo+nqgnjx/LvqQdWsrvNtm14VmurNgcf4nqY5gJg==", "dev": true, "requires": { - "concordance": "^5.0.4", - "loupe": "^2.3.6", - "pretty-format": "^27.5.1" + "diff-sequences": "^29.6.3", + "estree-walker": "^3.0.3", + "loupe": "^2.3.7", + "pretty-format": "^29.7.0" + }, + "dependencies": { + "ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true + }, + "estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "requires": { + "@types/estree": "^1.0.0" + } + }, + "pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "requires": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + } + }, + "react-is": { + 
"version": "18.2.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", + "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==", + "dev": true + } } }, "@whatwg-node/events": { @@ -19440,28 +23959,12 @@ "tslib": "^2.3.0" } }, - "abab": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz", - "integrity": "sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==", - "dev": true - }, "acorn": { "version": "8.11.3", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", "integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==", "devOptional": true }, - "acorn-globals": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-7.0.1.tgz", - "integrity": "sha512-umOSDSDrfHbTNPuNpC2NSnnA3LUrqpevPb4T9jRx4MagXNS0rs+gwiTcAvqCRmsD6utzsrzNt+ebm00SNWiC3Q==", - "dev": true, - "requires": { - "acorn": "^8.1.0", - "acorn-walk": "^8.0.2" - } - }, "acorn-jsx": { "version": "5.3.2", "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", @@ -19476,9 +23979,9 @@ "devOptional": true }, "agent-base": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz", - "integrity": "sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", + "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", "dev": true, "requires": { "debug": "^4.3.4" @@ -19564,6 +24067,14 @@ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, + "aria-hidden": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.4.tgz", + "integrity": "sha512-y+CcFFwelSXpLZk/7fMB2mUbGtX9lKycf1MWJ7CaTIERyitVlyQx6C+sxcROU2BAJ24OiZyK+8wj2i8AlBoS3A==", + "requires": { + "tslib": "^2.0.0" + } + }, "aria-query": { "version": "5.1.3", "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.1.3.tgz", @@ -19574,25 +24085,26 @@ } }, "array-buffer-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", - "integrity": "sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", + "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", "dev": true, "requires": { - "call-bind": "^1.0.2", - "is-array-buffer": "^3.0.1" + "call-bind": "^1.0.5", + "is-array-buffer": "^3.0.4" } }, "array-includes": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.7.tgz", - "integrity": "sha512-dlcsNBIiWhPkHdOEEKnehA+RNUWDc4UqFtnIXU4uuYDPtA4LDkr7qip2p0VvFAEXNDr0yWZ9PJyIRiGjRLQzwQ==", + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.8.tgz", + "integrity": "sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ==", "dev": true, "requires": { - "call-bind": "^1.0.2", - 
"define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1", + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.4", "is-string": "^1.0.7" } }, @@ -19602,17 +24114,32 @@ "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", "dev": true }, + "array.prototype.findlast": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.findlast/-/array.prototype.findlast-1.2.5.tgz", + "integrity": "sha512-CVvd6FHg1Z3POpBLxO6E6zr+rSKEQ9L6rZHAaY7lLfhKsWYUBBOuMs0e9o24oopj6H+geRCX0YJ+TJLBK2eHyQ==", + "dev": true, + "requires": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" + } + }, "array.prototype.findlastindex": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz", - "integrity": "sha512-LzLoiOMAxvy+Gd3BAq3B7VeIgPdo+Q8hthvKtXybMvRV0jrXfJM/t8mw7nNlpEcVlVUnCnM2KSX4XU5HmpodOA==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.5.tgz", + "integrity": "sha512-zfETvRFA8o7EiNn++N5f/kaCw221hrpGsDmcpndVupkPzEc1Wuf3VgC0qby1BbHs7f5DVYjgtEU2LLh5bqeGfQ==", "dev": true, "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.2.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" } }, "array.prototype.flat": { @@ -19639,31 +24166,44 @@ "es-shim-unscopables": "^1.0.0" } }, - "array.prototype.tosorted": { + "array.prototype.toreversed": { "version": "1.1.2", - "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.2.tgz", - "integrity": "sha512-HuQCHOlk1Weat5jzStICBCd83NxiIMwqDg/dHEsoefabn/hJRj5pVdWcPUSpRrwhwxZOsQassMpgN/xRYFBMIg==", + "resolved": "https://registry.npmjs.org/array.prototype.toreversed/-/array.prototype.toreversed-1.1.2.tgz", + "integrity": "sha512-wwDCoT4Ck4Cz7sLtgUmzR5UV3YF5mFHUlbChCzZBQZ+0m2cl/DH3tKgvphv1nKgFsJ48oCSg6p91q2Vm0I/ZMA==", "dev": true, "requires": { "call-bind": "^1.0.2", "define-properties": "^1.2.0", "es-abstract": "^1.22.1", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.2.1" + "es-shim-unscopables": "^1.0.0" + } + }, + "array.prototype.tosorted": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.3.tgz", + "integrity": "sha512-/DdH4TiTmOKzyQbp/eadcCVexiCb36xJg7HshYOYJnNZFDj33GEv0P7GxsynpShhq4OLYJzbGcBDkLsDt7MnNg==", + "dev": true, + "requires": { + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.1.0", + "es-shim-unscopables": "^1.0.2" } }, "arraybuffer.prototype.slice": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.2.tgz", - "integrity": "sha512-yMBKppFur/fbHu9/6USUe03bZ4knMYiwFBcyiaXB8Go0qNehwX6inYPzK9U0NeQvGxKthcmHcaR8P5MStSRBAw==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", + "integrity": 
"sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", "dev": true, "requires": { - "array-buffer-byte-length": "^1.0.0", - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1", - "is-array-buffer": "^3.0.2", + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.2.1", + "get-intrinsic": "^1.2.3", + "is-array-buffer": "^3.0.4", "is-shared-array-buffer": "^1.0.2" } }, @@ -19717,15 +24257,6 @@ "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==", "dev": true }, - "asynciterator.prototype": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/asynciterator.prototype/-/asynciterator.prototype-1.0.0.tgz", - "integrity": "sha512-wwHYEIS0Q80f5mosx3L/dfG5t5rjEa9Ft51GTaNt862EnpyGHpgz2RkZvLPp1oF5TnAiTohkEKVEu8pQPJI7Vg==", - "dev": true, - "requires": { - "has-symbols": "^1.0.3" - } - }, "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -19745,12 +24276,12 @@ "dev": true }, "autoprefixer": { - "version": "10.4.17", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.17.tgz", - "integrity": "sha512-/cpVNRLSfhOtcGflT13P2794gVSgmPgTR+erw5ifnMLZb0UnSlkK4tquLmkd3BhA+nLo5tX8Cu0upUsGKvKbmg==", + "version": "10.4.19", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.19.tgz", + "integrity": "sha512-BaENR2+zBZ8xXhM4pUaKUxlVdxZ0EZhjvbopwnXmxRUfqDmwSpC2lAi/QXvx7NRdPCo1WKEcEF6mV64si1z4Ew==", "requires": { - "browserslist": "^4.22.2", - "caniuse-lite": "^1.0.30001578", + "browserslist": "^4.23.0", + "caniuse-lite": "^1.0.30001599", "fraction.js": "^4.3.7", "normalize-range": "^0.1.2", "picocolors": "^1.0.0", @@ -19758,10 +24289,13 @@ } }, "available-typed-arrays": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", - "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", - "dev": true + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "dev": true, + "requires": { + "possible-typed-array-names": "^1.0.0" + } }, "aws-sign2": { "version": "0.7.0", @@ -19775,6 +24309,33 @@ "integrity": "sha512-NmWvPnx0F1SfrQbYwOi7OeaNGokp9XhzNioJ/CSBs8Qa4vxug81mhJEAVZwxXuBmYB5KDRfMq/F3RR0BIU7sWg==", "dev": true }, + "babel-plugin-polyfill-corejs2": { + "version": "0.4.10", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.10.tgz", + "integrity": "sha512-rpIuu//y5OX6jVU+a5BCn1R5RSZYWAl2Nar76iwaOdycqb6JPxediskWFMMl7stfwNJR4b7eiQvh5fB5TEQJTQ==", + "requires": { + "@babel/compat-data": "^7.22.6", + "@babel/helper-define-polyfill-provider": "^0.6.1", + "semver": "^6.3.1" + } + }, + "babel-plugin-polyfill-corejs3": { + "version": "0.10.4", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.10.4.tgz", + "integrity": "sha512-25J6I8NGfa5YkCDogHRID3fVCadIR8/pGl1/spvCkzb6lVn6SR3ojpx9nOn9iEBcUsjY24AmdKm5khcfKdylcg==", + "requires": { + "@babel/helper-define-polyfill-provider": "^0.6.1", + "core-js-compat": "^3.36.1" + } + }, + 
"babel-plugin-polyfill-regenerator": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.6.1.tgz", + "integrity": "sha512-JfTApdE++cgcTWjsiCQlLyFBMbTUft9ja17saCc93lgV33h4tuCVj7tlvu//qpLwaG+3yEz7/KhahGrUMkVq9g==", + "requires": { + "@babel/helper-define-polyfill-provider": "^0.6.1" + } + }, "babel-plugin-syntax-trailing-function-commas": { "version": "7.0.0-beta.0", "resolved": "https://registry.npmjs.org/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-7.0.0-beta.0.tgz", @@ -19874,11 +24435,10 @@ "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", "dev": true }, - "blueimp-md5": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/blueimp-md5/-/blueimp-md5-2.19.0.tgz", - "integrity": "sha512-DRQrD6gJyy8FbiE4s+bDoXS9hiW3Vbx5uCdwvcCf3zLHL+Iv7LtGHLpr+GZV8rHG8tK766FGYBwRbu8pELTt+w==", - "dev": true + "boolbase": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==" }, "brace-expansion": { "version": "1.1.11", @@ -19899,12 +24459,12 @@ } }, "browserslist": { - "version": "4.22.3", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.22.3.tgz", - "integrity": "sha512-UAp55yfwNv0klWNapjs/ktHoguxuQNGnOzxYmfnXIS+8AsRDZkSDxg7R1AX3GKzn078SBI5dzwzj/Yx0Or0e3A==", + "version": "4.23.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz", + "integrity": "sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==", "requires": { - "caniuse-lite": "^1.0.30001580", - "electron-to-chromium": "^1.4.648", + "caniuse-lite": "^1.0.30001587", + "electron-to-chromium": "^1.4.668", "node-releases": "^2.0.14", "update-browserslist-db": "^1.0.13" } @@ -19934,6 +24494,12 @@ "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", "dev": true }, + "builtin-modules": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz", + "integrity": "sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==", + "dev": true + }, "builtins": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.0.1.tgz", @@ -19953,9 +24519,9 @@ } }, "semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "dev": true, "requires": { "lru-cache": "^6.0.0" @@ -19978,65 +24544,6 @@ "streamsearch": "^1.1.0" } }, - "c8": { - "version": "7.14.0", - "resolved": "https://registry.npmjs.org/c8/-/c8-7.14.0.tgz", - "integrity": "sha512-i04rtkkcNcCf7zsQcSv/T9EbUn4RXQ6mropeMcjFOsQXQ0iGLAr/xT6TImQg4+U9hmNpN9XdvPkjUL1IzbgxJw==", - "dev": true, - "requires": { - "@bcoe/v8-coverage": "^0.2.3", - "@istanbuljs/schema": "^0.1.3", - "find-up": "^5.0.0", - "foreground-child": "^2.0.0", - "istanbul-lib-coverage": "^3.2.0", - "istanbul-lib-report": "^3.0.0", - "istanbul-reports": "^3.1.4", - "rimraf": 
"^3.0.2", - "test-exclude": "^6.0.0", - "v8-to-istanbul": "^9.0.0", - "yargs": "^16.2.0", - "yargs-parser": "^20.2.9" - }, - "dependencies": { - "cliui": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", - "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", - "dev": true, - "requires": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", - "wrap-ansi": "^7.0.0" - } - }, - "wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - } - }, - "yargs": { - "version": "16.2.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", - "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", - "dev": true, - "requires": { - "cliui": "^7.0.2", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.0", - "y18n": "^5.0.5", - "yargs-parser": "^20.2.2" - } - } - } - }, "cac": { "version": "6.7.14", "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", @@ -20050,14 +24557,16 @@ "dev": true }, "call-bind": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz", - "integrity": "sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "dev": true, "requires": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.1", - "set-function-length": "^1.1.1" + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" } }, "callsites": { @@ -20086,9 +24595,9 @@ "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==" }, "caniuse-lite": { - "version": "1.0.30001581", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001581.tgz", - "integrity": "sha512-whlTkwhqV2tUmP3oYhtNfaWGYHDdS3JYFQBKXxcUR9qqPWsRhFHhoISO2Xnl/g0xyKzht9mI1LZpiNWfMzHixQ==" + "version": "1.0.30001605", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001605.tgz", + "integrity": "sha512-nXwGlFWo34uliI9z3n6Qc0wZaf7zaZWA1CPZ169La5mV3I/gem7bst0vr5XQH5TJXZIMfDeZyOrZnSlVzKxxHQ==" }, "capital-case": { "version": "1.0.4", @@ -20355,6 +24864,11 @@ "integrity": "sha512-1prg2gv44sYfpHscP26uLT/ePrh0mlmVwMSoSd3zYKQ92Ab3jPRLzyCnpyOCQLJbK+YdNs4HvMRqMNYdy4pMhA==", "requires": {} }, + "codemirror": { + "version": "5.65.16", + "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-5.65.16.tgz", + "integrity": "sha512-br21LjYmSlVL0vFCPWPfhzUCT34FM/pAdK7rRIZwa0rrtrIdotvP4Oh4GUHsu2E3IrQMCfRkL/fN3ytMNxVQvg==" + }, "color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -20374,6 +24888,16 @@ "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", "dev": true }, + "colors-named": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/colors-named/-/colors-named-1.0.2.tgz", + "integrity": "sha512-2ANq2r393PV9njYUD66UdfBcxR1slMqRA3QRTWgCx49JoCJ+kOhyfbQYxKJbPZQIhZUcNjVOs5AlyY1WwXec3w==" + }, + "colors-named-hex": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/colors-named-hex/-/colors-named-hex-1.0.2.tgz", + "integrity": "sha512-k6kq1e1pUCQvSVwIaGFq2l0LrkAPQZWyeuZn1Z8nOiYSEZiKoFj4qx690h2Kd34DFl9Me0gKS6MUwAMBJj8nuA==" + }, "combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -20406,48 +24930,6 @@ "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", "dev": true }, - "concordance": { - "version": "5.0.4", - "resolved": "https://registry.npmjs.org/concordance/-/concordance-5.0.4.tgz", - "integrity": "sha512-OAcsnTEYu1ARJqWVGwf4zh4JDfHZEaSNlNccFmt8YjB2l/n19/PF2viLINHc57vO4FKIAFl2FWASIGZZWZ2Kxw==", - "dev": true, - "requires": { - "date-time": "^3.1.0", - "esutils": "^2.0.3", - "fast-diff": "^1.2.0", - "js-string-escape": "^1.0.1", - "lodash": "^4.17.15", - "md5-hex": "^3.0.1", - "semver": "^7.3.2", - "well-known-symbols": "^2.0.0" - }, - "dependencies": { - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - }, - "semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - } - } - }, "constant-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/constant-case/-/constant-case-3.0.4.tgz", @@ -20464,6 +24946,22 @@ "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" }, + "copy-to-clipboard": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.3.tgz", + "integrity": "sha512-2KV8NhB5JqC3ky0r9PMCAZKbUHSwtEo4CwCs0KXgruG43gX5PMqDEBbVU4OUzw2MuAWUfsuFmWvEKG5QRfSnJA==", + "requires": { + "toggle-selection": "^1.0.6" + } + }, + "core-js-compat": { + "version": "3.36.1", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.36.1.tgz", + "integrity": "sha512-Dk997v9ZCt3X/npqzyGdTlq6t7lDBhZwGvV94PKzDArjp7BTRm7WlDAXYd/OWdeFHO8OChQYRJNJvUCqCbrtKA==", + "requires": { + "browserslist": "^4.23.0" + } + }, "core-util-is": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", @@ -20471,16 +24969,14 @@ "dev": true }, "cosmiconfig": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.1.0.tgz", - "integrity": "sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==", - "dev": true, + "version": "8.3.6", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", + "integrity": 
"sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", "requires": { - "@types/parse-json": "^4.0.0", - "import-fresh": "^3.2.1", - "parse-json": "^5.0.0", - "path-type": "^4.0.0", - "yaml": "^1.10.0" + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0", + "path-type": "^4.0.0" } }, "create-require": { @@ -20502,6 +24998,15 @@ "node-fetch": "^2.6.12" } }, + "cross-inspect": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/cross-inspect/-/cross-inspect-1.0.0.tgz", + "integrity": "sha512-4PFfn4b5ZN6FMNGSZlyb7wUhuN8wvj8t/VQHZdM4JsDcruGJ8L2kf9zao98QIrBPFCpdk27qst/AGTl7pL3ypQ==", + "dev": true, + "requires": { + "tslib": "^2.4.0" + } + }, "cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", @@ -20517,15 +25022,65 @@ "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", "integrity": "sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==" }, + "css-select": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz", + "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==", + "requires": { + "boolbase": "^1.0.0", + "css-what": "^6.1.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", + "nth-check": "^2.0.1" + } + }, + "css-tree": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz", + "integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==", + "requires": { + "mdn-data": "2.0.30", + "source-map-js": "^1.0.1" + } + }, + "css-what": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", + "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==" + }, "cssesc": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==" }, + "csso": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/csso/-/csso-5.0.5.tgz", + "integrity": "sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ==", + "requires": { + "css-tree": "~2.2.0" + }, + "dependencies": { + "css-tree": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.2.1.tgz", + "integrity": "sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA==", + "requires": { + "mdn-data": "2.0.28", + "source-map-js": "^1.0.1" + } + }, + "mdn-data": { + "version": "2.0.28", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.28.tgz", + "integrity": "sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==" + } + } + }, "cssstyle": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-3.0.0.tgz", - "integrity": "sha512-N4u2ABATi3Qplzf0hWbVCdjenim8F3ojEXpBDF5hBpjzW182MjNGLqfmQ0SkSPeQ+V86ZXgeH8aXj6kayd4jgg==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-4.0.1.tgz", + "integrity": "sha512-8ZYiJ3A/3OkDd093CBT/0UKDWry7ak4BdPTFP2+QEP7cmhouyq/Up709ASSj2cK02BbZiMgk7kYjZNS4QP5qrQ==", "dev": true, "requires": { "rrweb-cssom": "^0.6.0" @@ -20537,9 +25092,9 @@ "integrity": 
"sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==" }, "cypress": { - "version": "13.6.4", - "resolved": "https://registry.npmjs.org/cypress/-/cypress-13.6.4.tgz", - "integrity": "sha512-pYJjCfDYB+hoOoZuhysbbYhEmNW7DEDsqn+ToCLwuVowxUXppIWRr7qk4TVRIU471ksfzyZcH+mkoF0CQUKnpw==", + "version": "13.7.3", + "resolved": "https://registry.npmjs.org/cypress/-/cypress-13.7.3.tgz", + "integrity": "sha512-uoecY6FTCAuIEqLUYkTrxamDBjMHTYak/1O7jtgwboHiTnS1NaMOoR08KcTrbRZFCBvYOiS4tEkQRmsV+xcrag==", "dev": true, "requires": { "@cypress/request": "^3.0.0", @@ -20549,7 +25104,7 @@ "arch": "^2.2.0", "blob-util": "^2.0.2", "bluebird": "^3.7.2", - "buffer": "^5.6.0", + "buffer": "^5.7.1", "cachedir": "^2.3.0", "chalk": "^4.1.0", "check-more-types": "^2.24.0", @@ -20567,7 +25122,7 @@ "figures": "^3.2.0", "fs-extra": "^9.1.0", "getos": "^3.2.1", - "is-ci": "^3.0.0", + "is-ci": "^3.0.1", "is-installed-globally": "~0.4.0", "lazy-ass": "^1.6.0", "listr2": "^3.8.3", @@ -20654,16 +25209,6 @@ } } }, - "cypress-vite": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/cypress-vite/-/cypress-vite-1.5.0.tgz", - "integrity": "sha512-vvTMqJZgI3sN2ylQTi4OQh8LRRjSrfrIdkQD5fOj+EC/e9oHkxS96lif1SyDF1PwailG1tnpJE+VpN6+AwO/rg==", - "dev": true, - "requires": { - "chokidar": "^3.5.3", - "debug": "^4.3.4" - } - }, "d3-array": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", @@ -20751,39 +25296,59 @@ } }, "data-urls": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-4.0.0.tgz", - "integrity": "sha512-/mMTei/JXPqvFqQtfyTowxmJVwr2PVAeCcDxyFf6LhoOu/09TX2OX3kb2wzi4DMXcfj4OItwDOnhl5oziPnT6g==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-5.0.0.tgz", + "integrity": "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==", "dev": true, "requires": { - "abab": "^2.0.6", - "whatwg-mimetype": "^3.0.0", - "whatwg-url": "^12.0.0" + "whatwg-mimetype": "^4.0.0", + "whatwg-url": "^14.0.0" } }, - "dataloader": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/dataloader/-/dataloader-2.2.2.tgz", - "integrity": "sha512-8YnDaaf7N3k/q5HnTJVuzSyLETjoZjVmHc4AeKAzOvKHEFQKcn64OKBfzHYtE9zGjctNM7V9I0MfnUVLpi7M5g==", - "dev": true + "data-view-buffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", + "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", + "dev": true, + "requires": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + } }, - "date-fns": { - "version": "2.30.0", - "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz", - "integrity": "sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==", + "data-view-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", + "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", + "dev": true, "requires": { - "@babel/runtime": "^7.21.0" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" } }, - "date-time": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/date-time/-/date-time-3.1.0.tgz", - "integrity": 
"sha512-uqCUKXE5q1PNBXjPqvwhwJf9SwMoAHBgWJ6DcrnS5o+W2JOiIILl0JEdVD8SGujrNS02GGxgwAg2PN2zONgtjg==", + "data-view-byte-offset": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", + "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", "dev": true, "requires": { - "time-zone": "^1.0.0" + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" } }, + "dataloader": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/dataloader/-/dataloader-2.2.2.tgz", + "integrity": "sha512-8YnDaaf7N3k/q5HnTJVuzSyLETjoZjVmHc4AeKAzOvKHEFQKcn64OKBfzHYtE9zGjctNM7V9I0MfnUVLpi7M5g==", + "dev": true + }, + "date-fns": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-3.6.0.tgz", + "integrity": "sha512-fRHTG8g/Gif+kSh50gaGEdToemgfj74aRX3swtiouboip5JDLAyDE9F11nHMIcvOaXeOC6D7SpNhi7uFyB7Uww==" + }, "dayjs": { "version": "1.11.10", "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.10.tgz", @@ -20830,9 +25395,9 @@ } }, "decode-uri-component": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz", - "integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==" + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.4.1.tgz", + "integrity": "sha512-+8VxcR21HhTy8nOt6jf20w0c9CADrw1O8d+VZ/YzzCt4bJ3uBjw+D1q2osAB8RnpwwaeYBxy0HyKQxD5JBMuuQ==" }, "deep-eql": { "version": "4.1.3", @@ -20875,6 +25440,11 @@ "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", "dev": true }, + "deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==" + }, "defaults": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.4.tgz", @@ -20885,14 +25455,14 @@ } }, "define-data-property": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", - "integrity": "sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "dev": true, "requires": { - "get-intrinsic": "^1.2.1", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" } }, "define-properties": { @@ -20933,7 +25503,14 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==", - "dev": true + "dev": true, + "optional": true, + "peer": true + }, + "detect-node-es": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", + "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==" }, "devlop": { "version": "1.1.0", @@ -20959,6 +25536,12 @@ "resolved": 
"https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.5.tgz", "integrity": "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==" }, + "diff-sequences": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", + "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", + "dev": true + }, "dir-glob": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", @@ -20997,13 +25580,37 @@ "csstype": "^3.0.2" } }, - "domexception": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/domexception/-/domexception-4.0.0.tgz", - "integrity": "sha512-A2is4PLG+eeSfoTMA95/s4pvAoSo2mKtiM5jlHkAVewmiO8ISFTFKZjH7UAM1Atli/OT/7JHOrJRJiMKUZKYBw==", - "dev": true, + "dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", "requires": { - "webidl-conversions": "^7.0.0" + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + } + }, + "domelementtype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", + "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==" + }, + "domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "requires": { + "domelementtype": "^2.3.0" + } + }, + "domutils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", + "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==", + "requires": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" } }, "dot-case": { @@ -21016,9 +25623,9 @@ } }, "dotenv": { - "version": "16.4.1", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.1.tgz", - "integrity": "sha512-CjA3y+Dr3FyFDOAMnxZEGtnW9KBR2M0JvvUtXNW+dYJL5ROWxP9DUHCwgFqpMk0OXCc0ljhaNTr2w/kutYIcHQ==", + "version": "16.4.5", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz", + "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==", "dev": true }, "dset": { @@ -21043,9 +25650,9 @@ } }, "electron-to-chromium": { - "version": "1.4.651", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.651.tgz", - "integrity": "sha512-jjks7Xx+4I7dslwsbaFocSwqBbGHQmuXBJUK9QBZTIrzPq3pzn6Uf2szFSP728FtLYE3ldiccmlkOM/zhGKCpA==" + "version": "1.4.724", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.724.tgz", + "integrity": "sha512-RTRvkmRkGhNBPPpdrgtDKvmOEYTrPlXDfc0J/Nfq5s29tEahAwhiX4mmhNzj6febWMleulxVYPh7QwCSL/EldA==" }, "emoji-regex": { "version": "8.0.0", @@ -21085,52 +25692,74 @@ } }, "es-abstract": { - "version": "1.22.3", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.3.tgz", - "integrity": "sha512-eiiY8HQeYfYH2Con2berK+To6GrK2RxbPawDkGq4UiCQQfZHb6wX9qQqkbpPqaxQFcl8d9QzZqo0tGE0VcrdwA==", - "dev": true, - "requires": { - "array-buffer-byte-length": "^1.0.0", - "arraybuffer.prototype.slice": "^1.0.2", 
- "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.5", - "es-set-tostringtag": "^2.0.1", + "version": "1.23.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", + "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", + "dev": true, + "requires": { + "array-buffer-byte-length": "^1.0.1", + "arraybuffer.prototype.slice": "^1.0.3", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "data-view-buffer": "^1.0.1", + "data-view-byte-length": "^1.0.1", + "data-view-byte-offset": "^1.0.0", + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-set-tostringtag": "^2.0.3", "es-to-primitive": "^1.2.1", "function.prototype.name": "^1.1.6", - "get-intrinsic": "^1.2.2", - "get-symbol-description": "^1.0.0", + "get-intrinsic": "^1.2.4", + "get-symbol-description": "^1.0.2", "globalthis": "^1.0.3", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0", - "has-proto": "^1.0.1", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", "has-symbols": "^1.0.3", - "hasown": "^2.0.0", - "internal-slot": "^1.0.5", - "is-array-buffer": "^3.0.2", + "hasown": "^2.0.2", + "internal-slot": "^1.0.7", + "is-array-buffer": "^3.0.4", "is-callable": "^1.2.7", - "is-negative-zero": "^2.0.2", + "is-data-view": "^1.0.1", + "is-negative-zero": "^2.0.3", "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", + "is-shared-array-buffer": "^1.0.3", "is-string": "^1.0.7", - "is-typed-array": "^1.1.12", + "is-typed-array": "^1.1.13", "is-weakref": "^1.0.2", "object-inspect": "^1.13.1", "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.5.1", - "safe-array-concat": "^1.0.1", - "safe-regex-test": "^1.0.0", - "string.prototype.trim": "^1.2.8", - "string.prototype.trimend": "^1.0.7", - "string.prototype.trimstart": "^1.0.7", - "typed-array-buffer": "^1.0.0", - "typed-array-byte-length": "^1.0.0", - "typed-array-byte-offset": "^1.0.0", - "typed-array-length": "^1.0.4", + "object.assign": "^4.1.5", + "regexp.prototype.flags": "^1.5.2", + "safe-array-concat": "^1.1.2", + "safe-regex-test": "^1.0.3", + "string.prototype.trim": "^1.2.9", + "string.prototype.trimend": "^1.0.8", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.2", + "typed-array-byte-length": "^1.0.1", + "typed-array-byte-offset": "^1.0.2", + "typed-array-length": "^1.0.6", "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.13" + "which-typed-array": "^1.1.15" + } + }, + "es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dev": true, + "requires": { + "get-intrinsic": "^1.2.4" } }, + "es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true + }, "es-get-iterator": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.3.tgz", @@ -21149,36 +25778,45 @@ } }, "es-iterator-helpers": { - "version": "1.0.15", - "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.15.tgz", - "integrity": "sha512-GhoY8uYqd6iwUl2kgjTm4CZAf6oo5mHK7BPqx3rKgx893YSsy0LGHV6gfqqQvZt/8xM8xeOnfXBCfqclMKkJ5g==", + "version": 
"1.0.18", + "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.18.tgz", + "integrity": "sha512-scxAJaewsahbqTYrGKJihhViaM6DDZDDoucfvzNbK0pOren1g/daDQ3IAhzn+1G14rBG7w+i5N+qul60++zlKA==", "dev": true, "requires": { - "asynciterator.prototype": "^1.0.0", - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "define-properties": "^1.2.1", - "es-abstract": "^1.22.1", - "es-set-tostringtag": "^2.0.1", - "function-bind": "^1.1.1", - "get-intrinsic": "^1.2.1", + "es-abstract": "^1.23.0", + "es-errors": "^1.3.0", + "es-set-tostringtag": "^2.0.3", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", "globalthis": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-proto": "^1.0.1", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", "has-symbols": "^1.0.3", - "internal-slot": "^1.0.5", + "internal-slot": "^1.0.7", "iterator.prototype": "^1.1.2", - "safe-array-concat": "^1.0.1" + "safe-array-concat": "^1.1.2" + } + }, + "es-object-atoms": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", + "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "dev": true, + "requires": { + "es-errors": "^1.3.0" } }, "es-set-tostringtag": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz", - "integrity": "sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", + "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", "dev": true, "requires": { - "get-intrinsic": "^1.2.2", - "has-tostringtag": "^1.0.0", - "hasown": "^2.0.0" + "get-intrinsic": "^1.2.4", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.1" } }, "es-shim-unscopables": { @@ -21202,32 +25840,33 @@ } }, "esbuild": { - "version": "0.18.20", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.18.20.tgz", - "integrity": "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA==", - "requires": { - "@esbuild/android-arm": "0.18.20", - "@esbuild/android-arm64": "0.18.20", - "@esbuild/android-x64": "0.18.20", - "@esbuild/darwin-arm64": "0.18.20", - "@esbuild/darwin-x64": "0.18.20", - "@esbuild/freebsd-arm64": "0.18.20", - "@esbuild/freebsd-x64": "0.18.20", - "@esbuild/linux-arm": "0.18.20", - "@esbuild/linux-arm64": "0.18.20", - "@esbuild/linux-ia32": "0.18.20", - "@esbuild/linux-loong64": "0.18.20", - "@esbuild/linux-mips64el": "0.18.20", - "@esbuild/linux-ppc64": "0.18.20", - "@esbuild/linux-riscv64": "0.18.20", - "@esbuild/linux-s390x": "0.18.20", - "@esbuild/linux-x64": "0.18.20", - "@esbuild/netbsd-x64": "0.18.20", - "@esbuild/openbsd-x64": "0.18.20", - "@esbuild/sunos-x64": "0.18.20", - "@esbuild/win32-arm64": "0.18.20", - "@esbuild/win32-ia32": "0.18.20", - "@esbuild/win32-x64": "0.18.20" + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.2.tgz", + "integrity": "sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==", + "requires": { + "@esbuild/aix-ppc64": "0.20.2", + "@esbuild/android-arm": "0.20.2", + "@esbuild/android-arm64": "0.20.2", + "@esbuild/android-x64": "0.20.2", + "@esbuild/darwin-arm64": "0.20.2", + "@esbuild/darwin-x64": "0.20.2", + "@esbuild/freebsd-arm64": "0.20.2", + 
"@esbuild/freebsd-x64": "0.20.2", + "@esbuild/linux-arm": "0.20.2", + "@esbuild/linux-arm64": "0.20.2", + "@esbuild/linux-ia32": "0.20.2", + "@esbuild/linux-loong64": "0.20.2", + "@esbuild/linux-mips64el": "0.20.2", + "@esbuild/linux-ppc64": "0.20.2", + "@esbuild/linux-riscv64": "0.20.2", + "@esbuild/linux-s390x": "0.20.2", + "@esbuild/linux-x64": "0.20.2", + "@esbuild/netbsd-x64": "0.20.2", + "@esbuild/openbsd-x64": "0.20.2", + "@esbuild/sunos-x64": "0.20.2", + "@esbuild/win32-arm64": "0.20.2", + "@esbuild/win32-ia32": "0.20.2", + "@esbuild/win32-x64": "0.20.2" } }, "escalade": { @@ -21241,29 +25880,17 @@ "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "dev": true }, - "escodegen": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.1.0.tgz", - "integrity": "sha512-2NlIDTwUWJN0mRPQOdtQBzbUHvdGY2P1VXSyU83Q3xKxM7WHX2Ql8dKq782Q9TgQUNOLEzEYu9bzLNj1q88I5w==", - "dev": true, - "requires": { - "esprima": "^4.0.1", - "estraverse": "^5.2.0", - "esutils": "^2.0.2", - "source-map": "~0.6.1" - } - }, "eslint": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.56.0.tgz", - "integrity": "sha512-Go19xM6T9puCOWntie1/P997aXxFsOi37JIHRWI514Hc6ZnaHGKY9xFhrU65RT6CcBEzZoGG1e6Nq+DT04ZtZQ==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", + "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", "dev": true, "requires": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.4", - "@eslint/js": "8.56.0", - "@humanwhocodes/config-array": "^0.11.13", + "@eslint/js": "8.57.0", + "@humanwhocodes/config-array": "^0.11.14", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "@ungap/structured-clone": "^1.2.0", @@ -21335,30 +25962,48 @@ } } }, - "eslint-config-prettier": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.10.0.tgz", - "integrity": "sha512-SM8AMJdeQqRYT9O9zguiruQZaN7+z+E4eAP9oiLNGKMtomwaB1E9dcgUD6ZAn/eQAb52USbvezbiljfZUhbJcg==", + "eslint-compat-utils": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/eslint-compat-utils/-/eslint-compat-utils-0.5.0.tgz", + "integrity": "sha512-dc6Y8tzEcSYZMHa+CMPLi/hyo1FzNeonbhJL7Ol0ccuKQkwopJcJBA9YL/xmMTLU1eKigXo9vj9nALElWYSowg==", "dev": true, - "requires": {} + "requires": { + "semver": "^7.5.4" + }, + "dependencies": { + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" + } + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + } + } }, - "eslint-config-standard": { - "version": "17.0.0", - "resolved": 
"https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-17.0.0.tgz", - "integrity": "sha512-/2ks1GKyqSOkH7JFvXJicu0iMpoojkwB+f5Du/1SC0PtBL+s8v30k9njRZ21pm2drKYm2342jFnGWzttxPmZVg==", + "eslint-config-prettier": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", + "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", "dev": true, "requires": {} }, - "eslint-config-standard-with-typescript": { - "version": "34.0.1", - "resolved": "https://registry.npmjs.org/eslint-config-standard-with-typescript/-/eslint-config-standard-with-typescript-34.0.1.tgz", - "integrity": "sha512-J7WvZeLtd0Vr9F+v4dZbqJCLD16cbIy4U+alJMq4MiXdpipdBM3U5NkXaGUjePc4sb1ZE01U9g6VuTBpHHz1fg==", - "dev": true, - "requires": { - "@typescript-eslint/parser": "^5.43.0", - "eslint-config-standard": "17.0.0" - } - }, "eslint-import-resolver-node": { "version": "0.3.9", "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", @@ -21382,9 +26027,9 @@ } }, "eslint-module-utils": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz", - "integrity": "sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw==", + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.1.tgz", + "integrity": "sha512-rXDXR3h7cs7dy9RNpUlQf80nX31XWJEyGq1tRMo+6GsO5VmTe4UTwtmonAD4ZkAsrfMVDA2wlGJ3790Ys+D49Q==", "dev": true, "requires": { "debug": "^3.2.7" @@ -21427,31 +26072,15 @@ } } }, - "eslint-plugin-es": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-4.1.0.tgz", - "integrity": "sha512-GILhQTnjYE2WorX5Jyi5i4dz5ALWxBIdQECVQavL6s7cI76IZTDWleTHkxz/QT3kvcs2QlGHvKLYsSlPOlPXnQ==", + "eslint-plugin-es-x": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-es-x/-/eslint-plugin-es-x-7.6.0.tgz", + "integrity": "sha512-I0AmeNgevgaTR7y2lrVCJmGYF0rjoznpDvqV/kIkZSZbZ8Rw3eu4cGlvBBULScfkSOCzqKbff5LR4CNrV7mZHA==", "dev": true, "requires": { - "eslint-utils": "^2.0.0", - "regexpp": "^3.0.0" - }, - "dependencies": { - "eslint-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", - "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", - "dev": true, - "requires": { - "eslint-visitor-keys": "^1.1.0" - } - }, - "eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "dev": true - } + "@eslint-community/eslint-utils": "^4.1.2", + "@eslint-community/regexpp": "^4.6.0", + "eslint-compat-utils": "^0.5.0" } }, "eslint-plugin-import": { @@ -21500,21 +26129,33 @@ } }, "eslint-plugin-n": { - "version": "15.7.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-15.7.0.tgz", - "integrity": "sha512-jDex9s7D/Qial8AGVIHq4W7NswpUD5DPDL2RH8Lzd9EloWUuvUkHfv4FRLMipH5q2UtyurorBkPeNi1wVWNh3Q==", + "version": "16.6.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-16.6.2.tgz", + "integrity": "sha512-6TyDmZ1HXoFQXnhCTUjVFULReoBPOAjpuiKELMkeP40yffI/1ZRO+d9ug/VC6fqISo2WkuIBk3cvuRPALaWlOQ==", 
"dev": true, "requires": { + "@eslint-community/eslint-utils": "^4.4.0", "builtins": "^5.0.1", - "eslint-plugin-es": "^4.1.0", - "eslint-utils": "^3.0.0", - "ignore": "^5.1.1", - "is-core-module": "^2.11.0", + "eslint-plugin-es-x": "^7.5.0", + "get-tsconfig": "^4.7.0", + "globals": "^13.24.0", + "ignore": "^5.2.4", + "is-builtin-module": "^3.2.1", + "is-core-module": "^2.12.1", "minimatch": "^3.1.2", - "resolve": "^1.22.1", - "semver": "^7.3.8" + "resolve": "^1.22.2", + "semver": "^7.5.3" }, "dependencies": { + "globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "requires": { + "type-fest": "^0.20.2" + } + }, "lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -21525,14 +26166,20 @@ } }, "semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "dev": true, "requires": { "lru-cache": "^6.0.0" } }, + "type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true + }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", @@ -21549,27 +26196,29 @@ "requires": {} }, "eslint-plugin-react": { - "version": "7.33.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.33.2.tgz", - "integrity": "sha512-73QQMKALArI8/7xGLNI/3LylrEYrlKZSb5C9+q3OtOewTnMQi5cT+aE9E41sLCmli3I9PGGmD1yiZydyo4FEPw==", + "version": "7.34.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.34.1.tgz", + "integrity": "sha512-N97CxlouPT1AHt8Jn0mhhN2RrADlUAsk1/atcT2KyA/l9Q/E6ll7OIGwNumFmWfZ9skV3XXccYS19h80rHtgkw==", "dev": true, "requires": { - "array-includes": "^3.1.6", - "array.prototype.flatmap": "^1.3.1", - "array.prototype.tosorted": "^1.1.1", + "array-includes": "^3.1.7", + "array.prototype.findlast": "^1.2.4", + "array.prototype.flatmap": "^1.3.2", + "array.prototype.toreversed": "^1.1.2", + "array.prototype.tosorted": "^1.1.3", "doctrine": "^2.1.0", - "es-iterator-helpers": "^1.0.12", + "es-iterator-helpers": "^1.0.17", "estraverse": "^5.3.0", "jsx-ast-utils": "^2.4.1 || ^3.0.0", "minimatch": "^3.1.2", - "object.entries": "^1.1.6", - "object.fromentries": "^2.0.6", - "object.hasown": "^1.1.2", - "object.values": "^1.1.6", + "object.entries": "^1.1.7", + "object.fromentries": "^2.0.7", + "object.hasown": "^1.1.3", + "object.values": "^1.1.7", "prop-types": "^15.8.1", - "resolve": "^2.0.0-next.4", + "resolve": "^2.0.0-next.5", "semver": "^6.3.1", - "string.prototype.matchall": "^4.0.8" + "string.prototype.matchall": "^4.0.10" }, "dependencies": { "doctrine": { @@ -21595,9 +26244,9 @@ } }, "eslint-plugin-unused-imports": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-2.0.0.tgz", - "integrity": 
"sha512-3APeS/tQlTrFa167ThtP0Zm0vctjr4M44HMpeg1P4bK6wItarumq0Ma82xorMKdFsWpphQBlRPzw/pxiVELX1A==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-3.1.0.tgz", + "integrity": "sha512-9l1YFCzXKkw1qtAru1RWUtG2EVDZY0a0eChKXcL+EZ5jitG7qxdctu4RnvhOJHv4xfmUf7h+JJPINlVpGhZMrw==", "dev": true, "requires": { "eslint-rule-composer": "^0.3.0" @@ -21609,41 +26258,6 @@ "integrity": "sha512-bt+Sh8CtDmn2OajxvNO+BX7Wn4CIWMpTRm3MaiKPCQcnnlm0CS2mhui6QaoeQugs+3Kj2ESKEEGJUdVafwhiCg==", "dev": true }, - "eslint-scope": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", - "dev": true, - "requires": { - "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - }, - "dependencies": { - "estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "dev": true - } - } - }, - "eslint-utils": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", - "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", - "dev": true, - "requires": { - "eslint-visitor-keys": "^2.0.0" - }, - "dependencies": { - "eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", - "dev": true - } - } - }, "eslint-visitor-keys": { "version": "3.4.3", "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", @@ -21661,12 +26275,6 @@ "eslint-visitor-keys": "^3.4.1" } }, - "esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true - }, "esquery": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", @@ -21704,8 +26312,7 @@ "esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==" }, "eventemitter2": { "version": "6.4.7", @@ -21808,12 +26415,6 @@ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", "dev": true }, - "fast-diff": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.3.0.tgz", - "integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==", - "dev": true - }, "fast-equals": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/fast-equals/-/fast-equals-5.0.1.tgz", @@ -21954,9 +26555,9 @@ } }, "filter-obj": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-1.1.0.tgz", - "integrity": "sha512-8rXg1ZnX7xzy2NGDVkBVaAy+lSlPNwad13BtgSlLuxfIslyt5Vg64U7tFcCt4WS1R0hvtnQybT/IyCkGZ3DpXQ==" + "version": "5.1.0", + "resolved": 
"https://registry.npmjs.org/filter-obj/-/filter-obj-5.1.0.tgz", + "integrity": "sha512-qWeTREPoT7I0bifpPUXtxkZJ1XJzxWtfoWWkdVGqa+eCr3SHW/Ocp89o8vLvbUuQnadybJpjOKu4V+RwO6sGng==" }, "find-up": { "version": "5.0.0", @@ -21991,17 +26592,7 @@ "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", "dev": true, "requires": { - "is-callable": "^1.1.3" - } - }, - "foreground-child": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", - "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==", - "dev": true, - "requires": { - "cross-spawn": "^7.0.0", - "signal-exit": "^3.0.2" + "is-callable": "^1.1.3" } }, "forever-agent": { @@ -22026,6 +26617,28 @@ "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==" }, + "framer-motion": { + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-6.5.1.tgz", + "integrity": "sha512-o1BGqqposwi7cgDrtg0dNONhkmPsUFDaLcKXigzuTFC5x58mE8iyTazxSudFzmT6MEyJKfjjU8ItoMe3W+3fiw==", + "requires": { + "@emotion/is-prop-valid": "^0.8.2", + "@motionone/dom": "10.12.0", + "framesync": "6.0.1", + "hey-listen": "^1.0.8", + "popmotion": "11.0.3", + "style-value-types": "5.0.0", + "tslib": "^2.1.0" + } + }, + "framesync": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/framesync/-/framesync-6.0.1.tgz", + "integrity": "sha512-fUY88kXvGiIItgNC7wcTOl0SNRCVXMKSWW2Yzfmn7EKNc+MpCzcz9DhdHcdjbrtN3c6R4H5dTY2jiCpPdysEjA==", + "requires": { + "tslib": "^2.1.0" + } + }, "fs-extra": { "version": "9.1.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", @@ -22091,17 +26704,23 @@ "dev": true }, "get-intrinsic": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.2.tgz", - "integrity": "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", "dev": true, "requires": { + "es-errors": "^1.3.0", "function-bind": "^1.1.2", "has-proto": "^1.0.1", "has-symbols": "^1.0.3", "hasown": "^2.0.0" } }, + "get-nonce": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz", + "integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==" + }, "get-stream": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", @@ -22112,13 +26731,23 @@ } }, "get-symbol-description": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", - "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", + "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", "dev": true, "requires": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" + "call-bind": "^1.0.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4" + } + }, + 
"get-tsconfig": { + "version": "4.7.3", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.3.tgz", + "integrity": "sha512-ZvkrzoUA0PQZM6fy6+/Hce561s+faD1rsNwhnO5FelNjyy7EMGJ3Rz1AQ8GYDWjhRs/7dBLOEJvhK8MiEJOAFg==", + "dev": true, + "requires": { + "resolve-pkg-maps": "^1.0.0" } }, "getos": { @@ -22229,49 +26858,41 @@ "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", "dev": true }, + "graphiql": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/graphiql/-/graphiql-3.1.2.tgz", + "integrity": "sha512-k3p2k+7ZgARdLnqMDV192VL47cTmPNn02n5ullULnBE1nv1dtJfUve+AJxaU+kU8JcbwCxSxu3qlIxuu1N3mDQ==", + "requires": { + "@graphiql/react": "^0.20.4", + "@graphiql/toolkit": "^0.9.1", + "graphql-language-service": "^5.2.0", + "markdown-it": "^12.2.0" + } + }, "graphql": { "version": "16.8.1", "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.8.1.tgz", - "integrity": "sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw==", - "peer": true + "integrity": "sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw==" }, "graphql-config": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/graphql-config/-/graphql-config-4.5.0.tgz", - "integrity": "sha512-x6D0/cftpLUJ0Ch1e5sj1TZn6Wcxx4oMfmhaG9shM0DKajA9iR+j1z86GSTQ19fShbGvrSSvbIQsHku6aQ6BBw==", - "dev": true, - "requires": { - "@graphql-tools/graphql-file-loader": "^7.3.7", - "@graphql-tools/json-file-loader": "^7.3.7", - "@graphql-tools/load": "^7.5.5", - "@graphql-tools/merge": "^8.2.6", - "@graphql-tools/url-loader": "^7.9.7", - "@graphql-tools/utils": "^9.0.0", - "cosmiconfig": "8.0.0", - "jiti": "1.17.1", - "minimatch": "4.2.3", - "string-env-interpolation": "1.0.1", + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/graphql-config/-/graphql-config-5.0.3.tgz", + "integrity": "sha512-BNGZaoxIBkv9yy6Y7omvsaBUHOzfFcII3UN++tpH8MGOKFPFkCPZuwx09ggANMt8FgyWP1Od8SWPmrUEZca4NQ==", + "dev": true, + "requires": { + "@graphql-tools/graphql-file-loader": "^8.0.0", + "@graphql-tools/json-file-loader": "^8.0.0", + "@graphql-tools/load": "^8.0.0", + "@graphql-tools/merge": "^9.0.0", + "@graphql-tools/url-loader": "^8.0.0", + "@graphql-tools/utils": "^10.0.0", + "cosmiconfig": "^8.1.0", + "jiti": "^1.18.2", + "minimatch": "^4.2.3", + "string-env-interpolation": "^1.0.1", "tslib": "^2.4.0" }, "dependencies": { - "cosmiconfig": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.0.0.tgz", - "integrity": "sha512-da1EafcpH6b/TD8vDRaWV7xFINlHlF6zKsGwS1TsuVJTZRkquaS5HTMq7uq6h31619QjbsYl21gVDOm32KM1vQ==", - "dev": true, - "requires": { - "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "parse-json": "^5.0.0", - "path-type": "^4.0.0" - } - }, - "jiti": { - "version": "1.17.1", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.17.1.tgz", - "integrity": "sha512-NZIITw8uZQFuzQimqjUxIrIcEdxYDFIe/0xYfIlVXTkiBjjyBEvgasj5bb0/cHtPRD/NziPbT312sFrkI5ALpw==", - "dev": true - }, "minimatch": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-4.2.3.tgz", @@ -22283,6 +26904,15 @@ } } }, + "graphql-language-service": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/graphql-language-service/-/graphql-language-service-5.2.0.tgz", + "integrity": "sha512-o/ZgTS0pBxWm3hSF4+6GwiV1//DxzoLWEbS38+jqpzzy1d/QXBidwQuVYTOksclbtOJZ3KR/tZ8fi/tI6VpVMg==", + "requires": { + "nullthrows": "^1.0.0", + 
"vscode-languageserver-types": "^3.17.1" + } + }, "graphql-request": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/graphql-request/-/graphql-request-6.1.0.tgz", @@ -22313,9 +26943,9 @@ } }, "graphql-ws": { - "version": "5.12.1", - "resolved": "https://registry.npmjs.org/graphql-ws/-/graphql-ws-5.12.1.tgz", - "integrity": "sha512-umt4f5NnMK46ChM2coO36PTFhHouBrK9stWWBczERguwYrGnPNxJ9dimU6IyOBfOkC6Izhkg4H8+F51W/8CYDg==", + "version": "5.16.0", + "resolved": "https://registry.npmjs.org/graphql-ws/-/graphql-ws-5.16.0.tgz", + "integrity": "sha512-Ju2RCU2dQMgSKtArPbEtsK5gNLnsQyTNIo/T7cZNp96niC1x0KdJNZV0TIoilceBPQwfb5itrGl8pkFeOUMl4A==", "devOptional": true, "requires": {} }, @@ -22344,18 +26974,18 @@ "dev": true }, "has-property-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", - "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dev": true, "requires": { - "get-intrinsic": "^1.2.2" + "es-define-property": "^1.0.0" } }, "has-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", - "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", + "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", "dev": true }, "has-symbols": { @@ -22365,18 +26995,18 @@ "dev": true }, "has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "dev": true, "requires": { - "has-symbols": "^1.0.2" + "has-symbols": "^1.0.3" } }, "hasown": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", - "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", "requires": { "function-bind": "^1.1.2" } @@ -22421,6 +27051,11 @@ "tslib": "^2.0.3" } }, + "hey-listen": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/hey-listen/-/hey-listen-1.0.8.tgz", + "integrity": "sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q==" + }, "hoist-non-react-statics": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", @@ -22430,12 +27065,12 @@ } }, "html-encoding-sniffer": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-3.0.0.tgz", - "integrity": 
"sha512-oWv4T4yJ52iKrufjnyZPkrN0CH3QnrUqdB6In1g5Fe1mia8GmF36gnfNySxoZtxD5+NmYw1EElVXiBk93UeskA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz", + "integrity": "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==", "dev": true, "requires": { - "whatwg-encoding": "^2.0.0" + "whatwg-encoding": "^3.1.1" } }, "html-escaper": { @@ -22450,9 +27085,9 @@ "integrity": "sha512-/sXbVCWayk6GDVg3ctOX6nxaVj7So40FcFAnWlWGNAB1LpYKcV5Cd10APjPjW80O7zYW2MsjBV4zZ7IZO5fVow==" }, "http-proxy-agent": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-6.1.1.tgz", - "integrity": "sha512-JRCz+4Whs6yrrIoIlrH+ZTmhrRwtMnmOHsHn8GFEn9O2sVfSE+DAZ3oyyGIKF8tjJEeSJmP89j7aTjVsSqsU0g==", + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz", + "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==", "dev": true, "requires": { "agent-base": "^7.1.0", @@ -22471,9 +27106,9 @@ } }, "https-proxy-agent": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-6.2.1.tgz", - "integrity": "sha512-ONsE3+yfZF2caH5+bJlcddtWqNI3Gvs5A38+ngvljxaBiRXRswym2c7yf8UAeFpRFKjFNHIFEHqR/OLAWJzyiA==", + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", + "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", "dev": true, "requires": { "agent-base": "^7.0.2", @@ -22493,9 +27128,9 @@ "dev": true }, "iconify-icon": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/iconify-icon/-/iconify-icon-1.0.8.tgz", - "integrity": "sha512-jvbUKHXf8EnGGArmhlP2IG8VqQLFFyTvTqb9LVL2TKTh7/eCCD1o2HHE9thpbJJb6B8hzhcFb6rOKhvo7reNKA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/iconify-icon/-/iconify-icon-2.0.0.tgz", + "integrity": "sha512-38ArOkxmyD9oDbJBkxaFpE6eZ0K3F9Sk+3x4mWGfjMJaxi3EKrix9Du4iWhgBFT3imKC4FJJE34ur2Rc7Xm+Uw==", "requires": { "@iconify/types": "^2.0.0" } @@ -22612,12 +27247,12 @@ } }, "internal-slot": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.6.tgz", - "integrity": "sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", + "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", "dev": true, "requires": { - "get-intrinsic": "^1.2.2", + "es-errors": "^1.3.0", "hasown": "^2.0.0", "side-channel": "^1.0.4" } @@ -22631,7 +27266,6 @@ "version": "2.2.4", "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", - "dev": true, "requires": { "loose-envify": "^1.0.0" } @@ -22671,14 +27305,13 @@ } }, "is-array-buffer": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", - "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", + "integrity": 
"sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", "dev": true, "requires": { "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.0", - "is-typed-array": "^1.1.10" + "get-intrinsic": "^1.2.1" } }, "is-arrayish": { @@ -22722,6 +27355,15 @@ "has-tostringtag": "^1.0.0" } }, + "is-builtin-module": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-3.2.1.tgz", + "integrity": "sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==", + "dev": true, + "requires": { + "builtin-modules": "^3.3.0" + } + }, "is-callable": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", @@ -22745,6 +27387,15 @@ "hasown": "^2.0.0" } }, + "is-data-view": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", + "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", + "dev": true, + "requires": { + "is-typed-array": "^1.1.13" + } + }, "is-date-object": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", @@ -22832,9 +27483,9 @@ "dev": true }, "is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", "dev": true }, "is-number": { @@ -22862,12 +27513,25 @@ "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==" }, + "is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "requires": { + "isobject": "^3.0.1" + } + }, "is-potential-custom-element-name": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==", "dev": true }, + "is-primitive": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/is-primitive/-/is-primitive-3.0.1.tgz", + "integrity": "sha512-GljRxhWvlCNRfZyORiH77FwdFwGcMO620o37EOYC0ORWdq+WYNVqW0w2Juzew4M+L81l6/QS3t5gkkihyRqv9w==" + }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -22894,12 +27558,12 @@ "dev": true }, "is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", + "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", "dev": true, "requires": { - "call-bind": "^1.0.2" + 
"call-bind": "^1.0.7" } }, "is-stream": { @@ -22927,12 +27591,12 @@ } }, "is-typed-array": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.12.tgz", - "integrity": "sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", + "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", "dev": true, "requires": { - "which-typed-array": "^1.1.11" + "which-typed-array": "^1.1.14" } }, "is-typedarray": { @@ -23007,6 +27671,11 @@ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" }, + "isobject": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==" + }, "isomorphic-ws": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/isomorphic-ws/-/isomorphic-ws-5.0.0.tgz", @@ -23037,10 +27706,21 @@ "supports-color": "^7.1.0" } }, + "istanbul-lib-source-maps": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.4.tgz", + "integrity": "sha512-wHOoEsNJTVltaJp8eVkm8w+GVkVNHT2YDYo53YdzQEL2gWm1hBX5cGFR9hQJtuGLebidVX7et3+dmDZrmclduw==", + "dev": true, + "requires": { + "@jridgewell/trace-mapping": "^0.3.23", + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0" + } + }, "istanbul-reports": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.6.tgz", - "integrity": "sha512-TLgnMkKg3iTDsQ9PbPTdpfAK2DzjF9mqUG7RMgcQl8oFjad8ob4laGxv5XV5U9MAfx8D6tSJiUyuAwzLicaxlg==", + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", "dev": true, "requires": { "html-escaper": "^2.0.0", @@ -23080,23 +27760,17 @@ "integrity": "sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q==" }, "jose": { - "version": "4.15.5", - "resolved": "https://registry.npmjs.org/jose/-/jose-4.15.5.tgz", - "integrity": "sha512-jc7BFxgKPKi94uOvEmzlSWFFe2+vASyXaKUpdQKatWAESU2MWjDfFf0fdfc83CDKcA5QecabZeNLyfhe3yKNkg==", + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/jose/-/jose-5.2.3.tgz", + "integrity": "sha512-KUXdbctm1uHVL8BYhnyHkgp3zDX5KW8ZhAKVFEfUbU2P8Alpzjb+48hHvjOdQIyPshoblhzsuqOwEEAbtHVirA==", "dev": true }, "jotai": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/jotai/-/jotai-2.6.3.tgz", - "integrity": "sha512-0htSJ2d6426ZdSEYHncJHXY6Lkgde1Hc2HE/ADIRi9d2L3hQL+jLKY1LkWBMeCNyOSlKH8+1u/Gc33Ox0uq21Q==", + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/jotai/-/jotai-2.7.2.tgz", + "integrity": "sha512-6Ft5kpNu8p93Ssf1Faoza3hYQZRIYp7rioK8MwTTFnbQKwUyZElwquPwl1h6U0uo9hC0jr+ghO3gcSjc6P35/Q==", "requires": {} }, - "js-string-escape": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/js-string-escape/-/js-string-escape-1.0.1.tgz", - "integrity": "sha512-Smw4xcfIQ5LVjAOuJCvN/zIodzA/BBSsluuoSykP+lUvScIi4U6RJLfwHet5cxFnCswUjISV8oAXaqaJDY3chg==", - "dev": true - }, "js-tokens": { "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -23117,69 +27791,32 @@ "dev": true }, "jsdom": { - "version": "21.1.2", - "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-21.1.2.tgz", - "integrity": "sha512-sCpFmK2jv+1sjff4u7fzft+pUh2KSUbUrEHYHyfSIbGTIcmnjyp83qg6qLwdJ/I3LpTXx33ACxeRL7Lsyc6lGQ==", + "version": "24.0.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-24.0.0.tgz", + "integrity": "sha512-UDS2NayCvmXSXVP6mpTj+73JnNQadZlr9N68189xib2tx5Mls7swlTNao26IoHv46BZJFvXygyRtyXd1feAk1A==", "dev": true, "requires": { - "abab": "^2.0.6", - "acorn": "^8.8.2", - "acorn-globals": "^7.0.0", - "cssstyle": "^3.0.0", - "data-urls": "^4.0.0", + "cssstyle": "^4.0.1", + "data-urls": "^5.0.0", "decimal.js": "^10.4.3", - "domexception": "^4.0.0", - "escodegen": "^2.0.0", "form-data": "^4.0.0", - "html-encoding-sniffer": "^3.0.0", - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.1", + "html-encoding-sniffer": "^4.0.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.2", "is-potential-custom-element-name": "^1.0.1", - "nwsapi": "^2.2.4", + "nwsapi": "^2.2.7", "parse5": "^7.1.2", "rrweb-cssom": "^0.6.0", "saxes": "^6.0.0", "symbol-tree": "^3.2.4", - "tough-cookie": "^4.1.2", - "w3c-xmlserializer": "^4.0.0", + "tough-cookie": "^4.1.3", + "w3c-xmlserializer": "^5.0.0", "webidl-conversions": "^7.0.0", - "whatwg-encoding": "^2.0.0", - "whatwg-mimetype": "^3.0.0", - "whatwg-url": "^12.0.1", - "ws": "^8.13.0", - "xml-name-validator": "^4.0.0" - }, - "dependencies": { - "agent-base": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", - "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", - "dev": true, - "requires": { - "debug": "4" - } - }, - "http-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", - "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", - "dev": true, - "requires": { - "@tootallnate/once": "2", - "agent-base": "6", - "debug": "4" - } - }, - "https-proxy-agent": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", - "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", - "dev": true, - "requires": { - "agent-base": "6", - "debug": "4" - } - } + "whatwg-encoding": "^3.1.1", + "whatwg-mimetype": "^4.0.0", + "whatwg-url": "^14.0.0", + "ws": "^8.16.0", + "xml-name-validator": "^5.0.0" } }, "jsesc": { @@ -23330,6 +27967,14 @@ "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" }, + "linkify-it": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz", + "integrity": "sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==", + "requires": { + "uc.micro": "^1.0.1" + } + }, "lint-staged": { "version": "13.3.0", "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-13.3.0.tgz", @@ -23622,10 +28267,14 @@ } }, "local-pkg": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.4.3.tgz", - "integrity": "sha512-SFppqq5p42fe2qcZQqqEOiVRXl+WCP1MdT6k7BDEW1j++sp5fIY+/fdRQitvKgB5BrBcmrs5m/L0v2FrU5MY1g==", - "dev": true + 
"version": "0.5.0", + "resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.5.0.tgz", + "integrity": "sha512-ok6z3qlYyCDS4ZEU27HaU6x/xZa9Whf8jD4ptH5UZTQYZVYeb9bnZ3ojVhiJNLiXK1Hfc0GNbLXcmZ5plLDDBg==", + "dev": true, + "requires": { + "mlly": "^1.4.2", + "pkg-types": "^1.0.3" + } }, "locate-path": { "version": "6.0.0", @@ -23641,6 +28290,11 @@ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, + "lodash.debounce": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", + "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==" + }, "lodash.merge": { "version": "4.6.2", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", @@ -23653,6 +28307,12 @@ "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==", "dev": true }, + "lodash.sortby": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", + "integrity": "sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==", + "dev": true + }, "log-symbols": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", @@ -23742,14 +28402,25 @@ "dev": true }, "magic-string": { - "version": "0.30.5", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.5.tgz", - "integrity": "sha512-7xlpfBaQaP/T6Vh8MO/EqXSW5En6INHEvEXQiuff7Gku0PWjU3uf6w/j9o7O+SpB5fOAkrI5HeoNgwjEO0pFsA==", + "version": "0.30.8", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.8.tgz", + "integrity": "sha512-ISQTe55T2ao7XtlAStud6qwYPZjE4GK1S/BeVPus4jrq6JuOnQ00YKQC581RWhR122W7msZV263KzVeLoqidyQ==", "dev": true, "requires": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, + "magicast": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.3.3.tgz", + "integrity": "sha512-ZbrP1Qxnpoes8sz47AM0z08U+jW6TyRgZzcWy3Ma3vDhJttwMwAFDMMQFobwdBxByBD46JYmxRzeF7w2+wJEuw==", + "dev": true, + "requires": { + "@babel/parser": "^7.23.6", + "@babel/types": "^7.23.6", + "source-map-js": "^1.0.2" + } + }, "make-dir": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", @@ -23769,9 +28440,9 @@ } }, "semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", "dev": true, "requires": { "lru-cache": "^6.0.0" @@ -23797,20 +28468,30 @@ "integrity": "sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg==", "dev": true }, + "markdown-it": { + "version": "12.3.2", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz", + "integrity": "sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==", + "requires": { + "argparse": "^2.0.1", + "entities": "~2.1.0", + "linkify-it": "^3.0.1", + "mdurl": "^1.0.1", + "uc.micro": "^1.0.5" + }, + "dependencies": { + "entities": { + "version": "2.1.0", 
+ "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", + "integrity": "sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==" + } + } + }, "markdown-table": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.3.tgz", "integrity": "sha512-Z1NL3Tb1M9wH4XESsCDEksWoKTdlUafKc4pt0GRwjUyXaCFZ+dc3g2erqB6zm3szA2IUSi7VnPI+o/9jnxh9hw==" }, - "md5-hex": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/md5-hex/-/md5-hex-3.0.1.tgz", - "integrity": "sha512-BUiRtTtV39LIJwinWBjqVsU9xhdnz7/i889V859IBFpuqGAj6LuOvHv5XLbgZ2R7ptJoJaEcxkv88/h25T7Ciw==", - "dev": true, - "requires": { - "blueimp-md5": "^2.10.0" - } - }, "mdast-util-find-and-replace": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.1.tgz", @@ -24013,6 +28694,16 @@ "@types/mdast": "^4.0.0" } }, + "mdn-data": { + "version": "2.0.30", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz", + "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==" + }, + "mdurl": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==" + }, "merge-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", @@ -24028,7 +28719,6 @@ "version": "1.3.0", "resolved": "https://registry.npmjs.org/meros/-/meros-1.3.0.tgz", "integrity": "sha512-2BNGOimxEz5hmjUG2FwoxCt5HN7BXdaWyFqEwxPTrJzVdABtrL4TiHTcsWSFAxPQ/tOnEaQEJh3qWq71QRMY+w==", - "dev": true, "requires": {} }, "micromark": { @@ -24383,9 +29073,9 @@ "integrity": "sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==" }, "mlly": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.5.0.tgz", - "integrity": "sha512-NPVQvAY1xr1QoVeG0cy8yUYC7FQcOx6evl/RjT1wL5FvzPnzOysoqB/jmx/DhssT2dYa8nxECLAaFI/+gVLhDQ==", + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.6.1.tgz", + "integrity": "sha512-vLgaHvaeunuOXHSmEbZ9izxPx3USsk8KCQ8iC+aTlp5sKRSoZvwhHh5L9VbKSaVC6sJDqbyohIS76E2VmHIPAA==", "dev": true, "requires": { "acorn": "^8.11.3", @@ -24432,12 +29122,6 @@ "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", "dev": true }, - "natural-compare-lite": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz", - "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==", - "dev": true - }, "neo-async": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", @@ -24456,7 +29140,9 @@ "version": "7.1.0", "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.0.tgz", "integrity": "sha512-mNcltoe1R8o7STTegSOHdnJNN7s5EUvhoS7ShnTHDyOSd+8H+UdWODq6qSv67PjC8Zc5JRT8+oLAMCr0SIXw7g==", - "dev": true + "dev": true, + "optional": true, + "peer": true }, "node-fetch": { "version": "2.7.0", @@ -24517,11 +29203,18 @@ "path-key": "^3.0.0" } }, + "nth-check": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", + "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", + 
"requires": { + "boolbase": "^1.0.0" + } + }, "nullthrows": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/nullthrows/-/nullthrows-1.1.1.tgz", - "integrity": "sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw==", - "dev": true + "integrity": "sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw==" }, "nwsapi": { "version": "2.2.7", @@ -24574,58 +29267,59 @@ } }, "object.entries": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.7.tgz", - "integrity": "sha512-jCBs/0plmPsOnrKAfFQXRG2NFjlhZgjjcBLSmTnEhU8U6vVTsVe8ANeQJCHTl3gSsI4J+0emOoCgoKlmQPMgmA==", + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.8.tgz", + "integrity": "sha512-cmopxi8VwRIAw/fkijJohSfpef5PdN0pMQJN6VC/ZKvn0LIknWD8KtgY6KlQdEc4tIjcQ3HxSMmnvtzIscdaYQ==", "dev": true, "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" } }, "object.fromentries": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.7.tgz", - "integrity": "sha512-UPbPHML6sL8PI/mOqPwsH4G6iyXcCGzLin8KvEPenOZN5lpCNBZZQ+V62vdjB1mQHrmqGQt5/OJzemUA+KJmEA==", + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", + "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", "dev": true, "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" } }, "object.groupby": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.1.tgz", - "integrity": "sha512-HqaQtqLnp/8Bn4GL16cj+CUYbnpe1bh0TtEaWvybszDG4tgxCJuRpV8VGuvNaI1fAnI4lUJzDG55MXcOH4JZcQ==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz", + "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==", "dev": true, "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2" } }, "object.hasown": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.3.tgz", - "integrity": "sha512-fFI4VcYpRHvSLXxP7yiZOMAd331cPfd2p7PFDVbgUsYOfCT3tICVqXWngbjr4m49OvsBwUBQ6O2uQoJvy3RexA==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.4.tgz", + "integrity": "sha512-FZ9LZt9/RHzGySlBARE3VF+gE26TxR38SdmqOqliuTnl9wrKulaQs+4dee1V+Io8VfxqzAfHu6YuRgUy8OHoTg==", "dev": true, "requires": { - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" } }, "object.values": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.7.tgz", - "integrity": "sha512-aU6xnDFYT3x17e/f0IiiwlGPTy2jzMySGfUB4fq6z7CV8l85CWHDk5ErhyhpfDHhrOMwGFhSQkhMGHaIotA6Ng==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.0.tgz", + "integrity": 
"sha512-yBYjY9QX2hnRmZHAjG/f13MzmBzxzYgQhFrke06TTyKY5zSTEqkOeukBzIdVA3j3ulu8Qa3MbVFShV7T2RmGtQ==", "dev": true, "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" } }, "once": { @@ -24982,13 +29676,13 @@ } }, "playwright": { - "version": "1.41.1", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.41.1.tgz", - "integrity": "sha512-gdZAWG97oUnbBdRL3GuBvX3nDDmUOuqzV/D24dytqlKt+eI5KbwusluZRGljx1YoJKZ2NRPaeWiFTeGZO7SosQ==", + "version": "1.42.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.42.1.tgz", + "integrity": "sha512-PgwB03s2DZBcNRoW+1w9E+VkLBxweib6KTXM0M3tkiT4jVxKSi6PmVJ591J+0u10LUrgxB7dLRbiJqO5s2QPMg==", "dev": true, "requires": { "fsevents": "2.3.2", - "playwright-core": "1.41.1" + "playwright-core": "1.42.1" }, "dependencies": { "fsevents": { @@ -25001,19 +29695,36 @@ } }, "playwright-core": { - "version": "1.41.1", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.41.1.tgz", - "integrity": "sha512-/KPO5DzXSMlxSX77wy+HihKGOunh3hqndhqeo/nMxfigiKzogn8kfL0ZBDu0L1RKgan5XHCPmn6zXd2NUJgjhg==", + "version": "1.42.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.42.1.tgz", + "integrity": "sha512-mxz6zclokgrke9p1vtdy/COWBH+eOZgYUVVU34C73M+4j4HLlQJHtfcqiqqxpP0o8HhMkflvfbquLX5dg6wlfA==", + "dev": true + }, + "popmotion": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/popmotion/-/popmotion-11.0.3.tgz", + "integrity": "sha512-Y55FLdj3UxkR7Vl3s7Qr4e9m0onSnP8W7d/xQLsoJM40vs6UKHFdygs6SWryasTZYqugMjm3BepCF4CWXDiHgA==", + "requires": { + "framesync": "6.0.1", + "hey-listen": "^1.0.8", + "style-value-types": "5.0.0", + "tslib": "^2.1.0" + } + }, + "possible-typed-array-names": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", + "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", "dev": true }, "postcss": { - "version": "8.4.33", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.33.tgz", - "integrity": "sha512-Kkpbhhdjw2qQs2O2DGX+8m5OVqEcbB9HRBvuYM9pgrjEFUg30A9LmXNlTAUj4S9kgtGyrMbTzVjH7E+s5Re2yg==", + "version": "8.4.38", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz", + "integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==", "requires": { "nanoid": "^3.3.7", "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" + "source-map-js": "^1.2.0" } }, "postcss-import": { @@ -25047,11 +29758,6 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.0.0.tgz", "integrity": "sha512-K2U4W2Ff5ibV7j7ydLr+zLAkIg5JJ4lPn1Ltsdt+Tz/IjQ8buJ55pZAxoP34lqIiwtF9iAvtLv3JGv7CAyAg+g==" - }, - "yaml": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz", - "integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==" } } }, @@ -25262,14 +29968,13 @@ } }, "query-string": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/query-string/-/query-string-7.1.3.tgz", - "integrity": "sha512-hh2WYhq4fi8+b+/2Kg9CEge4fDPvHS534aOOvOZeQ3+Vf2mCFsaFBYj0i+iXcAq6I9Vzp5fjMFBlONvayDC1qg==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/query-string/-/query-string-9.0.0.tgz", + "integrity": 
"sha512-4EWwcRGsO2H+yzq6ddHcVqkCQ2EFUSfDMEjF8ryp8ReymyZhIuaFRGLomeOQLkrzacMHoyky2HW0Qe30UbzkKw==", "requires": { - "decode-uri-component": "^0.2.2", - "filter-obj": "^1.1.0", - "split-on-first": "^1.0.0", - "strict-uri-encode": "^2.0.0" + "decode-uri-component": "^0.4.1", + "filter-obj": "^5.1.0", + "split-on-first": "^3.0.0" } }, "querystringify": { @@ -25284,9 +29989,9 @@ "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==" }, "ramda": { - "version": "0.28.0", - "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.28.0.tgz", - "integrity": "sha512-9QnLuG/kPVgWvMQ4aODhsBUFKOUmnbUnsSXACv+NCQZcHbeb+v8Lodp8OVxtRULN1/xOyYLLaL6npE6dMq5QTA==" + "version": "0.29.1", + "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.29.1.tgz", + "integrity": "sha512-OfxIeWzd4xdUNxlWhgFazxsA/nl3mS4/jGZI5n00uWOoSSFRhC1b6gl6xvmzUamgmqELraWp0J/qqVlXYPDPyA==" }, "react": { "version": "18.2.0", @@ -25296,6 +30001,12 @@ "loose-envify": "^1.1.0" } }, + "react-accessible-treeview": { + "version": "2.8.3", + "resolved": "https://registry.npmjs.org/react-accessible-treeview/-/react-accessible-treeview-2.8.3.tgz", + "integrity": "sha512-taDTIYZ6p96/zIhJBUKvyGTXcInudatP/9fwKG0BW+VRf1PmU5hOT2FkDovDKzSwj2VSOj1PRx+E6ojhOA+2xA==", + "requires": {} + }, "react-datepicker": { "version": "4.25.0", "resolved": "https://registry.npmjs.org/react-datepicker/-/react-datepicker-4.25.0.tgz", @@ -25307,6 +30018,16 @@ "prop-types": "^15.7.2", "react-onclickoutside": "^6.13.0", "react-popper": "^2.3.0" + }, + "dependencies": { + "date-fns": { + "version": "2.30.0", + "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz", + "integrity": "sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==", + "requires": { + "@babel/runtime": "^7.21.0" + } + } } }, "react-diff-view": { @@ -25337,9 +30058,9 @@ "integrity": "sha512-nsO+KSNgo1SbJqJEYRE9ERzo7YtYbou/OqjSQKxV7jcKox7+usiUVZOAC+XnDOABXggQTno0Y1CpVnuWEc1boQ==" }, "react-hook-form": { - "version": "7.49.3", - "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.49.3.tgz", - "integrity": "sha512-foD6r3juidAT1cOZzpmD/gOKt7fRsDhXXZ0y28+Al1CHgX+AY1qIN9VSIIItXRq1dN68QrRwl1ORFlwjBaAqeQ==", + "version": "7.51.2", + "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.51.2.tgz", + "integrity": "sha512-y++lwaWjtzDt/XNnyGDQy6goHskFualmDlf+jzEZvjvz6KWDf7EboL7pUvRCzPTJd0EOPpdekYaQLEvvG6m6HA==", "requires": {} }, "react-is": { @@ -25398,21 +30119,42 @@ "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.14.0.tgz", "integrity": "sha512-wViHqhAd8OHeLS/IRMJjTSDHF3U9eWi62F/MledQGPdJGDhodXJ9PBLNGr6WWL7qlH12Mt3TyTpbS+hGXMjCzQ==" }, + "react-remove-scroll": { + "version": "2.5.5", + "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.5.tgz", + "integrity": "sha512-ImKhrzJJsyXJfBZ4bzu8Bwpka14c/fQt0k+cyFp/PBhTfyDnU5hjOtM4AG/0AMyy8oKzOTR0lDgJIM7pYXI0kw==", + "requires": { + "react-remove-scroll-bar": "^2.3.3", + "react-style-singleton": "^2.2.1", + "tslib": "^2.1.0", + "use-callback-ref": "^1.3.0", + "use-sidecar": "^1.1.2" + } + }, + "react-remove-scroll-bar": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.6.tgz", + "integrity": "sha512-DtSYaao4mBmX+HDo5YWYdBWQwYIQQshUV/dVxFxK+KM26Wjwp1gZ6rv6OC3oujI6Bfu6Xyg3TwK533AQutsn/g==", + "requires": { + "react-style-singleton": "^2.2.1", + "tslib": "^2.0.0" + } + }, "react-router": { - 
"version": "6.21.3", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.21.3.tgz", - "integrity": "sha512-a0H638ZXULv1OdkmiK6s6itNhoy33ywxmUFT/xtSoVyf9VnC7n7+VT4LjVzdIHSaF5TIh9ylUgxMXksHTgGrKg==", + "version": "6.22.3", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.22.3.tgz", + "integrity": "sha512-dr2eb3Mj5zK2YISHK++foM9w4eBnO23eKnZEDs7c880P6oKbrjz/Svg9+nxqtHQK+oMW4OtjZca0RqPglXxguQ==", "requires": { - "@remix-run/router": "1.14.2" + "@remix-run/router": "1.15.3" } }, "react-router-dom": { - "version": "6.21.3", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.21.3.tgz", - "integrity": "sha512-kNzubk7n4YHSrErzjLK72j0B5i969GsuCGazRl3G6j1zqZBLjuSlYBdVdkDOgzGdPIffUOc9nmgiadTEVoq91g==", + "version": "6.22.3", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.22.3.tgz", + "integrity": "sha512-7ZILI7HjcE+p31oQvwbokjk6OA/bnFxrhJ19n82Ex9Ph8fNAq+Hm/7KchpMGlTgWhUxRHMMCut+vEtNpWpowKw==", "requires": { - "@remix-run/router": "1.14.2", - "react-router": "6.21.3" + "@remix-run/router": "1.15.3", + "react-router": "6.22.3" } }, "react-shallow-renderer": { @@ -25441,6 +30183,16 @@ "react-transition-group": "^4.4.5" } }, + "react-style-singleton": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.1.tgz", + "integrity": "sha512-ZWj0fHEMyWkHzKYUr2Bs/4zU6XLmq9HsgBURm7g5pAVfyn49DgUiNgY2d4lXRlYSiCif9YBGpQleewkcqddc7g==", + "requires": { + "get-nonce": "^1.0.0", + "invariant": "^2.2.4", + "tslib": "^2.0.0" + } + }, "react-test-renderer": { "version": "18.2.0", "resolved": "https://registry.npmjs.org/react-test-renderer/-/react-test-renderer-18.2.0.tgz", @@ -25514,9 +30266,9 @@ } }, "recharts": { - "version": "2.12.2", - "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.12.2.tgz", - "integrity": "sha512-9bpxjXSF5g81YsKkTSlaX7mM4b6oYI1mIYck6YkUcWuL3tomADccI51/6thY4LmvhYuRTwpfrOvE80Zc3oBRfQ==", + "version": "2.12.3", + "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.12.3.tgz", + "integrity": "sha512-vE/F7wTlokf5mtCqVDJlVKelCjliLSJ+DJxj79XlMREm7gpV7ljwbrwE3CfeaoDlOaLX+6iwHaVRn9587YkwIg==", "requires": { "clsx": "^2.0.0", "eventemitter3": "^4.0.1", @@ -25544,45 +30296,90 @@ } }, "reflect.getprototypeof": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.4.tgz", - "integrity": "sha512-ECkTw8TmJwW60lOTR+ZkODISW6RQ8+2CL3COqtiJKLd6MmB45hN51HprHFziKLGkAuTGQhBb91V8cy+KHlaCjw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.6.tgz", + "integrity": "sha512-fmfw4XgoDke3kdI6h4xcUz1dG8uaiv5q9gcEwLS4Pnth2kxT+GZ7YehS1JTMGBQmtV7Y4GFGbs2re2NqhdozUg==", "dev": true, "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1", + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.1", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", "globalthis": "^1.0.3", "which-builtin-type": "^1.1.3" } }, + "regenerate": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", + "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==" + }, + "regenerate-unicode-properties": { + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.1.tgz", + 
"integrity": "sha512-X007RyZLsCJVVrjgEFVpLUTZwyOZk3oiL75ZcuYjlIWd6rNJtOjkBwQc5AsRrpbKVkxN6sklw/k/9m2jJYOf8Q==", + "requires": { + "regenerate": "^1.4.2" + } + }, "regenerator-runtime": { "version": "0.14.1", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==" }, + "regenerator-transform": { + "version": "0.15.2", + "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.2.tgz", + "integrity": "sha512-hfMp2BoF0qOk3uc5V20ALGDS2ddjQaLrdl7xrGXvAIow7qeWRM2VA2HuCHkUKk9slq3VwEwLNK3DFBqDfPGYtg==", + "requires": { + "@babel/runtime": "^7.8.4" + } + }, "regexp.prototype.flags": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz", - "integrity": "sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==", + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", + "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", "dev": true, "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "set-function-name": "^2.0.0" + "call-bind": "^1.0.6", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "set-function-name": "^2.0.1" } }, - "regexpp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", - "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", - "dev": true + "regexpu-core": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-5.3.2.tgz", + "integrity": "sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ==", + "requires": { + "@babel/regjsgen": "^0.8.0", + "regenerate": "^1.4.2", + "regenerate-unicode-properties": "^10.1.0", + "regjsparser": "^0.9.1", + "unicode-match-property-ecmascript": "^2.0.0", + "unicode-match-property-value-ecmascript": "^2.1.0" + } + }, + "regjsparser": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.9.1.tgz", + "integrity": "sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ==", + "requires": { + "jsesc": "~0.5.0" + }, + "dependencies": { + "jsesc": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", + "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==" + } + } }, "rehackt": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/rehackt/-/rehackt-0.0.3.tgz", - "integrity": "sha512-aBRHudKhOWwsTvCbSoinzq+Lej/7R8e8UoPvLZo5HirZIIBLGAgdG7SL9QpdcBoQ7+3QYPi3lRLknAzXBlhZ7g==", + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/rehackt/-/rehackt-0.0.6.tgz", + "integrity": "sha512-l3WEzkt4ntlEc/IB3/mF6SRgNHA6zfQR7BlGOgBTOmx7IJJXojDASav+NsgXHFjHn+6RmwqsGPFgZpabWpeOdw==", "requires": {} }, "relay-runtime": { @@ -25703,6 +30500,12 @@ "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", "dev": true }, + "resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": 
"sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true + }, "response-iterator": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/response-iterator/-/response-iterator-0.2.6.tgz", @@ -25739,10 +30542,26 @@ } }, "rollup": { - "version": "3.29.4", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.29.4.tgz", - "integrity": "sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw==", - "requires": { + "version": "4.14.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.14.0.tgz", + "integrity": "sha512-Qe7w62TyawbDzB4yt32R0+AbIo6m1/sqO7UPzFS8Z/ksL5mrfhA0v4CavfdmFav3D+ub4QeAgsGEe84DoWe/nQ==", + "requires": { + "@rollup/rollup-android-arm-eabi": "4.14.0", + "@rollup/rollup-android-arm64": "4.14.0", + "@rollup/rollup-darwin-arm64": "4.14.0", + "@rollup/rollup-darwin-x64": "4.14.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.14.0", + "@rollup/rollup-linux-arm64-gnu": "4.14.0", + "@rollup/rollup-linux-arm64-musl": "4.14.0", + "@rollup/rollup-linux-powerpc64le-gnu": "4.14.0", + "@rollup/rollup-linux-riscv64-gnu": "4.14.0", + "@rollup/rollup-linux-s390x-gnu": "4.14.0", + "@rollup/rollup-linux-x64-gnu": "4.14.0", + "@rollup/rollup-linux-x64-musl": "4.14.0", + "@rollup/rollup-win32-arm64-msvc": "4.14.0", + "@rollup/rollup-win32-ia32-msvc": "4.14.0", + "@rollup/rollup-win32-x64-msvc": "4.14.0", + "@types/estree": "1.0.5", "fsevents": "~2.3.2" } }, @@ -25776,13 +30595,13 @@ } }, "safe-array-concat": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.0.tgz", - "integrity": "sha512-ZdQ0Jeb9Ofti4hbt5lX3T2JcAamT9hfzYU1MNB+z/jaEbB6wfFfPIR/zEORmZqobkCCJhSjodobH6WHNmJ97dg==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", + "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", "dev": true, "requires": { - "call-bind": "^1.0.5", - "get-intrinsic": "^1.2.2", + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4", "has-symbols": "^1.0.3", "isarray": "^2.0.5" } @@ -25794,13 +30613,13 @@ "dev": true }, "safe-regex-test": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.2.tgz", - "integrity": "sha512-83S9w6eFq12BBIJYvjMux6/dkirb8+4zJRA9cxNBVb7Wq5fJBW+Xze48WqR8pxua7bDuAaaAxtVVd4Idjp1dBQ==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", + "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", "dev": true, "requires": { - "call-bind": "^1.0.5", - "get-intrinsic": "^1.2.2", + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", "is-regex": "^1.1.4" } }, @@ -25861,27 +30680,38 @@ "dev": true }, "set-function-length": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.0.tgz", - "integrity": "sha512-4DBHDoyHlM1IRPGYcoxexgh67y4ueR53FKV1yyxwFMY7aCqcN/38M1+SwZ/qJQ8iLv7+ck385ot4CcisOAPT9w==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", "dev": true, "requires": { - "define-data-property": "^1.1.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": 
"^1.2.2", + "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.1" + "has-property-descriptors": "^1.0.2" } }, "set-function-name": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", - "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", "dev": true, "requires": { - "define-data-property": "^1.0.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", "functions-have-names": "^1.2.3", - "has-property-descriptors": "^1.0.0" + "has-property-descriptors": "^1.0.2" + } + }, + "set-value": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/set-value/-/set-value-4.1.0.tgz", + "integrity": "sha512-zTEg4HL0RwVrqcWs3ztF+x1vkxfm0lP+MQQFPiMJTKVceBwEV0A569Ou8l9IYQG8jOZdMVI1hGsc0tmeD2o/Lw==", + "requires": { + "is-plain-object": "^2.0.4", + "is-primitive": "^3.0.1" } }, "setimmediate": { @@ -25924,14 +30754,15 @@ "dev": true }, "side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", "dev": true, "requires": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" } }, "siginfo": { @@ -25984,9 +30815,9 @@ "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, "source-map-js": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", - "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", + "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==" }, "space-separated-tokens": { "version": "2.0.2", @@ -25994,9 +30825,9 @@ "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==" }, "split-on-first": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-1.1.0.tgz", - "integrity": "sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/split-on-first/-/split-on-first-3.0.0.tgz", + "integrity": "sha512-qxQJTx2ryR0Dw0ITYyekNQWpz6f8dGd7vffGNflQQ3Iqj9NJ6qiZ7ELpZsJ/QBhIVAiDfXdag3+Gp8RvWa62AA==" }, "sponge-case": { "version": "1.0.1", @@ -26051,11 +30882,6 @@ "integrity": "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==", "dev": true }, - "strict-uri-encode": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-2.0.0.tgz", - "integrity": 
"sha512-QwiXZgpRcKkhTj2Scnn++4PKtWsH0kpzZ62L2R6c/LUVYv7hVnZqcg2+sMuT6R7Jusu1vviK/MFsu6kNJfWlEQ==" - }, "string_decoder": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", @@ -26098,53 +30924,57 @@ } }, "string.prototype.matchall": { - "version": "4.0.10", - "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.10.tgz", - "integrity": "sha512-rGXbGmOEosIQi6Qva94HUjgPs9vKW+dkG7Y8Q5O2OYkWL6wFaTRZO8zM4mhP94uX55wgyrXzfS2aGtGzUL7EJQ==", + "version": "4.0.11", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.11.tgz", + "integrity": "sha512-NUdh0aDavY2og7IbBPenWqR9exH+E26Sv8e0/eTe1tltDGZL+GtBkDAnnyBtmekfK6/Dq3MkcGtzXFEd1LQrtg==", "dev": true, "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1", + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", "has-symbols": "^1.0.3", - "internal-slot": "^1.0.5", - "regexp.prototype.flags": "^1.5.0", - "set-function-name": "^2.0.0", - "side-channel": "^1.0.4" + "internal-slot": "^1.0.7", + "regexp.prototype.flags": "^1.5.2", + "set-function-name": "^2.0.2", + "side-channel": "^1.0.6" } }, "string.prototype.trim": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz", - "integrity": "sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ==", + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", + "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", "dev": true, "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.0", + "es-object-atoms": "^1.0.0" } }, "string.prototype.trimend": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.7.tgz", - "integrity": "sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", + "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", "dev": true, "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" } }, "string.prototype.trimstart": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz", - "integrity": "sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", "dev": true, "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + 
"es-object-atoms": "^1.0.0" } }, "stringify-entities": { @@ -26191,12 +31021,20 @@ "dev": true }, "strip-literal": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-1.3.0.tgz", - "integrity": "sha512-PugKzOsyXpArk0yWmUwqOZecSO0GH0bPoctLcqNDH9J04pVW3lflYE0ujElBGTloevcxF5MofAOZ7C5l2b+wLg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-2.1.0.tgz", + "integrity": "sha512-Op+UycaUt/8FbN/Z2TWPBLge3jWrP3xj10f3fnYxf052bKuS3EKs1ZQcVGjnEMdsNVAM+plXRdmjrZ/KgG3Skw==", "dev": true, "requires": { - "acorn": "^8.10.0" + "js-tokens": "^9.0.0" + }, + "dependencies": { + "js-tokens": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.0.tgz", + "integrity": "sha512-WriZw1luRMlmV3LGJaR6QOJjWwgLUTf89OwT2lUOyjX2dJGBwgmIkbcz+7WFZjrZM635JOIR517++e/67CP9dQ==", + "dev": true + } } }, "style-mod": { @@ -26212,6 +31050,15 @@ "inline-style-parser": "0.2.2" } }, + "style-value-types": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/style-value-types/-/style-value-types-5.0.0.tgz", + "integrity": "sha512-08yq36Ikn4kx4YU6RD7jWEv27v4V+PUsOGa4n/as8Et3CuODMJQ00ENeAVXAeydX4Z2j1XHZF1K2sX4mGl18fA==", + "requires": { + "hey-listen": "^1.0.8", + "tslib": "^2.1.0" + } + }, "subscriptions-transport-ws": { "version": "0.11.0", "resolved": "https://registry.npmjs.org/subscriptions-transport-ws/-/subscriptions-transport-ws-0.11.0.tgz", @@ -26324,6 +31171,27 @@ "resolved": "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz", "integrity": "sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==" }, + "svgo": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/svgo/-/svgo-3.2.0.tgz", + "integrity": "sha512-4PP6CMW/V7l/GmKRKzsLR8xxjdHTV4IMvhTnpuHwwBazSIlw5W/5SmPjN8Dwyt7lKbSJrRDgp4t9ph0HgChFBQ==", + "requires": { + "@trysound/sax": "0.2.0", + "commander": "^7.2.0", + "css-select": "^5.1.0", + "css-tree": "^2.3.1", + "css-what": "^6.1.0", + "csso": "^5.0.5", + "picocolors": "^1.0.0" + }, + "dependencies": { + "commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==" + } + } + }, "swap-case": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/swap-case/-/swap-case-2.0.2.tgz", @@ -26345,17 +31213,17 @@ "dev": true }, "tailwind-merge": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-2.2.1.tgz", - "integrity": "sha512-o+2GTLkthfa5YUt4JxPfzMIpQzZ3adD1vLVkvKE1Twl9UAhGsEbIZhHHZVRttyW177S8PDJI3bTQNaebyofK3Q==", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-2.2.2.tgz", + "integrity": "sha512-tWANXsnmJzgw6mQ07nE3aCDkCK4QdT3ThPMCzawoYA2Pws7vSTCvz3Vrjg61jVUGfFZPJzxEP+NimbcW+EdaDw==", "requires": { - "@babel/runtime": "^7.23.7" + "@babel/runtime": "^7.24.0" } }, "tailwindcss": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.1.tgz", - "integrity": "sha512-qAYmXRfk3ENzuPBakNK0SRrUDipP8NQnEY6772uDhflcQz5EhRdD7JNZxyrFHVQNCwULPBn6FNPp9brpO7ctcA==", + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.3.tgz", + "integrity": "sha512-U7sxQk/n397Bmx4JHbJx/iSOOv5G+II3f1kpLpY2QeUv5DcPdcTsYLlusZfq1NthHS1c1cZoyFmmkex1rzke0A==", "requires": { "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", 
@@ -26365,7 +31233,7 @@ "fast-glob": "^3.3.0", "glob-parent": "^6.0.2", "is-glob": "^4.0.3", - "jiti": "^1.19.1", + "jiti": "^1.21.0", "lilconfig": "^2.1.0", "micromatch": "^4.0.5", "normalize-path": "^3.0.0", @@ -26436,12 +31304,6 @@ "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==", "dev": true }, - "time-zone": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/time-zone/-/time-zone-1.0.0.tgz", - "integrity": "sha512-TIsDdtKo6+XrPtiTm1ssmMngN1sAhyKnTO2kunQWqNPWIVvCm15Wmw4SWInwTVgJ5u/Tr04+8Ei9TNcw4x4ONA==", - "dev": true - }, "tiny-invariant": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", @@ -26454,15 +31316,15 @@ "dev": true }, "tinypool": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-0.5.0.tgz", - "integrity": "sha512-paHQtnrlS1QZYKF/GnLoOM/DN9fqaGOFbCbxzAhwniySnzl9Ebk8w73/dd34DAhe/obUbPAOldTyYXQZxnPBPQ==", + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-0.8.3.tgz", + "integrity": "sha512-Ud7uepAklqRH1bvwy22ynrliC7Dljz7Tm8M/0RBUW+YRa4YHhZ6e4PpgE+fu1zr/WqB1kbeuVrdfeuyIBpy4tw==", "dev": true }, "tinyspy": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-2.2.0.tgz", - "integrity": "sha512-d2eda04AN/cPOR89F7Xv5bK/jrQEhmcLFe6HFldoeO9AJtps+fqEnh486vnT/8y4bw38pSyxDcTCAq+Ks2aJTg==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-2.2.1.tgz", + "integrity": "sha512-KYad6Vy5VDWV4GH3fjpseMQ/XU2BhIYP7Vzd0LG44qRWm/Yt2WCOTicFdvmgo6gWaqooMQCawTtILVQJupKu7A==", "dev": true }, "title-case": { @@ -26496,6 +31358,11 @@ "is-number": "^7.0.0" } }, + "toggle-selection": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/toggle-selection/-/toggle-selection-1.0.6.tgz", + "integrity": "sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ==" + }, "tough-cookie": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz", @@ -26523,12 +31390,12 @@ } }, "tr46": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-4.1.1.tgz", - "integrity": "sha512-2lv/66T7e5yNyhAAC4NaKe5nVavzuGJQVVtRYLyQ2OI8tsJ61PMLlelehb0wi2Hx6+hT/OJUWZcw8MjlSRnxvw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.0.0.tgz", + "integrity": "sha512-tk2G5R2KRwBd+ZN0zaEXpmzdKyOYksXwywulIX95MBODjSzMIuQnQ3m8JxgbhnL1LeVo7lqQKsYa1O3Htl7K5g==", "dev": true, "requires": { - "punycode": "^2.3.0" + "punycode": "^2.3.1" }, "dependencies": { "punycode": { @@ -26549,6 +31416,13 @@ "resolved": "https://registry.npmjs.org/trough/-/trough-2.1.0.tgz", "integrity": "sha512-AqTiAOLcj85xS7vQ8QkAV41hPDIJ71XJB4RCUrzo/1GM2CQwhkJGaf9Hgr7BOugMRpgGUrqRg/DrBDl4H40+8g==" }, + "ts-api-utils": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz", + "integrity": "sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==", + "dev": true, + "requires": {} + }, "ts-interface-checker": { "version": "0.1.13", "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", @@ -26598,11 +31472,17 @@ } }, "ts-toolbelt": { - "version": "6.15.5", - "resolved": "https://registry.npmjs.org/ts-toolbelt/-/ts-toolbelt-6.15.5.tgz", - "integrity": "sha512-FZIXf1ksVyLcfr7M317jbB67XFJhOO1YqdTcuGaq9q5jLUoTikukZ+98TPjKiP2jC5CgmYdWWYs0s2nLSU0/1A==", + 
"version": "9.6.0", + "resolved": "https://registry.npmjs.org/ts-toolbelt/-/ts-toolbelt-9.6.0.tgz", + "integrity": "sha512-nsZd8ZeNUzukXPlJmTBwUAuABDe/9qtVDelJeT/qW0ow3ZS3BsQJtNkan1802aM9Uf68/Y8ljw86Hu0h5IUW3w==", "dev": true }, + "tsconfck": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/tsconfck/-/tsconfck-3.0.3.tgz", + "integrity": "sha512-4t0noZX9t6GcPTfBAbIbbIU4pfpCwh0ueq3S4O/5qXI1VwK1outmxhe9dOiEWqMz3MW2LKgDTpqWV+37IWuVbA==", + "requires": {} + }, "tsconfig-paths": { "version": "3.15.0", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", @@ -26631,23 +31511,6 @@ "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" }, - "tsutils": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", - "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", - "dev": true, - "requires": { - "tslib": "^1.8.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", - "dev": true - } - } - }, "tunnel-agent": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", @@ -26685,68 +31548,98 @@ "dev": true }, "typed-array-buffer": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz", - "integrity": "sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", + "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", "dev": true, "requires": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.1", - "is-typed-array": "^1.1.10" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.13" } }, "typed-array-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz", - "integrity": "sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", + "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", "dev": true, "requires": { - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "has-proto": "^1.0.1", - "is-typed-array": "^1.1.10" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" } }, "typed-array-byte-offset": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz", - "integrity": "sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", + "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", "dev": true, "requires": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", + 
"available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "has-proto": "^1.0.1", - "is-typed-array": "^1.1.10" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" } }, "typed-array-length": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz", - "integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", + "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", "dev": true, "requires": { - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "is-typed-array": "^1.1.9" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0" + } + }, + "types-ramda": { + "version": "0.29.10", + "resolved": "https://registry.npmjs.org/types-ramda/-/types-ramda-0.29.10.tgz", + "integrity": "sha512-5PJiW/eiTPyXXBYGZOYGezMl6qj7keBiZheRwfjJZY26QPHsNrjfJnz0mru6oeqqoTHOni893Jfd6zyUXfQRWg==", + "dev": true, + "requires": { + "ts-toolbelt": "^9.6.0" } }, "typescript": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", - "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==", + "version": "5.4.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.3.tgz", + "integrity": "sha512-KrPd3PKaCLr78MalgiwJnA25Nm8HAmdwN3mYUYZgG/wizIo9EainNVQI9/yDavtVFRN2h3k8uf3GLHuhDMgEHg==", "devOptional": true }, + "typescript-eslint": { + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-7.5.0.tgz", + "integrity": "sha512-eKhF39LRi2xYvvXh3h3S+mCxC01dZTIZBlka25o39i81VeQG+OZyfC4i2GEDspNclMRdXkg9uGhmvWMhjph2XQ==", + "dev": true, + "requires": { + "@typescript-eslint/eslint-plugin": "7.5.0", + "@typescript-eslint/parser": "7.5.0", + "@typescript-eslint/utils": "7.5.0" + } + }, "ua-parser-js": { "version": "1.0.37", "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.37.tgz", "integrity": "sha512-bhTyI94tZofjo+Dn8SN6Zv8nBDvyXTymAdM3LDI/0IboIUwTu1rEhW7v2TfiVsoYWgkQ4kOVqnI8APUFbIQIFQ==", "dev": true }, + "uc.micro": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", + "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==" + }, "ufo": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.3.2.tgz", - "integrity": "sha512-o+ORpgGwaYQXgqGDwd+hkS4PuZ3QnmqMMxRuajK/a38L6fTpcE5GPIfrf+L/KemFzfUpeUQc1rRS1iDBozvnFA==", + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.5.3.tgz", + "integrity": "sha512-Y7HYmWaFwPUmkoQCUIAYpKqkOf+SbVj/2fJJZ4RJMCfZp0rTGwRbzQD+HghfnhKOjL9E01okqz+ncJskGYfBNw==", "dev": true }, "uglify-js": { @@ -26774,14 +31667,44 @@ "dev": true }, "undici": { - "version": "5.28.3", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.3.tgz", - "integrity": "sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA==", + "version": "5.28.4", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", + "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", "dev": true, "requires": { 
"@fastify/busboy": "^2.0.0" } }, + "undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "devOptional": true + }, + "unicode-canonical-property-names-ecmascript": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz", + "integrity": "sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==" + }, + "unicode-match-property-ecmascript": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", + "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", + "requires": { + "unicode-canonical-property-names-ecmascript": "^2.0.0", + "unicode-property-aliases-ecmascript": "^2.0.0" + } + }, + "unicode-match-property-value-ecmascript": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz", + "integrity": "sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA==" + }, + "unicode-property-aliases-ecmascript": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz", + "integrity": "sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==" + }, "unidiff": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/unidiff/-/unidiff-1.0.4.tgz", @@ -26955,6 +31878,14 @@ "integrity": "sha512-Qp95D4TPJl1kC9SKigDcqgyM2VDVO4RiJc2d4qe5GrYm+zbIQCWWKAFaJNQ4BhdFeDGwBmAxqJBwWSJDb9T3BQ==", "dev": true }, + "use-callback-ref": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.2.tgz", + "integrity": "sha512-elOQwe6Q8gqZgDA8mrh44qRTQqpIHDcZ3hXTLjBe1i4ph8XpNJnO+aQf3NaG+lriLopI4HMx9VjQLfPQ6vhnoA==", + "requires": { + "tslib": "^2.0.0" + } + }, "use-query-params": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/use-query-params/-/use-query-params-2.2.1.tgz", @@ -26963,6 +31894,15 @@ "serialize-query-params": "^2.0.2" } }, + "use-sidecar": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.2.tgz", + "integrity": "sha512-epTbsLuzZ7lPClpz2TyryBfztm7m+28DlEv2ZCQ3MDr5ssiwyOwGH/e5F9CkfWjJ1t4clvI58yF822/GUkjjhw==", + "requires": { + "detect-node-es": "^1.1.0", + "tslib": "^2.0.0" + } + }, "util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", @@ -27049,110 +31989,163 @@ } }, "vite": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.2.tgz", - "integrity": "sha512-tBCZBNSBbHQkaGyhGCDUGqeo2ph8Fstyp6FMSvTtsXeZSPpSMGlviAOav2hxVTqFcx8Hj/twtWKsMJXNY0xI8w==", + "version": "5.2.8", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.2.8.tgz", + "integrity": "sha512-OyZR+c1CE8yeHw5V5t59aXsUPPVTHMDjEZz8MgguLL/Q7NblxhZUlTu9xSPqlsUO/y+X7dlU05jdhvyycD55DA==", "requires": { - "esbuild": "^0.18.10", - "fsevents": "~2.3.2", - "postcss": "^8.4.27", - "rollup": "^3.27.1" + "esbuild": "^0.20.1", + "fsevents": "~2.3.3", + "postcss": "^8.4.38", + "rollup": "^4.13.0" } }, "vite-node": { - 
"version": "0.31.4", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-0.31.4.tgz", - "integrity": "sha512-uzL377GjJtTbuc5KQxVbDu2xfU/x0wVjUtXQR2ihS21q/NK6ROr4oG0rsSkBBddZUVCwzfx22in76/0ZZHXgkQ==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-1.4.0.tgz", + "integrity": "sha512-VZDAseqjrHgNd4Kh8icYHWzTKSCZMhia7GyHfhtzLW33fZlG9SwsB6CEhgyVOWkJfJ2pFLrp/Gj1FSfAiqH9Lw==", "dev": true, "requires": { "cac": "^6.7.14", "debug": "^4.3.4", - "mlly": "^1.2.0", - "pathe": "^1.1.0", + "pathe": "^1.1.1", "picocolors": "^1.0.0", - "vite": "^3.0.0 || ^4.0.0" - } - }, - "vite-plugin-svgr": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/vite-plugin-svgr/-/vite-plugin-svgr-3.3.0.tgz", - "integrity": "sha512-vWZMCcGNdPqgziYFKQ3Y95XP0d0YGp28+MM3Dp9cTa/px5CKcHHrIoPl2Jw81rgVm6/ZUNONzjXbZQZ7Kw66og==", - "requires": { - "@rollup/pluginutils": "^5.0.4", - "@svgr/core": "^8.1.0", - "@svgr/plugin-jsx": "^8.1.0" + "vite": "^5.0.0" } }, "vite-tsconfig-paths": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/vite-tsconfig-paths/-/vite-tsconfig-paths-4.3.1.tgz", - "integrity": "sha512-cfgJwcGOsIxXOLU/nELPny2/LUD/lcf1IbfyeKTv2bsupVbTH/xpFtdQlBmIP1GEK2CjjLxYhFfB+QODFAx5aw==", + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/vite-tsconfig-paths/-/vite-tsconfig-paths-4.3.2.tgz", + "integrity": "sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==", "requires": { "debug": "^4.1.1", "globrex": "^0.1.2", - "tsconfck": "^3.0.1" - }, - "dependencies": { - "tsconfck": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/tsconfck/-/tsconfck-3.0.1.tgz", - "integrity": "sha512-7ppiBlF3UEddCLeI1JRx5m2Ryq+xk4JrZuq4EuYXykipebaq1dV0Fhgr1hb7CkmHt32QSgOZlcqVLEtHBG4/mg==", - "requires": {} - }, - "typescript": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", - "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", - "optional": true, - "peer": true - } + "tsconfck": "^3.0.3" } }, "vitest": { - "version": "0.31.4", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-0.31.4.tgz", - "integrity": "sha512-GoV0VQPmWrUFOZSg3RpQAPN+LPmHg2/gxlMNJlyxJihkz6qReHDV6b0pPDcqFLNEPya4tWJ1pgwUNP9MLmUfvQ==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-1.4.0.tgz", + "integrity": "sha512-gujzn0g7fmwf83/WzrDTnncZt2UiXP41mHuFYFrdwaLRVQ6JYQEiME2IfEjU3vcFL3VKa75XhI3lFgn+hfVsQw==", "dev": true, "requires": { - "@types/chai": "^4.3.5", - "@types/chai-subset": "^1.3.3", - "@types/node": "*", - "@vitest/expect": "0.31.4", - "@vitest/runner": "0.31.4", - "@vitest/snapshot": "0.31.4", - "@vitest/spy": "0.31.4", - "@vitest/utils": "0.31.4", - "acorn": "^8.8.2", - "acorn-walk": "^8.2.0", - "cac": "^6.7.14", - "chai": "^4.3.7", - "concordance": "^5.0.4", + "@vitest/expect": "1.4.0", + "@vitest/runner": "1.4.0", + "@vitest/snapshot": "1.4.0", + "@vitest/spy": "1.4.0", + "@vitest/utils": "1.4.0", + "acorn-walk": "^8.3.2", + "chai": "^4.3.10", "debug": "^4.3.4", - "local-pkg": "^0.4.3", - "magic-string": "^0.30.0", - "pathe": "^1.1.0", + "execa": "^8.0.1", + "local-pkg": "^0.5.0", + "magic-string": "^0.30.5", + "pathe": "^1.1.1", "picocolors": "^1.0.0", - "std-env": "^3.3.2", - "strip-literal": "^1.0.1", - "tinybench": "^2.5.0", - "tinypool": "^0.5.0", - "vite": "^3.0.0 || ^4.0.0", - "vite-node": "0.31.4", + "std-env": "^3.5.0", + "strip-literal": "^2.0.0", + 
"tinybench": "^2.5.1", + "tinypool": "^0.8.2", + "vite": "^5.0.0", + "vite-node": "1.4.0", "why-is-node-running": "^2.2.2" + }, + "dependencies": { + "execa": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", + "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", + "dev": true, + "requires": { + "cross-spawn": "^7.0.3", + "get-stream": "^8.0.1", + "human-signals": "^5.0.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^4.1.0", + "strip-final-newline": "^3.0.0" + } + }, + "get-stream": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", + "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", + "dev": true + }, + "human-signals": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", + "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", + "dev": true + }, + "is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "dev": true + }, + "mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "dev": true + }, + "npm-run-path": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", + "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "dev": true, + "requires": { + "path-key": "^4.0.0" + } + }, + "onetime": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "dev": true, + "requires": { + "mimic-fn": "^4.0.0" + } + }, + "path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true + }, + "signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true + }, + "strip-final-newline": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "dev": true + } } }, + "vscode-languageserver-types": { + "version": "3.17.5", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", + "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==" + }, "w3c-keyname": { "version": "2.2.8", "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==" }, 
"w3c-xmlserializer": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-4.0.0.tgz", - "integrity": "sha512-d+BFHzbiCx6zGfz0HyQ6Rg69w9k19nviJspaj4yNscGjrHu94sVP+aRm75yEbCh+r2/yR+7q6hux9LVtbuTGBw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz", + "integrity": "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==", "dev": true, "requires": { - "xml-name-validator": "^4.0.0" + "xml-name-validator": "^5.0.0" } }, "warning": { @@ -27173,9 +32166,9 @@ } }, "web-streams-polyfill": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.2.tgz", - "integrity": "sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", + "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", "dev": true }, "web-vitals": { @@ -27184,9 +32177,9 @@ "integrity": "sha512-sVWcwhU5mX6crfI5Vd2dC4qchyTqxV8URinzt25XqVh+bHEPGH4C3NPrNionCP7Obx59wrYEbNlw4Z8sjALzZg==" }, "webcrypto-core": { - "version": "1.7.8", - "resolved": "https://registry.npmjs.org/webcrypto-core/-/webcrypto-core-1.7.8.tgz", - "integrity": "sha512-eBR98r9nQXTqXt/yDRtInszPMjTaSAMJAFDg2AHsgrnczawT1asx9YNBX6k5p+MekbPF4+s/UJJrr88zsTqkSg==", + "version": "1.7.9", + "resolved": "https://registry.npmjs.org/webcrypto-core/-/webcrypto-core-1.7.9.tgz", + "integrity": "sha512-FE+a4PPkOmBbgNDIyRmcHhgXn+2ClRl3JzJdDu/P4+B8y81LqKe6RAsI9b3lAOHe1T1BMkSjsRHTYRikImZnVA==", "dev": true, "requires": { "@peculiar/asn1-schema": "^2.3.8", @@ -27202,16 +32195,10 @@ "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", "dev": true }, - "well-known-symbols": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/well-known-symbols/-/well-known-symbols-2.0.0.tgz", - "integrity": "sha512-ZMjC3ho+KXo0BfJb7JgtQ5IBuvnShdlACNkKkdsqBmYw3bPAaJfPeYUo6tLUaT5tG/Gkh7xkpBhKRQ9e7pyg9Q==", - "dev": true - }, "whatwg-encoding": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-2.0.0.tgz", - "integrity": "sha512-p41ogyeMUrw3jWclHWTQg1k05DSVXPLcVxRTYsXUk+ZooOCZLcoYgPZ/HL/D/N+uQPOtcp1me1WhBEaX02mhWg==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz", + "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==", "dev": true, "requires": { "iconv-lite": "0.6.3" @@ -27229,18 +32216,18 @@ } }, "whatwg-mimetype": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz", - "integrity": "sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz", + "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==", "dev": true }, "whatwg-url": { - "version": "12.0.1", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-12.0.1.tgz", - "integrity": "sha512-Ed/LrqB8EPlGxjS+TrsXcpUond1mhccS3pchLhzSgPCnTimUCKj3IZE75pAs5m6heB2U2TMerKFUXheyHY+VDQ==", + "version": "14.0.0", + "resolved": 
"https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.0.0.tgz", + "integrity": "sha512-1lfMEm2IEr7RIV+f4lUNPOqfFL+pO+Xw3fJSqmjX9AbXcXcYOkCe1P6+9VBZB6n94af16NfZf+sSk0JCBZC9aw==", "dev": true, "requires": { - "tr46": "^4.1.1", + "tr46": "^5.0.0", "webidl-conversions": "^7.0.0" } }, @@ -27304,16 +32291,16 @@ "dev": true }, "which-typed-array": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.13.tgz", - "integrity": "sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow==", + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", + "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", "dev": true, "requires": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.4", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", "for-each": "^0.3.3", "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0" + "has-tostringtag": "^1.0.2" } }, "why-is-node-running": { @@ -27366,9 +32353,9 @@ "requires": {} }, "xml-name-validator": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-4.0.0.tgz", - "integrity": "sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz", + "integrity": "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==", "dev": true }, "xmlchars": { @@ -27389,10 +32376,9 @@ "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" }, "yaml": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", - "dev": true + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.1.tgz", + "integrity": "sha512-pIXzoImaqmfOrL7teGUBt/T7ZDnyeGBWyXQBvOVhLkWLN37GXv8NMLK406UY6dS51JfcQHsmcW5cJ441bHg6Lg==" }, "yaml-ast-parser": { "version": "0.0.43", @@ -27423,12 +32409,6 @@ } } }, - "yargs-parser": { - "version": "20.2.9", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", - "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", - "dev": true - }, "yauzl": { "version": "2.10.0", "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index cd854918cb..338fad4558 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -8,24 +8,21 @@ "build": "vite build", "serve": "vite preview", "build-serve": "npm run build && npm run serve", + "codegen": "graphql-codegen --config graphql.config.ts", + "codegen:openapi": "npx openapi-typescript http://localhost:8000/api/openapi.json --output src/infraops.d.ts", + "ci:test:e2e": "CI=1 DEBUG=pw:browser npm run test:e2e 2>/dev/null", "test": "vitest run", "test:watch": "vitest watch", "test:coverage": "vitest run --coverage", - "cypress": "cypress open", - "cypress:screenshots": "cypress open --env SCREENSHOTS=true", - "cypress:run": "npm run cypress:run:component", - "cypress:run:component": "cypress run --component", - "cypress:run:e2e": "ELECTRON_ENABLE_LOGGING=1 cypress run --e2e", - "cypress:run:e2e:screenshots": 
"cypress run --e2e --env SCREENSHOTS=true", - "cypress:run:spec": "ELECTRON_ENABLE_LOGGING=1 cypress run --spec", - "codegen": "graphql-codegen --config graphql.config.ts", - "codegen:openapi": "npx openapi-typescript http://localhost:8000/api/openapi.json --output src/infraops.d.ts", - "ci:test:e2e": "CI=1 npm run test:e2e", "test:e2e": "playwright test", "test:e2e:debug": "playwright test --debug", "test:e2e:headed": "playwright test --headed", "test:e2e:ui": "playwright test --ui", "test:e2e:screenshots": "UPDATE_DOCS_SCREENSHOTS=1 playwright test", + "cypress": "cypress open", + "cypress:run": "npm run cypress:run:component", + "cypress:run:component": "cypress run --component", + "cypress:run:spec": "ELECTRON_ENABLE_LOGGING=1 cypress run --spec", "prettier": "prettier './**/*.{js,jsx,ts,tsx,css,md,json}' --config ./.prettierrc.json", "prettier:fix": "prettier --write './**/*.{js,jsx,ts,tsx,css,md,json}' --config ./.prettierrc.json", "eslint": "eslint .", @@ -33,97 +30,109 @@ "format-code": "npm run prettier:fix && npm run eslint:fix" }, "dependencies": { - "@apollo/client": "^3.7.7", - "@codemirror/commands": "^6.3.2", - "@codemirror/lang-markdown": "^6.2.3", - "@codemirror/state": "^6.3.2", - "@codemirror/view": "^6.22.1", - "@headlessui/react": "^1.7.10", - "@heroicons/react": "^2.0.15", + "@apollo/client": "^3.9.10", + "@codemirror/commands": "^6.3.3", + "@codemirror/lang-markdown": "^6.2.4", + "@codemirror/state": "^6.4.1", + "@codemirror/view": "^6.26.1", + "@graphiql/plugin-explorer": "^1.0.4", + "@headlessui/react": "^1.7.18", + "@heroicons/react": "^2.1.3", "@hookform/error-message": "^2.0.1", - "@iconify-icon/react": "^1.0.8", - "@iconify-json/mdi": "^1.1.55", + "@iconify-icon/react": "^2.0.1", + "@iconify-json/mdi": "^1.1.64", + "@loadable/component": "^5.16.3", "@popperjs/core": "^2.11.8", + "@radix-ui/react-dropdown-menu": "^2.0.6", + "@radix-ui/react-popover": "^1.0.7", + "@radix-ui/react-progress": "^1.0.3", "@radix-ui/react-tooltip": "^1.0.7", - "@sentry/react": "^7.45.0", - "@sentry/tracing": "^7.45.0", - "@tailwindcss/forms": "^0.5.3", - "@uiw/react-color": "^2.0.5", - "@vitejs/plugin-react": "^4.0.0", - "autoprefixer": "^10.4.14", + "@svgr/rollup": "^8.1.0", + "@tailwindcss/forms": "^0.5.7", + "@uiw/react-color": "^2.1.1", + "@vitejs/plugin-react": "^4.2.1", + "autoprefixer": "^10.4.19", "class-variance-authority": "^0.7.0", "clsx": "^2.1.0", "cm6-theme-basic-light": "^0.2.0", "cross-fetch": "^4.0.0", - "date-fns": "^2.29.3", - "handlebars": "^4.7.7", - "jotai": "^2.6.0", + "date-fns": "^3.6.0", + "graphiql": "^3.1.2", + "graphql": "^16.8.1", + "handlebars": "^4.7.8", + "jotai": "^2.7.2", "prismjs": "^1.29.0", - "query-string": "^7.1.3", - "ramda": "^0.28.0", + "query-string": "^9.0.0", + "ramda": "^0.29.1", "react": "^18.2.0", + "react-accessible-treeview": "^2.8.3", "react-datepicker": "^4.11.0", "react-diff-view": "^3.2.0", "react-dom": "^18.2.0", - "react-hook-form": "^7.43.5", + "react-hook-form": "^7.51.2", "react-loading": "^2.0.3", "react-markdown": "^9.0.1", "react-paginate": "^8.2.0", "react-popper": "^2.3.0", - "react-router-dom": "^6.21.1", + "react-router-dom": "^6.22.3", "react-simple-code-editor": "^0.13.1", - "react-toastify": "^9.1.2", - "recharts": "^2.12.2", + "react-toastify": "^9.1.3", + "recharts": "^2.12.3", "remark-gfm": "^4.0.0", "sha1": "^1.1.1", "subscriptions-transport-ws": "^0.11.0", - "tailwind-merge": "^2.2.1", + "tailwind-merge": "^2.2.2", "unidiff": "^1.0.4", "use-query-params": "^2.2.1", - "vite": "^4.3.8", - "vite-plugin-svgr": 
"^3.2.0", - "vite-tsconfig-paths": "^4.2.0", + "vite": "^5.2.8", + "vite-tsconfig-paths": "^4.3.2", "web-vitals": "^2.1.4" }, "devDependencies": { - "@graphql-codegen/cli": "^3.0.0", - "@graphql-codegen/typescript": "^3.0.0", - "@playwright/test": "^1.41.1", - "@testing-library/react": "^14.0.0", - "@types/node": "^16.18.12", - "@types/prismjs": "^1.26.0", - "@types/ramda": "^0.28.23", - "@types/react": "^18.0.28", + "@graphql-codegen/cli": "^5.0.2", + "@graphql-codegen/typescript": "^4.0.6", + "@playwright/test": "^1.42.1", + "@testing-library/react": "^14.2.2", + "@types/loadable__component": "^5.13.9", + "@types/node": "^20.12.3", + "@types/prismjs": "^1.26.3", + "@types/ramda": "^0.29.12", + "@types/react": "^18.2.74", "@types/react-datepicker": "^4.10.0", - "@types/react-dom": "^18.0.10", - "@types/react-test-renderer": "^18.0.0", - "@types/sha1": "^1.1.3", - "@typescript-eslint/eslint-plugin": "^5.55.0", - "@vitest/coverage-c8": "^0.31.1", - "c8": "^7.13.0", - "cypress": "^13.6.1", - "cypress-vite": "^1.5.0", - "eslint": "^8.36.0", - "eslint-config-prettier": "^8.8.0", - "eslint-config-standard-with-typescript": "^34.0.1", + "@types/react-dom": "^18.2.23", + "@types/react-test-renderer": "^18.0.7", + "@types/sha1": "^1.1.5", + "@typescript-eslint/eslint-plugin": "^7.5.0", + "@vitest/coverage-v8": "^1.4.0", + "cypress": "^13.7.2", + "eslint": "^8.57.0", + "eslint-config-prettier": "^9.1.0", "eslint-plugin-cypress": "^2.15.1", - "eslint-plugin-import": "^2.27.5", - "eslint-plugin-n": "^15.6.1", + "eslint-plugin-import": "^2.29.1", + "eslint-plugin-n": "^16.6.2", "eslint-plugin-promise": "^6.1.1", - "eslint-plugin-react": "^7.32.2", - "eslint-plugin-unused-imports": "^2.0.0", + "eslint-plugin-react": "^7.34.1", + "eslint-plugin-unused-imports": "^3.1.0", "husky": "^8.0.3", - "jsdom": "^21.1.2", + "jsdom": "^24.0.0", "lint-staged": "^13.2.0", "openapi-typescript": "^6.7.3", "postcss": "^8.4.23", "prettier": "2.8.8", "pretty-quick": "^3.1.3", "react-test-renderer": "^18.2.0", - "tailwindcss": "^3.3.2", - "ts-node": "^10.9.1", - "typescript": "^4.9.5", - "vitest": "^0.31.1" + "tailwindcss": "^3.4.3", + "ts-node": "^10.9.2", + "typescript": "^5.4.3", + "typescript-eslint": "^7.5.0", + "vitest": "^1.4.0" + }, + "overrides": { + "graphiql-explorer": { + "graphql": "^16.8.1", + "react": "^18.2.0", + "react-dom": "^18.2.0" + } }, "browserslist": { "production": [ diff --git a/frontend/playwright.config.ts b/frontend/playwright.config.ts index 82c4d0f7aa..280a99b570 100644 --- a/frontend/playwright.config.ts +++ b/frontend/playwright.config.ts @@ -26,7 +26,11 @@ export default defineConfig({ /* Opt out of parallel tests on CI. */ workers: process.env.CI ? 3 : undefined, /* Reporter to use. See https://playwright.dev/docs/test-reporters */ - reporter: [["list"], ["html", { open: "never" }]], + reporter: [ + ["list"], + ["html", { open: "never" }], + ["junit", { outputFile: "playwright-junit.xml" }], + ], /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ use: { /* Base URL to use in actions like `await page.goto('/')`. 
*/ diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index 258cfe4c68..0c864ef8c7 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -1,14 +1,46 @@ import { addCollection } from "@iconify-icon/react"; import mdiIcons from "@iconify-json/mdi/icons.json"; +import loadable from "@loadable/component"; import { Navigate, Route, Routes } from "react-router-dom"; -import "react-toastify/dist/ReactToastify.css"; + +import { ARTIFACT_OBJECT } from "./config/constants"; import { AuthProvider, RequireAuth } from "./hooks/useAuth"; -import { MAIN_ROUTES } from "./config/routes"; -import Layout from "./screens/layout/layout"; -import SignIn from "./screens/sign-in/sign-in"; + +import "react-toastify/dist/ReactToastify.css"; +import { IPAM_ROUTE } from "./screens/ipam/constants"; addCollection(mdiIcons); +const Layout = loadable(() => import("./screens/layout/layout")); +const SchemaPage = loadable(() => import("./screens/schema/schema-page")); +const GraphiQLPage = loadable(() => import("./screens/graphql/graphiql")); +const RedirectToGraphiQLPage = loadable(() => import("./screens/graphql/RedirectToGraphiQLPage")); +const ArtifactsObjectItemDetailsPaginated = loadable( + () => import("./screens/artifacts/object-item-details-paginated") +); +const BranchItemDetails = loadable(() => import("./screens/branches/branch-item-details")); +const BranchesItems = loadable(() => import("./screens/branches/branches-items")); +const TaskItemsScreen = loadable(() => import("./screens/tasks/task-items-screen")); +const TaskItemDetailsScreen = loadable(() => import("./screens/tasks/task-item-details-screen")); +const ProposedChanges = loadable(() => import("./screens/proposed-changes/proposed-changes-items")); +const ProposedChangesDetails = loadable( + () => import("./screens/proposed-changes/proposed-changes-details") +); +const ProposedChangesCreatePage = loadable( + () => import("./screens/proposed-changes/proposed-changes-create-page") +); +const UserProfile = loadable(() => import("./screens/user-profile/user-profile")); +const ObjectItemsPaginated = loadable( + () => import("./screens/object-items/object-items-paginated") +); +const ObjectItemDetailsPaginated = loadable( + () => import("./screens/object-item-details/object-item-details-paginated") +); +const Homepage = loadable(() => import("./screens/homepage")); +const SignIn = loadable(() => import("./screens/sign-in/sign-in")); +const IpamPage = loadable(() => import("./screens/ipam/ipam-page")); +const IpamRouter = loadable(() => import("./screens/ipam/ipam-router")); + const App = () => { return ( @@ -20,9 +52,31 @@ const App = () => { }> - {MAIN_ROUTES.map((route) => ( - - ))} + } /> + } /> + } + /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + }> + } /> + } /> + } /> + } /> + } /> + + } /> } /> } /> diff --git a/frontend/src/components/account-menu.tsx b/frontend/src/components/account-menu.tsx index 2dd8dc85a9..14929a8d27 100644 --- a/frontend/src/components/account-menu.tsx +++ b/frontend/src/components/account-menu.tsx @@ -5,7 +5,7 @@ import { Fragment, useEffect } from "react"; import { Link, useLocation, useNavigate } from "react-router-dom"; import { toast } from "react-toastify"; import { ACCESS_TOKEN_KEY, ACCOUNT_OBJECT } from "../config/constants"; -import { getProfileDetails } from "../graphql/queries/profile/getProfileDetails"; +import { getProfileDetails } from "../graphql/queries/accounts/getProfileDetails"; import { useLazyQuery } from "../hooks/useQuery"; import { userNavigation } from 
"../screens/layout/navigation-list"; import { schemaState } from "../state/atoms/schema.atom"; diff --git a/frontend/src/components/branch-selector.tsx b/frontend/src/components/branch-selector.tsx index e99fd72bd6..7132ef5903 100644 --- a/frontend/src/components/branch-selector.tsx +++ b/frontend/src/components/branch-selector.tsx @@ -18,6 +18,7 @@ import { SelectButton } from "./buttons/select-button"; import { DateDisplay } from "./display/date-display"; import { POPOVER_SIZE, PopOver } from "./display/popover"; import { SelectOption } from "./inputs/select"; +import { branchesToSelectOptions } from "../utils/branches"; const getBranchIcon = (branch: Branch | null, active?: Boolean) => branch && ( @@ -83,35 +84,7 @@ export default function BranchSelector() { ); - const branchesOptions: SelectOption[] = branches - .map((branch) => ({ - id: branch.id, - name: branch.name, - sync_with_git: branch.sync_with_git, - is_default: branch.is_default, - is_isolated: branch.is_isolated, - has_schema_changes: branch.has_schema_changes, - created_at: branch.created_at, - })) - .sort((branch1, branch2) => { - if (branch1.name === "main") { - return -1; - } - - if (branch2.name === "main") { - return 1; - } - - if (branch2.name === "main") { - return -1; - } - - if (branch1.name > branch2.name) { - return 1; - } - - return -1; - }); + const branchesOptions: SelectOption[] = branchesToSelectOptions(branches); const defaultBranch = branches?.filter((b) => b.is_default)[0]?.id; @@ -221,7 +194,7 @@ export default function BranchSelector() { ]; return ( -
+
, + VariantProps {} + +export const Button = forwardRef( + ({ className, variant, size, ...props }, ref) => { + return ( + + ); +}; diff --git a/frontend/src/components/buttons/rounded-button.tsx b/frontend/src/components/buttons/rounded-button.tsx index 0460c5a30d..eceb289db0 100644 --- a/frontend/src/components/buttons/rounded-button.tsx +++ b/frontend/src/components/buttons/rounded-button.tsx @@ -60,20 +60,12 @@ export const RoundedButton = forwardRef((props: const customClassName = getClassName(type); - const handleClick = (event: any) => { - if (type !== "submit") { - event.stopPropagation(); - } - - onClick && onClick(event); - }; - return ( diff --git a/frontend/src/components/conversations/thread.tsx b/frontend/src/components/conversations/thread.tsx index a456f8217a..102f01e557 100644 --- a/frontend/src/components/conversations/thread.tsx +++ b/frontend/src/components/conversations/thread.tsx @@ -98,7 +98,7 @@ export const Thread = (props: tThread) => { setIsLoading(false); setDisplayAddComment(false); } catch (error: any) { - console.error("An error occured while creating the comment: ", error); + console.error("An error occurred while creating the comment: ", error); setIsLoading(false); } diff --git a/frontend/src/components/display/badge.tsx b/frontend/src/components/display/badge.tsx index 688fe380fd..554c9044a3 100644 --- a/frontend/src/components/display/badge.tsx +++ b/frontend/src/components/display/badge.tsx @@ -78,9 +78,16 @@ type tBadgeProps = { disabled?: boolean; }; -export const Badge = (props: tBadgeProps) => { - const { type, className, children, onDelete, value, onClick, disabled } = props; - +export const Badge = ({ + type, + className, + children, + onDelete, + value, + onClick, + disabled, + ...props +}: tBadgeProps) => { const customClassName = getClassName(type, onClick || onDelete, disabled); const handleClick = (event: any) => { @@ -110,11 +117,12 @@ export const Badge = (props: tBadgeProps) => { className, onDelete && !disabled ? "cursor-pointer" : "" )} - onClick={handleClick}> + onClick={handleClick} + {...props}> {children} {onDelete && ( -
+
)} diff --git a/frontend/src/components/display/color-display.tsx b/frontend/src/components/display/color-display.tsx index 104a5102a0..a154d241ef 100644 --- a/frontend/src/components/display/color-display.tsx +++ b/frontend/src/components/display/color-display.tsx @@ -10,7 +10,7 @@ export const ColorDisplay = (props: tColorDisplay) => { return (
+ {isFromProfile ? ( + + + {source.display_label} + + ) : ( + source.display_label + )} + + ) : ( + "-" + ), + }, + { + name: "Updated at", + value: updatedAt ? formatFullDate(updatedAt) : "-", + }, + { + name: "Update time", + value: updatedAt ? formatRelativeTimeFromNow(updatedAt) : "-", + }, + { + name: "Owner", + value: owner ? ( + + {owner.display_label} + + ) : ( + "-" + ), + }, + { + name: "Is protected", + value: isProtected ? "True" : "False", + }, + ]; - const navigate = useNavigate(); - const [schemaKindName] = useAtom(schemaKindNameState); - let [referenceElement, setReferenceElement] = useState(); - let [popperElement, setPopperElement] = useState(); - let { styles, attributes } = usePopper(referenceElement, popperElement, { - modifiers: [ - { - name: "flip", - }, - ], - }); - - const navigateToObjectDetailsPage = (obj: any) => - navigate(constructPath(`/objects/${schemaKindName[obj.__typename]}/${obj.id}`)); - - // TODO: use the popover component return ( - - -
- -
-
- - -
- {!!header && header} - {items.map((item) => { - return ( -
-
{item.label}:
- {item.type === "date" && item.value && ( -
{formatDistance(new Date(item.value), new Date(), { addSuffix: true })}
- )} + + + + - {item.type === "link" && ( -
navigateToObjectDetailsPage(item.value)}> - {item.value?.display_label} -
- )} + + {!!header && header} - {item.type === "text" &&
{item.value}
} -
- ); - })} -
-
+ +
); } diff --git a/frontend/src/components/editor/index.tsx b/frontend/src/components/editor/index.tsx index 0d6bcd8b26..248cb115e7 100644 --- a/frontend/src/components/editor/index.tsx +++ b/frontend/src/components/editor/index.tsx @@ -47,7 +47,7 @@ export const MarkdownEditor: FC = ({
{ + const [key, value] = data; + + if (value.value) + return { + name: `${key}__value`, + value: value.value, + }; + + if (value.id) { + return { + name: `${key}__ids`, + value: [value.id], + }; + } + + if (value.list?.length) { + return { + name: `${key}__ids`, + value: value.list, + }; + } +}; + +const constructNewFilters = (data: any) => Object.entries(data).map(computeFilter).filter(Boolean); + +const parseFilter = (acc: any, filter: any, schema: any) => { + if (!filter?.name) return; + + if (filter.name.includes("__value")) { + const key = filter.name.replace("__value", ""); + + return { + ...acc, + [key]: { + value: filter.value, + }, + }; + } + + if (schema && filter.name.includes("__ids")) { + const key = filter.name.replace("__ids", ""); + + const field = + schema.attributes.find((attribute) => attribute.name === key) || + schema.relationships.find((relationship) => relationship.name === key); + + if (field.cardinality === "many") { + return { + ...acc, + [key]: { + edges: filter.value.map((id) => ({ node: { id } })), + }, + }; + } + + return { + ...acc, + [key]: { + node: { + id: filter.value, + }, + }, + }; + } + + return acc; +}; + +export const Filters = (props: tFilters) => { + const { schema } = props; + + const branch = useAtomValue(currentBranchAtom); + const schemaList = useAtomValue(schemaState); + const genericList = useAtomValue(genericsState); + + const [filters, setFilters] = useFilters(); + const [showFilters, setShowFilters] = useState(false); + + const removeFilters = () => { + const newFilters = filters.filter((filter) => SEARCH_FILTERS.includes(filter.name)); + + setFilters(newFilters); + }; + + const handleShowFilters = () => setShowFilters(true); + + const handleSubmit = (data: any) => { + const newFilters = constructNewFilters(data); + + setFilters([...filters, ...newFilters]); + + setShowFilters(false); + }; + + const filtersObject = filters.reduce((acc, filter) => parseFilter(acc, filter, schema), {}); + + const currentFilters = filters.filter((filter) => !SEARCH_FILTERS.includes(filter.name)); + + const fields = getFormStructureForCreateEdit({ + schema, + schemas: schemaList, + generics: genericList, + row: filtersObject, + isFilters: true, + }); + + return ( +
+
+ + + + + Filters: {currentFilters.length} + + {!!currentFilters.length && ( + + )} +
+ + +
+ Apply filters +
+
+ +
{branch?.name ?? DEFAULT_BRANCH_NAME}
+
+
+ +
{schema?.description}
+ + + + {schema?.kind} + +
+ } + open={showFilters} + setOpen={setShowFilters}> +
+ +
+ ); +}; diff --git a/frontend/src/components/form/checkbox.register.tsx b/frontend/src/components/form/checkbox.register.tsx deleted file mode 100644 index 98d90e95a4..0000000000 --- a/frontend/src/components/form/checkbox.register.tsx +++ /dev/null @@ -1,35 +0,0 @@ -import { FieldValues, RegisterOptions, UseFormRegister, UseFormSetValue } from "react-hook-form"; -import { FormFieldError } from "../../screens/edit-form-hook/form"; -import OpsCheckox from "./checkbox"; - -interface Props { - name: string; - label: string; - value: boolean; - register: UseFormRegister; - config?: RegisterOptions | undefined; - setValue: UseFormSetValue; - isProtected?: boolean; - isOptional?: boolean; - disabled?: boolean; - error?: FormFieldError; -} - -export const OpsCheckboxRegister = (props: Props) => { - const { name, register, setValue, config, isProtected, ...propsToPass } = props; - - const inputRegister = register(name, { - value: props.value ?? "", - ...config, - }); - - return ( - { - setValue(inputRegister.name, value); - }} - isProtected={isProtected || props.disabled} - /> - ); -}; diff --git a/frontend/src/components/form/code-editor.register.tsx b/frontend/src/components/form/code-editor.register.tsx deleted file mode 100644 index 08b61e862d..0000000000 --- a/frontend/src/components/form/code-editor.register.tsx +++ /dev/null @@ -1,48 +0,0 @@ -import { useState } from "react"; -import { FieldValues, RegisterOptions, UseFormRegister, UseFormSetValue } from "react-hook-form"; -import { FormFieldError } from "../../screens/edit-form-hook/form"; -import { OpsCodeEditor } from "./code-editor"; - -interface Props { - name: string; - label: string; - value?: string; - register: UseFormRegister; - config?: RegisterOptions | undefined; - setValue: UseFormSetValue; - error?: FormFieldError; -} - -export const CodeEditorRegister = (props: Props) => { - const { name, value, register, setValue, config, ...propsToPass } = props; - const [currentValue, setCurrentValue] = useState(value ? JSON.stringify(value) : null); - - const inputRegister = register(name, { - value: value ?? 
"", - ...config, - }); - - return ( - { - // Set the JSON as string in state - setCurrentValue(value); - - if (!value) { - // Replace empty string with valid "null" - setValue(inputRegister.name, null); - } - - try { - // Store the value as JSON - const newValue = JSON.parse(value); - setValue(inputRegister.name, newValue); - } catch (e) { - console.log("e: ", e); - } - }} - /> - ); -}; diff --git a/frontend/src/components/form/color-picker.register.tsx b/frontend/src/components/form/color-picker.register.tsx deleted file mode 100644 index 2ad5550c41..0000000000 --- a/frontend/src/components/form/color-picker.register.tsx +++ /dev/null @@ -1,29 +0,0 @@ -import { FieldValues, RegisterOptions, UseFormRegister, UseFormSetValue } from "react-hook-form"; -import { FormFieldError } from "../../screens/edit-form-hook/form"; -import { OpsColorPicker } from "./color-picker"; - -interface Props { - name: string; - label: string; - value: string; - register: UseFormRegister; - config?: RegisterOptions | undefined; - setValue: UseFormSetValue; - error?: FormFieldError; - isProtected?: boolean; - isOptional?: boolean; - disabled?: boolean; -} - -export const OpsColorPickerRegister = (props: Props) => { - const { name, register, setValue, config, ...propsToPass } = props; - - const inputRegister = register(name, { - value: props.value || "", - ...config, - }); - - return ( - setValue(inputRegister.name, value)} /> - ); -}; diff --git a/frontend/src/components/form/date-picker.register.tsx b/frontend/src/components/form/date-picker.register.tsx deleted file mode 100644 index 6c51b22494..0000000000 --- a/frontend/src/components/form/date-picker.register.tsx +++ /dev/null @@ -1,29 +0,0 @@ -import { FieldValues, RegisterOptions, UseFormRegister, UseFormSetValue } from "react-hook-form"; -import { FormFieldError } from "../../screens/edit-form-hook/form"; -import { OpsDatePicker } from "./date-picker"; - -interface Props { - name: string; - label: string; - value?: Date; - register: UseFormRegister; - config?: RegisterOptions | undefined; - setValue: UseFormSetValue; - error?: FormFieldError; -} - -export const OpsDatePickerRegister = (props: Props) => { - const { name, register, setValue, config, ...propsToPass } = props; - - const inputRegister = register(name, { - value: props.value || null, - ...config, - }); - - return ( - setValue(inputRegister.name, value)} - /> - ); -}; diff --git a/frontend/src/components/form/input.register.tsx b/frontend/src/components/form/input.register.tsx deleted file mode 100644 index e294ba10c8..0000000000 --- a/frontend/src/components/form/input.register.tsx +++ /dev/null @@ -1,34 +0,0 @@ -import { FieldValues, RegisterOptions, UseFormRegister, UseFormSetValue } from "react-hook-form"; -import { FormFieldError } from "../../screens/edit-form-hook/form"; -import { OpsInput } from "./input"; - -interface Props { - inputType: string; - name: string; - label: string; - value: string; - register: UseFormRegister; - config?: RegisterOptions | undefined; - setValue: UseFormSetValue; - error?: FormFieldError; - isProtected?: boolean; - isOptional?: boolean; - disabled?: boolean; -} - -export const OpsInputRegister = (props: Props) => { - const { name, register, setValue, config, inputType, ...propsToPass } = props; - - const inputRegister = register(name, { - value: props.value || "", - ...config, - }); - - return ( - setValue(inputRegister.name, value)} - /> - ); -}; diff --git a/frontend/src/components/form/input.tsx b/frontend/src/components/form/input.tsx index 
4760a51bd4..c6e77e453d 100644 --- a/frontend/src/components/form/input.tsx +++ b/frontend/src/components/form/input.tsx @@ -35,7 +35,7 @@ export const OpsInput = (props: OpsInputProps) => { id={label} type={props.type} onChange={onChange} - defaultValue={value ?? ""} + value={value} className={classNames(className ?? "")} error={error} disabled={isProtected || disabled} diff --git a/frontend/src/components/form/list.register.tsx b/frontend/src/components/form/list.register.tsx deleted file mode 100644 index 5ae813b4d7..0000000000 --- a/frontend/src/components/form/list.register.tsx +++ /dev/null @@ -1,40 +0,0 @@ -import { useState } from "react"; -import { FieldValues, RegisterOptions, UseFormRegister, UseFormSetValue } from "react-hook-form"; -import { FormFieldError } from "../../screens/edit-form-hook/form"; -import { SelectOption } from "../inputs/select"; -import OpsList from "./list"; - -interface Props { - name: string; - label: string; - value: (string | SelectOption)[]; - register: UseFormRegister; - config?: RegisterOptions | undefined; - setValue: UseFormSetValue; - error?: FormFieldError; - isProtected?: boolean; - disabled?: boolean; -} - -export const OpsListRegister = (props: Props) => { - const { name, value, register, setValue, config, isProtected, ...propsToPass } = props; - - const multiSelectRegister = register(name, { - value: value ?? "", - ...config, - }); - - const [selectedOptions, setSelectedOptions] = useState(value); - - return ( - { - setSelectedOptions(newValue as SelectOption[]); - setValue(multiSelectRegister.name, newValue); - }} - isProtected={isProtected || props.disabled} - /> - ); -}; diff --git a/frontend/src/components/form/multi-select.register.tsx b/frontend/src/components/form/multi-select.register.tsx deleted file mode 100644 index 63fb14f1bc..0000000000 --- a/frontend/src/components/form/multi-select.register.tsx +++ /dev/null @@ -1,37 +0,0 @@ -import { FieldValues, RegisterOptions, UseFormRegister, UseFormSetValue } from "react-hook-form"; -import { FormFieldError } from "../../screens/edit-form-hook/form"; -import { SelectOption } from "../inputs/select"; -import OpsMultiSelect from "./multi-select"; - -interface Props { - name: string; - label: string; - value: SelectOption[]; - options: SelectOption[]; - register: UseFormRegister; - config?: RegisterOptions | undefined; - setValue: UseFormSetValue; - error?: FormFieldError; - isProtected?: boolean; - disabled?: boolean; -} - -export const OpsMultiSelectRegister = (props: Props) => { - const { name, value, register, setValue, config, isProtected, ...propsToPass } = props; - - const multiSelectRegister = register(name, { - value: value ?? 
"", - ...config, - }); - - return ( - { - setValue(multiSelectRegister.name, newValue); - }} - isProtected={isProtected || props.disabled} - /> - ); -}; diff --git a/frontend/src/components/form/multi-select.tsx b/frontend/src/components/form/multi-select.tsx index c3bba650c8..7a3af02810 100644 --- a/frontend/src/components/form/multi-select.tsx +++ b/frontend/src/components/form/multi-select.tsx @@ -4,27 +4,19 @@ import { Select, SelectOption } from "../inputs/select"; type OpsMultiSelectProps = { label: string; value: SelectOption[]; - options: SelectOption[]; + options?: SelectOption[]; onChange: (value: SelectOption[]) => void; error?: FormFieldError; isProtected?: boolean; }; export default function OpsMultiSelect(props: OpsMultiSelectProps) { - const { value, options, onChange, label, error, isProtected, ...propsToPass } = props; + const { label, isProtected, ...propsToPass } = props; return ( <> - ); } diff --git a/frontend/src/components/form/select-2-step.register.tsx b/frontend/src/components/form/select-2-step.register.tsx deleted file mode 100644 index df49722a9b..0000000000 --- a/frontend/src/components/form/select-2-step.register.tsx +++ /dev/null @@ -1,37 +0,0 @@ -import { FieldValues, RegisterOptions, UseFormRegister, UseFormSetValue } from "react-hook-form"; -import { FormFieldError } from "../../screens/edit-form-hook/form"; -import { SelectOption } from "../inputs/select"; -import { OpsSelect2Step, iTwoStepDropdownData } from "./select-2-step"; - -interface Props { - label: string; - options: SelectOption[]; - value: string | iTwoStepDropdownData; - name: string; - register: UseFormRegister; - config?: RegisterOptions | undefined; - setValue: UseFormSetValue; - error?: FormFieldError; - isProtected?: boolean; - isOptional?: boolean; - disabled?: boolean; -} - -export const OpsSelect2StepRegister = (props: Props) => { - const { name, register, setValue, config, isProtected, disabled, ...propsToPass } = props; - - const inputRegister = register(name, { - value: props.value?.child ?? "", - ...config, - }); - - return ( - { - setValue(inputRegister.name, option.child); - }} - isProtected={isProtected || disabled} - /> - ); -}; diff --git a/frontend/src/components/form/select.register.tsx b/frontend/src/components/form/select.register.tsx deleted file mode 100644 index ebeedbfc33..0000000000 --- a/frontend/src/components/form/select.register.tsx +++ /dev/null @@ -1,38 +0,0 @@ -import { FieldValues, RegisterOptions, UseFormRegister, UseFormSetValue } from "react-hook-form"; -import { FormFieldError } from "../../screens/edit-form-hook/form"; -import { SelectOption } from "../inputs/select"; -import { OpsSelect } from "./select"; - -type SelectRegisterProps = { - name: string; - value: string; - label: string; - options: SelectOption[]; - register: UseFormRegister; - config?: RegisterOptions | undefined; - setValue: UseFormSetValue; - error?: FormFieldError; - isProtected?: boolean; - isOptional?: boolean; - disabled?: boolean; - dropdown?: boolean; - enum?: boolean; -}; - -export const OpsSelectRegister = (props: SelectRegisterProps) => { - const { register, setValue, config, isProtected, disabled, name, ...propsToPass } = props; - - const inputRegister = register(name, { - value: props.value ?? 
"", - ...config, - }); - - return ( - setValue(inputRegister.name, item)} - isProtected={isProtected || disabled} - /> - ); -}; diff --git a/frontend/src/components/form/select.tsx b/frontend/src/components/form/select.tsx index 022b2430f9..049df78c13 100644 --- a/frontend/src/components/form/select.tsx +++ b/frontend/src/components/form/select.tsx @@ -5,11 +5,13 @@ import { Select, SelectOption } from "../inputs/select"; type SelectProps = { label: string; value?: string | number | null; - options: Array; + options?: Array; onChange: (value: string | number) => void; error?: FormFieldError; isProtected?: boolean; isOptional?: boolean; + dropdown?: boolean; + enum?: boolean; }; export const OpsSelect = (props: SelectProps) => { diff --git a/frontend/src/components/form/switch.register.tsx b/frontend/src/components/form/switch.register.tsx deleted file mode 100644 index e339f97a2b..0000000000 --- a/frontend/src/components/form/switch.register.tsx +++ /dev/null @@ -1,39 +0,0 @@ -import { FieldValues, RegisterOptions, UseFormRegister, UseFormSetValue } from "react-hook-form"; -import { FormFieldError } from "../../screens/edit-form-hook/form"; -import OpsSwitch from "./switch"; - -interface Props { - name: string; - label: string; - value: boolean; - register: UseFormRegister; - config?: RegisterOptions | undefined; - setValue: UseFormSetValue; - onChange?: Function; - error?: FormFieldError; - isProtected?: boolean; - isOptional?: boolean; - disabled?: boolean; -} - -export const OpsSwitchRegister = (props: Props) => { - const { name, register, setValue, config, onChange, isProtected, ...propsToPass } = props; - - const inputRegister = register(name, { - value: props.value ?? "", - ...config, - }); - - return ( - { - if (onChange) { - onChange(value); - } - setValue(inputRegister.name, value); - }} - isProtected={isProtected || props.disabled} - /> - ); -}; diff --git a/frontend/src/components/form/textarea.register.tsx b/frontend/src/components/form/textarea.register.tsx deleted file mode 100644 index 67f6478f78..0000000000 --- a/frontend/src/components/form/textarea.register.tsx +++ /dev/null @@ -1,27 +0,0 @@ -import { FieldValues, RegisterOptions, UseFormRegister, UseFormSetValue } from "react-hook-form"; -import { FormFieldError } from "../../screens/edit-form-hook/form"; -import { OpsTextarea } from "./textarea"; - -interface Props { - name: string; - label: string; - value: string; - register: UseFormRegister; - config?: RegisterOptions | undefined; - setValue: UseFormSetValue; - error?: FormFieldError; - isProtected?: boolean; - isOptional?: boolean; - disabled?: boolean; -} - -export const OpsTextareaRegister = (props: Props) => { - const { name, register, setValue, config, ...propsToPass } = props; - - const inputRegister = register(name, { - value: props.value ?? "", - ...config, - }); - - return setValue(inputRegister.name, value)} />; -}; diff --git a/frontend/src/components/form/textarea.tsx b/frontend/src/components/form/textarea.tsx index 66c8d3564f..43dbfed4ed 100644 --- a/frontend/src/components/form/textarea.tsx +++ b/frontend/src/components/form/textarea.tsx @@ -39,7 +39,7 @@ export const OpsTextarea = (props: OpsInputProps) => { { const { value, onChange } = props; @@ -39,7 +39,12 @@ export const ColorPicker = (props: any) => { return (
- + handleInputChange(e.target.value)} + className="flex-1" + />
{ /> {propsToPass.value && !hideEmpty && ( -
{removeButton}
+
+ {removeButton} +
)} {error?.message && ( @@ -72,7 +78,7 @@ export const Input = forwardRef((props: any, ref: any) => { )} {type === "password" && ( -
{displayButton}
+
{displayButton}
)}
); diff --git a/frontend/src/components/inputs/multiple-input.tsx b/frontend/src/components/inputs/multiple-input.tsx index c95db18f8b..a4857e7810 100644 --- a/frontend/src/components/inputs/multiple-input.tsx +++ b/frontend/src/components/inputs/multiple-input.tsx @@ -66,6 +66,7 @@ export const MultipleInput = React.forwardRef((props: MultipleInputProps, ref: a value={item} onDelete={handleDelete} className="mt-2" + data-testid="multi-select-input-badge" disabled={disabled}> {typeof item === "object" ? item.name : item} diff --git a/frontend/src/components/inputs/select.tsx b/frontend/src/components/inputs/select.tsx index 095c510b9d..bed8155c9f 100644 --- a/frontend/src/components/inputs/select.tsx +++ b/frontend/src/components/inputs/select.tsx @@ -31,6 +31,7 @@ import ModalDelete from "../modals/modal-delete"; import { Input } from "./input"; import { MultipleInput } from "./multiple-input"; +import { getObjectDisplayLabel } from "../../graphql/queries/objects/getObjectDisplayLabel"; import LoadingScreen from "../../screens/loading-screen/loading-screen"; import { getOptionsFromRelationship } from "../../utils/getSchemaObjectColumns"; @@ -68,6 +69,9 @@ type SelectProps = { isInherited?: boolean; }; +// Needed for async options to avoid duplicates issues +const comparedOptions = (a: SelectOption, b: SelectOption) => a?.id === b?.id; + export const Select = (props: SelectProps) => { const { options, @@ -109,8 +113,8 @@ export const Select = (props: SelectProps) => { const [localOptions, setLocalOptions] = useState(options); const [selectedOption, setSelectedOption] = useState( multiple - ? options.filter((option) => value.includes(option.id)) - : options?.find((option) => option?.id === value || option.name === value) + ? localOptions.filter((option) => value?.includes(option.id)) + : localOptions?.find((option) => option?.id === value || option.name === value) ); // Query to fetch options only if a peer is defined @@ -123,6 +127,14 @@ export const Select = (props: SelectProps) => { const [fetchOptions, { loading, data }] = useLazyQuery(optionsQuery); + const labelQueryString = peer ? getObjectDisplayLabel({ kind: peer }) : "query { ok }"; + + const labelQuery = gql` + ${labelQueryString} + `; + + const [fetchLabel] = useLazyQuery(labelQuery); + const optionsResult = peer && data ? data[peer].edges.map((edge: any) => edge.node) : []; const optionsData = getOptionsFromRelationship(optionsResult, schemaList); @@ -147,7 +159,7 @@ export const Select = (props: SelectProps) => { option?.name?.toString().toLowerCase().includes(query.toLowerCase()) ); - const finalOptions = [...(preventEmpty ? [] : [emptyOption]), ...filteredOptions]; + const finalOptions = [...(preventEmpty ? [] : [emptyOption]), ...(filteredOptions || [])]; const textColor = typeof selectedOption === "object" && !Array.isArray(selectedOption) @@ -683,7 +695,7 @@ export const Select = (props: SelectProps) => { return selectedOption?.name; } - return selectedOption; + return selectedOption ?? 
""; }; const getInputStyle = () => { @@ -697,30 +709,72 @@ export const Select = (props: SelectProps) => { return {}; }; + // Fetch option display label if not defined by current selected option + const handleFetchLabel = async () => { + if (!selectedOption) return; + + if (peer && !multiple && !Array.isArray(selectedOption) && !selectedOption?.name) { + const { data } = await fetchLabel({ variables: { ids: [selectedOption?.id] } }); + + const label = data[peer]?.edges[0]?.node?.display_label; + + const newSelectedOption = { + ...selectedOption, + name: label ?? "Unkown", + } as SelectOption; + + setSelectedOption(newSelectedOption); + + return; + } + + if (!Array.isArray(selectedOption)) return; + + // Get ids only + const ids = selectedOption.map((o) => o.id) ?? []; + + // Get defined names only + const names = selectedOption.map((o) => o.name).filter(Boolean) ?? []; + + // If ids and names have !== lengths, then some names are not defined + if (peer && multiple && ids.length && ids.length !== names.length) { + const { data } = await fetchLabel({ variables: { ids } }); + + const newSelectedOptions = data[peer]?.edges.map((edge) => ({ + name: edge.node.display_label, + id: edge.node.id, + })); + + setSelectedOption(newSelectedOptions); + } + }; + + useEffect(() => { + // Avoid fetching labels if ther eis no value + if (!value) return; + + if (Array.isArray(value) && !value.length) return; + + handleFetchLabel(); + }, [value]); + + // If options from query are updated useEffect(() => { + if (!optionsData?.length) return; + setLocalOptions(optionsData); }, [optionsData?.length]); + // If options from parent are updated useEffect(() => { setLocalOptions(options); }, [options?.length]); - useEffect(() => { - const newOption = multiple - ? options.filter((option) => value.includes(option.id)) - : options?.find((option) => option?.id === value || option.name === value); - - setSelectedOption(newOption ?? ""); - }, [value]); - - // Needed for async options to avoid duplicates issues - const comparedOptions = (a: SelectOption, b: SelectOption) => a?.id === b?.id; - return (
{ + const docFullUrl = documentationUrl + ? documentationUrl.startsWith("http") + ? INFRAHUB_DOC_LOCAL + : `${INFRAHUB_DOC_LOCAL}${documentationUrl}` + : ""; + + return ( + + + + + + + + + + Documentation + + + + + + + + Schema + + + + + ); +}; diff --git a/frontend/src/components/search/search-actions.tsx b/frontend/src/components/search/search-actions.tsx index 21e02da68e..ae7162a31d 100644 --- a/frontend/src/components/search/search-actions.tsx +++ b/frontend/src/components/search/search-actions.tsx @@ -1,34 +1,38 @@ -import { useMemo } from "react"; import { useAtomValue } from "jotai"; -import { menuAtom } from "../../state/atoms/schema.atom"; -import { SearchGroup, SearchGroupTitle, SearchResultItem } from "./search-modal"; +import { + genericsState, + IModelSchema, + menuFlatAtom, + schemaState, +} from "../../state/atoms/schema.atom"; +import { SearchGroup, SearchGroupTitle, SearchResultItem } from "./search-anywhere"; import { MenuItem } from "../../screens/layout/sidebar/desktop-menu"; import { constructPath } from "../../utils/fetch"; import { Icon } from "@iconify-icon/react"; +import { Badge } from "../ui/badge"; type SearchProps = { query: string; }; export const SearchActions = ({ query }: SearchProps) => { - const menu = useAtomValue(menuAtom); - const menuItems = useMemo(() => { - const flattenMenuItems = (menuItems: MenuItem[]): MenuItem[] => { - return menuItems.reduce((acc, menuItem) => { - if (menuItem.children.length === 0) { - return [...acc, menuItem]; - } - - return [...acc, ...flattenMenuItems(menuItem.children)]; - }, []); - }; - - return flattenMenuItems(menu); - }, [menu.length]); - - const results = menuItems.filter(({ title }) => - title.toLowerCase().includes(query.toLowerCase()) + const nodes = useAtomValue(schemaState); + const generics = useAtomValue(genericsState); + const models: IModelSchema[] = [...nodes, ...generics]; + + const menuItems = useAtomValue(menuFlatAtom); + + const queryLowerCased = query.toLowerCase(); + const resultsMenu = menuItems.filter(({ title }) => + title.toLowerCase().includes(queryLowerCased) + ); + const resultsSchema = models.filter( + ({ kind, label, description }) => + kind?.toLowerCase().includes(queryLowerCased) || + label?.toLowerCase().includes(queryLowerCased) || + description?.toLowerCase().includes(queryLowerCased) ); + const results = [...resultsMenu, ...resultsSchema]; if (results.length === 0) return null; const firstThreeMatches = results.slice(0, 3); @@ -36,9 +40,13 @@ export const SearchActions = ({ query }: SearchProps) => { Go to - {firstThreeMatches.map((menuItem) => ( - - ))} + {firstThreeMatches.map((result) => { + return "namespace" in result ? 
( + + ) : ( + + ); + })} ); }; @@ -50,9 +58,26 @@ type ActionOnMenuProps = { const ActionOnMenu = ({ menuItem }: ActionOnMenuProps) => { return ( - {menuItem.title} + Menu + + {menuItem.title} + + ); +}; + +const ActionOnSchema = ({ model }: { model: IModelSchema }) => { + const { kind, label, name } = model; + + return ( + + Schema - View + + + {model.namespace} + + {label || name || kind} + ); }; diff --git a/frontend/src/components/search/search-modal.tsx b/frontend/src/components/search/search-anywhere.tsx similarity index 83% rename from frontend/src/components/search/search-modal.tsx rename to frontend/src/components/search/search-anywhere.tsx index 239ccd0e65..7b791fe0c4 100644 --- a/frontend/src/components/search/search-modal.tsx +++ b/frontend/src/components/search/search-anywhere.tsx @@ -1,37 +1,48 @@ import { Combobox, Dialog, Transition } from "@headlessui/react"; -import { ChangeEventHandler, forwardRef, Fragment, ReactNode, useEffect, useState } from "react"; +import { + ChangeEventHandler, + forwardRef, + Fragment, + MouseEventHandler, + ReactNode, + useEffect, + useState, +} from "react"; import { Icon } from "@iconify-icon/react"; import { Link, LinkProps, useNavigate } from "react-router-dom"; import { classNames } from "../../utils/common"; import { SearchActions } from "./search-actions"; import { SearchNodes } from "./search-nodes"; import { SearchDocs } from "./search-docs"; +import Kbd from "../ui/kbd"; +import { Input } from "../ui/input"; type SearchInputProps = { className?: string; value?: string; onChange?: ChangeEventHandler; + onClick?: MouseEventHandler; }; -const SearchInput = ({ value, onChange, className = "" }: SearchInputProps) => { +const SearchTrigger = ({ value, onChange, onClick, className = "" }: SearchInputProps) => { return ( -
+
); }; @@ -39,7 +50,7 @@ const SearchInput = ({ value, onChange, className = "" }: SearchInputProps) => { type SearchModalProps = { className?: string; }; -export function SearchModal({ className = "" }: SearchModalProps) { +export function SearchAnywhere({ className = "" }: SearchModalProps) { let [isOpen, setIsOpen] = useState(false); function closeModal() { @@ -63,8 +74,13 @@ export function SearchModal({ className = "" }: SearchModalProps) { return ( <> -
- +
+
@@ -90,7 +106,7 @@ export function SearchModal({ className = "" }: SearchModalProps) { leave="ease-in duration-200" leaveFrom="opacity-100 scale-100" leaveTo="opacity-0 scale-95"> - +
@@ -104,7 +120,7 @@ type SearchAnywhereProps = { onSelection: (url?: string) => void; }; -const SearchAnywhere = forwardRef( +const SearchAnywhereDialog = forwardRef( ({ onSelection }, forwardedRef) => { const navigate = useNavigate(); const [query, setQuery] = useState(""); diff --git a/frontend/src/components/search/search-bar.tsx b/frontend/src/components/search/search-bar.tsx deleted file mode 100644 index 4f67903758..0000000000 --- a/frontend/src/components/search/search-bar.tsx +++ /dev/null @@ -1,162 +0,0 @@ -import { Icon } from "@iconify-icon/react"; -import { useAtomValue } from "jotai"; -import { useEffect, useState } from "react"; -import { useLocation } from "react-router-dom"; -import { NODE_OBJECT } from "../../config/constants"; -import graphqlClient from "../../graphql/graphqlClientApollo"; -import { SEARCH } from "../../graphql/queries/objects/search"; -import LoadingScreen from "../../screens/loading-screen/loading-screen"; -import { currentBranchAtom } from "../../state/atoms/branches.atom"; -import { datetimeAtom } from "../../state/atoms/time.atom"; -import { classNames, debounce } from "../../utils/common"; -import { Background } from "../display/background"; -import { POPOVER_SIZE, PopOver } from "../display/popover"; -import { Input } from "../inputs/input"; -import Transition from "../utils/transition"; -import { SearchResults } from "./search-results"; - -type tSearchInput = { - onChange: Function; - className?: string; - containerClassName?: string; - loading?: boolean; - placeholder?: string; - testId?: string; -}; - -export const SearchInput = (props: tSearchInput) => { - const { - className = "", - containerClassName = "", - loading, - onChange, - placeholder = "Search", - testId = "search-bar", - } = props; - - const [search, setSearch] = useState(""); - - const handleChange = (value: string) => { - setSearch(value); - onChange(value); - }; - - const handleFocus = () => { - if (!search) return; - - // Will reopen the results for the current search - onChange(search, true); - }; - - return ( -
- - - {loading && } - - {!loading && } -
- ); -}; - -export const SearchBar = () => { - const [isLoading, setIsLoading] = useState(false); - const [results, setResults] = useState({}); - const [search, setSearch] = useState(""); - const location = useLocation(); - - const branch = useAtomValue(currentBranchAtom); - const date = useAtomValue(datetimeAtom); - - const handleSearch = async (newValue: string = "") => { - const cleanedValue = newValue.trim(); - - try { - // Set search to set open / close if empty - setSearch(cleanedValue); - - if (!cleanedValue) return; - - setIsLoading(true); - - const { data } = await graphqlClient.query({ - query: SEARCH, - variables: { - search: cleanedValue, - }, - context: { - date, - branch: branch?.name, - }, - }); - - setIsLoading(false); - - if (!data?.[NODE_OBJECT]) return; - - setResults(data[NODE_OBJECT]); - } catch (e) { - setIsLoading(false); - } - }; - - // Debounce the query - const debounceHandleSearch = debounce(handleSearch); - - const handleChange = (value: string, immediate?: boolean) => { - // If immediate, triggers the search - if (immediate) return handleSearch(value); - - // Debounces the search - return debounceHandleSearch(value); - }; - - const handleClick = () => { - // Closes the panel on background click - setResults({}); - }; - - useEffect(() => { - // Close the panel on route change (when clicking an item) - setResults({}); - }, [location]); - - // Open if there is a search and a result (even if empty) - const isOpen = !!search && !!results?.edges; - - return ( -
- - - - -
- -
- - - - {() => } - - -
- ); -}; diff --git a/frontend/src/components/search/search-docs.tsx b/frontend/src/components/search/search-docs.tsx index d841a0e199..f585b38b1c 100644 --- a/frontend/src/components/search/search-docs.tsx +++ b/frontend/src/components/search/search-docs.tsx @@ -1,6 +1,6 @@ -import { useEffect, useState } from "react"; +import { Fragment, useEffect, useState } from "react"; import { fetchUrl } from "../../utils/fetch"; -import { SearchGroup, SearchGroupTitle, SearchResultItem } from "./search-modal"; +import { SearchGroup, SearchGroupTitle, SearchResultItem } from "./search-anywhere"; import { CONFIG, INFRAHUB_API_SERVER_URL } from "../../config/config"; import { Icon } from "@iconify-icon/react"; import { useDebounce } from "../../hooks/useDebounce"; @@ -68,10 +68,10 @@ const DocsResults = ({ breadcrumb, title, url }: SearchDocsResultProps) => { return ( {breadcrumb.slice(1).map((b) => ( - <> + {b} - + ))} {title} diff --git a/frontend/src/components/search/search-nodes.tsx b/frontend/src/components/search/search-nodes.tsx index d616f14748..c5d10fe607 100644 --- a/frontend/src/components/search/search-nodes.tsx +++ b/frontend/src/components/search/search-nodes.tsx @@ -12,7 +12,7 @@ import { constructPath } from "../../utils/fetch"; import { getObjectDetailsUrl } from "../../utils/objects"; import { format } from "date-fns"; import { Skeleton } from "../skeleton"; -import { SearchGroup, SearchGroupTitle, SearchResultItem } from "./search-modal"; +import { SearchGroup, SearchGroupTitle, SearchResultItem } from "./search-anywhere"; import { useDebounce } from "../../hooks/useDebounce"; type SearchProps = { @@ -67,7 +67,7 @@ const NodesOptions = ({ node }: NodesOptionsProps) => { const schemaData = generic || schema; - const columns = getSchemaObjectColumns(schemaData, true, 7); + const columns = getSchemaObjectColumns({ schema: schemaData, forListView: true, limit: 7 }); const queryString = schemaData ? 
getObjectDetailsPaginated({ diff --git a/frontend/src/components/search/search-result-item.tsx b/frontend/src/components/search/search-result-item.tsx deleted file mode 100644 index 1bf9ddf165..0000000000 --- a/frontend/src/components/search/search-result-item.tsx +++ /dev/null @@ -1,83 +0,0 @@ -import { gql } from "@apollo/client"; -import { useAtomValue } from "jotai"; -import { Link as RouterLink } from "react-router-dom"; -import { getObjectDetailsPaginated } from "../../graphql/queries/objects/getObjectDetails"; -import useQuery from "../../hooks/useQuery"; -import LoadingScreen from "../../screens/loading-screen/loading-screen"; -import { genericsState, schemaState } from "../../state/atoms/schema.atom"; -import { schemaKindNameState } from "../../state/atoms/schemaKindName.atom"; -import { constructPath } from "../../utils/fetch"; -import { getObjectItemDisplayValue } from "../../utils/getObjectItemDisplayValue"; -import { getSchemaObjectColumns } from "../../utils/getSchemaObjectColumns"; -import { getObjectDetailsUrl } from "../../utils/objects"; -import { Badge } from "../display/badge"; -import { Circle } from "../display/circle"; - -type tSearchResultItem = { - item: any; -}; - -export const SearchResultItem = (props: tSearchResultItem) => { - const { item } = props; - - const schemaList = useAtomValue(schemaState); - const schemaKindName = useAtomValue(schemaKindNameState); - const genericList = useAtomValue(genericsState); - - const schema = schemaList.find((s) => s.kind === item.__typename); - const generic = genericList.find((s) => s.kind === item.__typename); - - const schemaData = generic || schema; - - const columns = getSchemaObjectColumns(schemaData, true, 7); - - const queryString = schemaData - ? getObjectDetailsPaginated({ - ...schemaData, - columns, - objectid: item.id, - }) - : // Empty query to make the gql parsing work - // TODO: Find another solution for queries while loading schema - "query { ok }"; - - const query = gql` - ${queryString} - `; - - // TODO: Find a way to avoid querying object details if we are on a tab - const { loading, data } = useQuery(query, { skip: !schemaData }); - - if (loading) return ; - - const objectDetailsData = schemaData && data && data[item.__typename]?.edges[0]?.node; - - if (!objectDetailsData) return
No data found for this object
; - - return ( -
- -
-
- - - {schemaKindName[item.__typename]} - -
{objectDetailsData?.name?.value}
-
- -
- {columns.map((column: any, index: number) => ( -
-
{column.label}
- {getObjectItemDisplayValue(objectDetailsData, column, schemaKindName)} -
- ))} -
-
-
-
- ); -}; diff --git a/frontend/src/components/search/search-results.tsx b/frontend/src/components/search/search-results.tsx deleted file mode 100644 index 0480950706..0000000000 --- a/frontend/src/components/search/search-results.tsx +++ /dev/null @@ -1,54 +0,0 @@ -import { useAtomValue } from "jotai"; -import { schemaKindNameState } from "../../state/atoms/schemaKindName.atom"; -import { constructPath } from "../../utils/fetch"; -import { Link } from "../utils/link"; -import { SearchResultItem } from "./search-result-item"; - -type tSearchResults = { - results?: any; -}; - -export const SearchResults = (props: tSearchResults) => { - const { results = {} } = props; - - const schemaKindName = useAtomValue(schemaKindNameState); - - const { count, edges = [] } = results; - - if (count === 0) { - return
No results found
; - } - - const sortedResults: { [id: string]: any } = edges - .map((item: any) => item.node) - .reduce((acc: any, node: any) => { - return { - ...acc, - [node.__typename]: [...(acc[node.__typename] ?? []), node], - }; - }, {}); - - return ( -
- {Object.entries(sortedResults).map(([kind, nodes]) => ( - <> -
- {nodes.map((node: any, index: number) => ( - - ))} -
- -
- - Link to all {schemaKindName[kind]}s - {/* */} - -
- - ))} -
- ); -}; diff --git a/frontend/src/components/skeleton.tsx b/frontend/src/components/skeleton.tsx index ed13fc1fc5..60a9674870 100644 --- a/frontend/src/components/skeleton.tsx +++ b/frontend/src/components/skeleton.tsx @@ -5,7 +5,7 @@ export const Skeleton = ({ className = "", ...props }: HTMLAttributes; + tooltipFormatter?: (value: any) => React.ReactNode; + legendFormatter?: (name: string, value?: any) => React.ReactNode; +}; +export const PieChart = ({ data, tooltipFormatter, legendFormatter }: PieChartProps) => { + return ( + + + + + ( + + {legendFormatter ? legendFormatter(name, payload?.value) : name} + + )} + /> + + + ); +}; diff --git a/frontend/src/components/stats/progress-bar-chart.tsx b/frontend/src/components/stats/progress-bar-chart.tsx new file mode 100644 index 0000000000..d559a61972 --- /dev/null +++ b/frontend/src/components/stats/progress-bar-chart.tsx @@ -0,0 +1,38 @@ +import { HTMLAttributes } from "react"; +import * as ProgressPrimitive from "@radix-ui/react-progress"; +import { classNames } from "../../utils/common"; + +export const ProgressBar = ({ className, value, ...props }: ProgressPrimitive.ProgressProps) => ( + + + +); + +const sanitizeProgressBarValue = (value: number) => { + if (isNaN(value)) return 0; + + if (value > 100) return 100; + + return value; +}; + +interface ProgressBarChartProps extends HTMLAttributes { + value: number; +} + +export default function ProgressBarChart({ value, className, ...props }: ProgressBarChartProps) { + return ( +
+ + {value}% +
+ ); +} diff --git a/frontend/src/components/table/property-list.tsx b/frontend/src/components/table/property-list.tsx new file mode 100644 index 0000000000..c1a11e2fba --- /dev/null +++ b/frontend/src/components/table/property-list.tsx @@ -0,0 +1,66 @@ +import React from "react"; +import { classNames } from "../../utils/common"; + +export type Property = { + name: string; + value?: React.ReactNode; +}; + +export interface PropertyListProps extends React.HTMLAttributes { + properties: Array; + bodyClassName?: string; + labelClassName?: string; + valueClassName?: string; +} + +export interface PropertyRowProps extends React.HTMLAttributes { + data: Property; + labelClassName?: string; + valueClassName?: string; +} + +export const PropertyRow = ({ + data, + labelClassName, + valueClassName, + ...props +}: PropertyRowProps) => { + return ( + + + {data.name} + + + {data.value ?? "-"} + + + ); +}; + +export const PropertyList = ({ + properties, + className, + bodyClassName, + labelClassName, + valueClassName, + ...props +}: PropertyListProps) => { + return ( + + + {properties.map((property) => { + return ( + + ); + })} + +
+ ); +}; diff --git a/frontend/src/components/table/table.tsx b/frontend/src/components/table/table.tsx index f9ec6a4eb4..9202051a94 100644 --- a/frontend/src/components/table/table.tsx +++ b/frontend/src/components/table/table.tsx @@ -3,7 +3,7 @@ import { Link } from "react-router-dom"; import { useAuth } from "../../hooks/useAuth"; import NoDataFound from "../../screens/no-data-found/no-data-found"; import { classNames } from "../../utils/common"; -import { BUTTON_TYPES, Button } from "../buttons/button"; +import { Button } from "../buttons/button-primitive"; export type tColumn = { name: string; @@ -20,10 +20,11 @@ type tTableProps = { rows: tRow[]; constructLink?: Function; onDelete?: Function; + onUpdate?: Function; }; export const Table = (props: tTableProps) => { - const { columns, rows, onDelete } = props; + const { columns, rows, onDelete, onUpdate } = props; const auth = useAuth(); @@ -37,7 +38,7 @@ export const Table = (props: tTableProps) => { {column.label} ))} - {onDelete && } + {(onUpdate || onDelete) && } @@ -56,29 +57,41 @@ export const Table = (props: tTableProps) => { -
{row.values[column.name]}
+ {row.values[column.name] ?? "-"} )} {!row.link && (
- {row.values[column.name]} + {row.values[column.name] ?? "-"}
)} ))} - {onDelete && ( - - + {(onUpdate || onDelete) && ( + + {onUpdate && ( + + )} + + {onDelete && ( + + )} )} diff --git a/frontend/src/components/tabs.tsx b/frontend/src/components/tabs.tsx index aa19fee738..ecfb6431a3 100644 --- a/frontend/src/components/tabs.tsx +++ b/frontend/src/components/tabs.tsx @@ -24,7 +24,7 @@ export const Tabs = (props: TabsProps) => { const handleClick = (tab: Tab, index: number) => { if (tab.onClick) { - tab.onClick(); + return tab.onClick(); } setQspTab(index === 0 ? undefined : tab.name); diff --git a/frontend/src/components/time-selector.tsx b/frontend/src/components/time-selector.tsx index c317f1fe8f..b060247fe2 100644 --- a/frontend/src/components/time-selector.tsx +++ b/frontend/src/components/time-selector.tsx @@ -8,6 +8,7 @@ import { DateTimeParam, useQueryParam } from "use-query-params"; import { QSP } from "../config/qsp"; import { datetimeAtom } from "../state/atoms/time.atom"; import { classNames } from "../utils/common"; +import { Button } from "./buttons/button-primitive"; export const TimeFrameSelector = () => { const [date, setDate] = useAtom(datetimeAtom); @@ -37,18 +38,18 @@ export const TimeFrameSelector = () => { data-testid="timeframe-selector"> + enter="linear duration-300" + enterFrom="w-0 opacity-0" + enterTo="w-[174px] opacity-100" + leave="linear duration-300" + leaveFrom="w-[174px] opacity-100" + leaveTo="w-0 opacity-0" + className="flex items-center"> -
+
Current view time: {date && {format(date, "PP | H:mm")}}
@@ -75,12 +76,11 @@ export const TimeFrameSelector = () => { const ButtonStyled = forwardRef>( ({ className, ...props }, ref) => ( -
} @@ -463,6 +431,6 @@ export default function ArtifactsDetails() { row={objectDetailsData} /> -
+ ); } diff --git a/frontend/src/screens/branches/branch-details.tsx b/frontend/src/screens/branches/branch-details.tsx index a3293d65fb..514758642d 100644 --- a/frontend/src/screens/branches/branch-details.tsx +++ b/frontend/src/screens/branches/branch-details.tsx @@ -140,7 +140,7 @@ export const BranchDetails = () => { onDelete={async () => { await branchAction({ successMessage: "Branch deleted successfully!", - errorMessage: "An error occured while deleting the branch", + errorMessage: "An error occurred while deleting the branch", request: deleteBranch, options: { name: branch.name, @@ -205,7 +205,7 @@ export const BranchDetails = () => { onClick={() => branchAction({ successMessage: "Branch merged successfully!", - errorMessage: "An error occured while merging the branch", + errorMessage: "An error occurred while merging the branch", request: mergeBranch, options: { name: branch.name, @@ -231,7 +231,7 @@ export const BranchDetails = () => { onClick={() => branchAction({ successMessage: "Branch rebased successfully!", - errorMessage: "An error occured while rebasing the branch", + errorMessage: "An error occurred while rebasing the branch", request: rebaseBranch, options: { name: branch.name, @@ -248,7 +248,7 @@ export const BranchDetails = () => { onClick={() => branchAction({ successMessage: "The branch is valid!", - errorMessage: "An error occured while validating the branch", + errorMessage: "An error occurred while validating the branch", request: validateBranch, options: { name: branch.name, diff --git a/frontend/src/screens/branches/branche-item-details.tsx b/frontend/src/screens/branches/branch-item-details.tsx similarity index 54% rename from frontend/src/screens/branches/branche-item-details.tsx rename to frontend/src/screens/branches/branch-item-details.tsx index b64dae1bbd..2e59bb44c7 100644 --- a/frontend/src/screens/branches/branche-item-details.tsx +++ b/frontend/src/screens/branches/branch-item-details.tsx @@ -1,5 +1,4 @@ -import { ChevronRightIcon } from "@heroicons/react/24/outline"; -import { useNavigate, useParams } from "react-router-dom"; +import { Link, useParams } from "react-router-dom"; import { StringParam, useQueryParam } from "use-query-params"; import { TabsButtons } from "../../components/buttons/tabs-buttons"; import { QSP } from "../../config/qsp"; @@ -8,6 +7,7 @@ import { constructPath } from "../../utils/fetch"; import { Diff } from "../diff/diff"; import { BranchDetails } from "./branch-details"; import Content from "../layout/content"; +import { Icon } from "@iconify-icon/react"; export const BRANCH_TABS = { DETAILS: "details", @@ -36,35 +36,30 @@ const renderContent = (tab: string | null | undefined) => { } }; -export const BrancheItemDetails = () => { +const BranchItemDetails = () => { const { branchname } = useParams(); const [qspTab] = useQueryParam(QSP.BRANCH_TAB, StringParam); - const navigate = useNavigate(); useTitle(`${branchname} details`); - const branchesPath = constructPath("/branches"); - return ( - <> -
-
-
navigate(branchesPath)} - className="text-base font-semibold leading-6 text-gray-900 cursor-pointer hover:underline"> - Branches + + + + Branches + + +

{branchname}

-
-
+ } + /> - {renderContent(qspTab)} - +
{renderContent(qspTab)}
+ ); }; + +export default BranchItemDetails; diff --git a/frontend/src/screens/branches/branches-items.tsx b/frontend/src/screens/branches/branches-items.tsx index 1e91c46738..dfb22f5dea 100644 --- a/frontend/src/screens/branches/branches-items.tsx +++ b/frontend/src/screens/branches/branches-items.tsx @@ -2,7 +2,6 @@ import { Icon } from "@iconify-icon/react"; import { useAtom } from "jotai"; import * as R from "ramda"; import { useNavigate } from "react-router-dom"; -import { Retry } from "../../components/buttons/retry"; import { DateDisplay } from "../../components/display/date-display"; import { Tooltip } from "../../components/utils/tooltip"; import GET_BRANCHES from "../../graphql/queries/branches/getBranches"; @@ -11,8 +10,9 @@ import { useTitle } from "../../hooks/useTitle"; import { branchesState } from "../../state/atoms/branches.atom"; import { constructPath } from "../../utils/fetch"; import Content from "../layout/content"; +import { Badge } from "../../components/ui/badge"; -export const BranchesItems = () => { +const BranchesItems = () => { const [storedBranches, setBranches] = useAtom(branchesState); const navigate = useNavigate(); useTitle("Branches list"); @@ -33,13 +33,16 @@ export const BranchesItems = () => { return ( -
-

Branches ({branches?.length})

- -
- -
-
+ +

Branches

+ {branches.length} +
+ } + isReloadLoading={loading} + reload={handleRefresh} + />
    { ); }; + +export default BranchesItems; diff --git a/frontend/src/screens/device-list/device-filter-bar-paginated.tsx b/frontend/src/screens/device-list/device-filter-bar-paginated.tsx index 58e73664b3..325a08a803 100644 --- a/frontend/src/screens/device-list/device-filter-bar-paginated.tsx +++ b/frontend/src/screens/device-list/device-filter-bar-paginated.tsx @@ -2,6 +2,7 @@ import { ChevronDownIcon, ChevronRightIcon } from "@heroicons/react/20/solid"; import { useState } from "react"; import { Button } from "../../components/buttons/button"; import { BADGE_TYPES, Badge } from "../../components/display/badge"; +import { SEARCH_FILTERS } from "../../config/constants"; import { iComboBoxFilter } from "../../graphql/variables/filtersVar"; import useFilters from "../../hooks/useFilters"; import DeviceFilterBarContent from "./device-filter-bar-content"; @@ -21,7 +22,7 @@ export default function DeviceFilterBar(props: any) { const [filters, setFilters] = useFilters(); const handleClickReset = () => { - setFilters(); + setFilters([]); }; const handleClickRemoveFilter = (filter: any) => { @@ -51,15 +52,17 @@ export default function DeviceFilterBar(props: any) {