diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000000..0c880bd262
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,112 @@
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+*.lcov
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# TypeScript v1 declaration files
+typings/
+
+# TypeScript cache
+*.tsbuildinfo
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Microbundle cache
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variables file
+.env*
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+
+# Next.js build output
+.next
+
+# Nuxt.js build / generate output
+.nuxt
+dist
+
+# Gatsby files
+.cache/
+# Comment in the public line if your project uses Gatsby and *not* Next.js
+# https://nextjs.org/blog/next-9-1#public-directory-support
+# public
+
+# vuepress build output
+.vuepress/dist
+
+# Serverless directories
+.serverless/
+
+# FuseBox cache
+.fusebox/
+
+# DynamoDB Local files
+.dynamodb/
+
+# TernJS port file
+.tern-port
+
+packages/db/fixtures/sqlite/db
+packages/db/test/fixtures/sqlite/db
+
+# packages/dashboard specific rules
+packages/db-dashboard/build/
+playwright-report
+.DS_Store
+.swp
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000000..1bfe7e4da5
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,227 @@
+name: Run tests
+
+on:
+ push:
+ branches:
+ - main
+ paths-ignore:
+ - 'docs/**'
+ - '**.md'
+ pull_request:
+ paths-ignore:
+ - 'docs/**'
+ - '**.md'
+
+# This allows a subsequently queued workflow run to interrupt previous runs
+concurrency:
+ group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
+ cancel-in-progress: true
+
+env:
+ PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
+
+jobs:
+ setup-node_modules:
+ runs-on: ${{matrix.os}}
+ timeout-minutes: 15
+ strategy:
+ matrix:
+ os: [ubuntu-latest, windows-latest]
+ steps:
+ - uses: actions/checkout@v3
+ - uses: pnpm/action-setup@v2.2.2
+ - uses: actions/setup-node@v3
+ with:
+ node-version: 18
+ cache: 'pnpm'
+ - name: pnpm fetch
+ run: pnpm fetch
+
+ ci-cli:
+ needs: setup-node_modules
+ runs-on: ${{ matrix.os }}
+ timeout-minutes: 15
+ strategy:
+ matrix:
+ node-version: [16, 18]
+ os: [ubuntu-latest, windows-latest]
+ steps:
+ - uses: actions/checkout@v3
+ - uses: pnpm/action-setup@v2.2.2
+ - uses: actions/setup-node@v3
+ with:
+ node-version: ${{ matrix.node-version }}
+ cache: 'pnpm'
+ - name: pnpm install
+ run: pnpm install
+ - name: Run test suite
+ run: cd packages/cli && pnpm test
+
+ ci-db-dashboard:
+ needs: setup-node_modules
+ runs-on: ${{matrix.os}}
+ timeout-minutes: 5
+ strategy:
+ matrix:
+ node-version: [16, 18]
+ os: [ubuntu-latest]
+ steps:
+ - uses: actions/checkout@v3
+ - uses: pnpm/action-setup@v2.2.2
+ - uses: actions/setup-node@v3
+ with:
+ node-version: ${{ matrix.node-version }}
+ cache: 'pnpm'
+ - name: pnpm install
+ run: pnpm install
+ - name: Builds the dashboard
+ run: npm run dashboard:build
+ - name: Run test suite Dashboard
+ run: cd packages/db-dashboard && pnpm test
+
+ ci-config:
+ needs: setup-node_modules
+ runs-on: ${{matrix.os}}
+ timeout-minutes: 15
+ strategy:
+ matrix:
+ node-version: [16, 18]
+ os: [ubuntu-latest, windows-latest]
+ steps:
+ - uses: actions/checkout@v3
+ - uses: pnpm/action-setup@v2.2.2
+ - uses: actions/setup-node@v3
+ with:
+ node-version: ${{ matrix.node-version }}
+ cache: 'pnpm'
+ - name: pnpm install
+ run: pnpm install --offline
+ - name: Run test suite config manager
+ run: cd packages/config && pnpm test
+
+ ci-db:
+ needs: setup-node_modules
+ runs-on: ${{matrix.os}}
+ timeout-minutes: 15
+ strategy:
+ matrix:
+ node-version: [16, 18]
+ os: [ubuntu-latest, windows-latest]
+ steps:
+ - uses: actions/checkout@v3
+ - uses: pnpm/action-setup@v2.2.2
+ - uses: ikalnytskyi/action-setup-postgres@v3
+ - uses: actions/setup-node@v3
+ with:
+ node-version: ${{ matrix.node-version }}
+ cache: 'pnpm'
+ - name: pnpm install
+ run: pnpm install --offline
+ - name: Builds the dashboard
+ run: pnpm run dashboard:build
+ - name: Run test suite core
+ run: cd packages/db-core && pnpm test
+ - name: Run test suite Platformatic DB
+ run: cd packages/db && pnpm test
+
+ ci-db-authorization:
+ needs: setup-node_modules
+ runs-on: ${{matrix.os}}
+ timeout-minutes: 5
+ strategy:
+ matrix:
+ node-version: [16, 18]
+ os: [ubuntu-latest]
+ steps:
+ - uses: actions/checkout@v3
+ - uses: pnpm/action-setup@v2.2.2
+ - uses: actions/setup-node@v3
+ with:
+ node-version: ${{ matrix.node-version }}
+ cache: 'pnpm'
+ - name: Start docker containers for testing
+ run: docker-compose up -d postgresql
+ - name: pnpm install
+ run: pnpm install --offline
+ - name: Run test suite
+ run: cd packages/db-authorization && pnpm test
+
+ ci-db-core:
+ needs: setup-node_modules
+ runs-on: ${{matrix.os}}
+ timeout-minutes: 5
+ strategy:
+ matrix:
+ db: [postgresql, mariadb, mysql, mysql8, sqlite]
+ node-version: [16, 18]
+ os: [ubuntu-latest]
+ steps:
+ - uses: actions/checkout@v3
+ - uses: pnpm/action-setup@v2.2.2
+ - uses: actions/setup-node@v3
+ with:
+ node-version: ${{ matrix.node-version }}
+ cache: 'pnpm'
+ - name: Start docker containers for testing
+ run: docker-compose up -d ${{ matrix.db }}
+ if: ${{ matrix.db != 'sqlite' }}
+ - name: pnpm install
+ run: pnpm install --offline
+ - name: Wait for DB
+ run: sleep 10
+ if: ${{ matrix.db != 'sqlite' }}
+ - name: Run test suite sql-mapper
+ run: cd packages/sql-mapper && pnpm run test:typescript && pnpm run test:${{ matrix.db }}; cd ../..
+ - name: Run test suite sql-json-schema-mapper
+ run: cd packages/sql-json-schema-mapper && pnpm run test:${{ matrix.db }}; cd ../..
+ - name: Run test suite sql-openapi
+ run: cd packages/sql-openapi && pnpm run test:typescript && pnpm run test:${{ matrix.db }}; cd ../..
+ - name: Run test suite sql-graphql
+ run: cd packages/sql-graphql && pnpm run test:typescript && pnpm run test:${{ matrix.db }}; cd ../..
+
+ ci-auth-login:
+ needs: setup-node_modules
+ runs-on: ${{ matrix.os }}
+ timeout-minutes: 5
+ strategy:
+ matrix:
+ node-version: [16, 18]
+ os: [ubuntu-latest, windows-latest]
+ steps:
+ - uses: actions/checkout@v3
+ - uses: pnpm/action-setup@v2.2.2
+ - uses: actions/setup-node@v3
+ with:
+ node-version: ${{ matrix.node-version }}
+ cache: 'pnpm'
+ - name: pnpm install
+ run: pnpm install --offline
+ - name: Run test suite
+ run: cd packages/authenticate && pnpm test; cd ../..
+
+ playwright-e2e:
+ needs: setup-node_modules
+ runs-on: ubuntu-latest
+ timeout-minutes: 5
+ steps:
+ - uses: actions/checkout@v3
+ - uses: pnpm/action-setup@v2.2.2
+ - uses: actions/setup-node@v3
+ with:
+ node-version: 18
+ cache: 'pnpm'
+ - name: Start docker containers for testing
+ run: docker-compose up -d postgresql
+ - name: pnpm install
+ run: pnpm install --offline --frozen-lockfile
+ - name: Builds the dashboard
+ run: pnpm run dashboard:build
+ - name: Install Playwright browsers
+ run: cd packages/db-dashboard && pnpm exec playwright install
+ - name: Wait for DB
+ run: sleep 10
+ - name: Run Platformatic DB server and E2E tests
+ run: |
+ node ./packages/cli/cli.js db --config=./packages/db-dashboard/test/e2e/fixtures/e2e-test-config.json &
+ sleep 5 &&
+ cd packages/db-dashboard && pnpm run test:e2e
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
new file mode 100644
index 0000000000..b87605de55
--- /dev/null
+++ b/.github/workflows/docker.yml
@@ -0,0 +1,29 @@
+name: docker build
+
+on:
+ push:
+ branches:
+ - main
+
+jobs:
+ buildx:
+ runs-on: ubuntu-latest
+ environment: main
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v2
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2
+ - name: Login to DockerHub
+ uses: docker/login-action@v2
+ with:
+ username: ${{ secrets.DOCKER_USERNAME }}
+ password: ${{ secrets.DOCKER_TOKEN }}
+ - name: Build and push
+ uses: docker/build-push-action@v3
+ with:
+ push: true
+ tags: platformatic/platformatic-private:latest
+ platforms: linux/amd64,linux/arm64
diff --git a/.github/workflows/issues.yml b/.github/workflows/issues.yml
new file mode 100644
index 0000000000..40bab1d870
--- /dev/null
+++ b/.github/workflows/issues.yml
@@ -0,0 +1,25 @@
+name: Add new issue/PR to project
+
+on:
+ issues:
+ types:
+ - opened
+
+jobs:
+ add-to-project:
+ name: Add issue or PR to project
+ runs-on: ubuntu-latest
+ steps:
+ - name: Generate token
+ id: generate_token
+ uses: vidavidorra/github-app-token@v1.0.0
+ with:
+ appId: ${{ secrets.INTERNAL_GH_APP_ID }}
+ privateKey: ${{ secrets.INTERNAL_GH_APP_SECRET }}
+ - name: Add to Project
+ env:
+ TOKEN: ${{ steps.generate_token.outputs.token }}
+ uses: actions/add-to-project@338ac1805ece459f9c25a3e7a2b749fec994576d
+ with:
+ project-url: https://github.com/orgs/platformatic/projects/1
+ github-token: ${{ env.TOKEN }}
diff --git a/.github/workflows/update-docs.yml b/.github/workflows/update-docs.yml
new file mode 100644
index 0000000000..a0960e65a6
--- /dev/null
+++ b/.github/workflows/update-docs.yml
@@ -0,0 +1,23 @@
+name: "Trigger OSS repo"
+on:
+ release:
+ types: [published]
+
+ push:
+ branches:
+ - main
+ paths:
+ - 'docs/**'
+ - '**.md'
+jobs:
+ build-and-publish:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Update docs
+ if: ${{ github.event_name == 'release' }}
+ run: |
+ curl -XPOST -u "${{ secrets.GH_API_USERNAME }}:${{ secrets.GH_API_TOKEN }}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" https://api.github.com/repos/platformatic/oss/dispatches --data '{"event_type": "update_docs"}'
+ - name: Force update docs
+ if: ${{ github.event_name == 'push' }}
+ run: |
+ curl -XPOST -u "${{ secrets.GH_API_USERNAME }}:${{ secrets.GH_API_TOKEN }}" -H "Accept: application/vnd.github.everest-preview+json" -H "Content-Type: application/json" https://api.github.com/repos/platformatic/oss/dispatches --data '{"event_type": "update_docs", "inputs": { "force": true }}'
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000..0c880bd262
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,112 @@
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+*.lcov
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# TypeScript v1 declaration files
+typings/
+
+# TypeScript cache
+*.tsbuildinfo
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Microbundle cache
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variables file
+.env*
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+
+# Next.js build output
+.next
+
+# Nuxt.js build / generate output
+.nuxt
+dist
+
+# Gatsby files
+.cache/
+# Comment in the public line if your project uses Gatsby and *not* Next.js
+# https://nextjs.org/blog/next-9-1#public-directory-support
+# public
+
+# vuepress build output
+.vuepress/dist
+
+# Serverless directories
+.serverless/
+
+# FuseBox cache
+.fusebox/
+
+# DynamoDB Local files
+.dynamodb/
+
+# TernJS port file
+.tern-port
+
+packages/db/fixtures/sqlite/db
+packages/db/test/fixtures/sqlite/db
+
+# packages/dashboard specific rules
+packages/db-dashboard/build/
+playwright-report
+.DS_Store
+.swp
diff --git a/.npmrc b/.npmrc
new file mode 100644
index 0000000000..9459312b53
--- /dev/null
+++ b/.npmrc
@@ -0,0 +1,3 @@
+package-lock=true
+auto-install-peers=true
+strict-peer-dependencies=false
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000000..19e1bf63d6
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,94 @@
+# Platformatic
+
+## Running and Developing DB
+
+### Preparation
+
+1. Clone this repository
+1. Install pnpm: `npm i pnpm --location=global`
+1. Install dependencies for the root project: `pnpm i`
+1. Install Docker with Docker Desktop or [Colima](https://github.com/abiosoft/colima)
+
+
+### Start the RDBMS
+
+We use Docker to start all the databases we develop against.
+
+On Linux, execute: `docker compose up`
+
+On Intel Macs: `docker compose -f docker-compose-mac.yml up`
+
+On Apple Silicon Macs: `docker compose -f docker-compose-apple-silicon.yml up`
+
+### Start platformatic db
+
+Create directories to work from:
+
+```sh
+mkdir -p my-demo/migrations
+```
+
+Install all dependencies:
+```sh
+pnpm i
+```
+
+The CLI package is now available at **./node_modules/.bin/platformatic**. Run
+`pnpm link` to make `platformatic` available everywhere.
+```sh
+(cd packages/cli && pnpm link)
+```
+
+### Run dashboard development server
+
+Use the command
+```sh
+npm run dashboard:start
+```
+
+This will start a webpack dev server on port `3000` by default, with file watching and hot reload (as in a standard `create-react-app` application).
+
+Note that GraphiQL will _not_ work because platformatic-db has not been started
+yet.
+
+### Run platformatic-db service
+
+First build the dashboard for production with the command
+```sh
+pnpm run dashboard:build
+```
+
+This will create compressed files and assets under the **packages/db-dashboard/build** directory.
+To run the service:
+```sh
+platformatic db
+```
+This will load the config from the local directory (i.e. using the config file **platformatic.db.json**).
+
+If you want to use another config file, use the option `--config=/path/to/some.json`.
+
+### Testing
+
+1. [Start the RDBMS](#start-the-rdbms)
+1. Run `npm run dashboard:build`
+1. Run tests: `npm test`
+
+### Releasing
+
+All Platformatic modules share the same release number and are released
+in a single process. To avoid internal breakage, dependencies within
+this repository use the `workspace:*` protocol, which pnpm replaces
+with precise versions during publish.
+
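+For example, a package in this monorepo may declare an internal dependency in its
+`package.json` as `"@platformatic/sql-mapper": "workspace:*"`; at publish time pnpm
+rewrites it to the exact version being released.
+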
+The procedure to release is simple:
+
+1. Update the version of the root `package.json`
+1. Run `./scripts/sync-version.sh`
+1. Run `pnpm -r publish`
+
+### Creating and merging a PR
+At the top of the PR description, if the PR fixes a GitHub issue, add:
+```
+fixes #issuenum
+```
+When all checks have passed and the changes have been approved, merge the PR using the `squash and merge` option.
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000000..ada0bb99da
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,45 @@
+FROM node:18-alpine
+
+ENV HOME=/home
+ENV PLT_HOME=$HOME/platformatic/
+ENV PNPM_HOME=$HOME/pnpm
+ENV APP_HOME=$HOME/app
+ENV PATH=/home/pnpm:$PATH
+
+RUN mkdir $PNPM_HOME
+
+# Install Platformatic in the $PLT_HOME folder
+WORKDIR $PLT_HOME
+
+# Install required packages
+RUN apk update && apk add --no-cache dumb-init python3 libc-dev make g++
+
+# Install pnpm
+RUN npm i pnpm --location=global
+
+# Copy the manifest, lockfile and workspace configuration
+COPY package.json ./
+COPY pnpm-lock.yaml ./
+COPY pnpm-workspace.yaml ./
+
+# Fetch all dependencies
+RUN pnpm fetch --prod
+
+# Copy files
+COPY . .
+
+# Install all the deps in the source code
+RUN pnpm install --frozen-lockfile --prod --offline
+
+# Add platformatic to path
+RUN cd packages/cli && pnpm link --global
+
+# Move to the app directory
+WORKDIR $APP_HOME
+
+# Reduce our permissions from root to a normal user
+RUN chown node:node .
+USER node
+
+ENTRYPOINT ["dumb-init"]
+CMD ["platformatic"]
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000000..261eeb9e9f
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/NOTICE b/NOTICE
new file mode 100644
index 0000000000..a7d8a8414a
--- /dev/null
+++ b/NOTICE
@@ -0,0 +1,13 @@
+ Copyright 2022 Platformatic
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000..a1bed5dc36
--- /dev/null
+++ b/README.md
@@ -0,0 +1,35 @@
+# Platformatic
+
+Platformatic is a set of Open Source tools that you can use to build your own
+_Internal Developer Platform_.
+
+The first of these tools is **Platformatic DB** — more will follow!
+
+## Install
+
+```bash
+npm install platformatic
+
+# Start a new project
+npx platformatic db init
+```
+
+Follow our [Quick Start Guide](https://oss.platformatic.dev/docs/getting-started/quick-start-guide)
+to get up and running with Platformatic DB.
+
+## Documentation
+
+- [Getting Started](https://oss.platformatic.dev/docs/category/getting-started)
+- [Reference](https://oss.platformatic.dev/docs/category/reference)
+- [Guides](https://oss.platformatic.dev/docs/category/guides)
+
+Check out our full documentation at [oss.platformatic.dev](https://oss.platformatic.dev).
+
+## Support
+
+Having issues? Drop in to the [Platformatic Discord](https://discord.com/channels/1011258196905689118/1011258204371554307)
+for help.
+
+## License
+
+Apache 2.0
diff --git a/demo/auth/migrations/001.do.sql b/demo/auth/migrations/001.do.sql
new file mode 100644
index 0000000000..0a09b9f9cb
--- /dev/null
+++ b/demo/auth/migrations/001.do.sql
@@ -0,0 +1,4 @@
+CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(255) NOT NULL
+);
diff --git a/demo/auth/migrations/001.undo.sql b/demo/auth/migrations/001.undo.sql
new file mode 100644
index 0000000000..f5465cf307
--- /dev/null
+++ b/demo/auth/migrations/001.undo.sql
@@ -0,0 +1 @@
+DROP TABLE pages;
diff --git a/demo/auth/migrations/002.do.sql b/demo/auth/migrations/002.do.sql
new file mode 100644
index 0000000000..098ff52de4
--- /dev/null
+++ b/demo/auth/migrations/002.do.sql
@@ -0,0 +1,5 @@
+CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255) NOT NULL
+);
+ALTER TABLE pages ADD COLUMN category_id INTEGER REFERENCES categories(id);
diff --git a/demo/auth/migrations/002.undo.sql b/demo/auth/migrations/002.undo.sql
new file mode 100644
index 0000000000..048007a86d
--- /dev/null
+++ b/demo/auth/migrations/002.undo.sql
@@ -0,0 +1,2 @@
+ALTER TABLE pages DROP COLUMN category_id;
+DROP TABLE categories;
diff --git a/demo/auth/migrations/003.do.sql b/demo/auth/migrations/003.do.sql
new file mode 100644
index 0000000000..08ce54b5a2
--- /dev/null
+++ b/demo/auth/migrations/003.do.sql
@@ -0,0 +1 @@
+ALTER TABLE pages ADD COLUMN user_id INTEGER;
diff --git a/demo/auth/migrations/003.undo.sql b/demo/auth/migrations/003.undo.sql
new file mode 100644
index 0000000000..9fcc1cee49
--- /dev/null
+++ b/demo/auth/migrations/003.undo.sql
@@ -0,0 +1 @@
+ALTER TABLE pages DROP COLUMN user_id;
diff --git a/demo/auth/platformatic.db.json b/demo/auth/platformatic.db.json
new file mode 100644
index 0000000000..7ba2c3b6f9
--- /dev/null
+++ b/demo/auth/platformatic.db.json
@@ -0,0 +1,51 @@
+{
+ "server": {
+ "logger": {
+ "level": "info"
+ },
+ "hostname": "127.0.0.1",
+ "port": "3042"
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1:5432/postgres",
+ "graphql": {
+ "graphiql": true
+ }
+ },
+ "migrations": {
+ "dir": "./migrations"
+ },
+ "plugin": {
+ "path": "./plugin.js"
+ },
+ "authorization": {
+ "adminSecret": "platformatic",
+ "rules": [
+ {
+ "role": "user",
+ "entity": "page",
+ "delete": false,
+ "defaults": {
+ "userId": "X-PLATFORMATIC-USER-ID"
+ },
+ "find": {
+ "checks": {
+ "userId": "X-PLATFORMATIC-USER-ID"
+ }
+ },
+ "save": {
+ "checks": {
+ "userId": "X-PLATFORMATIC-USER-ID"
+ }
+ }
+ },
+ {
+ "role": "anonymous",
+ "entity": "page",
+ "find": false,
+ "delete": false,
+ "save": false
+ }
+ ]
+ }
+}
diff --git a/demo/auth/plugin.js b/demo/auth/plugin.js
new file mode 100644
index 0000000000..7f681f3a06
--- /dev/null
+++ b/demo/auth/plugin.js
@@ -0,0 +1,34 @@
+'use strict'
+
+module.exports = async function (app) {
+ app.log.info('loaded')
+
+ app.get('/hello', async function () {
+ return {
+ message: 'Hello World!'
+ }
+ })
+
+ // console.log(await app.platformatic.entities.page.find({ fields: ['title'] }))
+
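+  // Extend the generated GraphQL schema with a custom query and its resolvers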
+ app.graphql.extendSchema(`
+ extend type Query {
+ hello: String,
+ titles: [String]
+ }
+ `)
+ app.graphql.defineResolvers({
+ Query: {
+ hello: () => 'Hello World!',
+ titles: async () => {
+ const { db, sql } = app.platformatic
+
+ const titles = await db.query(sql`
+ SELECT title FROM pages
+ `)
+
+ return titles.map(({ title }) => title)
+ }
+ }
+ })
+}
diff --git a/demo/basic/migrations/001.do.sql b/demo/basic/migrations/001.do.sql
new file mode 100644
index 0000000000..0a09b9f9cb
--- /dev/null
+++ b/demo/basic/migrations/001.do.sql
@@ -0,0 +1,4 @@
+CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(255) NOT NULL
+);
diff --git a/demo/basic/migrations/001.undo.sql b/demo/basic/migrations/001.undo.sql
new file mode 100644
index 0000000000..f5465cf307
--- /dev/null
+++ b/demo/basic/migrations/001.undo.sql
@@ -0,0 +1 @@
+DROP TABLE pages;
diff --git a/demo/basic/migrations/002.do.sql b/demo/basic/migrations/002.do.sql
new file mode 100644
index 0000000000..098ff52de4
--- /dev/null
+++ b/demo/basic/migrations/002.do.sql
@@ -0,0 +1,5 @@
+CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255) NOT NULL
+);
+ALTER TABLE pages ADD COLUMN category_id INTEGER REFERENCES categories(id);
diff --git a/demo/basic/migrations/002.undo.sql b/demo/basic/migrations/002.undo.sql
new file mode 100644
index 0000000000..048007a86d
--- /dev/null
+++ b/demo/basic/migrations/002.undo.sql
@@ -0,0 +1,2 @@
+ALTER TABLE pages DROP COLUMN category_id;
+DROP TABLE categories;
diff --git a/demo/basic/platformatic.db.json b/demo/basic/platformatic.db.json
new file mode 100644
index 0000000000..663ed5382e
--- /dev/null
+++ b/demo/basic/platformatic.db.json
@@ -0,0 +1,18 @@
+{
+ "server": {
+ "logger": {
+ "level": "info"
+ },
+ "hostname": "127.0.0.1",
+ "port": "3042"
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1:5432/postgres",
+ "graphql": {
+ "graphiql": true
+ }
+ },
+ "migrations": {
+ "dir": "./migrations"
+ }
+}
diff --git a/docker-compose-apple-silicon.yml b/docker-compose-apple-silicon.yml
new file mode 100644
index 0000000000..ca869e6c51
--- /dev/null
+++ b/docker-compose-apple-silicon.yml
@@ -0,0 +1,33 @@
+version: "3.3"
+services:
+ postgresql:
+ ports:
+ - "5432:5432"
+ image: "arm64v8/postgres:14-alpine"
+ environment:
+ - POSTGRES_PASSWORD=postgres
+ mariadb:
+ ports:
+ - "3307:3306"
+ image: "arm64v8/mariadb:10.9"
+ environment:
+ - MYSQL_ALLOW_EMPTY_PASSWORD=yes
+ - MYSQL_DATABASE=graph
+ mysql:
+ platform: 'linux/amd64'
+ ports:
+ - "3306:3306"
+ image: "mysql:5.7"
+ environment:
+ - MYSQL_ALLOW_EMPTY_PASSWORD=yes
+ - MYSQL_DATABASE=graph
+ mysql8:
+ ports:
+ - "3308:3306"
+ image: "arm64v8/mysql:8-oracle"
+ environment:
+ - MYSQL_ALLOW_EMPTY_PASSWORD=yes
+ - MYSQL_DATABASE=graph
+
+
+
diff --git a/docker-compose-mac.yml b/docker-compose-mac.yml
new file mode 100644
index 0000000000..f542d63bd4
--- /dev/null
+++ b/docker-compose-mac.yml
@@ -0,0 +1,32 @@
+version: "3.3"
+services:
+ postgresql:
+ ports:
+ - "5432:5432"
+ image: "postgres:14-alpine"
+ environment:
+ - POSTGRES_PASSWORD=postgres
+ mariadb:
+ ports:
+ - "3307:3306"
+ image: "mariadb:10.9"
+ environment:
+ - MYSQL_ALLOW_EMPTY_PASSWORD=yes
+ - MYSQL_DATABASE=graph
+ mysql:
+ ports:
+ - "3306:3306"
+ image: "mysql:5.7"
+ environment:
+ - MYSQL_ALLOW_EMPTY_PASSWORD=yes
+ - MYSQL_DATABASE=graph
+ mysql8:
+ ports:
+ - "3308:3306"
+ image: "mysql:8"
+ environment:
+ - MYSQL_ALLOW_EMPTY_PASSWORD=yes
+ - MYSQL_DATABASE=graph
+
+
+
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000000..94ae3cdf3e
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,32 @@
+version: "3.3"
+services:
+ postgresql:
+ ports:
+ - "127.0.0.1:5432:5432"
+ image: "postgres:14-alpine"
+ environment:
+ - POSTGRES_PASSWORD=postgres
+ mariadb:
+ ports:
+ - "127.0.0.1:3307:3306"
+ image: "mariadb:10.9"
+ environment:
+ - MYSQL_ALLOW_EMPTY_PASSWORD=yes
+ - MYSQL_DATABASE=graph
+ mysql:
+ ports:
+ - "127.0.0.1:3306:3306"
+ image: "mysql:5.7"
+ environment:
+ - MYSQL_ALLOW_EMPTY_PASSWORD=yes
+ - MYSQL_DATABASE=graph
+ mysql8:
+ ports:
+ - "127.0.0.1:3308:3306"
+ image: "mysql:8"
+ environment:
+ - MYSQL_ALLOW_EMPTY_PASSWORD=yes
+ - MYSQL_DATABASE=graph
+
+
+
diff --git a/docs/contributing/contributing.md b/docs/contributing/contributing.md
new file mode 100644
index 0000000000..b53f66837e
--- /dev/null
+++ b/docs/contributing/contributing.md
@@ -0,0 +1,3 @@
+# Contributing
+
+Details coming soon.
diff --git a/docs/contributing/documentation-style-guide.md b/docs/contributing/documentation-style-guide.md
new file mode 100644
index 0000000000..fbb6c09891
--- /dev/null
+++ b/docs/contributing/documentation-style-guide.md
@@ -0,0 +1,238 @@
+---
+credits: https://github.com/fastify/fastify/blob/main/docs/Guides/Style-Guide.md
+---
+
+# Documentation Style Guide
+
+Welcome to the *Platformatic Documentation Style Guide*. This guide provides
+a conventional writing style for contributors writing developer documentation for
+our Open Source framework. Each topic is precise and well explained to help you write
+documentation that users can easily understand and implement.
+
+## Who is this guide for?
+
+This guide is for anyone who loves to build with Platformatic or wants to contribute
+to our documentation. You do not need to be an expert in writing technical
+documentation. This guide is here to help you.
+
+Visit the [CONTRIBUTING.md](https://github.com/platformatic/platformatic/blob/main/CONTRIBUTING.md)
+file on GitHub to join our Open Source community.
+
+## Before you write
+
+You should have a basic understanding of:
+
+* JavaScript
+* Node.js
+* Git
+* GitHub
+* Markdown
+* HTTP
+* NPM
+
+### Consider your Audience
+
+Before you start writing, think about your audience. In this case, your audience
+should already know HTTP, JavaScript, NPM, and Node.js. It is necessary to keep
+your readers in mind because they are the ones consuming your content. You want
+to give as much useful information as possible. Consider the vital things they
+need to know and how they can understand them. Use words and references that
+readers can relate to easily. Ask the community for feedback; it can help
+you write better documentation that focuses on the user and what you want to
+achieve.
+
+### Get straight to the point
+
+Give your readers a clear and precise action to take. Start with what is most
+important. This way, you can help them find what they need faster. Most
+readers tend to read the first content on a page, and many will not scroll
+further.
+
+**Example**
+
+Less like this: Colons are very important to register a parametric path. It lets
+the framework know there is a new parameter created. You can place the colon
+before the parameter name so the parametric path can be created.
+
+More Like this: To register a parametric path, put a colon before the parameter
+name. Using a colon lets the framework know it is a parametric path and not a
+static path.
+
+### Images and video should enhance the written documentation
+
+
+Images and video should only be added if they complement the written
+documentation, for example to help the reader form a clearer mental model of a
+concept or pattern.
+
+Images can be directly embedded, but videos should be included by linking to an
+external site, such as YouTube. You can add links by using
+`[Title](https://www.websitename.com)` in the Markdown.
+
+
+
+
+### Avoid plagiarism
+
+Make sure you avoid copying other people's work. Keep it as original as
+possible. You can learn from what others have done and reference the source
+if you use a particular quote from their work.
+
+
+## Word Choice
+
+There are a few things you should use and avoid when writing your documentation
+to improve readability and make it neat, direct, and
+clean.
+
+
+### When to use the second person "you" as the pronoun
+
+When writing articles or guides, your content should address readers directly in
+the second person ("you"). This makes it easier to give them
+direct instructions on what to do for a particular topic. To see an example, visit
+the [Quick Start Guide](../getting-started/quick-start-guide.md).
+
+**Example**
+
+Less like this: we can use the following plugins.
+
+More like this: You can use the following plugins.
+
+> According to [Wikipedia](#), ***You*** is usually a second person pronoun.
+> It is also used to refer to an indeterminate person, as a more common alternative
+> to a very formal indefinite pronoun.
+
+### When to avoid the second person "you" as the pronoun
+
+One of the main rules of formal writing such as reference documentation, or API
+documentation, is to avoid the second person ("you") or directly addressing the
+reader.
+
+**Example**
+
+Less like this: You can use the following recommendation as an example.
+
+More like this: As an example, the following recommendations should be
+referenced.
+
+To view a live example, refer to the [Configuration](../reference/configuration.md)
+reference document.
+
+
+### Avoid using contractions
+
+Contractions are shortened forms of words or word combinations,
+e.g. using "don't" instead of "do not". Avoid contractions to provide a more
+formal tone.
+
+### Avoid using condescending terms
+
+Condescending terms are words that include:
+
+* Just
+* Easy
+* Simply
+* Basically
+* Obviously
+
+The reader may not find it easy to use Platformatic; avoid words that make a
+task sound simpler than it is, or that could come across as offensive or
+insensitive. Not everyone who reads the documentation has the same level of
+understanding.
+
+### Starting with a verb
+
+Mostly start your description with a verb, which makes it simple and precise for
+the reader to follow. Prefer using present tense because it is easier to read
+and understand than the past or future tense.
+
+**Example**
+
+ Less like this: There is a need for Node.js to be installed before you can be
+ able to use Platformatic.
+
+ More like this: Install Node.js to make use of Platformatic.
+
+### Grammatical moods
+
+Grammatical moods are a great way to express your writing. Avoid sounding too
+bossy while making a direct statement. Know when to switch between indicative,
+imperative, and subjunctive moods.
+
+
+**Indicative** - Use when making a factual statement or question.
+
+Example: Since there is no testing framework available, "Platformatic recommends ways
+to write tests".
+
+**Imperative** - Use when giving instructions, actions, commands, or when you
+write your headings.
+
+Example: Install dependencies before starting development.
+
+
+**Subjunctive** - Use when making suggestions, hypotheses, or non-factual
+statements.
+
+Example: Reading the documentation on our website is recommended to get
+comprehensive knowledge of the framework.
+
+### Use **active** voice instead of **passive**
+
+Using active voice is a more compact and direct way of conveying your
+documentation.
+
+**Example**
+
+
+Passive: The node dependencies and packages are installed by npm.
+
+Active: npm installs packages and node dependencies.
+
+## Writing Style
+
+### Documentation titles
+
+When creating a new guide, API, or reference in the `/docs/` directory, use
+short titles that best describe the topic of your documentation. Name your files
+in kebab-case and avoid camelCase or PascalCase. To learn more about kebab-case you
+can visit this Medium article on [Case
+Styles](https://medium.com/better-programming/string-case-styles-camel-pascal-snake-and-kebab-case-981407998841).
+
+**Examples**:
+
+* `hook-and-plugins.md`
+* `adding-test-plugins.md`
+* `removing-requests.md`
+
+### Hyperlinks
+
+Hyperlinks should have a clear title describing what they reference. Here is how
+your hyperlinks should look:
+
+```md
+// Add a clear & brief description
+[Fastify Plugins](https://www.fastify.io/docs/latest/Plugins/)
+
+// Avoid: incomplete description
+[Fastify](https://www.fastify.io/docs/latest/Plugins/)
+
+// Avoid: adding the title in the link brackets
+[](https://www.fastify.io/docs/latest/Plugins/ "fastify plugin")
+
+// Avoid: empty title
+[](https://www.fastify.io/docs/latest/Plugins/)
+
+// Avoid: linking localhost URLs instead of using code strings (``)
+[http://localhost:3000/](http://localhost:3000/)
+```
+
+Include as many essential references as possible in your documentation, but
+avoid an excessive number of links when writing for beginners, as they can be distracting.
diff --git a/docs/getting-started/architecture.md b/docs/getting-started/architecture.md
new file mode 100644
index 0000000000..6974c22547
--- /dev/null
+++ b/docs/getting-started/architecture.md
@@ -0,0 +1,28 @@
+# Architecture
+
+Platformatic is a collection of Open Source tools designed to eliminate friction
+in backend development. The first of those tools is Platformatic DB, which is developed
+as `@platformatic/db`.
+
+## Platformatic DB
+
+Platformatic DB can expose a SQL database by dynamically mapping it to REST/OpenAPI
+and GraphQL endpoints. It supports a limited subset of the SQL query language, but
+also allows developers to add their own custom routes and resolvers.
+
+![Platformatic DB Architecture](./platformatic-architecture.png)
+
+Platformatic DB is composed of a few key libraries:
+
+1. `@platformatic/sql-mapper` - follows the [Data Mapper pattern](https://en.wikipedia.org/wiki/Data_mapper_pattern) to build an API on top of a SQL database.
+ Internally it uses the [`@databases` project](https://www.atdatabases.org/).
+1. `@platformatic/sql-openapi` - uses `sql-mapper` to create a series of REST routes and matching OpenAPI definitions.
+ Internally it uses [`@fastify/swagger`](https://github.com/fastify/fastify-swagger).
+1. `@platformatic/sql-graphql` - uses `sql-mapper` to create a GraphQL endpoint and schema. `sql-graphql` also supports Federation.
+ Internally it uses [`mercurius`](https://github.com/mercuriusjs/mercurius).
+
+Platformatic DB allows you to load a [Fastify plugin](https://www.fastify.io/docs/latest/Reference/Plugins/) during server startup that contains your own application-specific code.
+The plugin can add more routes or resolvers — these will automatically be shown in the OpenAPI and GraphQL schemas.
+
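+As an illustrative sketch (mirroring the demo plugin shipped in this repository, not a
+prescribed implementation), such a plugin might look like this:
+
+```javascript
+'use strict'
+
+// Hypothetical plugin.js, referenced from the `plugin` section of platformatic.db.json
+module.exports = async function (app) {
+  // Add a custom REST route next to the generated ones
+  app.get('/hello', async function () {
+    return { message: 'Hello World!' }
+  })
+
+  // Extend the generated GraphQL schema with a custom query and resolver
+  app.graphql.extendSchema(`
+    extend type Query {
+      hello: String
+    }
+  `)
+  app.graphql.defineResolvers({
+    Query: {
+      hello: () => 'Hello World!'
+    }
+  })
+}
+```
+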
+SQL database migrations are also supported. They're implemented internally with the [`postgrator`](https://www.npmjs.com/package/postgrator) library.
+
diff --git a/docs/getting-started/movie-quotes-app-tutorial.md b/docs/getting-started/movie-quotes-app-tutorial.md
new file mode 100644
index 0000000000..04cb64c874
--- /dev/null
+++ b/docs/getting-started/movie-quotes-app-tutorial.md
@@ -0,0 +1,1888 @@
+# Movie Quotes App Tutorial
+
+This tutorial will help you learn how to build a full stack application on top
+of Platformatic DB. We're going to build an application that allows us to
+save our favourite movie quotes. We'll also be building in custom API functionality
+that allows for some neat user interaction on our frontend.
+
+You can find the complete code for the application that we're going to build
+[on GitHub](https://github.com/platformatic/tutorial-movie-quotes-app).
+
+:::note
+
+We'll be building the frontend of our application with the [Astro](https://astro.build/)
+framework, but the GraphQL API integration steps that we're going to cover can
+be applied with most frontend frameworks.
+
+:::
+
+## What we're going to cover
+
+In this tutorial we'll learn how to:
+
+- Create a Platformatic API
+- Apply database migrations
+- Create relationships between our API entities
+- Populate our database tables
+- Build a frontend application that integrates with our GraphQL API
+- Extend our API with custom functionality
+- Enable CORS on our Platformatic API
+
+## Prerequisites
+
+To follow along with this tutorial you'll need to have these things installed:
+
+- [Node.js](https://nodejs.org/) >= v16.17.0 or >= v18.8.0
+- [npm](https://docs.npmjs.com/cli/) v7 or later
+- A code editor, for example [Visual Studio Code](https://code.visualstudio.com/)
+
+You'll also need to have some experience with JavaScript, and be comfortable with
+running commands in a terminal.
+
+## Build the backend
+
+### Create a Platformatic API
+
+First, let's create our project directory:
+
+```bash
+mkdir -p tutorial-movie-quotes-app/apps/movie-quotes-api/
+
+cd tutorial-movie-quotes-app/apps/movie-quotes-api/
+```
+
+Then let's create a `package.json` file:
+
+```bash
+npm init --yes
+```
+
+Now we can install the [platformatic](https://www.npmjs.com/package/platformatic)
+CLI as a dependency:
+
+```bash
+npm install platformatic
+```
+
+Let's also add some npm run scripts for convenience:
+
+```bash
+npm pkg set scripts.start="platformatic db start"
+
+npm pkg set scripts.dev="npm start"
+```
+
+Now we're going to configure our API. Let's create our Platformatic configuration
+file, **`platformatic.db.json`**:
+
+```json
+{
+ "server": {
+ "logger": {
+ "level": "{PLT_SERVER_LOGGER_LEVEL}"
+ },
+ "hostname": "{PLT_SERVER_HOSTNAME}",
+ "port": "{PORT}"
+ },
+ "core": {
+ "connectionString": "{DATABASE_URL}"
+ },
+ "migrations": {
+ "dir": "./migrations"
+ }
+}
+```
+
+Now we'll create a **`.env`** file with settings for our configuration to use:
+
+```
+PORT=3042
+PLT_SERVER_HOSTNAME=127.0.0.1
+PLT_SERVER_LOGGER_LEVEL=info
+DATABASE_URL=sqlite://./movie-quotes.sqlite
+```
+
+:::info
+
+Take a look at the [Configuration reference](/reference/configuration.md)
+to see all the supported configuration settings.
+
+:::
+
+### Define the database schema
+
+Let's create a new directory to store our migration files:
+
+```bash
+mkdir migrations
+```
+
+Then we'll create a migration file named **`001.do.sql`** in the **`migrations`**
+directory:
+
+```sql
+CREATE TABLE quotes (
+ id INTEGER PRIMARY KEY,
+ quote TEXT NOT NULL,
+ said_by VARCHAR(255) NOT NULL,
+ created_at DATETIME DEFAULT CURRENT_TIMESTAMP
+);
+```
+
+Let's also create a `.gitignore` file so that we avoid accidentally committing our
+SQLite database:
+
+```bash
+echo '*.sqlite' > .gitignore
+```
+
+Now we can start the Platformatic DB server:
+
+```bash
+npm run dev
+```
+
+Our Platformatic DB server should start, and we'll see messages like these:
+
+```
+[11:26:48.772] INFO (15235): running 001.do.sql
+[11:26:48.864] INFO (15235): server listening
+ url: "http://127.0.0.1:3042"
+```
+
+Let's open a new terminal and make a request to our server's REST API that
+creates a new quote:
+
+```bash
+curl --request POST --header "Content-Type: application/json" \
+ -d "{ \"quote\": \"Toto, I've got a feeling we're not in Kansas anymore.\", \"saidBy\": \"Dorothy Gale\" }" \
+ http://localhost:3042/quotes
+```
+
+We should receive a response like this from the API:
+
+```json
+{"id":1,"quote":"Toto, I've got a feeling we're not in Kansas anymore.","saidBy":"Dorothy Gale","createdAt":"2022-09-13 10:39:35"}
+```
+
+### Create an entity relationship
+
+Now let's create a migration file named **`002.do.sql`** in the **`migrations`**
+directory:
+
+```sql
+CREATE TABLE movies (
+ id INTEGER PRIMARY KEY,
+ name TEXT NOT NULL UNIQUE
+);
+
+-- Add a movie_id column to quotes that references movies.id
+ALTER TABLE quotes ADD COLUMN movie_id INTEGER REFERENCES movies(id);
+```
+
+This SQL will create a new `movies` database table and also add a `movie_id`
+column to the `quotes` table. This will allow us to store movie data in the
+`movies` table and then reference them by ID in our `quotes` table.
+
+Let's stop the Platformatic DB server with `Ctrl + C`, and then start it again:
+
+```bash
+npm run dev
+```
+
+The new migration should be automatically applied and we'll see the log message
+`running 002.do.sql`.
+
+Our Platformatic DB server also provides a GraphQL API. Let's open up the GraphiQL
+application in our web browser:
+
+> http://localhost:3042/graphiql
+
+Now let's run this query with GraphiQL to add the movie for the quote that we
+added earlier:
+
+```graphql
+mutation {
+ saveMovie(input: { name: "The Wizard of Oz" }) {
+ id
+ }
+}
+```
+
+We should receive a response like this from the API:
+
+```json
+{
+ "data": {
+ "saveMovie": {
+ "id": "1"
+ }
+ }
+}
+```
+
+Now we can update our quote to reference the movie:
+
+```graphql
+mutation {
+ saveQuote(input: { id: 1, movieId: 1 }) {
+ id
+ quote
+ saidBy
+ createdAt
+ movie {
+ id
+ name
+ }
+ }
+}
+```
+
+We should receive a response like this from the API:
+
+```json
+{
+ "data": {
+ "saveQuote": {
+ "id": "1",
+ "quote": "Toto, I've got a feeling we're not in Kansas anymore.",
+ "saidBy": "Dorothy Gale",
+ "movie": {
+ "id": "1",
+ "name": "The Wizard of Oz"
+ }
+ }
+ }
+}
+```
+
+Our Platformatic DB server has automatically identified the relationship
+between our `quotes` and `movies` database tables. This allows us to make
+GraphQL queries that retrieve quotes and their associated movies at the same
+time. For example, to retrieve all quotes from our database we can run:
+
+```graphql
+query {
+ quotes {
+ id
+ quote
+ saidBy
+ createdAt
+ movie {
+ id
+ name
+ }
+ }
+}
+```
+
+To view the GraphQL schema that's generated for our API by Platformatic DB,
+we can run this command in our terminal:
+
+```bash
+npx platformatic db schema graphql
+```
+
+The GraphQL schema shows all of the queries and mutations that we can run
+against our GraphQL API, as well as the types of data that it expects as input.
+
+### Populate the database
+
+Our movie quotes database is looking a little empty! We're going to create a
+"seed" script to populate it with some data.
+
+Let's create a new file named **`seed.js`** and copy and paste in this code:
+
+```javascript
+'use strict'
+
+const quotes = [
+ {
+ quote: "Toto, I've got a feeling we're not in Kansas anymore.",
+ saidBy: 'Dorothy Gale',
+ movie: 'The Wizard of Oz'
+ },
+ {
+ quote: "You're gonna need a bigger boat.",
+ saidBy: 'Martin Brody',
+ movie: 'Jaws'
+ },
+ {
+ quote: 'May the Force be with you.',
+ saidBy: 'Han Solo',
+ movie: 'Star Wars'
+ },
+ {
+ quote: 'I have always depended on the kindness of strangers.',
+ saidBy: 'Blanche DuBois',
+ movie: 'A Streetcar Named Desire'
+ }
+]
+
+module.exports = async function ({ entities, db, sql }) {
+ for (const values of quotes) {
+ const movie = await entities.movie.save({ input: { name: values.movie } })
+
+ console.log('Created movie:', movie)
+
+ const quote = {
+ quote: values.quote,
+ saidBy: values.saidBy,
+ movieId: movie.id
+ }
+
+ await entities.quote.save({ input: quote })
+
+ console.log('Created quote:', quote)
+ }
+}
+```
+
+
+
+:::info
+Take a look at the [Seed a Database](/guides/seed-a-database.md) guide to learn more
+about how database seeding works with Platformatic DB.
+:::
+
+Let's stop our Platformatic DB server and remove our SQLite database:
+
+```bash
+rm movie-quotes.sqlite
+```
+
+Now let's create a fresh SQLite database by running our migrations:
+
+```bash
+npx platformatic db migrate
+```
+
+And then let's populate the `quotes` and `movies` tables with data using our
+seed script:
+
+```bash
+npx platformatic db seed seed.js
+```
+
+Our database is full of data, but we don't have anywhere to display it. It's
+time to start building our frontend!
+
+## Build the frontend
+
+We're now going to use [Astro](https://astro.build/) to build our frontend
+application. If you've not used it before, you might find it helpful
+to read [this overview](https://docs.astro.build/en/core-concepts/astro-components/)
+on how Astro components are structured.
+
+:::tip
+Astro provides some extensions and tools to help improve your
+[Editor Setup](https://docs.astro.build/en/editor-setup/) when building an
+Astro application.
+:::
+
+### Create an Astro application
+
+In the root of our project, let's create a new directory for our frontend
+application:
+
+```bash
+mkdir -p apps/movie-quotes-frontend/
+
+cd apps/movie-quotes-frontend/
+```
+
+And then we'll create a new `package.json` file:
+
+```bash
+npm init --yes
+```
+
+Now we can install [astro](https://www.npmjs.com/package/astro) as a dependency:
+
+```bash
+npm install --save-dev astro
+```
+
+Then let's set up some npm run scripts for convenience:
+
+```bash
+npm pkg delete scripts.test
+npm pkg set scripts.dev="astro dev --port 3000"
+npm pkg set scripts.start="astro dev --port 3000"
+npm pkg set scripts.build="astro build"
+```
+
+Now we'll create our Astro configuration file, **`astro.config.mjs`** and
+copy and paste in this code:
+
+```javascript
+import { defineConfig } from 'astro/config'
+
+// https://astro.build/config
+export default defineConfig({
+ output: 'server'
+})
+```
+
+And we'll also create a **`tsconfig.json`** file and add in this configuration:
+
+```json
+{
+ "extends": "astro/tsconfigs/base",
+ "compilerOptions": {
+ "types": ["astro/client"]
+ }
+}
+```
+
+> We won't be writing our frontend application with TypeScript, but adding this
+> configuration file allows the Astro tooling in our code editor to provide
+> better autocompletion and type checking while we work. See the
+> [Astro TypeScript guide](https://docs.astro.build/en/guides/typescript/) for
+> more details.
+
+Now let's create the directories where we'll be adding the components for our
+frontend application:
+
+```bash
+mkdir -p src/pages src/layouts src/components
+```
+
+And inside the **`src/pages`** directory let's create our first page, **`index.astro`**:
+
+```astro
+<h1>Movie Quotes</h1>
+```
+
+Now we can start up the Astro development server with:
+
+```bash
+npm run dev
+```
+
+And then load up the frontend in our browser at [http://localhost:3000](http://localhost:3000).
+
+### Create a layout
+
+In the **`src/layouts`** directory, let's create a new file named **`Layout.astro`**:
+
+```astro
+---
+export interface Props {
+ title: string;
+ page?: string;
+}
+const { title, page } = Astro.props;
+---
+
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8" />
+    <title>{title}</title>
+  </head>
+  <body>
+    <nav>
+      <a href="/">All quotes</a>
+    </nav>
+    <section>
+      <slot />
+    </section>
+  </body>
+</html>
+```
+
+The code between the `---` is known as the component script, and the
+code after that is the component template. The component script will *only* run
+on the server side when a web browser makes a request. The component template
+is rendered server side and sent back as an HTML response to the web browser.
+
+Now we'll update **`src/pages/index.astro`** to use this `Layout` component.
+Let's replace the contents of **`src/pages/index.astro`** with this code:
+
+```astro
+---
+import Layout from '../layouts/Layout.astro';
+---
+
+<Layout title="All quotes" page="listing">
+  <main>
+    <p>We'll list all the movie quotes here.</p>
+  </main>
+</Layout>
+```
+
+### Integrate the urql GraphQL client
+
+We're now going to integrate the [URQL](https://formidable.com/open-source/urql/)
+GraphQL client into our frontend application. This will allow us to run queries
+and mutations against our Platformatic GraphQL API.
+
+Let's first install [@urql/core](https://www.npmjs.com/package/@urql/core) and
+[graphql](https://www.npmjs.com/package/graphql) as project dependencies:
+
+```bash
+npm install @urql/core graphql
+```
+
+Then let's create a new **`.env`** file and add this configuration:
+
+```
+PUBLIC_GRAPHQL_API_ENDPOINT=http://127.0.0.1:3042/graphql
+```
+
+The `PUBLIC_` prefix matters here: Astro only exposes environment variables
+with this prefix to client side code via `import.meta.env`, and we'll need the
+API endpoint in the browser later on.
+
+Now we'll create a new directory:
+
+```bash
+mkdir src/lib
+```
+
+And then create a new file named **`src/lib/quotes-api.js`**. In that file we'll
+create a new URQL client:
+
+```javascript
+// src/lib/quotes-api.js
+
+import { createClient } from '@urql/core';
+
+const graphqlClient = createClient({
+ url: import.meta.env.PUBLIC_GRAPHQL_API_ENDPOINT,
+ requestPolicy: "network-only"
+});
+```
+
+We'll also add a thin wrapper around the client that does some basic error
+handling for us:
+
+```javascript
+// src/lib/quotes-api.js
+
+async function graphqlClientWrapper(method, gqlQuery, queryVariables = {}) {
+ const queryResult = await graphqlClient[method](
+ gqlQuery,
+ queryVariables
+ ).toPromise();
+
+ if (queryResult.error) {
+ console.error("GraphQL error:", queryResult.error);
+ }
+
+ return {
+ data: queryResult.data,
+ error: queryResult.error,
+ };
+}
+
+export const quotesApi = {
+ async query(gqlQuery, queryVariables = {}) {
+ return await graphqlClientWrapper("query", gqlQuery, queryVariables);
+ },
+ async mutation(gqlQuery, queryVariables = {}) {
+ return await graphqlClientWrapper("mutation", gqlQuery, queryVariables);
+ }
+}
+```
+
+And lastly, we'll export `gql` from the `@urql/core` package, to make it
+simpler for us to write GraphQL queries in our pages:
+
+```javascript
+// src/lib/quotes-api.js
+
+export { gql } from "@urql/core";
+```
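+
+Before we use the wrapper in a page, here's a rough sketch of how it's meant to
+be called from a component script (the query itself is just an illustration):
+
+```javascript
+import { quotesApi, gql } from '../lib/quotes-api';
+
+// Both methods resolve to { data, error }. If the request failed, `data` will
+// be undefined and the error will already have been logged by the wrapper.
+const { data, error } = await quotesApi.query(gql`
+  query {
+    quotes {
+      id
+      quote
+    }
+  }
+`);
+```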
+
+Stop the Astro dev server and then start it again so it picks up the **`.env`**
+file:
+
+```bash
+npm run dev
+```
+
+### Display all quotes
+
+Let's display all the movie quotes in **`src/pages/index.astro`**.
+
+First, we'll update the component script at the top and add in a query to
+our GraphQL API for quotes:
+
+```astro
+---
+import Layout from '../layouts/Layout.astro';
+// highlight-start
+import { quotesApi, gql } from '../lib/quotes-api';
+
+const { data } = await quotesApi.query(gql`
+ query {
+ quotes {
+ id
+ quote
+ saidBy
+ createdAt
+ movie {
+ id
+ name
+ }
+ }
+ }
+`);
+
+const quotes = data?.quotes || [];
+// highlight-end
+---
+```
+
+Then we'll update the component template to display the quotes:
+
+```astro
+<Layout title="All quotes" page="listing">
+  <main>
+// highlight-start
+    {quotes.length > 0 ? quotes.map((quote) => (
+      <div>
+        <blockquote>
+          <p>{quote.quote}</p>
+        </blockquote>
+        <p>
+          — {quote.saidBy}, {quote.movie?.name}
+        </p>
+        <div>
+          <span>Added {new Date(quote.createdAt).toUTCString()}</span>
+        </div>
+      </div>
+    )) : (
+      <p>No movie quotes have been added.</p>
+    )}
+// highlight-end
+  </main>
+</Layout>
+```
+
+And just like that, we have all the movie quotes displaying on the page!
+
+### Integrate Tailwind for styling
+
+Automatically add the [@astrojs/tailwind integration](https://docs.astro.build/en/guides/integrations-guide/tailwind/):
+
+```bash
+npx astro add tailwind --yes
+```
+
+Add the Tailwind CSS [Typography](https://tailwindcss.com/docs/typography-plugin)
+and [Forms](https://github.com/tailwindlabs/tailwindcss-forms) plugins:
+
+```bash
+npm install --save-dev @tailwindcss/typography @tailwindcss/forms
+```
+
+Import the plugins in our Tailwind configuration file:
+
+```javascript
+// tailwind.config.cjs
+
+/** @type {import('tailwindcss').Config} */
+module.exports = {
+ content: ['./src/**/*.{astro,html,js,jsx,md,mdx,svelte,ts,tsx,vue}'],
+ theme: {
+ extend: {}
+ },
+// highlight-start
+ plugins: [
+ require('@tailwindcss/forms'),
+ require('@tailwindcss/typography')
+ ]
+// highlight-end
+}
+```
+
+Stop the Astro dev server and then start it again so it picks up all the
+configuration changes:
+
+```bash
+npm run dev
+```
+
+### Style the listing page
+
+To style our listing page, let's add CSS classes to the component template in
+**`src/layouts/Layout.astro`**:
+
+```astro
+---
+export interface Props {
+ title: string;
+ page?: string;
+}
+
+const { title, page } = Astro.props;
+
+// highlight-next-line
+const navActiveClasses = "font-bold bg-yellow-400 no-underline";
+---
+
+
+
+
+
+
+ {title}
+
+// highlight-next-line
+
+// highlight-next-line
+
+// highlight-next-line
+
+// highlight-next-line
+ All quotes
+
+// highlight-next-line
+
+
+
+```
+
+Then let's add CSS classes to the component template in **`src/pages/index.astro`**:
+
+```astro
+
+
+ {quotes.length > 0 ? quotes.map((quote) => (
+// highlight-next-line
+
+// highlight-next-line
+
+// highlight-next-line
+ {quote.quote}
+
+// highlight-next-line
+
+ — {quote.saidBy}, {quote.movie?.name}
+
+// highlight-next-line
+
+// highlight-next-line
+ Added {new Date(quote.createdAt).toUTCString()}
+
+
+ )) : (
+ No movie quotes have been added.
+ )}
+
+
+```
+
+Our listing page is now looking much more user friendly!
+
+### Create an add quote page
+
+We're going to create a form component that we can use for adding and editing
+quotes.
+
+First let's create a new component file, **`src/components/QuoteForm.astro`**:
+
+```astro
+---
+export interface QuoteFormData {
+ id?: number;
+ quote?: string;
+ saidBy?: string;
+ movie?: string;
+}
+
+export interface Props {
+ action: string;
+ values?: QuoteFormData;
+ saveError?: boolean;
+ loadError?: boolean;
+ submitLabel: string;
+}
+
+const { action, values = {}, saveError, loadError, submitLabel } = Astro.props;
+---
+
+{saveError && <p>There was an error saving the quote. Please try again.</p>}
+{loadError && <p>There was an error loading the quote. Please try again.</p>}
+
+<form method="post" action={action}>
+  <div>
+    <label for="quote">Quote</label>
+    <textarea id="quote" name="quote" required>{values.quote}</textarea>
+  </div>
+  <div>
+    <label for="said-by">Said by</label>
+    <input id="said-by" name="saidBy" type="text" value={values.saidBy} required />
+  </div>
+  <div>
+    <label for="movie">Movie</label>
+    <input id="movie" name="movie" type="text" value={values.movie} required />
+  </div>
+  <button type="submit">{submitLabel}</button>
+</form>
+```
+
+Create a new page file, **`src/pages/add.astro`**:
+
+```astro
+---
+import Layout from '../layouts/Layout.astro';
+import QuoteForm from '../components/QuoteForm.astro';
+import type { QuoteFormData } from '../components/QuoteForm.astro';
+
+let formData: QuoteFormData = {};
+let saveError = false;
+---
+
+<Layout title="Add a quote">
+  <main>
+    <h2>Add a quote</h2>
+    <QuoteForm action="/add" values={formData} saveError={saveError} submitLabel="Add quote" />
+  </main>
+</Layout>
+```
+
+And now let's add a link to this page in the layout navigation in **`src/layouts/Layout.astro`**:
+
+```astro
+<nav>
+  <a href="/">All quotes</a>
+// highlight-next-line
+  <a href="/add">Add a quote</a>
+</nav>
+```
+
+### Send form data to the API
+
+When a user submits the add quote form we want to send the form data to our API
+so it can then save it to our database. Let's wire that up now.
+
+First we're going to create a new file, **`src/lib/request-utils.js`**:
+
+```javascript
+export function isPostRequest (request) {
+ return request.method === 'POST'
+}
+
+export async function getFormData (request) {
+ const formData = await request.formData()
+
+ return Object.fromEntries(formData.entries())
+}
+```
+
+
+
+Then let's update the component script in **`src/pages/add.astro`** to use
+these new request utility functions:
+
+```astro
+---
+import Layout from '../layouts/Layout.astro';
+import QuoteForm from '../components/QuoteForm.astro';
+import type { QuoteFormData } from '../components/QuoteForm.astro';
+
+// highlight-next-line
+import { isPostRequest, getFormData } from '../lib/request-utils';
+
+let formData: QuoteFormData = {};
+let saveError = false;
+
+// highlight-start
+if (isPostRequest(Astro.request)) {
+ formData = await getFormData(Astro.request);
+}
+// highlight-end
+---
+```
+
+
+
+When we create a new quote entity record via our API, we need to include a
+`movieId` field that references a movie entity record. This means that when a
+user submits the add quote form we need to:
+
+- Check if a movie entity record already exists with that movie name
+- Return the movie `id` if it does exist
+- If it doesn't exist, create a new movie entity record and return the movie ID
+
+Let's update the `import` statement at the top of **`src/lib/quotes-api.js`**:
+
+```diff
+-import { createClient } from '@urql/core'
++import { createClient, gql } from '@urql/core'
+```
+
+And then add a new method that will return a movie ID for us:
+
+```javascript
+async function getMovieId (movieName) {
+ movieName = movieName.trim()
+
+ let movieId = null
+
+ // Check if a movie already exists with the provided name.
+ const queryMoviesResult = await quotesApi.query(
+ gql`
+ query ($movieName: String!) {
+ movies(where: { name: { eq: $movieName } }) {
+ id
+ }
+ }
+ `,
+ { movieName }
+ )
+
+ if (queryMoviesResult.error) {
+ return null
+ }
+
+ const movieExists = queryMoviesResult.data?.movies.length === 1
+ if (movieExists) {
+ movieId = queryMoviesResult.data.movies[0].id
+ } else {
+ // Create a new movie entity record.
+ const saveMovieResult = await quotesApi.mutation(
+ gql`
+ mutation ($movieName: String!) {
+ saveMovie(input: { name: $movieName }) {
+ id
+ }
+ }
+ `,
+ { movieName }
+ )
+
+ if (saveMovieResult.error) {
+ return null
+ }
+
+ movieId = saveMovieResult.data?.saveMovie.id
+ }
+
+ return movieId
+}
+```
+
+And let's export it too:
+
+```javascript
+export const quotesApi = {
+ async query (gqlQuery, queryVariables = {}) {
+ return await graphqlClientWrapper('query', gqlQuery, queryVariables)
+ },
+ async mutation (gqlQuery, queryVariables = {}) {
+ return await graphqlClientWrapper('mutation', gqlQuery, queryVariables)
+ },
+// highlight-next-line
+ getMovieId
+}
+```
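+
+As a quick illustration of how the new helper behaves (the movie name here is
+arbitrary):
+
+```javascript
+import { quotesApi } from '../lib/quotes-api';
+
+// Resolves to the id of an existing "Jaws" movie record, creates a new movie
+// and returns its id if one doesn't exist yet, or returns null if an API call failed.
+const movieId = await quotesApi.getMovieId('Jaws');
+```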
+
+Now we can wire up the last parts in the **`src/pages/add.astro`** component
+script:
+
+```astro
+---
+import Layout from '../layouts/Layout.astro';
+import QuoteForm from '../components/QuoteForm.astro';
+import type { QuoteFormData } from '../components/QuoteForm.astro';
+
+// highlight-next-line
+import { quotesApi, gql } from '../lib/quotes-api';
+import { isPostRequest, getFormData } from '../lib/request-utils';
+
+let formData: QuoteFormData = {};
+let saveError = false;
+
+if (isPostRequest(Astro.request)) {
+ formData = await getFormData(Astro.request);
+
+// highlight-start
+ const movieId = await quotesApi.getMovieId(formData.movie);
+
+ if (movieId) {
+ const quote = {
+ quote: formData.quote,
+ saidBy: formData.saidBy,
+ movieId,
+ };
+
+ const { error } = await quotesApi.mutation(gql`
+ mutation($quote: QuoteInput!) {
+ saveQuote(input: $quote) {
+ id
+ }
+ }
+ `, { quote });
+
+ if (!error) {
+ return Astro.redirect('/');
+ } else {
+ saveError = true;
+ }
+ } else {
+ saveError = true;
+ }
+// highlight-end
+}
+```
+
+
+
+### Add autosuggest for movies
+
+We can create a better experience for our users by autosuggesting the movie name
+when they're adding a new quote.
+
+Let's open up **`src/components/QuoteForm.astro`** and import our API helper methods
+in the component script:
+
+```astro
+import { quotesApi, gql } from '../lib/quotes-api.js';
+```
+
+Then let's add in a query to our GraphQL API for all movies:
+
+```astro
+const { data } = await quotesApi.query(gql`
+ query {
+ movies {
+ name
+ }
+ }
+`);
+
+const movies = data?.movies || [];
+```
+
+Now let's update the *Movie* field in the component template to use the
+array of movies that we've retrieved from the API:
+
+```astro
+<div>
+  <label for="movie">Movie</label>
+// highlight-start
+  <input list="movies" id="movie" name="movie" type="text" value={values.movie} required />
+  <datalist id="movies">
+    {movies.map(({ name }) => (
+      <option>{name}</option>
+    ))}
+  </datalist>
+// highlight-end
+</div>
+```
+
+
+
+### Create an edit quote page
+
+Let's create a new directory, **`src/pages/edit/`**:
+
+```bash
+mkdir src/pages/edit/
+```
+
+And inside of it, let's create a new page, **`[id].astro`**:
+
+```astro
+---
+import Layout from '../../layouts/Layout.astro';
+import QuoteForm, { QuoteFormData } from '../../components/QuoteForm.astro';
+
+const id = Number(Astro.params.id);
+
+let formValues: QuoteFormData = {};
+let loadError = false;
+let saveError = false;
+---
+
+<Layout title="Edit quote">
+  <main>
+    <h2>Edit quote</h2>
+    <QuoteForm action={`/edit/${id}`} values={formValues} saveError={saveError} loadError={loadError} submitLabel="Update quote" />
+  </main>
+</Layout>
+```
+
+You'll see that we're using the same `QuoteForm` component that our add quote
+page uses. Now we're going to wire up our edit page so that it can load an
+existing quote from our API and save changes back to the API when the form is
+submitted.
+
+In the **`[id].astro`** component script, let's add some code to take care of
+these tasks:
+
+```astro
+---
+import Layout from '../../layouts/Layout.astro';
+import QuoteForm, { QuoteFormData } from '../../components/QuoteForm.astro';
+
+// highlight-start
+import { quotesApi, gql } from '../../lib/quotes-api';
+import { isPostRequest, getFormData } from '../../lib/request-utils';
+// highlight-end
+
+const id = Number(Astro.params.id);
+
+let formValues: QuoteFormData = {};
+let loadError = false;
+let saveError = false;
+
+// highlight-start
+if (isPostRequest(Astro.request)) {
+ const formData = await getFormData(Astro.request);
+ formValues = formData;
+
+ const movieId = await quotesApi.getMovieId(formData.movie);
+
+ if (movieId) {
+ const quote = {
+ id,
+ quote: formData.quote,
+ saidBy: formData.saidBy,
+ movieId,
+ };
+
+ const { error } = await quotesApi.mutation(gql`
+ mutation($quote: QuoteInput!) {
+ saveQuote(input: $quote) {
+ id
+ }
+ }
+ `, { quote });
+
+ if (!error) {
+ return Astro.redirect('/');
+ } else {
+ saveError = true;
+ }
+ } else {
+ saveError = true;
+ }
+} else {
+ const { data } = await quotesApi.query(gql`
+ query($id: ID!) {
+ getQuoteById(id: $id) {
+ id
+ quote
+ saidBy
+ movie {
+ id
+ name
+ }
+ }
+ }
+ `, { id });
+
+ if (data?.getQuoteById) {
+ formValues = {
+ ...data.getQuoteById,
+ movie: data.getQuoteById.movie.name
+ };
+ } else {
+ loadError = true;
+ }
+}
+// highlight-end
+---
+```
+
+
+
+Load up [http://localhost:3000/edit/1](http://localhost:3000/edit/1) in your
+browser to test out the edit quote page.
+
+Now we're going to add edit links to the quotes listing page. Let's start by
+creating a new component **`src/components/QuoteActionEdit.astro`**:
+
+```astro
+---
+export interface Props {
+ id: number;
+}
+
+const { id } = Astro.props;
+---
+
+
+
+
+
+ Edit
+
+```
+
+Then let's import this component and use it in our listing page,
+**`src/pages/index.astro`**:
+
+```astro
+---
+import Layout from '../layouts/Layout.astro';
+// highlight-next-line
+import QuoteActionEdit from '../components/QuoteActionEdit.astro';
+import { quotesApi, gql } from '../lib/quotes-api';
+
+// ...
+---
+
+
+
+ {quotes.length > 0 ? quotes.map((quote) => (
+
+ ...
+
+// highlight-start
+
+
+
+ Added {new Date(quote.createdAt).toUTCString()}
+// highlight-end
+
+
+ )) : (
+ No movie quotes have been added.
+ )}
+
+
+```
+
+### Add delete quote functionality
+
+Our Movie Quotes app can create, retrieve and update quotes. Now we're going
+to implement the D in CRUD — delete!
+
+First let's create a new component, **`src/components/QuoteActionDelete.astro`**:
+
+```astro
+---
+export interface Props {
+ id: number;
+}
+
+const { id } = Astro.props;
+---
+
+
+
+
+
+ Delete
+
+
+```
+
+
+
+And then we'll drop it into our listing page, **`src/pages/index.astro`**:
+
+```astro
+---
+import Layout from '../layouts/Layout.astro';
+import QuoteActionEdit from '../components/QuoteActionEdit.astro';
+// highlight-next-line
+import QuoteActionDelete from '../components/QuoteActionDelete.astro';
+import { quotesApi, gql } from '../lib/quotes-api';
+
+// ...
+---
+
+
+
+ {quotes.length > 0 ? quotes.map((quote) => (
+
+ ...
+
+
+
+// highlight-next-line
+
+
+ Added {new Date(quote.createdAt).toUTCString()}
+
+
+...
+```
+
+At the moment when a delete form is submitted from our listing page, we get
+an Astro 404 page. Let's fix this by creating a new directory, **`src/pages/delete/`**:
+
+```bash
+mkdir src/pages/delete/
+```
+
+And inside of it, let's create a new page, **`[id].astro`**:
+
+```astro
+---
+import Layout from '../../layouts/Layout.astro';
+
+import { quotesApi, gql } from '../../lib/quotes-api';
+import { isPostRequest } from '../../lib/request-utils';
+
+if (isPostRequest(Astro.request)) {
+ const id = Number(Astro.params.id);
+
+ const { error } = await quotesApi.mutation(gql`
+ mutation($id: ID!) {
+ deleteQuotes(where: { id: { eq: $id }}) {
+ id
+ }
+ }
+ `, { id });
+
+ if (!error) {
+ return Astro.redirect('/');
+ }
+}
+---
+
+<Layout title="Delete quote">
+  <main>
+    <h2>Delete quote</h2>
+    <p>There was an error deleting the quote. Please try again.</p>
+  </main>
+</Layout>
+```
+
+
+
+Now if we click on a delete quote button on our listings page, it should call our
+GraphQL API to delete the quote. To make this a little more user friendly, let's
+add in a confirmation dialog so that users don't delete a quote by accident.
+
+
+
+
+Let's create a new directory, **`src/scripts/`**:
+
+```bash
+mkdir src/scripts/
+```
+
+And inside of that directory let's create a new file, **`quote-actions.js`**:
+
+```javascript
+// src/scripts/quote-actions.js
+
+export function confirmDeleteQuote (form) {
+ if (confirm('Are you sure you want to delete this quote?')) {
+ form.submit()
+ }
+}
+```
+
+Then we can pull it in as client side JavaScript on our listing page,
+**`src/pages/index.astro`**:
+
+```astro
+
+ ...
+
+
+
+```
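+
+As a minimal sketch, the client side part of that script could look something
+like this. It assumes each delete component renders a `<form>` element with a
+`delete-quote-form` class; that class name is an assumption, so adjust the
+selector to match your markup:
+
+```javascript
+// Client side script in src/pages/index.astro (sketch).
+import { confirmDeleteQuote } from '../scripts/quote-actions.js'
+
+addEventListener('DOMContentLoaded', () => {
+  document.querySelectorAll('.delete-quote-form').forEach((deleteForm) => {
+    deleteForm.addEventListener('submit', (event) => {
+      // Stop the form from submitting straight away and ask for confirmation first.
+      event.preventDefault()
+      confirmDeleteQuote(event.currentTarget)
+    })
+  })
+})
+```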
+
+
+
+## Build a "like" quote feature
+
+We've built all the basic CRUD (Create, Retrieve, Update & Delete) features
+into our application. Now let's build a feature so that users can interact
+and "like" their favourite movie quotes.
+
+To build this feature we're going to add custom functionality to our API
+and then add a new component, along with some client side JavaScript, to
+our frontend.
+
+### Create an API migration
+
+We're now going to work on the code for our API, under the **`apps/movie-quotes-api`**
+directory.
+
+First let's create a migration that adds a `likes` column to our `quotes`
+database table. We'll create a new migration file, **`migrations/003.do.sql`**:
+
+```sql
+ALTER TABLE quotes ADD COLUMN likes INTEGER default 0;
+```
+
+This migration will automatically be applied when we next start our Platformatic
+API.
+
+### Create an API plugin
+
+To add custom functionality to our Platformatic API, we need to create a
+[Fastify plugin](https://www.fastify.io/docs/latest/Reference/Plugins/) and
+update our API configuration to use it.
+
+Let's create a new file, **`plugin.js`**, and inside it we'll add the skeleton
+structure for our plugin:
+
+```javascript
+// plugin.js
+
+'use strict'
+
+module.exports = async function plugin (app) {
+ app.log.info('plugin loaded')
+}
+```
+
+Now let's register our plugin in our API configuration file, **`platformatic.db.json`**:
+
+```json
+{
+ ...
+ "migrations": {
+ "dir": "./migrations"
+// highlight-start
+ },
+ "plugin": {
+ "path": "./plugin.js"
+ }
+// highlight-end
+}
+```
+
+And then we'll start up our Platformatic API:
+
+```bash
+npm run dev
+```
+
+We should see log messages that tell us that our new migration has been
+applied and our plugin has been loaded:
+
+```
+[10:09:20.052] INFO (146270): running 003.do.sql
+[10:09:20.129] INFO (146270): plugin loaded
+[10:09:20.209] INFO (146270): server listening
+ url: "http://127.0.0.1:3042"
+```
+
+Now it's time to start adding some custom functionality inside our plugin.
+
+### Add a REST API route
+
+
+
+We're going to add a REST route to our API that increments the count of
+likes for a specific quote: `/quotes/:id/like`
+
+First let's add [fluent-json-schema](https://www.npmjs.com/package/fluent-json-schema) as a dependency for our API:
+
+```bash
+npm install fluent-json-schema
+```
+
+We'll use `fluent-json-schema` to help us generate a JSON Schema. We can then
+use this schema to validate the request path parameters for our route (`id`).
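+
+As a rough, standalone illustration of what `fluent-json-schema` produces (this
+isn't part of the plugin code):
+
+```javascript
+const S = require('fluent-json-schema')
+
+// Build a schema fluently, then call valueOf() to get a plain JSON Schema object.
+const params = S.object().prop('id', S.integer().minimum(1))
+
+console.log(params.valueOf())
+// Roughly: { type: 'object', properties: { id: { type: 'integer', minimum: 1 } } }
+```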
+
+Now let's add our REST API route in **`plugin.js`**:
+
+```javascript
+'use strict'
+
+// highlight-next-line
+const S = require('fluent-json-schema')
+
+module.exports = async function plugin (app) {
+ app.log.info('plugin loaded')
+
+ // This JSON Schema will validate the request path parameters.
+ // It reuses part of the schema that Platformatic DB has
+ // automatically generated for our Quote entity.
+// highlight-start
+ const schema = {
+ params: S.object().prop('id', app.getSchema('Quote').properties.id)
+ }
+
+ app.post('/quotes/:id/like', { schema }, async function (request, response) {
+ return {}
+ })
+// highlight-end
+}
+```
+
+We can now make a `POST` request to our new API route:
+
+```bash
+curl --request POST http://localhost:3042/quotes/1/like
+```
+
+:::info
+Learn more about how validation works in the
+[Fastify validation documentation](https://www.fastify.io/docs/latest/Reference/Validation-and-Serialization/).
+:::
+
+Our API route is currently returning an empty object (`{}`). Let's wire things
+up so that it increments the number of likes for the quote with the specified ID.
+To do this we'll add a new function inside of our plugin:
+
+```javascript
+module.exports = async function plugin (app) {
+ app.log.info('plugin loaded')
+
+// highlight-start
+ async function incrementQuoteLikes (id) {
+ const { db, sql } = app.platformatic
+
+ const result = await db.query(sql`
+ UPDATE quotes SET likes = likes + 1 WHERE id=${id} RETURNING likes
+ `)
+
+ return result[0]?.likes
+ }
+// highlight-end
+
+ // ...
+}
+```
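+
+For comparison, the same update could be written against the entities API that
+Platformatic DB generates. This is only a sketch: it needs a read followed by a
+write, which is why the single SQL statement above is the nicer option here.
+
+```javascript
+// Alternative sketch using app.platformatic.entities instead of raw SQL.
+async function incrementQuoteLikesWithEntities (id) {
+  const { entities } = app.platformatic
+
+  // Read the current value, then write back the incremented value.
+  const [quote] = await entities.quote.find({ where: { id: { eq: id } } })
+  const updatedQuote = await entities.quote.save({
+    input: { id, likes: quote.likes + 1 }
+  })
+
+  return updatedQuote.likes
+}
+```
+
+We'll stick with the SQL version for the rest of this tutorial.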
+
+And then we'll call that function in our route handler function:
+
+```javascript
+app.post('/quotes/:id/like', { schema }, async function (request, response) {
+// highlight-next-line
+ return { likes: await incrementQuoteLikes(request.params.id) }
+})
+```
+
+Now when we make a `POST` request to our API route:
+
+```bash
+curl --request POST http://localhost:3042/quotes/1/like
+```
+
+We should see that the `likes` value for the quote is incremented every time
+we make a request to the route.
+
+```json
+{"likes":1}
+```
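+
+If you'd rather exercise the route from Node.js than curl, a quick sketch using
+the built-in `fetch` available in Node.js 18 looks like this:
+
+```javascript
+// like-check.mjs
+// Run with: node like-check.mjs (assumes the API is running locally)
+const response = await fetch('http://127.0.0.1:3042/quotes/1/like', {
+  method: 'POST'
+})
+
+console.log(await response.json()) // e.g. { likes: 2 }
+```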
+
+
+
+### Add a GraphQL API mutation
+
+We can add a `likeQuote` mutation to our GraphQL API by reusing the
+`incrementQuoteLikes` function that we just created.
+
+Let's add this code at the end of our plugin, inside **`plugin.js`**:
+
+```javascript
+module.exports = async function plugin (app) {
+ // ...
+
+// highlight-start
+ app.graphql.extendSchema(`
+ extend type Mutation {
+ likeQuote(id: ID!): Int
+ }
+ `)
+
+ app.graphql.defineResolvers({
+ Mutation: {
+ likeQuote: async (_, { id }) => await incrementQuoteLikes(id)
+ }
+ })
+// highlight-end
+}
+```
+
+The code we've just added extends our API's GraphQL schema and defines
+a corresponding resolver for the `likeQuote` mutation.
+
+We can now load up GraphiQL in our web browser and try out our new `likeQuote`
+mutation with this GraphQL query:
+
+```graphql
+mutation {
+ likeQuote(id: 1)
+}
+```
+
+:::info
+Learn more about how to extend the GraphQL schema and define resolvers in the
+[Mercurius API documentation](https://mercurius.dev/#/docs/api/options).
+:::
+
+### Enable CORS on the API
+
+When we build "like" functionality into our frontend, we'll be making a client
+side HTTP request to our GraphQL API. Our backend API and our frontend are running
+on different origins, so we need to configure our API to allow requests from
+the frontend. This is known as Cross-Origin Resource Sharing (CORS).
+
+To enable CORS on our API, let's open up our API's **`.env`** file and add in
+a new setting:
+
+```
+PLT_SERVER_CORS_ORIGIN=http://localhost:3000
+```
+
+The value of `PLT_SERVER_CORS_ORIGIN` is our frontend application's origin.
+
+Now we can add a `cors` configuration object in our API's configuration file,
+**`platformatic.db.json`**:
+
+```json
+{
+ "server": {
+ "logger": {
+ "level": "{PLT_SERVER_LOGGER_LEVEL}"
+ },
+ "hostname": "{PLT_SERVER_HOSTNAME}",
+ "port": "{PORT}",
+// highlight-start
+ "cors": {
+ "origin": "{PLT_SERVER_CORS_ORIGIN}"
+ }
+// highlight-end
+ },
+ ...
+}
+```
+
+The HTTP responses from all endpoints on our API will now include the header:
+
+```
+access-control-allow-origin: http://localhost:3000
+```
+
+This will allow JavaScript running on web pages under the `http://localhost:3000`
+origin to make requests to our API.
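+
+To see this in action, we can open the browser console on any page served from
+[http://localhost:3000](http://localhost:3000) and make a request directly to
+the API. Here's a sketch of such a check (the query is just an example):
+
+```javascript
+// Run in the browser console on http://localhost:3000 while the API is running.
+const response = await fetch('http://127.0.0.1:3042/graphql', {
+  method: 'POST',
+  headers: { 'Content-Type': 'application/json' },
+  body: JSON.stringify({ query: '{ quotes { id quote } }' })
+})
+
+// Without the CORS configuration above, the browser would block access to this response.
+console.log(await response.json())
+```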
+
+### Add like quote functionality
+
+Now that our API supports "liking" a quote, let's integrate it as a feature in
+our frontend.
+
+First we'll create a new component, **`src/components/QuoteActionLike.astro`**:
+
+```astro
+---
+export interface Props {
+ id: number;
+ likes: number;
+}
+
+const { id, likes } = Astro.props;
+---
+
+
+
+
+ {likes}
+
+
+
+```
+
+And in our listing page, **`src/pages/index.astro`**, let's import our new
+component and add it into the interface:
+
+```astro
+---
+import Layout from '../layouts/Layout.astro';
+import QuoteActionEdit from '../components/QuoteActionEdit.astro';
+import QuoteActionDelete from '../components/QuoteActionDelete.astro';
+// highlight-next-line
+import QuoteActionLike from '../components/QuoteActionLike.astro';
+import { quotesApi, gql } from '../lib/quotes-api';
+
+// ...
+---
+
+
+
+ {quotes.length > 0 ? quotes.map((quote) => (
+
+ ...
+
+
+// highlight-next-line
+
+
+
+
+ Added {new Date(quote.createdAt).toUTCString()}
+
+
+...
+```
+
+Then let's update the GraphQL query in this component's script to retrieve the
+`likes` field for all quotes:
+
+```javascript
+const { data } = await quotesApi.query(gql`
+ query {
+ quotes {
+ id
+ quote
+ saidBy
+// highlight-next-line
+ likes
+ createdAt
+ movie {
+ id
+ name
+ }
+ }
+ }
+`);
+```
+
+Now that we have the likes showing for each quote, let's wire things up so that
+clicking on the like component for a quote will call our API and add a like.
+
+Let's open up **`src/scripts/quote-actions.js`** and add a new function that
+makes a request to our GraphQL API:
+
+```javascript
+// highlight-next-line
+import { quotesApi, gql } from '../lib/quotes-api.js'
+
+export function confirmDeleteQuote (form) {
+ if (confirm('Are you sure you want to delete this quote?')) {
+ form.submit()
+ }
+}
+
+// highlight-start
+export async function likeQuote (likeQuote) {
+ likeQuote.classList.add('liked')
+ likeQuote.classList.remove('cursor-pointer')
+
+ const id = Number(likeQuote.dataset.quoteId)
+
+ const { data } = await quotesApi.mutation(gql`
+ mutation($id: ID!) {
+ likeQuote(id: $id)
+ }
+ `, { id })
+
+ if (data?.likeQuote) {
+ likeQuote.querySelector('.likes-count').innerText = data.likeQuote
+ }
+}
+// highlight-end
+```
+
+And then let's attach the `likeQuote` function to the click event for each
+like quote component on our listing page. We can do this by adding a little
+extra client side code inside the `<script>` block in **`src/pages/index.astro`**.
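+
+A minimal sketch of that wiring, assuming the like component renders an element
+with a `like-quote` class and the `data-quote-id` attribute that `likeQuote`
+reads (both the class name and the exact markup are assumptions):
+
+```javascript
+// Client side script in src/pages/index.astro (sketch).
+import { likeQuote } from '../scripts/quote-actions.js'
+
+addEventListener('DOMContentLoaded', () => {
+  document.querySelectorAll('.like-quote').forEach((likeQuoteElement) => {
+    likeQuoteElement.addEventListener(
+      'click',
+      () => likeQuote(likeQuoteElement),
+      // Only allow one like per page load from the same element.
+      { once: true }
+    )
+  })
+})
+```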
+
+### Sort the listing by top quotes
+
+Now that users can like their favourite quotes, as a final step, we'll allow
+for sorting quotes on the listing page by the number of likes they have.
+
+Let's update **`src/pages/index.astro`** to read a `sort` query string parameter
+and use it in the GraphQL query that we make to our API:
+
+```astro
+---
+// ...
+
+// highlight-start
+const allowedSortFields = ["createdAt", "likes"];
+const searchParamSort = new URL(Astro.request.url).searchParams.get("sort");
+const sort = allowedSortFields.includes(searchParamSort) ? searchParamSort : "createdAt";
+// highlight-end
+
+const { data } = await quotesApi.query(gql`
+ query {
+// highlight-next-line
+ quotes(orderBy: {field: ${sort}, direction: DESC}) {
+ id
+ quote
+ saidBy
+ likes
+ createdAt
+ movie {
+ id
+ name
+ }
+ }
+ }
+`);
+
+const quotes = data?.quotes || [];
+---
+// highlight-next-line
+
+...
+```
+
+Then let's replace the 'All quotes' link in the `<nav>` in **`src/layouts/Layout.astro`**
+with two new links:
+
+```astro
+<nav>
+// highlight-start
+  <a href="/?sort=createdAt">Latest quotes</a>
+  <a href="/?sort=likes">Top quotes</a>
+// highlight-end
+  <a href="/add">Add a quote</a>
+</nav>
+```
+
+With these few extra lines of code, our users can now sort quotes by when they
+were created or by the number of likes that they have. Neat!
+
+## Wrapping up
+
+And we're done — you now have the knowledge you need to build a full stack
+application on top of Platformatic DB.
+
+We can't wait to see what you'll build next!
diff --git a/docs/getting-started/platformatic-architecture.png b/docs/getting-started/platformatic-architecture.png
new file mode 100644
index 0000000000..e19a8cdd18
Binary files /dev/null and b/docs/getting-started/platformatic-architecture.png differ
diff --git a/docs/getting-started/platformatid-db-architecture.excalidraw b/docs/getting-started/platformatid-db-architecture.excalidraw
new file mode 100644
index 0000000000..04d7f461f5
--- /dev/null
+++ b/docs/getting-started/platformatid-db-architecture.excalidraw
@@ -0,0 +1,1777 @@
+{
+ "type": "excalidraw",
+ "version": 2,
+ "source": "https://excalidraw.com",
+ "elements": [
+ {
+ "type": "rectangle",
+ "version": 551,
+ "versionNonce": 253868331,
+ "isDeleted": false,
+ "id": "1zlVmmd_Y9S9Oz0S_fTUH",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 408.666015625,
+ "y": 254.265625,
+ "strokeColor": "#000000",
+ "backgroundColor": "#fab005",
+ "width": 522.21875,
+ "height": 73.2265625,
+ "seed": 418516020,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [
+ {
+ "id": "zfmfD9pZLlP_G075cvUTv",
+ "type": "arrow"
+ },
+ {
+ "id": "mzCbJHPkBmRZyt8z7yCAe",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1662501577385,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "rectangle",
+ "version": 973,
+ "versionNonce": 199542836,
+ "isDeleted": false,
+ "id": "4CCO4Ro-Gy5uYIPERvP7H",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 785.0390625,
+ "y": 636.41796875,
+ "strokeColor": "#000000",
+ "backgroundColor": "#fa5252",
+ "width": 135.51171875,
+ "height": 74.8515625,
+ "seed": 906185780,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [
+ {
+ "id": "zfmfD9pZLlP_G075cvUTv",
+ "type": "arrow"
+ },
+ {
+ "id": "rQggHgShjGGXQ0hiwBQVd",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 184,
+ "versionNonce": 668075276,
+ "isDeleted": false,
+ "id": "R_U11fvrIlgCS1IlhmWIQ",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 585.58984375,
+ "y": 286.25390625,
+ "strokeColor": "#000000",
+ "backgroundColor": "transparent",
+ "width": 136,
+ "height": 25,
+ "seed": 553319692,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false,
+ "fontSize": 20,
+ "fontFamily": 1,
+ "text": "Your Frontend",
+ "baseline": 18,
+ "textAlign": "left",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "Your Frontend"
+ },
+ {
+ "type": "rectangle",
+ "version": 1250,
+ "versionNonce": 2047189428,
+ "isDeleted": false,
+ "id": "W06kzPlnPRgvKtfyagn_y",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 410.49609375,
+ "y": 381.2578125,
+ "strokeColor": "#000000",
+ "backgroundColor": "#40c057",
+ "width": 519.26171875,
+ "height": 180.80078124999997,
+ "seed": 1222202124,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [
+ {
+ "id": "7lr5UwO6mqEbEA4WdbjS3",
+ "type": "arrow"
+ },
+ {
+ "id": "zfmfD9pZLlP_G075cvUTv",
+ "type": "arrow"
+ },
+ {
+ "id": "rQggHgShjGGXQ0hiwBQVd",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 1593,
+ "versionNonce": 1162225739,
+ "isDeleted": false,
+ "id": "b8p9mgZWBw2sgu28jjLm-",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 754.9140625,
+ "y": 350.640625,
+ "strokeColor": "#000000",
+ "backgroundColor": "#40c057",
+ "width": 165,
+ "height": 25,
+ "seed": 759852684,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1662501578885,
+ "link": null,
+ "locked": false,
+ "fontSize": 20,
+ "fontFamily": 1,
+ "text": "Platformatic DB",
+ "baseline": 18,
+ "textAlign": "left",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "Platformatic DB"
+ },
+ {
+ "type": "line",
+ "version": 4826,
+ "versionNonce": 188026420,
+ "isDeleted": false,
+ "id": "WkWYX211VfpfEJZakoT-m",
+ "fillStyle": "solid",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 415.6994711197414,
+ "y": 636.1128296351798,
+ "strokeColor": "#0a11d3",
+ "backgroundColor": "#228be6",
+ "width": 88.21658171083376,
+ "height": 113.8575037534261,
+ "seed": 1401759284,
+ "groupIds": [
+ "6bLMR27dyecCy-nls2xaX",
+ "u14TNEkdqVuYB_Aj5ykEw"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false,
+ "startBinding": null,
+ "endBinding": null,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 0.29089298333313673,
+ 86.05288422061678
+ ],
+ [
+ 0.013613108737802165,
+ 95.84963140781468
+ ],
+ [
+ 4.543349062013738,
+ 100.08268472409586
+ ],
+ [
+ 20.317928500125443,
+ 103.66521849306073
+ ],
+ [
+ 46.98143617553956,
+ 104.78076599153316
+ ],
+ [
+ 72.45665455006592,
+ 102.9996310009587
+ ],
+ [
+ 85.99182564238487,
+ 98.74007888522631
+ ],
+ [
+ 87.90077837148979,
+ 95.14923176741362
+ ],
+ [
+ 88.16888387182134,
+ 87.26194204835767
+ ],
+ [
+ 87.95845222911922,
+ 7.219356674957439
+ ],
+ [
+ 87.48407176050935,
+ -0.3431928547433216
+ ],
+ [
+ 81.81967725989045,
+ -4.569951534960701
+ ],
+ [
+ 69.89167127292335,
+ -7.017866506201685
+ ],
+ [
+ 42.70935725136615,
+ -9.076737761892943
+ ],
+ [
+ 20.91603533578692,
+ -7.849028196182914
+ ],
+ [
+ 3.775735655469765,
+ -3.684787148572539
+ ],
+ [
+ -0.047697839012426885,
+ -0.0517060607782156
+ ],
+ [
+ 0,
+ 0
+ ]
+ ]
+ },
+ {
+ "type": "line",
+ "version": 2560,
+ "versionNonce": 957423372,
+ "isDeleted": false,
+ "id": "gEFv_XFXnFnIvMtMWGW8s",
+ "fillStyle": "solid",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 416.38683270923883,
+ "y": 701.4029576659674,
+ "strokeColor": "#0a11d3",
+ "backgroundColor": "transparent",
+ "width": 88.30808627974527,
+ "height": 9.797916664247975,
+ "seed": 1407616780,
+ "groupIds": [
+ "6bLMR27dyecCy-nls2xaX",
+ "u14TNEkdqVuYB_Aj5ykEw"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false,
+ "startBinding": null,
+ "endBinding": null,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 2.326538897826852,
+ 3.9056133261361587
+ ],
+ [
+ 12.359939318521995,
+ 7.182387014695761
+ ],
+ [
+ 25.710950037209347,
+ 9.166781347006062
+ ],
+ [
+ 46.6269757640547,
+ 9.347610268342288
+ ],
+ [
+ 71.03526003420632,
+ 8.084235941711592
+ ],
+ [
+ 85.2899738827162,
+ 3.4881086608341767
+ ],
+ [
+ 88.30808627974527,
+ -0.45030639590568633
+ ]
+ ]
+ },
+ {
+ "type": "line",
+ "version": 2647,
+ "versionNonce": 1935484852,
+ "isDeleted": false,
+ "id": "vt3ifycjA7xAUXWBfhMbE",
+ "fillStyle": "solid",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 415.2930911971775,
+ "y": 668.3288031038957,
+ "strokeColor": "#0a11d3",
+ "backgroundColor": "transparent",
+ "width": 88.30808627974527,
+ "height": 9.797916664247975,
+ "seed": 686964660,
+ "groupIds": [
+ "6bLMR27dyecCy-nls2xaX",
+ "u14TNEkdqVuYB_Aj5ykEw"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false,
+ "startBinding": null,
+ "endBinding": null,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 2.326538897826852,
+ 3.9056133261361587
+ ],
+ [
+ 12.359939318521995,
+ 7.182387014695761
+ ],
+ [
+ 25.710950037209347,
+ 9.166781347006062
+ ],
+ [
+ 46.6269757640547,
+ 9.347610268342288
+ ],
+ [
+ 71.03526003420632,
+ 8.084235941711592
+ ],
+ [
+ 85.2899738827162,
+ 3.4881086608341767
+ ],
+ [
+ 88.30808627974527,
+ -0.45030639590568633
+ ]
+ ]
+ },
+ {
+ "type": "ellipse",
+ "version": 5667,
+ "versionNonce": 1513184652,
+ "isDeleted": false,
+ "id": "ogy32Rih_TrO63WbbEXuy",
+ "fillStyle": "solid",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 414.1722685110156,
+ "y": 628.0881334080838,
+ "strokeColor": "#0a11d3",
+ "backgroundColor": "#fff",
+ "width": 87.65074610854188,
+ "height": 17.72670397681366,
+ "seed": 1176094092,
+ "groupIds": [
+ "6bLMR27dyecCy-nls2xaX",
+ "u14TNEkdqVuYB_Aj5ykEw"
+ ],
+ "strokeSharpness": "sharp",
+ "boundElements": [
+ {
+ "type": "arrow",
+ "id": "bxuMGTzXLn7H-uBCptINx"
+ },
+ {
+ "id": "7lr5UwO6mqEbEA4WdbjS3",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "ellipse",
+ "version": 1034,
+ "versionNonce": 575017268,
+ "isDeleted": false,
+ "id": "blhO-aMedBW56hjM8iBeb",
+ "fillStyle": "solid",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 485.68200081159625,
+ "y": 652.6639362852167,
+ "strokeColor": "#0a11d3",
+ "backgroundColor": "#fff",
+ "width": 12.846057046979809,
+ "height": 13.941904362416096,
+ "seed": 360196404,
+ "groupIds": [
+ "6bLMR27dyecCy-nls2xaX",
+ "u14TNEkdqVuYB_Aj5ykEw"
+ ],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "ellipse",
+ "version": 1083,
+ "versionNonce": 432393228,
+ "isDeleted": false,
+ "id": "OO8WqGjES_ZtV1HgusFX3",
+ "fillStyle": "solid",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 485.68200081159625,
+ "y": 683.2663901442771,
+ "strokeColor": "#0a11d3",
+ "backgroundColor": "#fff",
+ "width": 12.846057046979809,
+ "height": 13.941904362416096,
+ "seed": 1013003276,
+ "groupIds": [
+ "6bLMR27dyecCy-nls2xaX",
+ "u14TNEkdqVuYB_Aj5ykEw"
+ ],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "ellipse",
+ "version": 1137,
+ "versionNonce": 659933876,
+ "isDeleted": false,
+ "id": "XYWNN5j1eug8c7TYVSHsR",
+ "fillStyle": "solid",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 485.68200081159625,
+ "y": 716.5271913187738,
+ "strokeColor": "#0a11d3",
+ "backgroundColor": "#fff",
+ "width": 12.846057046979809,
+ "height": 13.941904362416096,
+ "seed": 478470836,
+ "groupIds": [
+ "6bLMR27dyecCy-nls2xaX",
+ "u14TNEkdqVuYB_Aj5ykEw"
+ ],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 814,
+ "versionNonce": 2043177612,
+ "isDeleted": false,
+ "id": "gMXOy183cvSYG2XLH1Nps",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 523.53515625,
+ "y": 651.0625000000001,
+ "strokeColor": "#000000",
+ "backgroundColor": "#40c057",
+ "width": 114,
+ "height": 75,
+ "seed": 1990988468,
+ "groupIds": [
+ "u14TNEkdqVuYB_Aj5ykEw"
+ ],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false,
+ "fontSize": 20,
+ "fontFamily": 1,
+ "text": "MySQL\nPostgreSQL\nSQLite",
+ "baseline": 68,
+ "textAlign": "left",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "MySQL\nPostgreSQL\nSQLite"
+ },
+ {
+ "type": "rectangle",
+ "version": 246,
+ "versionNonce": 1377935909,
+ "isDeleted": false,
+ "id": "PvIWYwKCctyqauI5pTaz2",
+ "fillStyle": "solid",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 437.234375,
+ "y": 395.46484375,
+ "strokeColor": "#000000",
+ "backgroundColor": "#40c057",
+ "width": 214,
+ "height": 49,
+ "seed": 1003500172,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "ePp9eFiz5m2-X9DieNpXq"
+ },
+ {
+ "id": "zfmfD9pZLlP_G075cvUTv",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1662501565240,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 164,
+ "versionNonce": 2126526348,
+ "isDeleted": false,
+ "id": "ePp9eFiz5m2-X9DieNpXq",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 442.234375,
+ "y": 407.46484375,
+ "strokeColor": "#000000",
+ "backgroundColor": "#40c057",
+ "width": 204,
+ "height": 25,
+ "seed": 785206068,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false,
+ "fontSize": 20,
+ "fontFamily": 1,
+ "text": "REST",
+ "baseline": 18,
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "PvIWYwKCctyqauI5pTaz2",
+ "originalText": "REST"
+ },
+ {
+ "type": "rectangle",
+ "version": 486,
+ "versionNonce": 936510213,
+ "isDeleted": false,
+ "id": "p8xcTPjfpNzqdS2Bi6oAZ",
+ "fillStyle": "solid",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 683.826171875,
+ "y": 394.291015625,
+ "strokeColor": "#000000",
+ "backgroundColor": "#40c057",
+ "width": 205,
+ "height": 49,
+ "seed": 994139020,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [
+ {
+ "id": "NCCySoN90b0h8dl252sC8",
+ "type": "text"
+ },
+ {
+ "type": "text",
+ "id": "NCCySoN90b0h8dl252sC8"
+ }
+ ],
+ "updated": 1662501577385,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 409,
+ "versionNonce": 996763148,
+ "isDeleted": false,
+ "id": "NCCySoN90b0h8dl252sC8",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 688.826171875,
+ "y": 406.291015625,
+ "strokeColor": "#000000",
+ "backgroundColor": "#40c057",
+ "width": 195,
+ "height": 25,
+ "seed": 415036212,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false,
+ "fontSize": 20,
+ "fontFamily": 1,
+ "text": "GraphQL",
+ "baseline": 18,
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "p8xcTPjfpNzqdS2Bi6oAZ",
+ "originalText": "GraphQL"
+ },
+ {
+ "type": "arrow",
+ "version": 2380,
+ "versionNonce": 382783668,
+ "isDeleted": false,
+ "id": "7lr5UwO6mqEbEA4WdbjS3",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 454.9013510980401,
+ "y": 570.55078125,
+ "strokeColor": "#000000",
+ "backgroundColor": "#40c057",
+ "width": 0,
+ "height": 50.53939662172979,
+ "seed": 646336524,
+ "groupIds": [],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false,
+ "startBinding": {
+ "elementId": "W06kzPlnPRgvKtfyagn_y",
+ "focus": 0.828967721884312,
+ "gap": 8.4921875
+ },
+ "endBinding": {
+ "elementId": "ogy32Rih_TrO63WbbEXuy",
+ "focus": -0.07065063572675613,
+ "gap": 7.019406618337827
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 50.53939662172979
+ ]
+ ]
+ },
+ {
+ "type": "arrow",
+ "version": 1800,
+ "versionNonce": 1766871115,
+ "isDeleted": false,
+ "id": "zfmfD9pZLlP_G075cvUTv",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 662.849929238139,
+ "y": 334.2265625,
+ "strokeColor": "#000000",
+ "backgroundColor": "#40c057",
+ "width": 0,
+ "height": 42.35937499999994,
+ "seed": 740245132,
+ "groupIds": [],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662501582196,
+ "link": null,
+ "locked": false,
+ "startBinding": {
+ "elementId": "1zlVmmd_Y9S9Oz0S_fTUH",
+ "focus": 0.026523219960451445,
+ "gap": 6.734375
+ },
+ "endBinding": {
+ "elementId": "W06kzPlnPRgvKtfyagn_y",
+ "focus": -0.028028347263413595,
+ "gap": 4.671875000000057
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 0,
+ 42.35937499999994
+ ]
+ ]
+ },
+ {
+ "type": "rectangle",
+ "version": 502,
+ "versionNonce": 154619883,
+ "isDeleted": false,
+ "id": "9WfekBK46yxJpRloc_OZg",
+ "fillStyle": "solid",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 688.9609375,
+ "y": 504.76953125,
+ "strokeColor": "#000000",
+ "backgroundColor": "#fff",
+ "width": 172,
+ "height": 39,
+ "seed": 1128027188,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "K86bABr6PKIfF1LYTHu3e"
+ }
+ ],
+ "updated": 1662501566593,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 303,
+ "versionNonce": 1588802484,
+ "isDeleted": false,
+ "id": "K86bABr6PKIfF1LYTHu3e",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 693.9609375,
+ "y": 511.76953125,
+ "strokeColor": "#000000",
+ "backgroundColor": "#fa5252",
+ "width": 162,
+ "height": 25,
+ "seed": 1515850252,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false,
+ "fontSize": 20,
+ "fontFamily": 1,
+ "text": "Your Code",
+ "baseline": 18,
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "9WfekBK46yxJpRloc_OZg",
+ "originalText": "Your Code"
+ },
+ {
+ "type": "rectangle",
+ "version": 432,
+ "versionNonce": 333233835,
+ "isDeleted": false,
+ "id": "jti7SjH-dppIs0Sfz7P-6",
+ "fillStyle": "solid",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 461.6171875,
+ "y": 505.3828125,
+ "strokeColor": "#000000",
+ "backgroundColor": "#fa5252",
+ "width": 172,
+ "height": 39,
+ "seed": 644178100,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [
+ {
+ "id": "QbZUz4LyUXI8NfqStLibW",
+ "type": "text"
+ },
+ {
+ "type": "text",
+ "id": "QbZUz4LyUXI8NfqStLibW"
+ }
+ ],
+ "updated": 1662501565241,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 257,
+ "versionNonce": 1325047092,
+ "isDeleted": false,
+ "id": "QbZUz4LyUXI8NfqStLibW",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 466.6171875,
+ "y": 512.3828125,
+ "strokeColor": "#000000",
+ "backgroundColor": "#fa5252",
+ "width": 162,
+ "height": 25,
+ "seed": 1705325708,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false,
+ "fontSize": 20,
+ "fontFamily": 1,
+ "text": "sql-mapper",
+ "baseline": 18,
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "jti7SjH-dppIs0Sfz7P-6",
+ "originalText": "sql-mapper"
+ },
+ {
+ "type": "text",
+ "version": 275,
+ "versionNonce": 867285428,
+ "isDeleted": false,
+ "id": "7EfMfgnZWfNvLQtr-x3uF",
+ "fillStyle": "solid",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 803.3828125,
+ "y": 662.5234375,
+ "strokeColor": "#000000",
+ "backgroundColor": "#fff",
+ "width": 100,
+ "height": 25,
+ "seed": 1646758924,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false,
+ "fontSize": 20,
+ "fontFamily": 1,
+ "text": "Migrations",
+ "baseline": 18,
+ "textAlign": "left",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "Migrations"
+ },
+ {
+ "type": "arrow",
+ "version": 104,
+ "versionNonce": 1312318260,
+ "isDeleted": false,
+ "id": "rQggHgShjGGXQ0hiwBQVd",
+ "fillStyle": "solid",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 849.7734375,
+ "y": 568.66015625,
+ "strokeColor": "#000000",
+ "backgroundColor": "#fa5252",
+ "width": 0.03125,
+ "height": 66.19140625,
+ "seed": 578856588,
+ "groupIds": [],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412597026,
+ "link": null,
+ "locked": false,
+ "startBinding": {
+ "elementId": "W06kzPlnPRgvKtfyagn_y",
+ "focus": -0.6916403151899281,
+ "gap": 6.6015625
+ },
+ "endBinding": {
+ "elementId": "4CCO4Ro-Gy5uYIPERvP7H",
+ "focus": -0.043849355501159114,
+ "gap": 1.56640625
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 0.03125,
+ 66.19140625
+ ]
+ ]
+ },
+ {
+ "type": "ellipse",
+ "version": 1690,
+ "versionNonce": 361510412,
+ "isDeleted": false,
+ "id": "lFQ5wDy-KUnUz3oDzkI25",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 239.69356547871712,
+ "y": 67.47880783610327,
+ "strokeColor": "#000000",
+ "backgroundColor": "transparent",
+ "width": 21.49669286347574,
+ "height": 23.588242019607666,
+ "seed": 1348447412,
+ "groupIds": [
+ "BigUhShJehOiqWIjcDoW9"
+ ],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "line",
+ "version": 1660,
+ "versionNonce": 16974004,
+ "isDeleted": false,
+ "id": "JMyz9WWOhsSP478-dH8kL",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 249.34114010330978,
+ "y": 91.1380667584855,
+ "strokeColor": "#000000",
+ "backgroundColor": "#ced4da",
+ "width": 1.2113207534300774,
+ "height": 27.54598873639832,
+ "seed": 675653772,
+ "groupIds": [
+ "BigUhShJehOiqWIjcDoW9"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ -1.2113207534300774,
+ 27.54598873639832
+ ]
+ ]
+ },
+ {
+ "type": "line",
+ "version": 1613,
+ "versionNonce": 1160877196,
+ "isDeleted": false,
+ "id": "-f4M1kqgJJ1DqIHSarxxq",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 248.18103511512425,
+ "y": 119.44818324746751,
+ "strokeColor": "#000000",
+ "backgroundColor": "#ced4da",
+ "width": 10.904198461697792,
+ "height": 16.79898457094717,
+ "seed": 649066036,
+ "groupIds": [
+ "BigUhShJehOiqWIjcDoW9"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 10.904198461697792,
+ 16.79898457094717
+ ]
+ ]
+ },
+ {
+ "type": "line",
+ "version": 1589,
+ "versionNonce": 264817204,
+ "isDeleted": false,
+ "id": "wTdHoAnzcbiBjRED9_-VP",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 246.67659186550435,
+ "y": 118.31986521987952,
+ "strokeColor": "#000000",
+ "backgroundColor": "#ced4da",
+ "width": 9.329943818404125,
+ "height": 15.60615726413436,
+ "seed": 1427683084,
+ "groupIds": [
+ "BigUhShJehOiqWIjcDoW9"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ -9.329943818404125,
+ 15.60615726413436
+ ]
+ ]
+ },
+ {
+ "type": "line",
+ "version": 1566,
+ "versionNonce": 1168581388,
+ "isDeleted": false,
+ "id": "lWzkvf-pHYnTTwDCCoYUU",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 238.90401457661704,
+ "y": 96.50986296364448,
+ "strokeColor": "#000000",
+ "backgroundColor": "#ced4da",
+ "width": 10.592121550031646,
+ "height": 10.166772277834836,
+ "seed": 792399796,
+ "groupIds": [
+ "BigUhShJehOiqWIjcDoW9"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 10.592121550031646,
+ 10.166772277834836
+ ]
+ ]
+ },
+ {
+ "type": "line",
+ "version": 1586,
+ "versionNonce": 639198132,
+ "isDeleted": false,
+ "id": "eZyQeLBRJefnO4eEmCIOm",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 250.16954159533526,
+ "y": 106.46768276298613,
+ "strokeColor": "#000000",
+ "backgroundColor": "#ced4da",
+ "width": 15.26402850171518,
+ "height": 8.608699400331258,
+ "seed": 1099298188,
+ "groupIds": [
+ "BigUhShJehOiqWIjcDoW9"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 15.26402850171518,
+ -8.608699400331258
+ ]
+ ]
+ },
+ {
+ "type": "line",
+ "version": 1817,
+ "versionNonce": 1835458956,
+ "isDeleted": false,
+ "id": "nRSmHuDmrd58aq7nRjD9v",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 259.25536801028943,
+ "y": 73.70638903822018,
+ "strokeColor": "#000000",
+ "backgroundColor": "transparent",
+ "width": 18.012785949061733,
+ "height": 8.792983231273032,
+ "seed": 2049374516,
+ "groupIds": [
+ "BigUhShJehOiqWIjcDoW9"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ -5.551156049147498,
+ 4.156441847180949
+ ],
+ [
+ -14.767067932950507,
+ 1.7161569081851142
+ ],
+ [
+ -18.012785949061733,
+ 8.792983231273032
+ ]
+ ]
+ },
+ {
+ "type": "line",
+ "version": 2089,
+ "versionNonce": 706399540,
+ "isDeleted": false,
+ "id": "aXuUF4qv85paleGvjq1Vf",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 241.12989079109474,
+ "y": 73.78023080944443,
+ "strokeColor": "#000000",
+ "backgroundColor": "transparent",
+ "width": 7.256241079962556,
+ "height": 20.702676936887435,
+ "seed": 1523016716,
+ "groupIds": [
+ "BigUhShJehOiqWIjcDoW9"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ -0.7145030549373154,
+ -3.2217494351862745
+ ],
+ [
+ -3.6837258052759845,
+ -3.9328337752254474
+ ],
+ [
+ -7.256241079962556,
+ 1.6608630719016904
+ ],
+ [
+ -5.283976448978563,
+ 16.76984316166199
+ ],
+ [
+ -2.813724840310993,
+ 9.33657487598008
+ ],
+ [
+ 0,
+ 0
+ ]
+ ]
+ },
+ {
+ "type": "rectangle",
+ "version": 1055,
+ "versionNonce": 866832396,
+ "isDeleted": false,
+ "id": "vPmumowJzhJh6C166dtT6",
+ "fillStyle": "solid",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0.32340402082123276,
+ "x": 234.39442736485796,
+ "y": 90.43231788867585,
+ "strokeColor": "#000000",
+ "backgroundColor": "transparent",
+ "width": 6.043745042549293,
+ "height": 12.566983256668966,
+ "seed": 584692404,
+ "groupIds": [
+ "BigUhShJehOiqWIjcDoW9"
+ ],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "ellipse",
+ "version": 2205,
+ "versionNonce": 1295189684,
+ "isDeleted": false,
+ "id": "xApIn7h3gZf3eY10bjrUb",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 299.3665088217107,
+ "y": 71.88057076815883,
+ "strokeColor": "#000000",
+ "backgroundColor": "transparent",
+ "width": 24.744967041785156,
+ "height": 22.77537981587287,
+ "seed": 1744387124,
+ "groupIds": [
+ "-ijW-sGctTCBEecgCTOPZ"
+ ],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "line",
+ "version": 2267,
+ "versionNonce": 409792140,
+ "isDeleted": false,
+ "id": "RED9kg0RWDjfRC331frur",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 310.36246414441894,
+ "y": 94.66231306364153,
+ "strokeColor": "#000000",
+ "backgroundColor": "#ced4da",
+ "width": 1.2141925957388797,
+ "height": 27.611295745848107,
+ "seed": 477024524,
+ "groupIds": [
+ "-ijW-sGctTCBEecgCTOPZ"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ -1.2141925957388797,
+ 27.611295745848107
+ ]
+ ]
+ },
+ {
+ "type": "line",
+ "version": 2220,
+ "versionNonce": 604508212,
+ "isDeleted": false,
+ "id": "4tmwbSvpDvV5L9eOL3w5W",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 309.318726188599,
+ "y": 122.87810392550332,
+ "strokeColor": "#000000",
+ "backgroundColor": "#ced4da",
+ "width": 10.93005052309211,
+ "height": 16.83881220082897,
+ "seed": 1131124148,
+ "groupIds": [
+ "-ijW-sGctTCBEecgCTOPZ"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 10.93005052309211,
+ 16.83881220082897
+ ]
+ ]
+ },
+ {
+ "type": "line",
+ "version": 2234,
+ "versionNonce": 208092428,
+ "isDeleted": false,
+ "id": "BhFX6QUNp6ZRAaC14lRsq",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 309.2530763831227,
+ "y": 122.12933640276594,
+ "strokeColor": "#000000",
+ "backgroundColor": "#ced4da",
+ "width": 10.893068867789728,
+ "height": 15.119678146262912,
+ "seed": 721997708,
+ "groupIds": [
+ "-ijW-sGctTCBEecgCTOPZ"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ -10.893068867789728,
+ 15.119678146262912
+ ]
+ ]
+ },
+ {
+ "type": "line",
+ "version": 2207,
+ "versionNonce": 1048570292,
+ "isDeleted": false,
+ "id": "T5q_miW2zK8Z6_GHMzHuy",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 299.79149008152416,
+ "y": 99.89494714462973,
+ "strokeColor": "#000000",
+ "backgroundColor": "#ced4da",
+ "width": 9.29401757679057,
+ "height": 9.667397272649206,
+ "seed": 39239476,
+ "groupIds": [
+ "-ijW-sGctTCBEecgCTOPZ"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 9.29401757679057,
+ 9.667397272649206
+ ]
+ ]
+ },
+ {
+ "type": "line",
+ "version": 2274,
+ "versionNonce": 1494465420,
+ "isDeleted": false,
+ "id": "7eDKh8D3tRxN9AnkQNJCH",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 309.9030355081573,
+ "y": 109.63564914257302,
+ "strokeColor": "#000000",
+ "backgroundColor": "#ced4da",
+ "width": 16.61048353051358,
+ "height": 8.347356780081856,
+ "seed": 48168460,
+ "groupIds": [
+ "-ijW-sGctTCBEecgCTOPZ"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 16.61048353051358,
+ -8.347356780081856
+ ]
+ ]
+ },
+ {
+ "type": "line",
+ "version": 2214,
+ "versionNonce": 1069585204,
+ "isDeleted": false,
+ "id": "-MsR8pFblQ_RWCQ50tTvg",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 295.3274989522442,
+ "y": 73.87763473934886,
+ "strokeColor": "#000000",
+ "backgroundColor": "transparent",
+ "width": 35.55274923479633,
+ "height": 7.233182720600468,
+ "seed": 1562425524,
+ "groupIds": [
+ "-ijW-sGctTCBEecgCTOPZ"
+ ],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false,
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": null,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 35.55274923479633,
+ 7.233182720600468
+ ]
+ ]
+ },
+ {
+ "type": "rectangle",
+ "version": 2521,
+ "versionNonce": 145517068,
+ "isDeleted": false,
+ "id": "mxV4CeWFqORnIifYgQTpP",
+ "fillStyle": "solid",
+ "strokeWidth": 2,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0.1875122815022081,
+ "x": 303.27728681036575,
+ "y": 69.14462472277884,
+ "strokeColor": "#000000",
+ "backgroundColor": "#fff",
+ "width": 21.279164889176773,
+ "height": 7.3403547293505,
+ "seed": 216751244,
+ "groupIds": [
+ "-ijW-sGctTCBEecgCTOPZ"
+ ],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1662412601919,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "arrow",
+ "version": 1412,
+ "versionNonce": 647641268,
+ "isDeleted": false,
+ "id": "mzCbJHPkBmRZyt8z7yCAe",
+ "fillStyle": "solid",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 282.90625,
+ "y": 142.60546875,
+ "strokeColor": "#000000",
+ "backgroundColor": "#fa5252",
+ "width": 378.79296875,
+ "height": 102.58203125,
+ "seed": 1011840012,
+ "groupIds": [],
+ "strokeSharpness": "round",
+ "boundElements": [],
+ "updated": 1662412613395,
+ "link": null,
+ "locked": false,
+ "startBinding": null,
+ "endBinding": {
+ "elementId": "1zlVmmd_Y9S9Oz0S_fTUH",
+ "focus": 0.056278788827343405,
+ "gap": 9.078125
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 151.6953125,
+ 69.025390625
+ ],
+ [
+ 333.46484375,
+ 15.73046875
+ ],
+ [
+ 378.79296875,
+ 102.58203125
+ ]
+ ]
+ }
+ ],
+ "appState": {
+ "gridSize": null,
+ "viewBackgroundColor": "#ffffff"
+ },
+ "files": {}
+}
\ No newline at end of file
diff --git a/docs/getting-started/quick-start-guide.md b/docs/getting-started/quick-start-guide.md
new file mode 100644
index 0000000000..f207e4796f
--- /dev/null
+++ b/docs/getting-started/quick-start-guide.md
@@ -0,0 +1,228 @@
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+# Quick Start Guide
+
+In this guide you'll learn how to create and run your first API with
+Platformatic DB. Let's get started!
+
+:::info
+
+This guide uses [SQLite](https://www.sqlite.org/) for the database, but
+Platformatic DB also supports [PostgreSQL](https://www.postgresql.org/),
+[MySQL](https://www.mysql.com/) and [MariaDB](https://mariadb.org/) databases.
+
+:::
+
+## Requirements
+
+Platformatic supports macOS, Linux and Windows ([WSL](https://docs.microsoft.com/windows/wsl/) recommended).
+
+To follow along with this guide you'll need to have these things installed:
+
+- [Node.js](https://nodejs.org/) >= v16.17.0 or >= v18.8.0
+- [npm](https://docs.npmjs.com/cli/) v7 or later
+- A code editor, for example [Visual Studio Code](https://code.visualstudio.com/)
+
+## Create a new API project
+
+Create a directory for your new API project:
+
+```bash
+mkdir quick-start
+
+cd quick-start
+```
+
+Then create a `package.json` file and install the [platformatic](https://www.npmjs.com/package/platformatic)
+CLI as a project dependency:
+
+
+<Tabs>
+<TabItem value="npm" label="npm">
+
+```bash
+npm init --yes
+
+npm install platformatic
+```
+
+</TabItem>
+<TabItem value="yarn" label="yarn">
+
+```bash
+yarn init --yes
+
+yarn add platformatic
+```
+
+</TabItem>
+<TabItem value="pnpm" label="pnpm">
+
+```bash
+pnpm init
+
+pnpm add platformatic
+```
+
+</TabItem>
+</Tabs>
+
+## Add a database schema
+
+In your project directory (`quick-start`), create a `migrations` directory to
+store your database migration files:
+
+```bash
+mkdir migrations
+```
+
+Then create a new migration file named **`001.do.sql`** in the **`migrations`**
+directory.
+
+Copy and paste this SQL query into the migration file:
+
+```sql title="migrations/001.do.sql"
+CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(255) NOT NULL
+);
+```
+
+When it's run by Platformatic, this query will create a new database table
+named `pages`.
+
+:::tip
+
+You can check syntax for SQL queries on the [Database.Guide SQL Reference](https://database.guide/sql-reference-for-beginners/).
+
+:::
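+
+If you also want to be able to roll this change back later, you can optionally add a matching "undo" migration. This is a sketch that assumes the `001.undo.sql` naming convention used by Platformatic's migration engine ([Postgrator](https://github.com/rickbergfalk/postgrator)):
+
+```sql title="migrations/001.undo.sql"
+DROP TABLE pages;
+```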
+
+## Configure your API
+
+In your project directory, create a new Platformatic configuration file named
+**`platformatic.db.json`**.
+
+Copy and paste in this configuration:
+
+```json title="platformatic.db.json"
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": "3042"
+ },
+ "core": {
+ "connectionString": "sqlite://./pages.db"
+ },
+ "migrations": {
+ "dir": "./migrations"
+ }
+}
+```
+
+This configuration tells Platformatic to:
+
+- Run an API server on `http://127.0.0.1:3042/`
+- Connect to an SQLite database stored in a file named `pages.db`
+- Look for database migration files in the `migrations` directory
+
+:::tip
+
+The [Configuration reference](/reference/configuration.md) explains all of the
+supported configuration options.
+
+:::
+
+## Start your API server
+
+In your project directory, use the Platformatic CLI to start your API server:
+
+```bash
+npx platformatic db start
+```
+
+This will:
+
+1. Run your SQL migration file and create a `pages` table in the SQLite database.
+1. Automatically map your SQL database to REST and GraphQL API interfaces.
+1. Start the Platformatic API server.
+
+Your Platformatic API is now up and running! 🌟
+
+## Next steps
+
+### Use the REST API interface
+
+You can use cURL to make requests to the REST interface of your API, for example:
+
+#### Create a new page
+
+```bash
+curl -X POST -H "Content-Type: application/json" \
+ -d "{ \"title\": \"Hello Platformatic DB\" }" \
+ http://localhost:3042/pages
+```
+
+You should receive a response from your API like this:
+
+```json
+{"id":1,"title":"Hello Platformatic DB"}
+```
+
+#### Get all pages
+
+```bash
+curl http://localhost:3042/pages
+```
+
+You should receive a response from your API like this, with an array
+containing all the pages in your database:
+
+```json
+[{"id":1,"title":"Hello Platformatic DB"}]
+```
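+
+#### Update a page
+
+The generated REST interface also exposes routes for updating and deleting records. As a sketch (check the Swagger documentation described below for the exact routes your API exposes), updating the page with `id` 1 might look like this:
+
+```bash
+curl -X PUT -H "Content-Type: application/json" \
+  -d "{ \"title\": \"Hello again\" }" \
+  http://localhost:3042/pages/1
+```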
+
+:::tip
+
+Take a look at the [REST API reference](/reference/sql-rest/introduction.md) for an
+overview of the REST interface that the API provides.
+
+:::
+
+#### Swagger OpenAPI documentation
+
+You can explore the OpenAPI documentation for your REST API in the Swagger UI at
+[http://localhost:3042/documentation](http://localhost:3042/documentation)
+
+### Use the GraphQL API interface
+
+Open [http://localhost:3042/graphiql](http://localhost:3042/graphiql) in your
+web browser to explore the GraphQL interface of your API.
+
+Try out this GraphQL query to retrieve all pages from your API:
+
+```graphql
+query {
+ pages {
+ id
+ title
+ }
+}
+```
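+
+The GraphQL API also exposes generated mutations. For example, you could try creating a page with a mutation like this (a sketch; check the GraphiQL schema explorer for the exact mutation names generated for your database):
+
+```graphql
+mutation {
+  savePage(input: { title: "Hello again" }) {
+    id
+    title
+  }
+}
+```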
+
+:::tip
+
+Learn more about your API's GraphQL interface in the
+[GraphQL API reference](/reference/sql-graphql/introduction.md).
+
+:::
+
+
diff --git a/docs/guides/add-custom-functionality/extend-graphql.md b/docs/guides/add-custom-functionality/extend-graphql.md
new file mode 100644
index 0000000000..3d91d4bdbc
--- /dev/null
+++ b/docs/guides/add-custom-functionality/extend-graphql.md
@@ -0,0 +1,117 @@
+# Extend GraphQL Schema
+
+## Sum Function
+
+Copy and paste this code into the `./sample-plugin.js` file:
+
+```js
+'use strict'
+module.exports = async(app, opts) => {
+ app.graphql.extendSchema(`
+ extend type Query {
+ add(x: Int, y: Int): Int
+ }
+ `)
+ app.graphql.defineResolvers({
+ Query: {
+ add: async (_, { x, y }) => x + y
+ }
+ })
+}
+```
+
+This adds a new GraphQL query called `add` that returns the sum of the two inputs `x` and `y`.
+
+You don't need to reload the server: it watches this file and hot-reloads itself.
+Let's query the server with the following query:
+
+```graphql
+query {
+  add(x: 1, y: 2)
+}
+```
+You can use the `curl` command to run this query:
+
+```
+$ curl --location --request POST 'http://localhost:3042/graphql' \
+--header 'Content-Type: application/json' \
+--data-raw '{"query":"query{\n add(x: 1, y: 2)\n}"}'
+```
+
+You will get this output, with the sum.
+
+```json
+{
+ "data": {
+ "add": 3
+ }
+}
+```
+
+## Extend Entities API
+
+Let's implement a `getPageByTitle` query
+
+```js
+'use strict'
+module.exports = async(app, opts) => {
+ app.graphql.extendSchema(`
+ extend type Query {
+ getPageByTitle(title: String): Page
+ }
+ `)
+ app.graphql.defineResolvers({
+ Query: {
+ getPageByTitle: async(_, { title }) => {
+ const res = await app.platformatic.entities.page.find({
+ where: {
+ title: {
+ eq: title
+ }
+ }
+ })
+ if (res) {
+ return res[0]
+ }
+ return null
+ }
+ }
+ })
+}
+```
+
+The `Page` GraphQL type is automatically defined by Platformatic DB at startup.
+
+We are going to run this code against the following GraphQL query:
+
+```graphql
+query{
+ getPageByTitle(title: "First Page"){
+ id
+ title
+ }
+}
+```
+
+You can use the `curl` command to run this query:
+```
+$ curl --location --request POST 'http://localhost:3042/graphql' \
+--header 'Content-Type: application/json' \
+--data-raw '{"query":"query{\n getPageByTitle(title: \"First Page\"){\n id\n title\n }\n}"}'
+```
+
+You will get an output similar to this
+
+```json
+{
+ "data": {
+ "getPageByTitle": {
+ "id": "1",
+ "title": "First Page"
+ }
+ }
+}
+```
+
diff --git a/docs/guides/add-custom-functionality/extend-rest.md b/docs/guides/add-custom-functionality/extend-rest.md
new file mode 100644
index 0000000000..ee65ed4571
--- /dev/null
+++ b/docs/guides/add-custom-functionality/extend-rest.md
@@ -0,0 +1,88 @@
+# Extend REST API
+
+We will follow the same examples implemented in the [GraphQL examples](./extend-graphql): a sum function and an API to get pages by title.
+
+## Sum Function
+
+Copy and paste this code into the `./sample-plugin.js` file:
+
+```js
+'use strict'
+module.exports = async (app, opts) => {
+  // Use POST so that `x` and `y` can be read from the JSON request body
+  app.post('/sum', async (req, reply) => {
+    const { x, y } = req.body
+    return { sum: x + y }
+ })
+}
+```
+
+You don't need to reload the server: it watches this file and hot-reloads itself.
+
+Let's make a `POST /sum` request to the server with the following body:
+
+```json
+{
+ "x": 1,
+ "y": 2
+}
+```
+
+You can use the `curl` command to make this request:
+
+```
+$ curl --location --request POST 'http://localhost:3042/sum' \
+--header 'Content-Type: application/json' \
+--data-raw '{
+ "x": 1,
+ "y": 2
+}'
+```
+
+You will get this output, with the sum.
+
+```json
+{
+ "sum": 3
+}
+```
+
+## Extend Entities API
+
+Let's implement a `/page-by-title` endpoint using the Entities API:
+
+```js
+'use strict'
+module.exports = async (app, opts) => {
+  app.get('/page-by-title', async (req, reply) => {
+    // Read the title to search for from the query string, e.g. ?title=First%20Page
+    const { title } = req.query
+    const res = await app.platformatic.entities.page.find({
+ where: {
+ title: {
+ eq: title
+ }
+ }
+ })
+ if (res) {
+ return res[0]
+ }
+ return null
+ })
+}
+```
+We will make a `GET /page-by-title?title=First%20Page` request, and we expect a single page as output.
+
+You can use the `curl` command to make this request:
+```
+$ curl --location --request GET 'http://localhost:3042/page-by-title?title=First%20Page'
+```
+
+You will get an output similar to this
+
+```json
+{
+ "id": "1",
+ "title": "First Page",
+ "body": "This is the first sample page"
+}
+```
+
diff --git a/docs/guides/add-custom-functionality/introduction.md b/docs/guides/add-custom-functionality/introduction.md
new file mode 100644
index 0000000000..3c98ee80c3
--- /dev/null
+++ b/docs/guides/add-custom-functionality/introduction.md
@@ -0,0 +1,49 @@
+# Add Custom Functionality
+
+If you want to extend Platformatic DB features, you can register a plugin, which takes the form of a standard [Fastify](https://fastify.io) plugin.
+
+The config file specifies where the plugin file is located, as in the example below:
+
+```json
+{
+ ...
+ "plugin": {
+ "path": "./plugin/index.js"
+ }
+}
+```
+The path is relative to the config file path.
+
+Since it uses [fastify-isolate](https://github.com/mcollina/fastify-isolate) under the hood, all other options of that package may be specified under the `plugin` property.
+
+Once the config file is set up, you can write your plugin:
+
+```js
+module.exports = async function (app) {
+ app.log.info('plugin loaded')
+ // Extend GraphQL Schema with resolvers
+ app.graphql.extendSchema(`
+ extend type Query {
+ add(x: Int, y: Int): Int
+ }
+ `)
+ app.graphql.defineResolvers({
+ Query: {
+ add: async (_, { x, y }) => x + y
+ }
+ })
+
+ // Create a new route, see https://www.fastify.io/docs/latest/Reference/Routes/ for more info
+ app.post('/sum', (req, reply) => {
+ const {x, y} = req.body
+ return { result: x + y }
+ })
+
+ // access platformatic entities data
+ app.get('/all-entities', (req, reply) => {
+ const entities = Object.keys(app.platformatic.entities)
+ return { entities }
+ })
+}
+
+```
diff --git a/docs/guides/add-custom-functionality/prerequisites.md b/docs/guides/add-custom-functionality/prerequisites.md
new file mode 100644
index 0000000000..52eb6783fa
--- /dev/null
+++ b/docs/guides/add-custom-functionality/prerequisites.md
@@ -0,0 +1,110 @@
+# Prerequisites
+
+In the following examples we assume that you have already:
+- cloned the `platformatic/platformatic` repository from GitHub
+- run `pnpm install` to install all dependencies
+- installed [Docker](https://docker.io) and [`docker-compose`](https://docs.docker.com/compose/install/), and have them running on your machine
+
+## Config File
+
+Create a `platformatic.db.json` file in the project root; it will be loaded automatically by Platformatic (no need for the `-c, --config` flag).
+
+```json
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 3042,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ },
+ "migrations": {
+ "dir": "./migrations",
+ "table": "versions"
+ },
+ "plugin": {
+ "path": "plugin.js"
+ }
+}
+```
+
+- Once Platformatic DB starts, its API will be available at `http://127.0.0.1:3042`
+- It will connect to and read the schema from a PostgreSQL database
+- It will read migrations from the `./migrations` directory
+- It will load custom functionality from the `./plugin.js` file
+
+## Database and Migrations
+
+Start the database using the sample `docker-compose.yml` file.
+
+```
+$ docker-compose up -d postgresql
+```
+
+For migrations, create a `./migrations` directory and a `001.do.sql` file with the following contents:
+
+```sql
+CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(255) NOT NULL,
+ body TEXT NOT NULL
+);
+INSERT INTO pages (title, body) VALUES ('First Page', 'This is the first sample page');
+INSERT INTO pages (title, body) VALUES ('Second Page', 'This is the second sample page');
+INSERT INTO pages (title, body) VALUES ('Third Page', 'This is the third sample page');
+```
+
+## Plugin
+
+Copy and paste this boilerplate code into the `./plugin.js` file. We will fill it in throughout the examples.
+```js
+'use strict'
+
+module.exports = async (app, opts) => {
+ // we will fill this later
+}
+```
+
+## Start the server
+
+Run
+
+```
+$ platformatic db start
+```
+
+You will get an output similar to this
+
+```
+ /////////////
+ ///// /////
+ /// ///
+ /// ///
+ /// ///
+ && /// /// &&
+ &&&&&& /// /// &&&&&&
+ &&&& /// /// &&&&
+ &&& /// /// &&&&&&&&&&&&
+ &&& /// /////// //// && &&&&&
+ && /// /////////////// &&&
+ &&& /// /// &&&
+ &&& /// // &&
+ &&& /// &&
+ &&& /// &&&
+ &&&& /// &&&
+ &&&&&% /// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
+ ///
+ ///
+ ///
+ ///
+ ///
+ ///
+
+[11:19:46.562] INFO (65122): running 001.do.sql
+[11:19:46.929] INFO (65122): server listening
+ url: "http://127.0.0.1:3042"
+```
+
+Now it's possible to follow the examples: [extend the GraphQL schema](./extend-graphql) and [extend the REST API](./extend-rest).
diff --git a/docs/guides/deployment.md b/docs/guides/deployment.md
new file mode 100644
index 0000000000..a38ec2361c
--- /dev/null
+++ b/docs/guides/deployment.md
@@ -0,0 +1,218 @@
+# Deployment
+
+Requirements:
+
+1. A Dockerfile with access to the `platformatic` CLI
+2. A fly.io account
+3. A Platformatic app that works locally
+
+## On Fly.io
+
+1. You will need a fly.io account and the CLI tool: https://fly.io/docs/hands-on/
+2. Navigate to your project on your local machine
+3. Create a **Dockerfile**:
+ ```dockerfile
+ FROM platformatic/platformatic:latest
+
+ USER root
+
+ WORKDIR /opt/
+ COPY migrations migrations
+ COPY platformatic.db.json platformatic.db.json
+
+ EXPOSE 3042
+
+ CMD ["platformatic", "db"]
+ ```
+4. Create an app on fly: `fly launch --no-deploy --generate-name --org personal --region mad`
+ * or just `fly launch` and follow the prompts
+ * if there is no database at this point, `--no-deploy` can be removed
+5. Expose the correct port, matching **platformatic.db.json** and **Dockerfile**:
+ ```diff
+ [[services]]
+ http_checks = []
+ - internal_port = 8080
+ + internal_port = 3042
+ processes = ["app"]
+ protocol = "tcp"
+ script_checks = []
+ ```
+6. Now deploy: `fly deploy`
+
+### With sqlite
+
+1. Follow steps above, skipping deployment until the end
+2. Create a volume for database storage: `fly volumes create data`
+ * will create storage in the same region as application
+ * defaults to 3GB size, use `-s` to change: `-s 10` is 10GB
+3. Update the mount in **fly.toml**, replacing `<app-name>` with your app name:
+ ```toml
+   [mounts]
+   source = "data"
+   destination = "/opt/<app-name>/.platformatic/data"
+ ```
+4. Create a directory in the project; this is where the SQLite database will go:
+ ```bash
+ mkdir -p .platformatic/data
+ touch .platformatic/data/.gitkeep
+ ```
+5. Make sure sqlite databases are ignored to avoid inconsistencies in
+ deployment:
+ ```bash
+ echo "*.db" >> .gitignore
+ ```
+6. Update the connection string to the SQLite database, replacing `<database-name>` with a name for the database file:
+ ```json
+   {
+     "core": {
+       "connectionString": "sqlite://.platformatic/data/<database-name>.db"
+     }
+   }
+ ```
+7. Add a migrations folder, migrations, and configuration. _Note:_ the app will not run
+   if a migrations folder exists but contains no migrations.
+ 1. Create folder and simple migration if not already available:
+ ```bash
+ mkdir migrations
+ echo "CREATE TABLE demo (id uuid PRIMARY KEY);" > migrations/001.do.sql
+ ```
+ 2. Update configuration:
+ ```json
+ {
+ "migrations": {
+ "dir": "./migrations"
+ }
+ }
+ ```
+8. Optionally, [add `sqlite` to the **Dockerfile** to help with debugging](#adding-sqlite-for-debugging)
+9. Deploy the app: `fly deploy`
+
+#### Adding `sqlite` for debugging
+
+Create a script for launching the database, call it **db-cli.sh**:
+```bash
+#!/bin/sh
+set -x
+# DSN will be defined in the Dockerfile
+sqlite3 $DSN
+```
+
+Add the following snippet to the **Dockerfile**:
+```dockerfile
+# Setup sqlite viewer
+# Replace <app-name> with your app name
+RUN apk add sqlite
+ENV DSN "/opt/<app-name>/.platformatic/data/demo.db"
+COPY db-cli.sh /usr/local/bin/db-cli
+RUN chmod +x /usr/local/bin/db-cli
+```
+
+With fly.io, it becomes easy to boot directly into the database by running the
+following command from the local machine:
+
+```bash
+fly ssh console -C db-cli
+```
+
+#### Adding Litestream and S3 for backups
+
+This requires an AWS account and the appropriate setup in AWS. Follow the
+[Litestream guide for configuring an AWS user](https://litestream.io/guides/s3/) and then come back here to
+integrate with Platformatic and Fly.
+
+Once AWS is set up, store the credentials on Fly:
+```bash
+fly secrets set \
+ AWS_ACCESS_KEY_ID=some-access-key \
+ AWS_SECRET_ACCESS_KEY=some-access-secret
+```
+
+Update **fly.toml** with the bucket name:
+```toml
+[env]
+ AWS_BACKUP_BUCKET = "bucket-name"
+```
+
+Litestream is configured through its standard YAML file. Create
+a **litestream.yml** file in the project with the following contents:
+```yml
+dbs:
+  # make sure to replace <app-name> and <database-name>
+  - path: /opt/<app-name>/.platformatic/data/<database-name>.db
+ replicas:
+ - url: s3://${AWS_BACKUP_BUCKET}
+ access-key-id: ${AWS_ACCESS_KEY_ID}
+ secret-access-key: ${AWS_SECRET_ACCESS_KEY}
+```
+
+To get automatic database replication and restoration, a small Bash script is
+used as the **Dockerfile** `CMD`:
+```bash
+#!/bin/bash
+
+if [ ! -f "$DSN" ]
+then
+ echo "Restoring database"
+ litestream restore -v "$DSN"
+fi
+
+# Remember to replace <app-name> below with your app name
+echo "Starting Litestream & application"
+litestream replicate -exec "platformatic db --config /opt/<app-name>/platformatic.db.json"
+```
+
+Finally, the existing Dockerfile needs a number of changes. Start with the
+Litestream base image:
+
+```dockerfile
+FROM litestream/litestream:0.3.9 AS litestream
+
+FROM registry.fly.io/platformatic-private:latest
+```
+
+Copy Litestream into the platformatic image:
+```dockerfile
+USER root
+COPY --from=litestream /usr/local/bin/litestream /usr/local/bin/litestream
+```
+
+Copy the runner and configuration:
+```dockerfile
+COPY run.sh /run.sh
+COPY litestream.yml /etc/litestream.yml
+```
+
+Last of all, run from **run.sh**:
+```dockerfile
+CMD /run.sh
+```
+
+With Litestream and the database tools, the final image should look something
+like this:
+```dockerfile
+FROM litestream/litestream:0.3.9 AS litestream
+
+FROM registry.fly.io/platformatic-private:latest
+
+USER root
+COPY --from=litestream /usr/local/bin/litestream /usr/local/bin/litestream
+
+RUN apk add sqlite bash ca-certificates curl
+
+# Set environment variables.
+ENV DSN "/opt/<app-name>/.platformatic/data/<database-name>.db"
+COPY image/db-cli /usr/local/bin/db-cli
+RUN chmod +x /usr/local/bin/db-cli
+
+EXPOSE 3042
+
+ADD litestream.yml /etc/litestream.yml
+ADD run.sh /run.sh
+
+# Application specific files
+WORKDIR /opt/<app-name>
+COPY migrations migrations
+COPY platformatic.db.json platformatic.db.json
+
+CMD /run.sh
+```
diff --git a/docs/guides/jwt-auth0.md b/docs/guides/jwt-auth0.md
new file mode 100644
index 0000000000..454b703c25
--- /dev/null
+++ b/docs/guides/jwt-auth0.md
@@ -0,0 +1,31 @@
+# Configure JWT with Auth0
+
+[Auth0](https://auth0.com/) is a powerful authentication and authorization service provider that can be integrated with Platformatic DB through [JSON Web Tokens](https://jwt.io/) (JWT).
+When a user is authenticated, Auth0 creates a JWT with all the necessary security information and custom claims (like `X-PLATFORMATIC-ROLE`, see [User Metadata](../reference/db-authorization/intro#user-metadata)) and signs the token.
+
+Platformatic DB needs the correct public key to verify the JWT signature.
+The fastest way is to leverage [JWKS](https://www.rfc-editor.org/rfc/rfc7517), since Auth0 exposes a [JWKS](https://www.rfc-editor.org/rfc/rfc7517) endpoint for each tenant.
+Given an Auth0 tenant's `issuer` URL, the (public) keys are accessible at `${issuer}/.well-known/jwks.json`.
+For instance, if the `issuer` is `https://dev-xxx.us.auth0.com/`, the public keys are accessible at `https://dev-xxx.us.auth0.com/.well-known/jwks.json`.
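+
+You can quickly verify that your tenant's JWKS endpoint is reachable with a plain HTTP request, for example:
+
+```bash
+curl https://dev-xxx.us.auth0.com/.well-known/jwks.json
+```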
+
+To configure Platformatic DB authorization to use [JWKS](https://www.rfc-editor.org/rfc/rfc7517) with Auth0, set:
+
+```json
+...
+"authorization": {
+  "jwt": {
+    "jwks": {
+      "allowedDomains": [
+        "https://dev-xxx.us.auth0.com/"
+      ]
+    }
+  }
+}
+...
+```
+
+Note that specifying `allowedDomains` is critical: it ensures that only JWTs issued by one of the allowed domains are accepted.
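+
+Once this is configured, clients include the Auth0-issued JWT in the `Authorization` header when calling the API. A minimal sketch, assuming the token is stored in a `TOKEN` environment variable and a `pages` entity exists in your database:
+
+```bash
+curl -H "Authorization: Bearer $TOKEN" http://localhost:3042/pages
+```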
+
+
diff --git a/docs/guides/seed-a-database.md b/docs/guides/seed-a-database.md
new file mode 100644
index 0000000000..831e9e32c0
--- /dev/null
+++ b/docs/guides/seed-a-database.md
@@ -0,0 +1,30 @@
+# Seed a Database
+
+A database is as useful as the data that it contains: a fresh, empty database
+isn't always the best starting point. We can add a few rows from our migrations
+using SQL, but we might need to use JavaScript from time to time.
+
+The [platformatic db seed](/reference/cli.md#seed) command allows us to run a
+script that will populate — or "seed" — our database.
+
+## Example
+
+Our seed script should export a `Function` that accepts an argument:
+an instance of [`@platformatic/sql-mapper`](/reference/sql-mapper/introduction.md).
+
+```javascript title="seed.js"
+'use strict'
+
+module.exports = async function ({ entities, db, sql }) {
+ await entities.graph.save({ input: { name: 'Hello' } })
+ await db.query(sql`
+ INSERT INTO graphs (name) VALUES ('Hello 2');
+ `)
+}
+```
+
+We can then run the seed script with the Platformatic CLI:
+
+```bash
+npx platformatic db seed seed.js
+```
diff --git a/docs/reference/cli.md b/docs/reference/cli.md
new file mode 100644
index 0000000000..149aa92920
--- /dev/null
+++ b/docs/reference/cli.md
@@ -0,0 +1,292 @@
+---
+toc_max_heading_level: 4
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+import TOCInline from '@theme/TOCInline';
+
+# Platformatic CLI
+
+## Installation and usage
+
+Install the Platformatic CLI as a dependency for your project:
+
+
+<Tabs>
+<TabItem value="npm" label="npm">
+
+```bash
+npm install platformatic
+```
+
+</TabItem>
+<TabItem value="yarn" label="yarn">
+
+```bash
+yarn add platformatic
+```
+
+</TabItem>
+<TabItem value="pnpm" label="pnpm">
+
+```bash
+pnpm add platformatic
+```
+
+</TabItem>
+</Tabs>
+
+Once it's installed you can run it with:
+
+<Tabs>
+<TabItem value="npm" label="npm">
+
+```bash
+npx platformatic
+```
+
+</TabItem>
+<TabItem value="yarn" label="yarn">
+
+```bash
+yarn platformatic
+```
+
+</TabItem>
+<TabItem value="pnpm" label="pnpm">
+
+```bash
+pnpm platformatic
+```
+
+</TabItem>
+</Tabs>
+
+:::info
+
+The `platformatic` package can be installed globally, but installing it as a
+project dependency ensures that everyone working on the project is using the
+same version of the Platformatic CLI.
+
+:::
+
+## Commands
+
+The Platformatic CLI provides the following commands:
+
+<TOCInline toc={toc} />
+
+### help
+
+
+```
+Welcome to Platformatic. Available commands are:
+
+* help - Display this message
+* help <command> - shows more information about a command.
+* db - start Platformatic DB; type `platformatic db help` to know more.
+```
+
+
+### db
+
+```bash
+platformatic db
+```
+
+
+#### help
+
+Available commands:
+
+* `help` - show this help message.
+* `help <command>` - shows more information about a command.
+* `init` - initiate default application.
+* `start` - start the server.
+* `migrate` - run migrations.
+* `seed` - run a seed file.
+
+
+#### init
+
+Initiate default Platformatic DB application:
+
+ $ platformatic db init
+
+As a result of executing this command, the `platformatic.db.json` configuration
+file and the `migrations` folder with migration examples will be generated.
+
+Options:
+
+ * `-h, --hostname <hostname>`: The hostname where the Platformatic DB server will listen for connections.
+ * `-p, --port <port>`: The port where the Platformatic DB server will listen for connections.
+ * `-db, --database <database>`: The name of the database to use. Default: `sqlite`.
+ * `-m, --migrations <path>`: Relative path to the migrations folder. Default: `./migrations`.
+ * `-t, --types <boolean>`: Set to `true` to enable type autogeneration. Default: `true`.
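+
+For example, a sketch that combines the options above to initialise an SQLite-backed application:
+
+```bash
+npx platformatic db init --hostname 127.0.0.1 --port 3042 --database sqlite --migrations ./migrations
+```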
+
+
+#### migrate
+
+Apply all configured migrations to the database:
+
+ $ platformatic db migrate
+
+The migrations will be applied in the order they are specified in the
+folder defined in the configuration file. If you want to apply a specific migration,
+you can use the `--to` option:
+
+ $ platformatic db migrate --to 001
+
+Here is an example migration:
+
+ CREATE TABLE graphs (
+ id SERIAL PRIMARY KEY,
+ name TEXT
+ );
+
+You can always rollback to a specific migration with:
+
+ $ platformatic db migrate --to VERSION
+
+Use 000 to reset to the initial state.
+
+Options:
+
+ * `-c, --config <path>`: Path to the configuration file.
+ * `-t, --to <version>`: Migrate to a specific version.
+
+If not specified, the configuration will be loaded from
+`platformatic.db.json`, `platformatic.db.yml`, or `platformatic.db.tml` in the current directory.
+You can find more details about the configuration format at:
+https://oss.platformatic.dev/docs/reference/configuration.
+
+
+#### schema
+
+Generate a schema from the database and print it to standard output:
+
+* `schema graphql` - generate the GraphQL schema
+* `schema openapi` - generate the OpenAPI schema
+
+Options:
+
+ -c, --config FILE Specify a configuration file to use
+
+If not specified, the configuration will be loaded from
+`platformatic.db.json`, `platformatic.db.yml`, or `platformatic.db.tml` in the current directory.
+You can find more details about the configuration format at:
+https://oss.platformatic.dev/docs/reference/configuration.
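+
+For example, to save the generated OpenAPI schema to a file you could run (a sketch using plain shell redirection):
+
+```bash
+npx platformatic db schema openapi > openapi.json
+```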
+
+
+#### seed
+
+Load a seed into the database. This is a convenience method that loads
+a JavaScript file and configures @platformatic/sql-mapper to connect to
+the database specified in the configuration file.
+
+Here is an example of a seed file:
+
+ 'use strict'
+
+ module.exports = async function ({ entities, db, sql }) {
+ await entities.graph.save({ input: { name: 'Hello' } })
+ await db.query(sql`
+ INSERT INTO graphs (name) VALUES ('Hello 2');
+ `)
+ }
+
+You can run this using the `seed` command:
+
+ $ platformatic db seed seed.js
+
+Options:
+
+ * `--config` - Path to the configuration file.
+
+If not specified, the configuration will be loaded from
+`platformatic.db.json`, `platformatic.db.yml`, or `platformatic.db.tml` in the current directory.
+You can find more details about the configuration format at:
+https://oss.platformatic.dev/docs/reference/configuration.
+
+
+#### start
+
+Start the Platformatic DB server with the following command:
+
+ $ platformatic db start
+
+You will need a configuration file. Here is an example to get you started;
+save the following as `platformatic.db.json`:
+
+ {
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "core": {
+ "connectionString": "sqlite://./db"
+ },
+ "migrations": {
+ "dir": "./migrations"
+ }
+ }
+
+
+Remember to create a migration; run the `db help migrate` command to learn more.
+
+All outstanding migrations will be applied to the database unless the
+`migrations.autoApply` configuration option is set to false.
+
+By sending the SIGUSR2 signal, the server can be reloaded.
+
+Options:
+
+ -c, --config FILE Specify a configuration file to use
+ --watch-ignore LIST Specify a comma separated list of glob patterns to
+ ignore when watching for changes
+
+If not specified, the configuration will be loaded from `platformatic.db.json`,
+`platformatic.db.yml`, or `platformatic.db.tml` in the current directory. You can find more details about
+the configuration format at:
+https://oss.platformatic.dev/docs/reference/configuration.
+
+
+#### types
+
+Generate TypeScript types for your entities from the database.
+
+ $ platformatic db types
+
+As a result of executing this command, Platformatic DB will generate a `types`
+folder with a TypeScript file for each database entity. It will also generate a
+`global.d.ts` file that injects the types into the application instance.
+
+In order to add type support to your plugins, you need to install some additional
+dependencies. To do this, copy and run the `npm install` command with the dependencies
+that `platformatic db types` suggests.
+
+Here is an example of a Platformatic `plugin.js` with JSDoc support.
+You can use it to add autocomplete to your code.
+
+    /// <reference path="./global.d.ts" />
+    'use strict'
+
+    /** @param {import('fastify').FastifyInstance} app */
+    module.exports = async function (app) {
+      app.get('/movie', async () => {
+        const movies = await app.platformatic.entities.movie.find({
+          where: { title: { eq: 'The Hitchhiker\'s Guide to the Galaxy' } }
+        })
+        return movies[0].id
+      })
+    }
+
+If not specified, the configuration will be loaded from
+`platformatic.db.json`, `platformatic.db.yml`, or `platformatic.db.tml` in the current directory.
+You can find more details about the configuration format at:
+https://oss.platformatic.dev/docs/reference/configuration.
+
+
\ No newline at end of file
diff --git a/docs/reference/configuration.md b/docs/reference/configuration.md
new file mode 100644
index 0000000000..132b5e6769
--- /dev/null
+++ b/docs/reference/configuration.md
@@ -0,0 +1,391 @@
+# Configuration
+
+Platformatic DB is configured with a configuration file. It supports the use
+of environment variables as setting values with [configuration placeholders](#configuration-placeholders).
+
+## Configuration file
+
+If the Platformatic CLI finds a file in the current working directory matching
+one of these filenames, it will automatically load it:
+
+- `platformatic.db.json`
+- `platformatic.db.json5`
+- `platformatic.db.yml` or `platformatic.db.yaml`
+- `platformatic.db.tml`
+
+Alternatively, a [`--config` option](/reference/cli.md#db) with a configuration
+filepath can be passed to most `platformatic db` CLI commands.
+
+The configuration examples in this reference use JSON.
+
+### Supported formats
+
+| Format | Extensions |
+| :-- | :-- |
+| JSON | `.json` |
+| JSON5 | `.json5` |
+| YAML | `.yml`, `.yaml` |
+| TOML | `.tml` |
+
+Comments are supported by the JSON5, YAML and TOML file formats.
+
+## Settings
+
+Configuration settings are organised into the following groups:
+
+- [`core`](#core) **(required)**
+- [`dashboard`](#dashboard)
+- [`metrics`](#metrics)
+- [`migrations`](#migrations)
+- [`plugin`](#plugin)
+- [`server`](#server) **(required)**
+- [`authorization`](#authorization)
+
+Sensitive configuration settings, such as a database connection URL that contains
+a password, should be set using [configuration placeholders](#configuration-placeholders).
+
+### `core`
+
+A **required** object with the following settings:
+
+- **`connectionString`** (**required**, `string`) — Database connection URL.
+ - Example: `postgres://user:password@my-database:5432/db-name`
+ - Platformatic DB supports MySQL, MariaDB, PostgreSQL and SQLite.
+- **`graphql`** (`boolean` or `object`, default: `true`) — Controls the GraphQL API interface, with optional GraphiQL UI.
+
+ _Examples_
+
+ Enables GraphQL support
+
+ ```json
+ {
+ "core": {
+ ...
+ "graphql": true
+ }
+ }
+ ```
+
+ Enables GraphQL support with GraphiQL
+
+ ```json
+ {
+ "core": {
+ ...
+ "graphql": {
+ "graphiql": true
+ }
+ }
+ }
+ ```
+- **`openapi`** (`boolean` or `object`, default: `true`) — Enables OpenAPI REST support.
+ - If value is an object, all [OpenAPI v3](https://swagger.io/specification/) allowed properties can be passed.
+ - Platformatic DB uses [`@fastify/swagger`](https://github.com/fastify/fastify-swagger) under the hood to manage this configuration.
+
+ _Examples_
+
+ Enables OpenAPI
+
+ ```json
+ {
+ "core": {
+ ...
+ "openapi": true
+ }
+ }
+ ```
+
+ Enables OpenAPI with options
+
+ ```json
+ {
+ "core": {
+ ...
+ "openapi": {
+ "info": {
+ "title": "Platformatic DB",
+ "description": "Exposing a SQL database as REST"
+ }
+ }
+ }
+ }
+ ```
+- **`ignore`** (`object`) — Key/value object that defines which database tables should not be mapped as API entities.
+
+ _Examples_
+
+ ```json
+ {
+ "core": {
+ ...
+ "ignore": {
+ "versions": true // "versions" table will be not mapped with GraphQL/REST APIs
+ }
+ }
+ }
+ ```
+
+### `dashboard`
+
+An optional object with the following settings:
+
+- **`rootPath`** (`boolean`, default: `true`) — Make the dashboard available at the root path (`/`).
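+
+For example, a minimal sketch that stops the dashboard from being served at the root path:
+
+```json
+{
+  "dashboard": {
+    "rootPath": false
+  }
+}
+```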
+
+### `metrics`
+
+Configuration for a [Prometheus](https://prometheus.io/) server that will export monitoring metrics
+for the current server instance. It uses [`fastify-metrics`](https://github.com/SkeLLLa/fastify-metrics)
+under the hood.
+
+This setting can be a `boolean` or an `object`. If set to `true` the Prometheus server will listen on `http://0.0.0.0:9090`.
+
+Supported object properties:
+
+- **`hostname`** (`string`) — The hostname where Prometheus server will listen for connections.
+- **`port`** (`number`) — The port where Prometheus server will listen for connections.
+- **`auth`** (`object`) — Basic Auth configuration. **`username`** and **`password`** are required here
+ (use [environment variables](#environment-variables)).
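+
+For example, a sketch that exposes metrics on a custom host and port behind Basic Auth (the placeholder names are illustrative and must exist as environment variables):
+
+```json
+{
+  "metrics": {
+    "hostname": "127.0.0.1",
+    "port": 9090,
+    "auth": {
+      "username": "{PLT_METRICS_USERNAME}",
+      "password": "{PLT_METRICS_PASSWORD}"
+    }
+  }
+}
+```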
+
+### `migrations`
+
+Configures [Postgrator](https://github.com/rickbergfalk/postgrator) to run migrations against the database.
+
+An optional object with the following settings:
+
+- **`dir`** (**required**, `string`): Relative path to the migrations directory.
+- **`autoApply`** (`boolean`, default: `true`): Automatically apply migrations when Platformatic DB server starts.
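+
+For example, a sketch using the settings above to disable automatic migrations at startup:
+
+```json
+{
+  "migrations": {
+    "dir": "./migrations",
+    "autoApply": false
+  }
+}
+```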
+
+### `plugin`
+
+An optional object that defines a plugin to be loaded with [`fastify-isolate`](https://github.com/mcollina/fastify-isolate):
+
+- **`path`** (**required**, `string`): Relative path to plugin's entry point.
+
+All properties will be passed to `fastify-isolate`.
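+
+For example (a sketch; the plugin file name is illustrative):
+
+```json
+{
+  "plugin": {
+    "path": "./plugin.js"
+  }
+}
+```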
+
+### `server`
+
+A **required** object with the following settings:
+
+- **`hostname`** (**required**, `string`) — Hostname where Platformatic DB server will listen for connections.
+- **`port`** (**required**, `number`) — Port where Platformatic DB server will listen for connections.
+- **`healthCheck`** (`boolean` or `object`) — Enables the health check endpoint.
+ - Powered by [`@fastify/under-pressure`](https://github.com/fastify/under-pressure).
+ - The value can be an object, used to specify the interval between checks in milliseconds (default: `5000`)
+
+ _Example_
+
+ ```json
+ {
+ "server": {
+ ...
+ "healthCheck": {
+ "interval": 2000
+ }
+ }
+ }
+ ```
+- **`cors`** (`object`) — Configuration for Cross-Origin Resource Sharing (CORS) headers.
+ - All options will be passed to the [`@fastify/cors`](https://github.com/fastify/fastify-cors) plugin.
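+
+For example, a sketch that reflects the request origin, effectively allowing cross-origin requests from any origin (any option accepted by `@fastify/cors` can be used here):
+
+```json
+{
+  "server": {
+    "cors": {
+      "origin": true
+    }
+  }
+}
+```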
+
+### `authorization`
+
+Authorization settings can be set with an optional `authorization` object, for example:
+
+```json
+ "authorization": {
+ "adminSecret": "platformatic",
+ "rules": [
+ ...
+ ]
+ }
+```
+
+- **`adminSecret`** (`string`, optional) — If defined, it will be the password used to access the dashboard and the string to send within the `x-platformatic-admin-secret` header when performing GraphQL/REST API calls.
+- **`rules`** (`array`) — Authorization rules that describe the CRUD actions that users are allowed to perform.
+
+Note that if an `authorization` section is present, but _**no rules**_ are specified, no CRUD operations are allowed (unless `adminSecret` is passed).
+
+#### Authorization rules
+
+Every rule must specify:
+- `role` — the role name. It's a string and must match with the role(s) set by the external authentication service
+- `entity` — the Platformatic DB entity
+- A set of optional [`defaults`](#defaults)
+- One entry for each supported CRUD operation: `find`, `save`, `delete`
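+
+Putting this together, a complete rule could look like the following sketch (the role and entity names are illustrative):
+
+```json
+{
+  "role": "user",
+  "entity": "page",
+  "defaults": {
+    "userId": "X-PLATFORMATIC-USER-ID"
+  },
+  "find": {
+    "checks": {
+      "userId": "X-PLATFORMATIC-USER-ID"
+    }
+  },
+  "save": {
+    "checks": {
+      "userId": "X-PLATFORMATIC-USER-ID"
+    }
+  },
+  "delete": false
+}
+```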
+
+#### Operation options
+
+Every operation can specify `checks` to be used for authorization.
+The operation value can be `false` (operation disabled) or `true` (operation enabled with no checks).
+
+To specify more fine-grained authorization controls, add a `checks` field, e.g.:
+
+```json
+{
+ "role": "user",
+ "entity": "page",
+ "find": {
+ "checks": {
+ "userId": "X-PLATFORMATIC-USER-ID"
+ }
+ },
+ ...
+}
+
+```
+
+In this example, when a user with a `user` role executes a `findPage`, they can
+access all the data that has `userId` equal to the value in user metadata with
+key `X-PLATFORMATIC-USER-ID`.
+
+Note that `"userId": "X-PLATFORMATIC-USER-ID"` is syntactic sugar for:
+
+```json
+ "find": {
+ "checks": {
+ "userId": {
+ "eq": "X-PLATFORMATIC-USER-ID"
+ }
+ }
+ }
+```
+
+It's possible to specify more complex rules using all the [supported where clause operators](./sql-mapper/entities/api.md#where-clause).
+
+Note that `userId` MUST exist as a field in the database table to use this feature.
+
+#### Fields
+
+If a `fields` array is present on an operation, Platformatic DB restricts the columns that the user can access to those in the list.
+For `save` operations, the configuration must include all the non-nullable fields (otherwise it would fail at runtime).
+Platformatic performs these checks at startup.
+
+Example:
+
+```json
+ "rule": {
+ "entity": "page",
+ "role": "user",
+ "find": {
+ "checks": {
+ "userId": "X-PLATFORMATIC-USER-ID"
+ },
+ "fields": ["id", "title"]
+ }
+ ...
+ }
+```
+
+In this case, only `id` and `title` are returned for a user with a `user` role on the `page` entity.
+
+#### Defaults
+
+Defaults are used during database inserts: the fields listed are automatically populated with values from the user metadata, e.g.:
+
+```json
+ "defaults": {
+ "userId": "X-PLATFORMATIC-USER-ID"
+ },
+```
+
+When an entity is created, the `userId` column is automatically populated with the value from the user metadata.
+
+#### Anonymous role
+
+If a user has no role, the `anonymous` role is assigned automatically. It's possible to specify a rule for it:
+
+```json
+ {
+ "role": "anonymous",
+ "entity": "page",
+ "find": false,
+ "delete": false,
+ "save": false
+ }
+```
+
+In this case, a user with no role (or with the `anonymous` role explicitly assigned) is not allowed to perform any operation on the `page` entity.
+
+#### Role and anonymous keys
+
+The roles key in user metadata defaults to `X-PLATFORMATIC-ROLE`. It's possible to change it using the `roleKey` field in the configuration.
+The same applies to the `anonymous` role, whose value can be changed using `anonymousRole`.
+
+```json
+ "authorization": {
+ "roleKey": "X-MYCUSTOM-ROLE_KEY",
+ "anonymousRole": "anonym",
+ "rules": [
+ ...
+ ]
+ }
+```
+
+## Configuration placeholders
+
+The value for any configuration setting can be replaced with an environment variable
+by adding a placeholder in the configuration file, for example `{PLT_SERVER_LOGGER_LEVEL}`.
+
+All placeholders in a configuration must be available as an environment variable
+and must meet the [allowed placeholder name](#allowed-placeholder-names) rules.
+
+### Example
+
+```json title="platformatic.db.json"
+{
+ "core": {
+ "logger": {
+ "level": "{PLT_SERVER_LOGGER_LEVEL}"
+ },
+ "connectionString": "{DATABASE_URL}"
+ },
+ "server": {
+ "port": "{PORT}"
+ }
+}
+```
+
+Platformatic will replace the placeholders in this example with the environment
+variables of the same name.
+
+### Setting environment variables
+
+If a `.env` file exists it will automatically be loaded by Platformatic using
+[`dotenv`](https://github.com/motdotla/dotenv). For example:
+
+```plaintext title=".env"
+PLT_SERVER_LOGGER_LEVEL=info
+PORT=8080
+```
+
+The `.env` file must be located in the same folder as the Platformatic configuration
+file or in the current working directory.
+
+Environment variables can also be set directly on the command line, for example:
+
+```bash
+PLT_SERVER_LOGGER_LEVEL=debug npx platformatic db
+```
+
+### Allowed placeholder names
+
+Only placeholder names prefixed with `PLT_`, or that are in this allow list, will be
+dynamically replaced in the configuration file:
+
+- `PORT`
+- `DATABASE_URL`
+
+This restriction is to avoid accidentally exposing system environment variables.
+An error will be raised by Platformatic if it finds a configuration placeholder
+that isn't allowed.
+
+The default allow list can be extended by passing a `--allow-env` CLI option with a
+comma separated list of strings, for example:
+
+```bash
+npx platformatic db --allow-env=HOST,SERVER_LOGGER_LEVEL
+```
+
+If `--allow-env` is passed as an option to the CLI, it will be merged with the
+default allow list.
diff --git a/docs/reference/db-authorization/images/http.png b/docs/reference/db-authorization/images/http.png
new file mode 100644
index 0000000000..f5221d759d
Binary files /dev/null and b/docs/reference/db-authorization/images/http.png differ
diff --git a/docs/reference/db-authorization/images/jwt.png b/docs/reference/db-authorization/images/jwt.png
new file mode 100644
index 0000000000..d1ff1a5efc
Binary files /dev/null and b/docs/reference/db-authorization/images/jwt.png differ
diff --git a/docs/reference/db-authorization/images/sources/http.excalidraw b/docs/reference/db-authorization/images/sources/http.excalidraw
new file mode 100644
index 0000000000..494e2ce0f6
--- /dev/null
+++ b/docs/reference/db-authorization/images/sources/http.excalidraw
@@ -0,0 +1,242 @@
+{
+ "type": "excalidraw",
+ "version": 2,
+ "source": "https://excalidraw.com",
+ "elements": [
+ {
+ "type": "rectangle",
+ "version": 406,
+ "versionNonce": 1629382156,
+ "isDeleted": false,
+ "id": "noa4LNz0zVFUkmX-gwJAz",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 533,
+ "y": 428,
+ "strokeColor": "#000000",
+ "backgroundColor": "#40c057",
+ "width": 196.99999999999997,
+ "height": 76.99999999999997,
+ "seed": 332847037,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [
+ {
+ "id": "_1K3dDelq8vt8hUVTGlVb",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1663655152807,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 231,
+ "versionNonce": 1401306164,
+ "isDeleted": false,
+ "id": "ohuP_nuHVS465PdPhUIMh",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 551,
+ "y": 437,
+ "strokeColor": "#000000",
+ "backgroundColor": "transparent",
+ "width": 162,
+ "height": 25,
+ "seed": 561555738,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [
+ {
+ "id": "_1K3dDelq8vt8hUVTGlVb",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1663655147358,
+ "link": null,
+ "locked": false,
+ "fontSize": 20,
+ "fontFamily": 1,
+ "text": "PLatformaticDB",
+ "baseline": 18,
+ "textAlign": "left",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "PLatformaticDB"
+ },
+ {
+ "type": "rectangle",
+ "version": 140,
+ "versionNonce": 1665950348,
+ "isDeleted": false,
+ "id": "rH-WPCYb6lBrRmg5JLygM",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 555,
+ "y": 215,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "width": 136,
+ "height": 81,
+ "seed": 775227226,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "_ta3Rw_skN-LWmDow8g30"
+ },
+ {
+ "id": "1zdLFo52d2tDStweg0KBc",
+ "type": "arrow"
+ },
+ {
+ "id": "39WfYBHJ7EpIkQxsksqi1",
+ "type": "arrow"
+ },
+ {
+ "id": "_1K3dDelq8vt8hUVTGlVb",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1663655144744,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 98,
+ "versionNonce": 269081652,
+ "isDeleted": false,
+ "id": "_ta3Rw_skN-LWmDow8g30",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 560,
+ "y": 243,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "width": 126,
+ "height": 25,
+ "seed": 71496198,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1663655144744,
+ "link": null,
+ "locked": false,
+ "fontSize": 20,
+ "fontFamily": 1,
+ "text": "App",
+ "baseline": 18,
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "rH-WPCYb6lBrRmg5JLygM",
+ "originalText": "App"
+ },
+ {
+ "type": "arrow",
+ "version": 504,
+ "versionNonce": 202873396,
+ "isDeleted": false,
+ "id": "_1K3dDelq8vt8hUVTGlVb",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 0,
+ "opacity": 100,
+ "angle": 0,
+ "x": 616.1963603621963,
+ "y": 297.81649934860275,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "width": 1.8922134556368064,
+ "height": 123.18350065139725,
+ "seed": 1918207706,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1663655153198,
+ "link": null,
+ "locked": false,
+ "startBinding": {
+ "elementId": "rH-WPCYb6lBrRmg5JLygM",
+ "focus": 0.08967398473896379,
+ "gap": 1.816499348602747
+ },
+ "endBinding": {
+ "elementId": "noa4LNz0zVFUkmX-gwJAz",
+ "focus": -0.18058859061132515,
+ "gap": 7
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ -1.8922134556368064,
+ 123.18350065139725
+ ]
+ ]
+ },
+ {
+ "type": "text",
+ "version": 303,
+ "versionNonce": 1013336204,
+ "isDeleted": false,
+ "id": "z25WF_vFoaIV4INU4t7tc",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 0,
+ "opacity": 100,
+ "angle": 0,
+ "x": 618.5,
+ "y": 325,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "width": 367,
+ "height": 40,
+ "seed": 50865242,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1663655193198,
+ "link": null,
+ "locked": false,
+ "fontSize": 16,
+ "fontFamily": 1,
+ "text": "HTTP Request\n X-PLATFORMATIC-ADMIN-SECRET: mysecret",
+ "baseline": 34,
+ "textAlign": "center",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "HTTP Request\n X-PLATFORMATIC-ADMIN-SECRET: mysecret"
+ }
+ ],
+ "appState": {
+ "gridSize": null,
+ "viewBackgroundColor": "#ffffff"
+ },
+ "files": {}
+}
\ No newline at end of file
diff --git a/docs/reference/db-authorization/images/sources/jwt.excalidraw b/docs/reference/db-authorization/images/sources/jwt.excalidraw
new file mode 100644
index 0000000000..62ba32b7c0
--- /dev/null
+++ b/docs/reference/db-authorization/images/sources/jwt.excalidraw
@@ -0,0 +1,477 @@
+{
+ "type": "excalidraw",
+ "version": 2,
+ "source": "https://excalidraw.com",
+ "elements": [
+ {
+ "type": "rectangle",
+ "version": 276,
+ "versionNonce": 619523398,
+ "isDeleted": false,
+ "id": "noa4LNz0zVFUkmX-gwJAz",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 444,
+ "y": 458,
+ "strokeColor": "#000000",
+ "backgroundColor": "#40c057",
+ "width": 196.99999999999997,
+ "height": 76.99999999999997,
+ "seed": 332847037,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1663576978685,
+ "link": null,
+ "locked": false
+ },
+ {
+ "id": "ohuP_nuHVS465PdPhUIMh",
+ "type": "text",
+ "x": 462,
+ "y": 467,
+ "width": 162,
+ "height": 25,
+ "angle": 0,
+ "strokeColor": "#000000",
+ "backgroundColor": "transparent",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "seed": 561555738,
+ "version": 153,
+ "versionNonce": 449126534,
+ "isDeleted": false,
+ "boundElements": [
+ {
+ "id": "_1K3dDelq8vt8hUVTGlVb",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1663576887697,
+ "link": null,
+ "locked": false,
+ "text": "PLatformaticDB",
+ "fontSize": 20,
+ "fontFamily": 1,
+ "textAlign": "left",
+ "verticalAlign": "top",
+ "baseline": 18,
+ "containerId": null,
+ "originalText": "PLatformaticDB"
+ },
+ {
+ "id": "Scfqm_vUdY5oyu3yerPKO",
+ "type": "rectangle",
+ "x": 759,
+ "y": 284,
+ "width": 177,
+ "height": 79,
+ "angle": 0,
+ "strokeColor": "#000000",
+ "backgroundColor": "#fab005",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "seed": 1696170074,
+ "version": 156,
+ "versionNonce": 1223669062,
+ "isDeleted": false,
+ "boundElements": [
+ {
+ "id": "1zdLFo52d2tDStweg0KBc",
+ "type": "arrow"
+ },
+ {
+ "id": "39WfYBHJ7EpIkQxsksqi1",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1663576872846,
+ "link": null,
+ "locked": false
+ },
+ {
+ "id": "mpvHgBxJx9tZFSsUN0jJ9",
+ "type": "text",
+ "x": 776,
+ "y": 294,
+ "width": 141,
+ "height": 50,
+ "angle": 0,
+ "strokeColor": "#000000",
+ "backgroundColor": "#fab005",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "seed": 1285827398,
+ "version": 108,
+ "versionNonce": 1191553562,
+ "isDeleted": false,
+ "boundElements": null,
+ "updated": 1663576872847,
+ "link": null,
+ "locked": false,
+ "text": "Authentication\nService",
+ "fontSize": 20,
+ "fontFamily": 1,
+ "textAlign": "center",
+ "verticalAlign": "top",
+ "baseline": 43,
+ "containerId": null,
+ "originalText": "Authentication\nService"
+ },
+ {
+ "id": "rH-WPCYb6lBrRmg5JLygM",
+ "type": "rectangle",
+ "x": 443,
+ "y": 281,
+ "width": 136,
+ "height": 81,
+ "angle": 0,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "seed": 775227226,
+ "version": 106,
+ "versionNonce": 1343463238,
+ "isDeleted": false,
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "_ta3Rw_skN-LWmDow8g30"
+ },
+ {
+ "id": "1zdLFo52d2tDStweg0KBc",
+ "type": "arrow"
+ },
+ {
+ "id": "39WfYBHJ7EpIkQxsksqi1",
+ "type": "arrow"
+ },
+ {
+ "id": "_1K3dDelq8vt8hUVTGlVb",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1663576973604,
+ "link": null,
+ "locked": false
+ },
+ {
+ "id": "_ta3Rw_skN-LWmDow8g30",
+ "type": "text",
+ "x": 448,
+ "y": 309,
+ "width": 126,
+ "height": 25,
+ "angle": 0,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "seed": 71496198,
+ "version": 64,
+ "versionNonce": 602759194,
+ "isDeleted": false,
+ "boundElements": null,
+ "updated": 1663576973604,
+ "link": null,
+ "locked": false,
+ "text": "App",
+ "fontSize": 20,
+ "fontFamily": 1,
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "baseline": 18,
+ "containerId": "rH-WPCYb6lBrRmg5JLygM",
+ "originalText": "App"
+ },
+ {
+ "id": "1zdLFo52d2tDStweg0KBc",
+ "type": "arrow",
+ "x": 580,
+ "y": 322.58970381012875,
+ "width": 176,
+ "height": 0.045892196853628775,
+ "angle": 0,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 0,
+ "opacity": 100,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "seed": 2082570438,
+ "version": 162,
+ "versionNonce": 440290522,
+ "isDeleted": false,
+ "boundElements": null,
+ "updated": 1663576973605,
+ "link": null,
+ "locked": false,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 176,
+ -0.045892196853628775
+ ]
+ ],
+ "lastCommittedPoint": null,
+ "startBinding": {
+ "elementId": "rH-WPCYb6lBrRmg5JLygM",
+ "focus": 0.027317205762435445,
+ "gap": 1
+ },
+ "endBinding": {
+ "elementId": "Scfqm_vUdY5oyu3yerPKO",
+ "focus": 0.02479683267347364,
+ "gap": 3
+ },
+ "startArrowhead": null,
+ "endArrowhead": "arrow"
+ },
+ {
+ "id": "39WfYBHJ7EpIkQxsksqi1",
+ "type": "arrow",
+ "x": 754,
+ "y": 345.2974248777258,
+ "width": 167,
+ "height": 0.01993352314536878,
+ "angle": 0,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 0,
+ "opacity": 100,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "seed": 1257156506,
+ "version": 109,
+ "versionNonce": 424911258,
+ "isDeleted": false,
+ "boundElements": null,
+ "updated": 1663576973605,
+ "link": null,
+ "locked": false,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ -167,
+ 0.01993352314536878
+ ]
+ ],
+ "lastCommittedPoint": null,
+ "startBinding": {
+ "elementId": "Scfqm_vUdY5oyu3yerPKO",
+ "focus": -0.5509279555750403,
+ "gap": 5
+ },
+ "endBinding": {
+ "elementId": "rH-WPCYb6lBrRmg5JLygM",
+ "focus": 0.5881890326731964,
+ "gap": 8
+ },
+ "startArrowhead": null,
+ "endArrowhead": "arrow"
+ },
+ {
+ "id": "S5RW71vc9rVAx-OPnKlmY",
+ "type": "text",
+ "x": 608,
+ "y": 352.5,
+ "width": 120,
+ "height": 60,
+ "angle": 0,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 0,
+ "opacity": 100,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "seed": 1521101082,
+ "version": 74,
+ "versionNonce": 1916745414,
+ "isDeleted": false,
+ "boundElements": null,
+ "updated": 1663576965379,
+ "link": null,
+ "locked": false,
+ "text": "JWT with \nuser metadata\nas claims",
+ "fontSize": 16,
+ "fontFamily": 1,
+ "textAlign": "center",
+ "verticalAlign": "top",
+ "baseline": 54,
+ "containerId": null,
+ "originalText": "JWT with \nuser metadata\nas claims"
+ },
+ {
+ "id": "C8QE_cwEIv2-LVJ1wekzV",
+ "type": "text",
+ "x": 595,
+ "y": 300.5,
+ "width": 114,
+ "height": 20,
+ "angle": 0,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 0,
+ "opacity": 100,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "seed": 833074458,
+ "version": 20,
+ "versionNonce": 1641523590,
+ "isDeleted": false,
+ "boundElements": null,
+ "updated": 1663576872847,
+ "link": null,
+ "locked": false,
+ "text": "Authentication",
+ "fontSize": 16,
+ "fontFamily": 1,
+ "textAlign": "center",
+ "verticalAlign": "top",
+ "baseline": 14,
+ "containerId": null,
+ "originalText": "Authentication"
+ },
+ {
+ "id": "_1K3dDelq8vt8hUVTGlVb",
+ "type": "arrow",
+ "x": 492.71712730597335,
+ "y": 370,
+ "width": 0.795498539080711,
+ "height": 86,
+ "angle": 0,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 0,
+ "opacity": 100,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "seed": 1918207706,
+ "version": 129,
+ "versionNonce": 822011738,
+ "isDeleted": false,
+ "boundElements": null,
+ "updated": 1663576977549,
+ "link": null,
+ "locked": false,
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 0.795498539080711,
+ 86
+ ]
+ ],
+ "lastCommittedPoint": null,
+ "startBinding": {
+ "elementId": "z25WF_vFoaIV4INU4t7tc",
+ "focus": 1.3591750325698646,
+ "gap": 15.28287269402665
+ },
+ "endBinding": {
+ "elementId": "ohuP_nuHVS465PdPhUIMh",
+ "focus": -0.6074045497785253,
+ "gap": 11
+ },
+ "startArrowhead": null,
+ "endArrowhead": "arrow"
+ },
+ {
+ "id": "z25WF_vFoaIV4INU4t7tc",
+ "type": "text",
+ "x": 508,
+ "y": 383,
+ "width": 82,
+ "height": 40,
+ "angle": 0,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 0,
+ "opacity": 100,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "seed": 50865242,
+ "version": 97,
+ "versionNonce": 961497754,
+ "isDeleted": false,
+ "boundElements": [
+ {
+ "id": "_1K3dDelq8vt8hUVTGlVb",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1663576977194,
+ "link": null,
+ "locked": false,
+ "text": "Request \nwith JWT ",
+ "fontSize": 16,
+ "fontFamily": 1,
+ "textAlign": "center",
+ "verticalAlign": "top",
+ "baseline": 34,
+ "containerId": null,
+ "originalText": "Request \nwith JWT "
+ }
+ ],
+ "appState": {
+ "gridSize": null,
+ "viewBackgroundColor": "#ffffff"
+ },
+ "files": {}
+}
\ No newline at end of file
diff --git a/docs/reference/db-authorization/images/sources/webhook.excalidraw b/docs/reference/db-authorization/images/sources/webhook.excalidraw
new file mode 100644
index 0000000000..cb3a97888d
--- /dev/null
+++ b/docs/reference/db-authorization/images/sources/webhook.excalidraw
@@ -0,0 +1,481 @@
+{
+ "type": "excalidraw",
+ "version": 2,
+ "source": "https://excalidraw.com",
+ "elements": [
+ {
+ "type": "rectangle",
+ "version": 279,
+ "versionNonce": 2065391204,
+ "isDeleted": false,
+ "id": "noa4LNz0zVFUkmX-gwJAz",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 444,
+ "y": 458,
+ "strokeColor": "#000000",
+ "backgroundColor": "#40c057",
+ "width": 196.99999999999997,
+ "height": 76.99999999999997,
+ "seed": 332847037,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [
+ {
+ "id": "39WfYBHJ7EpIkQxsksqi1",
+ "type": "arrow"
+ },
+ {
+ "id": "1zdLFo52d2tDStweg0KBc",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1663577229461,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 154,
+ "versionNonce": 1570449884,
+ "isDeleted": false,
+ "id": "ohuP_nuHVS465PdPhUIMh",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 462,
+ "y": 467,
+ "strokeColor": "#000000",
+ "backgroundColor": "transparent",
+ "width": 162,
+ "height": 25,
+ "seed": 561555738,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [
+ {
+ "id": "_1K3dDelq8vt8hUVTGlVb",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1663577229461,
+ "link": null,
+ "locked": false,
+ "fontSize": 20,
+ "fontFamily": 1,
+ "text": "PLatformaticDB",
+ "baseline": 18,
+ "textAlign": "left",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "PLatformaticDB"
+ },
+ {
+ "type": "rectangle",
+ "version": 220,
+ "versionNonce": 1013576164,
+ "isDeleted": false,
+ "id": "Scfqm_vUdY5oyu3yerPKO",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 852,
+ "y": 461,
+ "strokeColor": "#000000",
+ "backgroundColor": "#fab005",
+ "width": 177,
+ "height": 79,
+ "seed": 1696170074,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [
+ {
+ "id": "1zdLFo52d2tDStweg0KBc",
+ "type": "arrow"
+ },
+ {
+ "id": "39WfYBHJ7EpIkQxsksqi1",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1663577229461,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 152,
+ "versionNonce": 215060060,
+ "isDeleted": false,
+ "id": "mpvHgBxJx9tZFSsUN0jJ9",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 896,
+ "y": 476,
+ "strokeColor": "#000000",
+ "backgroundColor": "#fab005",
+ "width": 79,
+ "height": 25,
+ "seed": 1285827398,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1663577229461,
+ "link": null,
+ "locked": false,
+ "fontSize": 20,
+ "fontFamily": 1,
+ "text": "Webhook",
+ "baseline": 18,
+ "textAlign": "center",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "Webhook"
+ },
+ {
+ "type": "rectangle",
+ "version": 107,
+ "versionNonce": 228201828,
+ "isDeleted": false,
+ "id": "rH-WPCYb6lBrRmg5JLygM",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 443,
+ "y": 281,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "width": 136,
+ "height": 81,
+ "seed": 775227226,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [
+ {
+ "type": "text",
+ "id": "_ta3Rw_skN-LWmDow8g30"
+ },
+ {
+ "id": "1zdLFo52d2tDStweg0KBc",
+ "type": "arrow"
+ },
+ {
+ "id": "39WfYBHJ7EpIkQxsksqi1",
+ "type": "arrow"
+ },
+ {
+ "id": "_1K3dDelq8vt8hUVTGlVb",
+ "type": "arrow"
+ }
+ ],
+ "updated": 1663577229461,
+ "link": null,
+ "locked": false
+ },
+ {
+ "type": "text",
+ "version": 65,
+ "versionNonce": 1086808796,
+ "isDeleted": false,
+ "id": "_ta3Rw_skN-LWmDow8g30",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 1,
+ "opacity": 100,
+ "angle": 0,
+ "x": 448,
+ "y": 309,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "width": 126,
+ "height": 25,
+ "seed": 71496198,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1663577229462,
+ "link": null,
+ "locked": false,
+ "fontSize": 20,
+ "fontFamily": 1,
+ "text": "App",
+ "baseline": 18,
+ "textAlign": "center",
+ "verticalAlign": "middle",
+ "containerId": "rH-WPCYb6lBrRmg5JLygM",
+ "originalText": "App"
+ },
+ {
+ "type": "arrow",
+ "version": 349,
+ "versionNonce": 469354724,
+ "isDeleted": false,
+ "id": "1zdLFo52d2tDStweg0KBc",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 0,
+ "opacity": 100,
+ "angle": 0,
+ "x": 648,
+ "y": 484.6827433082138,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "width": 197.0000000000001,
+ "height": 2.3530074335465656,
+ "seed": 2082570438,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1663577229462,
+ "link": null,
+ "locked": false,
+ "startBinding": {
+ "elementId": "noa4LNz0zVFUkmX-gwJAz",
+ "focus": -0.2660804905891414,
+ "gap": 7
+ },
+ "endBinding": {
+ "elementId": "Scfqm_vUdY5oyu3yerPKO",
+ "focus": 0.47614236399229914,
+ "gap": 6.999999999999886
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 197.0000000000001,
+ -2.3530074335465656
+ ]
+ ]
+ },
+ {
+ "type": "arrow",
+ "version": 297,
+ "versionNonce": 269395804,
+ "isDeleted": false,
+ "id": "39WfYBHJ7EpIkQxsksqi1",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 0,
+ "opacity": 100,
+ "angle": 0,
+ "x": 848,
+ "y": 507.4623099819273,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "width": 202.32136797682188,
+ "height": 1.5376900180726807,
+ "seed": 1257156506,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1663577229462,
+ "link": null,
+ "locked": false,
+ "startBinding": {
+ "elementId": "Scfqm_vUdY5oyu3yerPKO",
+ "focus": -0.1558098047944271,
+ "gap": 4
+ },
+ "endBinding": {
+ "elementId": "noa4LNz0zVFUkmX-gwJAz",
+ "focus": 0.33846236660687795,
+ "gap": 4.678632023178125
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ -202.32136797682188,
+ 1.5376900180726807
+ ]
+ ]
+ },
+ {
+ "type": "text",
+ "version": 145,
+ "versionNonce": 991366244,
+ "isDeleted": false,
+ "id": "S5RW71vc9rVAx-OPnKlmY",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 0,
+ "opacity": 100,
+ "angle": 0,
+ "x": 661.5,
+ "y": 525.5,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "width": 169,
+ "height": 60,
+ "seed": 1521101082,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1663577229462,
+ "link": null,
+ "locked": false,
+ "fontSize": 16,
+ "fontFamily": 1,
+ "text": "HTTP Response with \nuser metadata\nas headers",
+ "baseline": 54,
+ "textAlign": "center",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "HTTP Response with \nuser metadata\nas headers"
+ },
+ {
+ "type": "arrow",
+ "version": 266,
+ "versionNonce": 529173724,
+ "isDeleted": false,
+ "id": "_1K3dDelq8vt8hUVTGlVb",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 0,
+ "opacity": 100,
+ "angle": 0,
+ "x": 509.48826059466296,
+ "y": 363.81649934860275,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "width": 0.7268426180534107,
+ "height": 94.18350065139725,
+ "seed": 1918207706,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1663577285155,
+ "link": null,
+ "locked": false,
+ "startBinding": {
+ "elementId": "rH-WPCYb6lBrRmg5JLygM",
+ "focus": 0.026910262236472773,
+ "gap": 1.816499348602747
+ },
+ "endBinding": {
+ "elementId": "ohuP_nuHVS465PdPhUIMh",
+ "focus": -0.40222436641659615,
+ "gap": 9
+ },
+ "lastCommittedPoint": null,
+ "startArrowhead": null,
+ "endArrowhead": "arrow",
+ "points": [
+ [
+ 0,
+ 0
+ ],
+ [
+ 0.7268426180534107,
+ 94.18350065139725
+ ]
+ ]
+ },
+ {
+ "type": "text",
+ "version": 158,
+ "versionNonce": 635135460,
+ "isDeleted": false,
+ "id": "z25WF_vFoaIV4INU4t7tc",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 0,
+ "opacity": 100,
+ "angle": 0,
+ "x": 523.5,
+ "y": 389,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "width": 117,
+ "height": 40,
+ "seed": 50865242,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "boundElements": [],
+ "updated": 1663577251066,
+ "link": null,
+ "locked": false,
+ "fontSize": 16,
+ "fontFamily": 1,
+ "text": "HTTP Request\n ",
+ "baseline": 34,
+ "textAlign": "center",
+ "verticalAlign": "top",
+ "containerId": null,
+ "originalText": "HTTP Request\n "
+ },
+ {
+ "id": "W9rc6or6W35UphQ2A7HV-",
+ "type": "text",
+ "x": 705,
+ "y": 452,
+ "width": 54,
+ "height": 20,
+ "angle": 0,
+ "strokeColor": "#000000",
+ "backgroundColor": "#e64980",
+ "fillStyle": "hachure",
+ "strokeWidth": 1,
+ "strokeStyle": "solid",
+ "roughness": 0,
+ "opacity": 100,
+ "groupIds": [],
+ "strokeSharpness": "sharp",
+ "seed": 972120284,
+ "version": 29,
+ "versionNonce": 950413532,
+ "isDeleted": false,
+ "boundElements": null,
+ "updated": 1663577268388,
+ "link": null,
+ "locked": false,
+ "text": "POST ",
+ "fontSize": 16,
+ "fontFamily": 1,
+ "textAlign": "center",
+ "verticalAlign": "top",
+ "baseline": 14,
+ "containerId": null,
+ "originalText": "POST "
+ }
+ ],
+ "appState": {
+ "gridSize": null,
+ "viewBackgroundColor": "#ffffff"
+ },
+ "files": {}
+}
\ No newline at end of file
diff --git a/docs/reference/db-authorization/images/webhook.png b/docs/reference/db-authorization/images/webhook.png
new file mode 100644
index 0000000000..a22b723583
Binary files /dev/null and b/docs/reference/db-authorization/images/webhook.png differ
diff --git a/docs/reference/db-authorization/introduction.md b/docs/reference/db-authorization/introduction.md
new file mode 100644
index 0000000000..dd5ec2b026
--- /dev/null
+++ b/docs/reference/db-authorization/introduction.md
@@ -0,0 +1,123 @@
+# Introduction to Authentication & Authorization
+
+Authorization in Platformatic DB is **role-based** (see [Roles And User Information](#roles-and-user-information) for further details).
+Each user is expected to have an associated list of roles.
+Platformatic delegates authentication and assignment of the `roles` to an external _authentication service_.
+The job of the authentication service is to authenticate users and assign their roles correctly.
+Supported authentication service integrations are:
+- JWT
+- Webhook
+
+We refer to the user roles and other information (like `userId`) as [User Metadata](#user-metadata).
+
+To make testing and developing easier, it's possible to bypass these checks if an `adminSecret` is set. See [HTTP Headers](#http-headers).
+
+## JWT
+JWT support is built on top of [fastify-jwt](https://github.com/fastify/fastify-jwt).
+
+![Platformatic DB JWT integration](./images/jwt.png)
+
+The quickest way to configure it is to pass a shared `secret`, e.g.:
+
+```json
+ "authorization": {
+ ...
+
+ "jwt": {
+ "secret": ""
+ },
+
+ ...
+ }
+```
+For more complex configurations, please check [fastify-jwt options](https://github.com/fastify/fastify-jwt#options).
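+
+Once the secret (or JWKS) is configured, clients typically send the token in the `Authorization` header using the `Bearer` scheme. The following is only a minimal sketch (using Node 18's global `fetch`): it assumes the server listens on the default `http://127.0.0.1:3042`, that a hypothetical `pages` entity exists, and that `token` is a JWT signed with the configured secret.
+
+```js
+async function callWithJwt (token) {
+  // `token` is assumed to be a JWT signed with the configured secret
+  const res = await fetch('http://127.0.0.1:3042/pages', {
+    headers: {
+      Authorization: `Bearer ${token}`
+    }
+  })
+  console.log(res.status, await res.json())
+}
+```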
+
+### JWKS
+
+Platformatic DB supports [JWKS](https://www.rfc-editor.org/rfc/rfc7517). To configure it:
+
+```json
+ ...
+ "authorization": {
+ "jwt": {
+ "jwks": {
+ "allowedDomains": [
+ "https://ISSUER_DOMAIN"
+ ]
+ }
+    }
+ }
+ ...
+```
+More [get-jwks options](https://github.com/nearform/get-jwks#options) can be specified.
+
+When a JWT token is received, Platformatic DB fetches the correct public key from `https://ISSUER_DOMAIN/.well-known/jwks.json` and uses it to verify the JWT signature. The token carries all the necessary information, such as the `kid` (the ID of the key used to sign the token), so no other configuration is strictly necessary.
+
+It's also possible to enable [JWKS](https://www.rfc-editor.org/rfc/rfc7517) with no options:
+
+```json
+ ...
+
+ "authorization": {
+ "jwt": {
+ "jwks": true
+ }
+  }
+ ...
+```
+In this case, the JWKS URL is calculated from the `iss` (issuer) field of the JWT, so every JWT from an issuer that exposes a valid JWKS endpoint will pass validation. For that reason, **this configuration should be used only for development**; in every other case, `allowedDomains` should be specified.
+
+
+## Webhook
+Platformatic can use a webhook to authenticate requests.
+
+![Platformatic DB Webhook integration](./images/webhook.png)
+
+In this case, the webhook URL is configured in the `authorization` block:
+
+```json
+ "authorization": {
+ ...
+
+ "webhook": {
+ "url": ""
+ },
+
+ ...
+ }
+```
+
+When a request is received, Platformatic sends a `POST` to the webhook, replicating the same body and headers, except for:
+- `host`
+- `connection`
+
+The webhook's HTTP response is expected to carry the roles/user information as HTTP headers, as in the sketch below.
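+
+A minimal sketch of such a webhook, written as a Fastify route. The `/authorize` path is an example, the user lookup is a placeholder, and the header names follow the conventional keys described in [User Metadata](#user-metadata).
+
+```js
+'use strict'
+
+const Fastify = require('fastify')
+const app = Fastify({ logger: true })
+
+// Hypothetical user lookup: replace with a check against your user store
+async function lookupUser (headers) {
+  if (headers['x-api-key'] !== 'example-key') return null
+  return { id: '42', roles: ['user'] }
+}
+
+// Platformatic forwards the original request to this endpoint as a POST
+app.post('/authorize', async (req, reply) => {
+  const user = await lookupUser(req.headers)
+  if (!user) {
+    return reply.code(401).send({ error: 'Unauthorized' })
+  }
+  // Return the roles/user information as HTTP headers
+  reply.header('X-PLATFORMATIC-USER-ID', user.id)
+  reply.header('X-PLATFORMATIC-ROLE', user.roles.join(','))
+  return { ok: true }
+})
+
+app.listen({ port: 3000 })
+```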
+
+## HTTP Headers
+
+To make testing and developing easier, it's possible to bypass the JWT/webhook integration if an `adminSecret` is set.
+If so, and if a request has the `X-PLATFORMATIC-ADMIN-SECRET` HTTP header set to the configured `adminSecret`, JWT/webhook authentication is skipped and
+the role is automatically set to `platformatic-admin`.
+
+
+![Platformatic DB HTTP Headers integration](./images/http.png)
+
+Note that setting user roles via HTTP headers is highly insecure and should be used only within protected networks.
+
+### Impersonation
+If a user is recognized with the `platformatic-admin` role, they can also **impersonate users**.
+The user/roles to impersonate are specified with the following headers (see the sketch after this list):
+- `X-PLATFORMATIC-USER-ID`: the `userId` of the user to impersonate. Note that this key is conventional: any key can be used, as long as it matches the key specified in the authorization rules.
+- `X-PLATFORMATIC-ROLE`: a comma-separated list of roles
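+
+A hypothetical request that impersonates user `42` with the `user` role (using Node 18's global `fetch`). It is only a sketch: the `/pages` route is an example, the server is assumed to listen on the default `http://127.0.0.1:3042`, and `adminSecret` is assumed to be `super-secret`.
+
+```js
+async function impersonate () {
+  const res = await fetch('http://127.0.0.1:3042/pages', {
+    headers: {
+      // Bypasses JWT/webhook authentication (assumed adminSecret)
+      'X-PLATFORMATIC-ADMIN-SECRET': 'super-secret',
+      // Impersonated user metadata
+      'X-PLATFORMATIC-USER-ID': '42',
+      'X-PLATFORMATIC-ROLE': 'user'
+    }
+  })
+  console.log(res.status, await res.json())
+}
+
+impersonate()
+```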
+
+## User Metadata
+In all cases, the roles/user information is passed to Platformatic from the external _authentication service_ as strings (JWT claims or HTTP headers).
+We refer to these as **user metadata**. Platformatic stores the user metadata for each request in a `user` object.
+Roles can be set using `X-PLATFORMATIC-ROLE` as a comma-separated list of roles (this key is configurable, see [References](../configuration.md#role-and-anonymous-keys)).
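+
+For example, with the default keys, the `user` object for a request might look like this (a hypothetical sketch):
+
+```js
+{
+  'X-PLATFORMATIC-USER-ID': '42',
+  'X-PLATFORMATIC-ROLE': 'user,editor'
+}
+```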
+
+Note that roles are just strings. Some "special roles" are reserved:
+- `platformatic-admin`: identifies a user with admin powers
+- `anonymous`: set automatically when no roles are associated
+
+
diff --git a/docs/reference/db-authorization/programmatic-rules.md b/docs/reference/db-authorization/programmatic-rules.md
new file mode 100644
index 0000000000..ceeae8d201
--- /dev/null
+++ b/docs/reference/db-authorization/programmatic-rules.md
@@ -0,0 +1,81 @@
+# Programmatic Rules
+If you need more control over the authorization rules, you can specify them programmatically, e.g.:
+
+```js
+
+  // `auth` is the Platformatic DB authorization plugin
+  app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ async find ({ user, ctx, where }) {
+ return {
+ ...where,
+ userId: {
+ eq: user['X-PLATFORMATIC-USER-ID']
+ }
+ }
+ },
+ async delete ({ user, ctx, where }) {
+ return {
+ ...where,
+ userId: {
+ eq: user['X-PLATFORMATIC-USER-ID']
+ }
+ }
+ },
+ defaults: {
+ userId: async function ({ user, ctx, input }) {
+        // Derive the default value from the user metadata
+ return user['X-PLATFORMATIC-USER-ID']
+ }
+
+ },
+ async save ({ user, ctx, where }) {
+ return {
+ ...where,
+ userId: {
+ eq: user['X-PLATFORMATIC-USER-ID']
+ }
+ }
+ }
+ }]
+ })
+
+```
+
+In the following example, the `user` role can delete all the posts edited before yesterday:
+
+```js
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ find: true,
+ save: true,
+ async delete ({ user, ctx, where }) {
+ return {
+ ...where,
+        editedAt: {
+          // `yesterday` is a timestamp computed elsewhere,
+          // e.g. Date.now() - 24 * 60 * 60 * 1000
+          lt: yesterday
+ }
+ }
+ },
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }]
+ })
+```
+
+
diff --git a/docs/reference/migrations.md b/docs/reference/migrations.md
new file mode 100644
index 0000000000..c1c564a72d
--- /dev/null
+++ b/docs/reference/migrations.md
@@ -0,0 +1,62 @@
+# Migrations
+
+Platformatic DB is already set up to run migrations for you when it starts.
+It uses [Postgrator](https://www.npmjs.com/package/postgrator) under the hood to run migrations. Please refer to the [Postgrator documentation](https://github.com/rickbergfalk/postgrator) for guidance on writing migration files.
+
+In brief, you should create a file structure like this:
+
+```
+migrations/
+ |- 001.do.sql
+ |- 001.undo.sql
+ |- 002.do.sql
+ |- 002.undo.sql
+ |- 003.do.sql
+ |- 003.undo.sql
+ |- 004.do.sql
+ |- 004.undo.sql
+ |- ... and so on
+```
+
+Postgrator uses a table in your schema to store which migrations have already been processed, so that only new ones are applied at each server start.
+
+You can always roll back migrations by specifying the version you would like to roll back to.
+
+_Example_
+
+```
+$ platformatic db migrate --to 002
+```
+
+This will execute `004.undo.sql` and then `003.undo.sql`, in that order. If you keep those files in the migrations directory, the server will execute `003.do.sql` and then `004.do.sql`, in that order, when it restarts.
+
+## How to run migrations
+
+There are two ways to run migrations in Platformatic DB. They can be processed automatically when the server starts, or you can just run the `db migrate` command.
+
+In both cases you have to edit your config file to tell Platformatic DB where your migration files are located.
+
+
+### Automatically on server start
+To run migrations when Platformatic DB starts, you need to use the config file root property `migrations`.
+
+There are two options in the `"migrations"` property:
+- `dir` (_required_): the directory where the migration files are located. The path is relative to the config file.
+- `autoApply`: a boolean value that tells Platformatic DB whether to automatically apply migrations (default: `true`)
+
+_Example_
+
+```json
+{
+ ...
+ "migrations": {
+ "dir": "./path/to/migrations/folder",
+ "autoApply": false
+ }
+}
+```
+
+
+### Manually with the CLI
+
+See the documentation for the `db migrate` [command](./cli#migrate).
diff --git a/docs/reference/plugin.md b/docs/reference/plugin.md
new file mode 100644
index 0000000000..ea8e36efad
--- /dev/null
+++ b/docs/reference/plugin.md
@@ -0,0 +1,39 @@
+# Plugin
+
+If you want to extend Platformatic DB features, it is possible to register a plugin, which will be in the form of a standard [Fastify](https://fastify.io) plugin.
+
+The config file specifies where the plugin file is located, as in the example below:
+
+```json
+{
+ ...
+ "plugin": {
+ "path": "./plugin/index.js"
+ }
+}
+```
+The path is relative to the config file path.
+
+Since it uses [fastify-isolate](https://github.com/mcollina/fastify-isolate) under the hood, all other options of that package may be specified under the `plugin` property.
+
+Once the config file is set up, you can write your plugin to extend the Platformatic DB API or add your own business logic.
+
+You should export an async `function` which receives the following parameters:
+- `app` (`FastifyInstance`): the main Fastify [instance](https://www.fastify.io/docs/latest/Reference/Server/#instance) running Platformatic DB
+- `opts`: all the options specified in the config file after `path`
+
+You can always access the Platformatic [data mapper](/reference/sql-mapper/introduction.md) through the `app.platformatic` property.
+
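+For example, a minimal sketch of `./plugin/index.js` (the `/hello` route is just an illustration):
+
+```js
+'use strict'
+
+module.exports = async function (app, opts) {
+  // `opts` contains the extra options from the `plugin` config block
+  app.log.info({ opts }, 'plugin loaded')
+
+  app.get('/hello', async () => {
+    return { hello: 'world' }
+  })
+}
+```
+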
+Check some [examples](/guides/add-custom-functionality/introduction.md).
+
+## Hot Reload
+
+The plugin file is watched by the [`fs.watch`](https://nodejs.org/api/fs.html#fspromiseswatchfilename-options) function.
+
+You don't need to restart the Platformatic DB server while working on your plugin. Every time you save, the watcher triggers a reload event and the server automatically restarts and loads your updated code.
+
+:::tip
+
+At this time, on Linux, file watch in subdirectories is not supported due to a Node.js limitation (documented [here](https://nodejs.org/api/fs.html#caveats)).
+
+:::
diff --git a/docs/reference/sql-graphql/examples/deleteEntity.js b/docs/reference/sql-graphql/examples/deleteEntity.js
new file mode 100644
index 0000000000..fc384cd5be
--- /dev/null
+++ b/docs/reference/sql-graphql/examples/deleteEntity.js
@@ -0,0 +1,38 @@
+'use strict'
+
+const Fastify = require('fastify')
+const graphqlPlugin = require('@platformatic/sql-graphql')
+const sqlMapper = require('@platformatic/sql-mapper')
+async function main() {
+ const app = Fastify({
+ logger: {
+ level: 'info'
+ }
+ })
+ app.register(sqlMapper, {
+ connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
+ })
+ app.register(graphqlPlugin, {
+ graphiql: true
+ })
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { id: { eq: "3" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ const result = await res.json()
+ console.log(result.data) // { deletePages: [ { id: '3', title: 'Platformatic is cool!' } ] }
+ await app.close()
+}
+
+main()
\ No newline at end of file
diff --git a/docs/reference/sql-graphql/examples/insertEntity.js b/docs/reference/sql-graphql/examples/insertEntity.js
new file mode 100644
index 0000000000..d3297831c3
--- /dev/null
+++ b/docs/reference/sql-graphql/examples/insertEntity.js
@@ -0,0 +1,38 @@
+'use strict'
+
+const Fastify = require('fastify')
+const graphqlPlugin = require('@platformatic/sql-graphql')
+const sqlMapper = require('@platformatic/sql-mapper')
+async function main() {
+ const app = Fastify({
+ logger: {
+ level: 'info'
+ }
+ })
+ app.register(sqlMapper, {
+ connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
+ })
+ app.register(graphqlPlugin, {
+ graphiql: true
+ })
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ insertPage(input: { title: "Platformatic is cool!" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ const result = await res.json()
+ console.log(result.data) // { insertPage: { id: '4', title: 'Platformatic is cool!' } }
+ await app.close()
+}
+
+main()
\ No newline at end of file
diff --git a/docs/reference/sql-graphql/examples/query.js b/docs/reference/sql-graphql/examples/query.js
new file mode 100644
index 0000000000..1afd34b379
--- /dev/null
+++ b/docs/reference/sql-graphql/examples/query.js
@@ -0,0 +1,37 @@
+'use strict'
+
+const Fastify = require('fastify')
+const graphqlPlugin = require('@platformatic/sql-graphql')
+const sqlMapper = require('@platformatic/sql-mapper')
+async function main() {
+ const app = Fastify({
+ logger: {
+ level: 'info'
+ }
+ })
+ app.register(sqlMapper, {
+ connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
+ })
+ app.register(graphqlPlugin, {
+ graphiql: true
+ })
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query{
+ pages{
+ id,
+ title
+ }
+ }
+ `
+ }
+ })
+ const result = await res.json()
+ console.log(result.data)
+ await app.close()
+}
+
+main()
\ No newline at end of file
diff --git a/docs/reference/sql-graphql/examples/saveEntity.js b/docs/reference/sql-graphql/examples/saveEntity.js
new file mode 100644
index 0000000000..f2dd78b49b
--- /dev/null
+++ b/docs/reference/sql-graphql/examples/saveEntity.js
@@ -0,0 +1,38 @@
+'use strict'
+
+const Fastify = require('fastify')
+const graphqlPlugin = require('@platformatic/sql-graphql')
+const sqlMapper = require('@platformatic/sql-mapper')
+async function main() {
+ const app = Fastify({
+ logger: {
+ level: 'info'
+ }
+ })
+ app.register(sqlMapper, {
+ connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
+ })
+ app.register(graphqlPlugin, {
+ graphiql: true
+ })
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 3 title: "Platformatic is cool!" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ const result = await res.json()
+ console.log(result.data) // { savePage: { id: '3', title: 'Platformatic is cool!' } }
+ await app.close()
+}
+
+main()
\ No newline at end of file
diff --git a/docs/reference/sql-graphql/introduction.md b/docs/reference/sql-graphql/introduction.md
new file mode 100644
index 0000000000..ac60448106
--- /dev/null
+++ b/docs/reference/sql-graphql/introduction.md
@@ -0,0 +1,17 @@
+# Introduction to the GraphQL API
+
+The Platformatic DB GraphQL plugin starts a GraphQL server and makes it available
+via a `/graphql` endpoint. This endpoint is automatically ready to run queries and
+mutations against your entities. This functionality is powered by
+[Mercurius](https://mercurius.dev).
+
+## GraphiQL
+
+The [GraphiQL](https://github.com/graphql/graphiql) web UI is integrated into
+Platformatic DB. To enable it you can pass an option to the `sql-graphql` plugin:
+
+```javascript
+app.register(graphqlPlugin, { graphiql: true })
+```
+
+The GraphiQL interface is made available under the `/graphiql` path.
diff --git a/docs/reference/sql-graphql/mutations.md b/docs/reference/sql-graphql/mutations.md
new file mode 100644
index 0000000000..c2c1438aa5
--- /dev/null
+++ b/docs/reference/sql-graphql/mutations.md
@@ -0,0 +1,152 @@
+# Mutations
+
+When the GraphQL plugin is loaded, some mutations are automatically added to
+the GraphQL schema.
+
+## `save[ENTITY]`
+
+Saves a new entity to the database or updates an existing entity.
+
+### Example
+
+```js
+'use strict'
+
+const Fastify = require('fastify')
+const graphqlPlugin = require('@platformatic/sql-graphql')
+const sqlMapper = require('@platformatic/sql-mapper')
+
+async function main() {
+ const app = Fastify({
+ logger: {
+ level: 'info'
+ }
+ })
+ app.register(sqlMapper, {
+    connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
+ })
+ app.register(graphqlPlugin, {
+ graphiql: true
+ })
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 3 title: "Platformatic is cool!" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ const result = await res.json()
+ console.log(result.data) // { savePage: { id: '3', title: 'Platformatic is cool!' } }
+ await app.close()
+}
+
+main()
+```
+
+## `insert[ENTITY]`
+
+Inserts a new entity in the database.
+
+### Example
+
+
+```js
+'use strict'
+
+const Fastify = require('fastify')
+const graphqlPlugin = require('@platformatic/sql-graphql')
+const sqlMapper = require('@platformatic/sql-mapper')
+
+async function main() {
+ const app = Fastify({
+ logger: {
+ level: 'info'
+ }
+ })
+ app.register(sqlMapper, {
+    connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
+ })
+ app.register(graphqlPlugin, {
+ graphiql: true
+ })
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+          insertPage(input: { title: "Platformatic is cool!" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ const result = await res.json()
+  console.log(result.data) // { insertPage: { id: '4', title: 'Platformatic is cool!' } }
+ await app.close()
+}
+
+main()
+```
+
+## `delete[ENTITIES]`
+
+Deletes one or more entities from the database, based on the `where` clause
+passed as an input to the mutation.
+
+### Example
+
+
+
+```js
+'use strict'
+
+const Fastify = require('fastify')
+const graphqlPlugin = require('@platformatic/sql-graphql')
+const sqlMapper = require('@platformatic/sql-mapper')
+
+async function main() {
+ const app = Fastify({
+ logger: {
+ level: 'info'
+ }
+ })
+ app.register(sqlMapper, {
+    connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
+ })
+ app.register(graphqlPlugin, {
+ graphiql: true
+ })
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { id: { eq: "3" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ const result = await res.json()
+ console.log(result.data) // { deletePages: [ { id: '3', title: 'Platformatic is cool!' } ] }
+ await app.close()
+}
+
+main()
+```
diff --git a/docs/reference/sql-graphql/queries.md b/docs/reference/sql-graphql/queries.md
new file mode 100644
index 0000000000..da90257af2
--- /dev/null
+++ b/docs/reference/sql-graphql/queries.md
@@ -0,0 +1,77 @@
+# Queries
+
+A GraphQL query is automatically added to the GraphQL schema for each database
+table, along with a complete mapping for all table fields.
+
+## Example
+
+
+```js
+'use strict'
+
+const Fastify = require('fastify')
+const graphqlPlugin = require('@platformatic/sql-graphql')
+const sqlMapper = require('@platformatic/sql-mapper')
+async function main() {
+ const app = Fastify({
+ logger: {
+ level: 'info'
+ }
+ })
+ app.register(sqlMapper, {
+ connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
+ })
+ app.register(graphqlPlugin, {
+ graphiql: true
+ })
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query{
+ pages{
+ id,
+ title
+ }
+ }
+ `
+ }
+ })
+ const result = await res.json()
+ console.log(result.data)
+ await app.close()
+}
+main()
+```
+
+## Advanced Queries
+
+The following additional queries are added to the GraphQL schema for each entity:
+
+### `get[ENTITY]by[PRIMARY_KEY]`
+
+If you have a table `pages` with the field `id` as the primary key, you can run
+a query called `getPageById`.
+
+#### Example
+
+```js
+...
+const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query{
+ getPageById(id: 3) {
+ id,
+ title
+ }
+ }
+ `
+ }
+})
+const result = await res.json()
+console.log(result.data) // { getPageById: { id: '3', title: 'A fiction' } }
+```
diff --git a/docs/reference/sql-mapper/entities/api.md b/docs/reference/sql-mapper/entities/api.md
new file mode 100644
index 0000000000..89ebe574dd
--- /dev/null
+++ b/docs/reference/sql-mapper/entities/api.md
@@ -0,0 +1,244 @@
+# API
+
+A set of operation methods is available on each entity:
+
+- [`find`](#find)
+- [`insert`](#insert)
+- [`save`](#save)
+- [`delete`](#delete)
+
+
+## Returned fields
+
+The entity operation methods accept a `fields` option that can specify an array of field names to be returned. If not specified, all fields will be returned.
+
+## Where clause
+
+The entity operation methods accept a `where` option that limits the database rows affected by the operation.
+
+Each key of the `where` object is a field you want to check, and its value is a key/value map where the key is an operator (see the table below) and the value is the value you want to compare against.
+
+| Platformatic operator | SQL operator |
+|--- | ---|
+| eq | `'='` |
+| in | `'IN'` |
+| nin | `'NOT IN'` |
+| neq | `'<>'` |
+| gt | `'>'` |
+| gte | `'>='` |
+| lt | `'<'` |
+| lte | `'<='` |
+
+### Examples
+
+#### Select the row with `id = 1`
+```
+{
+ ...
+ "where": {
+ id: {
+ eq: 1
+ }
+ }
+}
+```
+
+#### Select all rows with id less than 100
+```
+{
+ ...
+ "where": {
+ id: {
+ lt: 100
+ }
+ }
+}
+```
+
+#### Select all rows with id 1, 3, 5 or 7
+```
+{
+ ...
+ "where": {
+ id: {
+ in: [1, 3, 5, 7]
+ }
+ }
+}
+```
+
+## Reference
+
+### `find`
+
+Retrieve data for an entity from the database.
+
+#### Options
+
+| Name | Type | Description
+|---|---|---|
+| `fields` | Array of `string` | List of fields to be returned for each object |
+| `where` | `Object` | [Where clause 🔗](#where-clause)
+| `orderBy` | Array of `Object` | Object like `{ field: 'counter', direction: 'ASC' }`
+| `limit` | `Number` | Limits the number of returned elements
+| `offset` | `Number` | The offset to start looking for rows from
+
+
+#### Usage
+
+
+```js
+'use strict'
+
+const { connect } = require('@platformatic/sql-mapper')
+const { pino } = require('pino')
+const pretty = require('pino-pretty')
+const logger = pino(pretty())
+
+async function main() {
+ const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ const mapper = await connect({
+ connectionString: pgConnectionString,
+ log: logger,
+ })
+ const res = await mapper.entities.page.find({
+ fields: ['id', 'title',],
+ where: {
+ id: {
+ lt: 10
+ }
+ },
+ })
+ logger.info(res)
+ await mapper.db.dispose()
+}
+main()
+```
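+
+The `orderBy`, `limit` and `offset` options can be combined. A minimal sketch, assuming the same hypothetical `pages` table as above:
+
+```js
+'use strict'
+
+const { connect } = require('@platformatic/sql-mapper')
+const { pino } = require('pino')
+const pretty = require('pino-pretty')
+const logger = pino(pretty())
+
+async function main() {
+  const mapper = await connect({
+    connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
+    log: logger,
+  })
+  // Fetch the third page of 5 results, newest id first
+  const res = await mapper.entities.page.find({
+    fields: ['id', 'title'],
+    orderBy: [{ field: 'id', direction: 'DESC' }],
+    limit: 5,
+    offset: 10
+  })
+  logger.info(res)
+  await mapper.db.dispose()
+}
+main()
+```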
+
+### `insert`
+
+Insert one or more entity rows in the database.
+
+#### Options
+
+| Name | Type | Description
+|---|---|---|
+| `fields` | Array of `string` | List of fields to be returned for each object |
+| `inputs` | Array of `Object` | Each object is a new row
+
+#### Usage
+
+```js
+'use strict'
+
+const { connect } = require('@platformatic/sql-mapper')
+const { pino } = require('pino')
+const pretty = require('pino-pretty')
+const logger = pino(pretty())
+
+async function main() {
+ const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ const mapper = await connect({
+ connectionString: pgConnectionString,
+ log: logger,
+ })
+ const res = await mapper.entities.page.insert({
+ fields: ['id', 'title' ],
+ inputs: [
+ { title: 'Foobar' },
+ { title: 'FizzBuzz' }
+ ],
+ })
+ logger.info(res)
+ /**
+ 0: {
+ "id": "16",
+ "title": "Foobar"
+ }
+ 1: {
+ "id": "17",
+ "title": "FizzBuzz"
+ }
+ */
+ await mapper.db.dispose()
+}
+main()
+```
+
+### `save`
+
+Create a new entity row in the database or update an existing one.
+
+To update an existing entity, the `id` field (or equivalent primary key) must be included in the `input` object.
+
+#### Options
+
+| Name | Type | Description
+|---|---|---|
+| `fields` | Array of `string` | List of fields to be returned for each object |
+| `input` | `Object` | The single row to create/update
+
+#### Usage
+
+```js
+'use strict'
+const { connect } = require('@platformatic/sql-mapper')
+const { pino } = require('pino')
+const pretty = require('pino-pretty')
+const logger = pino(pretty())
+
+async function main() {
+ const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ const mapper = await connect({
+ connectionString: connectionString,
+ log: logger,
+ })
+ const res = await mapper.entities.page.save({
+ fields: ['id', 'title' ],
+ input: { id: 1, title: 'FizzBuzz' },
+ })
+ logger.info(res)
+ await mapper.db.dispose()
+}
+main()
+```
+### `delete`
+
+Delete one or more entity rows from the database, depending on the `where` option. Returns the data for all deleted objects.
+
+#### Options
+
+| Name | Type | Description
+|---|---|---|
+| `fields` | Array of `string` | List of fields to be returned for each object |
+| `where` | `Object` | [Where clause 🔗](#where-clause)
+
+#### Usage
+
+```js
+'use strict'
+const { connect } = require('@platformatic/sql-mapper')
+const { pino } = require('pino')
+const pretty = require('pino-pretty')
+const logger = pino(pretty())
+
+async function main() {
+ const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ const mapper = await connect({
+ connectionString: connectionString,
+ log: logger,
+ })
+ const res = await mapper.entities.page.delete({
+ fields: ['id', 'title',],
+ where: {
+ id: {
+ lt: 4
+ }
+ },
+ })
+ logger.info(res)
+ await mapper.db.dispose()
+}
+main()
+
+```
diff --git a/docs/reference/sql-mapper/entities/example.md b/docs/reference/sql-mapper/entities/example.md
new file mode 100644
index 0000000000..3b55490816
--- /dev/null
+++ b/docs/reference/sql-mapper/entities/example.md
@@ -0,0 +1,160 @@
+# Example
+
+Given this PostgreSQL schema:
+
+```sql
+CREATE TABLE "categories" (
+ "id" int4 NOT NULL DEFAULT nextval('categories_id_seq'::regclass),
+ "name" varchar(255) NOT NULL,
+ PRIMARY KEY ("id")
+);
+
+CREATE TABLE "pages" (
+ "id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
+ "title" varchar(255) NOT NULL,
+ "category_id" int4,
+ "user_id" int4,
+ PRIMARY KEY ("id")
+);
+
+ALTER TABLE "pages" ADD FOREIGN KEY ("category_id") REFERENCES "categories"("id");
+```
+
+`app.platformatic.entities` will contain this mapping object:
+
+```json
+{
+ "category": {
+ "name": "Category",
+ "singularName": "category",
+ "pluralName": "categories",
+ "primaryKey": "id",
+ "table": "categories",
+ "fields": {
+ "id": {
+ "sqlType": "int4",
+ "isNullable": false,
+ "primaryKey": true,
+ "camelcase": "id"
+ },
+ "name": {
+ "sqlType": "varchar",
+ "isNullable": false,
+ "camelcase": "name"
+ }
+ },
+ "camelCasedFields": {
+ "id": {
+ "sqlType": "int4",
+ "isNullable": false,
+ "primaryKey": true,
+ "camelcase": "id"
+ },
+ "name": {
+ "sqlType": "varchar",
+ "isNullable": false,
+ "camelcase": "name"
+ }
+ },
+ "relations": [],
+ "reverseRelationships": [
+ {
+ "sourceEntity": "Page",
+ "relation": {
+ "constraint_catalog": "postgres",
+ "constraint_schema": "public",
+ "constraint_name": "pages_category_id_fkey",
+ "table_catalog": "postgres",
+ "table_schema": "public",
+ "table_name": "pages",
+ "constraint_type": "FOREIGN KEY",
+ "is_deferrable": "NO",
+ "initially_deferred": "NO",
+ "enforced": "YES",
+ "column_name": "category_id",
+ "ordinal_position": 1,
+ "position_in_unique_constraint": 1,
+ "foreign_table_name": "categories",
+ "foreign_column_name": "id"
+ }
+ }
+ ]
+ },
+ "page": {
+ "name": "Page",
+ "singularName": "page",
+ "pluralName": "pages",
+ "primaryKey": "id",
+ "table": "pages",
+ "fields": {
+ "id": {
+ "sqlType": "int4",
+ "isNullable": false,
+ "primaryKey": true,
+ "camelcase": "id"
+ },
+ "title": {
+ "sqlType": "varchar",
+ "isNullable": false,
+ "camelcase": "title"
+ },
+ "category_id": {
+ "sqlType": "int4",
+ "isNullable": true,
+ "foreignKey": true,
+ "camelcase": "categoryId"
+ },
+ "user_id": {
+ "sqlType": "int4",
+ "isNullable": true,
+ "camelcase": "userId"
+ }
+ },
+ "camelCasedFields": {
+ "id": {
+ "sqlType": "int4",
+ "isNullable": false,
+ "primaryKey": true,
+ "camelcase": "id"
+ },
+ "title": {
+ "sqlType": "varchar",
+ "isNullable": false,
+ "camelcase": "title"
+ },
+ "categoryId": {
+ "sqlType": "int4",
+ "isNullable": true,
+ "foreignKey": true,
+ "camelcase": "categoryId"
+ },
+ "userId": {
+ "sqlType": "int4",
+ "isNullable": true,
+ "camelcase": "userId"
+ }
+ },
+ "relations": [
+ {
+ "constraint_catalog": "postgres",
+ "constraint_schema": "public",
+ "constraint_name": "pages_category_id_fkey",
+ "table_catalog": "postgres",
+ "table_schema": "public",
+ "table_name": "pages",
+ "constraint_type": "FOREIGN KEY",
+ "is_deferrable": "NO",
+ "initially_deferred": "NO",
+ "enforced": "YES",
+ "column_name": "category_id",
+ "ordinal_position": 1,
+ "position_in_unique_constraint": 1,
+ "foreign_table_name": "categories",
+ "foreign_column_name": "id"
+ }
+ ],
+ "reverseRelationships": []
+ }
+}
+```
+
diff --git a/docs/reference/sql-mapper/entities/fields.md b/docs/reference/sql-mapper/entities/fields.md
new file mode 100644
index 0000000000..53c330af64
--- /dev/null
+++ b/docs/reference/sql-mapper/entities/fields.md
@@ -0,0 +1,91 @@
+# Fields
+
+When Platformatic DB inspects a database's schema, it creates an object for each table that contains a mapping of its fields.
+
+These objects contain the following properties:
+- `singularName`: singular entity name, based on table name. Uses [inflected](https://www.npmjs.com/package/inflected) under the hood.
+- `pluralName`: plural entity name (e.g. `'pages'`)
+- `primaryKey`: the field which is identified as primary key.
+- `table`: original table name
+- `fields`: an object containing all fields details. Object key is the field name.
+- `camelCasedFields`: an object containing all fields details in camelcase. If you have a column named `user_id`, you can access it using either `userId` or `user_id`
+
+## Fields detail
+
+- `sqlType`: The original field type. It may vary depending on the underlying database engine
+- `isNullable`: Whether the field can be `null` or not
+- `primaryKey`: Whether the field is the primary key or not
+- `camelcase`: The _camelcased_ value of the field
+
+## Example
+Given this SQL Schema (for PostgreSQL):
+```SQL
+CREATE SEQUENCE IF NOT EXISTS pages_id_seq;
+CREATE TABLE "public"."pages" (
+ "id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
+ "title" varchar,
+ "body_content" text,
+ "category_id" int4,
+ PRIMARY KEY ("id")
+);
+```
+
+The resulting mapping object will be:
+
+```js
+{
+ singularName: 'page',
+ pluralName: 'pages',
+ primaryKey: 'id',
+ table: 'pages',
+ fields: {
+ id: {
+ sqlType: 'int4',
+ isNullable: false,
+ primaryKey: true,
+ camelcase: 'id'
+ },
+ title: {
+ sqlType: 'varchar',
+ isNullable: true,
+ camelcase: 'title'
+ },
+ body_content: {
+ sqlType: 'text',
+ isNullable: true,
+ camelcase: 'bodyContent'
+ },
+ category_id: {
+ sqlType: 'int4',
+ isNullable: true,
+ foreignKey: true,
+ camelcase: 'categoryId'
+ }
+  },
+ camelCasedFields: {
+ id: {
+ sqlType: 'int4',
+ isNullable: false,
+ primaryKey: true,
+ camelcase: 'id'
+ },
+ title: {
+ sqlType: 'varchar',
+ isNullable: true,
+ camelcase: 'title'
+ },
+ bodyContent: {
+ sqlType: 'text',
+ isNullable: true,
+ camelcase: 'bodyContent'
+ },
+ categoryId: {
+ sqlType: 'int4',
+ isNullable: true,
+ foreignKey: true,
+ camelcase: 'categoryId'
+ }
+ },
+ relations: []
+}
+```
diff --git a/docs/reference/sql-mapper/entities/hooks.md b/docs/reference/sql-mapper/entities/hooks.md
new file mode 100644
index 0000000000..a7610b2272
--- /dev/null
+++ b/docs/reference/sql-mapper/entities/hooks.md
@@ -0,0 +1,144 @@
+# Hooks
+
+Entity hooks are a way to wrap the [API methods](./api) for an entity and add custom behaviour.
+
+The Platformatic DB SQL Mapper provides an `addEntityHooks(entityName, spec)` function that can be used to add hooks for an entity.
+
+## How to use hooks
+
+`addEntityHooks` accepts two arguments:
+
+1. A string representing the entity name (singularized), for example `'page'`.
+1. A key/value object where the key is one of the API methods (`find`, `insert`, `save`, `delete`) and the value is a callback function. The callback will be called with the _original_ API method and the options that were passed to that method. See the example below.
+
+### Usage
+
+```js
+'use strict'
+const { connect } = require('@platformatic/sql-mapper')
+const { pino } = require('pino')
+const pretty = require('pino-pretty')
+const logger = pino(pretty())
+
+async function main() {
+ const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ const mapper = await connect({
+ connectionString: pgConnectionString,
+ log: logger,
+ })
+ mapper.addEntityHooks('page', {
+ find: async (originalFind, opts) => {
+ // Add a `foo` field with `bar` value to each row
+ const res = await originalFind(opts)
+ return res.map((row) => {
+ row.foo = 'bar'
+ return row
+ })
+ }
+ })
+ const res = await mapper.entities.page.find({
+ fields: ['id', 'title',],
+ where: {
+ id: {
+ lt: 10
+ }
+ },
+ })
+ logger.info(res)
+ /**
+ [
+ 0: {
+ "id": "5",
+ "title": "Page 1",
+ "foo": "bar"
+ },
+ 1: {
+ "id": "6",
+ "title": "Page 2",
+ "foo": "bar"
+ }
+ ]
+ */
+ await mapper.db.dispose()
+}
+main()
+```
+
+
+## Multiple Hooks
+
+Multiple hooks can be added for the same entity and API method, for example:
+
+
+```js
+'use strict'
+const { connect } = require('@platformatic/sql-mapper')
+const { pino } = require('pino')
+const pretty = require('pino-pretty')
+const logger = pino(pretty())
+
+async function main() {
+ const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ const mapper = await connect({
+ connectionString: pgConnectionString,
+ log: logger,
+ })
+ mapper.addEntityHooks('page', {
+ find: async function firstHook(previousFunction, opts) {
+ // Add a `foo` field with `bar` value to each row
+ const res = await previousFunction(opts)
+ return res.map((row) => {
+ row.foo = 'bar'
+ return row
+ })
+ }
+ })
+ mapper.addEntityHooks('page', {
+ find: async function secondHook(previousFunction, opts) {
+ // Add a `bar` field with `baz` value to each row
+ const res = await previousFunction(opts)
+ return res.map((row) => {
+ row.bar = 'baz'
+ return row
+ })
+ }
+ })
+ const res = await mapper.entities.page.find({
+ fields: ['id', 'title',],
+ where: {
+ id: {
+ lt: 10
+ }
+ },
+ })
+ logger.info(res)
+ /**
+ [
+ 0: {
+ "id": "5",
+ "title": "Page 1",
+ "foo": "bar",
+ "bar": "baz"
+ },
+ 1: {
+ "id": "6",
+ "title": "Page 2",
+ "foo": "bar",
+ "bar": "baz"
+ }
+ ]
+ */
+ await mapper.db.dispose()
+}
+main()
+```
+
+Since hooks are wrappers, they are called in reverse order, as shown in the image below.
+
+![Hooks Lifecycle](../images/plt-db-hooks.svg)
+
+So even though we defined two hooks, the database will be hit only once.
+
+The query result is processed by `firstHook`, which passes it to `secondHook`, which finally returns the processed result to the caller of `.find({...})`.
+
+
diff --git a/docs/reference/sql-mapper/entities/introduction.md b/docs/reference/sql-mapper/entities/introduction.md
new file mode 100644
index 0000000000..564874b490
--- /dev/null
+++ b/docs/reference/sql-mapper/entities/introduction.md
@@ -0,0 +1,11 @@
+# Introduction to Entities
+
+The primary goal of Platformatic DB is to read a database schema and generate REST and GraphQL endpoints that enable the execution of CRUD (Create/Retrieve/Update/Delete) operations against the database.
+
+Platformatic DB includes a _mapper_ that reads the schemas of database tables and then generates an _entity_ object for each table.
+
+Platformatic DB is a [Fastify](https://fastify.io) application. The Fastify instance object is decorated with the `platformatic` property, which exposes several APIs that handle the manipulation of data in the database.
+
+Platformatic DB populates the `app.platformatic.entities` object with data found in database tables.
+
+The keys on the `entities` object are _singularized_ versions of the table names — for example `users` becomes `user`, `categories` becomes `category` — and the values are a set of associated metadata and functions.
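+
+For example, assuming the database has `users` and `categories` tables, a plugin could access the singularized entities like this (a minimal sketch):
+
+```js
+'use strict'
+
+module.exports = async function (app) {
+  // The `users` and `categories` tables are exposed as `user` and `category`
+  const { user, category } = app.platformatic.entities
+
+  app.get('/counts', async () => {
+    return {
+      users: (await user.find()).length,
+      categories: (await category.find()).length
+    }
+  })
+}
+```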
diff --git a/docs/reference/sql-mapper/entities/relations.md b/docs/reference/sql-mapper/entities/relations.md
new file mode 100644
index 0000000000..418ff33dda
--- /dev/null
+++ b/docs/reference/sql-mapper/entities/relations.md
@@ -0,0 +1,87 @@
+# Relations
+
+When Platformatic DB is reading your database schema, it identifies relationships
+between tables and stores metadata on them in the entity object's `relations` field.
+This is achieved by querying the database's internal metadata.
+
+## Example
+
+Given this PostgreSQL schema:
+
+```sql
+CREATE SEQUENCE IF NOT EXISTS categories_id_seq;
+
+CREATE TABLE "categories" (
+ "id" int4 NOT NULL DEFAULT nextval('categories_id_seq'::regclass),
+ "name" varchar(255) NOT NULL,
+ PRIMARY KEY ("id")
+);
+
+CREATE SEQUENCE IF NOT EXISTS pages_id_seq;
+
+CREATE TABLE "pages" (
+ "id" int4 NOT NULL DEFAULT nextval('pages_id_seq'::regclass),
+ "title" varchar(255) NOT NULL,
+ "body_content" text,
+ "category_id" int4,
+ PRIMARY KEY ("id")
+);
+
+ALTER TABLE "pages" ADD FOREIGN KEY ("category_id") REFERENCES "categories"("id");
+```
+
+When this code is run:
+
+
+```js
+'use strict'
+const { connect } = require('@platformatic/sql-mapper')
+const { pino } = require('pino')
+const pretty = require('pino-pretty')
+const logger = pino(pretty())
+
+async function main() {
+ const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ const mapper = await connect({
+ connectionString: pgConnectionString,
+ log: logger,
+ })
+ const pageEntity = mapper.entities.page
+ console.log(pageEntity.relations)
+ await mapper.db.dispose()
+}
+main()
+```
+
+The output will be:
+
+```javascript
+[
+ {
+ constraint_catalog: 'postgres',
+ constraint_schema: 'public',
+ constraint_name: 'pages_category_id_fkey',
+ table_catalog: 'postgres',
+ table_schema: 'public',
+ table_name: 'pages',
+ constraint_type: 'FOREIGN KEY',
+ is_deferrable: 'NO',
+ initially_deferred: 'NO',
+ enforced: 'YES',
+ column_name: 'category_id',
+ ordinal_position: 1,
+ position_in_unique_constraint: 1,
+ foreign_table_name: 'categories',
+ foreign_column_name: 'id'
+ }
+]
+```
+
+As Platformatic DB supports multiple database engines, the contents of the
+`relations` object will vary depending on the database being used.
+
+The following `relations` fields are common to all database engines:
+
+- `column_name` — the column that stores the foreign key
+- `foreign_table_name` — the table hosting the related row
+- `foreign_column_name` — the column in the foreign table that identifies the row
diff --git a/docs/reference/sql-mapper/examples/delete.js b/docs/reference/sql-mapper/examples/delete.js
new file mode 100644
index 0000000000..b02d8ba518
--- /dev/null
+++ b/docs/reference/sql-mapper/examples/delete.js
@@ -0,0 +1,30 @@
+// Referenced in docs/reference/sql-mapper/entity/api.md
+'use strict'
+
+const { connect } = require('@platformatic/sql-mapper')
+
+const { pino } = require('pino')
+const pretty = require('pino-pretty')
+const logger = pino(pretty())
+
+async function main() {
+ const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ const mapper = await connect({
+ connectionString: connectionString,
+ log: logger,
+ })
+ const res = await mapper.entities.page.delete({
+ fields: ['id', 'title',],
+ where: {
+ id: {
+ lt: 4
+ }
+ },
+ })
+
+ logger.info(res)
+
+ await mapper.db.dispose()
+}
+
+main()
diff --git a/docs/reference/sql-mapper/examples/fastify-plugin.js b/docs/reference/sql-mapper/examples/fastify-plugin.js
new file mode 100644
index 0000000000..6ac4c979cc
--- /dev/null
+++ b/docs/reference/sql-mapper/examples/fastify-plugin.js
@@ -0,0 +1,25 @@
+// Referenced in docs/reference/sql-mapper/fastify-plugin.md
+'use strict'
+
+const Fastify = require('fastify')
+const mapper = require('@platformatic/sql-mapper')
+
+async function main() {
+ const app = Fastify({
+ logger: {
+ level: 'info'
+ }
+ })
+ app.register(mapper.plugin, {
+ connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
+ })
+
+ app.get('/all-pages', async (req, reply) => {
+ const res = await app.platformatic.entities.page.find()
+ return res
+ })
+
+ await app.listen({ port: 3333 })
+}
+
+main()
\ No newline at end of file
diff --git a/docs/reference/sql-mapper/examples/fields.js b/docs/reference/sql-mapper/examples/fields.js
new file mode 100644
index 0000000000..5c991beda5
--- /dev/null
+++ b/docs/reference/sql-mapper/examples/fields.js
@@ -0,0 +1,19 @@
+'use strict'
+
+const { connect } = require('@platformatic/sql-mapper')
+
+const { pino } = require('pino')
+const pretty = require('pino-pretty')
+const logger = pino(pretty())
+
+async function main() {
+ const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ const mapper = await connect({
+ connectionString: pgConnectionString,
+ log: logger,
+ })
+ logger.info(mapper.entities.page)
+ await mapper.db.dispose()
+}
+
+main()
diff --git a/docs/reference/sql-mapper/examples/find.js b/docs/reference/sql-mapper/examples/find.js
new file mode 100644
index 0000000000..ac2cdb56b2
--- /dev/null
+++ b/docs/reference/sql-mapper/examples/find.js
@@ -0,0 +1,30 @@
+// Referenced in docs/reference/sql-mapper/entity/api.md
+'use strict'
+
+const { connect } = require('@platformatic/sql-mapper')
+
+const { pino } = require('pino')
+const pretty = require('pino-pretty')
+const logger = pino(pretty())
+
+async function main() {
+ const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ const mapper = await connect({
+ connectionString: pgConnectionString,
+ log: logger,
+ })
+ const res = await mapper.entities.page.find({
+ fields: ['id', 'title',],
+ where: {
+ id: {
+ lt: 10
+ }
+ },
+ })
+
+ logger.info(res)
+
+ await mapper.db.dispose()
+}
+
+main()
diff --git a/docs/reference/sql-mapper/examples/hooks.js b/docs/reference/sql-mapper/examples/hooks.js
new file mode 100644
index 0000000000..b854295fd4
--- /dev/null
+++ b/docs/reference/sql-mapper/examples/hooks.js
@@ -0,0 +1,46 @@
+// Referenced in docs/reference/sql-mapper/hooks.md
+'use strict'
+const { connect } = require('@platformatic/sql-mapper')
+const { pino } = require('pino')
+const pretty = require('pino-pretty')
+const logger = pino(pretty())
+
+async function main() {
+ const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ const mapper = await connect({
+ connectionString: pgConnectionString,
+ log: logger,
+ })
+ mapper.addEntityHooks('page', {
+ find: async function firstHook(previousFunction, opts) {
+ // Add a `foo` field with `bar` value to each row
+ const res = await previousFunction(opts)
+ return res.map((row) => {
+ row.foo = 'bar'
+ return row
+ })
+ }
+ })
+ mapper.addEntityHooks('page', {
+ find: async function secondHook(previousFunction, opts) {
+ // Add a `bar` field with `baz` value to each row
+ const res = await previousFunction(opts)
+ return res.map((row) => {
+ row.bar = 'baz'
+ return row
+ })
+ }
+ })
+ const res = await mapper.entities.page.find({
+ fields: ['id', 'title',],
+ where: {
+ id: {
+ lt: 10
+ }
+ },
+ })
+ logger.info(res)
+ await mapper.db.dispose()
+}
+
+main()
diff --git a/docs/reference/sql-mapper/examples/insert.js b/docs/reference/sql-mapper/examples/insert.js
new file mode 100644
index 0000000000..06e562c881
--- /dev/null
+++ b/docs/reference/sql-mapper/examples/insert.js
@@ -0,0 +1,25 @@
+// Referenced in docs/reference/sql-mapper/entity/api.md
+'use strict'
+
+const { connect } = require('@platformatic/sql-mapper')
+const { pino } = require('pino')
+const pretty = require('pino-pretty')
+const logger = pino(pretty())
+
+async function main() {
+ const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ const mapper = await connect({
+ connectionString: pgConnectionString,
+ log: logger,
+ })
+ const res = await mapper.entities.page.insert({
+ fields: ['id', 'title' ],
+ inputs: [
+ { title: 'Foobar' },
+ { title: 'FizzBuzz' }
+ ],
+ })
+ logger.info(res)
+ await mapper.db.dispose()
+}
+main()
\ No newline at end of file
diff --git a/docs/reference/sql-mapper/examples/relations.js b/docs/reference/sql-mapper/examples/relations.js
new file mode 100644
index 0000000000..1f7c5afc0b
--- /dev/null
+++ b/docs/reference/sql-mapper/examples/relations.js
@@ -0,0 +1,38 @@
+// Referenced in docs/reference/sql-mapper/entity/relations.md
+'use strict'
+
+const { connect } = require('@platformatic/sql-mapper')
+
+const { pino } = require('pino')
+const pretty = require('pino-pretty')
+const logger = pino(pretty())
+
+async function main() {
+ const pgConnectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ const mapper = await connect({
+ connectionString: pgConnectionString,
+ log: logger,
+ })
+ const pageEntity = mapper.entities.page
+ const categoryEntity = mapper.entities.category
+
+ const newCategory = await categoryEntity.insert({
+ fields: ['id', 'name'],
+ inputs: [{ name: 'fiction' }]
+ })
+ {
+ const res = await pageEntity.insert({
+      fields: ['id', 'title'],
+ inputs: [
+ {
+          title: 'A fiction', bodyContent: 'This is our first fiction', categoryId: newCategory[0].id
+ }
+ ]
+ })
+ console.log(res)
+ }
+ console.log(pageEntity.relations)
+ await mapper.db.dispose()
+}
+
+main()
diff --git a/docs/reference/sql-mapper/examples/save.js b/docs/reference/sql-mapper/examples/save.js
new file mode 100644
index 0000000000..46f90cd395
--- /dev/null
+++ b/docs/reference/sql-mapper/examples/save.js
@@ -0,0 +1,21 @@
+// Referenced in docs/reference/sql-mapper/entity/api.md
+'use strict'
+const { connect } = require('@platformatic/sql-mapper')
+const { pino } = require('pino')
+const pretty = require('pino-pretty')
+const logger = pino(pretty())
+
+async function main() {
+ const connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ const mapper = await connect({
+ connectionString: connectionString,
+ log: logger,
+ })
+ const res = await mapper.entities.page.save({
+ fields: ['id', 'title' ],
+ input: { id: 10, title: 'FizzBuzz' },
+ })
+ logger.info(res)
+ await mapper.db.dispose()
+}
+main()
\ No newline at end of file
diff --git a/docs/reference/sql-mapper/fastify-plugin.md b/docs/reference/sql-mapper/fastify-plugin.md
new file mode 100644
index 0000000000..caeef33a1a
--- /dev/null
+++ b/docs/reference/sql-mapper/fastify-plugin.md
@@ -0,0 +1,42 @@
+# Fastify Plugin
+
+The `@platformatic/sql-mapper` package exports a [Fastify](https://fastify.io) plugin that can be used out of the box in a server application.
+
+A `connectionString` option must be passed to connect to your database.
+
+The plugin decorates the server with a `platformatic` object that has the following properties:
+
+- `db` — the DB wrapper object provided by [`@databases`](https://www.atdatabases.org/)
+- `sql` — the SQL query mapper object provided by [`@databases`](https://www.atdatabases.org/)
+- `entities` — all entity objects with their [API methods](./entity/api)
+- `addEntityHooks` — a function to add a [hook](./entity/hooks) to an entity API method
+
+## Usage
+
+```js
+'use strict'
+
+const Fastify = require('fastify')
+const mapper = require('@platformatic/sql-mapper')
+
+async function main() {
+ const app = Fastify({
+ logger: {
+ level: 'info'
+ }
+ })
+ app.register(mapper.plugin, {
+ connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
+ })
+
+ app.get('/all-pages', async (req, reply) => {
+ // Will return all rows from 'pages' table
+ const res = await app.platformatic.entities.page.find()
+ return res
+ })
+
+ await app.listen({ port: 3333 })
+}
+
+main()
+```
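+
+The `addEntityHooks` decoration works the same way as `mapper.addEntityHooks` shown in [hooks](./entity/hooks). The sketch below assumes it runs inside `main()` above, after `mapper.plugin` has been registered:
+
+```js
+// Wait for the plugin to load so that `app.platformatic` is decorated
+await app.ready()
+
+app.platformatic.addEntityHooks('page', {
+  find: async (originalFind, opts) => {
+    const rows = await originalFind(opts)
+    // `flaggedByHook` is an illustrative extra field, not a real column
+    return rows.map((row) => ({ ...row, flaggedByHook: true }))
+  }
+})
+```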
diff --git a/docs/reference/sql-mapper/images/plt-db-hooks.svg b/docs/reference/sql-mapper/images/plt-db-hooks.svg
new file mode 100644
index 0000000000..6471afbce3
--- /dev/null
+++ b/docs/reference/sql-mapper/images/plt-db-hooks.svg
@@ -0,0 +1,4 @@
+
+
+
+firstHook() secondHook() entity.find() Database mapper.entities.page.find() mapper.entities.page.find() Text is not SVG - cannot display
\ No newline at end of file
diff --git a/docs/reference/sql-mapper/introduction.md b/docs/reference/sql-mapper/introduction.md
new file mode 100644
index 0000000000..708869ffff
--- /dev/null
+++ b/docs/reference/sql-mapper/introduction.md
@@ -0,0 +1,51 @@
+# Introduction to the Platformatic DB Mapper
+
+The Platformatic DB Mapper will inspect a database schema and return an object containing:
+
+- `db` — A database abstraction layer from [`@databases`](https://www.atdatabases.org/)
+- `sql` — The SQL builder from [`@databases`](https://www.atdatabases.org/)
+- `entities` — An object containing a key for each table found in the schema, with basic CRUD operations. See [Entity Reference](./entities/introduction.md) for details.
+
+It exports a `connect` function that accepts an object with the following properties:
+
+- `connectionString` — The Database connection string
+- `log` — A logger object (like [Pino](https://getpino.io))
+- `onDatabaseLoad` — An async function that is called after the connection is established. It will receive `db` and `sql` as parameters.
+- `ignore` — Object used to exclude some tables from entity building (e.g. `{ 'versions': true }` will ignore the `versions` table)
+- `autoTimestamp` — Generate timestamps automatically when inserting/updating records.
+- `hooks` — For each entity name (like `Page`) you can customize any of the entity API functions. Your custom function will receive the original function as its first parameter, followed by all the other parameters passed to it.
+
+## Code samples
+
+```javascript
+const { connect } = require('@platformatic/sql-mapper')
+const { pino } = require('pino')
+
+const logger = pino()
+
+async function onDatabaseLoad (db, sql) {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(255) NOT NULL
+ );`)
+}
+const connectionString =
+ 'postgres://postgres:postgres@localhost:5432/postgres'
+const mapper = await connect({
+ connectionString,
+ log: logger,
+ onDatabaseLoad,
+ ignore: {},
+ hooks: {
+ Page: {
+ find: async function(_find, opts) {
+ console.log('hook called');
+ return await _find(opts)
+ }
+ }
+ }
+})
+const pageEntity = mapper.entities.page
+
+await mapper.db.query(mapper.sql`SELECT * FROM pages`)
+await pageEntity.find()
+```
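+
+Continuing from the snippet above, each generated entity exposes CRUD methods such as `insert` and `find` (see the example files referenced throughout this section). A minimal sketch, using `where` operators such as `eq` and `lt`:
+
+```javascript
+// Insert a row through the generated 'page' entity, then read it back
+const inserted = await pageEntity.insert({
+  fields: ['id', 'title'],
+  inputs: [{ title: 'A title' }]
+})
+
+const found = await pageEntity.find({
+  fields: ['id', 'title'],
+  where: { id: { eq: inserted[0].id } }
+})
+console.log(found) // [ { id: ..., title: 'A title' } ]
+```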
diff --git a/docs/reference/sql-rest/api.md b/docs/reference/sql-rest/api.md
new file mode 100644
index 0000000000..5c133fed6a
--- /dev/null
+++ b/docs/reference/sql-rest/api.md
@@ -0,0 +1,149 @@
+# API
+
+Each table is mapped to an `entity` named after the table's name.
+
+In the following reference we'll use some placeholders, but let's start with an example.
+
+_Example_
+
+Given this SQL executed against your database:
+
+```sql
+CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(255) NOT NULL,
+ body TEXT NOT NULL
+);
+```
+
+- `[PLURAL_ENTITY_NAME]` is `pages`
+- `[SINGULAR_ENTITY_NAME]` is `page`
+- `[PRIMARY_KEY]` is `id`
+- `fields` are `id`, `title`, `body`
+
+## GET and POST parameters
+
+Some APIs use the `GET` method, where parameters must be defined in the URL, while others use the `POST`/`PUT` methods, where parameters can be defined in the HTTP request payload.
+
+## Fields
+
+Every API can define a `fields` parameter, representing the entity fields you want to get back for each row of the table. If not specified, all fields are returned.
+
+The `fields` parameter is always sent in the query string as a comma-separated value, even for `POST`, `PUT` and `DELETE` requests.
+
+## `GET /[PLURAL_ENTITY_NAME]`
+
+Returns all entities matching the `where` clause.
+
+### Where clause
+
+You can define many `WHERE` clauses in the REST API; each clause includes a **field**, an **operator** and a **value**.
+
+The **field** is one of the fields found in the schema.
+
+The **operator** follows this table:
+
+| Platformatic operator | SQL operator |
+|--- | ---|
+| eq | `'='` |
+| in | `'IN'` |
+| nin | `'NOT IN'` |
+| neq | `'<>'` |
+| gt | `'>'` |
+| gte | `'>='` |
+| lt | `'<'` |
+| lte | `'<='` |
+
+The **value** is the value you want to compare the field to.
+
+For `GET` requests, all these clauses are specified in the query string using the format `where.[FIELD].[OPERATOR]=[VALUE]`.
+
+_Example_
+
+If you want to get the `title` and the `body` of every `page` where `id < 15` you can make an HTTP request like this:
+
+```bash
+$ curl -X 'GET' \
+ 'http://localhost:3042/pages/?fields=body,title&where.id.lt=15' \
+ -H 'accept: application/json'
+```
+
+## `POST /[PLURAL_ENTITY_NAME]`
+
+Creates a new row in the table. Expects fields to be sent in a JSON-formatted request body.
+
+_Example_
+
+```
+$ curl -X 'POST' \
+ 'http://localhost:3042/pages/' \
+ -H 'accept: application/json' \
+ -H 'Content-Type: application/json' \
+ -d '{
+ "title": "Hello World",
+ "body": "Welcome to Platformatic!"
+}'
+
+{
+ "id": 1,
+ "title": "Hello World",
+ "body": "Welcome to Platformatic"
+}
+```
+
+## `GET /[PLURAL_ENTITY_NAME]/[PRIMARY_KEY]`
+
+Returns a single row, identified by `PRIMARY_KEY`.
+
+_Example_
+
+```
+$ curl -X 'GET' 'http://localhost:3042/pages/1?fields=title,body'
+
+{
+ "title": "Hello World",
+ "body": "Welcome to Platformatic"
+}
+```
+
+## `POST /[PLURAL_ENTITY_NAME]/[PRIMARY_KEY]`
+
+Updates a row identified by `PRIMARY_KEY`.
+
+_Example_
+
+```
+$ curl -X 'POST' \
+ 'http://localhost:3042/pages/1' \
+ -H 'accept: application/json' \
+ -H 'Content-Type: application/json' \
+ -d '{
+ "title": "Hello Platformatic!",
+ "body": "Welcome to Platformatic!"
+}'
+
+{
+ "id": 1,
+ "title": "Hello Platformatic!",
+ "body": "Welcome to Platformatic"
+}
+```
+
+## `PUT /[PLURAL_ENTITY_NAME]/[PRIMARY_KEY]`
+
+Same as `POST /[PLURAL_ENTITY_NAME]/[PRIMARY_KEY]`.
+
+## `DELETE /[PLURAL_ENTITY_NAME]/[PRIMARY_KEY]`
+
+Deletes a row identified by the `PRIMARY_KEY`.
+
+_Example_
+
+```
+$ curl -X 'DELETE' 'http://localhost:3042/pages/1?fields=title'
+
+{
+ "title": "Hello Platformatic!"
+}
+```
\ No newline at end of file
diff --git a/docs/reference/sql-rest/introduction.md b/docs/reference/sql-rest/introduction.md
new file mode 100644
index 0000000000..22890a78f0
--- /dev/null
+++ b/docs/reference/sql-rest/introduction.md
@@ -0,0 +1,23 @@
+# Introduction to the REST API
+
+The Platformatic DB OpenAPI plugin automatically starts a REST API server (powered by [Fastify](https://fastify.io)) that provides CRUD (**C**reate, **R**ead, **U**pdate, **D**elete) functionality for each entity.
+
+## Configuration
+
+In the config file, under the `"core"` section, the OpenAPI server is enabled by default. You can disable it by setting the `openapi` property to `false`.
+
+_Example_
+
+```json
+{
+ ...
+ "core": {
+ "openapi": false
+ }
+}
+```
+
+As Platformatic DB uses [`fastify-swagger`](https://github.com/fastify/fastify-swagger) under the hood, the `"openapi"` property can be an object that follows the [OpenAPI Specification Object](https://swagger.io/specification/#oasObject) format.
+
+This allows you to extend the output of the Swagger UI documentation.
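+
+For example, a hypothetical configuration that adds a custom title and version to the generated documentation via the standard OpenAPI `info` object:
+
+```json
+{
+  ...
+  "core": {
+    "openapi": {
+      "info": {
+        "title": "Pages API",
+        "version": "1.0.0"
+      }
+    }
+  }
+}
+```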
+
diff --git a/package.json b/package.json
new file mode 100644
index 0000000000..9376e2303e
--- /dev/null
+++ b/package.json
@@ -0,0 +1,19 @@
+{
+ "name": "platformatic",
+ "version": "0.0.21",
+ "private": true,
+ "scripts": {
+ "test": "pnpm -r --workspace-concurrency=1 test",
+ "dashboard:start": "cd packages/db-dashboard && pnpm run dev",
+ "dashboard:build": "cd packages/db-dashboard && pnpm run build",
+ "cleanall": "rm pnpm-lock.yaml && rm -rf node_modules && rm -rf packages/*/node_modules",
+ "postinstall": "node ./scripts/postinstall.js"
+ },
+ "packageManager": "pnpm@7.12.1",
+ "devDependencies": {
+ "@fastify/pre-commit": "^2.0.2"
+ },
+ "dependencies": {
+ "desm": "^1.2.0"
+ }
+}
diff --git a/packages/authenticate/LICENSE b/packages/authenticate/LICENSE
new file mode 100644
index 0000000000..261eeb9e9f
--- /dev/null
+++ b/packages/authenticate/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/authenticate/NOTICE b/packages/authenticate/NOTICE
new file mode 100644
index 0000000000..a7d8a8414a
--- /dev/null
+++ b/packages/authenticate/NOTICE
@@ -0,0 +1,13 @@
+ Copyright 2022 Platformatic
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/authenticate/README.md b/packages/authenticate/README.md
new file mode 100644
index 0000000000..73af41206e
--- /dev/null
+++ b/packages/authenticate/README.md
@@ -0,0 +1,27 @@
+# @platformatic/authenticate
+
+Provides login and invite claiming.
+
+## Usage
+
+Login:
+
+```sh
+plt login
+```
+
+Login and claim invite:
+
+```sh
+plt login --claim some-invite-code
+```
+
+Login and automatically open the default browser:
+
+```sh
+plt login --browser
+```
+
+## License
+
+Apache 2.0
diff --git a/packages/authenticate/authenticate.js b/packages/authenticate/authenticate.js
new file mode 100644
index 0000000000..b7ddaefe4e
--- /dev/null
+++ b/packages/authenticate/authenticate.js
@@ -0,0 +1,26 @@
+#! /usr/bin/env node
+
+import commist from 'commist'
+import isMain from 'es-main'
+import { red } from 'colorette'
+import startLogin from './lib/login.js'
+import { print } from './lib/utils.js'
+
+const program = commist()
+program.register('login', startLogin)
+
+export function login (argv) {
+ const result = program.parse(argv)
+ if (result) return startLogin(result, print).catch(exit)
+}
+
+/* c8 ignore next 5 */
+function exit (err) {
+ print(`${red('Unable to authenticate:')}`, console.error)
+ print(`\n\t${err.message}`, console.error)
+ process.exit(1)
+}
+
+if (isMain(import.meta)) {
+ await login(process.argv.splice(2))
+}
diff --git a/packages/authenticate/lib/login.js b/packages/authenticate/lib/login.js
new file mode 100644
index 0000000000..610ca01101
--- /dev/null
+++ b/packages/authenticate/lib/login.js
@@ -0,0 +1,158 @@
+import parseArgs from 'minimist'
+import { request } from 'undici'
+import open from 'open'
+import { blue, green, underline } from 'colorette'
+import { lstat, mkdir } from 'node:fs/promises'
+import path from 'node:path'
+import ConfigManager from '@platformatic/config'
+import schema from './schema.js'
+
+const AP_HOST = process.env.PLT_AUTH_PROXY_HOST || 'https://auth-proxy.fly.dev'
+
+async function triggerAuthentication () {
+ // call auth-proxy to get code
+ const { statusCode, body } = await request(`${AP_HOST}/login`, {
+ method: 'GET',
+ headers: {
+ 'content-type': 'application/json'
+ }
+ })
+
+ if (statusCode !== 200) throw new Error('Unable to contact login service')
+
+ return body.json()
+}
+
+async function getTokens (id) {
+ const { statusCode, body } = await request(`${AP_HOST}/login/ready/${id}`)
+
+ const data = await body.json()
+
+ if (data.error && data.error === 'pending') {
+ return { state: 'pending', data: { id } }
+ } else if (statusCode === 200) {
+ return { state: 'complete', data }
+ } else {
+ throw new Error('Unable to retrieve tokens')
+ }
+}
+
+async function poll (id, timeout, interval) {
+ const expiresAt = Date.now() + timeout
+
+ async function check (resolve, reject) {
+ let result
+ try {
+ result = await getTokens(id)
+ } catch (err) {
+ return reject(err)
+ }
+
+ const { state, data } = result
+
+ if (Date.now() > expiresAt) {
+ reject(new Error('User did not authenticate before expiry'))
+ } else if (state === 'pending') {
+ setTimeout(check, interval, resolve, reject)
+ } else if (state === 'complete') {
+ resolve(data)
+ /* c8 ignore next 3 */
+ } else {
+ // do nothing, never get here
+ }
+ }
+
+ return new Promise(check)
+}
+
+export default async function startLogin (_args, print) {
+ const args = parseArgs(_args, {
+ boolean: 'browser',
+ string: ['claim', 'config']
+ })
+
+ let pltDirPath = path.join(process.env.PLT_HOME, '.platformatic')
+ if (args.config) {
+ const stats = await lstat(args.config)
+ if (stats.isDirectory()) throw new Error('--config option requires path to a file')
+
+ pltDirPath = path.dirname(args.config)
+ }
+
+ try {
+ await mkdir(pltDirPath)
+ /* c8 ignore next 2 */
+ } catch {
+ }
+
+ const config = new ConfigManager({
+ source: args.config || path.join(pltDirPath, 'config.yaml'),
+ schema
+ })
+
+ const { verifyAt, expiresInSeconds, id, intervalSeconds } = await triggerAuthentication()
+
+ // print browser url
+ print(`Open ${blue(underline(verifyAt))} in your browser to continue logging in.`)
+
+ // open browser if requested
+ /* c8 ignore next 1 */
+ if (args.browser) await open(verifyAt)
+
+ const { tokens } = await poll(id, expiresInSeconds * 1000, intervalSeconds * 1000)
+ const { state } = await registerUser(tokens, args.claim)
+ await saveTokens(tokens, config)
+
+ print(`${green(`Success, you have ${state}!`)}`)
+ if (state === 'registered') {
+    print(`Visit our Getting Started guide at ${blue(underline('https://docs.platformatic.dev/getting-started'))} to build your first application`)
+ }
+}
+
+async function saveTokens (tokens, config) {
+ await config.update({ accessToken: tokens.access })
+}
+
+async function registerUser (tokens, invite) {
+ // try to load user
+ const userInfoRes = await request(`${AP_HOST}/users/self`, {
+ method: 'GET',
+ headers: {
+ authorization: `Bearer ${tokens.access}`
+ }
+ })
+
+ if (userInfoRes.statusCode !== 200) {
+ throw new Error('Unable to get user data')
+ }
+
+ const { username, fromProvider } = await userInfoRes.body.json()
+ if (username) {
+ // user is already registered
+ return { state: 'authenticated' }
+ }
+
+ // if no user but is claiming, do claim
+ if (invite && fromProvider.sub) {
+ const claimRes = await request(`${AP_HOST}/claim`, {
+ method: 'POST',
+ body: JSON.stringify({
+ username: fromProvider.nickname,
+ externalId: fromProvider.sub,
+ invite
+ }),
+ headers: {
+ 'content-type': 'application/json',
+ authorization: `Bearer ${tokens.access}`
+ }
+ })
+
+ if (claimRes.statusCode !== 200) {
+ throw new Error('Unable to claim invite')
+ }
+
+ return { state: 'registered' }
+ }
+
+ throw new Error('Missing invite')
+}
diff --git a/packages/authenticate/lib/schema.js b/packages/authenticate/lib/schema.js
new file mode 100644
index 0000000000..f996997b0c
--- /dev/null
+++ b/packages/authenticate/lib/schema.js
@@ -0,0 +1,7 @@
+export default {
+ $id: 'https://schemas.platformatic.dev/auth',
+ type: 'object',
+ properties: {
+ accessToken: { type: 'string' }
+ }
+}
diff --git a/packages/authenticate/lib/utils.js b/packages/authenticate/lib/utils.js
new file mode 100644
index 0000000000..cdba14ed05
--- /dev/null
+++ b/packages/authenticate/lib/utils.js
@@ -0,0 +1,3 @@
+export function print (message, writer = console.log) {
+ writer(message)
+}
diff --git a/packages/authenticate/package.json b/packages/authenticate/package.json
new file mode 100644
index 0000000000..2485769ae3
--- /dev/null
+++ b/packages/authenticate/package.json
@@ -0,0 +1,35 @@
+{
+ "name": "@platformatic/authenticate",
+ "version": "0.0.21",
+ "description": "",
+ "main": "index.js",
+ "type": "module",
+ "scripts": {
+ "test": "standard | snazzy && c8 --100 tap --no-coverage test/*test.js"
+ },
+ "author": "Platformatic.dev",
+ "license": "Apache-2.0",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/plaformatic/platformatic.git"
+ },
+ "bugs": {
+ "url": "https://github.com/plaformatic/platformatic/issues"
+ },
+ "homepage": "https://github.com/plaformatic/platformatic#readme",
+ "devDependencies": {
+ "c8": "^7.12.0",
+ "snazzy": "^9.0.0",
+ "standard": "^17.0.0",
+ "tap": "^16.3.0"
+ },
+ "dependencies": {
+ "@platformatic/config": "workspace:*",
+ "colorette": "^2.0.19",
+ "commist": "^3.0.0",
+ "es-main": "^1.2.0",
+ "minimist": "^1.2.6",
+ "open": "^8.4.0",
+ "undici": "^5.8.2"
+ }
+}
diff --git a/packages/authenticate/test/login.test.js b/packages/authenticate/test/login.test.js
new file mode 100644
index 0000000000..f575e83857
--- /dev/null
+++ b/packages/authenticate/test/login.test.js
@@ -0,0 +1,366 @@
+import { mkdtemp, mkdir, readFile, writeFile } from 'node:fs/promises'
+import { tmpdir } from 'node:os'
+import path from 'node:path'
+import { beforeEach, test } from 'tap'
+import { MockAgent, setGlobalDispatcher } from 'undici'
+import { blue, green, underline } from 'colorette'
+import login from '../lib/login.js'
+
+let mockAgent
+
+async function makeConfig (config = '', name = 'pltconf.yaml', setHomeDir = false) {
+ let tmpPath = await mkdtemp(path.join(tmpdir(), 'plt-authenticate-'))
+ if (setHomeDir) {
+ process.env.PLT_HOME = tmpPath // don't move this line
+ tmpPath = path.join(tmpPath, '.platformatic')
+ await mkdir(tmpPath)
+ }
+
+ const filename = path.join(tmpPath, name)
+ await writeFile(filename, config)
+ return filename
+}
+
+const MSG_VERIFY_AT_URL = `Open ${blue(underline('https://some-auth.pro/vider'))} in your browser to continue logging in.`
+const MSG_REGISTERED = `${green('Success, you have registered!')}`
+const MSG_AUTHENTICATED = `${green('Success, you have authenticated!')}`
+const MSG_GETTING_STARTED = `Visit our Getting Started guide at ${blue(underline('https://docs.platformatic.dev/getting-started'))} to build your first application`
+
+// Ordered message assertions
+function assertMessages (t, expecting) {
+ return message => t.equal(message, expecting.shift())
+}
+
+beforeEach(() => {
+ mockAgent = new MockAgent()
+ setGlobalDispatcher(mockAgent)
+ mockAgent.disableNetConnect()
+
+ process.env.PLT_AUTH_PROXY_HOST = 'http://127.0.0.1:3000'
+ process.env.PLT_HOME = ''
+})
+
+test('should be able to login as an existing user immediately', async (t) => {
+ const authproxy = mockAgent.get('https://auth-proxy.fly.dev')
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login'
+ }).reply(200, {
+ verifyAt: 'https://some-auth.pro/vider',
+ expiresInSeconds: 900,
+ id: 'abc123',
+ intervalSeconds: 5
+ })
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login/ready/abc123'
+ }).reply(200, { tokens: { access: '1234' } })
+ authproxy.intercept({
+ method: 'GET',
+ path: '/users/self'
+ }).reply(200, {
+ username: 'person',
+ role: 'invitee',
+ fromProvider: {
+ sub: 'github|16238872'
+ }
+ })
+
+ const confPath = await makeConfig()
+ const args = ['--config', confPath]
+
+ const print = assertMessages(t, [MSG_VERIFY_AT_URL, MSG_AUTHENTICATED])
+ await t.resolves(login(args, print))
+
+ const actual = await readFile(confPath)
+ t.equal(actual.toString(), 'accessToken: "1234"\n')
+})
+
+test('should use home directory config', async (t) => {
+ const authproxy = mockAgent.get('https://auth-proxy.fly.dev')
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login'
+ }).reply(200, {
+ verifyAt: 'https://some-auth.pro/vider',
+ expiresInSeconds: 900,
+ id: 'abc123',
+ intervalSeconds: 5
+ })
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login/ready/abc123'
+ }).reply(200, { tokens: { access: '1234' } })
+ authproxy.intercept({
+ method: 'GET',
+ path: '/users/self'
+ }).reply(200, {
+ username: 'person',
+ role: 'invitee',
+ fromProvider: {
+ sub: 'github|16238872'
+ }
+ })
+
+ const confPath = await makeConfig('', 'config.yaml', true)
+
+ const print = assertMessages(t, [MSG_VERIFY_AT_URL, MSG_AUTHENTICATED])
+ await t.resolves(login([], print))
+
+ const actual = await readFile(confPath)
+ t.equal(actual.toString(), 'accessToken: "1234"\n')
+})
+
+test('should be able to login after a short wait', async (t) => {
+ const authproxy = mockAgent.get('https://auth-proxy.fly.dev')
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login'
+ }).reply(200, {
+ verifyAt: 'https://some-auth.pro/vider',
+ expiresInSeconds: 5,
+ id: 'abc123',
+ intervalSeconds: 1
+ })
+
+ // pending user auth
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login/ready/abc123'
+ }).reply(200, { error: 'pending' })
+
+ // user has authenticated
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login/ready/abc123'
+ }).reply(200, { tokens: { access: '1234' } })
+
+ authproxy.intercept({
+ method: 'GET',
+ path: '/users/self'
+ }).reply(200, {
+ username: 'person',
+ role: 'invitee',
+ fromProvider: {
+ sub: 'github|16238872'
+ }
+ })
+
+ const confPath = await makeConfig()
+ const args = ['--config', confPath]
+
+ const print = assertMessages(t, [MSG_VERIFY_AT_URL, MSG_AUTHENTICATED])
+ await t.resolves(login(args, print))
+
+ const actual = await readFile(confPath)
+ t.equal(actual.toString(), 'accessToken: "1234"\n')
+})
+
+test('should fail when unable to connect to authproxy', async (t) => {
+ const authproxy = mockAgent.get('https://auth-proxy.fly.dev')
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login'
+ }).reply(500, {})
+
+ const confPath = await makeConfig()
+ const args = ['--config', confPath]
+
+ const print = () => { t.fail('Should not hit the print function') }
+ await t.rejects(login(args, print), new Error('Unable to contact login service'))
+ const actual = await readFile(confPath)
+ t.equal(actual.toString(), '')
+})
+
+test('should fail if there is a problem getting tokens', async (t) => {
+ const authproxy = mockAgent.get('https://auth-proxy.fly.dev')
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login'
+ }).reply(200, {
+ verifyAt: 'https://some-auth.pro/vider',
+ expiresInSeconds: 5,
+ id: 'abc123',
+ intervalSeconds: 1
+ })
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login/ready/abc123'
+ }).reply(500, {})
+
+ const confPath = await makeConfig()
+ const args = ['--config', confPath]
+
+ const print = assertMessages(t, [MSG_VERIFY_AT_URL])
+ await t.rejects(login(args, print), new Error('Unable to retrieve tokens'))
+ const actual = await readFile(confPath)
+ t.equal(actual.toString(), '')
+})
+
+test('should fail if user does not authenticate before link expires', async (t) => {
+ const authproxy = mockAgent.get('https://auth-proxy.fly.dev')
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login'
+ }).reply(200, {
+ verifyAt: 'https://some-auth.pro/vider',
+ expiresInSeconds: 2,
+ id: 'abc123',
+ intervalSeconds: 1
+ })
+
+ // pending user auth
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login/ready/abc123'
+ }).reply(200, { error: 'pending' }).persist()
+
+ const confPath = await makeConfig()
+ const args = ['--config', confPath]
+
+ const print = assertMessages(t, [MSG_VERIFY_AT_URL])
+ await t.rejects(login(args, print), new Error('User did not authenticate before expiry'))
+ const actual = await readFile(confPath)
+ t.equal(actual.toString(), '')
+})
+
+test('should claim an invite', async (t) => {
+ const authproxy = mockAgent.get('https://auth-proxy.fly.dev')
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login'
+ }).reply(200, {
+ verifyAt: 'https://some-auth.pro/vider',
+ expiresInSeconds: 10,
+ id: 'abc123',
+ intervalSeconds: 1
+ })
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login/ready/abc123'
+ }).reply(200, { tokens: { access: '1234' } })
+ authproxy.intercept({
+ method: 'GET',
+ path: '/users/self'
+ }).reply(200, { username: '', fromProvider: { sub: 'github|def567', nickname: 'bobby' } })
+ authproxy.intercept({
+ method: 'POST',
+ path: '/claim',
+ body: JSON.stringify({
+ username: 'bobby',
+ externalId: 'github|def567',
+ invite: 'best.token.ever'
+ })
+ }).reply(200, {})
+
+ const confPath = await makeConfig()
+ const args = ['--config', confPath, '--claim', 'best.token.ever']
+
+ const print = assertMessages(t, [MSG_VERIFY_AT_URL, MSG_REGISTERED, MSG_GETTING_STARTED])
+ await t.resolves(login(args, print))
+
+ const actual = await readFile(confPath)
+ t.equal(actual.toString(), 'accessToken: "1234"\n')
+})
+
+test('should fail when unable to claim an invite', async (t) => {
+ const authproxy = mockAgent.get('https://auth-proxy.fly.dev')
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login'
+ }).reply(200, {
+ verifyAt: 'https://some-auth.pro/vider',
+ expiresInSeconds: 10,
+ id: 'abc123',
+ intervalSeconds: 1
+ })
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login/ready/abc123'
+ }).reply(200, { tokens: { access: '1234' } })
+ authproxy.intercept({
+ method: 'GET',
+ path: '/users/self'
+ }).reply(200, { username: '', fromProvider: { sub: 'github|def567', nickname: 'bobby' } })
+ authproxy.intercept({
+ method: 'POST',
+ path: '/claim'
+ }).reply(400, {})
+
+ const confPath = await makeConfig()
+ const args = ['--config', confPath, '--claim', 'best.token.ever']
+
+ const print = assertMessages(t, [MSG_VERIFY_AT_URL])
+ await t.rejects(login(args, print), new Error('Unable to claim invite'))
+
+ const actual = await readFile(confPath)
+ t.equal(actual.toString(), '')
+})
+
+test('should fail when unable to get any user details', async (t) => {
+ const authproxy = mockAgent.get('https://auth-proxy.fly.dev')
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login'
+ }).reply(200, {
+ verifyAt: 'https://some-auth.pro/vider',
+ expiresInSeconds: 10,
+ id: 'abc123',
+ intervalSeconds: 1
+ })
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login/ready/abc123'
+ }).reply(200, { tokens: { access: '1234' } })
+ authproxy.intercept({
+ method: 'GET',
+ path: '/users/self'
+ }).reply(400, {})
+
+ const confPath = await makeConfig()
+ const args = ['--config', confPath]
+
+ const print = assertMessages(t, [MSG_VERIFY_AT_URL])
+ await t.rejects(login(args, print), new Error('Unable to get user data'))
+
+ const actual = await readFile(confPath)
+ t.equal(actual.toString(), '')
+})
+
+test('should fail when not registered and no invite to be claimed', async (t) => {
+ const authproxy = mockAgent.get('https://auth-proxy.fly.dev')
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login'
+ }).reply(200, {
+ verifyAt: 'https://some-auth.pro/vider',
+ expiresInSeconds: 10,
+ id: 'abc123',
+ intervalSeconds: 1
+ })
+ authproxy.intercept({
+ method: 'GET',
+ path: '/login/ready/abc123'
+ }).reply(200, { tokens: { access: '1234' } })
+ authproxy.intercept({
+ method: 'GET',
+ path: '/users/self'
+ }).reply(200, { username: '', fromProvider: {} })
+
+ const confPath = await makeConfig()
+ const args = ['--config', confPath]
+
+ const print = assertMessages(t, [MSG_VERIFY_AT_URL])
+ await t.rejects(login(args, print), new Error('Missing invite'))
+
+ const actual = await readFile(confPath)
+ t.equal(actual.toString(), '')
+})
+
+test('should fail if no file name is set', async (t) => {
+ const confPath = await makeConfig()
+ const args = ['--config', path.dirname(confPath)]
+
+ const print = () => t.fail('Should not hit print')
+ await t.rejects(login(args, print), new Error('--config option requires path to a file'))
+})
diff --git a/packages/cli/.npmignore b/packages/cli/.npmignore
new file mode 100644
index 0000000000..be27365759
--- /dev/null
+++ b/packages/cli/.npmignore
@@ -0,0 +1,2 @@
+.nyc_output
+coverage
diff --git a/packages/cli/.taprc b/packages/cli/.taprc
new file mode 100644
index 0000000000..c1917e8701
--- /dev/null
+++ b/packages/cli/.taprc
@@ -0,0 +1 @@
+jobs: 1
diff --git a/packages/cli/LICENSE b/packages/cli/LICENSE
new file mode 100644
index 0000000000..261eeb9e9f
--- /dev/null
+++ b/packages/cli/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/cli/NOTICE b/packages/cli/NOTICE
new file mode 100644
index 0000000000..a7d8a8414a
--- /dev/null
+++ b/packages/cli/NOTICE
@@ -0,0 +1,13 @@
+ Copyright 2022 Platformatic
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/cli/README.md b/packages/cli/README.md
new file mode 100644
index 0000000000..a1bed5dc36
--- /dev/null
+++ b/packages/cli/README.md
@@ -0,0 +1,35 @@
+# Platformatic
+
+Platformatic is a set of Open Source tools that you can use to build your own
+_Internal Developer Platform_.
+
+The first of these tools is **Platformatic DB** — more will follow!
+
+## Install
+
+```bash
+npm install platformatic
+
+# Start a new project
+npx platformatic db init
+```
+
+Follow our [Quick Start Guide](https://oss.platformatic.dev/docs/getting-started/quick-start-guide)
+to get up and running with Platformatic DB.
+
+## Documentation
+
+- [Getting Started](https://oss.platformatic.dev/docs/category/getting-started)
+- [Reference](https://oss.platformatic.dev/docs/category/reference)
+- [Guides](https://oss.platformatic.dev/docs/category/guides)
+
+Check out our full documentation at [oss.platformatic.dev](https://oss.platformatic.dev).
+
+## Support
+
+Having issues? Drop in to the [Platformatic Discord](https://discord.com/channels/1011258196905689118/1011258204371554307)
+for help.
+
+## License
+
+Apache 2.0
diff --git a/packages/cli/cli.js b/packages/cli/cli.js
new file mode 100755
index 0000000000..9073e2246b
--- /dev/null
+++ b/packages/cli/cli.js
@@ -0,0 +1,57 @@
+#! /usr/bin/env node
+
+import commist from 'commist'
+import minimist from 'minimist'
+import { runDB } from '@platformatic/db/db.mjs'
+import { login } from '@platformatic/authenticate/authenticate.js'
+import { readFile } from 'fs/promises'
+import { join } from 'desm'
+import { isColorSupported } from 'colorette'
+import helpMe from 'help-me'
+
+import { logo } from './lib/ascii.js'
+
+const program = commist()
+const help = helpMe({
+ dir: join(import.meta.url, 'help'),
+ // the default
+ ext: '.txt'
+})
+
+program.register('db', runDB)
+program.register('help', help.toStdout)
+program.register('help db', function (args) {
+ runDB(['help', ...args])
+})
+program.register({ command: 'login', strict: true }, login)
+
+/* c8 ignore next 3 */
+if (isColorSupported && process.stdout.isTTY) {
+ console.log(logo)
+}
+
+const args = minimist(process.argv.slice(2), {
+ boolean: ['help', 'version'],
+ alias: {
+ help: 'h',
+ version: 'v'
+ }
+})
+
+if (args.version) {
+ const version = JSON.parse(await readFile(join(import.meta.url, 'package.json'))).version
+ console.log('v' + version)
+ process.exit(0)
+}
+
+if (args.help) {
+ help.toStdout(['help'])
+} else if (process.argv.length > 2) {
+ const result = program.parse(process.argv.slice(2))
+
+ if (result) {
+ console.log('Command not found:', result.join(' '))
+ }
+} else {
+ help.toStdout(['help'])
+}
diff --git a/packages/cli/help/help.txt b/packages/cli/help/help.txt
new file mode 100644
index 0000000000..f894bf8ce7
--- /dev/null
+++ b/packages/cli/help/help.txt
@@ -0,0 +1,5 @@
+Welcome to Platformatic. Available commands are:
+
+* help - Display this message
+* help <command> - shows more information about a command.
+* db - start Platformatic DB; type `platformatic db help` to know more.
diff --git a/packages/cli/lib/ascii.js b/packages/cli/lib/ascii.js
new file mode 100644
index 0000000000..f563727318
--- /dev/null
+++ b/packages/cli/lib/ascii.js
@@ -0,0 +1,36 @@
+import { green } from 'colorette'
+
+const str = `
+
+
+
+
+ /////////////
+ ///// /////
+ /// ///
+ /// ///
+ /// ///
+ && /// /// &&
+ &&&&&& /// /// &&&&&&
+ &&&& /// /// &&&&
+ &&& /// /// &&&&&&&&&&&&
+ &&& /// /////// //// && &&&&&
+ && /// /////////////// &&&
+ &&& /// /// &&&
+ &&& /// // &&
+ &&& /// &&
+ &&& /// &&&
+ &&&& /// &&&
+ &&&&&% /// &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
+ ///
+ ///
+ ///
+ ///
+ ///
+ ///
+
+`
+
+export const logo = str.replace(/\//g, function (str) {
+ return green(str)
+})
diff --git a/packages/cli/package.json b/packages/cli/package.json
new file mode 100644
index 0000000000..eaad737921
--- /dev/null
+++ b/packages/cli/package.json
@@ -0,0 +1,36 @@
+{
+ "name": "@platformatic/cli",
+ "version": "0.0.21",
+ "description": "Platformatic DB cli",
+ "main": "cli.js",
+ "type": "module",
+ "bin": {
+ "platformatic": "cli.js"
+ },
+ "scripts": {
+ "test": "standard | snazzy && c8 --100 tap --no-coverage test/*.test.js"
+ },
+ "author": "Matteo Collina ",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/platformatic/platformatic.git"
+ },
+ "license": "Apache-2.0",
+ "devDependencies": {
+ "c8": "^7.11.0",
+ "execa": "^6.1.0",
+ "snazzy": "^9.0.0",
+ "split2": "^4.1.0",
+ "standard": "^17.0.0",
+ "tap": "^16.0.0"
+ },
+ "dependencies": {
+ "@platformatic/authenticate": "workspace:*",
+ "@platformatic/db": "workspace:*",
+ "colorette": "^2.0.19",
+ "commist": "^3.1.2",
+ "desm": "^1.2.0",
+ "help-me": "^4.1.0",
+ "minimist": "^1.2.6"
+ }
+}
diff --git a/packages/cli/test/helper.js b/packages/cli/test/helper.js
new file mode 100644
index 0000000000..0f5407e6c2
--- /dev/null
+++ b/packages/cli/test/helper.js
@@ -0,0 +1,7 @@
+import { join } from 'desm'
+
+const cliPath = join(import.meta.url, '..', 'cli.js')
+
+export {
+ cliPath
+}
diff --git a/packages/cli/test/platformatic.test.js b/packages/cli/test/platformatic.test.js
new file mode 100644
index 0000000000..2b1b69103f
--- /dev/null
+++ b/packages/cli/test/platformatic.test.js
@@ -0,0 +1,60 @@
+import { test } from 'tap'
+import { join } from 'desm'
+import { readFile } from 'fs/promises'
+import { execa } from 'execa'
+import { cliPath } from './helper.js'
+import { EOL } from 'os'
+
+const version = JSON.parse(await readFile(join(import.meta.url, '..', 'package.json'))).version
+const help = await readFile(join(import.meta.url, '..', 'help', 'help.txt'), 'utf8')
+
+// This reads a file from packages/db
+const helpDB = await readFile(join(import.meta.url, '..', '..', 'db', 'help', 'help.txt'), 'utf8')
+
+test('version', async (t) => {
+ const { stdout } = await execa('node', [cliPath, '--version'])
+ t.ok(stdout.includes('v' + version))
+})
+
+test('db', async (t) => {
+ try {
+ await execa('node', [cliPath, 'db'])
+ t.fail('bug')
+ } catch (err) {
+ t.ok(err.stderr.includes('Missing config file'))
+ }
+})
+
+test('login', async (t) => {
+ try {
+ await execa('node', [cliPath, 'login'])
+ t.fail('bug')
+ } catch (err) {
+ t.ok(err.stderr.includes('Unable to authenticate:'))
+ }
+})
+
+test('command not found', async (t) => {
+ const { stdout } = await execa('node', [cliPath, 'foo'])
+ t.ok(stdout.includes('Command not found: foo'))
+})
+
+test('prints the help with help command', async (t) => {
+ const { stdout } = await execa('node', [cliPath, 'help'])
+ t.equal(stdout + EOL, help)
+})
+
+test('prints the help with help flag', async (t) => {
+ const { stdout } = await execa('node', [cliPath, '--help'])
+ t.equal(stdout + EOL, help)
+})
+
+test('prints the help of db', async (t) => {
+ const { stdout } = await execa('node', [cliPath, 'help', 'db'])
+ t.equal(stdout + EOL, helpDB)
+})
+
+test('prints the help if no command is specified', async (t) => {
+ const { stdout } = await execa('node', [cliPath])
+ t.equal(stdout + EOL, help)
+})
diff --git a/packages/config/LICENSE b/packages/config/LICENSE
new file mode 100644
index 0000000000..261eeb9e9f
--- /dev/null
+++ b/packages/config/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/config/NOTICE b/packages/config/NOTICE
new file mode 100644
index 0000000000..a7d8a8414a
--- /dev/null
+++ b/packages/config/NOTICE
@@ -0,0 +1,13 @@
+ Copyright 2022 Platformatic
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/config/README.md b/packages/config/README.md
new file mode 100644
index 0000000000..2b8d0a60c1
--- /dev/null
+++ b/packages/config/README.md
@@ -0,0 +1,18 @@
+# @platformatic/config
+
+Utility to load and update configuration files in Platformatic.
+
+## Install
+
+```sh
+npm install @platformatic/config
+```
+
+## Usage
+
+TBD
+
+## License
+
+Apache 2.0
+
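The Usage section above is still marked TBD; as a rough, unofficial sketch of how the `ConfigManager` added in this package can be driven (the config file name, schema, and env values below are illustrative and not part of the package):

```js
'use strict'

const ConfigManager = require('@platformatic/config')

async function main () {
  const cm = new ConfigManager({
    // Hypothetical config file; any .json/.json5/.yaml/.yml/.toml path works.
    source: './platformatic.db.json',
    // Illustrative JSON schema used by validate().
    schema: {
      type: 'object',
      properties: {
        server: {
          type: 'object',
          properties: {
            hostname: { type: 'string' },
            port: { type: 'string' }
          }
        }
      }
    },
    // Values for {PLT_*} placeholders found in the config file.
    env: { PLT_PORT: '3042' }
  })

  // parse() loads the file, replaces placeholders, and validates against
  // the schema; it resolves to true on success and false otherwise.
  const ok = await cm.parse()
  if (!ok) {
    console.error(cm.validationErrors)
    return
  }

  console.log(cm.current)
}

main()
```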
diff --git a/packages/config/index.d.ts b/packages/config/index.d.ts
new file mode 100644
index 0000000000..b456cd57ac
--- /dev/null
+++ b/packages/config/index.d.ts
@@ -0,0 +1,42 @@
+import { type InstanceOptions } from 'ajv'
+import { type FastifyPluginAsync } from 'fastify'
+interface IEnv {
+ [key: string]: string
+}
+interface IConfigManagerOptions {
+ source: string | JsonMap
+ schema?: object
+ schemaOptions?: InstanceOptions
+ env?: IEnv
+ envWhitelist?: string[]
+ watch?: boolean
+ watchIgnore?: string[]
+}
+
+type JsonArray = boolean[] | number[] | string[] | JsonMap[] | Date[]
+type AnyJson = boolean | number | string | JsonMap | Date | JsonArray | JsonArray[]
+
+interface JsonMap {
+ [key: string]: AnyJson;
+}
+
+interface ISerializer {
+ parse(src: string): JsonMap
+ stringify(obj: JsonMap): string
+}
+export declare class ConfigManager {
+ constructor(opts: IConfigManagerOptions)
+ current: object
+ stopWatch(): void
+  startWatch(): Promise<void>
+ getSerializer(): ISerializer
+ purgeEnv(): IEnv
+ replaceEnv(configString: string): string
+  parse(): Promise<boolean>
+ validate(): boolean
+ fixSqliteLocation(): void
+ toFastifyPlugin(): FastifyPluginAsync
+  update(config: JsonMap): Promise<boolean | undefined>
+  save(): Promise<void>
+  load(): Promise<string>
+}
diff --git a/packages/config/index.js b/packages/config/index.js
new file mode 100644
index 0000000000..2547d95b77
--- /dev/null
+++ b/packages/config/index.js
@@ -0,0 +1,252 @@
+'use strict'
+
+const { extname, join, resolve, dirname } = require('path')
+const { readFile, watch, writeFile, access } = require('fs/promises')
+const { tmpdir } = require('os')
+const EventEmitter = require('events')
+const Ajv = require('ajv')
+const fastifyPlugin = require('./lib/plugin')
+const YAML = require('yaml')
+const TOML = require('@iarna/toml')
+const JSON5 = require('json5')
+const dotenv = require('dotenv')
+const minimatch = require('minimatch')
+class ConfigManager extends EventEmitter {
+ constructor (opts) {
+ super()
+ this.watchIgnore = opts.watchIgnore || []
+ this.pupa = null
+ this.abortController = null
+ this._shouldSave = false
+ this.envWhitelist = opts.envWhitelist || []
+ if (!opts.source) {
+ throw new Error('Source missing.')
+ }
+
+ this.validationErrors = []
+ if (typeof opts.source === 'string') {
+ this.fullPath = resolve(opts.source)
+ } else {
+ this.fullPath = join(tmpdir(), `platformatic-db-config-${Date.now()}.json`)
+ this.current = opts.source
+ this._shouldSave = true
+ }
+ this.serializer = this.getSerializer()
+ this.schema = opts.schema || {}
+ this.schemaOptions = opts.schemaOptions || {}
+ this._originalEnv = opts.env || {}
+ this.env = this.purgeEnv(this._originalEnv)
+ /* c8 ignore next 3 */
+ if (opts.watch) {
+ this.startWatch()
+ }
+ }
+
+ toFastifyPlugin () {
+ return async (app, opts) => {
+ return fastifyPlugin(app, {
+ ...opts,
+ configManager: this
+ })
+ }
+ }
+
+ async stopWatch () {
+ if (!this.abortController) {
+ return
+ }
+ this.abortController.abort()
+ this.abortController = false
+ await this._watcher.catch(() => {})
+ }
+
+ startWatch () {
+ if (this.abortController) {
+ return this._watcher
+ }
+ this.abortController = new AbortController()
+ const { signal } = this.abortController
+ const watcher = watch(dirname(this.fullPath), { signal, recursive: true })
+ let timer = null
+ const refresh = async () => {
+ timer = null
+ try {
+ await this.parseAndValidate()
+ this.emit('update', this.current)
+ } catch (err) {
+ this.emit('error', err)
+ }
+ }
+
+ const loop = async () => {
+ for await (const event of watcher) {
+ if (timer) {
+ continue
+ }
+
+ // eventType can be both 'change' and 'rename'
+ /* c8 ignore next 1 */
+ if (event.eventType === 'change' || event.eventType === 'rename') {
+ if (this.shouldFileBeWatched(event.filename)) {
+ timer = setTimeout(refresh, 100)
+ }
+ }
+ }
+ /* c8 ignore next 1 */
+ }
+
+ this._watcher = loop()
+ return this._watcher
+ }
+
+ getSerializer () {
+ switch (extname(this.fullPath)) {
+ case '.yaml':
+ case '.yml':
+ return YAML
+ case '.json':
+ return JSON
+ case '.json5':
+ return JSON5
+ case '.toml':
+ return TOML
+ default:
+ throw new Error('Invalid config file extension. Only yml, yaml, json, json5, toml are supported.')
+ }
+ }
+
+ purgeEnv (providedEnvironment) {
+ const env = {
+ ...process.env,
+ ...providedEnvironment
+ }
+ const purged = {}
+ for (const key in env) {
+ if (key.match(/^PLT_/) || this.envWhitelist.includes(key)) {
+ purged[key] = env[key]
+ }
+ }
+ return purged
+ }
+
+ async replaceEnv (configString) {
+ if (this.pupa === null) {
+ this.pupa = (await import('pupa')).default
+ }
+ const paths = [
+ join(dirname(this.fullPath), '.env'),
+ join(process.cwd(), '.env')
+ ]
+ let dotEnvPath
+ for (const p of paths) {
+ try {
+ await access(p)
+ dotEnvPath = p
+ break
+ } catch {
+ // Nothing to do
+ }
+ }
+ let env = { ...this._originalEnv }
+ if (dotEnvPath) {
+ const data = await readFile(dotEnvPath, 'utf-8')
+ const parsed = dotenv.parse(data)
+ env = { ...env, ...parsed }
+ }
+ this.env = this.purgeEnv(env)
+ return this.pupa(configString, this.env)
+ }
+
+ _transformConfig () {}
+
+ async parse () {
+ try {
+ if (this._shouldSave) {
+ await this.save()
+ this._shouldSave = false
+ }
+ const configString = await this.load()
+ this.current = this.serializer.parse(await this.replaceEnv(configString))
+ const validationResult = this.validate()
+ if (!validationResult) {
+ return false
+ }
+ this._transformConfig()
+ return true
+ } catch (err) {
+ if (err.name === 'MissingValueError') {
+ if (!err.key.match(/^PLT_/) && !this.envWhitelist.includes(err.key)) {
+ throw new Error(`${err.key} is an invalid placeholder. All placeholders must be prefixed with PLT_.\nDid you mean PLT_${err.key}?`)
+ } else {
+ throw new Error(`${err.key} env variable is missing.`)
+ }
+ }
+ throw new Error(`Cannot parse config file. ${err.message}`)
+ }
+ }
+
+ validate () {
+ if (!this.current) {
+ return false
+ }
+ const ajv = new Ajv(this.schemaOptions)
+ const ajvValidate = ajv.compile(this.schema)
+
+ const res = ajvValidate(this.current)
+ /* c8 ignore next 12 */
+ if (!res) {
+ this.validationErrors = ajvValidate.errors.map((err) => {
+ return {
+ path: err.instancePath === '' ? '/' : err.instancePath,
+ message: err.message + ' ' + JSON.stringify(err.params)
+ }
+ })
+ return false
+ }
+ return true
+ }
+
+ async parseAndValidate () {
+ const validationResult = await this.parse()
+ if (!validationResult) {
+ throw new Error(this.validationErrors.map((err) => {
+ return err.message
+ }).join('\n'))
+ }
+ }
+
+ async update (newConfig) {
+ const _old = { ...this.current }
+ this.current = newConfig
+ if (this.validate()) {
+ return this.save()
+ }
+ this.current = _old
+ return false
+ }
+
+ async save () {
+ if (!this.current) {
+ return false
+ }
+ return await writeFile(this.fullPath, this.serializer.stringify(this.current))
+ }
+
+ async load () {
+ const configString = await readFile(this.fullPath, 'utf-8')
+ return configString
+ }
+
+ shouldFileBeWatched (fileName) {
+ let found = true
+ for (const ignoredFile of this.watchIgnore) {
+ if (minimatch(fileName, ignoredFile)) {
+ found = false
+ break
+ }
+ }
+ return found
+ }
+}
+
+module.exports = ConfigManager
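For reference, a minimal sketch of the watch flow implemented by `startWatch()` above, mirroring what `test/watch.test.js` exercises (the file path and ignore glob are illustrative):

```js
'use strict'

const ConfigManager = require('@platformatic/config')

async function watchConfig () {
  const cm = new ConfigManager({
    source: '/tmp/platformatic.json', // hypothetical config file
    schema: { type: 'object' },
    watch: true, // the constructor calls startWatch() for us
    watchIgnore: ['*.log'] // files matching these globs do not trigger reloads
  })
  await cm.parse()

  // 'update' fires when a watched file changes and the new config parses
  // and validates; 'error' fires when the changed config is invalid.
  cm.on('update', (config) => {
    console.log('config reloaded', config)
  })
  cm.on('error', (err) => {
    console.error('config change rejected', err)
  })

  // Call stopWatch() to abort the fs watcher and let the process exit.
  // await cm.stopWatch()
}

watchConfig()
```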
diff --git a/packages/config/lib/plugin.js b/packages/config/lib/plugin.js
new file mode 100644
index 0000000000..9a983c2424
--- /dev/null
+++ b/packages/config/lib/plugin.js
@@ -0,0 +1,62 @@
+'use strict'
+
+async function configRoutes (app, opts) {
+ const { configManager } = opts
+ const headersSchema = {
+ type: 'object',
+ properties: {
+ 'x-platformatic-admin-secret': {
+ type: 'string',
+ description: 'The secret defined in authorization.adminSecret property of config file.'
+ }
+ },
+ required: ['x-platformatic-admin-secret']
+ }
+ const unauthorizedResponseSchema = {
+ type: 'object',
+ properties: {
+ success: { type: 'boolean', default: false },
+ message: { type: 'string', default: 'Unauthorized' }
+ }
+ }
+  // TODO: where do we implement authorization for this?
+ // app.addHook('preHandler', async (request, reply) => {
+ // if (!request.user) {
+ // return reply.code(401).send({ success: false, message: 'Unauthorized' })
+ // }
+ // })
+ app.post('/config-file', {
+ schema: {
+ headers: headersSchema,
+ response: {
+ 200: {
+ type: 'object',
+ properties: {
+ success: { type: 'boolean' }
+ }
+ },
+ 401: unauthorizedResponseSchema
+ },
+ body: {
+ type: 'object'
+ }
+ }
+ }, async (req, reply) => {
+ await configManager.update(req.body)
+ return reply
+ .code(200)
+ .header('Content-Type', 'application/json')
+ .send({ success: true })
+ })
+
+ app.get('/config-file', async (req, reply) => {
+ const data = configManager.current
+ return reply
+ .code(200)
+ .header('Content-Type', 'application/json')
+ .send(data)
+ })
+ app.decorate('platformaticConfigManager', configManager)
+}
+
+module.exports = configRoutes
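A small sketch of wiring these routes into a Fastify app, following the same pattern as `test/plugin.test.js` (the config file name and schema are illustrative, and `fastify` is assumed to be available alongside this package):

```js
'use strict'

const Fastify = require('fastify')
const ConfigManager = require('@platformatic/config')

async function start () {
  const cm = new ConfigManager({
    source: './platformatic.json', // hypothetical config file
    schema: { type: 'object' }
  })
  await cm.parse()

  const app = Fastify({ logger: false })
  // toFastifyPlugin() binds this manager instance to the routes above.
  app.register(cm.toFastifyPlugin())

  // GET /config-file returns the currently loaded configuration.
  const res = await app.inject({ method: 'GET', url: '/config-file' })
  console.log(res.json())

  await app.close()
}

start()
```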
diff --git a/packages/config/package.json b/packages/config/package.json
new file mode 100644
index 0000000000..a232791fdc
--- /dev/null
+++ b/packages/config/package.json
@@ -0,0 +1,35 @@
+{
+ "name": "@platformatic/config",
+ "version": "0.0.21",
+ "description": "Platformatic DB Config Manager",
+ "main": "index.js",
+ "scripts": {
+ "test": "standard | snazzy && c8 --100 tap --no-coverage test/*test.js"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/platformatic/platformatic.git"
+ },
+ "author": "Leonardo Rossi ",
+ "license": "Apache-2.0",
+ "bugs": {
+ "url": "https://github.com/platformatic/platformatic/issues"
+ },
+ "homepage": "https://github.com/platformatic/platformatic#readme",
+ "devDependencies": {
+ "snazzy": "^9.0.0",
+ "standard": "^17.0.0",
+ "tap": "^16.0.0"
+ },
+ "dependencies": {
+ "@iarna/toml": "^2.2.5",
+ "ajv": "^8.11.0",
+ "c8": "^7.11.0",
+ "dotenv": "^16.0.1",
+ "json5": "^2.2.1",
+ "minimatch": "^5.1.0",
+ "pupa": "^3.1.0",
+ "undici": "^5.8.0",
+ "yaml": "^2.1.1"
+ }
+}
diff --git a/packages/config/test/fixtures/bad-placeholder.json b/packages/config/test/fixtures/bad-placeholder.json
new file mode 100644
index 0000000000..4ace1bd0df
--- /dev/null
+++ b/packages/config/test/fixtures/bad-placeholder.json
@@ -0,0 +1,18 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "logger": {
+ "level": "info"
+ },
+ "port": "{PORT}"
+ },
+ "core": {
+ "connectionString": "sqlite://./db.sqlite"
+ },
+ "migrations": {
+ "dir": "./migrations"
+ },
+ "plugin": {
+ "path": "./plugin-sum.js"
+ }
+}
\ No newline at end of file
diff --git a/packages/config/test/fixtures/simple.json b/packages/config/test/fixtures/simple.json
new file mode 100644
index 0000000000..16656e15f6
--- /dev/null
+++ b/packages/config/test/fixtures/simple.json
@@ -0,0 +1,36 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": "3042",
+ "logger": {
+ "level": "info"
+ }
+ },
+ "metrics": {
+ "auth": {
+ "username": "plt-db",
+ "password": "plt-db"
+ }
+ },
+ "plugin": {
+ "path": "./plugin-sum.js"
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@localhost:5432/postgres",
+ "graphiql": true,
+ "ignore": {
+ "versions": true
+ }
+ },
+ "migrations": {
+ "dir": "./demo/migrations",
+ "validateChecksums": false
+ },
+ "dashboard": {
+ "enabled": true,
+ "rootPath": true
+ },
+ "authorization": {
+ "adminSecret": "plt-db"
+ }
+}
\ No newline at end of file
diff --git a/packages/config/test/fixtures/simple.json5 b/packages/config/test/fixtures/simple.json5
new file mode 100644
index 0000000000..a6f8fb1f2e
--- /dev/null
+++ b/packages/config/test/fixtures/simple.json5
@@ -0,0 +1,41 @@
+{
+ server: {
+ // Server ip address
+ hostname: "127.0.0.1",
+ // Server port
+ port: "3042",
+ logger: {
+ level: "info"
+ }
+ },
+ "metrics": {
+ "auth": {
+ "username": "plt-db",
+ "password": "plt-db"
+ }
+ },
+ "plugin": {
+ "path": "./plugin-sum.js"
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@localhost:5432/postgres",
+ "graphiql": true,
+ "ignore": {
+ "versions": true
+ }
+ },
+ "migrations": {
+ "dir": "./demo/auth/migrations",
+ "validateChecksums": false
+ },
+ "dashboard": {
+ "enabled": true,
+ "rootPath": true
+ },
+ "authorization": {
+ // Single quotes are allowed
+ "adminSecret": 'plt-db'
+ },
+ foobar: 'foobar',
+ // Trailing comma is allowed
+}
\ No newline at end of file
diff --git a/packages/config/test/fixtures/simple.toml b/packages/config/test/fixtures/simple.toml
new file mode 100644
index 0000000000..81725f8fcc
--- /dev/null
+++ b/packages/config/test/fixtures/simple.toml
@@ -0,0 +1,33 @@
+foobar = "{PLT_FOOBAR}"
+
+[server]
+hostname = "127.0.0.1"
+port = "3042"
+
+ [server.logger]
+ level = "info"
+
+[metrics.auth]
+username = "plt-db"
+password = "plt-db"
+
+[plugin]
+path = "./plugin-sum.js"
+
+[core]
+connectionString = "postgres://postgres:postgres@localhost:5432/postgres"
+graphiql = true
+
+ [core.ignore]
+ versions = true
+
+[migrations]
+dir = "./demo/auth/migrations"
+validateChecksums = false
+
+[dashboard]
+enabled = true
+rootPath = true
+
+[authorization]
+adminSecret = "plt-db"
diff --git a/packages/config/test/fixtures/simple.yaml b/packages/config/test/fixtures/simple.yaml
new file mode 100644
index 0000000000..e6325befb2
--- /dev/null
+++ b/packages/config/test/fixtures/simple.yaml
@@ -0,0 +1,26 @@
+---
+server:
+ hostname: 127.0.0.1
+ port: '3042'
+ logger:
+ level: info
+metrics:
+ auth:
+ username: "plt-db"
+ password: "plt-db"
+plugin:
+ path: "./plugin-sum.js"
+core:
+ connectionString: postgres://postgres:postgres@localhost:5432/postgres
+ graphiql: true
+ ignore:
+ versions: true
+migrations:
+ dir: "./demo/auth/migrations"
+ validateChecksums: false
+dashboard:
+ enabled: true
+ rootPath: true
+authorization:
+ adminSecret: "plt-db"
+foobar: "{PLT_FOOBAR}"
diff --git a/packages/config/test/fixtures/sqlite.json b/packages/config/test/fixtures/sqlite.json
new file mode 100644
index 0000000000..2d15227820
--- /dev/null
+++ b/packages/config/test/fixtures/sqlite.json
@@ -0,0 +1,16 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": "3042",
+ "logger": {
+ "level": "info"
+ }
+ },
+ "core": {
+    "connectionString": "sqlite://./demo/db.sqlite3",
+ "graphiql": true,
+ "ignore": {
+ "versions": true
+ }
+ }
+}
\ No newline at end of file
diff --git a/packages/config/test/helper.js b/packages/config/test/helper.js
new file mode 100644
index 0000000000..d135de904d
--- /dev/null
+++ b/packages/config/test/helper.js
@@ -0,0 +1,18 @@
+'use strict'
+const os = require('os')
+const { join } = require('path')
+const { writeFile } = require('fs/promises')
+function getTempFile (filename = 'platformatic.json') {
+ return join(os.tmpdir(), filename)
+}
+
+async function saveConfigToFile (config, filename, serializer = JSON) {
+ const targetFile = getTempFile(filename)
+ await writeFile(targetFile, serializer.stringify(config))
+ return targetFile
+}
+
+module.exports = {
+ getTempFile,
+ saveConfigToFile
+}
diff --git a/packages/config/test/index.test.js b/packages/config/test/index.test.js
new file mode 100644
index 0000000000..e64e1fbb2d
--- /dev/null
+++ b/packages/config/test/index.test.js
@@ -0,0 +1,225 @@
+'use strict'
+
+const { test } = require('tap')
+const { resolve } = require('path')
+const ConfigManager = require('..')
+const path = require('path')
+const { unlink, writeFile, mkdir } = require('fs/promises')
+const os = require('os')
+const pid = process.pid
+
+test('should compute absolute path', ({ equal, plan }) => {
+ plan(1)
+ const cm = new ConfigManager({ source: './test.json' })
+ equal(cm.fullPath, resolve(process.cwd(), './test.json'))
+})
+
+test('should throw if both path and config are not defined', async ({ equal, plan, fail }) => {
+ plan(1)
+ try {
+ const cm = new ConfigManager({})
+ await cm.parse()
+ fail()
+ } catch (err) {
+ equal(err.message, 'Source missing.')
+ }
+})
+
+test('should accept and parse initial config object', async ({ same, equal, plan }) => {
+ plan(1)
+ const cm = new ConfigManager({
+ source: {
+ server: {
+ hostname: '127.0.0.1',
+ port: '3042'
+ }
+ }
+ })
+ await cm.parse()
+ same(cm.current, {
+ server: {
+ hostname: '127.0.0.1',
+ port: '3042'
+ }
+ })
+})
+
+test('should purge env', ({ plan, same, teardown }) => {
+ plan(2)
+ {
+ // passed env
+ const cm = new ConfigManager({
+ source: './test.json',
+ env: {
+ FOOBAR: 'foobar',
+ PLT_FOOBAR: 'plt_foobar'
+ }
+ })
+
+ same(cm.env, {
+ PLT_FOOBAR: 'plt_foobar'
+ })
+ }
+ {
+ // from process env
+ process.env.FOOBAR = 'foobar'
+ process.env.PLT_FOOBAR = 'plt_foobar'
+ const cm = new ConfigManager({ source: './fixtures/test.json' })
+ teardown(() => {
+ delete process.env.FOOBAR
+ delete process.env.PLT_FOOBAR
+ })
+ same(cm.env, {
+ PLT_FOOBAR: 'plt_foobar'
+ })
+ }
+})
+
+test('support env white list', ({ plan, same, teardown }) => {
+ plan(2)
+ {
+ // passed env
+ const cm = new ConfigManager({
+ source: './test.json',
+ env: {
+ FOOBAR: 'foobar',
+ PLT_FOOBAR: 'plt_foobar'
+ },
+ envWhitelist: ['FOOBAR']
+ })
+
+ same(cm.env, {
+ PLT_FOOBAR: 'plt_foobar',
+ FOOBAR: 'foobar'
+ })
+ }
+ {
+ // from process env
+ process.env.FOOBAR = 'foobar'
+ process.env.PLT_FOOBAR = 'plt_foobar'
+ const cm = new ConfigManager({ source: './fixtures/test.json', envWhitelist: ['FOOBAR'] })
+ teardown(() => {
+ delete process.env.FOOBAR
+ delete process.env.PLT_FOOBAR
+ })
+ same(cm.env, {
+ PLT_FOOBAR: 'plt_foobar',
+ FOOBAR: 'foobar'
+ })
+ }
+})
+
+test('should not validate if parsing is not called', ({ plan, same, teardown }) => {
+ plan(1)
+ const cm = new ConfigManager({
+ source: './test.json'
+ })
+ same(cm.validate(), false)
+})
+
+test('should throw if file is not JSON, yaml, or toml', async ({ fail, equal, plan }) => {
+ plan(1)
+ try {
+ const cm = new ConfigManager({
+ source: './test.txt'
+ })
+ await cm.parse()
+ fail()
+ } catch (err) {
+ equal(err.message, 'Invalid config file extension. Only yml, yaml, json, json5, toml are supported.')
+ }
+})
+
+test('should look for a .env file in the same folder as the config file', async ({ same, fail, plan, teardown, comment }) => {
+ plan(1)
+ const tmpDir = path.join(os.tmpdir(), `plt-auth-${pid}`)
+ await mkdir(tmpDir)
+ const config = {
+ name: 'Platformatic',
+ props: {
+ foo: '{PLT_PROP}'
+ }
+ }
+ const schema = {
+ type: 'object',
+ properties: {
+ name: { type: 'string' },
+ props: {
+ type: 'object',
+ properties: {
+ foo: { type: 'string' },
+ bar: { type: 'integer' }
+ }
+ }
+ }
+ }
+
+ const file = path.join(tmpDir, 'uses-env.json')
+ const envFile = path.join(tmpDir, '.env')
+
+ await writeFile(envFile, 'PLT_PROP=foo\n')
+ await writeFile(file, JSON.stringify(config))
+
+ const cm = new ConfigManager({ source: file, schema })
+ await cm.parse()
+ const expectedConfig = {
+ name: 'Platformatic',
+ props: {
+ foo: 'foo'
+ }
+ }
+ same(cm.current, expectedConfig)
+ await unlink(file)
+ await unlink(envFile)
+})
+
+test('should look for a .env file in process.cwd() too', async ({ same, fail, plan, teardown, comment }) => {
+ plan(1)
+ const currentCWD = process.cwd()
+ teardown(() => process.chdir(currentCWD))
+
+ const tmpDir = path.join(os.tmpdir(), `plt-auth-${pid}-2`)
+ const tmpDir2 = path.join(os.tmpdir(), `plt-auth-${pid}-2-cwd`)
+ await mkdir(tmpDir)
+ await mkdir(tmpDir2)
+
+ const config = {
+ name: 'Platformatic',
+ props: {
+ foo: '{PLT_PROP}'
+ }
+ }
+ const schema = {
+ type: 'object',
+ properties: {
+ name: { type: 'string' },
+ props: {
+ type: 'object',
+ properties: {
+ foo: { type: 'string' },
+ bar: { type: 'integer' }
+ }
+ }
+ }
+ }
+
+ const file = path.join(tmpDir, 'uses-env.json')
+ const envFile = path.join(tmpDir2, '.env')
+
+ await writeFile(envFile, 'PLT_PROP=foo\n')
+ await writeFile(file, JSON.stringify(config))
+
+ process.chdir(tmpDir2)
+
+ const cm = new ConfigManager({ source: file, schema })
+ await cm.parse()
+ const expectedConfig = {
+ name: 'Platformatic',
+ props: {
+ foo: 'foo'
+ }
+ }
+ same(cm.current, expectedConfig)
+ await unlink(file)
+ await unlink(envFile)
+})
diff --git a/packages/config/test/load.test.js b/packages/config/test/load.test.js
new file mode 100644
index 0000000000..669577b350
--- /dev/null
+++ b/packages/config/test/load.test.js
@@ -0,0 +1,110 @@
+'use strict'
+
+const { test } = require('tap')
+const { resolve } = require('path')
+const ConfigManager = require('..')
+test('should throw if file is not found', async ({ match, fail }) => {
+ try {
+ const cm = new ConfigManager({ source: './invalid-file.json' })
+ await cm.parse()
+ fail()
+ } catch (err) {
+ match(err.message, 'Cannot parse config file. ENOENT: no such file or directory')
+ }
+})
+
+test('should throw if placeholder is invalid', async ({ match, fail }) => {
+ try {
+ const cm = new ConfigManager({ source: resolve(__dirname, './fixtures/bad-placeholder.json') })
+ await cm.parse()
+ fail()
+ } catch (err) {
+ match(err.message, 'PORT is an invalid placeholder. All placeholders must be prefixed with PLT_.\nDid you mean PLT_PORT?')
+ }
+})
+
+test('should throw if placeholder is missing', async ({ match, fail }) => {
+ try {
+ const cm = new ConfigManager({ source: resolve(__dirname, './fixtures/bad-placeholder.json'), envWhitelist: ['PORT'] })
+ await cm.parse()
+ fail()
+ } catch (err) {
+ match(err.message, 'PORT env variable is missing.')
+ }
+})
+
+// TODO
+// test('should throw if config is invalid', ({ equal, plan }) => { })
+test('should support YAML format', async ({ same }) => {
+ const cm = new ConfigManager({
+ source: resolve(__dirname, './fixtures/simple.yaml'),
+ env: { PLT_FOOBAR: 'foobar' }
+ })
+ await cm.parse()
+ same(cm.current, {
+ server: { hostname: '127.0.0.1', port: '3042', logger: { level: 'info' } },
+ metrics: { auth: { username: 'plt-db', password: 'plt-db' } },
+ plugin: { path: './plugin-sum.js' },
+ core: {
+ connectionString: 'postgres://postgres:postgres@localhost:5432/postgres',
+ graphiql: true,
+ ignore: { versions: true }
+ },
+ migrations: { dir: './demo/auth/migrations', validateChecksums: false },
+ dashboard: { enabled: true, rootPath: true },
+ authorization: { adminSecret: 'plt-db' },
+ foobar: 'foobar'
+ })
+})
+
+test('should support TOML format', async ({ same }) => {
+ const cm = new ConfigManager({
+ source: resolve(__dirname, './fixtures/simple.toml'),
+ env: { PLT_FOOBAR: 'foobar' }
+ })
+ await cm.parse()
+ cm._transformConfig = function () {
+ this.current.plugin.path = this.fixRelativePath(this.current.plugin.path)
+ this.current.migrations.dir = this.fixRelativePath(this.current.migrations.dir)
+ }
+ same(cm.current, {
+ server: { hostname: '127.0.0.1', port: '3042', logger: { level: 'info' } },
+ metrics: { auth: { username: 'plt-db', password: 'plt-db' } },
+ plugin: { path: './plugin-sum.js' },
+ core: {
+ connectionString: 'postgres://postgres:postgres@localhost:5432/postgres',
+ graphiql: true,
+ ignore: { versions: true }
+ },
+ migrations: { dir: './demo/auth/migrations', validateChecksums: false },
+ dashboard: { enabled: true, rootPath: true },
+ authorization: { adminSecret: 'plt-db' },
+ foobar: 'foobar'
+ })
+})
+
+test('should support JSON5 format', async ({ same }) => {
+ const cm = new ConfigManager({
+ source: resolve(__dirname, './fixtures/simple.json5'),
+ env: { PLT_FOOBAR: 'foobar' }
+ })
+ await cm.parse()
+ cm._transformConfig = function () {
+ this.current.plugin.path = this.fixRelativePath(this.current.plugin.path)
+ this.current.migrations.dir = this.fixRelativePath(this.current.migrations.dir)
+ }
+ same(cm.current, {
+ server: { hostname: '127.0.0.1', port: '3042', logger: { level: 'info' } },
+ metrics: { auth: { username: 'plt-db', password: 'plt-db' } },
+ plugin: { path: './plugin-sum.js' },
+ core: {
+ connectionString: 'postgres://postgres:postgres@localhost:5432/postgres',
+ graphiql: true,
+ ignore: { versions: true }
+ },
+ migrations: { dir: './demo/auth/migrations', validateChecksums: false },
+ dashboard: { enabled: true, rootPath: true },
+ authorization: { adminSecret: 'plt-db' },
+ foobar: 'foobar'
+ })
+})
diff --git a/packages/config/test/placeholders.test.js b/packages/config/test/placeholders.test.js
new file mode 100644
index 0000000000..d7764a9890
--- /dev/null
+++ b/packages/config/test/placeholders.test.js
@@ -0,0 +1,85 @@
+'use strict'
+
+const ConfigManager = require('..')
+const { test } = require('tap')
+
+test('transform placeholders', async ({ plan, same }) => {
+ plan(2)
+ {
+ const cm = new ConfigManager({
+ source: './file.json',
+ env: {
+ PLT_FOO: 'bar',
+ PLT_USERNAME: 'john'
+ }
+ })
+ const config = {
+ server: {
+ hostname: '127.0.0.1',
+ port: '3042',
+ replace: '{PLT_FOO}'
+ }
+ }
+
+ const res = await cm.replaceEnv(JSON.stringify(config))
+ same(JSON.parse(res), {
+ server: {
+ hostname: '127.0.0.1',
+ port: '3042',
+ replace: 'bar'
+ }
+ })
+ }
+
+ {
+ // shouldn't complain if no placeholders are defined
+ const cm = new ConfigManager({
+ source: './file.json',
+ env: {
+ PLT_FOO: 'bar',
+ PLT_USERNAME: 'john'
+ }
+ })
+
+ const config = {
+ server: {
+ hostname: '127.0.0.1',
+ port: '3042'
+ }
+ }
+
+ const res = await cm.replaceEnv(JSON.stringify(config))
+ same(JSON.parse(res), {
+ server: {
+ hostname: '127.0.0.1',
+ port: '3042'
+ }
+ })
+ }
+})
+
+test('throws if not all placeholders are defined', async ({ plan, same, throws }) => {
+ plan(2)
+ const cm = new ConfigManager({
+ source: './file.json',
+ env: {
+ PLT_FOO: 'bar',
+ PLT_USERNAME: 'john'
+ }
+ })
+
+ const config = {
+ server: {
+ hostname: '127.0.0.1',
+ port: '3042',
+ replace: '{PLT_FOO}'
+ },
+ plugin: '{PLT_PLUGIN}'
+ }
+ try {
+ await cm.replaceEnv(JSON.stringify(config))
+ } catch (err) {
+ same(err.name, 'MissingValueError')
+ same(err.message, 'Missing a value for the placeholder: PLT_PLUGIN')
+ }
+})
diff --git a/packages/config/test/plugin.test.js b/packages/config/test/plugin.test.js
new file mode 100644
index 0000000000..cd11241579
--- /dev/null
+++ b/packages/config/test/plugin.test.js
@@ -0,0 +1,71 @@
+'use strict'
+
+const { test } = require('tap')
+const Fastify = require('fastify')
+const ConfigManager = require('..')
+const { saveConfigToFile } = require('./helper')
+const { readFile, unlink } = require('fs/promises')
+
+test('should generate fastify plugin', async ({ teardown, same, equal }) => {
+ const config = {
+ foo: 'bar'
+ }
+ const schema = {
+ type: 'object',
+ properties: {
+ foo: { type: 'string' },
+ bar: { type: 'string' }
+ }
+ }
+ const file = await saveConfigToFile(config, 'plugin.json')
+
+ const cm = new ConfigManager({
+ source: file,
+ schema
+ })
+ await cm.parse()
+ const app = Fastify({
+ logger: false
+ })
+ app.register(cm.toFastifyPlugin())
+
+ await app.listen({ port: 0 })
+ teardown(async () => { await unlink(file) })
+ teardown(app.close)
+
+ {
+ // Read config file
+ const res = await app.inject({
+ method: 'GET',
+ url: '/config-file'
+ })
+ equal(res.statusCode, 200)
+ same(res.json(), {
+ foo: 'bar'
+ })
+ }
+ {
+ const newConfig = {
+ foo: 'bar',
+ bar: 'baz'
+ }
+ // Write config file
+ const res = await app.inject({
+ method: 'POST',
+ url: '/config-file',
+ headers: {
+ 'x-platformatic-admin-secret': 'secret'
+ },
+ body: newConfig
+ })
+ equal(res.statusCode, 200)
+ same(res.json(), {
+ success: true
+ })
+
+ // check both file and current Config
+ const newData = JSON.parse(await readFile(file))
+ same(newData, newConfig)
+ same(cm.current, newConfig)
+ }
+})
diff --git a/packages/config/test/save.test.js b/packages/config/test/save.test.js
new file mode 100644
index 0000000000..61ce1344e9
--- /dev/null
+++ b/packages/config/test/save.test.js
@@ -0,0 +1,333 @@
+'use strict'
+
+const { test } = require('tap')
+const ConfigManager = require('..')
+const { saveConfigToFile } = require('./helper')
+const { readFile, unlink } = require('fs/promises')
+const YAML = require('yaml')
+const TOML = require('@iarna/toml')
+test('should not save invalid config', async ({ equal, fail, pass, same, teardown }) => {
+ const invalidConfig = {
+ name: ['Platformatic'],
+ props: {
+ foo: 123
+ }
+ }
+ const schema = {
+ type: 'object',
+ properties: {
+ name: { type: 'string' },
+ props: {
+ type: 'object',
+ properties: {
+ foo: { type: 'string' }
+ }
+ }
+ }
+ }
+
+ const file = await saveConfigToFile(invalidConfig, 'invalid.json')
+ teardown(async () => await unlink(file))
+ const cm = new ConfigManager({
+ source: file,
+ schema,
+ schemaOptions: {
+ allErrors: true
+ }
+ })
+ const res = await cm.parse()
+ equal(res, false)
+ same(cm.validationErrors, [
+ { path: '/name', message: 'must be string {"type":"string"}' },
+ { path: '/props/foo', message: 'must be string {"type":"string"}' }
+ ])
+})
+test('should not replace placeholders in file', async ({ equal, same, teardown }) => {
+ const config = {
+ name: 'Platformatic',
+ props: {
+ foo: '{PLT_FOO}'
+ }
+ }
+ const schema = {
+ type: 'object',
+ properties: {
+ name: { type: 'string' },
+ props: {
+ type: 'object',
+ properties: {
+ foo: { type: 'string' }
+ }
+ }
+ }
+ }
+
+ const file = await saveConfigToFile(config, 'no-placeholder-replacement.json')
+ teardown(async () => await unlink(file))
+ const cm = new ConfigManager({
+ source: file,
+ schema,
+ env: { PLT_FOO: 'foobar' }
+ })
+ const res = await cm.parse()
+ equal(res, true)
+ same(cm.validationErrors, [])
+ const configData = JSON.parse(await readFile(file))
+ same(configData, {
+ name: 'Platformatic',
+ props: {
+ foo: '{PLT_FOO}'
+ }
+ })
+ same(cm.current, {
+ name: 'Platformatic',
+ props: {
+ foo: 'foobar'
+ }
+ })
+})
+test('should save valid config and replace current one', async ({ same, teardown }) => {
+ const config = {
+ name: 'Platformatic',
+ props: {
+ foo: 'bar'
+ }
+ }
+ const schema = {
+ type: 'object',
+ properties: {
+ name: { type: 'string' },
+ props: {
+ type: 'object',
+ properties: {
+ foo: { type: 'string' },
+ bar: { type: 'integer' }
+ }
+ }
+ }
+ }
+
+ const file = await saveConfigToFile(config, 'to-replace.json')
+ teardown(async () => await unlink(file))
+ const cm = new ConfigManager({
+ source: file,
+ schema,
+ env: { PLT_FOO: 'foobar' }
+ })
+ await cm.parse()
+ const newConfig = {
+ name: 'Platformatic',
+ props: {
+ foo: 'foobar',
+ bar: 42
+ }
+ }
+ await cm.update(newConfig)
+ same(cm.current, newConfig)
+ const configData = JSON.parse(await readFile(file))
+ same(configData, cm.current)
+})
+
+test('should not update with invalid config', async ({ same, teardown }) => {
+ const config = {
+ name: 'Platformatic',
+ props: {
+ foo: 'bar'
+ }
+ }
+ const schema = {
+ type: 'object',
+ properties: {
+ name: { type: 'string' },
+ props: {
+ type: 'object',
+ properties: {
+ foo: { type: 'string' },
+ bar: { type: 'integer' }
+ }
+ }
+ }
+ }
+
+ const file = await saveConfigToFile(config, 'do-not-update.json')
+ teardown(async () => await unlink(file))
+ const cm = new ConfigManager({
+ source: file,
+ schema,
+ env: { PLT_FOO: 'foobar' }
+ })
+ await cm.parse()
+ const newConfig = {
+ name: 'Platformatic',
+ props: {
+ foo: 'foobar',
+ bar: '42'
+ }
+ }
+ const updateRes = await cm.update(newConfig)
+ same(updateRes, false)
+ same(cm.current, config)
+ const configData = JSON.parse(await readFile(file))
+ same(configData, config)
+})
+
+test('should support YAML format', async ({ same, teardown }) => {
+ const config = {
+ name: 'Platformatic',
+ props: {
+ foo: 'bar'
+ }
+ }
+ const schema = {
+ type: 'object',
+ properties: {
+ name: { type: 'string' },
+ props: {
+ type: 'object',
+ properties: {
+ foo: { type: 'string' },
+ bar: { type: 'integer' }
+ }
+ }
+ }
+ }
+
+ const file = await saveConfigToFile(config, 'to-replace.yaml', YAML)
+ teardown(async () => await unlink(file))
+ const cm = new ConfigManager({
+ source: file,
+ schema,
+ env: { PLT_FOO: 'foobar' }
+ })
+ await cm.parse()
+ const newConfig = {
+ name: 'Platformatic',
+ props: {
+ foo: 'foobar',
+ bar: 42
+ }
+ }
+ await cm.update(newConfig)
+ same(cm.current, newConfig)
+ const configData = YAML.parse(await readFile(file, 'utf-8'))
+ same(configData, cm.current)
+})
+
+test('should support TOML format', async ({ same, teardown }) => {
+ const config = {
+ name: 'Platformatic',
+ props: {
+ foo: 'bar'
+ }
+ }
+ const schema = {
+ type: 'object',
+ properties: {
+ name: { type: 'string' },
+ props: {
+ type: 'object',
+ properties: {
+ foo: { type: 'string' },
+ bar: { type: 'integer' }
+ }
+ }
+ }
+ }
+
+ const file = await saveConfigToFile(config, 'to-replace.toml', TOML)
+ teardown(async () => await unlink(file))
+ const cm = new ConfigManager({
+ source: file,
+ schema,
+ env: { PLT_FOO: 'foobar' }
+ })
+ await cm.parse()
+ const newConfig = {
+ name: 'Platformatic',
+ props: {
+ foo: 'foobar',
+ bar: 42
+ }
+ }
+ await cm.update(newConfig)
+ same(cm.current, newConfig)
+ const configData = TOML.parse(await readFile(file, 'utf-8'))
+ same(configData, cm.current)
+})
+
+test('should keep history of configs', { skip: true }, async ({ equal, plan }) => {
+ // TODO: implement this if we want to keep history of configs in running instance
+})
+
+test('should not save if not parsed', async ({ equal, fail, pass, same, teardown }) => {
+ const invalidConfig = {
+ name: ['Platformatic'],
+ props: {
+ foo: '123'
+ }
+ }
+ const schema = {
+ type: 'object',
+ properties: {
+ name: { type: 'string' },
+ props: {
+ type: 'object',
+ properties: {
+ foo: { type: 'string' }
+ }
+ }
+ }
+ }
+
+ const file = await saveConfigToFile(invalidConfig, 'not-parsed.json')
+ teardown(async () => await unlink(file))
+ const cm = new ConfigManager({
+ source: file,
+ schema,
+ schemaOptions: {
+ allErrors: true
+ }
+ })
+ equal(await cm.save(), false)
+})
+
+test('should save if initialized with object', async ({ same, teardown }) => {
+ const config = {
+ name: 'Platformatic',
+ props: {
+ foo: 'bar'
+ }
+ }
+ const schema = {
+ type: 'object',
+ properties: {
+ name: { type: 'string' },
+ props: {
+ type: 'object',
+ properties: {
+ foo: { type: 'string' },
+ bar: { type: 'integer' }
+ }
+ }
+ }
+ }
+
+ const cm = new ConfigManager({
+ source: config,
+ schema,
+ env: { PLT_FOO: 'foobar' }
+ })
+ teardown(async () => await unlink(cm.fullPath))
+ await cm.parse()
+ const newConfig = {
+ name: 'Platformatic',
+ props: {
+ foo: 'foobar',
+ bar: 42
+ }
+ }
+ await cm.update(newConfig)
+ same(cm.current, newConfig)
+ const configData = JSON.parse(await readFile(cm.fullPath))
+ same(configData, cm.current)
+})
diff --git a/packages/config/test/watch.test.js b/packages/config/test/watch.test.js
new file mode 100644
index 0000000000..d13d9f9d6b
--- /dev/null
+++ b/packages/config/test/watch.test.js
@@ -0,0 +1,294 @@
+'use strict'
+
+const { test } = require('tap')
+const { saveConfigToFile } = require('./helper')
+const { unlink, writeFile, mkdir } = require('fs/promises')
+const { once } = require('events')
+const path = require('path')
+const os = require('os')
+const ConfigManager = require('..')
+const pid = process.pid
+const { setTimeout: sleep } = require('timers/promises')
+
+test('should emit event if file is updated', async ({ same, fail, plan, teardown }) => {
+ plan(1)
+ const config = {
+ name: 'Platformatic',
+ props: {
+ foo: 'bar'
+ }
+ }
+ const schema = {
+ type: 'object',
+ properties: {
+ name: { type: 'string' },
+ props: {
+ type: 'object',
+ properties: {
+ foo: { type: 'string' },
+ bar: { type: 'integer' }
+ }
+ }
+ }
+ }
+
+ const file = await saveConfigToFile(config, 'emit-event.json')
+ const cm = new ConfigManager({ source: file, schema, watch: true })
+ await cm.parse()
+ const updatedConfig = {
+ name: 'Platformatic Update',
+ props: {
+ foo: 'foobar'
+ }
+ }
+ await Promise.all([
+ once(cm, 'update'),
+ writeFile(file, JSON.stringify(updatedConfig))
+ ])
+ same(cm.current, updatedConfig)
+ await cm.stopWatch()
+ await unlink(file)
+})
+
+test('start & stop cannot be called multiple times', async ({ same, fail, plan, teardown, rejects }) => {
+ plan(2)
+ const config = {
+ name: 'Platformatic',
+ props: {
+ foo: 'bar'
+ }
+ }
+ const schema = {
+ type: 'object',
+ properties: {
+ name: { type: 'string' },
+ props: {
+ type: 'object',
+ properties: {
+ foo: { type: 'string' },
+ bar: { type: 'integer' }
+ }
+ }
+ }
+ }
+
+ const file = await saveConfigToFile(config, 'emit-event.json')
+ const cm = new ConfigManager({ source: file, schema })
+ await cm.parse()
+ const p1 = cm.startWatch()
+ const p2 = cm.startWatch()
+ same(p1, p2)
+ await Promise.all([
+ cm.stopWatch(),
+ rejects(p1)
+ ])
+ await cm.stopWatch()
+})
+test('should emit error for invalid config and not update current', async ({ teardown, fail }) => {
+ const config = {
+ name: 'Platformatic',
+ props: {
+ foo: 'bar'
+ }
+ }
+ const schema = {
+ type: 'object',
+ properties: {
+ name: { type: 'string' },
+ props: {
+ type: 'object',
+ properties: {
+ foo: { type: 'string' },
+ bar: { type: 'integer' }
+ }
+ }
+ },
+ required: ['name']
+ }
+
+ const file = await saveConfigToFile(config)
+ teardown(async () => {
+ await cm.stopWatch()
+ await unlink(file)
+ })
+ const cm = new ConfigManager({ source: file, schema, watch: true })
+ await cm.parse()
+ const updatedConfig = {
+ props: {
+ foo: 'foo',
+ bar: '42'
+ }
+ }
+ await Promise.all([
+ Promise.race([
+ once(cm, 'error'),
+ once(cm, 'update').then(() => fail())
+ ]),
+ writeFile(file, JSON.stringify(updatedConfig))
+ ])
+})
+
+test('should emit event if .env file is updated', async ({ same, fail, plan, teardown, comment }) => {
+ plan(1)
+ const tmpDir = path.join(os.tmpdir(), `plt-auth-${pid}`)
+ await mkdir(tmpDir)
+ const config = {
+ name: 'Platformatic',
+ props: {
+ foo: '{PLT_PROP}'
+ }
+ }
+ const schema = {
+ type: 'object',
+ properties: {
+ name: { type: 'string' },
+ props: {
+ type: 'object',
+ properties: {
+ foo: { type: 'string' },
+ bar: { type: 'integer' }
+ }
+ }
+ }
+ }
+
+ const file = path.join(tmpDir, 'uses-env.json')
+ const envFile = path.join(tmpDir, '.env')
+
+ await writeFile(envFile, 'PLT_PROP=foo\n')
+ await writeFile(file, JSON.stringify(config))
+
+ const cm = new ConfigManager({ source: file, schema, watch: true })
+ await cm.parse()
+ const updatedConfig = {
+ name: 'Platformatic',
+ props: {
+ foo: 'foobar'
+ }
+ }
+ comment('reloading')
+ await Promise.all([
+ once(cm, 'update'),
+ writeFile(envFile, 'PLT_PROP=foobar')
+ ])
+ same(cm.current, updatedConfig)
+ await cm.stopWatch()
+ await unlink(file)
+ await unlink(envFile)
+})
+
+test('initialize watchIgnore array', async ({ same, plan }) => {
+ plan(2)
+ {
+ const config = {
+ core: {
+ connectionString: 'sqlite://db.sqlite'
+ }
+ }
+ const file = await saveConfigToFile(config, 'test-watchIgnore.json')
+ const cm = new ConfigManager({ source: file, watch: true })
+ await cm.parse()
+ same(cm.watchIgnore, [])
+ await cm.stopWatch()
+ await unlink(file)
+ }
+ {
+ const config = {
+ core: {
+ connectionString: 'sqlite://db.sqlite'
+ }
+ }
+ const file = await saveConfigToFile(config, 'test-watchIgnore.json')
+ const cm = new ConfigManager({ source: file, watch: true, watchIgnore: ['foo.bar'] })
+ await cm.parse()
+ same(cm.watchIgnore, ['foo.bar'])
+ await cm.stopWatch()
+ await unlink(file)
+ }
+})
+
+test('return correct files to ignore while watching', async ({ equal, same, plan }) => {
+ {
+ const config = {
+ core: {
+ connectionString: 'sqlite://db.sqlite'
+ }
+ }
+ const file = await saveConfigToFile(config, 'test-watchIgnore.json')
+ const cm = new ConfigManager({ source: file, watchIgnore: ['test.file', 'test2.file'] })
+ equal(false, cm.shouldFileBeWatched('test.file'))
+ equal(false, cm.shouldFileBeWatched('test2.file'))
+ await unlink(file)
+ }
+
+ {
+ const config = {
+ core: {
+ connectionString: 'sqlite://db.sqlite'
+ }
+ }
+ const file = await saveConfigToFile(config, 'test-watchIgnore.json')
+ const cm = new ConfigManager({ source: file, watchIgnore: ['test.file', 'test2.file'] })
+ equal(true, cm.shouldFileBeWatched('another.file'))
+ await unlink(file)
+ }
+})
+
+test('do not emit event for ignored files', async ({ teardown, equal, same, fail }) => {
+ const configFile = path.join(__dirname, 'fixtures', 'simple.json')
+ const cm = new ConfigManager({
+ source: configFile,
+ schema: {},
+ watch: true,
+ watchIgnore: ['test.file']
+ })
+ const parseResult = await cm.parse()
+ equal(parseResult, true)
+  const testFileFullPath = path.join(path.dirname(cm.fullPath), 'test.file')
+ cm.on('update', () => {
+ fail()
+ })
+ await writeFile(testFileFullPath, 'foobar')
+
+ teardown(async () => {
+ await cm.stopWatch()
+ await unlink(testFileFullPath)
+ })
+
+ same(cm.watchIgnore, ['test.file'])
+
+  // wait a short while to give the watcher a chance to deliver
+  // any pending updates before the test ends.
+ await sleep(150)
+})
+
+test('emit event for not-ignored files', async ({ teardown, equal, same, pass, fail }) => {
+ const configFile = path.join(__dirname, 'fixtures', 'simple.json')
+ const cm = new ConfigManager({
+ source: configFile,
+ schema: {},
+ watch: true,
+ watchIgnore: ['test.file']
+ })
+ let eventEmitted = false
+ const parseResult = await cm.parse()
+ equal(parseResult, true)
+  const testFileFullPath = path.join(path.dirname(cm.fullPath), 'test2.file')
+ cm.on('update', () => {
+ pass()
+ eventEmitted = true
+ })
+ await writeFile(testFileFullPath, 'foobar')
+
+ teardown(async () => {
+ await cm.stopWatch()
+ await unlink(testFileFullPath)
+ })
+
+ same(cm.watchIgnore, ['test.file'])
+
+  // wait a short while to give the watcher a chance to deliver
+  // any pending updates before asserting.
+ await sleep(150)
+ equal(eventEmitted, true)
+})
diff --git a/packages/db-authorization/.npmignore b/packages/db-authorization/.npmignore
new file mode 100644
index 0000000000..be27365759
--- /dev/null
+++ b/packages/db-authorization/.npmignore
@@ -0,0 +1,2 @@
+.nyc_output
+coverage
diff --git a/packages/db-authorization/.taprc b/packages/db-authorization/.taprc
new file mode 100644
index 0000000000..c1917e8701
--- /dev/null
+++ b/packages/db-authorization/.taprc
@@ -0,0 +1 @@
+jobs: 1
diff --git a/packages/db-authorization/LICENSE b/packages/db-authorization/LICENSE
new file mode 100644
index 0000000000..261eeb9e9f
--- /dev/null
+++ b/packages/db-authorization/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/db-authorization/NOTICE b/packages/db-authorization/NOTICE
new file mode 100644
index 0000000000..a7d8a8414a
--- /dev/null
+++ b/packages/db-authorization/NOTICE
@@ -0,0 +1,13 @@
+ Copyright 2022 Platformatic
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/db-authorization/README.md b/packages/db-authorization/README.md
new file mode 100644
index 0000000000..0a591a9347
--- /dev/null
+++ b/packages/db-authorization/README.md
@@ -0,0 +1,15 @@
+# @platformatic/db-authorization
+
+Fastify plugin that adds role-based authorization hooks to [`@platformatic/sql-mapper`](https://www.npmjs.com/package/@platformatic/sql-mapper).
+
+Check out the full documentation on [our website](https://oss.platformatic.dev/docs/reference/db-authorization/introduction).
+
+## Install
+
+```sh
+npm install @platformatic/db-authorization
+```
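+
+## Usage
+
+Below is a minimal sketch of how the plugin is typically wired up, modelled on the test setup in this repository; the connection string, JWT secret, and rule are illustrative placeholders rather than a recommended configuration.
+
+```js
+'use strict'
+
+const fastify = require('fastify')
+const core = require('@platformatic/db-core')
+const auth = require('@platformatic/db-authorization')
+
+const app = fastify()
+
+// Map the database first, then layer the authorization hooks on top of it.
+app.register(core, {
+  connectionString: 'sqlite://./pages.sqlite' // placeholder database
+})
+
+app.register(auth, {
+  jwt: { secret: 'supersecret' }, // placeholder secret
+  roleKey: 'X-PLATFORMATIC-ROLE',
+  anonymousRole: 'anonymous',
+  rules: [{
+    role: 'user',
+    entity: 'page',
+    find: true,
+    delete: false,
+    defaults: { userId: 'X-PLATFORMATIC-USER-ID' },
+    save: { checks: { userId: 'X-PLATFORMATIC-USER-ID' } }
+  }]
+})
+
+app.listen({ port: 3042 })
+```
+
+See the tests in `test/` for more complete examples covering the `adminSecret` option and JWT-based sessions.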
+
+## License
+
+Apache 2.0
diff --git a/packages/db-authorization/index.js b/packages/db-authorization/index.js
new file mode 100644
index 0000000000..e45438504d
--- /dev/null
+++ b/packages/db-authorization/index.js
@@ -0,0 +1,293 @@
+'use strict'
+
+const fp = require('fastify-plugin')
+const createError = require('@fastify/error')
+const { getRequestFromContext, getRoles } = require('./lib/utils')
+const findRule = require('./lib/find-rule')
+
+const PLT_ADMIN_ROLE = 'platformatic-admin'
+const Unauthorized = createError('PLT_DB_AUTH_UNAUTHORIZED', 'operation not allowed', 401)
+const UnauthorizedField = createError('PLT_DB_AUTH_UNAUTHORIZED', 'field not allowed: %s', 401)
+const MissingNotNullableError = createError('PLT_DB_AUTH_NOT_NULLABLE_MISSING', 'missing not nullable field: "%s" in save rule for entity "%s"')
+
+async function auth (app, opts) {
+ if (opts.jwt) {
+ app.register(require('./lib/jwt'), opts.jwt)
+ } else if (opts.webhook) {
+ app.register(require('./lib/webhook'), opts.webhook)
+ }
+
+ const adminSecret = opts.adminSecret
+ const roleKey = opts.roleKey || 'X-PLATFORMATIC-ROLE'
+ const anonymousRole = opts.anonymousRole || 'anonymous'
+ app.addHook('preHandler', async (request) => {
+ if (adminSecret && request.headers['x-platformatic-admin-secret'] === adminSecret) {
+ request.log.info('admin secret is valid')
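+      // Expose the raw request headers as the user: lookups fall back to the
+      // lowercased header name, and the configured role key defaults to the
+      // built-in platformatic-admin role when no role header is present.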
+ request.user = new Proxy(request.headers, {
+ get: (target, key) => {
+ let value
+ if (!target[key]) {
+ const newKey = key.toLowerCase()
+ value = target[newKey]
+ } else {
+ value = target[key]
+ }
+
+ if (!value && key.toLowerCase() === roleKey.toLowerCase()) {
+ value = PLT_ADMIN_ROLE
+ }
+
+ return value
+ }
+ })
+
+ return
+ }
+ try {
+ await request.createSession()
+ } catch (err) {
+ request.log.trace({ err })
+ }
+ })
+
+ const rules = opts.rules || []
+
+ app.platformatic.addRulesForRoles = (_rules) => {
+ for (const rule of _rules) {
+ rules.push(rule)
+ }
+ }
+
+ app.addHook('onReady', function () {
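+    // Group the configured rules by entity so that hooks can be attached per entity below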
+ const entityRules = {}
+    // TODO validate that there is at most one rule for a given role
+ for (const rule of rules) {
+ if (!entityRules[rule.entity]) {
+ entityRules[rule.entity] = []
+ }
+ entityRules[rule.entity].push(rule)
+ }
+
+ for (const entityKey of Object.keys(app.platformatic.entities)) {
+ const rules = entityRules[entityKey] || []
+ const type = app.platformatic.entities[entityKey]
+
+ if (adminSecret) {
+ rules.push({
+ role: PLT_ADMIN_ROLE,
+ find: true,
+ save: true,
+ delete: true
+ })
+ }
+
+ // If we have `fields` in save rules, we need to check if all the not-nullable
+ // fields are specified
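+      // e.g. a hypothetical save rule with `fields: ['id', 'title']` would make startup fail
+      // if the entity also had a not-nullable `topic` column missing from that list.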
+ checkSaveMandatoryFieldsInRules(type, rules)
+
+ app.platformatic.addEntityHooks(entityKey, {
+ async find (originalFind, { where, ctx, fields }) {
+ const request = getRequestFromContext(ctx)
+ const rule = findRuleForRequestUser(ctx, rules, roleKey, anonymousRole)
+ checkFieldsFromRule(rule.find, fields)
+ where = await fromRuleToWhere(ctx, rule.find, where, request.user)
+
+ return originalFind({ where, ctx, fields })
+ },
+
+ async save (originalSave, { input, ctx, fields }) {
+ const request = getRequestFromContext(ctx)
+ const rule = findRuleForRequestUser(ctx, rules, roleKey, anonymousRole)
+
+ if (!rule.save) {
+ throw new Unauthorized()
+ }
+ checkFieldsFromRule(rule.save, fields)
+ checkInputFromRuleFields(rule.save, input)
+
+ if (rule.defaults) {
+ for (const key of Object.keys(rule.defaults)) {
+ const defaults = rule.defaults[key]
+ if (typeof defaults === 'function') {
+ input[key] = await defaults({ user: request.user, ctx, input })
+ } else {
+ input[key] = request.user[defaults]
+ }
+ }
+ }
+
+ if (input[type.primaryKey]) {
+ const where = await fromRuleToWhere(ctx, rule.save, {
+ [type.primaryKey]: {
+ eq: input[type.primaryKey]
+ }
+ }, request.user)
+
+ const found = await type.find({
+ where,
+ ctx,
+ fields
+ })
+
+ if (found.length === 0) {
+ throw new Unauthorized()
+ }
+
+ return originalSave({ input, ctx, fields })
+ }
+
+ return originalSave({ input, ctx, fields })
+ },
+
+ async insert (originalInsert, { inputs, ctx, fields }) {
+ const request = getRequestFromContext(ctx)
+ const rule = findRuleForRequestUser(ctx, rules, roleKey, anonymousRole)
+
+ if (!rule.save) {
+ throw new Unauthorized()
+ }
+
+ checkFieldsFromRule(rule.save, fields)
+ checkInputFromRuleFields(rule.save, inputs)
+
+ /* istanbul ignore else */
+ if (rule.defaults) {
+ for (const input of inputs) {
+ for (const key of Object.keys(rule.defaults)) {
+ const defaults = rule.defaults[key]
+ if (typeof defaults === 'function') {
+ input[key] = await defaults({ user: request.user, ctx, input })
+ } else {
+ input[key] = request.user[defaults]
+ }
+ }
+ }
+ }
+
+ return originalInsert({ inputs, ctx, fields })
+ },
+
+ async delete (originalDelete, { where, ctx, fields }) {
+ const request = getRequestFromContext(ctx)
+ const rule = findRuleForRequestUser(ctx, rules, roleKey, anonymousRole)
+
+ where = await fromRuleToWhere(ctx, rule.delete, where, request.user)
+
+ return originalDelete({ where, ctx, fields })
+ }
+ })
+ }
+ })
+}
+
+async function fromRuleToWhere (ctx, rule, where, user) {
+ if (!rule) {
+ throw new Unauthorized()
+ }
+ const request = getRequestFromContext(ctx)
+ /* istanbul ignore next */
+ where = where || {}
+
+ if (typeof rule === 'object') {
+ const { checks } = rule
+
+ /* istanbul ignore else */
+ if (checks) {
+ for (const key of Object.keys(checks)) {
+ const clauses = checks[key]
+ if (typeof clauses === 'string') {
+ // case: "userId": "X-PLATFORMATIC-USER-ID"
+ where[key] = {
+ eq: request.user[clauses]
+ }
+ } else {
+ // case:
+ // userId: {
+ // eq: 'X-PLATFORMATIC-USER-ID'
+ // }
+ for (const clauseKey of Object.keys(clauses)) {
+ const clause = clauses[clauseKey]
+ where[key] = {
+ [clauseKey]: request.user[clause]
+ }
+ }
+ }
+ }
+ }
+ } else if (typeof rule === 'function') {
+ where = await rule({ user, ctx, where })
+ }
+ return where
+}
+
+function findRuleForRequestUser (ctx, rules, roleKey, anonymousRole) {
+ const roles = getRoles(getRequestFromContext(ctx), roleKey, anonymousRole)
+ const rule = findRule(rules, roles)
+ if (!rule) {
+ ctx.reply.log.warn({ roles }, 'no rule for roles')
+ throw new Unauthorized()
+ }
+ return rule
+}
+
+function checkFieldsFromRule (rule, fields) {
+ if (!rule) {
+ throw new Unauthorized()
+ }
+ const { fields: fieldsFromRule } = rule
+ /* istanbul ignore else */
+ if (fieldsFromRule) {
+ for (const field of fields) {
+ if (!fieldsFromRule.includes(field)) {
+ throw new UnauthorizedField(field)
+ }
+ }
+ }
+}
+
+const validateInputs = (inputs, fieldsFromRule) => {
+ for (const input of inputs) {
+ const inputFields = Object.keys(input)
+ for (const inputField of inputFields) {
+ if (!fieldsFromRule.includes(inputField)) {
+ throw new UnauthorizedField(inputField)
+ }
+ }
+ }
+}
+
+function checkInputFromRuleFields (rule, inputs) {
+ const { fields: fieldsFromRule } = rule
+ /* istanbul ignore else */
+ if (fieldsFromRule) {
+ if (!Array.isArray(inputs)) {
+ // save
+ validateInputs([inputs], fieldsFromRule)
+ } else {
+ // insert
+ validateInputs(inputs, fieldsFromRule)
+ }
+ }
+}
+
+function checkSaveMandatoryFieldsInRules (type, rules) {
+  // List of not-nullable, non-primary-key fields used to validate save/insert when allowed fields are specified on the rule
+ const mandatoryFields =
+ Object.values(type.fields)
+ .filter(k => (!k.isNullable && !k.primaryKey))
+ .map(({ camelcase }) => (camelcase))
+
+ for (const rule of rules) {
+ const { entity, save } = rule
+ if (save && save.fields) {
+ const fields = save.fields
+ for (const mField of mandatoryFields) {
+ if (!fields.includes(mField)) {
+ throw new MissingNotNullableError(mField, entity)
+ }
+ }
+ }
+ }
+}
+
+module.exports = fp(auth)
diff --git a/packages/db-authorization/lib/find-rule.d.ts b/packages/db-authorization/lib/find-rule.d.ts
new file mode 100644
index 0000000000..1673acf653
--- /dev/null
+++ b/packages/db-authorization/lib/find-rule.d.ts
@@ -0,0 +1,14 @@
+interface IRules {
+  role: string,
+  entity: string,
+  find: boolean,
+  delete: boolean,
+  insert: boolean,
+  save: boolean
+}
+
+export default function findRule (rules: IRules[], roles: string[]): IRules | null
diff --git a/packages/db-authorization/lib/find-rule.js b/packages/db-authorization/lib/find-rule.js
new file mode 100644
index 0000000000..dfee08b94f
--- /dev/null
+++ b/packages/db-authorization/lib/find-rule.js
@@ -0,0 +1,19 @@
+'use strict'
+
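+// Returns the first rule whose role matches one of the given roles, scanning
+// rules in registration order; returns null when no rule matches.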
+function findRule (rules, roles) {
+ let found = null
+ for (const rule of rules) {
+ for (const role of roles) {
+ if (rule.role === role) {
+ found = rule
+ break
+ }
+ }
+ if (found) {
+ break
+ }
+ }
+ return found
+}
+
+module.exports = findRule
diff --git a/packages/db-authorization/lib/jwt.js b/packages/db-authorization/lib/jwt.js
new file mode 100644
index 0000000000..35a6157ff4
--- /dev/null
+++ b/packages/db-authorization/lib/jwt.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const jwt = require('@fastify/jwt')
+const fp = require('fastify-plugin')
+const buildGetJwks = require('get-jwks')
+
+module.exports = fp(async function (app, opts) {
+ // opts.jwks can be `true` (to enable with no options)
+ // or options from https://github.com/nearform/get-jwks#options
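+  // e.g. a hypothetical `jwks: { max: 100, ttl: 60 * 1000 }` would tune the key cache;
+  // see the get-jwks documentation for the options it actually supports.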
+ if (opts.jwks) {
+ const getJwks = buildGetJwks(typeof opts.jwks === 'object' ? opts.jwks : {})
+ app.register(jwt, {
+ ...opts,
+ decode: { complete: true },
+ secret: function (request, token) {
+ const {
+ header: { kid, alg },
+ payload: { iss }
+ } = token
+ return getJwks.getPublicKey({ kid, domain: iss, alg })
+ }
+ })
+ } else {
+ app.register(jwt, opts)
+ }
+
+ app.decorateRequest('createSession', function () {
+ return this.jwtVerify()
+ })
+})
diff --git a/packages/db-authorization/lib/utils.d.ts b/packages/db-authorization/lib/utils.d.ts
new file mode 100644
index 0000000000..5a1f500e96
--- /dev/null
+++ b/packages/db-authorization/lib/utils.d.ts
@@ -0,0 +1,5 @@
+import { MercuriusContext } from "mercurius";
+import { FastifyRequest } from "fastify";
+
+export function getRequestFromContext(ctx: MercuriusContext): FastifyRequest
+export function getRoles(request: FastifyRequest, roleKey: string, anonymousRole: string): string[]
diff --git a/packages/db-authorization/lib/utils.js b/packages/db-authorization/lib/utils.js
new file mode 100644
index 0000000000..623186675e
--- /dev/null
+++ b/packages/db-authorization/lib/utils.js
@@ -0,0 +1,32 @@
+'use strict'
+
+function getRequestFromContext (ctx) {
+ if (!ctx || !ctx.reply) {
+ throw new Error('Missing context. You should call this function with { ctx: { reply }}')
+ }
+ return ctx.reply.request
+}
+
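+// Roles may arrive as a comma-separated string (e.g. an HTTP header) or as an
+// array (e.g. a JWT claim); fall back to the anonymous role when none are found.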
+function getRoles (request, roleKey, anonymousRole) {
+ let output = []
+ const user = request.user
+ if (!user) {
+ output.push(anonymousRole)
+ return output
+ }
+
+ const rolesRaw = user[roleKey]
+ if (typeof rolesRaw === 'string') {
+ output = rolesRaw.split(',')
+ } else if (Array.isArray(rolesRaw)) {
+ output = rolesRaw
+ }
+ if (output.length === 0) {
+ output.push(anonymousRole)
+ }
+ return output
+}
+module.exports = {
+ getRequestFromContext,
+ getRoles
+}
diff --git a/packages/db-authorization/lib/webhook.js b/packages/db-authorization/lib/webhook.js
new file mode 100644
index 0000000000..11d434f1b9
--- /dev/null
+++ b/packages/db-authorization/lib/webhook.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const fp = require('fastify-plugin')
+const { Pool } = require('undici')
+
+const notAllowed = new Set([
+ 'content-length',
+ 'host',
+ 'connection'
+])
+
+module.exports = fp(async function (app, opts) {
+ const origin = new URL(opts.url)
+ const path = origin.pathname
+ origin.pathname = '/'
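+  // undici's Pool is created against the origin only, so the webhook path is kept
+  // aside here and passed explicitly on every request below.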
+ const pool = new Pool(origin)
+ app.addHook('onClose', () => pool.close())
+ app.decorateRequest('createSession', async function () {
+ const headers = {}
+ for (const header of Object.keys(this.headers)) {
+ if (!notAllowed.has(header)) {
+ headers[header] = this.headers[header]
+ }
+ }
+ const body = JSON.stringify(this.body)
+ headers['content-length'] = Buffer.byteLength(body)
+ const res = await pool.request({
+ path,
+ method: 'POST',
+ headers,
+ body
+ })
+
+ if (res.statusCode > 299) {
+ throw new Error('operation not allowed')
+ }
+
+ const data = await res.body.json()
+ this.user = data
+ })
+})
diff --git a/packages/db-authorization/package.json b/packages/db-authorization/package.json
new file mode 100644
index 0000000000..762492e472
--- /dev/null
+++ b/packages/db-authorization/package.json
@@ -0,0 +1,33 @@
+{
+ "name": "@platformatic/db-authorization",
+ "version": "0.0.21",
+  "description": "Fastify plugin that adds role-based authorization hooks to @platformatic/sql-mapper",
+ "main": "index.js",
+ "scripts": {
+ "test": "standard | snazzy && tap test/*test.js"
+ },
+ "author": "Matteo Collina ",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/platformatic/platformatic.git"
+ },
+ "license": "Apache-2.0",
+ "devDependencies": {
+ "@fastify/cookie": "^8.0.0",
+ "@fastify/session": "^10.0.0",
+ "@platformatic/db-core": "workspace:*",
+ "fast-jwt": "^1.7.1",
+ "fastify": "^4.6.0",
+ "mercurius": "^11.0.0",
+ "snazzy": "^9.0.0",
+ "standard": "^17.0.0",
+ "tap": "^16.0.0"
+ },
+ "dependencies": {
+ "@fastify/error": "^3.0.0",
+ "@fastify/jwt": "^6.3.1",
+ "fastify-plugin": "^4.1.0",
+ "get-jwks": "^8.0.0",
+ "undici": "^5.6.1"
+ }
+}
diff --git a/packages/db-authorization/schema.js b/packages/db-authorization/schema.js
new file mode 100644
index 0000000000..755f9ee392
--- /dev/null
+++ b/packages/db-authorization/schema.js
@@ -0,0 +1,15 @@
+'use strict'
+
+const AuthSchema = {
+ $id: '/BasegraphAuth',
+ type: 'object',
+ properties: {
+ adminSecret: {
+ type: 'string',
+      description: 'The password used to log in to the dashboard and to access the routes under the /_admin prefix.'
+ }
+ },
+ additionalProperties: true // TODO remove and add proper validation for the rules
+}
+
+module.exports = AuthSchema
diff --git a/packages/db-authorization/test/admin.test.js b/packages/db-authorization/test/admin.test.js
new file mode 100644
index 0000000000..be0a59cea1
--- /dev/null
+++ b/packages/db-authorization/test/admin.test.js
@@ -0,0 +1,1150 @@
+'use strict'
+
+const { test } = require('tap')
+const fastify = require('fastify')
+const core = require('@platformatic/db-core')
+const { connInfo, clear, isSQLite } = require('./helper')
+const auth = require('..')
+
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ }
+}
+
+test('admin can impersonate a user', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ const adminSecret = require('crypto').randomUUID()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ adminSecret,
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ find: {
+ checks: {
+ userId: 'x-platformatic-user-id'
+ }
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 43,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 43,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: null
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ title,
+ userId
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ }
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ insertPages: [
+ { id: 2, title: 'Page 1', userId: 42 },
+ { id: 3, title: 'Page 2', userId: 42 },
+ { id: 4, title: 'Page 3', userId: 42 }
+ ]
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'deletePages'
+ ]
+ }
+ ]
+ }, 'deletePages response')
+ }
+})
+
+test('only admin usage', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ const adminSecret = require('crypto').randomUUID()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ adminSecret,
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ find: {
+ checks: {
+ userId: 'x-platformatic-user-id'
+ }
+ },
+ save: {
+ checks: { userId: 'X-PLATFORMATIC-USER-ID' }
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 43,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 43,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: null
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ title,
+ userId
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ }
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ insertPages: [
+ { id: 2, title: 'Page 1', userId: 42 },
+ { id: 3, title: 'Page 2', userId: 42 },
+ { id: 4, title: 'Page 3', userId: 42 }
+ ]
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'deletePages'
+ ]
+ }
+ ]
+ }, 'deletePages response')
+ }
+})
+
+test('platformatic-admin role', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ const adminSecret = require('crypto').randomUUID()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ adminSecret
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret
+ },
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ title
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ }
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ insertPages: [
+ { id: 2, title: 'Page 1' },
+ { id: 3, title: 'Page 2' },
+ { id: 4, title: 'Page 3' }
+ ]
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Page 1" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: [{
+ id: 2,
+ title: 'Page 1'
+ }]
+ }
+ }, 'deletePages response')
+ }
+})
+
+test('admin with no rules', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ const adminSecret = require('crypto').randomUUID()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ adminSecret
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'getPageById status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'getPageById response')
+ }
+})
+
+test('platformatic-admin has lower priority to allow user impersonation', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ const adminSecret = require('crypto').randomUUID()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ adminSecret,
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ find: true,
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': ['user', 'platformatic-admin']
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'deletePages'
+ ]
+ }
+ ]
+ }, 'deletePages response')
+ }
+
+ const token2 = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': ['platformatic-admin']
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: [{
+ id: 1,
+ title: 'Hello'
+ }]
+ }
+ }, 'deletePages response')
+ }
+})
diff --git a/packages/db-authorization/test/code-permissions.test.js b/packages/db-authorization/test/code-permissions.test.js
new file mode 100644
index 0000000000..740d20b6ac
--- /dev/null
+++ b/packages/db-authorization/test/code-permissions.test.js
@@ -0,0 +1,647 @@
+'use strict'
+
+const { test } = require('tap')
+const fastify = require('fastify')
+const core = require('@platformatic/db-core')
+const { connInfo, clear, isSQLite, isMysql } = require('./helper')
+const auth = require('..')
+
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ }
+}
+
+async function createPagesWithTimestamp (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ edited_at TIMESTAMP,
+ user_id INTEGER
+ );`)
+ } else if (isMysql) {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ edited_at TIMESTAMP NULL DEFAULT NULL,
+ user_id INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ edited_at TIMESTAMP,
+ user_id INTEGER
+ );`)
+ }
+}
+
+test('users can find, save, update and delete only their own pages', async ({ pass, teardown, same, equal, match, plan }) => {
+ plan(27)
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ const generated = [42, 42, 43, 42, 42, 42]
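+  // user ids the defaults.userId callback below is expected to receive,
+  // one entry per savePage/insertPages call performed later in this test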
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ async find ({ user, ctx, where }) {
+ return {
+ ...where,
+ userId: {
+ eq: user['X-PLATFORMATIC-USER-ID']
+ }
+ }
+ },
+ async delete ({ user, ctx, where }) {
+ return {
+ ...where,
+ userId: {
+ eq: user['X-PLATFORMATIC-USER-ID']
+ }
+ }
+ },
+ defaults: {
+ userId: async function ({ user, ctx, input }) {
+ match(user, {
+ 'X-PLATFORMATIC-USER-ID': generated.shift(),
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ return user['X-PLATFORMATIC-USER-ID']
+ }
+
+ },
+ async save ({ user, ctx, where }) {
+ return {
+ ...where,
+ userId: {
+ eq: user['X-PLATFORMATIC-USER-ID']
+ }
+ }
+ }
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'pages response')
+ }
+
+ const token2 = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 43,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: null
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ title,
+ userId
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ }
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ insertPages: [
+ { id: 2, title: 'Page 1', userId: 42 },
+ { id: 3, title: 'Page 2', userId: 42 },
+ { id: 4, title: 'Page 3', userId: 42 }
+ ]
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello World" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: []
+ }
+ })
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello World" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: [{
+ id: 1,
+ title: 'Hello World'
+ }]
+ }
+ })
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: null
+ }
+ }, 'pages response')
+ }
+})
+
+test('user can delete all posts written before yesterday', async ({ pass, teardown, same, equal, match, plan }) => {
+ // plan(27)
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+ await clear(db, sql)
+ await createPagesWithTimestamp(db, sql)
+ }
+ })
+ const userId = 42
+ const yesterday = (new Date(Date.now() - 60 * 60 * 24 * 1000)).toISOString()
+ const twoHoursAgo = (new Date(Date.now() - 60 * 60 * 2 * 1000)).toISOString()
+ const twentySixHoursAgo = (new Date(Date.now() - 60 * 60 * 26 * 1000)).toISOString()
+
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ find: true,
+ save: true,
+ async delete ({ user, ctx, where }) {
+ return {
+ ...where,
+ editedAt: {
+ lt: yesterday
+ }
+ }
+ },
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': userId,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ // Inserts a page edited 2 hours ago
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello", editedAt: "${twoHoursAgo}" }) {
+ id
+ title
+ userId
+ editedAt
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ userId,
+ editedAt: twoHoursAgo
+ }
+ }
+ }, 'savePage response')
+ }
+
+ // Inserts a page edited 26 hours ago, i.e. before the one-day cutoff
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello", editedAt: "${twentySixHoursAgo}" }) {
+ id
+ title
+ userId
+ editedAt
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 2,
+ title: 'Hello',
+ userId,
+ editedAt: twentySixHoursAgo
+ }
+ }
+ }, 'savePage response')
+ }
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ pages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ userId
+ editedAt
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ pages: [{
+ id: 1,
+ title: 'Hello',
+ userId,
+ editedAt: twoHoursAgo
+ }, {
+ id: 2,
+ title: 'Hello',
+ userId,
+ editedAt: twentySixHoursAgo
+ }]
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: [{
+ id: 2,
+ title: 'Hello'
+ }]
+ }
+ })
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ pages {
+ id
+ title
+ userId
+ editedAt
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ pages: [{
+ id: 1,
+ title: 'Hello',
+ userId,
+ editedAt: twoHoursAgo
+ }]
+ }
+ }, 'pages response')
+ }
+})
diff --git a/packages/db-authorization/test/fields-permissions.test.js b/packages/db-authorization/test/fields-permissions.test.js
new file mode 100644
index 0000000000..c9d9b84441
--- /dev/null
+++ b/packages/db-authorization/test/fields-permissions.test.js
@@ -0,0 +1,597 @@
+'use strict'
+
+const { test } = require('tap')
+const fastify = require('fastify')
+const core = require('@platformatic/db-core')
+const { connInfo, clear, isSQLite } = require('./helper')
+const auth = require('..')
+
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42) NOT NULL,
+ author VARCHAR(100),
+ topic VARCHAR(13) NOT NULL,
+ reviewed_by VARCHAR(13) NOT NULL,
+ user_id INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42) NOT NULL,
+ author VARCHAR(100),
+ topic VARCHAR(13) NOT NULL,
+ reviewed_by VARCHAR(13) NOT NULL,
+ user_id INTEGER
+ );`)
+ }
+}
+
+test('users can find only the authorized fields', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ find: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
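+ // The 'user' role can only read these fields; selecting others fails with "field not allowed"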
+ fields: ['id', 'title', 'topic']
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "TITLE_1", author: "AUTHOR_1", topic: "TOPIC_1", reviewedBy: "TEST" }) {
+ id
+ title
+ author
+ topic
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'TITLE_1',
+ author: 'AUTHOR_1',
+ topic: 'TOPIC_1',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ topic
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'TITLE_1',
+ topic: 'TOPIC_1'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ pages {
+ author
+ title
+ topic
+ }
+ }
+ `
+ }
+ })
+
+ same(res.json(), {
+ data: {
+ pages: null
+ },
+ errors: [
+ {
+ message: 'field not allowed: author',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'pages'
+ ]
+ }
+ ]
+ }, 'pages response')
+ }
+})
+
+test('users can save only the authorized fields', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ find: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ fields: ['id', 'title', 'topic', 'reviewedBy']
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ // Successful save
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "TITLE_1", topic: "TOPIC_1", reviewedBy: "TEST"}) {
+ id
+ title
+ topic
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'TITLE_1',
+ topic: 'TOPIC_1'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ topic
+ author
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'TITLE_1',
+ topic: 'TOPIC_1',
+ author: null
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ // This must fail: the input contains the forbidden field "author"
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "TITLE_1", author: "XXXXXXXXXXXXXX" ,topic: "TOPIC_1" }) {
+ id
+ title
+ topic
+ }
+ }
+ `
+ }
+ })
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'field not allowed: author',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+
+ {
+ // This must fail: the forbidden field "author" is requested in the selection
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "TITLE_1" ,topic: "TOPIC_1" }) {
+ id
+ title
+ topic
+ author
+ }
+ }
+ `
+ }
+ })
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'field not allowed: author',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+})
+
+test('users can insert only the authorized fields', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ find: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ fields: ['id', 'title', 'topic', 'reviewedBy']
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ // Successful insert
+ const books = [{
+ title: 'TITLE_1',
+ topic: 'TOPIC_1',
+ reviewedBy: 'REV_1'
+ }, {
+ title: 'TITLE_2',
+ topic: 'TOPIC_2',
+ reviewedBy: 'REV_2'
+ }]
+
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation batch($inputs: [PageInput]!) {
+ insertPages(inputs: $inputs) {
+ id
+ title
+ }
+ }
+
+ `,
+ variables: {
+ inputs: books
+ }
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ insertPages: [{
+ id: 1,
+ title: 'TITLE_1'
+ }, {
+ id: 2,
+ title: 'TITLE_2'
+ }]
+ }
+ }, 'insertPages response')
+ }
+
+ {
+ // One of the records contains the forbidden field "author": the whole batch must fail
+ const books = [{
+ title: 'TITLE_1',
+ topic: 'TOPIC_1',
+ reviewedBy: 'REV_1'
+ }, {
+ title: 'TITLE_2',
+ topic: 'TOPIC_2',
+ reviewedBy: 'REV_2',
+ author: 'FORBIDDEN'
+ }]
+
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation batch($inputs: [PageInput]!) {
+ insertPages(inputs: $inputs) {
+ id
+ title
+ }
+ }
+
+ `,
+ variables: {
+ inputs: books
+ }
+ }
+ })
+
+ same(res.json(), {
+ data: {
+ insertPages: null
+ },
+ errors: [
+ {
+ message: 'field not allowed: author',
+ locations: [
+ {
+ line: 3,
+ column: 15
+ }
+ ],
+ path: [
+ 'insertPages'
+ ]
+ }
+ ]
+ }, 'insertPages response')
+ }
+})
+
+test('app should not start if a not-nullable field is excluded from the save rule fields', async ({ pass, teardown, same, fail }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ find: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
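+ // The NOT NULL column 'topic' is not listed here, so startup must fail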
+ fields: ['id', 'title']
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+ try {
+ await app.ready()
+ fail('app.ready() should have thrown')
+ } catch (err) {
+ same(err.message, 'missing not nullable field: "topic" in save rule for entity "page"')
+ }
+})
diff --git a/packages/db-authorization/test/find-rule.test.js b/packages/db-authorization/test/find-rule.test.js
new file mode 100644
index 0000000000..63545a8f7c
--- /dev/null
+++ b/packages/db-authorization/test/find-rule.test.js
@@ -0,0 +1,95 @@
+'use strict'
+const { test } = require('tap')
+const findRule = require('../lib/find-rule')
+
+const allowAll = {
+ save: true,
+ find: true,
+ delete: true
+}
+const denyAll = {
+ save: false,
+ find: false,
+ delete: false
+}
+
+test('should return the first rule that matches', ({ same, plan }) => {
+ plan(1)
+ const roles = ['role1']
+ const rules = [
+ {
+ _id: 'RULE1',
+ role: 'role1',
+ entity: 'page',
+ ...allowAll
+ },
+ {
+ _id: 'RULE2',
+ role: 'role2',
+ entity: 'page',
+ ...allowAll
+ },
+ {
+ _id: 'RULE3',
+ role: 'role1',
+ entity: 'page',
+ ...denyAll
+ }
+ ]
+ const found = findRule(rules, roles)
+ same(found._id, 'RULE1')
+})
+
+test('should return null if no match', ({ same, plan }) => {
+ plan(1)
+ const roles = ['role3']
+ const rules = [
+ {
+ _id: 'RULE1',
+ role: 'role1',
+ entity: 'page',
+ ...allowAll
+ },
+ {
+ _id: 'RULE2',
+ role: 'role2',
+ entity: 'page',
+ ...allowAll
+ },
+ {
+ _id: 'RULE3',
+ role: 'role1',
+ entity: 'page',
+ ...denyAll
+ }
+ ]
+ const found = findRule(rules, roles)
+ same(found, null)
+})
+
+test('should search all roles until a match is found', ({ same, plan }) => {
+ plan(1)
+ const roles = ['role3', 'role2']
+ const rules = [
+ {
+ _id: 'RULE1',
+ role: 'role1',
+ entity: 'page',
+ ...allowAll
+ },
+ {
+ _id: 'RULE2',
+ role: 'role2',
+ entity: 'page',
+ ...allowAll
+ },
+ {
+ _id: 'RULE3',
+ role: 'role1',
+ entity: 'page',
+ ...denyAll
+ }
+ ]
+ const found = findRule(rules, roles)
+ same(found._id, 'RULE2')
+})
diff --git a/packages/db-authorization/test/helper.js b/packages/db-authorization/test/helper.js
new file mode 100644
index 0000000000..c0b52632aa
--- /dev/null
+++ b/packages/db-authorization/test/helper.js
@@ -0,0 +1,58 @@
+'use strict'
+
+// Needed to work with dates & postgresql
+// See https://node-postgres.com/features/types/
+process.env.TZ = 'UTC'
+
+const connInfo = {}
+
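+// Pick the connection settings from the DB environment variable; PostgreSQL is the default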
+if (!process.env.DB || process.env.DB === 'postgresql') {
+ connInfo.connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ module.exports.isPg = true
+} else if (process.env.DB === 'mariadb') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1:3307/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'mysql') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'mysql8') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1:3308/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'sqlite') {
+ connInfo.connectionString = 'sqlite://:memory:'
+ module.exports.isSQLite = true
+}
+
+module.exports.connInfo = connInfo
+
+module.exports.clear = async function (db, sql) {
+ try {
+ await db.query(sql`DROP TABLE pages`)
+ } catch (err) {
+ // ignore: the table may not exist yet
+ }
+ try {
+ await db.query(sql`DROP TABLE categories`)
+ } catch (err) {
+ // ignore: the table may not exist yet
+ }
+}
+
+async function createBasicPages (db, sql) {
+ if (module.exports.isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ }
+}
+
+module.exports.createBasicPages = createBasicPages
diff --git a/packages/db-authorization/test/jwt.test.js b/packages/db-authorization/test/jwt.test.js
new file mode 100644
index 0000000000..905c38a4f7
--- /dev/null
+++ b/packages/db-authorization/test/jwt.test.js
@@ -0,0 +1,515 @@
+'use strict'
+
+const fastify = require('fastify')
+const auth = require('..')
+const { test } = require('tap')
+const core = require('@platformatic/db-core')
+const { connInfo, clear, isSQLite } = require('./helper')
+const { createPublicKey, generateKeyPairSync } = require('crypto')
+const { createSigner } = require('fast-jwt')
+
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ }
+}
+
+// Creates an RSA key pair used by all tests in this file
+const { publicKey, privateKey } = generateKeyPairSync('rsa', {
+ modulusLength: 2048,
+ publicKeyEncoding: { type: 'pkcs1', format: 'pem' },
+ privateKeyEncoding: { type: 'pkcs1', format: 'pem' }
+})
+const jwtPublicKey = createPublicKey(publicKey).export({ format: 'jwk' })
+
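+// Starts a throwaway Fastify server that serves a JWKS document on an ephemeral port; when fail is true every request throws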
+async function buildJwksEndpoint (jwks, fail = false) {
+ const app = fastify()
+ app.get('/.well-known/jwks.json', async (request, reply) => {
+ if (fail) {
+ throw new Error('JWKS ENDPOINT ERROR')
+ }
+ return jwks
+ })
+ await app.listen({ port: 0 })
+ return app
+}
+
+test('jwt verify success getting public key from jwks endpoint', async ({ pass, teardown, same, equal }) => {
+ const { n, e, kty } = jwtPublicKey
+ const kid = 'TEST-KID'
+ const alg = 'RS256'
+ const jwksEndpoint = await buildJwksEndpoint(
+ {
+ keys: [
+ {
+ alg,
+ kty,
+ n,
+ e,
+ use: 'sig',
+ kid
+ }
+ ]
+ }
+ )
+ const issuer = `http://localhost:${jwksEndpoint.server.address().port}`
+ const header = {
+ kid,
+ alg,
+ typ: 'JWT'
+ }
+ const payload = {
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': ['user']
+ }
+
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ jwks: true
+ },
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ find: true,
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }]
+ })
+ teardown(app.close.bind(app))
+ teardown(() => jwksEndpoint.close())
+
+ await app.ready()
+
+ const signSync = createSigner({
+ algorithm: 'RS256',
+ key: privateKey,
+ header,
+ iss: issuer,
+ kid
+ })
+ const token = signSync(payload)
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'pages response')
+ }
+})
+
+test('jwt verify fail if getting public key from jwks endpoint fails', async ({ pass, teardown, same, equal }) => {
+ const kid = 'TEST-KID'
+ const alg = 'RS256'
+ // The JWKS endpoint is configured to always fail
+ const jwksEndpoint = await buildJwksEndpoint(
+ {}, true
+ )
+ const issuer = `http://localhost:${jwksEndpoint.server.address().port}`
+ const header = {
+ kid,
+ alg,
+ typ: 'JWT'
+ }
+ const payload = {
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': ['user']
+ }
+
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ jwks: true
+ },
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ find: true,
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }]
+ })
+ teardown(app.close.bind(app))
+ teardown(() => jwksEndpoint.close())
+
+ await app.ready()
+
+ const signSync = createSigner({
+ algorithm: 'RS256',
+ key: privateKey,
+ header,
+ iss: issuer,
+ kid
+ })
+ const token = signSync(payload)
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+})
+
+test('jwt verify fail if jwks succeed but kid is not found', async ({ pass, teardown, same, equal }) => {
+ const { n, e, kty } = jwtPublicKey
+ const kid = 'TEST-KID'
+ const alg = 'RS256'
+
+ const jwksEndpoint = await buildJwksEndpoint(
+ {
+ keys: [
+ {
+ alg,
+ kty,
+ n,
+ e,
+ use: 'sig',
+ kid
+ }
+ ]
+ }
+ )
+
+ const issuer = `http://localhost:${jwksEndpoint.server.address().port}`
+ const header = {
+ kid: 'DIFFERENT_KID',
+ alg,
+ typ: 'JWT'
+ }
+ const payload = {
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': ['user']
+ }
+
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ jwks: true
+ },
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ find: true,
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }]
+ })
+ teardown(app.close.bind(app))
+ teardown(() => jwksEndpoint.close())
+
+ await app.ready()
+
+ const signSync = createSigner({
+ algorithm: 'RS256',
+ key: privateKey,
+ header,
+ iss: issuer,
+ kid
+ })
+ const token = signSync(payload)
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+})
+
+test('jwt verify fail if the domain is not allowed', async ({ pass, teardown, same, equal }) => {
+ const { n, e, kty } = jwtPublicKey
+ const kid = 'TEST-KID'
+ const alg = 'RS256'
+
+ const jwksEndpoint = await buildJwksEndpoint(
+ {
+ keys: [
+ {
+ alg,
+ kty,
+ n,
+ e,
+ use: 'sig',
+ kid
+ }
+ ]
+ }
+ )
+
+ const issuer = `http://localhost:${jwksEndpoint.server.address().port}`
+ const header = {
+ kid,
+ alg,
+ typ: 'JWT'
+ }
+ const payload = {
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': ['user']
+ }
+
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ jwks: {
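+ // The local JWKS endpoint is not in this allow list, so fetching the signing key must fail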
+ allowedDomains: ['http://myalloweddomain.com']
+ }
+ },
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ find: true,
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }]
+ })
+ teardown(app.close.bind(app))
+ teardown(() => jwksEndpoint.close())
+
+ await app.ready()
+
+ const signSync = createSigner({
+ algorithm: 'RS256',
+ key: privateKey,
+ header,
+ iss: issuer,
+ kid
+ })
+ const token = signSync(payload)
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+})
diff --git a/packages/db-authorization/test/multi-role.test.js b/packages/db-authorization/test/multi-role.test.js
new file mode 100644
index 0000000000..f0cfc6a5fd
--- /dev/null
+++ b/packages/db-authorization/test/multi-role.test.js
@@ -0,0 +1,483 @@
+'use strict'
+
+const { test } = require('tap')
+const fastify = require('fastify')
+const core = require('@platformatic/db-core')
+const { connInfo, clear, isSQLite } = require('./helper')
+const auth = require('..')
+
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ }
+}
+
+test('moderators can delete user pages', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'moderator',
+ entity: 'page',
+ find: true,
+ delete: true,
+ save: true,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }, {
+ role: 'user',
+ entity: 'page',
+ find: true,
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
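+ // Multiple roles are passed as a comma-separated string in the role claim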
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user,moderator'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: [{
+ id: 1,
+ title: 'Hello'
+ }]
+ }
+ }, 'deletePages response')
+ }
+
+ const token2 = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'moderator,user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 2,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: [{
+ id: 2,
+ title: 'Hello'
+ }]
+ }
+ }, 'deletePages response')
+ }
+
+ const token3 = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 43,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token3}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 3,
+ title: 'Hello',
+ userId: 43
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token3}`
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'deletePages'
+ ]
+ }
+ ]
+ }, 'deletePages response')
+ }
+})
+
+test('blocked users can read but cannot save or delete pages', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'blocked',
+ entity: 'page',
+ find: true,
+ delete: false,
+ save: false
+ }, {
+ role: 'user',
+ entity: 'page',
+ find: true,
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ const token2 = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'blocked,user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ query {
+ pages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ pages: [{
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }]
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'deletePages'
+ ]
+ }
+ ]
+ }, 'deletePages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+})
diff --git a/packages/db-authorization/test/nested.test.js b/packages/db-authorization/test/nested.test.js
new file mode 100644
index 0000000000..642f862ba5
--- /dev/null
+++ b/packages/db-authorization/test/nested.test.js
@@ -0,0 +1,220 @@
+'use strict'
+
+const { test } = require('tap')
+const fastify = require('fastify')
+const core = require('@platformatic/db-core')
+const { connInfo, clear } = require('./helper')
+const auth = require('..')
+
+async function createPagesAndCategories (db, sql) {
+ await db.query(sql`CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(42)
+ );`)
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ category_id INTEGER references categories(id),
+ user_id INTEGER
+ );`)
+}
+
+test('categories are global, but pages are user specific', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createPagesAndCategories(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'category',
+ find: true,
+ save: true
+ }, {
+ role: 'user',
+ entity: 'page',
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ find: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ saveCategory(input: { name: "pets" }) {
+ id
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'saveCategory status code')
+ same(res.json(), {
+ data: {
+ saveCategory: {
+ id: 1
+ }
+ }
+ }, 'saveCategory response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello", categoryId: 1 }) {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ category: {
+ id: 1,
+ name: 'pets'
+ }
+ }
+ }
+ }, 'savePage response')
+ }
+
+ const token2 = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 43,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { categoryId: 1, title: "Hello World" }) {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ const data = res.json()
+ same(data, {
+ data: {
+ savePage: {
+ id: 2,
+ title: 'Hello World',
+ category: {
+ id: 1,
+ name: 'pets'
+ }
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ query {
+ getCategoryById(id: 1) {
+ id
+ name
+ pages {
+ id
+ title
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'getCategoryById status code')
+ same(res.json(), {
+ data: {
+ getCategoryById: {
+ id: 1,
+ name: 'pets',
+ pages: [{
+ id: 2,
+ title: 'Hello World'
+ }]
+ }
+ }
+ }, 'getCategoryById response')
+ }
+})
diff --git a/packages/db-authorization/test/set-from-code.test.js b/packages/db-authorization/test/set-from-code.test.js
new file mode 100644
index 0000000000..10df84e507
--- /dev/null
+++ b/packages/db-authorization/test/set-from-code.test.js
@@ -0,0 +1,487 @@
+'use strict'
+
+const { test } = require('tap')
+const core = require('@platformatic/db-core')
+const fastify = require('fastify')
+const { connInfo, clear, createBasicPages } = require('./helper')
+const auth = require('..')
+
+test('users can save and update their own pages, read everybody\'s and delete none', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ await app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous'
+ })
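+ // Rules are added programmatically with addRulesForRoles instead of the rules plugin option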
+ app.platformatic.addRulesForRoles([{
+ role: 'user',
+ entity: 'page',
+ find: true,
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }])
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'pages response')
+ }
+
+ const token2 = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 43,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello World',
+ userId: 42
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ title,
+ userId
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ }
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ insertPages: [
+ { id: 2, title: 'Page 1', userId: 42 },
+ { id: 3, title: 'Page 2', userId: 42 },
+ { id: 4, title: 'Page 3', userId: 42 }
+ ]
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'getPageById'
+ ]
+ }
+ ]
+ }, 'getPageById response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'deletePages'
+ ]
+ }
+ ]
+ }, 'deletePages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ title,
+ userId
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ }
+ }
+ })
+ equal(res.statusCode, 200, 'insertPages status code')
+ same(res.json(), {
+ data: {
+ insertPages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'insertPages'
+ ]
+ }
+ ]
+ }, 'insertPages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'deletePages'
+ ]
+ }
+ ]
+ }, 'deletePages response')
+ }
+})
diff --git a/packages/db-authorization/test/simple-rest.test.js b/packages/db-authorization/test/simple-rest.test.js
new file mode 100644
index 0000000000..3c77e8f2c1
--- /dev/null
+++ b/packages/db-authorization/test/simple-rest.test.js
@@ -0,0 +1,228 @@
+'use strict'
+
+const { test } = require('tap')
+const fastify = require('fastify')
+const core = require('@platformatic/db-core')
+const { connInfo, clear, isSQLite } = require('./helper')
+const auth = require('..')
+
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ }
+}
+
+test('users can save and update their own pages, read everybody\'s and delete none', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ find: true,
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
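+ // The same authorization rules also protect the generated REST routes for the page entity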
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ title: 'Hello'
+ }
+ })
+ equal(res.statusCode, 200, 'POST /pages status code')
+ same(res.json(), {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }, 'POST /pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages/1',
+ headers: {
+ Authorization: `Bearer ${token}`
+ }
+ })
+ equal(res.statusCode, 200, 'GET /pages/1 status code')
+ same(res.json(), {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }, 'GET /pages/1 response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages/1?fields=id,title',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ title: 'Hello World'
+ }
+ })
+ equal(res.statusCode, 200, 'POST /pages/1 status code')
+ same(res.json(), {
+ id: 1,
+ title: 'Hello World'
+ }, 'POST /pages/1 response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages/1?fields=id,title',
+ headers: {
+ Authorization: `Bearer ${token}`
+ }
+ })
+ equal(res.statusCode, 200, 'GET /pages/1 status code')
+ same(res.json(), {
+ id: 1,
+ title: 'Hello World'
+ }, 'GET /pages/1 response')
+ }
+
+ const token2 = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 43,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages/1',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ title: 'Hello World2'
+ }
+ })
+ equal(res.statusCode, 401, 'POST /pages/1 status code (Unauthorized)')
+ same(res.json(), {
+ message: 'operation not allowed',
+ code: 'PLT_DB_AUTH_UNAUTHORIZED',
+ error: 'Unauthorized',
+ statusCode: 401
+ }, 'POST /pages/1 response (Unauthorized)')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages/1',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ }
+ })
+ equal(res.statusCode, 200, 'GET /pages/1 status code (Authorized)')
+ same(res.json(), {
+ id: 1,
+ title: 'Hello World',
+ userId: 42
+ }, 'GET /pages/1 response (Authorized)')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages/1'
+ })
+ equal(res.statusCode, 401, 'GET /pages/1 status code (Anonymous)')
+ same(res.json(), {
+ message: 'operation not allowed',
+ code: 'PLT_DB_AUTH_UNAUTHORIZED',
+ error: 'Unauthorized',
+ statusCode: 401
+ }, 'GET /pages/1 response (Anonymous)')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages/1',
+ body: {
+ title: 'Hello World3'
+ }
+ })
+ equal(res.statusCode, 401, 'POST /pages/1 status code (Anonymous)')
+ same(res.json(), {
+ message: 'operation not allowed',
+ code: 'PLT_DB_AUTH_UNAUTHORIZED',
+ error: 'Unauthorized',
+ statusCode: 401
+ }, 'POST /pages/1 response (Anonymous)')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'DELETE',
+ url: '/pages/1',
+ headers: {
+ Authorization: `Bearer ${token}`
+ }
+ })
+ equal(res.statusCode, 401, 'DELETE /pages/1 status code (Unauthorized)')
+ same(res.json(), {
+ message: 'operation not allowed',
+ code: 'PLT_DB_AUTH_UNAUTHORIZED',
+ error: 'Unauthorized',
+ statusCode: 401
+ }, 'DELETE /pages/1 response (Unauthorized)')
+ }
+})
diff --git a/packages/db-authorization/test/simple.test.js b/packages/db-authorization/test/simple.test.js
new file mode 100644
index 0000000000..fc61cc84f8
--- /dev/null
+++ b/packages/db-authorization/test/simple.test.js
@@ -0,0 +1,1439 @@
+'use strict'
+
+const { test } = require('tap')
+const fastify = require('fastify')
+const core = require('@platformatic/db-core')
+const { connInfo, clear, isSQLite } = require('./helper')
+const auth = require('..')
+
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ }
+}
+
+test('users can save and update their own pages, read everybody\'s and delete none', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ find: true,
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'pages response')
+ }
+
+ const token2 = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 43,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello World',
+ userId: 42
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ title,
+ userId
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ }
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ insertPages: [
+ { id: 2, title: 'Page 1', userId: 42 },
+ { id: 3, title: 'Page 2', userId: 42 },
+ { id: 4, title: 'Page 3', userId: 42 }
+ ]
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'getPageById'
+ ]
+ }
+ ]
+ }, 'getPageById response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'deletePages'
+ ]
+ }
+ ]
+ }, 'deletePages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ title,
+ userId
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ }
+ }
+ })
+ equal(res.statusCode, 200, 'insertPages status code')
+ same(res.json(), {
+ data: {
+ insertPages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'insertPages'
+ ]
+ }
+ ]
+ }, 'insertPages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'deletePages'
+ ]
+ }
+ ]
+ }, 'deletePages response')
+ }
+})
+
+test('not allowed without permissions', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ }
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'getPageById'
+ ]
+ }
+ ]
+ }, 'getPageById response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ title,
+ userId
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ }
+ }
+ })
+ equal(res.statusCode, 200, 'insertPages status code')
+ same(res.json(), {
+ data: {
+ insertPages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'insertPages'
+ ]
+ }
+ ]
+ }, 'insertPages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'getPageById'
+ ]
+ }
+ ]
+ }, 'getPageById response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'deletePages'
+ ]
+ }
+ ]
+ }, 'deletePages response')
+ }
+})
+
+test('users can read, save their own pages', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ find: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ },
+ delete: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'pages response')
+ }
+
+ const token2 = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 43,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: null
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: [{
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }]
+ }
+ }, 'deletePages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: null
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 2,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token2}`
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: []
+ }
+ }, 'deletePages response')
+ }
+})
+
+test('defaults are false', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'anonymous',
+ entity: 'page'
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'anonymous'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'getPageById'
+ ]
+ }
+ ]
+ }, 'getPageById response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'deletePages'
+ ]
+ }
+ ]
+ }, 'deletePages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ pages {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ pages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'pages'
+ ]
+ }
+ ]
+ }, 'pages response')
+ }
+})
+
+test('should throw if context is not passed', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous'
+ })
+ teardown(app.close.bind(app))
+ app.get('/no-context', async (req, reply) => {
+ const res = await app.platformatic.entities.page.find({
+ fields: ['id', 'title']
+ })
+ return res
+ })
+ await app.ready()
+
+ const res = await app.inject({
+ method: 'GET',
+ url: '/no-context'
+ })
+
+ equal(res.statusCode, 500)
+ same(res.json(), {
+ statusCode: 500,
+ error: 'Internal Server Error',
+ message: 'Missing context. You should call this function with { ctx: { reply }}'
+ })
+})
+
+test('should not complain if context is passed', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [
+ {
+ role: 'anonymous',
+ entity: 'page',
+ find: true,
+ delete: true,
+ save: true
+ }
+ ]
+ })
+ teardown(app.close.bind(app))
+ app.get('/with-context', async (req, reply) => {
+ const res = await app.platformatic.entities.page.find({
+ fields: ['id', 'title'],
+ ctx: { reply }
+ })
+ return res
+ })
+ await app.ready()
+
+ const res = await app.inject({
+ method: 'GET',
+ url: '/with-context'
+ })
+
+ equal(res.statusCode, 200)
+ same(res.json(), [])
+})
+
+test('should support where conditions expressed with object', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ jwt: {
+ secret: 'supersecret'
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ delete: false,
+ save: true,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ find: {
+ checks: {
+ userId: {
+ eq: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const token = await app.jwt.sign({
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ Authorization: `Bearer ${token}`
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'pages response')
+ }
+})
diff --git a/packages/db-authorization/test/utils.test.js b/packages/db-authorization/test/utils.test.js
new file mode 100644
index 0000000000..78a66a8170
--- /dev/null
+++ b/packages/db-authorization/test/utils.test.js
@@ -0,0 +1,61 @@
+'use strict'
+const { test } = require('tap')
+const { getRequestFromContext, getRoles } = require('../lib/utils')
+
+const ANONYMOUS_ROLE = 'anonymous'
+test('should extract request from context', ({ equal, plan }) => {
+ plan(1)
+ const fakeContext = {
+ reply: {
+ request: 'hooray'
+ }
+ }
+ const res = getRequestFromContext(fakeContext)
+ equal(res, 'hooray')
+})
+
+test('should throw', ({ throws, plan }) => {
+ plan(2)
+ throws(() => {
+ getRequestFromContext()
+ })
+
+ throws(() => {
+ getRequestFromContext({
+ noReplyHere: true
+ })
+ })
+})
+
+test('should get roles from user', ({ same, plan }) => {
+ plan(4)
+ const roleKey = 'role'
+ {
+ const requestWithNoUser = {}
+ same(getRoles(requestWithNoUser, roleKey, ANONYMOUS_ROLE), [ANONYMOUS_ROLE])
+ }
+ {
+ const requestWithStringRoles = {
+ user: {
+ [roleKey]: 'role1,role2,role3'
+ }
+ }
+ same(getRoles(requestWithStringRoles, roleKey, ANONYMOUS_ROLE), ['role1', 'role2', 'role3'])
+ }
+ {
+ const requestWithArrayRoles = {
+ user: {
+ [roleKey]: ['role1', 'role2', 'role3']
+ }
+ }
+ same(getRoles(requestWithArrayRoles, roleKey, ANONYMOUS_ROLE), ['role1', 'role2', 'role3'])
+ }
+ {
+ const requestWithOtherKindOfRole = {
+ user: {
+ [roleKey]: { role1: true, role2: true }
+ }
+ }
+ same(getRoles(requestWithOtherKindOfRole, roleKey, ANONYMOUS_ROLE), [ANONYMOUS_ROLE])
+ }
+})
diff --git a/packages/db-authorization/test/webhook.test.js b/packages/db-authorization/test/webhook.test.js
new file mode 100644
index 0000000000..a59dd278b9
--- /dev/null
+++ b/packages/db-authorization/test/webhook.test.js
@@ -0,0 +1,719 @@
+'use strict'
+
+const fastify = require('fastify')
+const auth = require('..')
+const { test } = require('tap')
+const core = require('@platformatic/db-core')
+const { connInfo, clear, isSQLite } = require('./helper')
+const { request, Agent, setGlobalDispatcher } = require('undici')
+
+const agent = new Agent({
+ keepAliveTimeout: 10,
+ keepAliveMaxTimeout: 10
+})
+setGlobalDispatcher(agent)
+
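+// Test-only webhook authorizer: POST /login stores the request body as the
+// session user, and POST /authorize echoes that user back (or replies 401
+// when no session exists), standing in for an external authorization service.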
+async function buildAuthorizer (opts = {}) {
+ const app = fastify()
+ app.register(require('@fastify/cookie'))
+ app.register(require('@fastify/session'), {
+ cookieName: 'sessionId',
+ secret: 'a secret with minimum length of 32 characters',
+ cookie: { secure: false }
+ })
+
+ app.post('/login', async (request, reply) => {
+ request.session.user = request.body
+ return {
+ status: 'ok'
+ }
+ })
+
+ app.post('/authorize', async (request, reply) => {
+ if (typeof opts.onAuthorize === 'function') {
+ await opts.onAuthorize(request)
+ }
+
+ const user = request.session.user
+ if (!user) {
+ return reply.code(401).send({ error: 'Unauthorized' })
+ }
+ return user
+ })
+
+ await app.listen({ port: 0 })
+
+ return app
+}
+
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ user_id INTEGER
+ );`)
+ }
+}
+
+test('users can save and update their own pages, read everybody\'s and delete none', async ({ pass, teardown, same, equal }) => {
+ const authorizer = await buildAuthorizer()
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ webhook: {
+ url: `http://localhost:${authorizer.server.address().port}/authorize`
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ find: true,
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+ teardown(() => authorizer.close())
+
+ await app.ready()
+
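+  // log in against the authorizer and return the session cookie to attach to GraphQL requests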
+ async function getCookie (userId, role) {
+ const res = await request(`http://localhost:${authorizer.server.address().port}/login`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify({
+ 'X-PLATFORMATIC-USER-ID': userId,
+ 'X-PLATFORMATIC-ROLE': role
+ })
+ })
+
+ res.body.resume()
+
+ const cookie = res.headers['set-cookie'].split(';')[0]
+ return cookie
+ }
+
+ const cookie = await getCookie(42, 'user')
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ cookie
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ cookie
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ cookie
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ cookie
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'pages response')
+ }
+})
+
+test('Non-200 status code', async ({ plan, pass, teardown, same, equal }) => {
+ plan(6)
+
+ const authorizer = await buildAuthorizer({
+ onAuthorize: async (request) => {
+ if (request.headers['x-status-code']) {
+ pass('authorizer called, throwing exception')
+ const err = new Error('Unauthorized')
+        err.statusCode = Number(request.headers['x-status-code']) // header names are lowercased by Node
+ throw err
+ }
+ }
+ })
+ const app = fastify()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ webhook: {
+ url: `http://localhost:${authorizer.server.address().port}/authorize`
+ },
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ find: true,
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+ teardown(() => authorizer.close())
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-STATUS-CODE': '403'
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+})
+
+test('admin secret does not reach webhook', async ({ pass, teardown, same, equal, fail }) => {
+ const authorizer = await buildAuthorizer({
+ onAuthorize: async (request) => {
+ fail('authorizer called')
+ }
+ })
+ const app = fastify()
+ const adminSecret = require('crypto').randomUUID()
+ app.register(core, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(auth, {
+ webhook: {
+ url: `http://localhost:${authorizer.server.address().port}/authorize`
+ },
+ adminSecret,
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous',
+ rules: [{
+ role: 'user',
+ entity: 'page',
+ delete: false,
+ defaults: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ },
+ find: {
+ checks: {
+ userId: 'x-platformatic-user-id'
+ }
+ },
+ save: {
+ checks: {
+ userId: 'X-PLATFORMATIC-USER-ID'
+ }
+ }
+ }, {
+ role: 'anonymous',
+ entity: 'page',
+ find: false,
+ delete: false,
+ save: false
+ }]
+ })
+ teardown(app.close.bind(app))
+ teardown(authorizer.close.bind(authorizer))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello',
+ userId: 42
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 43,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 43,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ userId
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: null
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ title,
+ userId
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ }
+ }
+ })
+    equal(res.statusCode, 200, 'insertPages status code')
+ same(res.json(), {
+ data: {
+ insertPages: [
+ { id: 2, title: 'Page 1', userId: 42 },
+ { id: 3, title: 'Page 2', userId: 42 },
+ { id: 4, title: 'Page 3', userId: 42 }
+ ]
+ }
+    }, 'insertPages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret,
+ 'X-PLATFORMATIC-USER-ID': 42,
+ 'X-PLATFORMATIC-ROLE': 'user'
+ },
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'deletePages'
+ ]
+ }
+ ]
+ }, 'deletePages response')
+ }
+})
diff --git a/packages/db-core/.npmignore b/packages/db-core/.npmignore
new file mode 100644
index 0000000000..be27365759
--- /dev/null
+++ b/packages/db-core/.npmignore
@@ -0,0 +1,2 @@
+.nyc_output
+coverage
diff --git a/packages/db-core/.taprc b/packages/db-core/.taprc
new file mode 100644
index 0000000000..c1917e8701
--- /dev/null
+++ b/packages/db-core/.taprc
@@ -0,0 +1 @@
+jobs: 1
diff --git a/packages/db-core/LICENSE b/packages/db-core/LICENSE
new file mode 100644
index 0000000000..261eeb9e9f
--- /dev/null
+++ b/packages/db-core/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/db-core/NOTICE b/packages/db-core/NOTICE
new file mode 100644
index 0000000000..a7d8a8414a
--- /dev/null
+++ b/packages/db-core/NOTICE
@@ -0,0 +1,13 @@
+ Copyright 2022 Platformatic
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/db-core/README.md b/packages/db-core/README.md
new file mode 100644
index 0000000000..1c655d1f50
--- /dev/null
+++ b/packages/db-core/README.md
@@ -0,0 +1,32 @@
+# @platformatic/db-core
+
+This module wraps [`@platformatic/sql-mapper`](https://www.npmjs.com/package/@platformatic/sql-mapper),
+[`@platformatic/sql-graphql`](https://www.npmjs.com/package/@platformatic/sql-graphql) and
+[`@platformatic/sql-openapi`](https://www.npmjs.com/package/@platformatic/sql-openapi) in a convenient [Fastify](https://www.fastify.io/)
+plugin.
+
+Check out the full documentation for Platformatic DB on [our website](https://oss.platformatic.dev/docs/getting-started/quick-start-guide).
+
+## Install
+
+```sh
+npm install @platformatic/db-core
+```
+
+## Usage
+
+```js
+import fastify from 'fastify'
+import db from '@platformatic/db-core'
+
+const app = fastify()
+app.register(db, {
+ // connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
+ // connectionString: 'mysql://root@127.0.0.1:3307/graph'
+ connectionString: 'sqlite://:memory:'
+})
+```
+
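+Both the GraphQL and OpenAPI layers are enabled by default. A minimal sketch,
+based on how this plugin handles the `graphql` and `openapi` options, of
+disabling one layer or forwarding plugin-specific options:
+
+```js
+import fastify from 'fastify'
+import db from '@platformatic/db-core'
+
+const app = fastify()
+app.register(db, {
+  connectionString: 'sqlite://:memory:',
+  // skip registering @platformatic/sql-graphql entirely
+  graphql: false,
+  // an object is forwarded as-is to @platformatic/sql-openapi
+  openapi: {}
+})
+```
+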
+## License
+
+Apache 2.0
diff --git a/packages/db-core/index.js b/packages/db-core/index.js
new file mode 100644
index 0000000000..0da413a03e
--- /dev/null
+++ b/packages/db-core/index.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const fp = require('fastify-plugin')
+const sqlMapper = require('@platformatic/sql-mapper')
+const sqlOpenAPI = require('@platformatic/sql-openapi')
+const sqlGraphQL = require('@platformatic/sql-graphql')
+
+module.exports = fp(async function (app, opts) {
+ app.register(sqlMapper, {
+ ...opts
+ })
+
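+  // enabled by default; an object is forwarded as-is to @platformatic/sql-graphql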
+ if (opts.graphql !== false) {
+ const graphqlConfig = typeof opts.graphql === 'object' ? opts.graphql : {}
+ app.register(sqlGraphQL, {
+ ...graphqlConfig
+ })
+ }
+
+ // enabled by default
+ if (opts.openapi !== false) {
+ const openapiConfig = typeof opts.openapi === 'object' ? opts.openapi : {}
+ app.register(sqlOpenAPI, {
+ ...openapiConfig
+ })
+ }
+})
+
+module.exports.connect = sqlMapper.connect
diff --git a/packages/db-core/package.json b/packages/db-core/package.json
new file mode 100644
index 0000000000..a6986eb7b2
--- /dev/null
+++ b/packages/db-core/package.json
@@ -0,0 +1,32 @@
+{
+ "name": "@platformatic/db-core",
+ "version": "0.0.21",
+ "description": "",
+ "main": "index.js",
+ "scripts": {
+ "test": "standard | snazzy && tap test/*test.js"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/plaformatic/platformatic.git"
+ },
+ "author": "Matteo Collina ",
+ "license": "Apache-2.0",
+ "bugs": {
+ "url": "https://github.com/plaformatic/platformatic/issues"
+ },
+ "homepage": "https://github.com/plaformatic/platformatic#readme",
+ "devDependencies": {
+ "fastify": "^4.6.0",
+ "mercurius": "^11.0.0",
+ "snazzy": "^9.0.0",
+ "standard": "^17.0.0",
+ "tap": "^16.0.0"
+ },
+ "dependencies": {
+ "@platformatic/sql-graphql": "workspace:*",
+ "@platformatic/sql-mapper": "workspace:*",
+ "@platformatic/sql-openapi": "workspace:*",
+ "fastify-plugin": "^4.1.0"
+ }
+}
diff --git a/packages/db-core/test/basic.test.js b/packages/db-core/test/basic.test.js
new file mode 100644
index 0000000000..e2a946b4ea
--- /dev/null
+++ b/packages/db-core/test/basic.test.js
@@ -0,0 +1,161 @@
+'use strict'
+
+const { test } = require('tap')
+const Fastify = require('fastify')
+const { clear, connInfo, isSQLite } = require('./helper')
+const core = require('..')
+
+async function createBasicPages (db, sql) {
+  if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+}
+
+async function onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+}
+
+test('entities are available', async ({ ok, teardown }) => {
+ const app = Fastify()
+ app.register(core, {
+ ...connInfo,
+ onDatabaseLoad
+ })
+ teardown(() => app.close())
+
+ await app.ready()
+ ok(app.platformatic.entities.page)
+})
+
+test('graphql is available', async ({ equal, same, teardown }) => {
+ const app = Fastify()
+ app.register(core, {
+ ...connInfo,
+ onDatabaseLoad
+ })
+ teardown(() => app.close())
+
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+})
+
+test('graphiql can be enabled', async ({ equal, same, teardown }) => {
+ const app = Fastify()
+ app.register(core, {
+ ...connInfo,
+ onDatabaseLoad,
+ graphql: {
+ graphiql: true
+ }
+ })
+ teardown(() => app.close())
+
+ const res = await app.inject({
+ method: 'GET',
+ url: '/graphiql'
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+})
+
+test('graphql can be disabled', async ({ equal, teardown }) => {
+ const app = Fastify()
+ app.register(core, {
+ ...connInfo,
+ onDatabaseLoad,
+ graphql: false
+ })
+ teardown(() => app.close())
+
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 404, '/graphql not found')
+})
+
+test('openapi is available', async ({ equal, teardown }) => {
+ const app = Fastify()
+ app.register(core, {
+ ...connInfo,
+ onDatabaseLoad
+ })
+ teardown(() => app.close())
+
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages'
+ })
+ equal(res.statusCode, 200, '/pages status code')
+})
+
+test('openapi can be disabled', async ({ equal, teardown }) => {
+ const app = Fastify()
+ app.register(core, {
+ ...connInfo,
+ onDatabaseLoad,
+ openapi: false
+ })
+ teardown(() => app.close())
+
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages'
+ })
+ equal(res.statusCode, 404, '/pages status code')
+})
+
+test('openapi with an object', async ({ equal, teardown }) => {
+ const app = Fastify()
+ app.register(core, {
+ ...connInfo,
+ onDatabaseLoad,
+ openapi: {}
+ })
+ teardown(() => app.close())
+
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages'
+ })
+ equal(res.statusCode, 200, '/pages status code')
+})
diff --git a/packages/db-core/test/helper.js b/packages/db-core/test/helper.js
new file mode 100644
index 0000000000..ed606b4b8a
--- /dev/null
+++ b/packages/db-core/test/helper.js
@@ -0,0 +1,69 @@
+'use strict'
+
+const why = require('why-is-node-running')
+const { Agent, setGlobalDispatcher } = require('undici')
+
+// This file must be required/imported as the first file
+// in the test suite. It sets up the global environment
+// to track the open handles via why-is-node-running.
+setInterval(() => {
+ why()
+}, 10000).unref()
+
+const agent = new Agent({
+ keepAliveTimeout: 10,
+ keepAliveMaxTimeout: 10,
+ tls: {
+ rejectUnauthorized: false
+ }
+})
+setGlobalDispatcher(agent)
+
+// Needed to work with dates & postgresql
+// See https://node-postgres.com/features/types/
+process.env.TZ = 'UTC'
+
+const connInfo = {}
+
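+// Select the database under test via the DB env var (PostgreSQL by default)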
+if (!process.env.DB || process.env.DB === 'postgresql') {
+ connInfo.connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ module.exports.isPg = true
+} else if (process.env.DB === 'mariadb') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1:3307/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'mysql') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'mysql8') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1:3308/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'sqlite') {
+ connInfo.connectionString = 'sqlite://:memory:'
+ module.exports.isSQLite = true
+}
+
+module.exports.connInfo = connInfo
+
+module.exports.clear = async function (db, sql) {
+ try {
+ await db.query(sql`DROP TABLE pages`)
+ } catch (err) {
+ }
+}
+
+function buildConfig (options) {
+ const base = {
+ server: {},
+ core: {},
+ cli: {},
+ dashboard: {},
+ authorization: {}
+ }
+
+ return Object.assign(base, options)
+}
+
+module.exports.buildConfig = buildConfig
diff --git a/packages/db-dashboard/.gitignore b/packages/db-dashboard/.gitignore
new file mode 100644
index 0000000000..a547bf36d8
--- /dev/null
+++ b/packages/db-dashboard/.gitignore
@@ -0,0 +1,24 @@
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+pnpm-debug.log*
+lerna-debug.log*
+
+node_modules
+dist
+dist-ssr
+*.local
+
+# Editor directories and files
+.vscode/*
+!.vscode/extensions.json
+.idea
+.DS_Store
+*.suo
+*.ntvs*
+*.njsproj
+*.sln
+*.sw?
diff --git a/packages/db-dashboard/LICENSE b/packages/db-dashboard/LICENSE
new file mode 100644
index 0000000000..261eeb9e9f
--- /dev/null
+++ b/packages/db-dashboard/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/db-dashboard/NOTICE b/packages/db-dashboard/NOTICE
new file mode 100644
index 0000000000..a7d8a8414a
--- /dev/null
+++ b/packages/db-dashboard/NOTICE
@@ -0,0 +1,13 @@
+ Copyright 2022 Platformatic
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/db-dashboard/README.md b/packages/db-dashboard/README.md
new file mode 100644
index 0000000000..c35424eb08
--- /dev/null
+++ b/packages/db-dashboard/README.md
@@ -0,0 +1,8 @@
+# @platformatic/db-dashboard
+
+Experimental dashboard for Platformatic DB; it is not meant to be used standalone.
+Check out the full documentation for Platformatic DB on [our website](https://oss.platformatic.dev/docs/getting-started/quick-start-guide).
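+
+For reference, the package exports a standard Fastify plugin that serves the
+pre-built dashboard from its `build` folder. The sketch below is only illustrative:
+Platformatic DB registers the plugin for you, and the standalone `fastify` setup
+shown here is an assumption, not a supported entry point.
+
+```js
+// sketch.js - illustrative only; Platformatic DB normally does this wiring itself
+const fastify = require('fastify')()
+
+fastify.register(require('@platformatic/db-dashboard'), {
+  // unless explicitly set to false, '/' redirects to '/dashboard'
+  dashboardAtRoot: true
+})
+
+fastify.listen({ port: 3042 }, (err) => {
+  if (err) throw err
+})
+```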
+
+## License
+
+Apache 2.0
diff --git a/packages/db-dashboard/index.html b/packages/db-dashboard/index.html
new file mode 100644
index 0000000000..dcced66f26
--- /dev/null
+++ b/packages/db-dashboard/index.html
@@ -0,0 +1,13 @@
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="UTF-8" />
+    <link rel="icon" type="image/x-icon" href="/images/favicon.ico" />
+    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
+    <title>Platformatic DB</title>
+  </head>
+  <body>
+    <div id="root"></div>
+    <script type="module" src="/src/main.jsx"></script>
+  </body>
+</html>
diff --git a/packages/db-dashboard/index.js b/packages/db-dashboard/index.js
new file mode 100644
index 0000000000..9255784267
--- /dev/null
+++ b/packages/db-dashboard/index.js
@@ -0,0 +1,20 @@
+'use strict'
+
+const fastifyStatic = require('@fastify/static')
+const path = require('path')
+module.exports = async function dashboardPlugin (app, opts) {
+ app.log.info('dashboard plugin loaded.')
+ if (opts.dashboardAtRoot !== false) {
+ app.get('/', { hide: true }, function (req, reply) {
+ return reply.redirect(302, '/dashboard')
+ })
+ }
+
+ app.register(fastifyStatic, {
+ root: path.join(__dirname, 'build')
+ })
+
+ app.get('/dashboard', { hide: true }, function (req, reply) {
+ return reply.sendFile('index.html')
+ })
+}
diff --git a/packages/db-dashboard/package.json b/packages/db-dashboard/package.json
new file mode 100644
index 0000000000..cc64bde0ab
--- /dev/null
+++ b/packages/db-dashboard/package.json
@@ -0,0 +1,69 @@
+{
+ "name": "@platformatic/db-dashboard",
+ "version": "0.0.21",
+ "main": "index.js",
+ "description": "Platformatic DB Dashboard",
+ "repository": {
+ "type": "git",
+    "url": "git+https://github.com/platformatic/platformatic.git"
+ },
+ "license": "Apache-2.0",
+ "bugs": {
+    "url": "https://github.com/platformatic/platformatic/issues"
+ },
+  "homepage": "https://github.com/platformatic/platformatic#readme",
+ "scripts": {
+ "dev": "vite",
+ "build": "vite build",
+ "prepublish": "npm run build",
+ "preview": "vite preview",
+ "test": "standard | snazzy && vitest run",
+ "test:e2e": "playwright test",
+ "test:watch": "vitest watch",
+ "lint": "standard | snazzy"
+ },
+ "dependencies": {
+ "@fastify/static": "^6.5.0"
+ },
+ "devDependencies": {
+ "@graphiql/toolkit": "^0.8.0",
+ "@playwright/test": "^1.24.2",
+ "bulma": "^0.9.4",
+ "graphiql": "^2.0.0",
+ "json-format-highlight": "^1.0.4",
+ "jsoneditor": "^9.9.0",
+ "react": "^18.2.0",
+ "react-dom": "^18.2.0",
+ "react-hot-toast": "^2.3.0",
+ "react-router-dom": "^6.3.0",
+ "snazzy": "^9.0.0",
+ "standard": "^17.0.0",
+ "swagger-ui-react": "4.13.0",
+ "@types/react": "^18.0.17",
+ "@types/react-dom": "^18.0.6",
+ "@vitejs/plugin-react": "^2.0.0",
+ "happy-dom": "^6.0.4",
+ "history": "^5.3.0",
+ "playwright": "^1.24.2",
+ "react-test-renderer": "^18.2.0",
+ "vite": "^3.0.4",
+ "vitest": "^0.23.0"
+ },
+ "standard": {
+ "globals": [
+ "describe",
+ "context",
+ "before",
+ "beforeEach",
+ "after",
+ "afterEach",
+ "it",
+ "expect",
+ "test",
+ "screen"
+ ],
+ "ignore": [
+ "/build"
+ ]
+ }
+}
diff --git a/packages/db-dashboard/playwright.config.js b/packages/db-dashboard/playwright.config.js
new file mode 100644
index 0000000000..f6cc8d1886
--- /dev/null
+++ b/packages/db-dashboard/playwright.config.js
@@ -0,0 +1,72 @@
+// import type { PlaywrightTestConfig } from '@playwright/test';
+import { devices } from '@playwright/test'
+
+/**
+ * Read environment variables from file.
+ * https://github.com/motdotla/dotenv
+ */
+// require('dotenv').config();
+
+/**
+ * See https://playwright.dev/docs/test-configuration.
+ */
+const config = {
+ testDir: './test/e2e',
+ /* Maximum time one test can run for. */
+ timeout: 30 * 1000,
+ expect: {
+ /**
+ * Maximum time expect() should wait for the condition to be met.
+ * For example in `await expect(locator).toHaveText();`
+ */
+ timeout: 5000
+ },
+ /* Run tests in files in parallel */
+ fullyParallel: true,
+ /* Fail the build on CI if you accidentally left test.only in the source code. */
+ forbidOnly: !!process.env.CI,
+ /* Retry on CI only */
+ retries: process.env.CI ? 2 : 0,
+ /* Opt out of parallel tests on CI. */
+ workers: process.env.CI ? 1 : undefined,
+ /* Reporter to use. See https://playwright.dev/docs/test-reporters */
+ reporter: 'html',
+ /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
+ use: {
+ /* Maximum time each action such as `click()` can take. Defaults to 0 (no limit). */
+ actionTimeout: 0,
+ /* Base URL to use in actions like `await page.goto('/')`. */
+ // baseURL: 'http://localhost:3000',
+
+ /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
+ trace: 'on-first-retry'
+ },
+
+ /* Configure projects for major browsers */
+ projects: [
+ {
+ name: 'chromium',
+ use: {
+ ...devices['Desktop Chrome']
+ }
+ },
+
+ {
+ name: 'firefox',
+ use: {
+ ...devices['Desktop Firefox']
+ }
+ }
+ ]
+
+ /* Folder for test artifacts such as screenshots, videos, traces, etc. */
+ // outputDir: 'test-results/',
+
+ /* Run your local dev server before starting the tests */
+ // webServer: {
+ // command: 'npx platformatic db --config=./test/e2e/fixtures/e2e-test-config.json',
+ // port: 3042,
+ // },
+}
+
+export default config
diff --git a/packages/db-dashboard/public/images/apple-touch-icon.png b/packages/db-dashboard/public/images/apple-touch-icon.png
new file mode 100644
index 0000000000..369931d3ba
Binary files /dev/null and b/packages/db-dashboard/public/images/apple-touch-icon.png differ
diff --git a/packages/db-dashboard/public/images/favicon-16x16.png b/packages/db-dashboard/public/images/favicon-16x16.png
new file mode 100644
index 0000000000..b82114bcdd
Binary files /dev/null and b/packages/db-dashboard/public/images/favicon-16x16.png differ
diff --git a/packages/db-dashboard/public/images/favicon-32x32.png b/packages/db-dashboard/public/images/favicon-32x32.png
new file mode 100644
index 0000000000..0fb5849f51
Binary files /dev/null and b/packages/db-dashboard/public/images/favicon-32x32.png differ
diff --git a/packages/db-dashboard/public/images/favicon.ico b/packages/db-dashboard/public/images/favicon.ico
new file mode 100644
index 0000000000..6b064af16f
Binary files /dev/null and b/packages/db-dashboard/public/images/favicon.ico differ
diff --git a/packages/db-dashboard/public/images/logo-192x192.png b/packages/db-dashboard/public/images/logo-192x192.png
new file mode 100644
index 0000000000..d0db02ddec
Binary files /dev/null and b/packages/db-dashboard/public/images/logo-192x192.png differ
diff --git a/packages/db-dashboard/public/images/logo-512x512.png b/packages/db-dashboard/public/images/logo-512x512.png
new file mode 100644
index 0000000000..df0960e702
Binary files /dev/null and b/packages/db-dashboard/public/images/logo-512x512.png differ
diff --git a/packages/db-dashboard/public/index.html b/packages/db-dashboard/public/index.html
new file mode 100644
index 0000000000..3666539a92
--- /dev/null
+++ b/packages/db-dashboard/public/index.html
@@ -0,0 +1,43 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ Dashboard - Platformatic DB
+
+
+ You need to enable JavaScript to run this app.
+
+
+
+
diff --git a/packages/db-dashboard/public/manifest.json b/packages/db-dashboard/public/manifest.json
new file mode 100644
index 0000000000..2c1f6d01ed
--- /dev/null
+++ b/packages/db-dashboard/public/manifest.json
@@ -0,0 +1,25 @@
+{
+ "short_name": "Platformatic DB Dashboard",
+ "name": "Platformatic DB Dashboard",
+ "icons": [
+ {
+ "src": "images/favicon.ico",
+ "sizes": "64x64 32x32 24x24 16x16",
+ "type": "image/x-icon"
+ },
+ {
+ "src": "images/logo-192x192.png",
+ "type": "image/png",
+ "sizes": "192x192"
+ },
+ {
+ "src": "images/logo-512x512.png",
+ "type": "image/png",
+ "sizes": "512x512"
+ }
+ ],
+ "start_url": ".",
+ "display": "standalone",
+ "theme_color": "#000000",
+ "background_color": "#ffffff"
+}
diff --git a/packages/db-dashboard/public/robots.txt b/packages/db-dashboard/public/robots.txt
new file mode 100644
index 0000000000..e9e57dc4d4
--- /dev/null
+++ b/packages/db-dashboard/public/robots.txt
@@ -0,0 +1,3 @@
+# https://www.robotstxt.org/robotstxt.html
+User-agent: *
+Disallow:
diff --git a/packages/db-dashboard/schema.js b/packages/db-dashboard/schema.js
new file mode 100644
index 0000000000..ccacec309b
--- /dev/null
+++ b/packages/db-dashboard/schema.js
@@ -0,0 +1 @@
+'use strict'
diff --git a/packages/db-dashboard/src/App.css b/packages/db-dashboard/src/App.css
new file mode 100644
index 0000000000..2c5e2ef5cd
--- /dev/null
+++ b/packages/db-dashboard/src/App.css
@@ -0,0 +1,41 @@
+#root {
+ max-width: 1280px;
+ margin: 0 auto;
+ padding: 2rem;
+ text-align: center;
+}
+
+.logo {
+ height: 6em;
+ padding: 1.5em;
+ will-change: filter;
+}
+.logo:hover {
+ filter: drop-shadow(0 0 2em #646cffaa);
+}
+.logo.react:hover {
+ filter: drop-shadow(0 0 2em #61dafbaa);
+}
+
+@keyframes logo-spin {
+ from {
+ transform: rotate(0deg);
+ }
+ to {
+ transform: rotate(360deg);
+ }
+}
+
+@media (prefers-reduced-motion: no-preference) {
+ a:nth-of-type(2) .logo {
+ animation: logo-spin infinite 20s linear;
+ }
+}
+
+.card {
+ padding: 2em;
+}
+
+.read-the-docs {
+ color: #888;
+}
diff --git a/packages/db-dashboard/src/App.jsx b/packages/db-dashboard/src/App.jsx
new file mode 100644
index 0000000000..4533764449
--- /dev/null
+++ b/packages/db-dashboard/src/App.jsx
@@ -0,0 +1,51 @@
+import { Navigate, Routes, Route } from 'react-router-dom'
+import { createContext, useState } from 'react'
+import Layout from './components/Layout'
+import Home from './pages/Home'
+import GraphiQLPage from './pages/GQL'
+import ConfigViewer from './pages/ConfigViewer'
+import SwaggerViewer from './pages/SwaggerViewer'
+const AppContext = createContext({})
+export { AppContext }
+function getCurrentUrl () {
+ return `${window.location.protocol}//${window.location.host}`
+}
+function App () {
+ const [userName, setUsername] = useState(null)
+ const [logged, setLogged] = useState(false)
+ const [adminSecret, setAdminSecret] = useState(null)
+ const urlPrefix = import.meta.env.VITE_SERVER_URL || getCurrentUrl()
+ return (
+
+
+
+ } />
+ } />
+
+ }
+ />
+ } />
+ } />
+ } />
+
+
+
+ )
+}
+
+export default App
diff --git a/packages/db-dashboard/src/assets/react.svg b/packages/db-dashboard/src/assets/react.svg
new file mode 100644
index 0000000000..6c87de9bb3
--- /dev/null
+++ b/packages/db-dashboard/src/assets/react.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/packages/db-dashboard/src/components/Layout.jsx b/packages/db-dashboard/src/components/Layout.jsx
new file mode 100644
index 0000000000..a1c1179582
--- /dev/null
+++ b/packages/db-dashboard/src/components/Layout.jsx
@@ -0,0 +1,56 @@
+import Navbar from './Navbar'
+import { Fragment, useContext, useEffect, useState } from 'react'
+import Sidebar from './Sidebar'
+import { Toaster } from 'react-hot-toast'
+import LoginBox from './LoginBox'
+import { AppContext } from '../App'
+export default function Layout (props) {
+ const [loaded, setLoaded] = useState(false)
+ const { logged, setLogged, urlPrefix } = useContext(AppContext)
+ useEffect(() => {
+ async function getConfig () {
+ const apiUrl = `${urlPrefix}/_admin/config`
+ const response = await fetch(apiUrl)
+ if (response.status === 200) {
+ const body = await response.json()
+ if (body.loginRequired !== true) {
+ setLogged(true)
+ }
+ setLoaded(true)
+ }
+ }
+ getConfig()
+ }, [])
+ if (!loaded) {
+ return Dashboard is Loading
+ }
+ if (logged) {
+ return (
+ <>
+
+
+ >
+ )
+ } else {
+ return (
+
+ )
+ }
+}
diff --git a/packages/db-dashboard/src/components/LoginBox.jsx b/packages/db-dashboard/src/components/LoginBox.jsx
new file mode 100644
index 0000000000..6477ba8356
--- /dev/null
+++ b/packages/db-dashboard/src/components/LoginBox.jsx
@@ -0,0 +1,54 @@
+import { useContext, useRef, useState } from 'react'
+import { AppContext } from '../App'
+export default function LoginBox () {
+ const inputPasswordRef = useRef('platformatic')
+ const { setLogged, setUsername, setAdminSecret, urlPrefix } = useContext(AppContext)
+ const [loginError, setLoginError] = useState(null)
+ async function onLoginButtonClicked (event) {
+ event.preventDefault()
+ const apiUrl = `${urlPrefix}/_admin/login`
+ const res = await fetch(apiUrl, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify({ password: inputPasswordRef.current.value })
+ })
+
+ if (res.status === 200) {
+ const body = await res.json()
+ if (body.authorized) {
+ setLogged(true)
+ setUsername('admin')
+ setAdminSecret(inputPasswordRef.current.value)
+ }
+ } else {
+ setLoginError('Wrong password.')
+ }
+ }
+ return (
+
+
+
+
+
Login
+
+
Please login to proceed.
+
+
+
+
+
setLoginError(null)} ref={inputPasswordRef} />
+ {loginError &&
Wrong password.
}
+
+
+
+ Login
+
+
+
+
+
+
+ )
+}
diff --git a/packages/db-dashboard/src/components/Navbar.jsx b/packages/db-dashboard/src/components/Navbar.jsx
new file mode 100644
index 0000000000..391e836202
--- /dev/null
+++ b/packages/db-dashboard/src/components/Navbar.jsx
@@ -0,0 +1,25 @@
+import styles from './Navbar.module.css'
+import { AppContext } from '../App'
+import { useContext } from 'react'
+export default function Navbar () {
+ const { userName, setUsername, logged, setLogged } = useContext(AppContext)
+ function onLogoutButtonClicked (event) {
+ event.preventDefault()
+ setLogged(false)
+ setUsername(null)
+ }
+ return (
+
+
+
+ {logged && (
+
Logout {userName}
+ )}
+
+
+ )
+}
diff --git a/packages/db-dashboard/src/components/Navbar.module.css b/packages/db-dashboard/src/components/Navbar.module.css
new file mode 100644
index 0000000000..faea0fcfe8
--- /dev/null
+++ b/packages/db-dashboard/src/components/Navbar.module.css
@@ -0,0 +1,3 @@
+.logo {
+ margin-right: .25rem;
+}
\ No newline at end of file
diff --git a/packages/db-dashboard/src/components/Navbar.test.jsx b/packages/db-dashboard/src/components/Navbar.test.jsx
new file mode 100644
index 0000000000..edd6c722bf
--- /dev/null
+++ b/packages/db-dashboard/src/components/Navbar.test.jsx
@@ -0,0 +1,12 @@
+import renderer from 'react-test-renderer'
+import React from 'react'
+import Navbar from './Navbar'
+
+describe('Navbar', () => {
+  test('renders Navbar component', () => {
+    renderer.create(<Navbar />)
+ expect(screen.getByTestId('navbar-home-link')).toHaveTextContent(
+ 'Platformatic DB'
+ )
+ })
+})
diff --git a/packages/db-dashboard/src/components/Notification.jsx b/packages/db-dashboard/src/components/Notification.jsx
new file mode 100644
index 0000000000..853513ce9a
--- /dev/null
+++ b/packages/db-dashboard/src/components/Notification.jsx
@@ -0,0 +1,9 @@
+
+export default function Notification (props) {
+ const { text } = props
+ return (
+
+ {text}
+
+ )
+}
diff --git a/packages/db-dashboard/src/components/Sidebar.css b/packages/db-dashboard/src/components/Sidebar.css
new file mode 100644
index 0000000000..9fc117e176
--- /dev/null
+++ b/packages/db-dashboard/src/components/Sidebar.css
@@ -0,0 +1,3 @@
+.active {
+ background-color: #17e98a;
+}
\ No newline at end of file
diff --git a/packages/db-dashboard/src/components/Sidebar.jsx b/packages/db-dashboard/src/components/Sidebar.jsx
new file mode 100644
index 0000000000..77f247a1eb
--- /dev/null
+++ b/packages/db-dashboard/src/components/Sidebar.jsx
@@ -0,0 +1,27 @@
+import { NavLink } from 'react-router-dom'
+import './Sidebar.css'
+export default function Sidebar () {
+ return (
+
+ General
+
+
+ Dashboard
+
+
+ GraphiQL
+
+
+ View Config
+
+
+ Entity API Docs
+
+
+ Platformatic DB Admin API Docs
+
+
+
+
+ )
+}
diff --git a/packages/db-dashboard/src/components/Sidebar.test.jsx b/packages/db-dashboard/src/components/Sidebar.test.jsx
new file mode 100644
index 0000000000..e829644f55
--- /dev/null
+++ b/packages/db-dashboard/src/components/Sidebar.test.jsx
@@ -0,0 +1,18 @@
+import renderer from 'react-test-renderer'
+import { createMemoryHistory } from 'history'
+import React from 'react'
+import { Router } from 'react-router-dom'
+import Sidebar from './Sidebar'
+
+describe('Sidebar', () => {
+  test('renders Sidebar component', () => {
+    const history = createMemoryHistory()
+    renderer.create(
+      <Router location={history.location} navigator={history}>
+        <Sidebar />
+      </Router>
+    )
+ expect(screen.getByTestId('dashboard-link')).toHaveTextContent('Dashboard')
+ expect(screen.getByTestId('graphiql-link')).toHaveTextContent('GraphiQL')
+ })
+})
diff --git a/packages/db-dashboard/src/elements/Title.jsx b/packages/db-dashboard/src/elements/Title.jsx
new file mode 100644
index 0000000000..07e436eae3
--- /dev/null
+++ b/packages/db-dashboard/src/elements/Title.jsx
@@ -0,0 +1,3 @@
+export default function Title (props) {
+  return <h1 className='title'>{props.children}</h1>
+}
diff --git a/packages/db-dashboard/src/index.css b/packages/db-dashboard/src/index.css
new file mode 100644
index 0000000000..917888c1d1
--- /dev/null
+++ b/packages/db-dashboard/src/index.css
@@ -0,0 +1,70 @@
+:root {
+ font-family: Inter, Avenir, Helvetica, Arial, sans-serif;
+ font-size: 16px;
+ line-height: 24px;
+ font-weight: 400;
+
+ color-scheme: light dark;
+ color: rgba(255, 255, 255, 0.87);
+ background-color: #242424;
+
+ font-synthesis: none;
+ text-rendering: optimizeLegibility;
+ -webkit-font-smoothing: antialiased;
+ -moz-osx-font-smoothing: grayscale;
+ -webkit-text-size-adjust: 100%;
+}
+
+a {
+ font-weight: 500;
+ color: #646cff;
+ text-decoration: inherit;
+}
+a:hover {
+ color: #535bf2;
+}
+
+body {
+ margin: 0;
+ display: flex;
+ place-items: center;
+ min-width: 320px;
+ min-height: 100vh;
+}
+
+h1 {
+ font-size: 3.2em;
+ line-height: 1.1;
+}
+
+button {
+ border-radius: 8px;
+ border: 1px solid transparent;
+ padding: 0.6em 1.2em;
+ font-size: 1em;
+ font-weight: 500;
+ font-family: inherit;
+ background-color: #1a1a1a;
+ cursor: pointer;
+ transition: border-color 0.25s;
+}
+button:hover {
+ border-color: #646cff;
+}
+button:focus,
+button:focus-visible {
+ outline: 4px auto -webkit-focus-ring-color;
+}
+
+@media (prefers-color-scheme: light) {
+ :root {
+ color: #213547;
+ background-color: #ffffff;
+ }
+ a:hover {
+ color: #747bff;
+ }
+ button {
+ background-color: #f9f9f9;
+ }
+}
diff --git a/packages/db-dashboard/src/main.jsx b/packages/db-dashboard/src/main.jsx
new file mode 100644
index 0000000000..ee81aa3408
--- /dev/null
+++ b/packages/db-dashboard/src/main.jsx
@@ -0,0 +1,13 @@
+import React from 'react'
+import ReactDOM from 'react-dom/client'
+import 'bulma/css/bulma.min.css'
+import App from './App'
+import { BrowserRouter } from 'react-router-dom'
+const root = ReactDOM.createRoot(document.getElementById('root'))
+root.render(
+  <React.StrictMode>
+    <BrowserRouter>
+      <App />
+    </BrowserRouter>
+  </React.StrictMode>
+)
diff --git a/packages/db-dashboard/src/pages/ConfigViewer.jsx b/packages/db-dashboard/src/pages/ConfigViewer.jsx
new file mode 100644
index 0000000000..438f19f70f
--- /dev/null
+++ b/packages/db-dashboard/src/pages/ConfigViewer.jsx
@@ -0,0 +1,78 @@
+import { Fragment, useContext, useEffect, useRef, useState } from 'react'
+import Title from '../elements/Title'
+import formatHighlight from 'json-format-highlight'
+import JSONEditor from 'jsoneditor'
+import 'jsoneditor/dist/jsoneditor.min.css'
+import { notify } from '../utils'
+import { AppContext } from '../App'
+let editor
+
+export default function ConfigViewer () {
+ const { adminSecret, urlPrefix } = useContext(AppContext)
+ const [saveEnabled, setSaveEnabled] = useState(true)
+ const jsonEditorRef = useRef()
+ /* eslint-disable no-unused-vars */
+ const [_, setConfig] = useState('')
+ /* eslint-enable no-unused-vars */
+ const editorOptions = {}
+ const configFileUrl = `${urlPrefix}/_admin/config-file`
+
+ async function onSaveButtonClicked (event) {
+ event.preventDefault()
+ const newConfig = editor.get()
+ const res = await fetch(configFileUrl, {
+ headers: {
+ 'Content-Type': 'application/json',
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret
+ },
+ method: 'POST',
+ body: JSON.stringify(newConfig)
+ })
+ if (res.status === 200) {
+ notify({
+ message: 'Config file saved',
+ type: 'success'
+ })
+ } else {
+ notify({
+ message: await res.json(),
+ type: 'error'
+ })
+ }
+ }
+ let rendered = false
+ useEffect(() => {
+ async function getConfig () {
+ const response = await fetch(configFileUrl, {
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': adminSecret
+ }
+ })
+ if (response.status === 200) {
+ const body = await response.json()
+ if (!body.configFileLocation) {
+ setSaveEnabled(false)
+ }
+ setConfig(formatHighlight(body))
+ if (!rendered) {
+ editor = new JSONEditor(jsonEditorRef.current, editorOptions)
+ rendered = true
+ }
+ editor.set(body)
+ }
+ }
+ getConfig()
+ }, [])
+
+  return (
+    <>
+      <Title>Platformatic DB Config File</Title>
+      <div ref={jsonEditorRef} />
+      {saveEnabled && <button onClick={onSaveButtonClicked}>Save</button>}
+    </>
+  )
+}
diff --git a/packages/db-dashboard/src/pages/GQL.jsx b/packages/db-dashboard/src/pages/GQL.jsx
new file mode 100644
index 0000000000..6163c6b872
--- /dev/null
+++ b/packages/db-dashboard/src/pages/GQL.jsx
@@ -0,0 +1,48 @@
+import GraphiQL from 'graphiql'
+import { Fragment, useContext } from 'react'
+import { createGraphiQLFetcher } from '@graphiql/toolkit'
+import { AppContext } from '../App'
+import 'graphiql/graphiql.css'
+import styles from './GQL.module.css'
+
+export default function GraphiQLPage (props) {
+ const { adminSecret } = useContext(AppContext)
+ const { graphqlEndpoint } = props
+ const fetcher = getFetcher(graphqlEndpoint)
+ return (
+ <>
+
+
+
+
+
+
+
+ >
+ )
+}
+
+function getFetcher (endpoint) {
+ const parsedUrl = new URL(endpoint)
+ const host = parsedUrl.host
+
+ const websocketProtocol = parsedUrl.protocol === 'https:' ? 'wss:' : 'ws:'
+
+ const url = `${parsedUrl.protocol}//${host}${parsedUrl.pathname}`
+ const subscriptionUrl = `${websocketProtocol}//${host}${parsedUrl.pathname}`
+
+ const fetcher = createGraphiQLFetcher({
+ url,
+ subscriptionUrl
+ })
+ return fetcher
+}
diff --git a/packages/db-dashboard/src/pages/GQL.module.css b/packages/db-dashboard/src/pages/GQL.module.css
new file mode 100644
index 0000000000..99b039c97e
--- /dev/null
+++ b/packages/db-dashboard/src/pages/GQL.module.css
@@ -0,0 +1,13 @@
+.graphiql {
+ width: 100%;
+ height: 95vh;
+ overflow: hidden;
+ box-sizing: initial;
+ border: 1px solid hsl(0, 0%, 71%);
+}
+
+.graphiql-plt-logo {
+ height: 36px;
+ padding: 0;
+ margin-top: 7px;
+}
\ No newline at end of file
diff --git a/packages/db-dashboard/src/pages/Home.jsx b/packages/db-dashboard/src/pages/Home.jsx
new file mode 100644
index 0000000000..d174b84a53
--- /dev/null
+++ b/packages/db-dashboard/src/pages/Home.jsx
@@ -0,0 +1,28 @@
+import { Fragment } from 'react'
+import Title from '../elements/Title'
+import { notify } from '../utils'
+export default function Home () {
+ async function onRestartClicked () {
+ const res = await fetch('/_admin/restart', {
+ method: 'POST'
+ })
+ if (res.status === 200) {
+ notify({
+ message: 'Server restarted',
+ type: 'success'
+ })
+ } else {
+ notify({
+ message: 'There was an error...',
+ type: 'error'
+ })
+ }
+ }
+  return (
+    <>
+      <Title>Welcome to Platformatic DB!</Title>
+      <button onClick={onRestartClicked}>Restart Server</button>
+    </>
+  )
+}
diff --git a/packages/db-dashboard/src/pages/SwaggerViewer.jsx b/packages/db-dashboard/src/pages/SwaggerViewer.jsx
new file mode 100644
index 0000000000..5fb7c9ee40
--- /dev/null
+++ b/packages/db-dashboard/src/pages/SwaggerViewer.jsx
@@ -0,0 +1,10 @@
+'use strict'
+import SwaggerUI from 'swagger-ui-react'
+import 'swagger-ui-react/swagger-ui.css'
+
+export default function SwaggerViewer (props) {
+ const { swaggerDocUrl } = props
+  return (
+    <SwaggerUI url={swaggerDocUrl} />
+  )
+}
diff --git a/packages/db-dashboard/src/reportWebVitals.js b/packages/db-dashboard/src/reportWebVitals.js
new file mode 100644
index 0000000000..9381231d21
--- /dev/null
+++ b/packages/db-dashboard/src/reportWebVitals.js
@@ -0,0 +1,13 @@
+const reportWebVitals = onPerfEntry => {
+ if (onPerfEntry && onPerfEntry instanceof Function) {
+ import('web-vitals').then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => {
+ getCLS(onPerfEntry)
+ getFID(onPerfEntry)
+ getFCP(onPerfEntry)
+ getLCP(onPerfEntry)
+ getTTFB(onPerfEntry)
+ })
+ }
+}
+
+export default reportWebVitals
diff --git a/packages/db-dashboard/src/setupTests.js b/packages/db-dashboard/src/setupTests.js
new file mode 100644
index 0000000000..52aaef1d24
--- /dev/null
+++ b/packages/db-dashboard/src/setupTests.js
@@ -0,0 +1,5 @@
+// jest-dom adds custom jest matchers for asserting on DOM nodes.
+// allows you to do things like:
+// expect(element).toHaveTextContent(/react/i)
+// learn more: https://github.com/testing-library/jest-dom
+import '@testing-library/jest-dom'
diff --git a/packages/db-dashboard/src/utils.js b/packages/db-dashboard/src/utils.js
new file mode 100644
index 0000000000..93bc7f0d93
--- /dev/null
+++ b/packages/db-dashboard/src/utils.js
@@ -0,0 +1,14 @@
+
+import toast from 'react-hot-toast'
+
+function notify ({ message, type = 'success' }) {
+ const opts = {
+ position: 'top-right'
+ }
+ if (undefined === toast[type]) {
+ return toast(message, opts)
+ }
+ return toast[type](message, opts)
+}
+
+export { notify }
diff --git a/packages/db-dashboard/test/e2e/fixtures/e2e-test-config.json b/packages/db-dashboard/test/e2e/fixtures/e2e-test-config.json
new file mode 100644
index 0000000000..07434338e9
--- /dev/null
+++ b/packages/db-dashboard/test/e2e/fixtures/e2e-test-config.json
@@ -0,0 +1,35 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": "3042",
+ "logger": {
+ "level": "info"
+ },
+ "cors": {
+ "origin": true,
+ "methods": [
+ "GET",
+ "POST",
+ "PUT",
+ "DELETE",
+ "PATCH"
+ ]
+ }
+ },
+ "migrations": {
+ "dir": "./migrations"
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1:5432/postgres",
+ "ignore": {
+ "versions": true
+ }
+ },
+ "dashboard": {
+ "enabled": true,
+ "rootPath": true
+ },
+ "authorization": {
+ "adminSecret": "basegraph"
+ }
+}
\ No newline at end of file
diff --git a/packages/db-dashboard/test/e2e/fixtures/migrations/001.do.sql b/packages/db-dashboard/test/e2e/fixtures/migrations/001.do.sql
new file mode 100644
index 0000000000..0a09b9f9cb
--- /dev/null
+++ b/packages/db-dashboard/test/e2e/fixtures/migrations/001.do.sql
@@ -0,0 +1,4 @@
+CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(255) NOT NULL
+);
diff --git a/packages/db-dashboard/test/e2e/fixtures/migrations/001.undo.sql b/packages/db-dashboard/test/e2e/fixtures/migrations/001.undo.sql
new file mode 100644
index 0000000000..f5465cf307
--- /dev/null
+++ b/packages/db-dashboard/test/e2e/fixtures/migrations/001.undo.sql
@@ -0,0 +1 @@
+DROP TABLE pages;
diff --git a/packages/db-dashboard/test/e2e/fixtures/migrations/002.do.sql b/packages/db-dashboard/test/e2e/fixtures/migrations/002.do.sql
new file mode 100644
index 0000000000..098ff52de4
--- /dev/null
+++ b/packages/db-dashboard/test/e2e/fixtures/migrations/002.do.sql
@@ -0,0 +1,5 @@
+CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255) NOT NULL
+);
+ALTER TABLE pages ADD COLUMN category_id INTEGER REFERENCES categories(id);
diff --git a/packages/db-dashboard/test/e2e/fixtures/migrations/002.undo.sql b/packages/db-dashboard/test/e2e/fixtures/migrations/002.undo.sql
new file mode 100644
index 0000000000..048007a86d
--- /dev/null
+++ b/packages/db-dashboard/test/e2e/fixtures/migrations/002.undo.sql
@@ -0,0 +1,2 @@
+ALTER TABLE pages DROP COLUMN category_id;
+DROP TABLE categories;
diff --git a/packages/db-dashboard/test/e2e/index.spec.js b/packages/db-dashboard/test/e2e/index.spec.js
new file mode 100644
index 0000000000..6b66b2b4c8
--- /dev/null
+++ b/packages/db-dashboard/test/e2e/index.spec.js
@@ -0,0 +1,27 @@
+import { test, expect } from '@playwright/test'
+
+test('login box should appear', async ({ page }) => {
+ await page.goto('http://localhost:3042/')
+
+ await expect(page).toHaveTitle(/Platformatic DB/)
+
+ await page.locator('input[type=password]').fill('basegraph')
+ const loginButton = page.locator('.box button')
+ await loginButton.click()
+ await page.waitForTimeout(2000)
+
+ const mainTitle = await page.locator('main h1.title').innerHTML()
+ expect(mainTitle).toBe('Welcome to Platformatic DB!')
+})
+
+test('graphiql is loading', async ({ page }) => {
+ await page.goto('http://localhost:3042/')
+ await page.locator('input[type=password]').fill('basegraph')
+ const loginButton = page.locator('.box button')
+ await loginButton.click()
+ await page.waitForTimeout(2000)
+
+ await page.locator('a[href="/giql"]').click()
+ const graphiQLContainer = await page.locator('.graphiql-container')
+ await expect(graphiQLContainer).toBeVisible()
+})
diff --git a/packages/db-dashboard/vite.config.js b/packages/db-dashboard/vite.config.js
new file mode 100644
index 0000000000..d88cc89bd7
--- /dev/null
+++ b/packages/db-dashboard/vite.config.js
@@ -0,0 +1,10 @@
+import { defineConfig } from 'vite'
+import react from '@vitejs/plugin-react'
+
+// https://vitejs.dev/config/
+export default defineConfig({
+ plugins: [react()],
+ build: {
+ outDir: 'build'
+ }
+})
diff --git a/packages/db-dashboard/vitest.config.js b/packages/db-dashboard/vitest.config.js
new file mode 100644
index 0000000000..dba2e55aa4
--- /dev/null
+++ b/packages/db-dashboard/vitest.config.js
@@ -0,0 +1,11 @@
+/// <reference types="vitest" />
+
+import { defineConfig } from 'vite'
+
+export default defineConfig({
+ test: {
+ globals: true,
+ environment: 'happy-dom',
+ exclude: ['**/test/e2e/**', 'node_modules/**']
+ }
+})
diff --git a/packages/db/.npmignore b/packages/db/.npmignore
new file mode 100644
index 0000000000..be27365759
--- /dev/null
+++ b/packages/db/.npmignore
@@ -0,0 +1,2 @@
+.nyc_output
+coverage
diff --git a/packages/db/.taprc b/packages/db/.taprc
new file mode 100644
index 0000000000..2fced675cf
--- /dev/null
+++ b/packages/db/.taprc
@@ -0,0 +1,2 @@
+jobs: 1
+timeout: 60
diff --git a/packages/db/LICENSE b/packages/db/LICENSE
new file mode 100644
index 0000000000..261eeb9e9f
--- /dev/null
+++ b/packages/db/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/db/NOTICE b/packages/db/NOTICE
new file mode 100644
index 0000000000..a7d8a8414a
--- /dev/null
+++ b/packages/db/NOTICE
@@ -0,0 +1,13 @@
+ Copyright 2022 Platformatic
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/db/README.md b/packages/db/README.md
new file mode 100644
index 0000000000..6320ce8414
--- /dev/null
+++ b/packages/db/README.md
@@ -0,0 +1,13 @@
+# @platformatic/db
+
+Check out the full documentation for Platformatic DB on [our website](https://oss.platformatic.dev/docs/getting-started/quick-start-guide).
+
+## Install
+
+```sh
+npm install @platformatic/db
+```
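+
+A typical quick start then looks like this, using the commands documented in the
+CLI help (this assumes the `platformatic` CLI is available on your PATH and that
+you run the commands from your project directory):
+
+```sh
+platformatic db init     # scaffold platformatic.db.json and a migrations folder
+platformatic db migrate  # apply the migrations
+platformatic db start    # start the server
+```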
+
+## License
+
+Apache 2.0
diff --git a/packages/db/_admin/auth-routes.js b/packages/db/_admin/auth-routes.js
new file mode 100644
index 0000000000..c02d245cad
--- /dev/null
+++ b/packages/db/_admin/auth-routes.js
@@ -0,0 +1,48 @@
+'use strict'
+
+async function authRoutes (app, opts) {
+ const headersSchema = {
+ type: 'object',
+ properties: {
+ 'x-platformatic-admin-secret': {
+ type: 'string',
+ description: 'The secret defined in authorization.adminSecret property of config file.'
+ }
+ },
+ required: ['x-platformatic-admin-secret']
+ }
+ const unauthorizedResponseSchema = {
+ type: 'object',
+ properties: {
+ success: { type: 'boolean', default: false },
+ message: { type: 'string', default: 'Unauthorized' }
+ }
+ }
+ // restarts the server
+ app.post('/restart', {
+ schema: {
+ headers: headersSchema,
+ response: {
+ 200: {
+ type: 'object',
+ properties: {
+ success: { type: 'boolean', default: true }
+ }
+ },
+ 401: unauthorizedResponseSchema
+ }
+ }
+ },
+ async function (req, reply) {
+ app.log.info('Restarting server...')
+ await app.restart()
+ app.log.info('...server restarted')
+ return { success: true }
+ })
+
+ if (opts.configManager) {
+ app.register(opts.configManager.toFastifyPlugin())
+ }
+}
+
+module.exports = authRoutes
diff --git a/packages/db/_admin/index.js b/packages/db/_admin/index.js
new file mode 100644
index 0000000000..fec6acc878
--- /dev/null
+++ b/packages/db/_admin/index.js
@@ -0,0 +1,24 @@
+'use strict'
+
+const Swagger = require('@fastify/swagger')
+
+module.exports = async (app, opts) => {
+ await app.register(Swagger, {
+ routePrefix: 'documentation',
+ exposeRoute: true,
+ openapi: {
+ info: {
+ title: 'Platformatic DB Admin Routes',
+ description: 'Configure and manage your Platformatic DB instance.'
+ }
+ }
+ })
+ app.register(require('./non-auth-routes'), {
+ ...opts,
+ prefix: ''
+ })
+ app.register(require('./auth-routes'), {
+ ...opts,
+ prefix: ''
+ })
+}
diff --git a/packages/db/_admin/non-auth-routes.js b/packages/db/_admin/non-auth-routes.js
new file mode 100644
index 0000000000..8ca49675b6
--- /dev/null
+++ b/packages/db/_admin/non-auth-routes.js
@@ -0,0 +1,59 @@
+'use strict'
+
+async function nonAuthRoutes (app, opts) {
+ let adminSecret
+ if (opts.authorization && opts.authorization.adminSecret) {
+ adminSecret = opts.authorization.adminSecret
+ }
+ /** NON AUTHENTICATED ROUTES */
+ app.get('/config', {
+ schema: {
+ response: {
+ 200: {
+ type: 'object',
+ properties: {
+ loginRequired: { type: 'boolean' }
+ }
+ }
+ }
+ }
+ },
+ async (req, reply) => {
+ const output = {
+ loginRequired: false
+ }
+ if (adminSecret) {
+ output.loginRequired = true
+ }
+ return reply.code(200).send(output)
+ })
+
+ // handles login
+ app.post('/login', {
+ schema: {
+ response: {
+ 200: {
+ type: 'object',
+ properties: {
+ authorized: { type: 'boolean' }
+ }
+ }
+ },
+ body: {
+ type: 'object',
+ required: ['password'],
+ properties: {
+ password: { type: 'string' }
+ }
+ }
+ },
+ handler: (req, reply) => {
+      if (adminSecret && req.body.password === adminSecret) {
+ return reply.code(200).send({ authorized: true })
+ } else {
+ return reply.code(401).send({ authorized: false })
+ }
+ }
+ })
+}
+module.exports = nonAuthRoutes
diff --git a/packages/db/db.mjs b/packages/db/db.mjs
new file mode 100755
index 0000000000..6471498fec
--- /dev/null
+++ b/packages/db/db.mjs
@@ -0,0 +1,64 @@
+#! /usr/bin/env node
+
+import commist from 'commist'
+import parseArgs from 'minimist'
+import isMain from 'es-main'
+import helpMe from 'help-me'
+import { readFile } from 'fs/promises'
+import { join } from 'desm'
+
+import start from './lib/start.mjs'
+import { init } from './lib/init.mjs'
+import { migrate } from './lib/migrate.mjs'
+import { seed } from './lib/seed.mjs'
+import { generateTypes } from './lib/gen-types.mjs'
+import { printGraphQLSchema, printOpenAPISchema } from './lib/gen-schema.mjs'
+
+const help = helpMe({
+ dir: join(import.meta.url, 'help'),
+ // the default
+ ext: '.txt'
+})
+
+const program = commist({ maxDistance: 4 })
+
+program.register('help', help.toStdout)
+program.register('help init', help.toStdout.bind(null, ['init']))
+program.register('help start', help.toStdout.bind(null, ['start']))
+program.register('help migrate', help.toStdout.bind(null, ['migrate']))
+program.register({ command: 'help seed', strict: true }, help.toStdout.bind(null, ['seed']))
+program.register('help schema', help.toStdout.bind(null, ['schema']))
+
+program.register('start', start)
+program.register('init', init)
+program.register('migrate', migrate)
+program.register('seed', seed)
+program.register('types', generateTypes)
+program.register('schema graphql', printGraphQLSchema)
+program.register('schema openapi', printOpenAPISchema)
+
+// TODO add help command
+
+export async function runDB (argv) {
+ const args = parseArgs(argv, {
+ alias: {
+ v: 'version'
+ }
+ })
+
+ if (args.version) {
+ console.log('v' + JSON.parse(await readFile(join(import.meta.url, 'package.json'))).version)
+ process.exit(0)
+ }
+
+ const result = program.parse(argv)
+
+ if (result) {
+    // commist returns the arguments untouched when no registered command matched, so fall back to start
+ return start(result)
+ }
+}
+
+if (isMain(import.meta)) {
+ await runDB(process.argv.splice(2))
+}
diff --git a/packages/db/fixtures/bad-migrations.json b/packages/db/fixtures/bad-migrations.json
new file mode 100644
index 0000000000..6ed8b8194b
--- /dev/null
+++ b/packages/db/fixtures/bad-migrations.json
@@ -0,0 +1,15 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ },
+ "migrations": {
+ "dir": "./bad-migrations"
+ }
+}
diff --git a/packages/db/fixtures/bad-migrations/001.do.sql b/packages/db/fixtures/bad-migrations/001.do.sql
new file mode 100644
index 0000000000..220b6404de
--- /dev/null
+++ b/packages/db/fixtures/bad-migrations/001.do.sql
@@ -0,0 +1,4 @@
+CREATE TABLE graphs (
+ id SERIAL PRIMARY KEY,
+ name TEXT
+);
diff --git a/packages/db/fixtures/bad-migrations/001.undo.sql b/packages/db/fixtures/bad-migrations/001.undo.sql
new file mode 100644
index 0000000000..df301dc819
--- /dev/null
+++ b/packages/db/fixtures/bad-migrations/001.undo.sql
@@ -0,0 +1 @@
+DROP TABLE graphs;
diff --git a/packages/db/fixtures/bad-migrations/002.do.sql b/packages/db/fixtures/bad-migrations/002.do.sql
new file mode 100644
index 0000000000..f622b15d34
--- /dev/null
+++ b/packages/db/fixtures/bad-migrations/002.do.sql
@@ -0,0 +1,3 @@
+CREATE TABLE somethings (
+ id SERIAL PRIMARY KEY,
+ name TEXT
diff --git a/packages/db/fixtures/migrations/001.do.sql b/packages/db/fixtures/migrations/001.do.sql
new file mode 100644
index 0000000000..220b6404de
--- /dev/null
+++ b/packages/db/fixtures/migrations/001.do.sql
@@ -0,0 +1,4 @@
+CREATE TABLE graphs (
+ id SERIAL PRIMARY KEY,
+ name TEXT
+);
diff --git a/packages/db/fixtures/migrations/001.undo.sql b/packages/db/fixtures/migrations/001.undo.sql
new file mode 100644
index 0000000000..df301dc819
--- /dev/null
+++ b/packages/db/fixtures/migrations/001.undo.sql
@@ -0,0 +1 @@
+DROP TABLE graphs;
diff --git a/packages/db/fixtures/no-connectionString.json b/packages/db/fixtures/no-connectionString.json
new file mode 100644
index 0000000000..87db6e5a2f
--- /dev/null
+++ b/packages/db/fixtures/no-connectionString.json
@@ -0,0 +1,12 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "core": {},
+ "dashboard": {},
+ "authorization": {}
+}
diff --git a/packages/db/fixtures/no-migrations-dir.json b/packages/db/fixtures/no-migrations-dir.json
new file mode 100644
index 0000000000..396a36c2f7
--- /dev/null
+++ b/packages/db/fixtures/no-migrations-dir.json
@@ -0,0 +1,6 @@
+{
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ },
+ "migrations": {}
+}
diff --git a/packages/db/fixtures/no-migrations.json b/packages/db/fixtures/no-migrations.json
new file mode 100644
index 0000000000..0daface9cc
--- /dev/null
+++ b/packages/db/fixtures/no-migrations.json
@@ -0,0 +1,15 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ },
+ "dashboard": {},
+ "authorization": {}
+
+}
diff --git a/packages/db/fixtures/simple.json b/packages/db/fixtures/simple.json
new file mode 100644
index 0000000000..6a25da2061
--- /dev/null
+++ b/packages/db/fixtures/simple.json
@@ -0,0 +1,18 @@
+{
+ "server": {
+ "logger": {
+ "level": "info"
+ },
+ "hostname": "127.0.0.1",
+ "port": "3042"
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ },
+ "migrations": {
+ "dir": "./migrations",
+ "table": "versions",
+ "autoApply": false,
+ "validateChecksums": true
+ }
+}
diff --git a/packages/db/fixtures/sqlite/migrations/001.do.sql b/packages/db/fixtures/sqlite/migrations/001.do.sql
new file mode 100644
index 0000000000..0c81f656df
--- /dev/null
+++ b/packages/db/fixtures/sqlite/migrations/001.do.sql
@@ -0,0 +1,4 @@
+CREATE TABLE graphs (
+ id INTEGER PRIMARY KEY,
+ name TEXT
+);
diff --git a/packages/db/fixtures/sqlite/migrations/001.undo.sql b/packages/db/fixtures/sqlite/migrations/001.undo.sql
new file mode 100644
index 0000000000..df301dc819
--- /dev/null
+++ b/packages/db/fixtures/sqlite/migrations/001.undo.sql
@@ -0,0 +1 @@
+DROP TABLE graphs;
diff --git a/packages/db/fixtures/sqlite/platformatic.db.json b/packages/db/fixtures/sqlite/platformatic.db.json
new file mode 100644
index 0000000000..9aa9800be6
--- /dev/null
+++ b/packages/db/fixtures/sqlite/platformatic.db.json
@@ -0,0 +1,16 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "core": {
+ "connectionString": "sqlite://./db"
+ },
+ "migrations": {
+ "dir": "./migrations",
+ "table": "versions"
+ }
+}
diff --git a/packages/db/help/help.txt b/packages/db/help/help.txt
new file mode 100644
index 0000000000..fd6e46c85c
--- /dev/null
+++ b/packages/db/help/help.txt
@@ -0,0 +1,8 @@
+Available commands:
+
+* `help` - show this help message.
+* `help <command>` - show more information about a command.
+* `init` - initialize a default application.
+* `start` - start the server.
+* `migrate` - run migrations.
+* `seed` - run a seed file.
+* `schema <graphql|openapi>` - print the GraphQL or OpenAPI schema.
+* `types` - generate TypeScript types for the database entities.
diff --git a/packages/db/help/init.txt b/packages/db/help/init.txt
new file mode 100644
index 0000000000..5dadc2ff6e
--- /dev/null
+++ b/packages/db/help/init.txt
@@ -0,0 +1,14 @@
+Initialize a default Platformatic DB application:
+
+ $ platformatic db init
+
+As a result of executing this command, the `platformatic.db.json` configuration
+file and the `migrations` folder with migration examples will be generated.
+
+Options:
+
+ * `-h, --hostname <hostname>`: The hostname where the Platformatic DB server will listen for connections.
+ * `-p, --port <port>`: The port where the Platformatic DB server will listen for connections.
+ * `-db, --database <database>`: The name of the database to use. Default: `sqlite`.
+ * `-m, --migrations <path>`: The relative path to the migrations folder. Default: `./migrations`.
+ * `-t, --types <boolean>`: Set to `true` to enable type autogeneration. Default: `true`.
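+
+For example, to scaffold an application listening on port 3042 and backed by SQLite
+(the option values below are just illustrative):
+
+  $ platformatic db init --hostname 127.0.0.1 --port 3042 --database sqlite --types true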
diff --git a/packages/db/help/migrate.txt b/packages/db/help/migrate.txt
new file mode 100644
index 0000000000..ef816736bc
--- /dev/null
+++ b/packages/db/help/migrate.txt
@@ -0,0 +1,32 @@
+Apply all configured migrations to the database:
+
+ $ platformatic db migrate
+
+The migrations will be applied in the order they are specified in the
+folder defined in the configuration file. If you want to migrate up or down to a
+specific version, you can use the `--to` option:
+
+ $ platformatic db migrate --to 001
+
+Here is an example migration:
+
+ CREATE TABLE graphs (
+ id SERIAL PRIMARY KEY,
+ name TEXT
+ );
+
+You can always roll back to a specific version with:
+
+ $ platformatic db migrate --to VERSION
+
+Use 000 to reset to the initial state.
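+
+The migrations folder itself comes from the `migrations` section of the
+configuration file; a minimal example (the values shown are illustrative):
+
+  {
+    "migrations": {
+      "dir": "./migrations",
+      "autoApply": false
+    }
+  }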
+
+Options:
+
+ * `-c, --config <path>`: Path to the configuration file.
+ * `-t, --to <version>`: Migrate to a specific version.
+
+If not specified, the configuration will be loaded from
+`platformatic.db.json`, `platformatic.db.yml`, or `platformatic.db.tml` in the current directory.
+You can find more details about the configuration format at:
+https://oss.platformatic.dev/docs/reference/configuration.
diff --git a/packages/db/help/schema.txt b/packages/db/help/schema.txt
new file mode 100644
index 0000000000..afa94672ca
--- /dev/null
+++ b/packages/db/help/schema.txt
@@ -0,0 +1,13 @@
+Generate a schema from the database and print it to standard output:
+
+* `schema graphql` - generate the GraphQL schema
+* `schema openapi` - generate the OpenAPI schema
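+
+For example, to save the generated GraphQL schema to a file (the redirection is
+plain shell usage, not a dedicated option):
+
+  $ platformatic db schema graphql > schema.graphql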
+
+Options:
+
+ -c, --config FILE Specify a configuration file to use
+
+If not specified, the configuration will be loaded from
+`platformatic.db.json`, `platformatic.db.yml`, or `platformatic.db.tml` in the current directory.
+You can find more details about the configuration format at:
+https://oss.platformatic.dev/docs/reference/configuration.
diff --git a/packages/db/help/seed.txt b/packages/db/help/seed.txt
new file mode 100644
index 0000000000..8e2c238edc
--- /dev/null
+++ b/packages/db/help/seed.txt
@@ -0,0 +1,27 @@
+Load a seed into the database. This is a convenience method that loads
+a JavaScript file and configures @platformatic/sql-mapper to connect to
+the database specified in the configuration file.
+
+Here is an example of a seed file:
+
+ 'use strict'
+
+ module.exports = async function ({ entities, db, sql }) {
+ await entities.graph.save({ input: { name: 'Hello' } })
+ await db.query(sql`
+ INSERT INTO graphs (name) VALUES ('Hello 2');
+ `)
+ }
+
+You can run this using the `seed` command:
+
+ $ platformatic db seed seed.js
+
+Options:
+
+ * `--config` - Path to the configuration file.
+
+If not specified, the configuration will be loaded from
+`platformatic.db.json`, `platformatic.db.yml`, or `platformatic.db.tml` in the current directory.
+You can find more details about the configuration format at:
+https://oss.platformatic.dev/docs/reference/configuration.
diff --git a/packages/db/help/start.txt b/packages/db/help/start.txt
new file mode 100644
index 0000000000..a11bafc642
--- /dev/null
+++ b/packages/db/help/start.txt
@@ -0,0 +1,41 @@
+Start the Platformatic DB server with the following command:
+
+ $ platformatic db start
+
+You will need a configuration file. Here is an example to get you started;
+save the following as `platformatic.db.json`:
+
+ {
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "core": {
+ "connectionString": "sqlite://./db"
+ },
+ "migrations": {
+ "dir": "./migrations"
+ }
+ }
+
+
+Remember to create a migration; run the `db help migrate` command to learn more.
+
+All outstanding migrations will be applied to the database unless the
+`migrations.autoApply` configuration option is set to false.
+
+By sending the SIGUSR2 signal, the server can be reloaded.
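+
+For example, on Linux or macOS (replace `<pid>` with the server process id):
+
+  $ kill -USR2 <pid>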
+
+Options:
+
+ -c, --config FILE Specify a configuration file to use
+ --watch-ignore LIST Specify a comma separated list of glob patterns to
+ ignore when watching for changes
+
+If not specified, the configuration will be loaded from `platformatic.db.json`,
+`platformatic.db.yml`, or `platformatic.db.toml` in the current directory. You can find more details about
+the configuration format at:
+https://oss.platformatic.dev/docs/reference/configuration.
diff --git a/packages/db/help/types.txt b/packages/db/help/types.txt
new file mode 100644
index 0000000000..2b0f7f856e
--- /dev/null
+++ b/packages/db/help/types.txt
@@ -0,0 +1,32 @@
+Generate TypeScript types for your entities from the database.
+
+ $ platformatic db types
+
+As a result of executing this command, Platformatic DB will generate a `types`
+folder with a TypeScript file for each database entity. It will also generate a
+`global.d.ts` file that injects the types into the application instance.
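+
+For example, for a `movies` table the command generates files similar to the
+following (a sketch; the generated code may differ):
+
+  // types/Movie.d.ts
+  interface Movie {
+    id?: number;
+    title: string;
+  }
+  export { Movie };
+
+  // global.d.ts
+  import { Entity } from '@platformatic/sql-mapper';
+  import { Movie } from './types/Movie';
+
+  declare module '@platformatic/sql-mapper' {
+    interface Entities {
+      movie: Entity<Movie>,
+    }
+  }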
+
+To add type support to your plugins, you need to install some additional
+dependencies. To do this, copy and run the `npm install` command that
+`platformatic db types` suggests.
+
+Here is an example of a Platformatic `plugin.js` with JSDoc support.
+You can use it to add autocompletion to your code.
+
+/// <reference path="./global.d.ts" />
+'use strict'
+
+/** @param {import('fastify').FastifyInstance} app */
+module.exports = async function (app) {
+ app.get('/movie', async () => {
+ const movies = await app.platformatic.entities.movie.find({
+ where: { title: { eq: 'The Hitchhiker\'s Guide to the Galaxy' } }
+ })
+ return movies[0].id
+ })
+}
+
+If not specified, the configuration will be loaded from
+`platformatic.db.json`, `platformatic.db.yml`, or `platformatic.db.toml` in the current directory.
+You can find more details about the configuration format at:
+https://oss.platformatic.dev/docs/reference/configuration.
diff --git a/packages/db/index.js b/packages/db/index.js
new file mode 100644
index 0000000000..d9d639ebd1
--- /dev/null
+++ b/packages/db/index.js
@@ -0,0 +1,158 @@
+'use strict'
+
+const core = require('@platformatic/db-core')
+const auth = require('@platformatic/db-authorization')
+const dashboard = require('@platformatic/db-dashboard')
+const { start } = require('@fastify/restartable')
+const isolate = require('fastify-isolate')
+const underPressure = require('@fastify/under-pressure')
+
+const { isKeyEnabledInConfig } = require('./lib/helper')
+const { schema } = require('./lib/schema')
+const ConfigManager = require('./lib/config.js')
+
+function deepmergeArray (options) {
+ const deepmerge = options.deepmerge
+ const clone = options.clone
+ return function (target, source) {
+ let i = 0
+ const sl = source.length
+ const il = Math.max(target.length, source.length)
+ const result = new Array(il)
+ for (i = 0; i < il; ++i) {
+ if (i < sl) {
+ result[i] = deepmerge(target[i], source[i])
+ /* c8 ignore next 3 */
+ } else {
+ result[i] = clone(target[i])
+ }
+ }
+ return result
+ }
+}
+
+const deepmerge = require('@fastify/deepmerge')({ all: true, mergeArray: deepmergeArray })
+
+function createServerConfig (config) {
+ // convert the config file to a new structure
+ // to make @fastify/restartable happy
+ const serverConfig = Object.assign({ ...config.server }, config)
+ delete serverConfig.server
+ return serverConfig
+}
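+
+// Example (a sketch): the `server` block is hoisted to the top level so that
+// @fastify/restartable receives the Fastify/listen options directly:
+//
+//   createServerConfig({ server: { hostname: '127.0.0.1', port: 3042 }, core: { /* ... */ } })
+//   // => { hostname: '127.0.0.1', port: 3042, core: { /* ... */ } }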
+
+async function platformaticDB (app, opts) {
+ if (opts.migrations && opts.migrations.autoApply !== false) {
+ app.log.debug({ migrations: opts.migrations }, 'running migrations')
+ const { execute } = await import('./lib/migrate.mjs')
+ await execute(app.log, { config: opts.configFileLocation }, opts)
+ }
+
+ app.register(require('./_admin'), { ...opts, prefix: '_admin' })
+ if (isKeyEnabledInConfig('dashboard', opts) && opts.dashboard.enabled) {
+ await app.register(dashboard, {
+ dashboardAtRoot: opts.dashboard.rootPath || true
+ })
+ }
+ app.register(core, opts.core)
+
+ if (opts.authorization) {
+ app.register(auth, opts.authorization)
+ }
+
+ // Metrics plugin
+ if (isKeyEnabledInConfig('metrics', opts)) {
+ app.register(require('./lib/metrics-plugin'), opts.metrics)
+ }
+
+ if (opts.plugin) {
+ app.log.debug({ plugin: opts.plugin }, 'loading plugin')
+ await app.register(isolate, {
+ ...opts.plugin,
+ customizeGlobalThis (_globalThis) {
+ // Taken from https://github.com/nodejs/undici/blob/fa9fd9066569b6357acacffb806aa804b688c9d8/lib/global.js#L5
+ const globalDispatcher = Symbol.for('undici.globalDispatcher.1')
+ const dispatcher = globalThis[globalDispatcher]
+ if (dispatcher) {
+ _globalThis[globalDispatcher] = dispatcher
+ }
+ }
+ })
+ }
+
+ // Enable CORS
+ if (opts.cors) {
+ app.register(require('@fastify/cors'), opts.cors)
+ }
+ if (isKeyEnabledInConfig('healthCheck', opts)) {
+ app.register(underPressure, {
+ exposeStatusRoute: '/status',
+ healthCheckInterval: opts.healthCheck.interval !== undefined ? opts.healthCheck.interval : 5000,
+ healthCheck: async function (serverInstance) {
+ const { db, sql } = serverInstance.platformatic
+ try {
+ await db.query(sql`SELECT 1`)
+ return true
+ } catch (err) {
+ app.log.warn({ err }, 'Healthcheck failed')
+ return false
+ }
+ }
+ })
+ }
+ if (!app.hasRoute({ url: '/', method: 'GET' })) {
+ app.register(require('./lib/root-endpoint'), opts)
+ }
+ await app
+}
+
+platformaticDB[Symbol.for('skip-override')] = true
+
+async function buildServer (options) {
+ if (!options.configManager) {
+ // instantiate a new config manager from current options
+ const cm = new ConfigManager({
+ source: { ...options },
+ schema
+ })
+ await cm.parseAndValidate()
+ options = deepmerge({}, cm.current, options)
+ options.configManager = cm
+ }
+ const serverConfig = createServerConfig(options)
+
+ serverConfig.originalConfig = options
+ serverConfig.app = platformaticDB
+ const handler = await start(serverConfig)
+
+ Object.defineProperty(handler, 'url', {
+ get () {
+ const address = handler.address
+ const port = handler.port
+ const url = `http://${address}:${port}`
+ return url
+ }
+ })
+
+ const _restart = handler.restart
+
+ handler.restart = (opts) => {
+ // Ignore because not tested on Windows
+ // TODO: remove the ignore, we should be testing
+ // this on Windows
+ /* c8 ignore next 5 */
+ if (opts) {
+ opts = createServerConfig(opts)
+ opts.app = platformaticDB
+ return _restart(opts)
+ }
+ return _restart()
+ }
+
+ return handler
+}
+
+module.exports.buildServer = buildServer
+module.exports.schema = schema
+module.exports.createServerConfig = createServerConfig
+module.exports.platformaticDB = platformaticDB
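+
+// Example usage (a sketch; the connection string and port are illustrative):
+//
+//   const { buildServer } = require('@platformatic/db')
+//
+//   async function main () {
+//     const server = await buildServer({
+//       server: { hostname: '127.0.0.1', port: 3042 },
+//       core: { connectionString: 'sqlite://./db.sqlite' }
+//     })
+//     await server.listen()
+//     console.log('server listening at', server.url)
+//   }
+//
+//   main()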
diff --git a/packages/db/lib/config.js b/packages/db/lib/config.js
new file mode 100644
index 0000000000..44542dd796
--- /dev/null
+++ b/packages/db/lib/config.js
@@ -0,0 +1,63 @@
+'use strict'
+
+const ConfigManager = require('@platformatic/config')
+const { dirname, resolve } = require('path')
+const { schema } = require('./schema')
+const { computeSQLiteIgnores } = require('./utils')
+
+class DBConfigManager extends ConfigManager {
+ constructor (opts) {
+ super({
+ ...opts,
+ schema,
+ schemaOptions: {
+ useDefaults: true,
+ coerceTypes: true,
+ allErrors: true
+ },
+ envWhitelist: ['PORT', 'DATABASE_URL', ...(opts.envWhitelist || [])]
+ })
+ }
+
+ _fixRelativePath (path) {
+ return resolve(dirname(this.fullPath), path)
+ }
+
+ _transformConfig () {
+ const dirOfConfig = dirname(this.fullPath)
+ if (this.current.core && this.current.core.connectionString.indexOf('sqlite') === 0) {
+ const originalSqlitePath = this.current.core.connectionString.replace('sqlite://', '')
+ const sqliteFullPath = resolve(dirOfConfig, originalSqlitePath)
+ const ignores = computeSQLiteIgnores(sqliteFullPath, dirOfConfig)
+ this.current.core.connectionString = 'sqlite://' + sqliteFullPath
+ if (!this.watchIgnore.includes(ignores[0])) { // if restarted, this array may include already the file
+ this.watchIgnore.push(...ignores)
+ }
+ }
+
+ // Ignore the ESM jumpfile created by fastify-isolate
+ this.watchIgnore.push('.esm*')
+
+ // relative-to-absolute migrations path
+ if (this.current.migrations) {
+ this.current.migrations.dir = this._fixRelativePath(this.current.migrations.dir)
+ this.current.migrations.table = this.current.migrations.table || 'versions'
+ }
+
+ // relative-to-absolute plugin path
+ if (this.current.plugin) {
+ this.current.plugin.path = this._fixRelativePath(this.current.plugin.path)
+ }
+
+ if (this.current.migrations && this.current.core) {
+ // TODO remove the ignores
+ /* c8 ignore next 4 */
+ this.current.core.ignore = this.current.core.ignore || {}
+ this.current.core.ignore = Object.assign({}, {
+ [this.current.migrations.table || 'versions']: true
+ }, this.current.core.ignore)
+ }
+ }
+}
+
+module.exports = DBConfigManager
diff --git a/packages/db/lib/errors.mjs b/packages/db/lib/errors.mjs
new file mode 100644
index 0000000000..e12209e33f
--- /dev/null
+++ b/packages/db/lib/errors.mjs
@@ -0,0 +1,18 @@
+class MigrateError extends Error {
+ constructor (message) {
+ super(message)
+ this.name = 'MigrateError'
+ }
+}
+
+class SeedError extends Error {
+ constructor (message) {
+ super(message)
+ this.name = 'SeedError'
+ }
+}
+
+export {
+ MigrateError,
+ SeedError
+}
diff --git a/packages/db/lib/gen-schema.mjs b/packages/db/lib/gen-schema.mjs
new file mode 100644
index 0000000000..86d4f0bcea
--- /dev/null
+++ b/packages/db/lib/gen-schema.mjs
@@ -0,0 +1,54 @@
+import pino from 'pino'
+import pretty from 'pino-pretty'
+import Fastify from 'fastify'
+import graphql from 'graphql'
+import loadConfig from './load-config.mjs'
+import { createServerConfig, platformaticDB } from '../index.js'
+
+async function buildServer (_args, onServer) {
+ const logger = pino(pretty({
+ translateTime: 'SYS:HH:MM:ss',
+ ignore: 'hostname,pid',
+ minimumLevel: 'error'
+ }))
+
+ try {
+ const { configManager } = await loadConfig({}, _args)
+
+ await configManager.parseAndValidate()
+ const config = configManager.current
+ config.logger = logger
+
+ const serverConfig = createServerConfig(config)
+ serverConfig.originalConfig = config
+
+ const app = Fastify(serverConfig)
+ app.register(platformaticDB, serverConfig)
+
+ await app.ready()
+
+ await onServer(app)
+ /* c8 ignore next 4 */
+ } catch (err) {
+ logger.error(err)
+ process.exit(1)
+ }
+}
+
+function printGraphQLSchema (_args) {
+ buildServer(_args, async function (app) {
+ const schema = graphql.printSchema(app.graphql.schema)
+ console.log(schema)
+ await app.close()
+ })
+}
+
+function printOpenAPISchema (_args) {
+ buildServer(_args, async function (app) {
+ const schema = app.swagger()
+ console.log(JSON.stringify(schema, null, 2))
+ await app.close()
+ })
+}
+
+export { printGraphQLSchema, printOpenAPISchema }
diff --git a/packages/db/lib/gen-types.mjs b/packages/db/lib/gen-types.mjs
new file mode 100644
index 0000000000..6413fdbcf3
--- /dev/null
+++ b/packages/db/lib/gen-types.mjs
@@ -0,0 +1,162 @@
+import { resolve, join, dirname } from 'path'
+import { createRequire } from 'module'
+import { access, mkdir, writeFile, readFile, readdir, unlink } from 'fs/promises'
+import { join as desmJoin } from 'desm'
+import pino from 'pino'
+import pretty from 'pino-pretty'
+import dtsgenerator, { parseSchema } from 'dtsgenerator'
+import { mapSQLEntityToJSONSchema } from '@platformatic/sql-json-schema-mapper'
+import { setupDB } from './utils.js'
+import loadConfig from './load-config.mjs'
+
+const TYPES_FOLDER_PATH = resolve(process.cwd(), 'types')
+
+const GLOBAL_TYPES_TEMPLATE = `\
+import { Entity } from '@platformatic/sql-mapper';
+ENTITIES_IMPORTS_PLACEHOLDER
+
+declare module '@platformatic/sql-mapper' {
+ interface Entities {
+ ENTITIES_DEFINITION_PLACEHOLDER
+ }
+}
+`
+
+async function isFileAccessible (filename) {
+ try {
+ await access(filename)
+ return true
+ } catch (err) {
+ return false
+ }
+}
+
+async function removeAllFilesFromDir (dir) {
+ const files = await readdir(dir)
+ await Promise.all(files.map((file) => unlink(join(dir, file))))
+}
+
+async function generateEntityType (entity) {
+ const jsonSchema = mapSQLEntityToJSONSchema(entity)
+ jsonSchema.id = jsonSchema.$id
+
+ const tsCode = await dtsgenerator.default({ contents: [parseSchema(jsonSchema)] })
+ return tsCode + `\nexport { ${entity.name} };\n`
+}
+
+async function generateGlobalTypes (entities, config) {
+ const globalTypesImports = []
+ const globalTypesInterface = []
+
+ if (config.core.graphiql) {
+ globalTypesImports.push('import graphqlPlugin from \'@platformatic/sql-graphql\';')
+ }
+
+ for (const [key, entity] of Object.entries(entities)) {
+ globalTypesImports.push(`import { ${entity.name} } from './types/${entity.name}'`)
+ globalTypesInterface.push(`${key}: Entity<${entity.name}>,`)
+ }
+
+ return GLOBAL_TYPES_TEMPLATE
+ .replace('ENTITIES_IMPORTS_PLACEHOLDER', globalTypesImports.join('\n'))
+ .replace('ENTITIES_DEFINITION_PLACEHOLDER', globalTypesInterface.join('\n '))
+}
+
+async function getDependencyVersion (dependencyName) {
+ const require = createRequire(import.meta.url)
+ const pathToPackageJson = join(dirname(require.resolve(dependencyName)), 'package.json')
+ const packageJsonFile = await readFile(pathToPackageJson, 'utf-8')
+ const packageJson = JSON.parse(packageJsonFile)
+ return packageJson.version
+}
+
+async function getPlatformaticPackageVersion (packageFolderName) {
+ const pathToPackageJson = desmJoin(import.meta.url, '..', '..', packageFolderName, 'package.json')
+ const packageJsonFile = await readFile(pathToPackageJson, 'utf-8')
+ const packageJson = JSON.parse(packageJsonFile)
+ return packageJson.version
+}
+
+function hasDependency (packageJson, dependencyName) {
+ const dependencies = packageJson.dependencies || {}
+ const devDependencies = packageJson.devDependencies || {}
+
+ return dependencies[dependencyName] !== undefined ||
+ devDependencies[dependencyName] !== undefined
+}
+
+async function checkForDependencies (logger, args, config) {
+ const requiredDependencies = {}
+ requiredDependencies.fastify = await getDependencyVersion('fastify')
+ requiredDependencies['@platformatic/sql-mapper'] = await getPlatformaticPackageVersion('sql-mapper')
+
+ if (config.core.graphiql) {
+ requiredDependencies['@platformatic/sql-graphql'] = await getPlatformaticPackageVersion('sql-graphql')
+ }
+
+ const packageJsonPath = resolve(process.cwd(), 'package.json')
+ const isPackageJsonExists = await isFileAccessible(packageJsonPath)
+
+ if (isPackageJsonExists) {
+ const packageJsonFile = await readFile(packageJsonPath, 'utf-8')
+ const packageJson = JSON.parse(packageJsonFile)
+
+ let allRequiredDependenciesInstalled = true
+ for (const dependencyName in requiredDependencies) {
+ if (!hasDependency(packageJson, dependencyName)) {
+ allRequiredDependenciesInstalled = false
+ break
+ }
+ }
+
+ if (allRequiredDependenciesInstalled) return
+ }
+
+ let command = 'npm i --save-dev'
+ for (const [depName, depVersion] of Object.entries(requiredDependencies)) {
+ command += ` ${depName}@${depVersion}`
+ }
+ logger.warn(`Please run \`${command}\` to install types dependencies.`)
+}
+
+async function execute (logger, args, config) {
+ const { db, entities } = await setupDB(logger, config.core)
+
+ const isTypeFolderExists = await isFileAccessible(TYPES_FOLDER_PATH)
+ if (isTypeFolderExists) {
+ await removeAllFilesFromDir(TYPES_FOLDER_PATH)
+ } else {
+ await mkdir(TYPES_FOLDER_PATH)
+ }
+
+ for (const entity of Object.values(entities)) {
+ logger.info(`Generating types for ${entity.name}`)
+
+ const types = await generateEntityType(entity)
+
+ const pathToFile = join(TYPES_FOLDER_PATH, entity.name + '.d.ts')
+ await writeFile(pathToFile, types)
+ }
+
+ const globalTypes = await generateGlobalTypes(entities, config)
+ await writeFile(join(TYPES_FOLDER_PATH, '..', 'global.d.ts'), globalTypes)
+
+ await db.dispose()
+}
+
+async function generateTypes (_args) {
+ const logger = pino(pretty({
+ translateTime: 'SYS:HH:MM:ss',
+ ignore: 'hostname,pid'
+ }))
+
+ const { configManager, args } = await loadConfig({}, _args)
+
+ await configManager.parseAndValidate()
+ const config = configManager.current
+
+ await execute(logger, args, config)
+ await checkForDependencies(logger, args, config)
+}
+
+export { execute, generateTypes, checkForDependencies }
diff --git a/packages/db/lib/helper.js b/packages/db/lib/helper.js
new file mode 100644
index 0000000000..4c13f3b8e6
--- /dev/null
+++ b/packages/db/lib/helper.js
@@ -0,0 +1,13 @@
+'use strict'
+
+function isKeyEnabledInConfig (key, config) {
+ if (typeof config[key] === 'boolean') {
+ return config[key]
+ }
+ if (config[key] === undefined) {
+ return false
+ }
+ return true
+}
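+
+// Examples (illustrative):
+//   isKeyEnabledInConfig('metrics', { metrics: true })           // => true
+//   isKeyEnabledInConfig('metrics', { metrics: { port: 9090 } }) // => true
+//   isKeyEnabledInConfig('metrics', {})                          // => false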
+
+module.exports.isKeyEnabledInConfig = isKeyEnabledInConfig
diff --git a/packages/db/lib/init.mjs b/packages/db/lib/init.mjs
new file mode 100644
index 0000000000..dca4ebe076
--- /dev/null
+++ b/packages/db/lib/init.mjs
@@ -0,0 +1,125 @@
+import { access, writeFile, mkdir } from 'fs/promises'
+import { join } from 'path'
+import pino from 'pino'
+import pretty from 'pino-pretty'
+import parseArgs from 'minimist'
+import { checkForDependencies } from './gen-types.mjs'
+import loadConfig from './load-config.mjs'
+
+const connectionStrings = {
+ postgres: 'postgres://postgres:postgres@localhost:5432/postgres',
+ sqlite: 'sqlite://./db.sqlite',
+ mysql: 'mysql://root@localhost:3306/graph',
+ mysql8: 'mysql://root@localhost:3308/graph',
+ mariadb: 'mysql://root@localhost:3307/graph'
+}
+
+const moviesMigration = `
+-- Add SQL in this file to create the database tables for your API
+CREATE TABLE IF NOT EXISTS movies (
+ id INTEGER PRIMARY KEY,
+ title TEXT NOT NULL
+);
+`
+
+function generateConfig (hostname, port, database, migrations, types) {
+ const logger = { level: 'info' }
+ const connectionString = connectionStrings[database]
+
+ const config = {
+ server: { logger, hostname, port },
+ core: { connectionString, graphiql: true },
+ migrations: { dir: migrations }
+ }
+
+ if (types === true) {
+ config.types = {
+ autogenerate: true
+ }
+ }
+
+ return config
+}
+
+async function isFileAccessible (filename) {
+ try {
+ await access(filename)
+ return true
+ } catch (err) {
+ return false
+ }
+}
+
+async function init (_args) {
+ const logger = pino(pretty({
+ translateTime: 'SYS:HH:MM:ss',
+ ignore: 'hostname,pid'
+ }))
+
+ const args = parseArgs(_args, {
+ default: {
+ hostname: '127.0.0.1',
+ port: 3042,
+ database: 'sqlite',
+ migrations: './migrations',
+ types: true
+ },
+ alias: {
+ h: 'hostname',
+ p: 'port',
+ db: 'database',
+ m: 'migrations',
+ t: 'types'
+ }
+ })
+
+ const { hostname, port, database, migrations, types } = args
+
+ const configFileNames = [
+ 'platformatic.db.json',
+ 'platformatic.db.json5',
+ 'platformatic.db.yaml',
+ 'platformatic.db.yml',
+ 'platformatic.db.toml'
+ ]
+
+ const configFilesAccessibility = await Promise.all(configFileNames.map(isFileAccessible))
+ const accessibleConfigFilename = configFileNames.find((value, index) => configFilesAccessibility[index])
+
+ if (accessibleConfigFilename === undefined) {
+ const config = generateConfig(hostname, port, database, migrations, types)
+ await writeFile('platformatic.db.json', JSON.stringify(config, null, 2))
+ logger.info('Configuration file platformatic.db.json successfully created.')
+ } else {
+ logger.info(`Configuration file ${accessibleConfigFilename} found, skipping creation of configuration file.`)
+ }
+
+ const { configManager } = await loadConfig({}, _args)
+ await configManager.parseAndValidate()
+ const config = configManager.current
+
+ const migrationsFolderName = migrations
+ const isMigrationFolderExists = await isFileAccessible(migrationsFolderName)
+ if (!isMigrationFolderExists) {
+ await mkdir(migrationsFolderName)
+ logger.info(`Migrations folder ${migrationsFolderName} successfully created.`)
+ } else {
+ logger.info(`Migrations folder ${migrationsFolderName} found, skipping creation of migrations folder.`)
+ }
+
+ const migrationFileName = '001.do.sql'
+ const migrationFilePath = join(migrationsFolderName, migrationFileName)
+ const isMigrationFileExists = await isFileAccessible(migrationFilePath)
+ if (!isMigrationFileExists) {
+ await writeFile(migrationFilePath, moviesMigration)
+ logger.info(`Migration file ${migrationFileName} successfully created.`)
+ } else {
+ logger.info(`Migration file ${migrationFileName} found, skipping creation of migration file.`)
+ }
+
+ if (types === true) {
+ await checkForDependencies(logger, args, config)
+ }
+}
+
+export { init }
diff --git a/packages/db/lib/load-config.mjs b/packages/db/lib/load-config.mjs
new file mode 100644
index 0000000000..b731eaad84
--- /dev/null
+++ b/packages/db/lib/load-config.mjs
@@ -0,0 +1,58 @@
+import parseArgs from 'minimist'
+import { access } from 'fs/promises'
+import { resolve } from 'path'
+import ConfigManager from './config.js'
+import deepmerge from '@fastify/deepmerge'
+
+async function loadConfig (minimistConfig, _args, configOpts = {}) {
+ const args = parseArgs(_args, deepmerge({ all: true })({
+ string: ['allow-env'],
+ default: {
+ config: resolve(process.cwd(), 'platformatic.db.json'),
+ allowEnv: '' // The default is set in ConfigManager
+ },
+ alias: {
+ v: 'version',
+ c: 'config',
+ allowEnv: ['allow-env', 'E']
+ }
+ }, minimistConfig))
+ try {
+ await access(args.config)
+ } catch (err) {
+ console.error('Missing config file')
+ process.exit(1)
+ }
+ let watchIgnore = []
+ // Apparently C8 cannot detect these three lines on Windows
+ /* c8 ignore next 3 */
+ if (args['watch-ignore']) {
+ watchIgnore = args['watch-ignore'].split(',')
+ }
+ const configManager = new ConfigManager({
+ source: args.config,
+ envWhitelist: [...args.allowEnv.split(',')],
+ watchIgnore,
+ ...configOpts
+ })
+
+ const parsingResult = await configManager.parse()
+ if (!parsingResult) {
+ printConfigValidationErrors(configManager)
+ process.exit(1)
+ }
+
+ return { configManager, args }
+}
+
+function printConfigValidationErrors (configManager) {
+ const tabularData = configManager.validationErrors.map((err) => {
+ return {
+ path: err.path,
+ message: err.message
+ }
+ })
+ console.table(tabularData, ['path', 'message'])
+}
+
+export default loadConfig
diff --git a/packages/db/lib/metrics-plugin.js b/packages/db/lib/metrics-plugin.js
new file mode 100644
index 0000000000..a5a1daaf94
--- /dev/null
+++ b/packages/db/lib/metrics-plugin.js
@@ -0,0 +1,88 @@
+'use strict'
+
+const fp = require('fastify-plugin')
+const metricsPlugin = require('fastify-metrics')
+const basicAuth = require('@fastify/basic-auth')
+const Fastify = require('fastify')
+const http = require('http')
+
+// This is a global server to match global
+// prometheus. It's an antipattern, so do
+// not use it elsewhere.
+let server = null
+let handler = null
+
+module.exports = fp(async function (app, opts) {
+ let port = 9090
+ let host = '0.0.0.0'
+ if (typeof opts === 'object') {
+ if (undefined !== opts.port) {
+ port = opts.port
+ }
+ /* c8 ignore next 3 */
+ if (undefined !== opts.hostname) {
+ host = opts.hostname
+ }
+ }
+ app.register(metricsPlugin, {
+ defaultMetrics: { enabled: true },
+ endpoint: null,
+ name: 'metrics',
+ routeMetrics: { enabled: true },
+ clearRegisterOnInit: true
+ })
+
+ if (server && server.address().port !== port) {
+ server.close()
+ server = null
+ handler = null
+ }
+
+ if (!server) {
+ server = http.createServer()
+ server.listen(port, host)
+ server.unref()
+ }
+
+ const promServer = Fastify({
+ name: 'Prometheus server',
+ serverFactory: (_handler) => {
+ if (handler) {
+ server.off('request', handler)
+ }
+ server.on('request', _handler)
+ handler = _handler
+ return server
+ },
+ logger: app.log.child({ name: 'prometheus' })
+ })
+
+ const metricsEndpointOptions = {
+ url: '/metrics',
+ method: 'GET',
+ logLevel: 'info',
+ handler: async (_, reply) => {
+ reply.type('text/plain')
+ return await app.metrics.client.register.metrics()
+ }
+ }
+ if (opts.auth) {
+ const { username, password } = opts.auth
+ await promServer.register(basicAuth, {
+ validate: function (user, pass, req, reply, done) {
+ if (username !== user || password !== pass) {
+ return reply.code(401).send({ message: 'Unauthorized' })
+ }
+ return done()
+ }
+ })
+ metricsEndpointOptions.onRequest = promServer.basicAuth
+ }
+ promServer.route(metricsEndpointOptions)
+
+ app.addHook('onClose', async (instance) => {
+ await promServer.close()
+ })
+
+ await promServer.ready()
+})
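+
+// Example (a sketch) of the `metrics` section in platformatic.db.json that
+// enables this plugin with basic auth (values are illustrative):
+//
+//   "metrics": {
+//     "port": 9090,
+//     "hostname": "0.0.0.0",
+//     "auth": { "username": "prometheus", "password": "changeme" }
+//   }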
diff --git a/packages/db/lib/migrate.mjs b/packages/db/lib/migrate.mjs
new file mode 100755
index 0000000000..4926116421
--- /dev/null
+++ b/packages/db/lib/migrate.mjs
@@ -0,0 +1,48 @@
+#! /usr/bin/env node
+
+import { execute } from './migrator.mjs'
+import isMain from 'es-main'
+import pino from 'pino'
+import pretty from 'pino-pretty'
+import { MigrateError } from './errors.mjs'
+import loadConfig from './load-config.mjs'
+import { execute as generateTypes, checkForDependencies } from './gen-types.mjs'
+
+async function migrate (_args) {
+ const logger = pino(pretty({
+ translateTime: 'SYS:HH:MM:ss',
+ ignore: 'hostname,pid'
+ }))
+
+ try {
+ const { configManager, args } = await loadConfig({
+ string: ['to'],
+ alias: {
+ t: 'to'
+ }
+ }, _args)
+
+ await configManager.parseAndValidate()
+ const config = configManager.current
+
+ await execute(logger, args, config)
+
+ if (config.types && config.types.autogenerate) {
+ await generateTypes(logger, args, config)
+ await checkForDependencies(logger, args, config)
+ }
+ } catch (err) {
+ if (err instanceof MigrateError) {
+ logger.error(err.message)
+ process.exit(1)
+ }
+ /* c8 ignore next 2 */
+ throw err
+ }
+}
+
+export { migrate, execute }
+
+if (isMain(import.meta)) {
+ await migrate(process.argv.splice(2))
+}
diff --git a/packages/db/lib/migrator.mjs b/packages/db/lib/migrator.mjs
new file mode 100644
index 0000000000..9867774706
--- /dev/null
+++ b/packages/db/lib/migrator.mjs
@@ -0,0 +1,66 @@
+import { join, basename } from 'path'
+import Postgrator from 'postgrator'
+import { MigrateError } from './errors.mjs'
+import { setupDB } from './utils.js'
+import { stat } from 'fs/promises'
+async function execute (logger, args, config) {
+ const migrationsConfig = config.migrations
+ if (!migrationsConfig || !migrationsConfig.dir) {
+ throw new MigrateError('Missing migrations in config file')
+ }
+ // Check migrations directory exists
+ await checkMigrationsDirectoryExists(migrationsConfig.dir)
+
+ const { db, sql, driver } = await setupDB(logger, config.core)
+
+ const database = driver !== 'sqlite3' ? new URL(config.core.connectionString).pathname.replace(/^\//, '') : ''
+ try {
+ const migrationsFolder = join(migrationsConfig.dir, '*')
+ logger.debug(`Migrating from ${migrationsFolder}`)
+ // Create postgrator instance
+ const postgrator = new Postgrator({
+ migrationPattern: migrationsFolder,
+ driver,
+ database,
+ schemaTable: migrationsConfig.table,
+ execQuery: async (query) => {
+ const res = await db.query(sql`${sql.__dangerous__rawValue(query)}`)
+ return {
+ rows: res
+ }
+ },
+ validateChecksums: migrationsConfig.validateChecksums === true
+ })
+ if (migrationsConfig.validateChecksums === true) {
+ postgrator.on(
+ 'validation-started',
+ (migration) => logger.info(`verifying checksum of migration ${basename(migration.filename)}`)
+ )
+ }
+ postgrator.on(
+ 'migration-started',
+ (migration) => logger.info(`running ${basename(migration.filename)}`)
+ )
+ postgrator.on(
+ 'migration-finished',
+ (migration) => logger.debug(`completed ${basename(migration.filename)}`)
+ )
+
+ await postgrator.migrate(args.to)
+ } catch (error) {
+ logger.error(error)
+ }
+
+ // Once done migrating, close your connection.
+ await db.dispose()
+}
+async function checkMigrationsDirectoryExists (dirName) {
+ try {
+ await stat(dirName)
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ throw new MigrateError(`Migrations directory ${dirName} does not exist.`)
+ }
+ }
+}
+export { execute }
diff --git a/packages/db/lib/root-endpoint/index.js b/packages/db/lib/root-endpoint/index.js
new file mode 100644
index 0000000000..255863c19e
--- /dev/null
+++ b/packages/db/lib/root-endpoint/index.js
@@ -0,0 +1,21 @@
+'use strict'
+const path = require('path')
+const fastifyStatic = require('@fastify/static')
+const userAgentParser = require('ua-parser-js')
+
+module.exports = async (app, opts) => {
+ app.register(fastifyStatic, {
+ root: path.join(__dirname, 'public')
+ })
+ // root endpoint
+ app.get('/', (req, reply) => {
+ const uaString = req.headers['user-agent']
+ if (uaString) {
+ const parsed = userAgentParser(uaString)
+ if (parsed.browser.name !== undefined) {
+ return reply.sendFile('./index.html')
+ }
+ }
+ return { message: 'Welcome to Platformatic! Please visit https://oss.platformatic.dev' }
+ })
+}
diff --git a/packages/db/lib/root-endpoint/public/index.html b/packages/db/lib/root-endpoint/public/index.html
new file mode 100644
index 0000000000..d9bd293646
--- /dev/null
+++ b/packages/db/lib/root-endpoint/public/index.html
@@ -0,0 +1,99 @@
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8">
+    <title>Platformatic DB</title>
+  </head>
+  <body>
+    <h1>Welcome to Platformatic DB</h1>
+  </body>
+</html>
diff --git a/packages/db/lib/root-endpoint/public/logo-512x512.png b/packages/db/lib/root-endpoint/public/logo-512x512.png
new file mode 100644
index 0000000000..df0960e702
Binary files /dev/null and b/packages/db/lib/root-endpoint/public/logo-512x512.png differ
diff --git a/packages/db/lib/schema.js b/packages/db/lib/schema.js
new file mode 100644
index 0000000000..f8b387ed7a
--- /dev/null
+++ b/packages/db/lib/schema.js
@@ -0,0 +1,379 @@
+'use strict'
+
+const cors = {
+ type: 'object',
+ $comment: 'See https://github.com/fastify/fastify-cors',
+ properties: {
+ origin: {
+ anyOf: [
+ { type: 'boolean' },
+ { type: 'string' },
+ {
+ type: 'array',
+ items: {
+ type: 'string'
+ }
+ }
+ ]
+ },
+ methods: {
+ type: 'array',
+ items: {
+ type: 'string'
+ }
+ },
+ allowedHeaders: {
+ type: 'string',
+ description: 'Comma separated string of allowed headers.'
+ },
+ exposedHeaders: {
+ anyOf: [
+ {
+ type: 'array',
+ items: {
+ type: 'string'
+ }
+ },
+ {
+ type: 'string',
+ description: 'Comma separated string of exposed headers.'
+ }
+ ]
+ },
+ credentials: {
+ type: 'boolean'
+ },
+ maxAge: {
+ type: 'integer'
+ },
+ preflightContinue: {
+ type: 'boolean',
+ default: false
+ },
+ optionsSuccessStatus: {
+ type: 'integer',
+ default: 204
+ },
+ preflight: {
+ type: 'boolean',
+ default: true
+ },
+ strictPreflight: {
+ type: 'boolean',
+ default: true
+ },
+ hideOptionsRoute: {
+ type: 'boolean',
+ default: true
+ }
+ }
+
+}
+const server = {
+ $id: 'https://schemas.platformatic.dev/db/server',
+ type: 'object',
+ properties: {
+ // TODO add support for level
+ hostname: {
+ type: 'string'
+ },
+ port: {
+ anyOf: [
+ { type: 'string' },
+ { type: 'integer' }
+ ]
+ },
+ healthCheck: {
+ anyOf: [
+ { type: 'boolean' },
+ {
+ type: 'object',
+ properties: {
+ interval: { type: 'integer' }
+ }
+ }
+ ]
+ },
+ cors
+ },
+ required: ['hostname', 'port']
+}
+
+const core = {
+ $id: 'https://schemas.platformatic.dev/db/core',
+ type: 'object',
+ properties: {
+ connectionString: {
+ type: 'string'
+ },
+ graphql: {
+ anyOf: [{
+ type: 'boolean'
+ }, {
+ type: 'object',
+ properties: {
+ graphiql: {
+ type: 'boolean'
+ }
+ }
+ }]
+ },
+ openapi: {
+ anyOf: [{
+ type: 'boolean'
+ }, {
+ type: 'object',
+ properties: {
+ info: {
+ type: 'object',
+ properties: {
+ title: { type: 'string' },
+ description: { type: 'string' },
+ version: { type: 'string' }
+ }
+ }
+ }
+ }]
+ },
+ ignore: {
+ type: 'object',
+ // TODO add support for column-level ignore
+ properties: {
+ key: {
+ type: 'string',
+ description: 'Non-entity table name.'
+ },
+ value: {
+ type: 'boolean'
+ }
+ }
+ }
+ },
+ required: ['connectionString']
+}
+
+const authorization = {
+ $id: 'https://schemas.platformatic.dev/db/authorization',
+ type: 'object',
+ properties: {
+ adminSecret: {
+ type: 'string',
+ description: 'The password to be used to log in to the dashboard, to access routes under the /_admin prefix, and for admin access to REST and GraphQL endpoints via the X-PLATFORMATIC-ADMIN-SECRET header.'
+ },
+ roleKey: {
+ type: 'string',
+ description: 'The user metadata key to store user roles',
+ default: 'X-PLATFORMATIC-ROLE'
+ },
+ anonymousRole: {
+ type: 'string',
+ description: 'The role name for anonymous users',
+ default: 'anonymous'
+ },
+ jwt: {
+ type: 'object',
+ properties: {
+ secret: {
+ type: 'string',
+ description: 'the shared secret for JWT'
+ },
+ jwks: {
+ oneOf: [{
+ type: 'boolean'
+ }, {
+ // shall we replicate here all the options in https://github.com/nearform/get-jwks#options
+ type: 'object',
+ additionalProperties: true
+ }]
+ }
+ }
+ },
+ webhook: {
+ type: 'object',
+ properties: {
+ url: {
+ type: 'string',
+ description: 'the webhook url'
+ }
+ }
+ },
+ rules: {
+ type: 'array',
+ items: {
+ type: 'object',
+ properties: {
+ role: {
+ type: 'string',
+ description: 'the role name to match the rule'
+ },
+ entity: {
+ type: 'string',
+ description: 'the DB entity type to which the rule applies'
+ },
+ defaults: {
+ type: 'object',
+ description: 'defaults for entity creation',
+ patternProperties: {
+ '.*': {
+ type: 'string'
+ }
+ }
+ },
+ find: {
+ $ref: '#crud-operation-auth'
+ },
+ save: {
+ $ref: '#crud-operation-auth'
+ },
+ delete: {
+ $ref: '#crud-operation-auth'
+ }
+ },
+ required: ['role', 'entity'],
+ additionalProperties: false
+ }
+ }
+ },
+ additionalProperties: false,
+ $defs: {
+ crudOperationAuth: {
+ $id: '#crud-operation-auth',
+ oneOf: [{
+ type: 'object',
+ description: 'CRUD operation authorization config',
+ properties: {
+ checks: {
+ description: 'checks for the operation',
+ type: 'object',
+ patternProperties: {
+ '.*': {
+ if: {
+ type: 'object'
+ },
+ then: {
+ type: 'object',
+ properties: {
+ eq: { type: 'string' },
+ in: { type: 'string' },
+ nin: { type: 'string' },
+ nen: { type: 'string' },
+ gt: { type: 'string' },
+ gte: { type: 'string' },
+ lt: { type: 'string' },
+ lte: { type: 'string' }
+ },
+ additionalProperties: false
+ },
+ else: {
+ type: 'string'
+ }
+ }
+ }
+ },
+ fields: {
+ type: 'array',
+ description: 'array of enabled fields for the operation',
+ items: {
+ type: 'string'
+ }
+ }
+ },
+ additionalProperties: false
+ }, {
+ type: 'boolean',
+ description: 'true if enabled (with no additional authorization constraints)'
+ }]
+
+ }
+ }
+}
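+
+// Example (a sketch) of an `authorization` section that validates against this
+// schema (the role, entity, and check values are illustrative):
+//
+//   "authorization": {
+//     "adminSecret": "secret",
+//     "rules": [{
+//       "role": "user",
+//       "entity": "page",
+//       "find": true,
+//       "save": { "checks": { "userId": "X-PLATFORMATIC-USER-ID" } },
+//       "delete": false
+//     }]
+//   }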
+
+const dashboard = {
+ $id: 'https://schemas.platformatic.dev/db/dashboard',
+ type: 'object',
+ properties: {
+ rootPath: {
+ type: 'boolean',
+ description: 'Whether the dashboard should be served on / path or not.',
+ default: false
+ }
+ }
+}
+
+const migrations = {
+ $id: 'https://schemas.platformatic.dev/db/migrations',
+ type: 'object',
+ properties: {
+ dir: {
+ type: 'string',
+ description: 'The path to the directory containing the migrations.'
+ },
+ autoApply: {
+ type: 'boolean',
+ description: 'Whether to automatically apply outstanding migrations when the server starts.'
+ }
+ },
+ required: ['dir']
+}
+
+const metrics = {
+ $id: 'https://schemas.platformatic.dev/db/metrics',
+ anyOf: [
+ { type: 'boolean' },
+ {
+ type: 'object',
+ properties: {
+ port: { type: 'integer' },
+ hostname: { type: 'string' },
+ auth: {
+ type: 'object',
+ properties: {
+ username: { type: 'string' },
+ password: { type: 'string' }
+ },
+ required: ['username', 'password']
+ }
+ }
+ }
+ ]
+}
+
+const types = {
+ $id: 'https://schemas.platformatic.dev/db/types',
+ type: 'object',
+ properties: {
+ autogenerate: {
+ type: 'boolean'
+ }
+ }
+}
+
+const platformaticDBschema = {
+ $id: 'https://schemas.platformatic.dev/db',
+ type: 'object',
+ additionalProperties: false,
+ properties: {
+ server,
+ core,
+ dashboard,
+ authorization,
+ migrations,
+ metrics,
+ types,
+ plugin: {
+ type: 'object',
+ properties: {
+ path: {
+ type: 'string'
+ },
+ stopTimeout: {
+ type: 'integer'
+ }
+ },
+ required: ['path']
+ }
+ },
+ required: ['core', 'server']
+}
+
+module.exports.schema = platformaticDBschema
diff --git a/packages/db/lib/seed.mjs b/packages/db/lib/seed.mjs
new file mode 100644
index 0000000000..d4c8e2ddce
--- /dev/null
+++ b/packages/db/lib/seed.mjs
@@ -0,0 +1,61 @@
+import { resolve } from 'path'
+import pino from 'pino'
+import pretty from 'pino-pretty'
+import { access } from 'fs/promises'
+import { setupDB } from './utils.js'
+import { SeedError } from './errors.mjs'
+import { execute as migrator } from './migrator.mjs'
+import { pathToFileURL } from 'url'
+import loadConfig from './load-config.mjs'
+async function execute (logger, args, config) {
+ const { db, sql, entities } = await setupDB(logger, config.core)
+
+ const seedFile = args._[0]
+
+ if (!seedFile) {
+ throw new SeedError('Missing seed file')
+ }
+
+ await access(seedFile)
+
+ logger.info(`seeding from ${seedFile}`)
+ const { default: seed } = await import(pathToFileURL(seedFile))
+
+ await seed({ db, sql, entities })
+ logger.info('seeding complete')
+
+ // Once done seeding, close your connection.
+ await db.dispose()
+}
+
+async function seed (_args) {
+ const logger = pino(pretty({
+ translateTime: 'SYS:HH:MM:ss',
+ ignore: 'hostname,pid'
+ }))
+
+ try {
+ const { configManager, args } = await loadConfig({
+ default: {
+ config: resolve(process.cwd(), 'platformatic.db.json')
+ },
+ alias: {
+ c: 'config'
+ }
+ }, _args)
+ await configManager.parseAndValidate()
+ const config = configManager.current
+
+ await migrator(logger, args, config)
+ await execute(logger, args, config)
+ } catch (err) {
+ if (err instanceof SeedError) {
+ logger.error(err.message)
+ process.exit(1)
+ }
+ /* c8 ignore next 2 */
+ throw err
+ }
+}
+
+export { seed, execute }
diff --git a/packages/db/lib/start.mjs b/packages/db/lib/start.mjs
new file mode 100644
index 0000000000..ef2b62842d
--- /dev/null
+++ b/packages/db/lib/start.mjs
@@ -0,0 +1,108 @@
+import { buildServer } from '../index.js'
+import close from 'close-with-grace'
+import loadConfig from './load-config.mjs'
+
+// TODO make sure coverage is reported for Windows
+// Currently C8 is not reporting it
+/* c8 ignore start */
+async function start (_args) {
+ const { configManager } = await loadConfig({
+ string: ['to']
+ }, _args, { watch: true })
+
+ // Set the logger if not present
+ let logger = configManager.current.server.logger
+ if (!logger) {
+ configManager.current.server.logger = { level: 'info' }
+ logger = configManager.current.server.logger
+ }
+
+ // If TTY use pino-pretty
+ if (process.stdout.isTTY) {
+ if (!logger.transport) {
+ logger.transport = {
+ target: 'pino-pretty'
+ }
+ }
+ }
+
+ // Set the location of the config
+ const server = await buildServer({
+ ...configManager.current,
+ configManager
+
+ })
+ configManager.on('update', (newConfig) => onConfigUpdated(newConfig, server))
+ server.app.platformatic.configManager = configManager
+ server.app.platformatic.config = configManager.current
+
+ await server.listen()
+
+ configManager.on('error', function (err) {
+ server.app.log.error({
+ err
+ }, 'error reloading the configuration')
+ })
+
+ // Ignore from CI because SIGUSR2 is not available
+ // on Windows
+ process.on('SIGUSR2', function () {
+ server.app.log.info('reloading configuration')
+ server.restart()
+ .then(() => {
+ server.app.log.info('restarted')
+ })
+ .catch((err) => {
+ server.app.log.error({
+ err: {
+ message: err.message,
+ stack: err.stack
+ }
+ }, 'failed to restart')
+ })
+ return false
+ })
+
+ close(async ({ signal, err }) => {
+ // Windows does not support trapping signals
+ if (err) {
+ server.app.log.error({
+ err: {
+ message: err.message,
+ stack: err.stack
+ }
+ }, 'exiting')
+ } else if (signal) {
+ server.app.log.info({ signal }, 'received signal')
+ }
+
+ await server.stop()
+ })
+}
+
+async function onConfigUpdated (newConfig, server) {
+ try {
+ server.app.platformatic.config = newConfig
+ server.app.log.info('config changed')
+ server.app.log.trace({ newConfig }, 'new config')
+ await server.restart(newConfig)
+ } catch (err) {
+ // TODO: test this
+ server.app.log.error({
+ err: {
+ message: err.message,
+ stack: err.stack
+ }
+ }, 'failed to reload config')
+ }
+}
+
+export default function (args) {
+ start(args).catch(exit)
+}
+
+function exit (err) {
+ console.error(err)
+ process.exit(1)
+}
+/* c8 ignore stop */
diff --git a/packages/db/lib/utils.js b/packages/db/lib/utils.js
new file mode 100644
index 0000000000..a8e29e264f
--- /dev/null
+++ b/packages/db/lib/utils.js
@@ -0,0 +1,45 @@
+'use strict'
+
+const { connect } = require('@platformatic/db-core')
+const { sep } = require('path')
+async function setupDB (log, config) {
+ const { db, sql, entities } = await connect({ ...config, log })
+ let driver = ''
+
+ // TODO Add tests for multiple databases
+ /* c8 ignore next 11 */
+ if (db.isPg) {
+ driver = 'pg'
+ } else if (db.isMySql) {
+ driver = 'mysql'
+ } else if (db.isMariaDB) {
+ driver = 'mysql'
+ } else if (db.isSQLite) {
+ driver = 'sqlite3'
+ } else {
+ throw new Error('unknown database')
+ }
+ return {
+ db,
+ sql,
+ entities,
+ driver
+ }
+}
+
+function computeSQLiteIgnores (sqliteFullPath, dirOfConfig) {
+ let result = []
+ const journalFullPath = sqliteFullPath + '-journal'
+ // [windows] strip the config directory prefix (and its trailing path separator) to get watcher-relative paths
+ if (sqliteFullPath.indexOf(dirOfConfig) === 0) {
+ const sqliteRelativePath = sqliteFullPath.replace(dirOfConfig + sep, '')
+ const journalRelativePath = journalFullPath.replace(dirOfConfig + sep, '')
+ result = [sqliteRelativePath, journalRelativePath]
+ }
+ return result
+}
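+
+// Example (a sketch, POSIX paths):
+//   computeSQLiteIgnores('/app/db.sqlite', '/app')
+//   // => ['db.sqlite', 'db.sqlite-journal']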
+
+module.exports = {
+ setupDB,
+ computeSQLiteIgnores
+}
diff --git a/packages/db/package.json b/packages/db/package.json
new file mode 100644
index 0000000000..59bff93a88
--- /dev/null
+++ b/packages/db/package.json
@@ -0,0 +1,68 @@
+{
+ "name": "@platformatic/db",
+ "version": "0.0.21",
+ "description": "",
+ "main": "index.js",
+ "scripts": {
+ "test": "standard | snazzy && c8 --100 tap --no-coverage test/*test.js test/*/*test.mjs"
+ },
+ "author": "Matteo Collina ",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/plaformatic/platformatic.git"
+ },
+ "license": "Apache-2.0",
+ "bugs": {
+ "url": "https://github.com/plaformatic/platformatic/issues"
+ },
+ "homepage": "https://github.com/plaformatic/platformatic#readme",
+ "devDependencies": {
+ "@databases/pg": "^5.3.0",
+ "@databases/sqlite": "^4.0.0",
+ "@platformatic/sql-mapper": "workspace:*",
+ "@platformatic/sql-graphql": "workspace:*",
+ "@platformatic/sql-json-schema-mapper": "workspace:*",
+ "c8": "^7.11.0",
+ "snazzy": "^9.0.0",
+ "split2": "^4.1.0",
+ "standard": "^17.0.0",
+ "strip-ansi": "^7.0.1",
+ "tap": "^16.0.0",
+ "undici": "^5.8.0",
+ "why-is-node-running": "^2.2.2",
+ "tsd": "0.24.1"
+ },
+ "dependencies": {
+ "@fastify/basic-auth": "^4.0.0",
+ "@fastify/cors": "^8.0.0",
+ "@fastify/deepmerge": "^1.1.0",
+ "@fastify/restartable": "^1.2.1",
+ "@fastify/static": "^6.5.0",
+ "@fastify/swagger": "^7.4.1",
+ "@fastify/under-pressure": "^8.0.0",
+ "@platformatic/config": "workspace:*",
+ "@platformatic/db-authorization": "workspace:*",
+ "@platformatic/db-core": "workspace:*",
+ "@platformatic/db-dashboard": "workspace:*",
+ "close-with-grace": "^1.1.0",
+ "commist": "^3.1.2",
+ "desm": "^1.2.0",
+ "execa": "^6.1.0",
+ "env-schema": "^5.0.0",
+ "es-main": "^1.2.0",
+ "fastify": "^4.6.0",
+ "fastify-isolate": "^0.7.0",
+ "fastify-metrics": "^9.2.1",
+ "fastify-plugin": "^4.1.0",
+ "fastify-print-routes": "^2.0.0",
+ "graphql": "^16.6.0",
+ "help-me": "^4.1.0",
+ "minimatch": "^5.1.0",
+ "minimist": "^1.2.6",
+ "pino": "^8.4.1",
+ "pino-pretty": "^9.0.0",
+ "postgrator": "^7.1.0",
+ "ua-parser-js": "^1.0.2",
+ "dtsgenerator": "^3.16.1"
+ }
+}
diff --git a/packages/db/tap-snapshots/test/cli/env.test.mjs.test.cjs b/packages/db/tap-snapshots/test/cli/env.test.mjs.test.cjs
new file mode 100644
index 0000000000..1ce0d625af
--- /dev/null
+++ b/packages/db/tap-snapshots/test/cli/env.test.mjs.test.cjs
@@ -0,0 +1,71 @@
+/* IMPORTANT
+ * This snapshot file is auto-generated, but designed for humans.
+ * It should be checked into source control and tracked carefully.
+ * Re-generate by setting TAP_SNAPSHOT=1 and running tests.
+ * Make sure to inspect the output below. Do not ignore changes!
+ */
+'use strict'
+exports['test/cli/env.test.mjs TAP env white list schema > must match snapshot 1'] = `
+type Query {
+ getPageById(id: ID!): Page
+ pages(limit: Int, offset: Int, orderBy: [PageOrderByArguments], where: PageWhereArguments): [Page]
+}
+
+type Page {
+ id: ID
+ title: String
+}
+
+input PageOrderByArguments {
+ field: PageOrderByField
+ direction: OrderByDirection!
+}
+
+enum PageOrderByField {
+ id
+ title
+}
+
+enum OrderByDirection {
+ ASC
+ DESC
+}
+
+input PageWhereArguments {
+ id: PageWhereArgumentsid
+ title: PageWhereArgumentstitle
+}
+
+input PageWhereArgumentsid {
+ eq: ID
+ neq: ID
+ gt: ID
+ gte: ID
+ lt: ID
+ lte: ID
+ in: [ID]
+ nin: [ID]
+}
+
+input PageWhereArgumentstitle {
+ eq: String
+ neq: String
+ gt: String
+ gte: String
+ lt: String
+ lte: String
+ in: [String]
+ nin: [String]
+}
+
+type Mutation {
+ savePage(input: PageInput!): Page
+ insertPages(inputs: [PageInput]!): [Page]
+ deletePages(where: PageWhereArguments): [Page]
+}
+
+input PageInput {
+ id: ID
+ title: String
+}
+`
diff --git a/packages/db/tap-snapshots/test/cli/schema.test.mjs.test.cjs b/packages/db/tap-snapshots/test/cli/schema.test.mjs.test.cjs
new file mode 100644
index 0000000000..7f62b22242
--- /dev/null
+++ b/packages/db/tap-snapshots/test/cli/schema.test.mjs.test.cjs
@@ -0,0 +1,520 @@
+/* IMPORTANT
+ * This snapshot file is auto-generated, but designed for humans.
+ * It should be checked into source control and tracked carefully.
+ * Re-generate by setting TAP_SNAPSHOT=1 and running tests.
+ * Make sure to inspect the output below. Do not ignore changes!
+ */
+'use strict'
+exports['test/cli/schema.test.mjs TAP print the graphql schema to stdout > must match snapshot 1'] = `
+type Query {
+ getGraphById(id: ID!): Graph
+ graphs(limit: Int, offset: Int, orderBy: [GraphOrderByArguments], where: GraphWhereArguments): [Graph]
+}
+
+type Graph {
+ id: ID
+ name: String
+}
+
+input GraphOrderByArguments {
+ field: GraphOrderByField
+ direction: OrderByDirection!
+}
+
+enum GraphOrderByField {
+ id
+ name
+}
+
+enum OrderByDirection {
+ ASC
+ DESC
+}
+
+input GraphWhereArguments {
+ id: GraphWhereArgumentsid
+ name: GraphWhereArgumentsname
+}
+
+input GraphWhereArgumentsid {
+ eq: ID
+ neq: ID
+ gt: ID
+ gte: ID
+ lt: ID
+ lte: ID
+ in: [ID]
+ nin: [ID]
+}
+
+input GraphWhereArgumentsname {
+ eq: String
+ neq: String
+ gt: String
+ gte: String
+ lt: String
+ lte: String
+ in: [String]
+ nin: [String]
+}
+
+type Mutation {
+ saveGraph(input: GraphInput!): Graph
+ insertGraphs(inputs: [GraphInput]!): [Graph]
+ deleteGraphs(where: GraphWhereArguments): [Graph]
+}
+
+input GraphInput {
+ id: ID
+ name: String
+}
+`
+
+exports['test/cli/schema.test.mjs TAP print the openapi schema to stdout > must match snapshot 1'] = `
+{
+ "openapi": "3.0.3",
+ "info": {
+ "title": "Platformatic DB",
+ "description": "Exposing a SQL database as REST"
+ },
+ "components": {
+ "schemas": {
+ "Graph": {
+ "title": "Graph",
+ "description": "A Graph",
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "integer"
+ },
+ "name": {
+ "type": "string",
+ "nullable": true
+ }
+ },
+ "required": []
+ }
+ }
+ },
+ "paths": {
+ "/graphs/": {
+ "get": {
+ "operationId": "getAllGraph",
+ "parameters": [
+ {
+ "schema": {
+ "type": "integer"
+ },
+ "in": "query",
+ "name": "limit",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "integer"
+ },
+ "in": "query",
+ "name": "offset",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": [
+ "id",
+ "name"
+ ]
+ }
+ },
+ "in": "query",
+ "name": "fields",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "integer"
+ },
+ "in": "query",
+ "name": "where.id.eq",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "integer"
+ },
+ "in": "query",
+ "name": "where.id.neq",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "integer"
+ },
+ "in": "query",
+ "name": "where.id.gt",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "integer"
+ },
+ "in": "query",
+ "name": "where.id.gte",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "integer"
+ },
+ "in": "query",
+ "name": "where.id.lt",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "integer"
+ },
+ "in": "query",
+ "name": "where.id.lte",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "string"
+ },
+ "in": "query",
+ "name": "where.id.in",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "string"
+ },
+ "in": "query",
+ "name": "where.id.nin",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "string"
+ },
+ "in": "query",
+ "name": "where.name.eq",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "string"
+ },
+ "in": "query",
+ "name": "where.name.neq",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "string"
+ },
+ "in": "query",
+ "name": "where.name.gt",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "string"
+ },
+ "in": "query",
+ "name": "where.name.gte",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "string"
+ },
+ "in": "query",
+ "name": "where.name.lt",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "string"
+ },
+ "in": "query",
+ "name": "where.name.lte",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "string"
+ },
+ "in": "query",
+ "name": "where.name.in",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "string"
+ },
+ "in": "query",
+ "name": "where.name.nin",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "string",
+ "enum": [
+ "asc",
+ "desc"
+ ]
+ },
+ "in": "query",
+ "name": "orderby.id",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "string",
+ "enum": [
+ "asc",
+ "desc"
+ ]
+ },
+ "in": "query",
+ "name": "orderby.name",
+ "required": false
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Default Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/Graph"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "post": {
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Graph"
+ }
+ }
+ }
+ },
+ "responses": {
+ "200": {
+ "description": "Default Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Graph"
+ }
+ }
+ },
+ "links": {}
+ }
+ }
+ }
+ },
+ "/graphs/{id}": {
+ "get": {
+ "operationId": "getGraphById",
+ "parameters": [
+ {
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": [
+ "id",
+ "name"
+ ]
+ }
+ },
+ "in": "query",
+ "name": "fields",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "integer"
+ },
+ "in": "path",
+ "name": "id",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Default Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Graph"
+ }
+ }
+ },
+ "links": {}
+ }
+ }
+ },
+ "post": {
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Graph"
+ }
+ }
+ }
+ },
+ "parameters": [
+ {
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": [
+ "id",
+ "name"
+ ]
+ }
+ },
+ "in": "query",
+ "name": "fields",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "integer"
+ },
+ "in": "path",
+ "name": "id",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Default Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Graph"
+ }
+ }
+ },
+ "links": {}
+ }
+ }
+ },
+ "put": {
+ "requestBody": {
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Graph"
+ }
+ }
+ }
+ },
+ "parameters": [
+ {
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": [
+ "id",
+ "name"
+ ]
+ }
+ },
+ "in": "query",
+ "name": "fields",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "integer"
+ },
+ "in": "path",
+ "name": "id",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Default Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Graph"
+ }
+ }
+ },
+ "links": {}
+ }
+ }
+ },
+ "delete": {
+ "parameters": [
+ {
+ "schema": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": [
+ "id",
+ "name"
+ ]
+ }
+ },
+ "in": "query",
+ "name": "fields",
+ "required": false
+ },
+ {
+ "schema": {
+ "type": "integer"
+ },
+ "in": "path",
+ "name": "id",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "Default Response",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/Graph"
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "/": {
+ "get": {
+ "responses": {
+ "200": {
+ "description": "Default Response"
+ }
+ }
+ }
+ }
+ }
+}
+`
diff --git a/packages/db/test/admin.test.js b/packages/db/test/admin.test.js
new file mode 100644
index 0000000000..c0becd9f81
--- /dev/null
+++ b/packages/db/test/admin.test.js
@@ -0,0 +1,332 @@
+'use strict'
+
+const { buildConfig, connInfo, clear, createBasicPages } = require('./helper')
+const { test } = require('tap')
+const { buildServer } = require('..')
+const { request } = require('undici')
+
+test('adminSecret', async ({ teardown, equal, pass, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ authorization: {
+ adminSecret: 'secret'
+ },
+ core: {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ 'X-PLATFORMATIC-ADMIN-SECRET': 'secret'
+ },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(await res.body.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ 'X-PLATFORMATIC-ADMIN-SECRET': 'secret'
+ },
+ body: JSON.stringify({
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(await res.body.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ 'X-PLATFORMATIC-ADMIN-SECRET': 'secret'
+ },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(await res.body.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ 'X-PLATFORMATIC-ADMIN-SECRET': 'secret'
+ },
+ body: JSON.stringify({
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(await res.body.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ url: '/graphql',
+ headers: {
+ 'Content-Type': 'application/json',
+ 'X-PLATFORMATIC-ADMIN-SECRET': 'wrong'
+ },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ deletePages(where: { title: { eq: "Hello" } }) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ same(await res.body.json(), {
+ data: {
+ deletePages: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'deletePages'
+ ]
+ }
+ ]
+ }, 'deletePages response')
+ }
+})
+test('login route', async ({ teardown, same, equal }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ authorization: {
+ adminSecret: 'secret'
+ },
+ core: {
+ ...connInfo
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+ {
+ // right password provided
+ const res = await request(`${server.url}/_admin/login`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify({
+ password: 'secret'
+ })
+ })
+ equal(res.statusCode, 200)
+ same(await res.body.json(), {
+ authorized: true
+ })
+ }
+
+ {
+ // bad password provided
+ const res = await request(`${server.url}/_admin/login`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify({
+ password: 'this-is-not-the-right-password'
+ })
+ })
+ equal(res.statusCode, 401)
+ same(await res.body.json(), {
+ authorized: false
+ })
+ }
+
+ {
+ // no password provided
+ const res = await request(`${server.url}/_admin/login`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify({})
+ })
+ equal(res.statusCode, 400)
+ same(await res.body.json(), {
+ statusCode: 400,
+ error: 'Bad Request',
+ message: 'body must have required property \'password\''
+ })
+ }
+})
+
+test('Swagger documentation', async ({ teardown, same, equal }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ authorization: {
+ adminSecret: 'secret'
+ },
+ core: {
+ ...connInfo
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+
+ {
+ // JSON Documentation
+ const res = await request(`${server.url}/_admin/documentation/json`)
+ equal(res.statusCode, 200)
+ const body = await res.body.json()
+
+ equal(body.openapi, '3.0.3')
+ same(body.info, {
+ title: 'Platformatic DB Admin Routes',
+ description: 'Configure and manage your Platformatic DB instance.'
+ })
+
+ same(Object.keys(body.paths), [
+ '/_admin/config',
+ '/_admin/login',
+ '/_admin/restart',
+ '/_admin/config-file'
+ ])
+ }
+
+ {
+ // HTML Documentation
+ const res = await request(`${server.url}/_admin/documentation/static/index.html`)
+ equal(res.statusCode, 200)
+ equal(res.headers['content-type'], 'text/html; charset=UTF-8')
+ }
+})
+
+test('admin routes are not included in main openapi', async ({ teardown, same, equal }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ authorization: {
+ adminSecret: 'secret'
+ },
+ core: {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+
+ {
+ // JSON Documentation
+ const res = await request(`${server.url}/documentation/json`)
+ equal(res.statusCode, 200)
+ const body = await res.body.json()
+
+ equal(body.openapi, '3.0.3')
+
+ same(Object.keys(body.paths), [
+ '/pages/',
+ '/pages/{id}',
+ '/'
+ ])
+ }
+})
diff --git a/packages/db/test/authorization-setup.test.js b/packages/db/test/authorization-setup.test.js
new file mode 100644
index 0000000000..ebf7b0640f
--- /dev/null
+++ b/packages/db/test/authorization-setup.test.js
@@ -0,0 +1,70 @@
+
+'use strict'
+
+const { buildConfig, connInfo, clear, createBasicPages } = require('./helper')
+const { test } = require('tap')
+const { buildServer } = require('..')
+const { request } = require('undici')
+
+test('configuring authorization works even with an empty object', async ({ teardown, equal, pass, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ authorization: {},
+ core: {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ }
+
+ }))
+ teardown(server.stop)
+ await server.listen()
+
+  // This must fail: enabling authorization (even with an empty object)
+  // rejects every operation that no rule explicitly allows
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ 'X-PLATFORMATIC-ADMIN-SECRET': 'secret'
+ },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+
+ same(await res.body.json(), {
+ data: {
+ savePage: null
+ },
+ errors: [
+ {
+ message: 'operation not allowed',
+ locations: [
+ {
+ line: 3,
+ column: 13
+ }
+ ],
+ path: [
+ 'savePage'
+ ]
+ }
+ ]
+ }, 'savePage response')
+ }
+})
diff --git a/packages/db/test/cli/env.test.mjs b/packages/db/test/cli/env.test.mjs
new file mode 100644
index 0000000000..59cae1841c
--- /dev/null
+++ b/packages/db/test/cli/env.test.mjs
@@ -0,0 +1,145 @@
+import { cliPath, connectAndResetDB } from './helper.mjs'
+import { test } from 'tap'
+import { join } from 'desm'
+import { request } from 'undici'
+import { execa } from 'execa'
+import split from 'split2'
+import { once } from 'events'
+
+function parse (line) {
+ try {
+ return JSON.parse(line)
+ } catch {
+    // log non-JSON lines and return undefined so split2 skips them
+    // (pushing null would signal end-of-stream in object mode)
+    console.log(line)
+ }
+}
+
+test('env white list', async ({ equal, same, match, teardown }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ await db.query(db.sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ const child = execa('node', [
+ cliPath,
+ '--config',
+ join(import.meta.url, '..', 'fixtures', 'env-whitelist.json'),
+ '--allow-env',
+ 'DATABASE_URL,HOSTNAME'
+ ], {
+ env: {
+ DATABASE_URL: 'postgres://postgres:postgres@127.0.0.1/postgres',
+ HOSTNAME: '127.0.0.1'
+ }
+ })
+ const output = child.stdout.pipe(split(parse))
+ const [{ url }] = await once(output, 'data')
+
+ {
+ // should connect to db and query it.
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ const body = await res.body.json()
+ match(body, {
+ data: {
+ savePage: {
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ child.kill('SIGINT')
+})
+
+test('env white list default values', async ({ equal, same, match, teardown }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ await db.query(db.sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ const child = execa('node', [
+ cliPath,
+ '--config',
+ join(import.meta.url, '..', 'fixtures', 'env-whitelist-default.json')
+ ], {
+ env: {
+ DATABASE_URL: 'postgres://postgres:postgres@127.0.0.1/postgres',
+ PORT: 10555
+ }
+ })
+ const output = child.stdout.pipe(split(parse))
+ const [{ url }] = await once(output, 'data')
+ equal(url, 'http://127.0.0.1:10555')
+ {
+ // should connect to db and query it.
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ const body = await res.body.json()
+ match(body, {
+ data: {
+ savePage: {
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ child.kill('SIGINT')
+})
+
+test('env white list schema', async ({ matchSnapshot, teardown }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ await db.query(db.sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ const { stdout } = await execa('node', [
+ cliPath,
+ 'schema',
+ 'graphql',
+ '--config',
+ join(import.meta.url, '..', 'fixtures', 'env-whitelist.json'),
+ '--allow-env',
+ 'DATABASE_URL,HOSTNAME'
+ ], {
+ env: {
+ DATABASE_URL: 'postgres://postgres:postgres@127.0.0.1/postgres',
+ HOSTNAME: '127.0.0.1'
+ }
+ })
+ matchSnapshot(stdout)
+})
diff --git a/packages/db/test/cli/gen-types.test.mjs b/packages/db/test/cli/gen-types.test.mjs
new file mode 100644
index 0000000000..2def08920b
--- /dev/null
+++ b/packages/db/test/cli/gen-types.test.mjs
@@ -0,0 +1,89 @@
+import path from 'path'
+import { rm } from 'fs/promises'
+import { cliPath } from './helper.mjs'
+import { test } from 'tap'
+import { fileURLToPath } from 'url'
+import { execa } from 'execa'
+
+function urlDirname (url) {
+ return path.dirname(fileURLToPath(url))
+}
+
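+// tsd from the package's local node_modules is used to type-check the generated types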
+const pathToTSD = path.join(urlDirname(import.meta.url), '../../node_modules/.bin/tsd')
+
+test('generate ts types', async (t) => {
+ const cwd = path.join(urlDirname(import.meta.url), '..', 'fixtures', 'gen-types')
+
+ t.teardown(async () => {
+ await Promise.all([
+ rm(path.join(cwd, 'types'), { recursive: true, force: true }),
+ rm(path.join(cwd, 'global.d.ts'), { force: true })
+ ])
+ })
+
+ try {
+ await execa('node', [cliPath, 'types'], { cwd })
+ await execa(pathToTSD, { cwd })
+ } catch (err) {
+ console.log(err.stdout)
+ console.log(err.stderr)
+ t.fail(err.stderr)
+ }
+
+ t.pass()
+})
+
+test('generate ts types twice', async (t) => {
+ const cwd = path.join(urlDirname(import.meta.url), '..', 'fixtures', 'gen-types')
+
+ t.teardown(async () => {
+ await Promise.all([
+ rm(path.join(cwd, 'types'), { recursive: true, force: true }),
+ rm(path.join(cwd, 'global.d.ts'), { force: true })
+ ])
+ })
+
+ try {
+ await execa('node', [cliPath, 'types'], { cwd })
+ await execa('node', [cliPath, 'types'], { cwd })
+ await execa(pathToTSD, { cwd })
+ } catch (err) {
+ console.log(err.stdout)
+ console.log(err.stderr)
+ t.fail(err.stderr)
+ }
+
+ t.pass()
+})
+
+test('run migrate command with type generation', async (t) => {
+ const cwd = path.join(urlDirname(import.meta.url), '..', 'fixtures', 'auto-gen-types')
+
+ t.teardown(async () => {
+ await Promise.all([
+ rm(path.join(cwd, 'types'), { recursive: true, force: true }),
+ rm(path.join(cwd, 'global.d.ts'), { force: true }),
+ rm(path.join(cwd, 'db'), { force: true })
+ ])
+ })
+
+ try {
+ await execa('node', [cliPath, 'migrate'], { cwd })
+ await execa(pathToTSD, { cwd })
+ } catch (err) {
+ console.log(err.stdout)
+ console.log(err.stderr)
+ t.fail(err.stderr)
+ }
+
+ t.pass()
+})
+
+test('missing config file', async ({ equal, match }) => {
+ try {
+ await execa('node', [cliPath, 'seed'])
+ } catch (err) {
+ equal(err.exitCode, 1)
+ match(err.stderr, 'Missing config file')
+ }
+})
diff --git a/packages/db/test/cli/help.test.mjs b/packages/db/test/cli/help.test.mjs
new file mode 100644
index 0000000000..2b4305ef68
--- /dev/null
+++ b/packages/db/test/cli/help.test.mjs
@@ -0,0 +1,19 @@
+import { cliPath } from './helper.mjs'
+import { test } from 'tap'
+import { execa } from 'execa'
+import { join } from 'desm'
+import { readFile } from 'fs/promises'
+import { EOL } from 'os'
+
+for (const cmd of ['start', 'seed', 'schema', 'migrate']) {
+ test(`db help ${cmd}`, async (t) => {
+ const { stdout } = await execa(cliPath, ['help', cmd])
+ const path = join(import.meta.url, '..', '..', 'help', `${cmd}.txt`)
+ t.match(stdout + EOL, await readFile(path, 'utf8'))
+ })
+}
+
+test('db help foobar', async (t) => {
+ const { stdout } = await execa(cliPath, ['help', 'foobar'])
+ t.match(stdout, 'no such help file: foobar')
+})
diff --git a/packages/db/test/cli/helper.mjs b/packages/db/test/cli/helper.mjs
new file mode 100644
index 0000000000..70cb1ecee7
--- /dev/null
+++ b/packages/db/test/cli/helper.mjs
@@ -0,0 +1,143 @@
+import why from 'why-is-node-running'
+import { Agent, setGlobalDispatcher } from 'undici'
+import { join } from 'desm'
+import createConnectionPool from '@databases/pg'
+import { once } from 'events'
+import { execa } from 'execa'
+import { setTimeout as sleep } from 'timers/promises'
+import split from 'split2'
+import { rm } from 'fs/promises'
+
+// This file must be required/imported as the first file
+// in the test suite. It sets up the global environment
+// to track the open handles via why-is-node-running.
+setInterval(() => {
+ why()
+}, 20000).unref()
+
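+// Short keep-alive timeouts stop the agent from keeping the process alive
+// after the tests; rejectUnauthorized is disabled, presumably for fixtures
+// that use self-signed certificates.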
+setGlobalDispatcher(new Agent({
+ keepAliveTimeout: 10,
+ keepAliveMaxTimeout: 10,
+ tls: {
+ rejectUnauthorized: false
+ }
+}))
+
+const cliPath = join(import.meta.url, '..', '..', 'db.mjs')
+
+async function connectAndResetDB () {
+ // TODO support other databases
+ const db = await createConnectionPool({
+ connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
+ bigIntMode: 'string',
+ max: 1
+ })
+
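+  // Drop any tables left over from previous runs. pages and graphs appear
+  // twice below, presumably so a drop blocked by a foreign key on the first
+  // pass succeeds once the dependent tables have been removed.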
+ try {
+ await db.query(db.sql`DROP TABLE pages`)
+ } catch (err) {
+ }
+
+ try {
+ await db.query(db.sql`DROP TABLE graphs`)
+ } catch (err) {
+ }
+
+ try {
+ await db.query(db.sql`DROP TABLE versions`)
+ } catch (err) {
+ }
+
+ try {
+ await db.query(db.sql`DROP TABLE pages`)
+ } catch (err) {
+ }
+
+ try {
+ await db.query(db.sql`DROP TABLE categories`)
+ } catch {
+ }
+
+ try {
+ await db.query(db.sql`DROP TABLE posts`)
+ } catch {
+ }
+
+ try {
+ await db.query(db.sql`DROP TABLE simple_types`)
+ } catch {
+ }
+
+ try {
+ await db.query(db.sql`DROP TABLE owners`)
+ } catch {
+ }
+
+ try {
+ await db.query(db.sql`DROP TABLE graphs`)
+ } catch {
+ }
+
+ return db
+}
+
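+// Spawn the CLI with the given arguments, parse its NDJSON stdout and wait
+// for the first log line carrying the server URL; the parsed stream is also
+// exposed as child.ndj for callers that need to follow later log lines.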
+async function start (...args) {
+ const child = execa('node', [cliPath, ...args])
+ const output = child.stdout.pipe(split(function (line) {
+ try {
+ const obj = JSON.parse(line)
+ return obj
+ } catch (err) {
+ console.log(line)
+ }
+ }))
+ child.ndj = output
+
+ const [data] = await once(output, 'data')
+ const { url } = data
+
+ if (!url) {
+ throw new Error('Couldn\'t start server')
+ }
+
+ return {
+ child, url, output
+ }
+}
+
+function removeFileProtocol (str) {
+ return str.replace('file:', '')
+}
+
+function getFixturesConfigFileLocation (filename, subdirectories = []) {
+ return removeFileProtocol(join(import.meta.url, '..', 'fixtures', ...subdirectories, filename))
+}
+
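+// Remove the SQLite fixture database, retrying with a linear backoff when the
+// file is still locked (EBUSY, common on Windows); a missing file (ENOENT) is
+// not an error, and after five EBUSY retries the helper gives up and throws.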
+async function cleanSQLite (dbLocation, i = 0) {
+ if (i === 5) {
+ throw new Error('too many EBUSY')
+ }
+ i++
+ try {
+ await rm(dbLocation)
+ } catch (err) {
+ console.log('error cleaning up the file', err.code, err.message)
+
+ if (err.code === 'ENOENT') {
+ return
+ }
+
+ if (err.code === 'EBUSY') {
+ await sleep(i * 1000)
+ return cleanSQLite(dbLocation, i)
+ }
+ }
+}
+
+export {
+ cliPath,
+ start,
+ cleanSQLite,
+ connectAndResetDB,
+ getFixturesConfigFileLocation
+}
diff --git a/packages/db/test/cli/init.test.mjs b/packages/db/test/cli/init.test.mjs
new file mode 100644
index 0000000000..3f7bf9220e
--- /dev/null
+++ b/packages/db/test/cli/init.test.mjs
@@ -0,0 +1,298 @@
+import path from 'path'
+import fs from 'fs/promises'
+import { test } from 'tap'
+import { join } from 'desm'
+import { execa } from 'execa'
+import { cliPath } from './helper.mjs'
+
+const moviesMigration = `
+-- Add SQL in this file to create the database tables for your API
+CREATE TABLE IF NOT EXISTS movies (
+ id INTEGER PRIMARY KEY,
+ title TEXT NOT NULL
+);
+`
+
+test('run db init with default options', async (t) => {
+ const pathToFolder = join(import.meta.url, '..', 'fixtures', 'init')
+ const pathToDbConfigFile = path.join(pathToFolder, 'platformatic.db.json')
+ const pathToMigrationFolder = path.join(pathToFolder, 'migrations')
+ const pathToMigrationFile = path.join(pathToMigrationFolder, '001.do.sql')
+
+ await execa('node', [cliPath, 'init'], { cwd: pathToFolder })
+ t.teardown(async () => {
+ await fs.rm(pathToDbConfigFile)
+ await fs.rm(pathToMigrationFolder, { recursive: true, force: true })
+ })
+
+ const dbConfigFile = await fs.readFile(pathToDbConfigFile, 'utf8')
+ const dbConfig = JSON.parse(dbConfigFile)
+
+ const { server, core, migrations } = dbConfig
+
+ t.same(server.logger, { level: 'info' })
+ t.equal(server.hostname, '127.0.0.1')
+ t.equal(server.port, 3042)
+
+ t.equal(core.connectionString, 'sqlite://./db.sqlite')
+ t.equal(core.graphiql, true)
+
+ t.equal(migrations.dir, './migrations')
+
+ const migrationFile = await fs.readFile(pathToMigrationFile, 'utf8')
+ t.equal(migrationFile, moviesMigration)
+})
+
+test('run init with default options twice', async (t) => {
+ const pathToFolder = join(import.meta.url, '..', 'fixtures', 'init')
+ const pathToDbConfigFile = path.join(pathToFolder, 'platformatic.db.json')
+ const pathToMigrationFolder = path.join(pathToFolder, 'migrations')
+ const pathToMigrationFile = path.join(pathToMigrationFolder, '001.do.sql')
+
+ const { stdout: firstRunStdout } = await execa('node', [cliPath, 'init'], { cwd: pathToFolder })
+ const { stdout: secondRunStdout } = await execa('node', [cliPath, 'init'], { cwd: pathToFolder })
+
+ t.teardown(async () => {
+ await fs.rm(pathToDbConfigFile)
+ await fs.rm(pathToMigrationFolder, { recursive: true, force: true })
+ })
+
+ const firstRunStdoutLines = firstRunStdout.split('\n')
+ t.match(firstRunStdoutLines[0], /(.*)Configuration file platformatic.db.json successfully created./)
+ t.match(firstRunStdoutLines[1], /(.*)Migrations folder .\/migrations successfully created./)
+ t.match(firstRunStdoutLines[2], /(.*)Migration file 001.do.sql successfully created./)
+ t.match(firstRunStdoutLines[3], /(.*)Please run `npm i --save-dev(.*)/)
+
+ const secondRunStdoutLines = secondRunStdout.split('\n')
+ t.match(secondRunStdoutLines[0], /(.*)Configuration file platformatic.db.json found, skipping creation of configuration file./)
+ t.match(secondRunStdoutLines[1], /(.*)Migrations folder .\/migrations found, skipping creation of migrations folder./)
+ t.match(secondRunStdoutLines[2], /(.*)Migration file 001.do.sql found, skipping creation of migration file./)
+ t.match(secondRunStdoutLines[3], /(.*)Please run `npm i --save-dev(.*)/)
+
+ const dbConfigFile = await fs.readFile(pathToDbConfigFile, 'utf8')
+ const dbConfig = JSON.parse(dbConfigFile)
+
+ const { server, core, migrations } = dbConfig
+
+ t.same(server.logger, { level: 'info' })
+ t.equal(server.hostname, '127.0.0.1')
+ t.equal(server.port, 3042)
+
+ t.equal(core.connectionString, 'sqlite://./db.sqlite')
+ t.equal(core.graphiql, true)
+
+ t.equal(migrations.dir, './migrations')
+
+ const migrationFile = await fs.readFile(pathToMigrationFile, 'utf8')
+ t.equal(migrationFile, moviesMigration)
+})
+
+test('run db init --database postgres', async (t) => {
+ const pathToFolder = join(import.meta.url, '..', 'fixtures', 'init')
+ const pathToDbConfigFile = path.join(pathToFolder, 'platformatic.db.json')
+ const pathToMigrationFolder = path.join(pathToFolder, 'migrations')
+ const pathToMigrationFile = path.join(pathToMigrationFolder, '001.do.sql')
+
+ await execa('node', [cliPath, 'init', '--database', 'postgres'], { cwd: pathToFolder })
+ t.teardown(async () => {
+ await fs.rm(pathToDbConfigFile)
+ await fs.rm(pathToMigrationFolder, { recursive: true, force: true })
+ })
+
+ const dbConfigFile = await fs.readFile(pathToDbConfigFile, 'utf8')
+ const dbConfig = JSON.parse(dbConfigFile)
+
+ const { server, core, migrations } = dbConfig
+
+ t.same(server.logger, { level: 'info' })
+ t.equal(server.hostname, '127.0.0.1')
+ t.equal(server.port, 3042)
+
+ t.equal(core.connectionString, 'postgres://postgres:postgres@localhost:5432/postgres')
+ t.equal(core.graphiql, true)
+
+ t.equal(migrations.dir, './migrations')
+
+ const migrationFile = await fs.readFile(pathToMigrationFile, 'utf8')
+ t.equal(migrationFile, moviesMigration)
+})
+
+test('run db init --database mysql', async (t) => {
+ const pathToFolder = join(import.meta.url, '..', 'fixtures', 'init')
+ const pathToDbConfigFile = path.join(pathToFolder, 'platformatic.db.json')
+ const pathToMigrationFolder = path.join(pathToFolder, 'migrations')
+ const pathToMigrationFile = path.join(pathToMigrationFolder, '001.do.sql')
+
+ await execa('node', [cliPath, 'init', '--database', 'mysql'], { cwd: pathToFolder })
+ t.teardown(async () => {
+ await fs.rm(pathToDbConfigFile)
+ await fs.rm(pathToMigrationFolder, { recursive: true, force: true })
+ })
+
+ const dbConfigFile = await fs.readFile(pathToDbConfigFile, 'utf8')
+ const dbConfig = JSON.parse(dbConfigFile)
+
+ const { server, core, migrations } = dbConfig
+
+ t.same(server.logger, { level: 'info' })
+ t.equal(server.hostname, '127.0.0.1')
+ t.equal(server.port, 3042)
+
+ t.equal(core.connectionString, 'mysql://root@localhost:3306/graph')
+ t.equal(core.graphiql, true)
+
+ t.equal(migrations.dir, './migrations')
+
+ const migrationFile = await fs.readFile(pathToMigrationFile, 'utf8')
+ t.equal(migrationFile, moviesMigration)
+})
+
+test('run db init --database mariadb', async (t) => {
+ const pathToFolder = join(import.meta.url, '..', 'fixtures', 'init')
+ const pathToDbConfigFile = path.join(pathToFolder, 'platformatic.db.json')
+ const pathToMigrationFolder = path.join(pathToFolder, 'migrations')
+ const pathToMigrationFile = path.join(pathToMigrationFolder, '001.do.sql')
+
+ await execa('node', [cliPath, 'init', '--database', 'mariadb'], { cwd: pathToFolder })
+ t.teardown(async () => {
+ await fs.rm(pathToDbConfigFile)
+ await fs.rm(pathToMigrationFolder, { recursive: true, force: true })
+ })
+
+ const dbConfigFile = await fs.readFile(pathToDbConfigFile, 'utf8')
+ const dbConfig = JSON.parse(dbConfigFile)
+
+ const { server, core, migrations } = dbConfig
+
+ t.same(server.logger, { level: 'info' })
+ t.equal(server.hostname, '127.0.0.1')
+ t.equal(server.port, 3042)
+
+ t.equal(core.connectionString, 'mysql://root@localhost:3307/graph')
+ t.equal(core.graphiql, true)
+
+ t.equal(migrations.dir, './migrations')
+
+ const migrationFile = await fs.readFile(pathToMigrationFile, 'utf8')
+ t.equal(migrationFile, moviesMigration)
+})
+
+test('run db init --database mysql8', async (t) => {
+ const pathToFolder = join(import.meta.url, '..', 'fixtures', 'init')
+ const pathToDbConfigFile = path.join(pathToFolder, 'platformatic.db.json')
+ const pathToMigrationFolder = path.join(pathToFolder, 'migrations')
+ const pathToMigrationFile = path.join(pathToMigrationFolder, '001.do.sql')
+
+ await execa('node', [cliPath, 'init', '--database', 'mysql8'], { cwd: pathToFolder })
+ t.teardown(async () => {
+ await fs.rm(pathToDbConfigFile)
+ await fs.rm(pathToMigrationFolder, { recursive: true, force: true })
+ })
+
+ const dbConfigFile = await fs.readFile(pathToDbConfigFile, 'utf8')
+ const dbConfig = JSON.parse(dbConfigFile)
+
+ const { server, core, migrations } = dbConfig
+
+ t.same(server.logger, { level: 'info' })
+ t.equal(server.hostname, '127.0.0.1')
+ t.equal(server.port, 3042)
+
+ t.equal(core.connectionString, 'mysql://root@localhost:3308/graph')
+ t.equal(core.graphiql, true)
+
+ t.equal(migrations.dir, './migrations')
+
+ const migrationFile = await fs.readFile(pathToMigrationFile, 'utf8')
+ t.equal(migrationFile, moviesMigration)
+})
+
+test('run db init --hostname 127.0.0.5', async (t) => {
+ const pathToFolder = join(import.meta.url, '..', 'fixtures', 'init')
+ const pathToDbConfigFile = path.join(pathToFolder, 'platformatic.db.json')
+ const pathToMigrationFolder = path.join(pathToFolder, 'migrations')
+ const pathToMigrationFile = path.join(pathToMigrationFolder, '001.do.sql')
+
+ await execa('node', [cliPath, 'init', '--hostname', '127.0.0.5'], { cwd: pathToFolder })
+ t.teardown(async () => {
+ await fs.rm(pathToDbConfigFile)
+ await fs.rm(pathToMigrationFolder, { recursive: true, force: true })
+ })
+
+ const dbConfigFile = await fs.readFile(pathToDbConfigFile, 'utf8')
+ const dbConfig = JSON.parse(dbConfigFile)
+
+ const { server, core, migrations } = dbConfig
+
+ t.same(server.logger, { level: 'info' })
+ t.equal(server.hostname, '127.0.0.5')
+ t.equal(server.port, 3042)
+
+ t.equal(core.connectionString, 'sqlite://./db.sqlite')
+ t.equal(core.graphiql, true)
+
+ t.equal(migrations.dir, './migrations')
+
+ const migrationFile = await fs.readFile(pathToMigrationFile, 'utf8')
+ t.equal(migrationFile, moviesMigration)
+})
+
+test('run db init --port 3055', async (t) => {
+ const pathToFolder = join(import.meta.url, '..', 'fixtures', 'init')
+ const pathToDbConfigFile = path.join(pathToFolder, 'platformatic.db.json')
+ const pathToMigrationFolder = path.join(pathToFolder, 'migrations')
+ const pathToMigrationFile = path.join(pathToMigrationFolder, '001.do.sql')
+
+ await execa('node', [cliPath, 'init', '--port', '3055'], { cwd: pathToFolder })
+ t.teardown(async () => {
+ await fs.rm(pathToDbConfigFile)
+ await fs.rm(pathToMigrationFolder, { recursive: true, force: true })
+ })
+
+ const dbConfigFile = await fs.readFile(pathToDbConfigFile, 'utf8')
+ const dbConfig = JSON.parse(dbConfigFile)
+
+ const { server, core, migrations } = dbConfig
+
+ t.same(server.logger, { level: 'info' })
+ t.equal(server.hostname, '127.0.0.1')
+ t.equal(server.port, 3055)
+
+ t.equal(core.connectionString, 'sqlite://./db.sqlite')
+ t.equal(core.graphiql, true)
+
+ t.equal(migrations.dir, './migrations')
+
+ const migrationFile = await fs.readFile(pathToMigrationFile, 'utf8')
+ t.equal(migrationFile, moviesMigration)
+})
+
+test('run db init --migrations custom-migrations-folder', async (t) => {
+ const pathToFolder = join(import.meta.url, '..', 'fixtures', 'init')
+ const pathToDbConfigFile = path.join(pathToFolder, 'platformatic.db.json')
+ const pathToMigrationFolder = path.join(pathToFolder, './custom-migrations-folder')
+ const pathToMigrationFile = path.join(pathToMigrationFolder, '001.do.sql')
+
+ await execa('node', [cliPath, 'init', '--migrations', 'custom-migrations-folder'], { cwd: pathToFolder })
+ t.teardown(async () => {
+ await fs.rm(pathToDbConfigFile)
+ await fs.rm(pathToMigrationFolder, { recursive: true, force: true })
+ })
+
+ const dbConfigFile = await fs.readFile(pathToDbConfigFile, 'utf8')
+ const dbConfig = JSON.parse(dbConfigFile)
+
+ const { server, core, migrations } = dbConfig
+
+ t.same(server.logger, { level: 'info' })
+ t.equal(server.hostname, '127.0.0.1')
+ t.equal(server.port, 3042)
+
+ t.equal(core.connectionString, 'sqlite://./db.sqlite')
+ t.equal(core.graphiql, true)
+
+ t.equal(migrations.dir, 'custom-migrations-folder')
+
+ const migrationFile = await fs.readFile(pathToMigrationFile, 'utf8')
+ t.equal(migrationFile, moviesMigration)
+})
diff --git a/packages/db/test/cli/load-and-reload-files.test.mjs b/packages/db/test/cli/load-and-reload-files.test.mjs
new file mode 100644
index 0000000000..2baa9348db
--- /dev/null
+++ b/packages/db/test/cli/load-and-reload-files.test.mjs
@@ -0,0 +1,273 @@
+import { start, connectAndResetDB } from './helper.mjs'
+import { test } from 'tap'
+import { request } from 'undici'
+import { join, basename } from 'path'
+import os from 'os'
+import { writeFile } from 'fs/promises'
+import { setTimeout as sleep } from 'timers/promises'
+
+const isWindows = os.platform() === 'win32'
+
+test('load and reload', { skip: isWindows }, async ({ teardown, equal, same, comment }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ await db.query(db.sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+
+ const file = join(os.tmpdir(), `some-plugin-${process.pid}.js`)
+ const config = join(os.tmpdir(), `config-${process.pid}.json`)
+
+ await Promise.all([
+ writeFile(file, `
+ module.exports = async function (app) {
+ }`
+ ),
+
+ writeFile(config, `
+{
+ "server": {
+ "logger": {
+ "level": "info"
+ },
+ "hostname": "127.0.0.1",
+ "port": 0
+ },
+ "plugin": {
+ "path": "./${basename(file)}",
+ "stopTimeout": 1000
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ },
+ "authorization": {}
+}
+ `)
+ ])
+
+ comment('files written')
+
+ const { child, url } = await start('-c', config)
+
+ comment('server started')
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ add(x: 2, y: 2)
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 400, 'add status code')
+ same(await res.body.json(), {
+ data: null,
+ errors: [{
+ message: 'Cannot query field "add" on type "Query".',
+ locations: [{
+ line: 3,
+ column: 13
+ }]
+ }]
+ }, 'add response')
+ }
+
+ await writeFile(file, `
+ module.exports = async function (app) {
+ app.log.info('loaded')
+ app.graphql.extendSchema(\`
+ extend type Query {
+ add(x: Int, y: Int): Int
+ }
+ \`)
+ app.graphql.defineResolvers({
+ Query: {
+ add: async (_, { x, y }) => x + y
+ }
+ })
+ }`)
+
+ await sleep(500)
+
+ child.kill('SIGUSR2')
+
+ // the plugin is reloaded
+ for await (const log of child.ndj) {
+ comment(log.msg)
+ if (log.msg === 'loaded') {
+ break
+ }
+ }
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ add(x: 2, y: 2)
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'add status code')
+ same(await res.body.json(), {
+ data: {
+ add: 4
+ }
+ }, 'add response')
+ }
+
+ child.kill('SIGINT')
+})
+
+test('do not crash on reload', { skip: isWindows }, async ({ teardown, match, comment }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ await db.query(db.sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+
+ const file = join(os.tmpdir(), `some-plugin-${process.pid}.js`)
+ const config = join(os.tmpdir(), `config-${process.pid}.json`)
+
+ await Promise.all([
+ writeFile(file, `
+ module.exports = async function (app) {
+ }`
+ ),
+
+ writeFile(config, `
+{
+ "server": {
+ "logger": {
+ "level": "info"
+ },
+ "hostname": "127.0.0.1",
+ "port": 0
+ },
+ "plugin": {
+ "path": "./${basename(file)}",
+ "stopTimeout": 1000
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ }
+}
+ `)
+ ])
+
+ comment('files written')
+
+ const { child } = await start('-c', config)
+ // child.stderr.pipe(process.stderr)
+
+ comment('server started')
+
+ await writeFile(file, `
+ module.exports = async function (app) {
+ console.error('plugin loaded')
+ throw new Error('kaboom')
+ }`)
+
+ await sleep(500)
+
+ child.kill('SIGUSR2')
+ comment('signal sent')
+
+ // the plugin is reloaded
+ for await (const log of child.ndj) {
+ comment(log.msg)
+ if (log.msg === 'failed to restart') {
+ break
+ }
+ }
+
+ child.kill('SIGINT')
+})
+
+test('log the error', { skip: isWindows }, async ({ teardown, match, comment }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ await db.query(db.sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+
+ const file = join(os.tmpdir(), `some-plugin-${process.pid}.js`)
+ const config = join(os.tmpdir(), `config-${process.pid}.json`)
+
+ await Promise.all([
+ writeFile(file, `
+ module.exports = async function (app) {
+ }`
+ ),
+
+ writeFile(config, `
+{
+ "server": {
+ "logger": {
+ "level": "info"
+ },
+ "hostname": "127.0.0.1",
+ "port": 0
+ },
+ "plugin": {
+ "path": "./${basename(file)}",
+ "stopTimeout": 1000
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ }
+}
+ `)
+ ])
+
+ comment('files written')
+
+ const { child } = await start('-c', config)
+ // child.stderr.pipe(process.stderr)
+
+ comment('server started')
+
+ await writeFile(file, `
+ module.exports = async function (app) {
+ console.error('plugin loaded')
+ setTimeout(() => {
+ console.error('timeout triggered')
+ throw new Error('kaboom')
+ }, 2000)
+ }`)
+
+ await sleep(500)
+ comment('sending signal')
+ child.kill('SIGUSR2')
+
+ for await (const log of child.ndj) {
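+    // the strings below must match the runtime's log messages verbatim,
+    // including the spelling of "encounterated"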
+ if (log.message === 'error encounterated within the isolate, routing to uncaughtException, use onError option to catch') {
+ match(log, {
+ err: {
+ message: 'kaboom'
+ }
+ })
+ }
+
+ if (log.message === 'exiting') {
+ match(log, {
+ err: {
+ message: 'kaboom'
+ }
+ })
+ }
+ }
+})
diff --git a/packages/db/test/cli/migrate.test.mjs b/packages/db/test/cli/migrate.test.mjs
new file mode 100644
index 0000000000..e8a1fff4d4
--- /dev/null
+++ b/packages/db/test/cli/migrate.test.mjs
@@ -0,0 +1,96 @@
+import { cliPath, connectAndResetDB, getFixturesConfigFileLocation } from './helper.mjs'
+import { test } from 'tap'
+import { execa } from 'execa'
+import stripAnsi from 'strip-ansi'
+import split from 'split2'
+import { once } from 'events'
+
+test('migrate on start', async ({ equal, teardown }) => {
+ const db = await connectAndResetDB()
+ let found = false
+ const child = execa('node', [cliPath, 'start', '-c', getFixturesConfigFileLocation('auto-apply.json')])
+
+ teardown(() => child.kill('SIGINT'))
+ teardown(() => db.dispose())
+
+ const splitter = split()
+ child.stdout.pipe(splitter)
+ for await (const data of splitter) {
+ const sanitized = stripAnsi(data)
+ if (sanitized.match(/(.*)running 001\.do\.sql/)) {
+ found = true
+ break
+ }
+ }
+ equal(found, true)
+})
+
+test('validate migration checksums', async ({ equal, teardown }) => {
+ const db = await connectAndResetDB()
+ let firstFound = false
+
+ const firstChild = execa('node', [cliPath, 'start', '-c', getFixturesConfigFileLocation('validate-migrations-checksums.json')])
+
+ teardown(() => firstChild.kill('SIGINT'))
+ teardown(() => db.dispose())
+
+ const splitter = split()
+ firstChild.stdout.pipe(splitter)
+ for await (const data of splitter) {
+ const sanitized = stripAnsi(data)
+ if (sanitized.match(/(.*)running 001\.do\.sql/)) {
+ firstFound = true
+ break
+ }
+ }
+ equal(firstFound, true)
+
+ let secondFound = false
+ const secondChild = execa('node', [cliPath, 'start', '-c', getFixturesConfigFileLocation('validate-migrations-checksums.json')])
+
+ teardown(() => secondChild.kill('SIGINT'))
+ teardown(() => db.dispose())
+
+ const secondSplitter = split()
+ secondChild.stdout.pipe(secondSplitter)
+ for await (const data of secondSplitter) {
+ const sanitized = stripAnsi(data)
+ if (sanitized.match(/(.*)verifying checksum of migration 001\.do\.sql/)) {
+ secondFound = true
+ break
+ }
+ }
+ equal(secondFound, true)
+})
+
+test('do not validate migration checksums if not configured', async ({ equal, match, teardown }) => {
+ const db = await connectAndResetDB()
+
+ const firstChild = execa('node', [cliPath, 'start', '-c', getFixturesConfigFileLocation('auto-apply.json')])
+
+ teardown(() => firstChild.kill('SIGINT'))
+ teardown(() => db.dispose())
+
+ const splitter = split()
+ const firstOutput = firstChild.stdout.pipe(splitter)
+ const [message] = await once(firstOutput, 'data')
+ match(stripAnsi(message), /(.*)running(.*)(001\.do\.sql)/)
+
+ const secondChild = execa('node', [cliPath, 'start', '-c', getFixturesConfigFileLocation('auto-apply.json')])
+ secondChild.stderr.pipe(process.stderr)
+
+ teardown(() => secondChild.kill('SIGINT'))
+
+ const secondOutput = secondChild.stdout.pipe(split(JSON.parse))
+ // first output should be a "server listening" message
+ // no migration logging is expected
+ const [{ msg }] = await once(secondOutput, 'data')
+ equal(msg, 'server listening')
+})
+
+test('throws if migrations directory does not exist', async ({ match }) => {
+ const child = execa('node', [cliPath, 'start', '-c', getFixturesConfigFileLocation('invalid-migrations-directory.json')])
+ const output = child.stderr.pipe(split())
+ const [data] = await once(output, 'data')
+ match(data, /^MigrateError: Migrations directory (.*) does not exist.$/)
+})
diff --git a/packages/db/test/cli/schema.test.mjs b/packages/db/test/cli/schema.test.mjs
new file mode 100644
index 0000000000..700c9c46f5
--- /dev/null
+++ b/packages/db/test/cli/schema.test.mjs
@@ -0,0 +1,35 @@
+import { cliPath } from './helper.mjs'
+import { test } from 'tap'
+import { join } from 'desm'
+import { execa } from 'execa'
+import { rm } from 'fs/promises'
+
+const dbLocation = join(import.meta.url, '..', 'fixtures', 'sqlite', 'db')
+
+test('print the graphql schema to stdout', async ({ matchSnapshot }) => {
+ try {
+ await rm(dbLocation)
+ } catch {
+ // ignore
+ }
+
+ const { stdout } = await execa('node', [cliPath, 'schema', 'graphql'], {
+ cwd: join(import.meta.url, '..', 'fixtures', 'sqlite')
+ })
+
+ matchSnapshot(stdout)
+})
+
+test('print the openapi schema to stdout', async ({ matchSnapshot }) => {
+ try {
+ await rm(dbLocation)
+ } catch {
+ // ignore
+ }
+
+ const { stdout } = await execa('node', [cliPath, 'schema', 'openapi'], {
+ cwd: join(import.meta.url, '..', 'fixtures', 'sqlite')
+ })
+
+ matchSnapshot(stdout)
+})
diff --git a/packages/db/test/cli/seed.test.mjs b/packages/db/test/cli/seed.test.mjs
new file mode 100644
index 0000000000..98436d2e28
--- /dev/null
+++ b/packages/db/test/cli/seed.test.mjs
@@ -0,0 +1,180 @@
+import { cliPath, cleanSQLite } from './helper.mjs'
+import { test } from 'tap'
+import { request } from 'undici'
+import { execa } from 'execa'
+import stripAnsi from 'strip-ansi'
+import split from 'split2'
+import { fileURLToPath } from 'url'
+import path from 'path'
+
+function urlDirname (url) {
+ return path.dirname(fileURLToPath(url))
+}
+
+const dbLocation = path.resolve(path.join(urlDirname(import.meta.url), '..', 'fixtures', 'sqlite', 'db'))
+
+test('seed and start', async ({ comment, equal, match, teardown }) => {
+ await cleanSQLite(dbLocation)
+
+ comment('migrating and seeding')
+ const cwd = path.join(urlDirname(import.meta.url), '..', 'fixtures', 'sqlite')
+ const seed = path.join(urlDirname(import.meta.url), '..', 'fixtures', 'sqlite', 'seed.js')
+ comment(`dbl ${dbLocation}`)
+ comment(`cwd ${cwd}`)
+ const { stdout } = await execa('node', [cliPath, 'seed', seed], {
+ cwd
+ })
+
+ {
+ const sanitized = stripAnsi(stdout)
+ match(sanitized, /001\.do\.sql/)
+ match(sanitized, /seeding from .*seed\.js/)
+ match(sanitized, /seeding complete/)
+ }
+
+ comment('starting')
+
+ const child = execa('node', [cliPath, 'start'], {
+ cwd
+ })
+ // child.stderr.pipe(process.stderr)
+ const splitter = split()
+ child.stdout.pipe(splitter)
+ let url
+ for await (const data of splitter) {
+ try {
+ const parsed = JSON.parse(data)
+ if (parsed.url) {
+ url = parsed.url
+ break
+ }
+ } catch (err) {
+ // do nothing as the line is not JSON
+ }
+ }
+ teardown(() => child.kill('SIGINT'))
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ graphs {
+ id
+ name
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'graphs status code')
+ const body = await res.body.json()
+ match(body, {
+ data: {
+ graphs: [{
+ id: 1,
+ name: 'Hello'
+ }, {
+ id: 2,
+ name: 'Hello 2'
+ }]
+ }
+ }, 'graphs response')
+ }
+})
+
+test('missing config file', async ({ equal, match }) => {
+ try {
+ await execa('node', [cliPath, 'seed'])
+ } catch (err) {
+ equal(err.exitCode, 1)
+ match(err.stderr, 'Missing config file')
+ }
+})
+
+test('missing seed file', async ({ equal, match }) => {
+ const cwd = path.join(urlDirname(import.meta.url), '..', 'fixtures', 'sqlite')
+ try {
+ await execa('node', [cliPath, 'seed'], {
+ cwd
+ })
+ } catch (err) {
+ equal(err.exitCode, 1)
+ match(err.stdout, 'Missing seed file')
+ }
+})
+
+test('seed and start from cwd', async ({ comment, equal, match, teardown }) => {
+ await cleanSQLite(dbLocation)
+
+ comment('migrating and seeding')
+ const cwd = path.join(urlDirname(import.meta.url), '..', 'fixtures', 'sqlite')
+ const currentCWD = process.cwd()
+ teardown(() => process.chdir(currentCWD))
+ process.chdir(cwd)
+ comment(`dbl ${dbLocation}`)
+ comment(`cwd ${cwd}`)
+ const { stdout } = await execa('node', [cliPath, 'seed', 'seed.js'], {
+ cwd
+ })
+
+ {
+ const sanitized = stripAnsi(stdout)
+ match(sanitized, /001\.do\.sql/)
+ match(sanitized, /seeding from .*seed\.js/)
+ }
+
+ comment('starting')
+
+ const child = execa('node', [cliPath, 'start'], {
+ cwd
+ })
+ // child.stderr.pipe(process.stderr)
+ const splitter = split()
+ child.stdout.pipe(splitter)
+ let url
+ for await (const data of splitter) {
+ try {
+ const parsed = JSON.parse(data)
+ if (parsed.url) {
+ url = parsed.url
+ break
+ }
+ } catch (err) {
+ // do nothing as the line is not JSON
+ }
+ }
+ teardown(() => child.kill('SIGINT'))
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ graphs {
+ id
+ name
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'graphs status code')
+ const body = await res.body.json()
+ match(body, {
+ data: {
+ graphs: [{
+ id: 1,
+ name: 'Hello'
+ }, {
+ id: 2,
+ name: 'Hello 2'
+ }]
+ }
+ }, 'graphs response')
+ }
+})
diff --git a/packages/db/test/cli/sqlite3.test.mjs b/packages/db/test/cli/sqlite3.test.mjs
new file mode 100644
index 0000000000..6fdf67d3ab
--- /dev/null
+++ b/packages/db/test/cli/sqlite3.test.mjs
@@ -0,0 +1,145 @@
+import { cliPath, cleanSQLite } from './helper.mjs'
+import { test } from 'tap'
+import { request } from 'undici'
+import { execa } from 'execa'
+import stripAnsi from 'strip-ansi'
+import { access } from 'fs/promises'
+import split from 'split2'
+import { fileURLToPath } from 'url'
+import path from 'path'
+
+function urlDirname (url) {
+ return path.dirname(fileURLToPath(url))
+}
+
+const dbLocation = path.resolve(path.join(urlDirname(import.meta.url), '..', 'fixtures', 'sqlite', 'db'))
+
+test('migrate and start', async ({ comment, equal, match, teardown }) => {
+ await cleanSQLite(dbLocation)
+
+ comment('migrating')
+ const cwd = path.join(urlDirname(import.meta.url), '..', 'fixtures', 'sqlite')
+ comment(`dbl ${dbLocation}`)
+ comment(`cwd ${cwd}`)
+ const { stdout } = await execa('node', [cliPath, 'migrate'], {
+ cwd
+ })
+
+ {
+ const sanitized = stripAnsi(stdout)
+ match(sanitized, /001\.do\.sql/)
+ }
+
+ comment('starting')
+
+ const child = execa('node', [cliPath, 'start'], {
+ cwd
+ })
+ // child.stderr.pipe(process.stderr)
+ const splitter = split()
+ child.stdout.pipe(splitter)
+ let url
+ for await (const data of splitter) {
+ try {
+ const parsed = JSON.parse(data)
+ if (parsed.url) {
+ url = parsed.url
+ break
+ }
+ } catch (err) {
+ // do nothing as the line is not JSON
+ }
+ }
+ teardown(() => child.kill('SIGINT'))
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ saveGraph(input: { name: "Hello" }) {
+ id
+ name
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'saveGraph status code')
+ const body = await res.body.json()
+ match(body, {
+ data: {
+ saveGraph: {
+ id: 1,
+ name: 'Hello'
+ }
+ }
+ }, 'saveGraph response')
+ }
+})
+
+test('no cwd', async ({ comment, equal, match, teardown }) => {
+ await cleanSQLite(dbLocation)
+ comment('migrating')
+
+ const config = path.join(urlDirname(import.meta.url), '..', 'fixtures', 'sqlite', 'platformatic.db.json')
+ comment(`dbl ${dbLocation}`)
+ comment(`cfg ${config}`)
+ const { stdout } = await execa('node', [cliPath, 'migrate', '-c', config])
+
+ {
+ const sanitized = stripAnsi(stdout)
+ match(sanitized, '001.do.sql')
+ }
+
+ await access(dbLocation)
+
+ comment('starting')
+
+ const child = execa('node', [cliPath, 'start', '-c', config])
+ child.stderr.pipe(process.stderr)
+ const splitter = split()
+ child.stdout.pipe(splitter)
+ let url
+ for await (const data of splitter) {
+ try {
+ const parsed = JSON.parse(data)
+ if (parsed.url) {
+ url = parsed.url
+ break
+ }
+ } catch (err) {
+ // do nothing as the line is not JSON
+ }
+ }
+ teardown(() => child.kill('SIGINT'))
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ saveGraph(input: { name: "Hello" }) {
+ id
+ name
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'saveGraph status code')
+ const body = await res.body.json()
+ match(body, {
+ data: {
+ saveGraph: {
+ id: 1,
+ name: 'Hello'
+ }
+ }
+ }, 'saveGraph response')
+ }
+})
diff --git a/packages/db/test/cli/start.test.mjs b/packages/db/test/cli/start.test.mjs
new file mode 100644
index 0000000000..0e194edb3c
--- /dev/null
+++ b/packages/db/test/cli/start.test.mjs
@@ -0,0 +1,386 @@
+import { cliPath, connectAndResetDB, start } from './helper.mjs'
+import { test } from 'tap'
+import { join } from 'desm'
+import { request } from 'undici'
+import { execa } from 'execa'
+import split from 'split2'
+import { once } from 'events'
+
+test('autostart', async ({ equal, same, match, teardown }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ await db.query(db.sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+
+ const { child, url } = await start('-c', join(import.meta.url, '..', 'fixtures', 'simple.json'))
+
+ let id
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ const body = await res.body.json()
+ match(body, {
+ data: {
+ savePage: {
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ id = body.data.savePage.id
+ }
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ getPageById(id: ${id}) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(await res.body.json(), {
+ data: {
+ getPageById: {
+ id,
+ title: 'Hello'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ savePage(input: { id: ${id}, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(await res.body.json(), {
+ data: {
+ savePage: {
+ id,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ getPageById(id: ${id}) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(await res.body.json(), {
+ data: {
+ getPageById: {
+ id,
+ title: 'Hello World'
+ }
+ }
+ }, 'pages response')
+ }
+
+ child.kill('SIGINT')
+})
+
+test('start command', async ({ equal, same, match, teardown }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ await db.query(db.sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+
+ const { child, url } = await start('start', '-c', join(import.meta.url, '..', 'fixtures', 'simple.json'))
+
+ let id
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ const body = await res.body.json()
+ match(body, {
+ data: {
+ savePage: {
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ id = body.data.savePage.id
+ }
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ getPageById(id: ${id}) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(await res.body.json(), {
+ data: {
+ getPageById: {
+ id,
+ title: 'Hello'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ savePage(input: { id: ${id}, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(await res.body.json(), {
+ data: {
+ savePage: {
+ id,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ getPageById(id: ${id}) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(await res.body.json(), {
+ data: {
+ getPageById: {
+ id,
+ title: 'Hello World'
+ }
+ }
+ }, 'pages response')
+ }
+
+ child.kill('SIGINT')
+})
+
+test('auto config', async ({ equal, same, match, teardown }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ await db.query(db.sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+
+ const child = execa('node', [cliPath], {
+ cwd: join(import.meta.url, '..', 'fixtures', 'auto')
+ })
+ const output = child.stdout.pipe(split(JSON.parse))
+
+ const [{ url }] = await once(output, 'data')
+
+ let id
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ const body = await res.body.json()
+ match(body, {
+ data: {
+ savePage: {
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ id = body.data.savePage.id
+ }
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ getPageById(id: ${id}) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(await res.body.json(), {
+ data: {
+ getPageById: {
+ id,
+ title: 'Hello'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ savePage(input: { id: ${id}, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(await res.body.json(), {
+ data: {
+ savePage: {
+ id,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ getPageById(id: ${id}) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(await res.body.json(), {
+ data: {
+ getPageById: {
+ id,
+ title: 'Hello World'
+ }
+ }
+ }, 'pages response')
+ }
+
+ child.kill('SIGINT')
+})
+
+test('default logger', async ({ equal, same, match, teardown }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ await db.query(db.sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+
+ const { child, url } = await start('-c', join(import.meta.url, '..', 'fixtures', 'no-server-logger.json'))
+ match(url, /http:\/\/127.0.0.1:[0-9]+/)
+ child.kill('SIGINT')
+})
diff --git a/packages/db/test/cli/validations.test.mjs b/packages/db/test/cli/validations.test.mjs
new file mode 100644
index 0000000000..fe4cc74f77
--- /dev/null
+++ b/packages/db/test/cli/validations.test.mjs
@@ -0,0 +1,33 @@
+import { cliPath } from './helper.mjs'
+import { test } from 'tap'
+import { join } from 'desm'
+import { readFile } from 'fs/promises'
+import { execa } from 'execa'
+
+const version = JSON.parse(await readFile(join(import.meta.url, '..', '..', 'package.json'))).version
+
+test('version', async (t) => {
+ const { stdout } = await execa('node', [cliPath, '--version'])
+ t.ok(stdout.includes('v' + version))
+})
+
+test('missing config', async (t) => {
+ await t.rejects(execa('node', [cliPath]))
+})
+
+test('print validation errors', async ({ equal, plan }) => {
+ plan(2)
+ try {
+ await execa('node', [cliPath, '--config', join(import.meta.url, '..', 'fixtures', 'missing-required-values.json')])
+ } catch (err) {
+ equal(err.exitCode, 1)
+ equal(err.stdout, `
+┌─────────┬───────────────┬─────────────────────────────────────────────────────────────────────┐
+│ (index) │ path │ message │
+├─────────┼───────────────┼─────────────────────────────────────────────────────────────────────┤
+│ 0 │ '/' │ \`must have required property 'server' {"missingProperty":"server"}\` │
+│ 1 │ '/migrations' │ \`must have required property 'dir' {"missingProperty":"dir"}\` │
+└─────────┴───────────────┴─────────────────────────────────────────────────────────────────────┘
+`.trim())
+ }
+})
diff --git a/packages/db/test/cli/watch.test.mjs b/packages/db/test/cli/watch.test.mjs
new file mode 100644
index 0000000000..4e46b4a2ea
--- /dev/null
+++ b/packages/db/test/cli/watch.test.mjs
@@ -0,0 +1,290 @@
+import { start, connectAndResetDB } from './helper.mjs'
+import { test } from 'tap'
+import { request } from 'undici'
+import { join, basename } from 'path'
+import os from 'os'
+import { writeFile, mkdir } from 'fs/promises'
+import { setTimeout as sleep } from 'timers/promises'
+
+test('watch file', async ({ teardown, equal, same, comment }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ await db.query(db.sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+
+ const file = join(os.tmpdir(), `some-plugin-${process.pid}.js`)
+ const config = join(os.tmpdir(), `config-${process.pid}.json`)
+
+ await writeFile(config, `
+{
+ "server": {
+ "logger": {
+ "level": "info"
+ },
+ "hostname": "127.0.0.1",
+ "port": 0
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ }
+}
+ `)
+
+ comment('file written')
+
+ const { child, url } = await start('-c', config)
+
+ comment('server started')
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ add(x: 2, y: 2)
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 400, 'add status code')
+ same(await res.body.json(), {
+ data: null,
+ errors: [{
+ message: 'Cannot query field "add" on type "Query".',
+ locations: [{
+ line: 3,
+ column: 13
+ }]
+ }]
+ }, 'add response')
+ }
+
+ comment('updating files')
+
+ await writeFile(file, `
+ module.exports = async function (app) {
+ app.log.info('loaded')
+ app.graphql.extendSchema(\`
+ extend type Query {
+ add(x: Int, y: Int): Int
+ }
+ \`)
+ app.graphql.defineResolvers({
+ Query: {
+ add: async (_, { x, y }) => x + y
+ }
+ })
+ }`)
+
+ await writeFile(config, `
+{
+ "server": {
+ "logger": {
+ "level": "info"
+ },
+ "hostname": "127.0.0.1",
+ "port": 0
+ },
+ "plugin": {
+ "path": "./${basename(file)}",
+ "stopTimeout": 1000
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ },
+ "authorization": {},
+ "dashboard": {}
+}
+ `)
+
+ for await (const log of child.ndj) {
+ comment(log.msg)
+ if (log.msg === 'loaded') {
+ break
+ }
+ }
+
+ comment('reloaded')
+
+ {
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ add(x: 2, y: 2)
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'add status code')
+ same(await res.body.json(), {
+ data: {
+ add: 4
+ }
+ }, 'add response')
+ }
+
+ child.kill('SIGINT')
+})
+
+test('do not watch ignored file', async ({ teardown, equal, same, comment }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ await db.query(db.sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+
+ const folder = join(os.tmpdir(), `plt-${process.pid}`)
+ await mkdir(folder)
+ const pluginFile = join(folder, 'some-plugin-.js')
+ comment('plugin file is ' + pluginFile)
+
+ await writeFile(pluginFile, `
+ module.exports = async function (app) {
+ app.graphql.extendSchema(\`
+ extend type Query {
+ add(x: Int, y: Int): Int
+ }
+ \`)
+ app.graphql.defineResolvers({
+ Query: {
+ add: async (_, { x, y }) => x + y
+ }
+ })
+ }`
+ )
+ const config = join(folder, `config-${process.pid}.json`)
+ await writeFile(config, `
+{
+ "server": {
+ "logger": {
+ "level": "info"
+ },
+ "hostname": "127.0.0.1",
+ "port": 0
+ },
+ "plugin": {
+ "path": "./${basename(pluginFile)}",
+ "stopTimeout": 1000
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ }
+}
+ `)
+
+ const { child, url } = await start('-c', config, '--watch-ignore', basename(pluginFile))
+
+ await writeFile(pluginFile, `
+ module.exports = async function (app) {
+ app.log.info('loaded')
+ app.graphql.extendSchema(\`
+ extend type Query {
+ add(x: Int, y: Int): Int
+ }
+ \`)
+ app.graphql.defineResolvers({
+ Query: {
+ add: async (_, { x, y }) => x + y + 20
+ }
+ })
+ }`
+ )
+
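+ // give the watcher time to react; the plugin is ignored, so nothing should reload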
+ await sleep(5000)
+
+ {
+ // plugin is not reloaded
+ const res = await request(`${url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ add(x: 2, y: 2)
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'add status code')
+ same(await res.body.json(), {
+ data: {
+ add: 4
+ }
+ }, 'add response')
+ }
+
+ child.kill('SIGINT')
+})
+
+test('does not loop forever when doing ESM', async ({ teardown, equal, same, comment }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ await db.query(db.sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+
+ const folder = join(os.tmpdir(), `plt-${process.pid}-2`)
+ await mkdir(folder)
+ const pluginFile = join(folder, 'some-plugin.mjs')
+ comment('plugin file is ' + pluginFile)
+
+ await writeFile(pluginFile, `
+ export default async function (app) {
+ app.graphql.extendSchema(\`
+ extend type Query {
+ add(x: Int, y: Int): Int
+ }
+ \`)
+ app.graphql.defineResolvers({
+ Query: {
+ add: async (_, { x, y }) => x + y
+ }
+ })
+ }`
+ )
+ const config = join(folder, `config-${process.pid}.json`)
+ await writeFile(config, `
+{
+ "server": {
+ "logger": {
+ "level": "info"
+ },
+ "hostname": "127.0.0.1",
+ "port": 0
+ },
+ "plugin": {
+ "path": "./${basename(pluginFile)}",
+ "stopTimeout": 1000
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ }
+}
+ `)
+
+ const { child } = await start('-c', config, '--watch-ignore', basename(pluginFile))
+
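+ // wait long enough for a restart loop, if any, to show up in the logs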
+ await sleep(5000)
+
+ child.kill('SIGINT')
+
+ const lines = []
+ for await (const line of child.ndj) {
+ lines.push(line)
+ }
+ // without the watch-ignore flag, the log would fill up with repeated
+ // "config changed" messages; with it we expect at most a couple of lines
+ equal(lines.length <= 2, true)
+})
diff --git a/packages/db/test/config.test.js b/packages/db/test/config.test.js
new file mode 100644
index 0000000000..1a93725967
--- /dev/null
+++ b/packages/db/test/config.test.js
@@ -0,0 +1,254 @@
+'use strict'
+
+const { buildConfig, connInfo } = require('./helper')
+const { test } = require('tap')
+const { buildServer } = require('..')
+const { request } = require('undici')
+const { tmpdir } = require('os')
+const { writeFile, unlink } = require('fs/promises')
+const { join } = require('path')
+const DBConfigManager = require('../lib/config')
+
+test('return config with adminSecret', async ({ teardown, equal, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret'
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+ const res = await (request(`${server.url}/_admin/config`))
+ equal(res.statusCode, 200)
+ same(await res.body.json(), {
+ loginRequired: true
+ })
+})
+
+test('return config without adminSecret', async ({ teardown, equal, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ ...connInfo
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+ const res = await (request(`${server.url}/_admin/config`))
+ equal(res.statusCode, 200)
+ same(await res.body.json(), {
+ loginRequired: false
+ })
+})
+
+test('return config file', async ({ teardown, equal, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret'
+ },
+ dashboard: {
+ enabled: true,
+ rootPath: false
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+ const res = await (request(`${server.url}/_admin/config-file`, {
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': 'secret'
+ }
+ }))
+ equal(res.statusCode, 200)
+ const body = await res.body.json()
+ same(body, {
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret',
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous'
+ },
+ dashboard: {
+ enabled: true,
+ rootPath: false
+ }
+ })
+})
+
+test('no need for configFileLocation to return config', async ({ teardown, equal, same }) => {
+ const targetConfigFile = join(tmpdir(), 'platformatic.json')
+ const theConfig = buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret'
+ }
+ })
+
+ await writeFile(targetConfigFile, JSON.stringify(theConfig, null, 2))
+ const server = await buildServer({
+ ...theConfig
+ })
+
+ teardown(server.stop)
+ teardown(() => unlink(targetConfigFile))
+ await server.listen()
+ const res = await (request(`${server.url}/_admin/config-file`, {
+ headers: {
+ 'X-PLATFORMATIC-ADMIN-SECRET': 'secret'
+ }
+ }))
+ equal(res.statusCode, 200)
+ const body = await res.body.json()
+ same(body, {
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret',
+ roleKey: 'X-PLATFORMATIC-ROLE',
+ anonymousRole: 'anonymous'
+ },
+ dashboard: {
+ rootPath: false
+ }
+ })
+})
+
+test('update config file', async ({ teardown, equal, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret'
+ }
+ }))
+
+ teardown(server.stop)
+ await server.listen()
+
+ const res = await (request(`${server.url}/_admin/config-file`, {
+ headers: {
+ 'content-type': 'application/json',
+ 'X-PLATFORMATIC-ADMIN-SECRET': 'secret'
+ },
+ method: 'POST',
+ body: JSON.stringify({
+ foo: 'bar'
+ })
+ }))
+ equal(res.statusCode, 200)
+ const body = await res.body.json()
+ same(body, {
+ success: true
+ })
+})
+
+test('not update config file if unauthorized', { skip: true }, async ({ teardown, equal, same }) => {
+ const targetConfigFile = join(tmpdir(), 'platformatic.json')
+
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret'
+ },
+ configFileLocation: targetConfigFile
+ }))
+ teardown(server.stop)
+ await server.listen()
+ const res = await (request(`${server.url}/_admin/config-file`, {
+ headers: {
+ 'content-type': 'application/json'
+ },
+ method: 'POST',
+ body: JSON.stringify({
+ foo: 'bar'
+ })
+ }))
+ equal(res.statusCode, 401)
+ const body = await res.body.json()
+ same(body, { success: false, message: 'Unauthorized' })
+})
+
+test('ignore watch sqlite file', async ({ teardown, equal, same, comment }) => {
+ {
+ // bare filename without a directory
+ const config = {
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ connectionString: 'sqlite://db-watchIgnore.sqlite'
+ }
+ }
+ const cm = new DBConfigManager({
+ source: config,
+ schema: {}
+ })
+ const parseResult = await cm.parse()
+ equal(parseResult, true)
+ same(cm.watchIgnore, ['db-watchIgnore.sqlite', 'db-watchIgnore.sqlite-journal', '.esm*'])
+ }
+
+ {
+ // relative path with a directory
+ const config = {
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ connectionString: 'sqlite://./databases/db-watchIgnore.sqlite'
+ }
+ }
+ const cm = new DBConfigManager({
+ source: config,
+ schema: {}
+ })
+ const parseResult = await cm.parse()
+ equal(parseResult, true)
+ same(cm.watchIgnore, [join('databases', 'db-watchIgnore.sqlite'), join('databases', 'db-watchIgnore.sqlite-journal'), '.esm*'])
+ }
+})
diff --git a/packages/db/test/cors.test.js b/packages/db/test/cors.test.js
new file mode 100644
index 0000000000..ee398837a8
--- /dev/null
+++ b/packages/db/test/cors.test.js
@@ -0,0 +1,55 @@
+'use strict'
+
+const { buildConfig, connInfo } = require('./helper')
+const { test } = require('tap')
+const { buildServer } = require('..')
+const { request } = require('undici')
+
+test('CORS is disabled by default', async ({ teardown, equal, pass, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ ...connInfo
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
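+ // with CORS disabled there is no preflight handler, so the OPTIONS request gets a 404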
+ const res = await (request(`${server.url}/_admin/login`, {
+ method: 'OPTIONS',
+ headers: {
+ 'Access-Control-Request-Method': 'POST',
+ Origin: 'https://foo.bar.org'
+ }
+ }))
+ equal(res.statusCode, 404)
+})
+
+test('CORS can be enabled', async ({ teardown, equal, pass, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0,
+ cors: {
+ origin: true,
+ methods: ['GET', 'POST']
+ }
+ },
+ core: {
+ ...connInfo
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+ const res = await (request(`${server.url}/_admin/login`, {
+ method: 'OPTIONS',
+ headers: {
+ 'Access-Control-Request-Method': 'POST',
+ Origin: 'https://foo.bar.org'
+ }
+ }))
+ equal(res.statusCode, 204)
+ equal(res.headers['access-control-allow-origin'], 'https://foo.bar.org')
+ equal(res.headers['access-control-allow-methods'], 'GET, POST')
+})
diff --git a/packages/db/test/fixtures/auto-apply.json b/packages/db/test/fixtures/auto-apply.json
new file mode 100644
index 0000000000..711605d010
--- /dev/null
+++ b/packages/db/test/fixtures/auto-apply.json
@@ -0,0 +1,17 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ },
+ "migrations": {
+ "dir": "./migrations",
+ "table": "versions"
+ },
+ "authorization": {}
+}
diff --git a/packages/db/test/fixtures/auto-gen-types/index.d.ts b/packages/db/test/fixtures/auto-gen-types/index.d.ts
new file mode 100644
index 0000000000..39a316616c
--- /dev/null
+++ b/packages/db/test/fixtures/auto-gen-types/index.d.ts
@@ -0,0 +1 @@
+// Needed to run tsd tests
\ No newline at end of file
diff --git a/packages/db/test/fixtures/auto-gen-types/index.test-d.ts b/packages/db/test/fixtures/auto-gen-types/index.test-d.ts
new file mode 100644
index 0000000000..40f16d9f3b
--- /dev/null
+++ b/packages/db/test/fixtures/auto-gen-types/index.test-d.ts
@@ -0,0 +1,14 @@
+///
+
+import { expectType } from 'tsd'
+import { Graph } from './types/Graph'
+import { FastifyInstance, fastify } from 'fastify'
+
+const app: FastifyInstance = fastify()
+
+const graphs = await app.platformatic.entities.graph.find()
+expectType(graphs)
+
+const graph = graphs[0]
+expectType(graph.id)
+expectType(graph.name)
diff --git a/packages/db/test/fixtures/auto-gen-types/migrations/001.do.sql b/packages/db/test/fixtures/auto-gen-types/migrations/001.do.sql
new file mode 100644
index 0000000000..0c81f656df
--- /dev/null
+++ b/packages/db/test/fixtures/auto-gen-types/migrations/001.do.sql
@@ -0,0 +1,4 @@
+CREATE TABLE graphs (
+ id INTEGER PRIMARY KEY,
+ name TEXT
+);
diff --git a/packages/db/test/fixtures/auto-gen-types/package.json b/packages/db/test/fixtures/auto-gen-types/package.json
new file mode 100644
index 0000000000..92ee2fd16d
--- /dev/null
+++ b/packages/db/test/fixtures/auto-gen-types/package.json
@@ -0,0 +1,16 @@
+{
+ "name": "gen-types",
+ "version": "1.0.0",
+ "description": "",
+ "main": "index.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "author": "",
+ "license": "ISC",
+ "types": "./index.d.ts",
+ "devDependencies": {
+ "@platformatic/sql-mapper": "workspace:*",
+ "fastify": "^4.6.0"
+ }
+}
diff --git a/packages/db/test/fixtures/auto-gen-types/platformatic.db.json b/packages/db/test/fixtures/auto-gen-types/platformatic.db.json
new file mode 100644
index 0000000000..df51454150
--- /dev/null
+++ b/packages/db/test/fixtures/auto-gen-types/platformatic.db.json
@@ -0,0 +1,18 @@
+{
+ "core": {
+ "connectionString": "sqlite://./db"
+ },
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "migrations": {
+ "dir": "./migrations"
+ },
+ "types": {
+ "autogenerate": true
+ }
+}
diff --git a/packages/db/test/fixtures/auto/migrations/001.do.sql b/packages/db/test/fixtures/auto/migrations/001.do.sql
new file mode 100644
index 0000000000..0c81f656df
--- /dev/null
+++ b/packages/db/test/fixtures/auto/migrations/001.do.sql
@@ -0,0 +1,4 @@
+CREATE TABLE graphs (
+ id INTEGER PRIMARY KEY,
+ name TEXT
+);
diff --git a/packages/db/test/fixtures/auto/migrations/001.undo.sql b/packages/db/test/fixtures/auto/migrations/001.undo.sql
new file mode 100644
index 0000000000..df301dc819
--- /dev/null
+++ b/packages/db/test/fixtures/auto/migrations/001.undo.sql
@@ -0,0 +1 @@
+DROP TABLE graphs;
diff --git a/packages/db/test/fixtures/auto/platformatic.db.json b/packages/db/test/fixtures/auto/platformatic.db.json
new file mode 100644
index 0000000000..ec87b960a6
--- /dev/null
+++ b/packages/db/test/fixtures/auto/platformatic.db.json
@@ -0,0 +1,17 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ },
+ "migrations": {
+ "dir": "./migrations",
+ "table": "versions",
+ "autoApply": false
+ }
+}
diff --git a/packages/db/test/fixtures/env-whitelist-default.json b/packages/db/test/fixtures/env-whitelist-default.json
new file mode 100644
index 0000000000..3a8a05a25d
--- /dev/null
+++ b/packages/db/test/fixtures/env-whitelist-default.json
@@ -0,0 +1,12 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": "{PORT}",
+ "logger": {
+ "level": "info"
+ }
+ },
+ "core": {
+ "connectionString": "{DATABASE_URL}"
+ }
+}
diff --git a/packages/db/test/fixtures/env-whitelist.json b/packages/db/test/fixtures/env-whitelist.json
new file mode 100644
index 0000000000..0cbc607934
--- /dev/null
+++ b/packages/db/test/fixtures/env-whitelist.json
@@ -0,0 +1,12 @@
+{
+ "server": {
+ "hostname": "{HOSTNAME}",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "core": {
+ "connectionString": "{DATABASE_URL}"
+ }
+}
diff --git a/packages/db/test/fixtures/gen-types/db b/packages/db/test/fixtures/gen-types/db
new file mode 100644
index 0000000000..4b73bda480
Binary files /dev/null and b/packages/db/test/fixtures/gen-types/db differ
diff --git a/packages/db/test/fixtures/gen-types/index.d.ts b/packages/db/test/fixtures/gen-types/index.d.ts
new file mode 100644
index 0000000000..39a316616c
--- /dev/null
+++ b/packages/db/test/fixtures/gen-types/index.d.ts
@@ -0,0 +1 @@
+// Needed to run tsd tests
\ No newline at end of file
diff --git a/packages/db/test/fixtures/gen-types/index.test-d.ts b/packages/db/test/fixtures/gen-types/index.test-d.ts
new file mode 100644
index 0000000000..40f16d9f3b
--- /dev/null
+++ b/packages/db/test/fixtures/gen-types/index.test-d.ts
@@ -0,0 +1,14 @@
+///
+
+import { expectType } from 'tsd'
+import { Graph } from './types/Graph'
+import { FastifyInstance, fastify } from 'fastify'
+
+const app: FastifyInstance = fastify()
+
+const graphs = await app.platformatic.entities.graph.find()
+expectType(graphs)
+
+const graph = graphs[0]
+expectType(graph.id)
+expectType(graph.name)
diff --git a/packages/db/test/fixtures/gen-types/migrations/001.do.sql b/packages/db/test/fixtures/gen-types/migrations/001.do.sql
new file mode 100644
index 0000000000..0c81f656df
--- /dev/null
+++ b/packages/db/test/fixtures/gen-types/migrations/001.do.sql
@@ -0,0 +1,4 @@
+CREATE TABLE graphs (
+ id INTEGER PRIMARY KEY,
+ name TEXT
+);
diff --git a/packages/db/test/fixtures/gen-types/package.json b/packages/db/test/fixtures/gen-types/package.json
new file mode 100644
index 0000000000..cb5b81c84e
--- /dev/null
+++ b/packages/db/test/fixtures/gen-types/package.json
@@ -0,0 +1,17 @@
+{
+ "name": "gen-types",
+ "version": "1.0.0",
+ "description": "",
+ "main": "index.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "author": "",
+ "license": "ISC",
+ "types": "./index.d.ts",
+ "dependencies": {
+ "@platformatic/sql-mapper": "workspace:*",
+ "@platformatic/sql-graphql": "workspace:*",
+ "fastify": "^4.6.0"
+ }
+}
diff --git a/packages/db/test/fixtures/gen-types/platformatic.db.json b/packages/db/test/fixtures/gen-types/platformatic.db.json
new file mode 100644
index 0000000000..0f5ac6c33d
--- /dev/null
+++ b/packages/db/test/fixtures/gen-types/platformatic.db.json
@@ -0,0 +1,16 @@
+{
+ "core": {
+ "connectionString": "sqlite://./db",
+ "graphiql": true
+ },
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "migrations": {
+ "dir": "./migrations"
+ }
+}
diff --git a/packages/db/test/fixtures/init/.gitkeep b/packages/db/test/fixtures/init/.gitkeep
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/packages/db/test/fixtures/init/db.sqlite b/packages/db/test/fixtures/init/db.sqlite
new file mode 100644
index 0000000000..7fdb5c61f2
Binary files /dev/null and b/packages/db/test/fixtures/init/db.sqlite differ
diff --git a/packages/db/test/fixtures/init/package.json b/packages/db/test/fixtures/init/package.json
new file mode 100644
index 0000000000..c9b83edd70
--- /dev/null
+++ b/packages/db/test/fixtures/init/package.json
@@ -0,0 +1,13 @@
+{
+ "name": "platformatic-example",
+ "version": "1.0.0",
+ "description": "",
+ "main": "index.js",
+ "dependencies": {},
+ "devDependencies": {},
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "author": "",
+ "license": "ISC"
+}
diff --git a/packages/db/test/fixtures/invalid-migrations-directory.json b/packages/db/test/fixtures/invalid-migrations-directory.json
new file mode 100644
index 0000000000..78fe7e3fa0
--- /dev/null
+++ b/packages/db/test/fixtures/invalid-migrations-directory.json
@@ -0,0 +1,16 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ },
+ "migrations": {
+ "dir": "./invalid/migrations/path",
+ "table": "versions"
+ }
+}
diff --git a/packages/db/test/fixtures/migrations/001.do.sql b/packages/db/test/fixtures/migrations/001.do.sql
new file mode 100644
index 0000000000..220b6404de
--- /dev/null
+++ b/packages/db/test/fixtures/migrations/001.do.sql
@@ -0,0 +1,4 @@
+CREATE TABLE graphs (
+ id SERIAL PRIMARY KEY,
+ name TEXT
+);
diff --git a/packages/db/test/fixtures/migrations/001.undo.sql b/packages/db/test/fixtures/migrations/001.undo.sql
new file mode 100644
index 0000000000..df301dc819
--- /dev/null
+++ b/packages/db/test/fixtures/migrations/001.undo.sql
@@ -0,0 +1 @@
+DROP TABLE graphs;
diff --git a/packages/db/test/fixtures/missing-required-values.json b/packages/db/test/fixtures/missing-required-values.json
new file mode 100644
index 0000000000..15b1cd4831
--- /dev/null
+++ b/packages/db/test/fixtures/missing-required-values.json
@@ -0,0 +1,9 @@
+{
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ },
+ "migrations": {
+ "table": "versions",
+ "autoApply": false
+ }
+}
diff --git a/packages/db/test/fixtures/no-server-logger.json b/packages/db/test/fixtures/no-server-logger.json
new file mode 100644
index 0000000000..89836fdf37
--- /dev/null
+++ b/packages/db/test/fixtures/no-server-logger.json
@@ -0,0 +1,14 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ },
+ "migrations": {
+ "dir": "./migrations",
+ "table": "versions",
+ "autoApply": false
+ }
+}
\ No newline at end of file
diff --git a/packages/db/test/fixtures/placeholder.json b/packages/db/test/fixtures/placeholder.json
new file mode 100644
index 0000000000..265ea7547d
--- /dev/null
+++ b/packages/db/test/fixtures/placeholder.json
@@ -0,0 +1,18 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "plugin":{
+ "path": "./antani.js"
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ },
+ "authorization": {
+ "adminSecret": "platformatic"
+ }
+}
diff --git a/packages/db/test/fixtures/root-endpoint-plugin.js b/packages/db/test/fixtures/root-endpoint-plugin.js
new file mode 100644
index 0000000000..819297e5c2
--- /dev/null
+++ b/packages/db/test/fixtures/root-endpoint-plugin.js
@@ -0,0 +1,8 @@
+'use strict'
+
+module.exports = async function (app) {
+ console.log('plugin loaded')
+ app.get('/', async function () {
+ return { message: 'Root Plugin' }
+ })
+}
diff --git a/packages/db/test/fixtures/simple.json b/packages/db/test/fixtures/simple.json
new file mode 100644
index 0000000000..ec87b960a6
--- /dev/null
+++ b/packages/db/test/fixtures/simple.json
@@ -0,0 +1,17 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ },
+ "migrations": {
+ "dir": "./migrations",
+ "table": "versions",
+ "autoApply": false
+ }
+}
diff --git a/packages/db/test/fixtures/sqlite/ignore.json b/packages/db/test/fixtures/sqlite/ignore.json
new file mode 100644
index 0000000000..6a2b722991
--- /dev/null
+++ b/packages/db/test/fixtures/sqlite/ignore.json
@@ -0,0 +1,18 @@
+{
+ "core": {
+ "connectionString": "sqlite://./db",
+ "ignore": {
+ "versions": true
+ }
+ },
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 3042,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "migrations": {
+ "dir": "./migrations"
+ }
+}
diff --git a/packages/db/test/fixtures/sqlite/migrations/001.do.sql b/packages/db/test/fixtures/sqlite/migrations/001.do.sql
new file mode 100644
index 0000000000..0c81f656df
--- /dev/null
+++ b/packages/db/test/fixtures/sqlite/migrations/001.do.sql
@@ -0,0 +1,4 @@
+CREATE TABLE graphs (
+ id INTEGER PRIMARY KEY,
+ name TEXT
+);
diff --git a/packages/db/test/fixtures/sqlite/migrations/001.undo.sql b/packages/db/test/fixtures/sqlite/migrations/001.undo.sql
new file mode 100644
index 0000000000..df301dc819
--- /dev/null
+++ b/packages/db/test/fixtures/sqlite/migrations/001.undo.sql
@@ -0,0 +1 @@
+DROP TABLE graphs;
diff --git a/packages/db/test/fixtures/sqlite/no-table.json b/packages/db/test/fixtures/sqlite/no-table.json
new file mode 100644
index 0000000000..45fcdada06
--- /dev/null
+++ b/packages/db/test/fixtures/sqlite/no-table.json
@@ -0,0 +1,15 @@
+{
+ "core": {
+ "connectionString": "sqlite://./db"
+ },
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 3042,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "migrations": {
+ "dir": "./migrations"
+ }
+}
diff --git a/packages/db/test/fixtures/sqlite/platformatic.db.json b/packages/db/test/fixtures/sqlite/platformatic.db.json
new file mode 100644
index 0000000000..03f394027f
--- /dev/null
+++ b/packages/db/test/fixtures/sqlite/platformatic.db.json
@@ -0,0 +1,16 @@
+{
+ "core": {
+ "connectionString": "sqlite://./db"
+ },
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "migrations": {
+ "dir": "./migrations",
+ "table": "versions"
+ }
+}
diff --git a/packages/db/test/fixtures/sqlite/seed.js b/packages/db/test/fixtures/sqlite/seed.js
new file mode 100644
index 0000000000..177759c116
--- /dev/null
+++ b/packages/db/test/fixtures/sqlite/seed.js
@@ -0,0 +1,8 @@
+'use strict'
+
+module.exports = async function ({ entities, db, sql }) {
+ await entities.graph.save({ input: { name: 'Hello' } })
+ await db.query(sql`
+ INSERT INTO graphs (name) VALUES ('Hello 2');
+ `)
+}
diff --git a/packages/db/test/fixtures/undici-plugin.js b/packages/db/test/fixtures/undici-plugin.js
new file mode 100644
index 0000000000..50e3a82880
--- /dev/null
+++ b/packages/db/test/fixtures/undici-plugin.js
@@ -0,0 +1,15 @@
+'use strict'
+
+const undici = require('undici')
+
+module.exports = async function (app) {
+ app.get('/request', async function () {
+ try {
+ const res = await undici.request('http://localhost:42')
+ return await res.body.json()
+ } catch (err) {
+ console.log(err)
+ throw err
+ }
+ })
+}
diff --git a/packages/db/test/fixtures/validate-migrations-checksums.json b/packages/db/test/fixtures/validate-migrations-checksums.json
new file mode 100644
index 0000000000..4cbcfbeab9
--- /dev/null
+++ b/packages/db/test/fixtures/validate-migrations-checksums.json
@@ -0,0 +1,17 @@
+{
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 0,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "core": {
+ "connectionString": "postgres://postgres:postgres@127.0.0.1/postgres"
+ },
+ "migrations": {
+ "dir": "./migrations",
+ "table": "versions",
+ "validateChecksums": true
+ }
+}
diff --git a/packages/db/test/fixtures/watch/platformatic.db.json b/packages/db/test/fixtures/watch/platformatic.db.json
new file mode 100644
index 0000000000..587358051d
--- /dev/null
+++ b/packages/db/test/fixtures/watch/platformatic.db.json
@@ -0,0 +1,16 @@
+{
+ "core": {
+ "connectionString": "sqlite://./db"
+ },
+ "server": {
+ "hostname": "127.0.0.1",
+ "port": 3042,
+ "logger": {
+ "level": "info"
+ }
+ },
+ "migrations": {
+ "dir": "./migrations",
+ "table": "versions"
+ }
+}
diff --git a/packages/db/test/healthcheck.test.js b/packages/db/test/healthcheck.test.js
new file mode 100644
index 0000000000..b1d7ea8f00
--- /dev/null
+++ b/packages/db/test/healthcheck.test.js
@@ -0,0 +1,107 @@
+'use strict'
+
+const { test } = require('tap')
+const { buildServer } = require('..')
+const { buildConfig, connInfo } = require('./helper')
+const { request } = require('undici')
+
+test('healthcheck route enabled with interval', async ({ teardown, equal, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0,
+ healthCheck: {
+ enabled: true,
+ interval: 2000
+ }
+ },
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret'
+ }
+ }))
+ teardown(server.stop)
+
+ await server.listen()
+ {
+ const res = await (request(`${server.url}/status`))
+ equal(res.statusCode, 200)
+ const body = await res.body.json()
+ same(body, { status: 'ok' })
+ }
+
+ {
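+ // once the database connection is disposed, the healthcheck should report 503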
+ await server.app.platformatic.db.dispose()
+ const res = await (request(`${server.url}/status`))
+ equal(res.statusCode, 503)
+ const body = await res.body.json()
+ same(body, {
+ statusCode: 503,
+ code: 'FST_UNDER_PRESSURE',
+ error: 'Service Unavailable',
+ message: 'Service Unavailable'
+ })
+ }
+})
+
+test('healthcheck route enabled without interval', async ({ teardown, equal, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0,
+ healthCheck: {
+ enabled: true
+ }
+ },
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret'
+ }
+ }))
+ teardown(server.stop)
+
+ await server.listen()
+ {
+ const res = await (request(`${server.url}/status`))
+ equal(res.statusCode, 200)
+ const body = await res.body.json()
+ same(body, { status: 'ok' })
+ }
+
+ {
+ await server.app.platformatic.db.dispose()
+ const res = await (request(`${server.url}/status`))
+ equal(res.statusCode, 503)
+ const body = await res.body.json()
+ same(body, {
+ statusCode: 503,
+ code: 'FST_UNDER_PRESSURE',
+ error: 'Service Unavailable',
+ message: 'Service Unavailable'
+ })
+ }
+})
+
+test('healthcheck route disabled', async ({ teardown, equal, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret'
+ }
+ }))
+ teardown(server.stop)
+
+ await server.listen()
+ const res = await (request(`${server.url}/status`))
+ equal(res.statusCode, 404)
+})
diff --git a/packages/db/test/helper.js b/packages/db/test/helper.js
new file mode 100644
index 0000000000..59a35e64cb
--- /dev/null
+++ b/packages/db/test/helper.js
@@ -0,0 +1,105 @@
+'use strict'
+
+const why = require('why-is-node-running')
+const { Agent, setGlobalDispatcher } = require('undici')
+
+// This file must be required/imported as the first file
+// in the test suite. It sets up the global environment
+// to track the open handles via why-is-node-running.
+setInterval(() => {
+ why()
+}, 20000).unref()
+
+const agent = new Agent({
+ keepAliveTimeout: 10,
+ keepAliveMaxTimeout: 10,
+ tls: {
+ rejectUnauthorized: false
+ }
+})
+setGlobalDispatcher(agent)
+
+// Needed to work with dates & postgresql
+// See https://node-postgres.com/features/types/
+process.env.TZ = 'UTC'
+
+const connInfo = {}
+
+if (!process.env.DB || process.env.DB === 'postgresql') {
+ connInfo.connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ module.exports.isPg = true
+} else if (process.env.DB === 'mariadb') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1:3307/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'mysql') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'mysql8') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1:3308/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'sqlite') {
+ connInfo.connectionString = 'sqlite://:memory:'
+ module.exports.isSQLite = true
+}
+
+module.exports.connInfo = connInfo
+
+module.exports.clear = async function (db, sql) {
+ await db.query(sql`DROP TABLE IF EXISTS versions;`)
+ await db.query(sql`DROP TABLE IF EXISTS graphs;`)
+ await db.query(sql`DROP TABLE IF EXISTS users;`)
+ await db.query(sql`DROP TABLE IF EXISTS pages;`)
+ await db.query(sql`DROP TABLE IF EXISTS posts;`)
+ await db.query(sql`DROP TABLE IF EXISTS owners;`)
+}
+
+async function createBasicPages (db, sql) {
+ if (module.exports.isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+}
+module.exports.createBasicPages = createBasicPages
+
+async function createAndPopulateUsersTable (db, sql) {
+ await db.query(sql`
+ CREATE TABLE users (
+ "id" int4 NOT NULL PRIMARY KEY,
+ "name" varchar(10),
+ "age" numeric
+ );
+ `)
+ await db.query(sql`
+ INSERT INTO users ("id", "name", "age") VALUES
+ (1, 'Leonardo', 40),
+ (2, 'Matteo', 37);
+ `)
+}
+module.exports.createAndPopulateUsersTable = createAndPopulateUsersTable
+
+async function dropUsersTable (db, sql) {
+ await db.query(sql`DROP TABLE IF EXISTS users;`)
+}
+module.exports.dropUsersTable = dropUsersTable
+
+function buildConfig (options) {
+ const base = {
+ server: {},
+ core: {},
+ dashboard: {}
+ }
+
+ return Object.assign(base, options)
+}
+
+module.exports.buildConfig = buildConfig
diff --git a/packages/db/test/helper.test.js b/packages/db/test/helper.test.js
new file mode 100644
index 0000000000..2e6ac10e07
--- /dev/null
+++ b/packages/db/test/helper.test.js
@@ -0,0 +1,15 @@
+'use strict'
+
+const { test } = require('tap')
+const { isKeyEnabledInConfig } = require('../lib/helper')
+
+test('config key is enabled', ({ equal, plan }) => {
+ plan(6)
+ equal(isKeyEnabledInConfig('foo', { bar: 'baz' }), false) // key is undefined
+ equal(isKeyEnabledInConfig('foo', { foo: false }), false)
+
+ equal(isKeyEnabledInConfig('foo', { foo: 'baz' }), true)
+ equal(isKeyEnabledInConfig('foo', { foo: {} }), true)
+ equal(isKeyEnabledInConfig('foo', { foo: { bar: 'baz' } }), true)
+ equal(isKeyEnabledInConfig('foo', { foo: true }), true)
+})
diff --git a/packages/db/test/load-and-reload-files.test.js b/packages/db/test/load-and-reload-files.test.js
new file mode 100644
index 0000000000..4ea3778291
--- /dev/null
+++ b/packages/db/test/load-and-reload-files.test.js
@@ -0,0 +1,495 @@
+'use strict'
+
+const { buildConfig, connInfo, clear, createBasicPages } = require('./helper')
+const { test } = require('tap')
+const { buildServer } = require('..')
+const { request, setGlobalDispatcher, getGlobalDispatcher, MockAgent } = require('undici')
+const { join } = require('path')
+const os = require('os')
+const { writeFile } = require('fs/promises')
+
+test('load and reload', async ({ teardown, equal, pass, same }) => {
+ const file = join(os.tmpdir(), `some-plugin-${process.pid}.js`)
+
+ await writeFile(file, `
+ module.exports = async function (app) {
+ }`
+ )
+
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ plugin: {
+ path: file,
+ stopTimeout: 1000
+ },
+ core: {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ add(x: 2, y: 2)
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 400, 'add status code')
+ same(await res.body.json(), {
+ data: null,
+ errors: [{
+ message: 'Cannot query field "add" on type "Query".',
+ locations: [{
+ line: 3,
+ column: 13
+ }]
+ }]
+ }, 'add response')
+ }
+
+ await writeFile(file, `
+ module.exports = async function (app) {
+ app.graphql.extendSchema(\`
+ extend type Query {
+ add(x: Int, y: Int): Int
+ }
+ \`)
+ app.graphql.defineResolvers({
+ Query: {
+ add: async (_, { x, y }) => x + y
+ }
+ })
+ }`)
+
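+ // restarting the server reloads the plugin from disk, picking up the new resolver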
+ await server.restart()
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ add(x: 2, y: 2)
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'add status code')
+ same(await res.body.json(), {
+ data: {
+ add: 4
+ }
+ }, 'add response')
+ }
+})
+
+test('error', async ({ teardown, equal, pass, same }) => {
+ const file = join(os.tmpdir(), `some-plugin-${process.pid}.js`)
+
+ await writeFile(file, `
+ module.exports = async function (app) {
+ app.graphql.extendSchema(\`
+ extend type Query {
+ add(x: Int, y: Int): Int
+ }
+ \`)
+ app.graphql.defineResolvers({
+ Query: {
+ add: async (_, { x, y }) => { throw new Error('kaboom') }
+ }
+ })
+ }`)
+
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ plugin: {
+ path: file,
+ stopTimeout: 500
+ },
+ core: {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ add(x: 2, y: 2)
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'add status code')
+ same(await res.body.json(), {
+ data: {
+ add: null
+ },
+ errors: [{
+ message: 'kaboom',
+ locations: [{
+ line: 3,
+ column: 11
+ }],
+ path: ['add']
+ }]
+ }, 'add response')
+})
+
+test('update config', async ({ teardown, equal, pass, same }) => {
+ const file = join(os.tmpdir(), `some-plugin-${process.pid}.js`)
+ const core = {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ }
+
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core
+ }))
+ teardown(server.stop)
+ await server.listen()
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ add(x: 2, y: 2)
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 400, 'add status code')
+ same(await res.body.json(), {
+ data: null,
+ errors: [{
+ message: 'Cannot query field "add" on type "Query".',
+ locations: [{
+ line: 3,
+ column: 13
+ }]
+ }]
+ }, 'add response')
+ }
+
+ await writeFile(file, `
+ module.exports = async function (app) {
+ app.graphql.extendSchema(\`
+ extend type Query {
+ add(x: Int, y: Int): Int
+ }
+ \`)
+ app.graphql.defineResolvers({
+ Query: {
+ add: async (_, { x, y }) => x + y
+ }
+ })
+ }`)
+
+ await server.restart({
+ core,
+ plugin: {
+ path: file,
+ stopTimeout: 500
+ },
+ authorization: {}
+ })
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ add(x: 2, y: 2)
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'add status code')
+ same(await res.body.json(), {
+ data: {
+ add: 4
+ }
+ }, 'add response')
+ }
+})
+
+test('mock undici is supported', async ({ teardown, equal, pass, same }) => {
+ const previousAgent = getGlobalDispatcher()
+ teardown(() => setGlobalDispatcher(previousAgent))
+
+ const mockAgent = new MockAgent({
+ keepAliveTimeout: 10,
+ keepAliveMaxTimeout: 10
+ })
+ setGlobalDispatcher(mockAgent)
+
+ const mockPool = mockAgent.get('http://localhost:42')
+
+ // intercept the request
+ mockPool.intercept({
+ path: '/',
+ method: 'GET'
+ }).reply(200, {
+ hello: 'world'
+ })
+
+ const core = {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ }
+
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core,
+ plugin: {
+ path: join(__dirname, 'fixtures', 'undici-plugin.js')
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+
+ const res = await request(`${server.url}/request`, {
+ method: 'GET'
+ })
+ equal(res.statusCode, 200)
+ same(await res.body.json(), {
+ hello: 'world'
+ })
+})
+
+test('load and reload with the fallback', async ({ teardown, equal, pass, same }) => {
+ const file = join(os.tmpdir(), `some-plugin-${process.pid}.js`)
+
+ await writeFile(file, `
+ module.exports = async function (app) {
+ }`
+ )
+
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ plugin: {
+ path: file,
+ stopTimeout: 1000,
+ fallback: true
+ },
+ core: {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ add(x: 2, y: 2)
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 400, 'add status code')
+ same(await res.body.json(), {
+ data: null,
+ errors: [{
+ message: 'Cannot query field "add" on type "Query".',
+ locations: [{
+ line: 3,
+ column: 13
+ }]
+ }]
+ }, 'add response')
+ }
+
+ await writeFile(file, `
+ module.exports = async function (app) {
+ app.graphql.extendSchema(\`
+ extend type Query {
+ add(x: Int, y: Int): Int
+ }
+ \`)
+ app.graphql.defineResolvers({
+ Query: {
+ add: async (_, { x, y }) => x + y
+ }
+ })
+ }`)
+
+ await server.restart()
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ add(x: 2, y: 2)
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'add status code')
+ same(await res.body.json(), {
+ data: {
+ add: 4
+ }
+ }, 'add response')
+ }
+})
+
+test('load and reload ESM', async ({ teardown, equal, pass, same }) => {
+ const file = join(os.tmpdir(), `some-plugin-${process.pid}.mjs`)
+
+ await writeFile(file, `
+ export default async function (app) {
+ }`
+ )
+
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ plugin: {
+ path: file,
+ stopTimeout: 1000
+ },
+ core: {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ add(x: 2, y: 2)
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 400, 'add status code')
+ same(await res.body.json(), {
+ data: null,
+ errors: [{
+ message: 'Cannot query field "add" on type "Query".',
+ locations: [{
+ line: 3,
+ column: 13
+ }]
+ }]
+ }, 'add response')
+ }
+
+ await writeFile(file, `
+ export default async function (app) {
+ app.graphql.extendSchema(\`
+ extend type Query {
+ add(x: Int, y: Int): Int
+ }
+ \`)
+ app.graphql.defineResolvers({
+ Query: {
+ add: async (_, { x, y }) => x + y
+ }
+ })
+ }`)
+
+ await server.restart()
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ add(x: 2, y: 2)
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'add status code')
+ same(await res.body.json(), {
+ data: {
+ add: 4
+ }
+ }, 'add response')
+ }
+})
diff --git a/packages/db/test/metrics.test.js b/packages/db/test/metrics.test.js
new file mode 100644
index 0000000000..d6dadaec5c
--- /dev/null
+++ b/packages/db/test/metrics.test.js
@@ -0,0 +1,178 @@
+'use strict'
+
+const { test } = require('tap')
+const { buildServer } = require('..')
+const { buildConfig, connInfo } = require('./helper')
+const { request } = require('undici')
+
+test('has /metrics endpoint on default prometheus port', async ({ teardown, equal, fail, match }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ metrics: true,
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret'
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
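+ // "metrics: true" exposes the Prometheus endpoint on its default port (9090)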
+ const res = await (request('http://127.0.0.1:9090/metrics'))
+ equal(res.statusCode, 200)
+ match(res.headers['content-type'], /^text\/plain/)
+ const body = await res.body.text()
+ try {
+ testPrometheusOutput(body)
+ } catch (err) {
+ fail()
+ }
+})
+
+test('has /metrics endpoint on configured port', async ({ teardown, equal, fail, match }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ metrics: {
+ port: 9999
+ },
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret'
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+ const res = await (request('http://127.0.0.1:9999/metrics'))
+ equal(res.statusCode, 200)
+ match(res.headers['content-type'], /^text\/plain/)
+ const body = await res.body.text()
+ try {
+ testPrometheusOutput(body)
+ } catch (err) {
+ fail()
+ }
+})
+
+test('support basic auth', async ({ teardown, equal, fail, match }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ metrics: {
+ auth: {
+ username: 'foo',
+ password: 'bar'
+ }
+ },
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret'
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+ {
+ const res = await (request('http://127.0.0.1:9090/metrics'))
+ equal(res.statusCode, 401)
+ match(res.headers['content-type'], /^application\/json/)
+ }
+
+ {
+ // wrong credentials
+ const res = await (request('http://127.0.0.1:9090/metrics', {
+ headers: {
+ authorization: `Basic ${Buffer.from('bar:foo').toString('base64')}`
+ }
+ }))
+ equal(res.statusCode, 401)
+ match(res.headers['content-type'], /^application\/json/)
+ }
+
+ {
+ const res = await (request('http://127.0.0.1:9090/metrics', {
+ headers: {
+ authorization: `Basic ${Buffer.from('foo:bar').toString('base64')}`
+ }
+ }))
+ equal(res.statusCode, 200)
+ match(res.headers['content-type'], /^text\/plain/)
+ const body = await res.body.text()
+ try {
+ testPrometheusOutput(body)
+ } catch (err) {
+ fail()
+ }
+ }
+})
+
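+// Checks the Prometheus text format: blank-line-separated blocks that start
+// with "# HELP" and "# TYPE" lines, followed by "name value" samples.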
+function testPrometheusOutput (output) {
+ let metricBlock = []
+ const lines = output.split('\n')
+ for (let i = 0; i < lines.length; i++) {
+ const line = lines[i]
+ if (line === '') {
+ // check this metric set
+ checkMetricBlock(metricBlock)
+ metricBlock = []
+ } else {
+ metricBlock.push(line)
+ }
+ }
+}
+
+function checkMetricBlock (metricBlock) {
+ if (!metricBlock[0].match(/^# HELP/)) {
+ throw new Error('First line should be HELP')
+ }
+
+ if (!metricBlock[1].match(/^# TYPE/)) {
+ throw new Error('Second line should be TYPE')
+ }
+ for (let i = 2; i < metricBlock.length; i++) {
+ const split = metricBlock[i].split(' ')
+ if (split.length !== 2) {
+ throw new Error(`Bad format for metric: ${metricBlock[i]}`)
+ }
+ }
+ return true
+}
+
+test('do not error on restart', async ({ teardown, equal, fail, match }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ metrics: true,
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret'
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+ await server.restart()
+
+ const res = await (request('http://127.0.0.1:9090/metrics'))
+ equal(res.statusCode, 200)
+ match(res.headers['content-type'], /^text\/plain/)
+ const body = await res.body.text()
+ try {
+ testPrometheusOutput(body)
+ } catch (err) {
+ fail()
+ }
+})
diff --git a/packages/db/test/migrate/helper.mjs b/packages/db/test/migrate/helper.mjs
new file mode 100644
index 0000000000..f2f0e615c4
--- /dev/null
+++ b/packages/db/test/migrate/helper.mjs
@@ -0,0 +1,71 @@
+import { join } from 'desm'
+import createConnectionPool from '@databases/pg'
+import { cleanSQLite } from '../cli/helper.mjs'
+
+const cliPath = join(import.meta.url, '..', '..', 'lib', 'migrate.mjs')
+
+async function connectAndResetDB () {
+ // TODO support other databases
+ const db = await createConnectionPool({
+ connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres',
+ bigIntMode: 'string',
+ max: 1
+ })
+
+ try {
+ await db.query(db.sql`DROP TABLE pages`)
+ } catch (err) {
+ }
+
+ try {
+ await db.query(db.sql`DROP TABLE graphs`)
+ } catch (err) {
+ }
+
+ try {
+ await db.query(db.sql`DROP TABLE versions`)
+ } catch (err) {
+ }
+
+ try {
+ await db.query(db.sql`DROP TABLE categories`)
+ } catch {
+ }
+
+ try {
+ await db.query(db.sql`DROP TABLE posts`)
+ } catch {
+ }
+
+ try {
+ await db.query(db.sql`DROP TABLE simple_types`)
+ } catch {
+ }
+
+ try {
+ await db.query(db.sql`DROP TABLE owners`)
+ } catch {
+ }
+
+ return db
+}
+
+function removeFileProtocol (str) {
+ return str.replace('file:', '')
+}
+
+function getFixturesConfigFileLocation (filename, subdirectories = []) {
+ return removeFileProtocol(join(import.meta.url, '..', '..', 'fixtures', ...subdirectories, filename))
+}
+
+export {
+ cliPath,
+ cleanSQLite,
+ connectAndResetDB,
+ getFixturesConfigFileLocation
+}
diff --git a/packages/db/test/migrate/migrate.test.mjs b/packages/db/test/migrate/migrate.test.mjs
new file mode 100644
index 0000000000..af23f9dca9
--- /dev/null
+++ b/packages/db/test/migrate/migrate.test.mjs
@@ -0,0 +1,39 @@
+import { test } from 'tap'
+import { execa } from 'execa'
+import { cliPath, connectAndResetDB, getFixturesConfigFileLocation } from './helper.mjs'
+import stripAnsi from 'strip-ansi'
+
+test('migrate up', async ({ equal, match, teardown }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ const { stdout } = await execa('node', [cliPath, '-c', getFixturesConfigFileLocation('simple.json')])
+ const sanitized = stripAnsi(stdout)
+ match(sanitized, '001.do.sql')
+})
+
+test('migrate up & down', async ({ rejects, match, teardown }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ {
+ const { stdout } = await execa('node', [cliPath, '-c', getFixturesConfigFileLocation('simple.json')])
+ const sanitized = stripAnsi(stdout)
+ match(sanitized, '001.do.sql')
+ }
+
+ {
+ const { stdout } = await execa('node', [cliPath, '-c', getFixturesConfigFileLocation('simple.json'), '-t', '000'])
+ const sanitized = stripAnsi(stdout)
+ match(sanitized, '001.undo.sql')
+ }
+})
+
+test('ignore versions', async ({ equal, match, teardown }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ const { stdout } = await execa('node', [cliPath, '-c', getFixturesConfigFileLocation('simple.json')])
+ const sanitized = stripAnsi(stdout)
+ match(sanitized, '001.do.sql')
+})
diff --git a/packages/db/test/migrate/sqlite3.test.mjs b/packages/db/test/migrate/sqlite3.test.mjs
new file mode 100644
index 0000000000..56fa96da98
--- /dev/null
+++ b/packages/db/test/migrate/sqlite3.test.mjs
@@ -0,0 +1,121 @@
+import { test } from 'tap'
+import { fileURLToPath } from 'url'
+import path from 'path'
+import pino from 'pino'
+import split from 'split2'
+import sqlite from '@databases/sqlite'
+import { execute } from '../../lib/migrator.mjs'
+import ConfigManager from '../../lib/config.js'
+import { cleanSQLite } from './helper.mjs'
+
+function urlDirname (url) {
+ return path.dirname(fileURLToPath(url))
+}
+
+function join (url, ...str) {
+ return path.join(urlDirname(url), ...str)
+}
+
+const dbLocation = join(import.meta.url, '..', '..', 'fixtures', 'sqlite', 'db')
+const configFileLocation = join(import.meta.url, '..', '..', 'fixtures', 'sqlite', 'platformatic.db.json')
+
+test('migrate and start', async ({ comment, equal, match, teardown }) => {
+ const cm = new ConfigManager({
+ source: configFileLocation
+ })
+ await cm.parse()
+ const config = cm.current
+ await cleanSQLite(dbLocation)
+
+ const lines = []
+ const logger = pino(split((line) => {
+ lines.push(JSON.parse(line))
+ }))
+
+ await execute(logger, {}, config)
+
+ equal(lines.length, 1)
+ match(lines[0].msg, /001\.do\.sql/)
+})
+
+test('migrate twice', async ({ comment, equal, match, teardown }) => {
+ const cm = new ConfigManager({
+ source: configFileLocation
+ })
+ await cm.parse()
+ const config = cm.current
+
+ await cleanSQLite(dbLocation)
+
+ let lines = []
+ const logger = pino(split((line) => {
+ lines.push(JSON.parse(line))
+ }))
+
+ await execute(logger, { }, config)
+
+ equal(lines.length, 1)
+ match(lines[0].msg, /001\.do\.sql/)
+
+ lines = []
+
+ await execute(logger, { config: configFileLocation }, config)
+
+ equal(lines.length, 0)
+})
+
+test('apply defaults', async ({ comment, equal, match, teardown }) => {
+ const configFileLocation = join(import.meta.url, '..', 'fixtures', 'sqlite', 'no-table.json')
+ const dbLocation = join(import.meta.url, '..', 'fixtures', 'sqlite', 'db')
+ const cm = new ConfigManager({
+ source: configFileLocation
+ })
+ await cm.parse()
+ const config = cm.current
+ await cleanSQLite(dbLocation)
+
+ const lines = []
+ const logger = pino(split((line) => {
+ lines.push(JSON.parse(line))
+ }))
+
+ await execute(logger, {}, config)
+
+ equal(lines.length, 1)
+ match(lines[0].msg, /001\.do\.sql/)
+
+ const db = sqlite(dbLocation)
+
+ const tables = await db.query(sqlite.sql`
+ SELECT name FROM sqlite_master
+ WHERE type='table'
+ `)
+
+ match(tables, [{
+ name: 'versions'
+ }, {
+ name: 'graphs'
+ }])
+
+ await db.dispose()
+})
+
+test('Version table is ignored but migrations fail to run', async (t) => {
+ const dbLocation = join(import.meta.url, '..', 'fixtures', 'sqlite', 'db')
+ const configFileLocation = join(import.meta.url, '..', 'fixtures', 'sqlite', 'ignore.json')
+
+ await cleanSQLite(dbLocation)
+
+ const cm = new ConfigManager({
+ source: configFileLocation
+ })
+ await cm.parse()
+ const config = cm.current
+
+ const lines = []
+ const logger = pino(split((line) => {
+ lines.push(JSON.parse(line))
+ }))
+
+ await execute(logger, {}, config)
+})
diff --git a/packages/db/test/migrate/validations.test.mjs b/packages/db/test/migrate/validations.test.mjs
new file mode 100644
index 0000000000..bde3d4da03
--- /dev/null
+++ b/packages/db/test/migrate/validations.test.mjs
@@ -0,0 +1,30 @@
+import { test } from 'tap'
+import { execa } from 'execa'
+import { cliPath, connectAndResetDB, getFixturesConfigFileLocation } from './helper.mjs'
+import stripAnsi from 'strip-ansi'
+
+test('missing config', async (t) => {
+ await t.rejects(execa('node', [cliPath]))
+})
+
+test('missing connectionString', async (t) => {
+ await t.rejects(execa('node', [cliPath, '-c', getFixturesConfigFileLocation('no-connectionString.json')]))
+})
+
+test('missing migrations', async (t) => {
+ await t.rejects(execa('node', [cliPath, '-c', getFixturesConfigFileLocation('no-migrations.json')]))
+})
+
+test('missing migrations.dir', async (t) => {
+ await t.rejects(execa('node', [cliPath, '-c', getFixturesConfigFileLocation('no-migrations-dir.json')]))
+})
+
+test('not applied migrations', async ({ equal, same, match, teardown }) => {
+ const db = await connectAndResetDB()
+ teardown(() => db.dispose())
+
+ const { stdout } = await execa('node', [cliPath, '-c', getFixturesConfigFileLocation('bad-migrations.json')])
+ const sanitized = stripAnsi(stdout)
+ match(sanitized, '001.do.sql')
+ match(sanitized, 'error: syntax error at end of input')
+})
diff --git a/packages/db/test/routes.test.js b/packages/db/test/routes.test.js
new file mode 100644
index 0000000000..657dd2977f
--- /dev/null
+++ b/packages/db/test/routes.test.js
@@ -0,0 +1,110 @@
+'use strict'
+
+const { test } = require('tap')
+const { buildServer } = require('..')
+const { buildConfig, connInfo } = require('./helper')
+const { request } = require('undici')
+const { join } = require('path')
+
+test('should respond 200 on root endpoint', async ({ teardown, equal, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0,
+ healthCheck: {
+ enabled: true,
+ interval: 2000
+ }
+ },
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret'
+ },
+ dashboard: {
+ enabled: false
+ }
+ }))
+ teardown(server.stop)
+
+ await server.listen()
+ {
+ // No browser client (e.g. curl)
+ const res = await (request(`${server.url}/`))
+ equal(res.statusCode, 200)
+ const body = await res.body.json()
+ same(body, { message: 'Welcome to Platformatic! Please visit https://oss.platformatic.dev' })
+ }
+
+ {
+ // browser
+ const res = await (request(`${server.url}/`, {
+ headers: {
+ 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 Safari/537.36'
+ }
+ }))
+ equal(res.statusCode, 200)
+ equal(res.headers['content-type'], 'text/html; charset=UTF-8')
+ }
+})
+
+test('should not overwrite a plugin which defines a root endpoint', async ({ teardown, equal, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0,
+ healthCheck: {
+ enabled: true,
+ interval: 2000
+ }
+ },
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret'
+ },
+ dashboard: {
+ enabled: false
+ },
+ plugin: {
+ path: join(__dirname, 'fixtures', 'root-endpoint-plugin.js')
+ }
+ }))
+ teardown(server.stop)
+
+ await server.listen()
+ const res = await (request(`${server.url}/`))
+ equal(res.statusCode, 200)
+ const body = await res.body.json()
+ same(body, { message: 'Root Plugin' })
+})
+
+test('should not overwrite dashboard endpoint', async ({ teardown, equal, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0,
+ healthCheck: {
+ enabled: true,
+ interval: 2000
+ }
+ },
+ core: {
+ ...connInfo
+ },
+ authorization: {
+ adminSecret: 'secret'
+ },
+ dashboard: {
+ enabled: true
+ }
+ }))
+ teardown(server.stop)
+
+ await server.listen()
+ const res = await (request(`${server.url}/`))
+ equal(res.statusCode, 302)
+ equal(res.headers.location, '/dashboard')
+})
diff --git a/packages/db/test/start-and-stop.test.js b/packages/db/test/start-and-stop.test.js
new file mode 100644
index 0000000000..b3f076c2c9
--- /dev/null
+++ b/packages/db/test/start-and-stop.test.js
@@ -0,0 +1,369 @@
+'use strict'
+
+const { connInfo, clear, createBasicPages, createAndPopulateUsersTable, dropUsersTable, buildConfig } = require('./helper')
+const whyIsNodeRuninng = require('why-is-node-running')
+const { test } = require('tap')
+const { buildServer } = require('..')
+const { request } = require('undici')
+const { rm } = require('fs/promises')
+const path = require('path')
+
+test('starts the dashboard', async ({ teardown, equal, pass, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ ...connInfo
+ },
+ dashboard: {
+ enabled: true,
+ rootPath: true
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+ {
+ const res = await (request(`${server.url}/dashboard`))
+ equal(res.statusCode, 200, 'dashboard status code')
+ }
+})
+
+test('should not restart if not authorized', async ({ teardown, equal, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ ...connInfo
+ }
+ }))
+ teardown(server.stop)
+
+ await server.listen()
+ const res = await (request(`${server.url}/_admin/restart`, {
+ method: 'POST'
+ }))
+ equal(res.statusCode, 400)
+ same(await res.body.json(), {
+ statusCode: 400,
+ error: 'Bad Request',
+ message: 'headers must have required property \'x-platformatic-admin-secret\''
+ })
+})
+
+test('restarts the server', async ({ teardown, equal, pass, same, match }) => {
+ let dbHandler, sqlHandler
+ let started = false
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ if (!started) {
+ await dropUsersTable(db, sql)
+ }
+ started = true
+ dbHandler = db
+ sqlHandler = sql
+ }
+ },
+ authorization: {
+ adminSecret: 'secret',
+ rules: [{
+ role: 'platformatic-admin',
+ entity: 'user',
+ find: true
+ }]
+ }
+ }))
+ teardown(server.stop)
+
+ await server.listen()
+
+ {
+ // query users and get an error
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ 'X-PLATFORMATIC-ADMIN-SECRET': 'secret'
+ },
+ body: JSON.stringify({
+ query: `
+ query {
+ users {
+ name
+ age
+ }
+ }
+ `
+ })
+ })
+ const body = await res.body.json()
+ equal(res.statusCode, 400)
+ equal(body.errors.length, 1)
+ match(body.errors[0].message, 'Cannot query field "users" on type "Query".')
+ }
+
+ // Create users table
+ await createAndPopulateUsersTable(dbHandler, sqlHandler)
+
+ {
+ const res = await (request(`${server.url}/_admin/restart`, {
+ method: 'POST',
+ headers: {
+ 'x-platformatic-admin-secret': 'secret'
+ }
+ }))
+ equal(res.statusCode, 200, 'restart status code')
+ same(await res.body.json(), {
+ success: true
+ })
+ }
+
+ {
+ // query users and get data
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ 'X-PLATFORMATIC-ADMIN-SECRET': 'secret'
+ },
+ body: JSON.stringify({
+ query: `
+ query {
+ users {
+ name
+ age
+ }
+ }
+ `
+ })
+ })
+ const body = await res.body.json()
+ equal(res.statusCode, 200)
+ same(body, {
+ data: {
+ users: [{
+ name: 'Leonardo',
+ age: 40
+ }, {
+ name: 'Matteo',
+ age: 37
+ }]
+ }
+ })
+ }
+})
+
+test('starts, query and stop', async ({ teardown, equal, pass, same }) => {
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(await res.body.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(await res.body.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(await res.body.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await request(`${server.url}/graphql`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(await res.body.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'pages response')
+ }
+})
+
+test('inject', async ({ teardown, equal, pass, same }) => {
+ const { inject, stop } = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ }
+ }))
+ teardown(stop)
+
+ {
+ const res = await inject({
+ url: '/graphql',
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ })
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ }
+})
+
+test('ignore and sqlite3', async ({ teardown, equal, pass, same }) => {
+ const dbLocation = path.posix.join(__dirname, '..', 'fixtures', 'sqlite', 'db')
+ const migrations = path.posix.join(__dirname, '..', 'fixtures', 'sqlite', 'migrations')
+ try {
+ await rm(dbLocation)
+ } catch {
+ // ignore
+ }
+ const server = await buildServer(buildConfig({
+ server: {
+ hostname: '127.0.0.1',
+ port: 0
+ },
+ core: {
+ connectionString: `sqlite://${dbLocation}`
+ },
+ dashboard: {
+ enabled: true,
+ rootPath: true
+ },
+ migrations: {
+ dir: migrations
+ }
+ }))
+ teardown(server.stop)
+ await server.listen()
+ {
+ const res = await (request(`${server.url}/dashboard`))
+ equal(res.statusCode, 200, 'dashboard status code')
+ }
+})
+
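+// Periodically report what is keeping the Node.js process alive, to help debug hanging test runs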
+setInterval(() => {
+  whyIsNodeRunning()
+}, 5000).unref()
diff --git a/packages/db/test/utils.test.js b/packages/db/test/utils.test.js
new file mode 100644
index 0000000000..0127c709e6
--- /dev/null
+++ b/packages/db/test/utils.test.js
@@ -0,0 +1,50 @@
+'use strict'
+
+const { test } = require('tap')
+const { computeSQLiteIgnores } = require('../lib/utils')
+const os = require('os')
+const isWindows = os.platform() === 'win32'
+
+test('compute SQLite ignores (Unix)', { skip: isWindows }, ({ same, equal, plan }) => {
+ plan(3)
+ {
+ const dirOfConfig = '/config'
+    const sqliteFullPath = '/absolute/path/to/db.sqlite'
+ const result = computeSQLiteIgnores(sqliteFullPath, dirOfConfig)
+ same(result, [])
+ }
+ {
+ const dirOfConfig = '/config'
+ const sqliteFullPath = '/config/db.sqlite'
+ const result = computeSQLiteIgnores(sqliteFullPath, dirOfConfig)
+ same(result, ['db.sqlite', 'db.sqlite-journal'])
+ }
+ {
+ const dirOfConfig = '/config'
+ const sqliteFullPath = '/config/subdir/db.sqlite'
+ const result = computeSQLiteIgnores(sqliteFullPath, dirOfConfig)
+ same(result, ['subdir/db.sqlite', 'subdir/db.sqlite-journal'])
+ }
+})
+
+test('compute SQLite ignores (Windows)', { skip: !isWindows }, ({ same, equal, plan }) => {
+ plan(3)
+ {
+ const dirOfConfig = 'C:\\Users\\matteo\\platformatic'
+    const sqliteFullPath = 'C:\\absolute\\path\\to\\db.sqlite'
+ const result = computeSQLiteIgnores(sqliteFullPath, dirOfConfig)
+ same(result, [])
+ }
+ {
+ const dirOfConfig = 'C:\\Users\\matteo\\platformatic'
+ const sqliteFullPath = 'C:\\Users\\matteo\\platformatic\\db.sqlite'
+ const result = computeSQLiteIgnores(sqliteFullPath, dirOfConfig)
+ same(result, ['db.sqlite', 'db.sqlite-journal'])
+ }
+ {
+ const dirOfConfig = 'C:\\Users\\matteo\\platformatic'
+ const sqliteFullPath = 'C:\\Users\\matteo\\platformatic\\subdir\\db.sqlite'
+ const result = computeSQLiteIgnores(sqliteFullPath, dirOfConfig)
+ same(result, ['subdir\\db.sqlite', 'subdir\\db.sqlite-journal'])
+ }
+})
diff --git a/packages/sql-graphql/.taprc b/packages/sql-graphql/.taprc
new file mode 100644
index 0000000000..c1917e8701
--- /dev/null
+++ b/packages/sql-graphql/.taprc
@@ -0,0 +1 @@
+jobs: 1
diff --git a/packages/sql-graphql/LICENSE b/packages/sql-graphql/LICENSE
new file mode 100644
index 0000000000..261eeb9e9f
--- /dev/null
+++ b/packages/sql-graphql/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/sql-graphql/NOTICE b/packages/sql-graphql/NOTICE
new file mode 100644
index 0000000000..a7d8a8414a
--- /dev/null
+++ b/packages/sql-graphql/NOTICE
@@ -0,0 +1,13 @@
+ Copyright 2022 Platformatic
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/sql-graphql/README.md b/packages/sql-graphql/README.md
new file mode 100644
index 0000000000..6b9b2fcada
--- /dev/null
+++ b/packages/sql-graphql/README.md
@@ -0,0 +1,13 @@
+# @platformatic/sql-graphql
+
+Check out the full documentation on [our website](https://oss.platformatic.dev/docs/reference/sql-graphql/queries).
+
+## Install
+
+```sh
+npm install @platformatic/sql-graphql
+```
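+
+## Usage
+
+A minimal sketch of how the plugin is typically registered, assuming a Fastify app with `@platformatic/sql-mapper` already pointed at a database; the connection string and port below are illustrative:
+
+```js
+'use strict'
+
+const fastify = require('fastify')
+const sqlMapper = require('@platformatic/sql-mapper')
+const sqlGraphQL = require('@platformatic/sql-graphql')
+
+async function start () {
+  const app = fastify({ logger: true })
+
+  // sql-graphql reads the entities that sql-mapper attaches to `app.platformatic`,
+  // so the mapper must be registered first
+  app.register(sqlMapper, {
+    connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
+  })
+  app.register(sqlGraphQL)
+
+  await app.listen({ port: 3042 })
+}
+
+start()
+```
+
+With the server running, `POST /graphql` exposes a query and a mutation per mapped entity; for a `pages` table, the tests in this package exercise `getPageById` and `savePage`.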
+
+## License
+
+Apache 2.0
diff --git a/packages/sql-graphql/index.d.ts b/packages/sql-graphql/index.d.ts
new file mode 100644
index 0000000000..d00a200522
--- /dev/null
+++ b/packages/sql-graphql/index.d.ts
@@ -0,0 +1,40 @@
+import { FastifyPluginAsync } from 'fastify'
+import { GraphQLScalarType } from 'graphql'
+import { IResolverObject, IResolverOptions, MercuriusContext } from 'mercurius'
+
+type IEnumResolver = {
+ [key: string]: string | number | boolean
+}
+
+export interface IResolvers {
+ [key: string]:
+ | (() => any)
+ | IResolverObject
+ | IResolverOptions
+ | GraphQLScalarType
+ | IEnumResolver
+ | undefined
+}
+
+export interface SQLGraphQLPluginOptions {
+ /**
+ * If true, serves GraphiQL on /graphiql.
+ */
+ graphiql?: boolean,
+ autoTimestamp?: boolean,
+ /**
+   * Enables federation metadata support.
+ */
+ federationMetadata?: boolean,
+ /**
+   * Object with GraphQL resolver functions to merge into the generated schema.
+ */
+ resolvers?: IResolvers,
+  /**
+   * Additional GraphQL schema (SDL) appended to the generated schema.
+   */
+ schema?: string,
+}
+
+declare const plugin: FastifyPluginAsync
+export default plugin
diff --git a/packages/sql-graphql/index.js b/packages/sql-graphql/index.js
new file mode 100644
index 0000000000..100d2280d0
--- /dev/null
+++ b/packages/sql-graphql/index.js
@@ -0,0 +1,117 @@
+'use strict'
+
+const fp = require('fastify-plugin')
+const constructGraph = require('./lib/entity-to-type')
+const mercurius = require('mercurius')
+const graphql = require('graphql')
+const establishRelations = require('./lib/relationship')
+const scalars = require('graphql-scalars')
+
+async function mapperToGraphql (app, opts) {
+ const mapper = app.platformatic
+ const autoTimestamp = opts.autoTimestamp
+ const queryTopFields = {}
+ const mutationTopFields = {}
+ const resolvers = {}
+ const loaders = {}
+ const federationReplacements = []
+ const relations = []
+
+ const graphOpts = {
+ queryTopFields,
+ mutationTopFields,
+ resolvers,
+ loaders,
+ federationReplacements,
+ federationMetadata: opts.federationMetadata,
+ autoTimestamp
+ }
+
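+  // Build one GraphQL object type per mapped entity, collecting its relations along the way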
+ const metaMap = new Map()
+ for (const entity of Object.values(mapper.entities)) {
+ relations.push(...entity.relations)
+ const meta = constructGraph(app, entity, graphOpts)
+ metaMap.set(entity, meta)
+ }
+
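+  // Wire the collected relations into nested fields, loaders and resolvers before user overrides are applied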
+ establishRelations(app, relations, resolvers, loaders, queryTopFields, opts.resolvers || {}, metaMap)
+
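+  // Merge user-supplied resolvers: a value of false removes the generated field, anything else overrides it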
+ if (opts.resolvers) {
+ for (const key of Object.keys(opts.resolvers)) {
+ if (!resolvers[key]) {
+ resolvers[key] = {}
+ }
+
+ const type = opts.resolvers[key]
+ for (const resolver of Object.keys(type)) {
+ if (type[resolver] === false) {
+ if (resolvers[key][resolver]) {
+ delete resolvers[key][resolver]
+ }
+ if (loaders[key]) {
+ delete loaders[key][resolver]
+ }
+ /* istanbul ignore else */
+ if (key === 'Mutation') {
+ delete mutationTopFields[resolver]
+ } else if (key === 'Query') {
+ delete queryTopFields[resolver]
+ }
+ } else {
+ resolvers[key][resolver] = type[resolver]
+ }
+ }
+ }
+ }
+
+ const query = new graphql.GraphQLObjectType({
+ name: 'Query',
+ fields: queryTopFields
+ })
+
+ const mutation = Object.keys(mutationTopFields).length > 0
+ ? new graphql.GraphQLObjectType({
+ name: 'Mutation',
+ fields: mutationTopFields
+ })
+ : null
+
+ if (!mutation) {
+ delete resolvers.Mutation
+ }
+
+ let sdl = graphql.printSchema(new graphql.GraphQLSchema({ query, mutation }))
+
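+  // Rewrite the printed SDL with federation directives (@key on entities, @extends on Query)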
+ if (opts.federationMetadata) {
+ for (const replacement of federationReplacements) {
+ sdl = sdl.replace(replacement.find, replacement.replace)
+ }
+ sdl = sdl.replace('type Query', 'type Query @extends')
+ }
+
+ if (opts.schema) {
+ sdl += '\n'
+ sdl += opts.schema
+ }
+
+  // Ignoring coverage here because SQLite doesn't support dates
+ /* istanbul ignore next */
+ if (sdl.match(/scalar Date\n/)) {
+ resolvers.Date = scalars.GraphQLDate
+ }
+ if (sdl.indexOf('scalar DateTime') >= 0) {
+ resolvers.DateTime = scalars.GraphQLDateTime
+ }
+
+ opts.graphiql = opts.graphiql !== false
+ await app.register(mercurius, {
+ ...opts,
+ schema: sdl,
+ loaders,
+ resolvers
+ })
+
+ app.log.debug({ schema: sdl }, 'computed schema')
+}
+
+module.exports = fp(mapperToGraphql)
diff --git a/packages/sql-graphql/lib/entity-to-type.js b/packages/sql-graphql/lib/entity-to-type.js
new file mode 100644
index 0000000000..206d10fe57
--- /dev/null
+++ b/packages/sql-graphql/lib/entity-to-type.js
@@ -0,0 +1,248 @@
+'use strict'
+
+const graphql = require('graphql')
+const camelcase = require('camelcase')
+const {
+ sqlTypeToGraphQL,
+ fromSelectionSet
+} = require('./utils')
+
+const ascDesc = new graphql.GraphQLEnumType({
+ name: 'OrderByDirection',
+ values: {
+ ASC: { value: 'ASC' },
+ DESC: { value: 'DESC' }
+ }
+})
+
+function constructGraph (app, entity, opts) {
+ const primaryKey = entity.primaryKey
+
+ const entityName = entity.name
+ const singular = entity.singularName
+ const plural = entity.pluralName
+
+ const {
+ queryTopFields,
+ mutationTopFields,
+ resolvers,
+ federationReplacements,
+ federationMetadata,
+ loaders
+ } = opts
+
+ const fields = {}
+
+ for (const key of Object.keys(entity.fields)) {
+ const field = entity.fields[key]
+ const meta = { field }
+ meta.type = sqlTypeToGraphQL(field.sqlType)
+ if (field.primaryKey) {
+ meta.primaryKeyType = field.type
+ meta.type = graphql.GraphQLID
+ } else if (field.foreignKey) {
+ meta.type = graphql.GraphQLID
+ }
+ fields[field.camelcase] = meta
+ }
+
+ const type = new graphql.GraphQLObjectType({
+ name: entityName,
+ fields
+ })
+
+ resolvers.Query = resolvers.Query || {}
+ resolvers.Mutation = resolvers.Mutation || {}
+ loaders.Query = loaders.Query || {}
+
+ const getBy = camelcase(['get', singular, 'by', primaryKey])
+
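+  // Build the <Entity>WhereArguments input type: one operator object (eq, neq, gt, gte, lt, lte, in, nin) per field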
+ const whereArgType = new graphql.GraphQLInputObjectType({
+ name: `${entityName}WhereArguments`,
+ fields: Object.keys(fields).reduce((acc, field) => {
+ acc[field] = {
+ type: new graphql.GraphQLInputObjectType({
+ name: `${entityName}WhereArguments${field}`,
+ fields: {
+ eq: { type: fields[field].type },
+ neq: { type: fields[field].type },
+ gt: { type: fields[field].type },
+ gte: { type: fields[field].type },
+ lt: { type: fields[field].type },
+ lte: { type: fields[field].type },
+ in: { type: new graphql.GraphQLList(fields[field].type) },
+ nin: { type: new graphql.GraphQLList(fields[field].type) }
+ }
+ })
+ }
+ return acc
+ }, {})
+ })
+
+ queryTopFields[getBy] = {
+ type,
+ args: {
+ [primaryKey]: { type: new graphql.GraphQLNonNull(fields[primaryKey].type) }
+ }
+ }
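+  // Resolve the get-by-primary-key query through a loader so concurrent lookups are batched into one find()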
+ loaders.Query[getBy] = {
+ loader (queries, ctx) {
+ const keys = []
+ for (const query of queries) {
+ keys.push(query.params[primaryKey])
+ }
+ return loadMany(keys, queries, ctx)
+ },
+ opts: {
+ cache: false
+ }
+ }
+
+ const orderByFields = new graphql.GraphQLEnumType({
+ name: `${entityName}OrderByField`,
+ values: Object.keys(fields).reduce((acc, field) => {
+ acc[field] = {
+ value: field
+ }
+ return acc
+ }, {})
+ })
+ queryTopFields[plural] = {
+ type: new graphql.GraphQLList(type),
+ args: {
+ limit: { type: graphql.GraphQLInt },
+ offset: { type: graphql.GraphQLInt },
+ orderBy: {
+ type: new graphql.GraphQLList(new graphql.GraphQLInputObjectType({
+ name: `${entityName}OrderByArguments`,
+ fields: {
+ field: { type: orderByFields },
+ direction: { type: new graphql.GraphQLNonNull(ascDesc) }
+ }
+ }))
+ },
+ where: { type: whereArgType }
+ }
+ }
+
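+  // List query: select only the requested columns, always including the primary key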
+ resolvers.Query[plural] = (_, query, ctx, info) => {
+ const requestedFields = info.fieldNodes[0].selectionSet.selections.map((s) => s.name.value)
+ requestedFields.push(primaryKey)
+ return entity.find({ ...query, fields: requestedFields, ctx })
+ }
+
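+  // Input type for save/insert mutations; auto-timestamp columns are excluded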
+ const inputType = new graphql.GraphQLInputObjectType({
+ name: `${entityName}Input`,
+ fields: Object.keys(fields).reduce((acc, field) => {
+ const meta = fields[field]
+ const actualField = meta.field
+ if (!actualField.autoTimestamp) {
+ acc[field] = meta
+ }
+ return acc
+ }, {})
+ })
+
+ const save = camelcase(['save', singular])
+
+ mutationTopFields[save] = {
+ type,
+ args: {
+ input: { type: new graphql.GraphQLNonNull(inputType) }
+ }
+ }
+
+ resolvers.Mutation[save] = async (_, { input }, ctx, info) => {
+ const fields = fromSelectionSet(info.fieldNodes[0].selectionSet)
+ return entity.save({ input, ctx, fields: [...fields] })
+ }
+
+ const insert = camelcase(['insert', plural])
+
+ mutationTopFields[insert] = {
+ type: new graphql.GraphQLList(type),
+ args: {
+ inputs: { type: new graphql.GraphQLNonNull(new graphql.GraphQLList(inputType)) }
+ }
+ }
+
+ resolvers.Mutation[insert] = (_, { inputs }, ctx, info) => {
+ const fields = fromSelectionSet(info.fieldNodes[0].selectionSet)
+ return entity.insert({ inputs, ctx, fields: [...fields] })
+ }
+
+ const deleteKey = camelcase(['delete', plural])
+ mutationTopFields[deleteKey] = {
+ type: new graphql.GraphQLList(type),
+ args: {
+ where: { type: whereArgType }
+ }
+ }
+
+ resolvers.Mutation[deleteKey] = (_, args, ctx, info) => {
+ const fields = info.fieldNodes[0].selectionSet.selections.map((s) => s.name.value)
+ return entity.delete({ ...args, fields, ctx })
+ }
+
+ federationReplacements.push({
+ find: new RegExp(`type ${entityName}`),
+ replace: `type ${entityName} @key(fields: "${primaryKey}")`
+ })
+
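+  // With federation enabled, entity references (__resolveReference) are resolved in batch via the same loadMany helper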
+ if (federationMetadata) {
+ loaders[entityName] = loaders[entityName] || {}
+ loaders[entityName].__resolveReference = {
+ loader (queries, ctx) {
+ const keys = queries.map(({ obj }) => obj[primaryKey])
+ return loadMany(keys, queries, ctx)
+ },
+ opts: {
+ cache: false
+ }
+ }
+ }
+
+ return {
+ type,
+ entity,
+ loadMany,
+ fields
+ }
+
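+  // Fetch all rows matching the given primary keys in one query and return them in the same order as keys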
+ async function loadMany (keys, queries, ctx) {
+ const fields = getFields(queries)
+ const res = await entity.find({
+ where: {
+ [primaryKey]: {
+ in: keys
+ }
+ },
+ fields,
+ ctx
+ })
+
+ const map = {}
+
+ for (const row of res) {
+ map[row[primaryKey]] = row
+ }
+
+ const output = []
+ for (const key of keys) {
+ output.push(map[key])
+ }
+
+ return output
+ }
+
+ function getFields (queries) {
+ const fields = new Set()
+ fields.add(primaryKey)
+ for (const query of queries) {
+ fromSelectionSet(query.info.fieldNodes[0].selectionSet, fields)
+ }
+ return [...fields]
+ }
+}
+
+module.exports = constructGraph
diff --git a/packages/sql-graphql/lib/relationship.js b/packages/sql-graphql/lib/relationship.js
new file mode 100644
index 0000000000..64690d7cd5
--- /dev/null
+++ b/packages/sql-graphql/lib/relationship.js
@@ -0,0 +1,67 @@
+'use strict'
+
+const camelcase = require('camelcase')
+const {
+ fromSelectionSet
+} = require('./utils')
+const assert = require('assert')
+
+module.exports = function establishRelations (app, relations, resolvers, loaders, queryTopFields, relationships, metaMap) {
+ const tablesTypeMap = {}
+ const entities = app.platformatic.entities
+ for (const key of Object.keys(entities)) {
+ const entity = entities[key]
+ tablesTypeMap[entity.table] = metaMap.get(entity)
+ }
+ for (const relation of relations) {
+ assert(relation.table_name, 'table_name is required')
+ assert(relation.foreign_table_name, 'foreign_table_name is required')
+
+ const current = tablesTypeMap[relation.table_name]
+ const foreign = tablesTypeMap[relation.foreign_table_name]
+ assert(foreign !== undefined, `No foreign table named "${relation.foreign_table_name}" was found`)
+
+ // current to foreign
+ {
+ const lowered = lowerCaseFirst(foreign.type)
+ if (!relationships[current.type] || relationships[current.type][lowered] !== false) {
+ current.fields[lowered] = { type: foreign.type }
+ const originalField = camelcase(relation.column_name)
+ delete current.fields[originalField]
+        loaders[current.type] = loaders[current.type] || {}
+ loaders[current.type][lowered] = {
+ loader (queries, ctx) {
+ const keys = queries.map(({ obj }) => {
+ return obj[originalField]
+ })
+ return foreign.loadMany(keys, queries, ctx)
+ },
+ opts: {
+ cache: false
+ }
+ }
+ }
+ }
+
+ // foreign to current
+ {
+ const lowered = lowerCaseFirst(camelcase(current.entity.table))
+ if (!relationships[foreign.type] || relationships[foreign.type][lowered] !== false) {
+ foreign.fields[lowered] = queryTopFields[lowered]
+ resolvers[foreign.type] = resolvers[foreign.type] || {}
+ resolvers[foreign.type][lowered] = async function (obj, args, ctx, info) {
+ const fields = fromSelectionSet(info.fieldNodes[0].selectionSet, new Set())
+ const toSearch = { ...args, fields: [...fields], ctx }
+ toSearch.where = toSearch.where || {}
+ toSearch.where[camelcase(relation.column_name)] = { eq: obj.id }
+ return current.entity.find(toSearch)
+ }
+ }
+ }
+ }
+}
+
+function lowerCaseFirst (str) {
+ str = str.toString()
+ return str.charAt(0).toLowerCase() + str.slice(1)
+}
diff --git a/packages/sql-graphql/lib/utils.js b/packages/sql-graphql/lib/utils.js
new file mode 100644
index 0000000000..f14c8d079d
--- /dev/null
+++ b/packages/sql-graphql/lib/utils.js
@@ -0,0 +1,77 @@
+'use strict'
+
+const graphql = require('graphql')
+const scalars = require('graphql-scalars')
+
+// The sqlTypeToGraphQL mapping is shared across all database adapters.
+function sqlTypeToGraphQL (sqlType) {
+ // TODO support more types
+ /* istanbul ignore next */
+ switch (sqlType) {
+ case 'int':
+ return graphql.GraphQLInt
+ case 'integer':
+ return graphql.GraphQLInt
+ case 'tinyint':
+ return graphql.GraphQLBoolean
+ case 'smallint':
+ return graphql.GraphQLInt
+ case 'decimal':
+ return graphql.GraphQLInt
+ case 'bigint':
+ return graphql.GraphQLInt
+ case 'int2':
+ return graphql.GraphQLInt
+ case 'int4':
+ return graphql.GraphQLInt
+ case 'varchar':
+ return graphql.GraphQLString
+ case 'text':
+ return graphql.GraphQLString
+ case 'bool':
+ return graphql.GraphQLBoolean
+ case 'real':
+ return graphql.GraphQLFloat
+ case 'float8':
+ return graphql.GraphQLFloat
+ case 'double':
+ return graphql.GraphQLFloat
+ case 'double precision':
+ return graphql.GraphQLFloat
+ case 'numeric':
+ return graphql.GraphQLFloat
+ case 'float4':
+ return graphql.GraphQLFloat
+ case 'date':
+ return scalars.GraphQLDate
+ case 'time':
+ return graphql.GraphQLString
+ case 'timestamp':
+ return scalars.GraphQLDateTime
+ case 'uuid':
+ return graphql.GraphQLString
+ default:
+ return graphql.GraphQLString
+ }
+}
+
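+// Collect the selected field names from a GraphQL selection set, recursing into inline fragments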
+function fromSelectionSet (selectionSet, fields = new Set()) {
+ /* istanbul ignore next */
+ for (const s of selectionSet.selections) {
+ if (s.kind === 'Field') {
+ fields.add(s.name.value)
+ } else if (s.kind === 'InlineFragment') {
+ fromSelectionSet(s.selectionSet, fields)
+ } else {
+ throw new Error('Unsupported kind: ' + s.kind)
+ }
+ }
+ return fields
+}
+
+module.exports = {
+ sqlTypeToGraphQL,
+ fromSelectionSet,
+  typeSym: Symbol('graphqlType')
+}
diff --git a/packages/sql-graphql/package.json b/packages/sql-graphql/package.json
new file mode 100644
index 0000000000..f82e2bebc4
--- /dev/null
+++ b/packages/sql-graphql/package.json
@@ -0,0 +1,44 @@
+{
+ "name": "@platformatic/sql-graphql",
+ "version": "0.0.21",
+ "description": "Map SQL dbs to GraphQL",
+ "main": "index.js",
+ "scripts": {
+ "test": "standard | snazzy && npm run test:typescript && npm run test:postgresql && npm run test:mariadb && npm run test:mysql && npm run test:mysql8 && npm run test:sqlite",
+ "test:postgresql": "DB=postgresql tap test/*.test.js",
+ "test:mariadb": "DB=mariadb tap test/*.test.js",
+ "test:mysql": "DB=mysql tap test/*.test.js",
+ "test:mysql8": "DB=mysql8 tap test/*.test.js",
+ "test:sqlite": "DB=sqlite tap test/*.test.js",
+ "test:typescript": "tsd"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/plaformatic/platformatic.git"
+ },
+ "author": "Matteo Collina ",
+ "license": "Apache-2.0",
+ "bugs": {
+ "url": "https://github.com/plaformatic/platformatic/issues"
+ },
+ "homepage": "https://github.com/plaformatic/platformatic#readme",
+ "devDependencies": {
+ "@platformatic/sql-mapper": "workspace:*",
+ "fastify": "^4.6.0",
+ "snazzy": "^9.0.0",
+ "standard": "^17.0.0",
+ "tap": "^16.0.0",
+ "tsd": "^0.23.0"
+ },
+ "dependencies": {
+ "camelcase": "^6.0.0",
+ "fastify-plugin": "^4.1.0",
+ "graphql": "^16.6.0",
+ "graphql-scalars": "^1.13.1",
+ "inflected": "^2.1.0",
+ "mercurius": "^11.0.0"
+ },
+ "tsd": {
+ "directory": "test/types"
+ }
+}
diff --git a/packages/sql-graphql/test/datatypes.test.js b/packages/sql-graphql/test/datatypes.test.js
new file mode 100644
index 0000000000..828aa2c83f
--- /dev/null
+++ b/packages/sql-graphql/test/datatypes.test.js
@@ -0,0 +1,708 @@
+'use strict'
+
+const { test } = require('tap')
+const sqlGraphQL = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const fastify = require('fastify')
+const { clear, connInfo, isPg, isMysql, isSQLite } = require('./helper')
+
+test('[PG] simple db simple graphql schema', { skip: !isPg }, async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ await db.query(sql`CREATE TABLE simple_types (
+ id SERIAL8 PRIMARY KEY,
+ published BOOL,
+ current DOUBLE PRECISION,
+ long_text TEXT,
+ born_at_date DATE,
+ born_at_time TIME,
+ born_at_timestamp TIMESTAMP,
+ uuid UUID UNIQUE,
+ a_real real,
+ a_smallint smallint,
+ a_decimal decimal
+ );`)
+ }
+ })
+ teardown(app.close.bind(app))
+
+ app.register(sqlGraphQL)
+
+ await app.ready()
+
+ const timestamp = new Date()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ saveSimpleType(input: {
+ published: true,
+ current: 42,
+ longText: "abc",
+ bornAtDate: "2021-11-11",
+ bornAtTime: "12:42:00Z",
+ bornAtTimestamp: "${timestamp.toISOString()}",
+ uuid: "12345678-1234-1234-1234-123456789012",
+ aReal: 1.2,
+ aSmallint: 42,
+ aDecimal: 42 }) {
+ id
+ published
+ current
+ longText
+ bornAtDate
+ bornAtTime
+ bornAtTimestamp
+ uuid
+ aReal
+ aSmallint
+ aDecimal
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'saveSimpleType status code')
+ same(res.json(), {
+ data: {
+ saveSimpleType: {
+ id: '1',
+ published: true,
+ current: 42,
+ longText: 'abc',
+ bornAtDate: '2021-11-11',
+ bornAtTime: '12:42:00',
+ bornAtTimestamp: timestamp.toISOString(),
+ uuid: '12345678-1234-1234-1234-123456789012',
+ aReal: 1.2,
+ aSmallint: 42,
+ aDecimal: 42
+ }
+ }
+ }, 'saveSimpleType response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getSimpleTypeById(id: 1) {
+ id
+ published
+ current
+ longText
+ bornAtDate
+ bornAtTime
+ bornAtTimestamp
+ uuid
+ aReal
+ aSmallint
+ aDecimal
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'getSimpleTypeById status code')
+ same(res.json(), {
+ data: {
+ getSimpleTypeById: {
+ id: 1,
+ published: true,
+ current: 42,
+ longText: 'abc',
+ bornAtDate: '2021-11-11',
+ bornAtTime: '12:42:00',
+ bornAtTimestamp: timestamp.toISOString(),
+ uuid: '12345678-1234-1234-1234-123456789012',
+ aReal: 1.2,
+ aSmallint: 42,
+ aDecimal: 42
+ }
+ }
+ }, 'getSimpleTypeById response')
+ }
+})
+
+test('[PG] - UUID', { skip: !isPg }, async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ await db.query(sql`
+ CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
+ CREATE TABLE pages (
+ id uuid PRIMARY KEY default uuid_generate_v1(),
+ title VARCHAR(42)
+ );`)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ let id
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ id = res.json().data.savePage.id
+ same(res.json(), {
+ data: {
+ savePage: {
+ id,
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: "${id}") {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id,
+ title: 'Hello'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: "${id}", title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+})
+
+test('[MySQL] simple db simple graphql schema', { skip: !isMysql }, async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ const uuidType = db.isMariaDB ? sql.__dangerous__rawValue('uuid') : sql.__dangerous__rawValue('varchar(200)')
+
+ await db.query(sql`CREATE TABLE simple_types (
+ id SERIAL PRIMARY KEY,
+ published BOOL,
+ current DOUBLE PRECISION,
+ long_text TEXT,
+ born_at_date DATE,
+ born_at_time TIME,
+ born_at_timestamp TIMESTAMP,
+ uuid ${uuidType} UNIQUE,
+ a_real real,
+ a_smallint smallint,
+ a_decimal decimal
+ );`)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const isMariaDB = app.platformatic.db.isMariaDB
+
+ const timestamp = new Date()
+ const bornAtTimestamp = new Date(Math[isMariaDB ? 'floor' : 'round'](timestamp.getTime() / 1000) * 1000).toISOString()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ saveSimpleType(input: {
+ published: true,
+ current: 42,
+ longText: "abc",
+ bornAtDate: "2021-11-11",
+ bornAtTime: "12:42:00",
+ bornAtTimestamp: "${timestamp.toISOString()}",
+ uuid: "12345678-1234-1234-1234-123456789012",
+ aReal: 1.2,
+ aSmallint: 42,
+ aDecimal: 42 }) {
+ id
+ published
+ current
+ longText
+ bornAtDate
+ bornAtTime
+ bornAtTimestamp
+ uuid
+ aReal
+ aSmallint
+ aDecimal
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'saveSimpleType status code')
+ same(res.json(), {
+ data: {
+ saveSimpleType: {
+ id: '1',
+ published: true,
+ current: 42,
+ longText: 'abc',
+ bornAtDate: '2021-11-11',
+ bornAtTime: '12:42:00',
+ bornAtTimestamp,
+ uuid: '12345678-1234-1234-1234-123456789012',
+ aReal: 1.2,
+ aSmallint: 42,
+ aDecimal: 42
+ }
+ }
+ }, 'saveSimpleType response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getSimpleTypeById(id: 1) {
+ id
+ published
+ current
+ longText
+ bornAtDate
+ bornAtTime
+ bornAtTimestamp
+ uuid
+ aReal
+ aSmallint
+ aDecimal
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'getSimpleTypeById status code')
+ same(res.json(), {
+ data: {
+ getSimpleTypeById: {
+ id: 1,
+ published: true,
+ current: 42,
+ longText: 'abc',
+ bornAtDate: '2021-11-11',
+ bornAtTime: '12:42:00',
+ bornAtTimestamp,
+ uuid: '12345678-1234-1234-1234-123456789012',
+ aReal: 1.2,
+ aSmallint: 42,
+ aDecimal: 42
+ }
+ }
+ }, 'getSimpleTypeById response')
+ }
+})
+
+test('[MySQL] - UUID', { skip: !isMysql }, async ({ pass, teardown, same, equal, skip }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+ if (!db.isMariaDB) {
+ return
+ }
+
+ await clear(db, sql)
+
+ await db.query(sql`
+ CREATE TABLE pages (
+ id uuid PRIMARY KEY default UUID(),
+ title VARCHAR(42)
+ );`)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ if (!app.platformatic.db.isMariaDB) {
+ skip('MySQL does not support UUID, only MariaDB does')
+ return
+ }
+
+ let id
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ id = res.json().data.savePage.id
+ same(res.json(), {
+ data: {
+ savePage: {
+ id,
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: "${id}") {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id,
+ title: 'Hello'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: "${id}", title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+})
+
+test('[SQLite] simple db simple graphql schema', { skip: !isSQLite }, async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ await db.query(sql`CREATE TABLE simple_types (
+ id INTEGER PRIMARY KEY,
+ published BOOL,
+ current DOUBLE PRECISION,
+ long_text TEXT,
+ born_at_timestamp TIMESTAMP,
+ uuid UUID UNIQUE,
+ a_real real,
+ a_smallint smallint,
+ a_decimal decimal
+ );`)
+ }
+ })
+ teardown(app.close.bind(app))
+
+ app.register(sqlGraphQL)
+
+ await app.ready()
+
+ const timestamp = new Date()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ saveSimpleType(input: {
+ published: true,
+ current: 42,
+ longText: "abc",
+ bornAtTimestamp: "${timestamp.toISOString()}",
+ uuid: "12345678-1234-1234-1234-123456789012",
+ aReal: 1.2,
+ aSmallint: 42,
+ aDecimal: 42 }) {
+ id
+ published
+ current
+ longText
+ bornAtTimestamp
+ uuid
+ aReal
+ aSmallint
+ aDecimal
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'saveSimpleType status code')
+ same(res.json(), {
+ data: {
+ saveSimpleType: {
+ id: '1',
+ published: true,
+ current: 42,
+ longText: 'abc',
+ bornAtTimestamp: timestamp.toISOString(),
+ uuid: '12345678-1234-1234-1234-123456789012',
+ aReal: 1.2,
+ aSmallint: 42,
+ aDecimal: 42
+ }
+ }
+ }, 'saveSimpleType response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getSimpleTypeById(id: 1) {
+ id
+ published
+ current
+ longText
+ bornAtTimestamp
+ uuid
+ aReal
+ aSmallint
+ aDecimal
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'getSimpleTypeById status code')
+ same(res.json(), {
+ data: {
+ getSimpleTypeById: {
+ id: 1,
+ published: true,
+ current: 42,
+ longText: 'abc',
+ bornAtTimestamp: timestamp.toISOString(),
+ uuid: '12345678-1234-1234-1234-123456789012',
+ aReal: 1.2,
+ aSmallint: 42,
+ aDecimal: 42
+ }
+ }
+ }, 'getSimpleTypeById response')
+ }
+})
+
+test('[SQLite] - UUID', { skip: !isSQLite }, async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ await db.query(sql`
+ CREATE TABLE pages (
+ id uuid PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+ })
+ app.register(sqlGraphQL)
+
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ let id
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ id = res.json().data.savePage.id
+ same(res.json(), {
+ data: {
+ savePage: {
+ id,
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: "${id}") {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id,
+ title: 'Hello'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: "${id}", title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+})
diff --git a/packages/sql-graphql/test/federation.test.js b/packages/sql-graphql/test/federation.test.js
new file mode 100644
index 0000000000..add725cc17
--- /dev/null
+++ b/packages/sql-graphql/test/federation.test.js
@@ -0,0 +1,312 @@
+'use strict'
+
+const { test } = require('tap')
+const Fastify = require('fastify')
+const mercurius = require('mercurius')
+const sqlGraphQL = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const { clear, connInfo, isSQLite } = require('./helper')
+
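+// Start a standalone federated service with the given schema and resolvers on an ephemeral port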
+async function createTestService (t, schema, resolvers = {}) {
+ const service = Fastify({ logger: { level: 'error' } })
+ service.register(mercurius, {
+ schema,
+ resolvers,
+ federationMetadata: true
+ })
+ await service.listen({ port: 0 })
+ return [service, service.server.address().port]
+}
+
+const categories = {
+ c1: {
+ id: 'c1',
+ name: 'Food'
+ },
+ c2: {
+ id: 'c2',
+ name: 'Places'
+ }
+}
+
+// This works because the database is reset between tests
+const postCategory = {
+ 1: 'c1',
+ 2: 'c2',
+ 3: 'c1',
+ 4: 'c1'
+}
+
+const categoryPost = Object.keys(postCategory).reduce((acc, key) => {
+ acc[postCategory[key]] = acc[postCategory[key]] || []
+ acc[postCategory[key]].push(key)
+ return acc
+}, {})
+
+const posts = [{
+ title: 'Post 1',
+ longText: 'This is a long text 1'
+}, {
+ title: 'Post 2',
+ longText: 'This is a long text 2'
+}, {
+ title: 'Post 3',
+ longText: 'This is a long text 3'
+}, {
+ title: 'Post 4',
+ longText: 'This is a long text 4'
+}]
+
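+// Build a mercurius gateway that federates a hand-written category service with a sql-graphql-backed post service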
+async function createTestGatewayServer (t, cacheOpts) {
+ const categoryServiceSchema = `
+ type Query @extends {
+ categories: [Category]
+ }
+
+ type Category @key(fields: "id") {
+ id: ID!
+ name: String
+ posts: [Post]
+ }
+
+ type Post @key(fields: "id") @extends {
+ id: ID! @external
+ category: Category
+ }
+`
+ const categoryServiceResolvers = {
+ Query: {
+ categories: (root, args, context, info) => {
+ t.pass('Query.categories resolved')
+ return Object.values(categories)
+ }
+ },
+ Category: {
+ posts: (root, args, context, info) => {
+ t.pass('Category.posts resolved')
+ return categoryPost[root.id]
+ ? categoryPost[root.id].map(id => ({ id }))
+ : []
+ },
+ __resolveReference: (category, args, context, info) => {
+ t.pass('Category.__resolveReference')
+ return categories[category.id]
+ }
+ },
+ Post: {
+ category: (root, args, context, info) => {
+ t.pass('Post.category resolved')
+ return categories[postCategory[root.id]]
+ }
+ }
+ }
+ const [categoryService, categoryServicePort] = await createTestService(t, categoryServiceSchema, categoryServiceResolvers)
+
+ const postService = Fastify()
+ postService.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ t.pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE posts (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE posts (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT
+ );`)
+ }
+ }
+ })
+ postService.register(sqlGraphQL, {
+ federationMetadata: true
+ })
+ await postService.listen({ port: 0 })
+ const postServicePort = postService.server.address().port
+
+ await postService.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs : [PostInput]!) {
+ insertPosts(inputs: $inputs) {
+ id
+ title
+ }
+ }
+ `,
+ variables: {
+ inputs: posts
+ }
+ }
+ })
+
+ const gateway = Fastify()
+ t.teardown(async () => {
+ await gateway.close()
+ await categoryService.close()
+ await postService.close()
+ })
+ gateway.register(mercurius, {
+ gateway: {
+ services: [{
+ name: 'category',
+ url: `http://localhost:${categoryServicePort}/graphql`
+ }, {
+ name: 'post',
+ url: `http://localhost:${postServicePort}/graphql`
+ }]
+ }
+ })
+
+ return gateway
+}
+
+test('extendable', async (t) => {
+ const app = await createTestGatewayServer(t)
+
+ const query = `query {
+ categories {
+ id
+ name
+ posts {
+ id
+ title
+ longText
+ category {
+ id
+ name
+ }
+ }
+ }
+ posts {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }`
+
+ const expected = {
+ data: {
+ categories: [
+ {
+ id: 'c1',
+ name: 'Food',
+ posts: [
+ {
+ id: '1',
+ title: 'Post 1',
+ longText: 'This is a long text 1',
+ category: {
+ id: 'c1',
+ name: 'Food'
+ }
+ },
+ {
+ id: '3',
+ title: 'Post 3',
+ longText: 'This is a long text 3',
+ category: {
+ id: 'c1',
+ name: 'Food'
+ }
+ },
+ {
+ id: '4',
+ title: 'Post 4',
+ longText: 'This is a long text 4',
+ category: {
+ id: 'c1',
+ name: 'Food'
+ }
+ }
+ ]
+ },
+ {
+ id: 'c2',
+ name: 'Places',
+ posts: [
+ {
+ id: '2',
+ title: 'Post 2',
+ longText: 'This is a long text 2',
+ category: {
+ id: 'c2',
+ name: 'Places'
+ }
+ }
+ ]
+ }
+ ],
+ posts: [
+ {
+ id: '1',
+ title: 'Post 1',
+ category: {
+ id: 'c1',
+ name: 'Food'
+ }
+ },
+ {
+ id: '2',
+ title: 'Post 2',
+ category: {
+ id: 'c2',
+ name: 'Places'
+ }
+ },
+ {
+ id: '3',
+ title: 'Post 3',
+ category: {
+ id: 'c1',
+ name: 'Food'
+ }
+ },
+ {
+ id: '4',
+ title: 'Post 4',
+ category: {
+ id: 'c1',
+ name: 'Food'
+ }
+ }
+ ]
+ }
+ }
+
+ t.comment('first request')
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: { query }
+ })
+
+ t.same(res.json(), expected)
+ }
+
+ t.comment('second request')
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: { query }
+ })
+
+ t.same(res.json(), expected)
+ }
+})
diff --git a/packages/sql-graphql/test/helper.js b/packages/sql-graphql/test/helper.js
new file mode 100644
index 0000000000..0018ec4909
--- /dev/null
+++ b/packages/sql-graphql/test/helper.js
@@ -0,0 +1,70 @@
+'use strict'
+
+// Needed to work with dates & postgresql
+// See https://node-postgres.com/features/types/
+process.env.TZ = 'UTC'
+
+const connInfo = {}
+
+if (!process.env.DB || process.env.DB === 'postgresql') {
+ connInfo.connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ module.exports.isPg = true
+} else if (process.env.DB === 'mariadb') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1:3307/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'mysql') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'mysql8') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1:3308/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'sqlite') {
+ connInfo.connectionString = 'sqlite://:memory:'
+ module.exports.isSQLite = true
+}
+
+module.exports.connInfo = connInfo
+
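+// Drop any tables left over from previous runs; errors are ignored because a table may not exist yet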
+module.exports.clear = async function (db, sql) {
+ try {
+ await db.query(sql`DROP TABLE pages`)
+  } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE categories`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE posts`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE simple_types`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE owners`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE users`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE versions`)
+ } catch {
+ }
+ try {
+ await db.query(sql`DROP TABLE graphs`)
+ } catch {
+ }
+}
diff --git a/packages/sql-graphql/test/hooks.test.js b/packages/sql-graphql/test/hooks.test.js
new file mode 100644
index 0000000000..0e5cc5732b
--- /dev/null
+++ b/packages/sql-graphql/test/hooks.test.js
@@ -0,0 +1,660 @@
+'use strict'
+
+const { test } = require('tap')
+const sqlMapper = require('@platformatic/sql-mapper')
+const sqlGraphQL = require('..')
+const fastify = require('fastify')
+const { clear, connInfo, isMysql, isSQLite } = require('./helper')
+
+test('basic hooks', async ({ pass, teardown, same, equal, plan }) => {
+ plan(22)
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+ },
+ hooks: {
+ Page: {
+ noKey () {
+ // This should never be called
+ },
+ async save (original, { input, ctx, fields }) {
+ pass('save called')
+
+ equal(ctx.app, app)
+ if (!input.id) {
+ same(input, {
+ title: 'Hello'
+ })
+
+ return original({
+ input: {
+ title: 'Hello from hook'
+ },
+ fields
+ })
+ } else {
+ same(input, {
+ id: 1,
+ title: 'Hello World'
+ })
+
+ return original({
+ input: {
+ id: 1,
+ title: 'Hello from hook 2'
+ },
+ fields
+ })
+ }
+ },
+ async find (original, args) {
+ pass('find called')
+
+ equal(args.ctx.app, app)
+ same(args.where, {
+ id: {
+ in: ['1']
+ }
+ })
+ args.where = {
+ id: {
+ eq: ['2']
+ }
+ }
+ same(args.fields, ['id', 'title'])
+ return original(args)
+ },
+ async insert (original, args) {
+ pass('insert called')
+
+ equal(args.ctx.app, app)
+ same(args.inputs, [{
+ title: 'Hello'
+ }, {
+ title: 'Hello World'
+ }])
+ same(args.fields, ['id', 'title'])
+ return original(args)
+ }
+ }
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello from hook'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: null
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello from hook 2'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs: [PageInput]!) {
+ insertPages(inputs: $inputs) {
+ id
+ title
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ {
+ title: 'Hello'
+ },
+ {
+ title: 'Hello World'
+ }
+ ]
+ }
+ }
+ })
+
+ equal(res.statusCode, 200, 'insertPages status code')
+ }
+})
+
+test('hooks with relationships', async ({ pass, teardown, same, equal, plan }) => {
+ plan(17)
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isMysql) {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ category_id BIGINT UNSIGNED,
+ FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE
+ );
+ `)
+ } else if (isSQLite) {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ `)
+ await db.query(sql`
+ CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ category_id BIGINT UNSIGNED,
+ FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE
+ );
+ `)
+ } else {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(42)
+ );
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ category_id INTEGER REFERENCES categories(id)
+ );
+ `)
+ }
+ },
+ hooks: {
+ Page: {
+ async save (original, { input, ctx, fields }) {
+ pass('save called')
+ equal(ctx.app, app)
+
+ return original({
+ input,
+ fields
+ })
+ },
+ async find (original, opts) {
+ pass('find called')
+ equal(opts.ctx.app, app)
+ return original(opts)
+ }
+ }
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ const categories = [{
+ name: 'Pets'
+ }, {
+ name: 'Food'
+ }]
+
+ await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs: [CategoryInput]!) {
+ insertCategories(inputs: $inputs) {
+ id
+ name
+ }
+ }
+ `,
+ variables: {
+ inputs: categories
+ }
+ }
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello", categoryId: 1 }) {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ category: {
+ id: 1,
+ name: 'Pets'
+ }
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ category {
+ id
+ name
+ pages {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello',
+ category: {
+ id: 1,
+ name: 'Pets',
+ pages: [{
+ id: 1,
+ title: 'Hello',
+ category: {
+ id: 1,
+ name: 'Pets'
+ }
+ }]
+ }
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ categories {
+ id
+ name
+ pages {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ }
+ `
+ }
+ })
+    equal(res.statusCode, 200, 'categories.pages status code')
+ same(res.json(), {
+ data: {
+ categories: [{
+ id: 1,
+ name: 'Pets',
+ pages: [{
+ id: 1,
+ title: 'Hello',
+ category: {
+ id: 1,
+ name: 'Pets'
+ }
+ }]
+ }, {
+ id: 2,
+ name: 'Food',
+ pages: []
+ }]
+ }
+    }, 'categories.pages response')
+ }
+})
+
+test('delete hook', async ({ pass, teardown, same, equal, plan }) => {
+ plan(10)
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+ },
+ hooks: {
+ Page: {
+ async delete (original, args) {
+ pass('delete called')
+
+ equal(args.ctx.app, app)
+ same(args.where, {
+ id: {
+ eq: '1'
+ }
+ })
+ same(args.fields, ['id', 'title'])
+ return original(args)
+ }
+ }
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { id: { eq: "1" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+    equal(res.statusCode, 200, 'deletePages status code')
+ same(res.json(), {
+ data: {
+ deletePages: [{
+ id: 1,
+ title: 'Hello'
+ }]
+ }
+    }, 'deletePages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: null
+ }
+ }, 'pages response')
+ }
+})
+
+test('false resolver no schema', async ({ pass, teardown, same, equal, plan, match }) => {
+ plan(5)
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+ }
+ })
+ app.register(sqlGraphQL, {
+ schema: `
+ extend type Query {
+ add(a: Int!, b: Int!): Int
+ }
+ `,
+ resolvers: {
+ Mutation: {
+ savePage: false,
+ deletePages: false,
+ insertPages: false
+ },
+ Query: {
+ pages: false,
+ getPageById: false
+ }
+ }
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 400, 'pages status code')
+ match(res.json(), {
+ data: null,
+ errors: [{
+ message: 'Cannot query field "getPageById" on type "Query".',
+ locations: [{ line: 3, column: 13 }]
+ }]
+ })
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ deletePages(where: { id: { eq: "1" } }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePages status code')
+ }
+})
diff --git a/packages/sql-graphql/test/ignore.test.js b/packages/sql-graphql/test/ignore.test.js
new file mode 100644
index 0000000000..2d8855d428
--- /dev/null
+++ b/packages/sql-graphql/test/ignore.test.js
@@ -0,0 +1,117 @@
+'use strict'
+
+const { test } = require('tap')
+const sqlGraphQL = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const fastify = require('fastify')
+const { clear, connInfo, isSQLite } = require('./helper')
+
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ await db.query(sql`CREATE TABLE categories (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ await db.query(sql`CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(42)
+ );`)
+ }
+}
+
+test('ignore a table', async ({ pass, teardown, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ ignore: {
+ categories: true
+ },
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
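+  // Introspect the generated schema and verify that the ignored table has no
+  // corresponding GraphQL type.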
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ __schema {
+ types {
+ name
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'introspection query status code')
+ const data = res.json().data
+ equal(data.__schema.types.find((t) => t.name === 'Category'), undefined, 'Category type is ignored')
+ }
+})
+
+test('ignore a column', async ({ pass, teardown, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ ignore: {
+ categories: {
+ name: true
+ }
+ },
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ __schema {
+ types {
+ name
+ fields {
+ name
+ }
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'introspection query status code')
+ const data = res.json().data
+ const Category = data.__schema.types.find((t) => t.name === 'Category')
+ equal(Category.fields.find((f) => f.name === 'name'), undefined, 'name column is ignored')
+ }
+})
diff --git a/packages/sql-graphql/test/insert.test.js b/packages/sql-graphql/test/insert.test.js
new file mode 100644
index 0000000000..c6b86156d9
--- /dev/null
+++ b/packages/sql-graphql/test/insert.test.js
@@ -0,0 +1,181 @@
+'use strict'
+
+const { test } = require('tap')
+const sqlGraphQL = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const fastify = require('fastify')
+const { clear, connInfo, isSQLite, isPg } = require('./helper')
+
+test('batch inserts', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ title
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ }
+ }
+ })
+    equal(res.statusCode, 200, 'insertPages status code')
+ same(res.json(), {
+ data: {
+ insertPages: [
+ { id: 1, title: 'Page 1' },
+ { id: 2, title: 'Page 2' },
+ { id: 3, title: 'Page 3' }
+ ]
+ }
+    }, 'insertPages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ pages {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ pages: [
+ { id: 1, title: 'Page 1' },
+ { id: 2, title: 'Page 2' },
+ { id: 3, title: 'Page 3' }
+ ]
+ }
+ }, 'pages response')
+ }
+})
+
+test('[PG] - batch inserts UUID', { skip: !isPg }, async ({ pass, teardown, same, equal, match }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ await db.query(sql`
+ CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
+ CREATE TABLE pages (
+ id uuid PRIMARY KEY default uuid_generate_v1(),
+ title VARCHAR(42)
+ );`)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
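+  // Keep the generated UUIDs so each page can be fetched back by id below.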
+ let ids
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ title
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ }
+ }
+ })
+    equal(res.statusCode, 200, 'insertPages status code')
+ ids = res.json().data.insertPages
+ match(res.json(), {
+ data: {
+ insertPages: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ }
+ }, 'insertPages response')
+ }
+
+ for (const { id, title } of ids) {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: "${id}") {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'getPageById status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id,
+ title
+ }
+ }
+ }, 'getPageById response')
+ }
+})
diff --git a/packages/sql-graphql/test/inserted_updated_at.test.js b/packages/sql-graphql/test/inserted_updated_at.test.js
new file mode 100644
index 0000000000..8d69b18935
--- /dev/null
+++ b/packages/sql-graphql/test/inserted_updated_at.test.js
@@ -0,0 +1,517 @@
+'use strict'
+
+const { test } = require('tap')
+const sqlGraphQL = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const fastify = require('fastify')
+const { clear, connInfo, isSQLite, isMysql } = require('./helper')
+const { setTimeout } = require('timers/promises')
+
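+// The MySQL variant declares the timestamp columns as NULL DEFAULT NULL so the
+// server does not apply its implicit CURRENT_TIMESTAMP defaults to them.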
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ inserted_at TIMESTAMP,
+ updated_at TIMESTAMP
+ );`)
+ } else if (isMysql) {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ inserted_at TIMESTAMP NULL DEFAULT NULL,
+ updated_at TIMESTAMP NULL DEFAULT NULL
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ inserted_at TIMESTAMP,
+ updated_at TIMESTAMP
+ );`)
+ }
+}
+
+test('inserted_at updated_at happy path', async ({ pass, teardown, same, equal, not, comment }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ let original
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ insertedAt
+ updatedAt
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ const data = res.json().data
+ not(data.savePage.insertedAt, null, 'insertedAt')
+ not(data.savePage.updatedAt, null, 'updatedAt')
+ comment(`insertedAt: ${data.savePage.insertedAt}`)
+ comment(`updatedAt: ${data.savePage.updatedAt}`)
+ original = data.savePage
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ insertedAt
+ updatedAt
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ const data = res.json().data
+ equal(data.getPageById.insertedAt, original.insertedAt, 'insertedAt')
+ equal(data.getPageById.updatedAt, original.updatedAt, 'updatedAt')
+ comment(`insertedAt: ${data.getPageById.insertedAt}`)
+ comment(`updatedAt: ${data.getPageById.updatedAt}`)
+ }
+
+  await setTimeout(1000) // wait one second so updated_at can change on the next save
+
+ let updated
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ insertedAt
+ updatedAt
+ }
+ }
+ `
+ }
+ })
+ const data = res.json().data
+ equal(data.savePage.insertedAt, original.insertedAt, 'insertedAt')
+ not(data.savePage.updatedAt, original.updatedAt, 'updatedAt')
+ updated = data.savePage
+ comment(`insertedAt: ${data.savePage.insertedAt}`)
+ comment(`updatedAt: ${data.savePage.updatedAt}`)
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ insertedAt
+ updatedAt
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ const data = res.json().data
+ equal(data.getPageById.insertedAt, updated.insertedAt, 'insertedAt')
+ equal(data.getPageById.updatedAt, updated.updatedAt, 'updatedAt')
+ comment(`insertedAt: ${data.getPageById.insertedAt}`)
+ comment(`updatedAt: ${data.getPageById.updatedAt}`)
+ }
+})
+
+test('cannot set inserted_at', async ({ pass, teardown, same, equal, not, comment }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello", insertedAt: "${new Date().toISOString()}" }) {
+ id
+ title
+ insertedAt
+ updatedAt
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 400, 'savePage status code')
+ const data = res.json()
+ equal(data.errors[0].message, 'Field "insertedAt" is not defined by type "PageInput".')
+ }
+})
+
+test('cannot set updated_at', async ({ pass, teardown, same, equal, not, comment }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ insertedAt
+ updatedAt
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ const data = res.json().data
+ not(data.savePage.insertedAt, null, 'insertedAt')
+ not(data.savePage.updatedAt, null, 'updatedAt')
+ comment(`insertedAt: ${data.savePage.insertedAt}`)
+ comment(`updatedAt: ${data.savePage.updatedAt}`)
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World", updatedAt: "${new Date().toISOString()}" }) {
+ id
+ title
+ insertedAt
+ updatedAt
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 400, 'savePage status code')
+ const data = res.json()
+ equal(data.errors[0].message, 'Field "updatedAt" is not defined by type "PageInput".')
+ }
+})
+
+test('does not assign inserted_at updated_at', async ({ pass, teardown, same, equal, not, comment }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ autoTimestamp: false,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ insertedAt
+ updatedAt
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ const data = res.json().data
+ equal(data.savePage.insertedAt, null, 'insertedAt')
+ equal(data.savePage.updatedAt, null, 'updatedAt')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ insertedAt
+ updatedAt
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ const data = res.json().data
+ equal(data.getPageById.insertedAt, null, 'insertedAt')
+ equal(data.getPageById.updatedAt, null, 'updatedAt')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ insertedAt
+ updatedAt
+ }
+ }
+ `
+ }
+ })
+ const data = res.json().data
+ equal(data.savePage.insertedAt, null, 'insertedAt')
+ equal(data.savePage.updatedAt, null, 'updatedAt')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ insertedAt
+ updatedAt
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ const data = res.json().data
+ equal(data.getPageById.insertedAt, null, 'insertedAt')
+ equal(data.getPageById.updatedAt, null, 'updatedAt')
+ }
+})
+
+test('bulk insert adds inserted_at updated_at', async ({ pass, teardown, same, equal, not, comment }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ insertedAt
+ updatedAt
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ const data = res.json().data
+ not(data.savePage.insertedAt, null, 'insertedAt')
+ not(data.savePage.updatedAt, null, 'updatedAt')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ title
+ insertedAt
+ updatedAt
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ }
+ }
+ })
+    equal(res.statusCode, 200, 'insertPages status code')
+ const pages = res.json().data.insertPages
+ for (const page of pages) {
+ not(page.insertedAt, null, 'insertedAt')
+ not(page.updatedAt, null, 'updatedAt')
+ equal(page.insertedAt, page.updatedAt, 'insertedAt === updatedAt')
+ }
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ pages {
+ id
+ title
+ insertedAt
+ updatedAt
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ const pages = res.json().data.pages
+ for (const page of pages) {
+ not(page.insertedAt, null, 'insertedAt')
+ not(page.updatedAt, null, 'updatedAt')
+ equal(page.insertedAt, page.updatedAt, 'insertedAt === updatedAt')
+ }
+ }
+})
+
+test('bulk insert with autoTimestamp=false does not add inserted_at updated_at', async ({ pass, teardown, same, equal, not, comment }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ autoTimestamp: false,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ title
+ insertedAt
+ updatedAt
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ }
+ }
+ })
+    equal(res.statusCode, 200, 'insertPages status code')
+ const pages = res.json().data.insertPages
+ for (const page of pages) {
+ equal(page.insertedAt, null, 'insertedAt')
+ equal(page.updatedAt, null, 'updatedAt')
+ }
+ }
+})
diff --git a/packages/sql-graphql/test/nested.test.js b/packages/sql-graphql/test/nested.test.js
new file mode 100644
index 0000000000..4d80f23429
--- /dev/null
+++ b/packages/sql-graphql/test/nested.test.js
@@ -0,0 +1,862 @@
+'use strict'
+
+const { test } = require('tap')
+const sqlGraphQL = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const fastify = require('fastify')
+const { clear, connInfo, isMysql, isSQLite } = require('./helper')
+
+test('nested resolver', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isMysql) {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ category_id BIGINT UNSIGNED,
+ FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE
+ );
+ `)
+ } else if (isSQLite) {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(42)
+ );
+ `)
+ await db.query(sql`
+ CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ category_id INTEGER REFERENCES categories(id)
+ );
+ `)
+ } else {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(42)
+ );
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ category_id INTEGER REFERENCES categories(id)
+ );
+ `)
+ }
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ const categories = [{
+ name: 'Pets'
+ }, {
+ name: 'Food'
+ }]
+
+ await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs: [CategoryInput]!) {
+ insertCategories(inputs: $inputs) {
+ id
+ name
+ }
+ }
+ `,
+ variables: {
+ inputs: categories
+ }
+ }
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello", categoryId: 1 }) {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ category: {
+ id: 1,
+ name: 'Pets'
+ }
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ category {
+ id
+ name
+ pages {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello',
+ category: {
+ id: 1,
+ name: 'Pets',
+ pages: [{
+ id: 1,
+ title: 'Hello',
+ category: {
+ id: 1,
+ name: 'Pets'
+ }
+ }]
+ }
+ }
+ }
+ }, 'pages response')
+ }
+
+ // Without ids
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ title
+ category {
+ name
+ pages {
+ title
+ category {
+ name
+ }
+ }
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ title: 'Hello',
+ category: {
+ name: 'Pets',
+ pages: [{
+ title: 'Hello',
+ category: {
+ name: 'Pets'
+ }
+ }]
+ }
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ categories {
+ id
+ name
+ pages {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ }
+ `
+ }
+ })
+    equal(res.statusCode, 200, 'categories.pages status code')
+ same(res.json(), {
+ data: {
+ categories: [{
+ id: 1,
+ name: 'Pets',
+ pages: [{
+ id: 1,
+ title: 'Hello',
+ category: {
+ id: 1,
+ name: 'Pets'
+ }
+ }]
+ }, {
+ id: 2,
+ name: 'Food',
+ pages: []
+ }]
+ }
+    }, 'categories.pages response')
+ }
+})
+
+test('disable one-to-many', async ({ pass, teardown, same, equal, match }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isMysql) {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ category_id BIGINT UNSIGNED,
+ FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE
+ );
+ `)
+ } else if (isSQLite) {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(42)
+ );
+ `)
+ await db.query(sql`
+ CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ category_id INTEGER REFERENCES categories(id)
+ );
+ `)
+ } else {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(42)
+ );
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ category_id INTEGER REFERENCES categories(id)
+ );
+ `)
+ }
+ }
+ })
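+  // Setting a relationship resolver to false removes that field from the
+  // generated schema, which the last query below verifies.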
+ app.register(sqlGraphQL, {
+ resolvers: {
+ Category: {
+ pages: false
+ }
+ }
+ })
+ teardown(app.close.bind(app))
+
+ const categories = [{
+ name: 'Pets'
+ }, {
+ name: 'Food'
+ }]
+
+ await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs: [CategoryInput]!) {
+ insertCategories(inputs: $inputs) {
+ id
+ name
+ }
+ }
+ `,
+ variables: {
+ inputs: categories
+ }
+ }
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello", categoryId: 1 }) {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ category: {
+ id: 1,
+ name: 'Pets'
+ }
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello',
+ category: {
+ id: 1,
+ name: 'Pets'
+ }
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ categories {
+ id
+ name
+ pages {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ }
+ `
+ }
+ })
+    equal(res.statusCode, 400, 'categories.pages status code')
+ match(res.json(), {
+ errors: [{
+ message: 'Cannot query field "pages" on type "Category". Did you mean "name"?'
+ }]
+    }, 'categories.pages response')
+ }
+})
+
+test('disable many-to-one relationship', async ({ pass, teardown, same, equal, match }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isMysql) {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ category_id BIGINT UNSIGNED,
+ FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE
+ );
+ `)
+ } else if (isSQLite) {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(42)
+ );
+ `)
+ await db.query(sql`
+ CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ category_id INTEGER REFERENCES categories(id)
+ );
+ `)
+ } else {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(42)
+ );
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ category_id INTEGER REFERENCES categories(id)
+ );
+ `)
+ }
+ }
+ })
+ app.register(sqlGraphQL, {
+ resolvers: {
+ Page: {
+ category: false
+ }
+ }
+ })
+ teardown(app.close.bind(app))
+
+ const categories = [{
+ name: 'Pets'
+ }, {
+ name: 'Food'
+ }]
+
+ await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs: [CategoryInput]!) {
+ insertCategories(inputs: $inputs) {
+ id
+ name
+ }
+ }
+ `,
+ variables: {
+ inputs: categories
+ }
+ }
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello", categoryId: 1 }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 400, 'pages status code')
+ match(res.json(), {
+ errors: [{
+ message: 'Cannot query field "category" on type "Page". Did you mean "categoryId"?'
+ }]
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ categories {
+ id
+ name
+ pages {
+ id
+ title
+ }
+ }
+ }
+ `
+ }
+ })
+    equal(res.statusCode, 200, 'categories.pages status code')
+ same(res.json(), {
+ data: {
+ categories: [{
+ id: 1,
+ name: 'Pets',
+ pages: [{
+ id: 1,
+ title: 'Hello'
+ }]
+ }, {
+ id: 2,
+ name: 'Food',
+ pages: []
+ }]
+ }
+    }, 'categories.pages response')
+ }
+})
+
+test('nested update', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isMysql) {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ category_id BIGINT UNSIGNED,
+ FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE
+ );
+ `)
+ } else if (isSQLite) {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(42)
+ );
+ `)
+ await db.query(sql`
+ CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ category_id INTEGER REFERENCES categories(id)
+ );
+ `)
+ } else {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(42)
+ );
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ category_id INTEGER REFERENCES categories(id)
+ );
+ `)
+ }
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ const categories = [{
+ name: 'Pets'
+ }, {
+ name: 'Food'
+ }]
+
+ await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs: [CategoryInput]!) {
+ insertCategories(inputs: $inputs) {
+ id
+ name
+ }
+ }
+ `,
+ variables: {
+ inputs: categories
+ }
+ }
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello", categoryId: 1 }) {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello',
+ category: {
+ id: 1,
+ name: 'Pets'
+ }
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ category {
+ id
+ name
+ pages {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello',
+ category: {
+ id: 1,
+ name: 'Pets',
+ pages: [{
+ id: 1,
+ title: 'Hello',
+ category: {
+ id: 1,
+ name: 'Pets'
+ }
+ }]
+ }
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ categories {
+ id
+ name
+ pages {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ }
+ `
+ }
+ })
+    equal(res.statusCode, 200, 'categories.pages status code')
+ same(res.json(), {
+ data: {
+ categories: [{
+ id: 1,
+ name: 'Pets',
+ pages: [{
+ id: 1,
+ title: 'Hello',
+ category: {
+ id: 1,
+ name: 'Pets'
+ }
+ }]
+ }, {
+ id: 2,
+ name: 'Food',
+ pages: []
+ }]
+ }
+    }, 'categories.pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Updated", id: 1 }) {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Updated',
+ category: {
+ id: 1,
+ name: 'Pets'
+ }
+ }
+ }
+ }, 'savePage response')
+ }
+})
diff --git a/packages/sql-graphql/test/no-modification-of-entity.test.js b/packages/sql-graphql/test/no-modification-of-entity.test.js
new file mode 100644
index 0000000000..e315074a5b
--- /dev/null
+++ b/packages/sql-graphql/test/no-modification-of-entity.test.js
@@ -0,0 +1,113 @@
+'use strict'
+
+const { test } = require('tap')
+const sqlGraphQL = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const fastify = require('fastify')
+const { clear, connInfo, isSQLite, isMysql } = require('./helper')
+
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+}
+
+test('no modification of entity', async ({ teardown, equal, pass }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
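+  // The plugin must not attach GraphQL type information to the mapper entities.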
+ equal(app.platformatic.entities.page.type, undefined, 'no type in entity')
+
+ for (const field of Object.values(app.platformatic.entities.page.fields)) {
+ equal(field.type, undefined, `no type in field ${field.camelcase}`)
+ }
+})
+
+test('no modification of entity with nested data', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isMysql) {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ category_id BIGINT UNSIGNED,
+ FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE
+ );
+ `)
+ } else if (isSQLite) {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(42)
+ );
+ `)
+ await db.query(sql`
+ CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ category_id INTEGER REFERENCES categories(id)
+ );
+ `)
+ } else {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(42)
+ );
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ category_id INTEGER REFERENCES categories(id)
+ );
+ `)
+ }
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ equal(app.platformatic.entities.page.type, undefined, 'no type in entity')
+
+ for (const field of Object.values(app.platformatic.entities.page.fields)) {
+ equal(field.type, undefined, `no type in field ${field.camelcase}`)
+ }
+
+ equal(app.platformatic.entities.category.type, undefined, 'no type in entity')
+
+ for (const field of Object.values(app.platformatic.entities.category.fields)) {
+ equal(field.type, undefined, `no type in field ${field.camelcase}`)
+ }
+})
diff --git a/packages/sql-graphql/test/order_by.test.js b/packages/sql-graphql/test/order_by.test.js
new file mode 100644
index 0000000000..a69d5189b2
--- /dev/null
+++ b/packages/sql-graphql/test/order_by.test.js
@@ -0,0 +1,397 @@
+'use strict'
+
+const { test } = require('tap')
+const sqlGraphQL = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const fastify = require('fastify')
+const { clear, connInfo, isSQLite, isMysql } = require('./helper')
+
+test('one-level order by', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ counter INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ counter INTEGER
+ );`)
+ }
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ title
+ counter
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { title: 'Page 1', counter: 3 },
+ { title: 'Page 2', counter: 2 },
+ { title: 'Page 3', counter: 1 }
+ ]
+ }
+ }
+ })
+    equal(res.statusCode, 200, 'insertPages status code')
+ same(res.json(), {
+ data: {
+ insertPages: [
+ { id: 1, title: 'Page 1', counter: 3 },
+ { id: 2, title: 'Page 2', counter: 2 },
+ { id: 3, title: 'Page 3', counter: 1 }
+ ]
+ }
+    }, 'insertPages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ pages (orderBy: { field: counter, direction: ASC }) {
+ id
+ title
+ counter
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ pages: [
+ { id: 3, title: 'Page 3', counter: 1 },
+ { id: 2, title: 'Page 2', counter: 2 },
+ { id: 1, title: 'Page 1', counter: 3 }
+ ]
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ pages (orderBy: { field: counter, direction: DESC }) {
+ id
+ title
+ counter
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ pages: [
+ { id: 1, title: 'Page 1', counter: 3 },
+ { id: 2, title: 'Page 2', counter: 2 },
+ { id: 3, title: 'Page 3', counter: 1 }
+ ]
+ }
+ }, 'pages response')
+ }
+
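+  // direction is a required field of the orderBy input, so omitting it is a
+  // GraphQL validation error.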
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ pages (orderBy: { field: counter}) {
+ id
+ title
+ counter
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 400, 'pages status code')
+ same(res.json(), {
+ data: null,
+ errors: [{
+ message: 'Field "PageOrderByArguments.direction" of required type "OrderByDirection!" was not provided.',
+ locations: [{
+ line: 3,
+ column: 29
+ }
+ ]
+ }]
+ })
+ }
+})
+
+test('list order by', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ counter INTEGER,
+ counter2 INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ counter INTEGER,
+ counter2 INTEGER
+ );`)
+ }
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs : [PageInput]!) {
+ insertPages (inputs: $inputs) {
+ id
+ counter
+ counter2
+ }
+ }
+ `,
+ variables: {
+ inputs: [
+ { counter: 3, counter2: 3 },
+ { counter: 3, counter2: 2 },
+ { counter: 1, counter2: 1 }
+ ]
+ }
+ }
+ })
+    equal(res.statusCode, 200, 'insertPages status code')
+ same(res.json(), {
+ data: {
+ insertPages: [
+ { id: 1, counter: 3, counter2: 3 },
+ { id: 2, counter: 3, counter2: 2 },
+ { id: 3, counter: 1, counter2: 1 }
+ ]
+ }
+    }, 'insertPages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ pages (orderBy: [{ field: counter, direction: ASC }, { field: counter2, direction: DESC }]) {
+ id
+ counter,
+ counter2
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ pages: [
+ { id: 3, counter: 1, counter2: 1 },
+ { id: 1, counter: 3, counter2: 3 },
+ { id: 2, counter: 3, counter2: 2 }
+ ]
+ }
+ }, 'pages response')
+ }
+})
+
+test('nested order by', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isMysql) {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ category_id BIGINT UNSIGNED,
+ FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE
+ );
+ `)
+ } else if (isSQLite) {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(42)
+ );
+ `)
+ await db.query(sql`
+ CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ category_id INTEGER REFERENCES categories(id)
+ );
+ `)
+ } else {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(42)
+ );
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ category_id INTEGER REFERENCES categories(id)
+ );
+ `)
+ }
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ const categories = [{
+ name: 'Pets'
+ }, {
+ name: 'Food'
+ }]
+
+ await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs: [CategoryInput]!) {
+ insertCategories(inputs: $inputs) {
+ id
+ name
+ }
+ }
+ `,
+ variables: {
+ inputs: categories
+ }
+ }
+ })
+
+ const pages = [{
+ title: 'foo',
+ categoryId: 1
+ }, {
+ title: 'bar',
+ categoryId: 1
+ }]
+
+ await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs: [PageInput]!) {
+ insertPages(inputs: $inputs) {
+ id
+ title
+ }
+ }
+ `,
+ variables: {
+ inputs: pages
+ }
+ }
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ categories {
+ id
+ name
+ pages(orderBy: { field: title, direction: ASC }) {
+ id
+ title
+ }
+ }
+ }
+ `
+ }
+ })
+    equal(res.statusCode, 200, 'categories.pages status code')
+ same(res.json(), {
+ data: {
+ categories: [{
+ id: 1,
+ name: 'Pets',
+ pages: [{
+ id: 2,
+ title: 'bar'
+ }, {
+ id: 1,
+ title: 'foo'
+ }]
+ }, {
+ id: 2,
+ name: 'Food',
+ pages: []
+ }]
+ }
+    }, 'categories.pages response')
+ }
+})
diff --git a/packages/sql-graphql/test/relations.test.js b/packages/sql-graphql/test/relations.test.js
new file mode 100644
index 0000000000..3c84090aca
--- /dev/null
+++ b/packages/sql-graphql/test/relations.test.js
@@ -0,0 +1,60 @@
+'use strict'
+
+const { skip, test } = require('tap')
+const { tmpdir } = require('os')
+const { randomUUID } = require('crypto')
+const { join } = require('path')
+const sqlGraphQL = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const fastify = require('fastify')
+const { isSQLite } = require('./helper')
+
+if (!isSQLite) {
+ skip('The db is not SQLite')
+ process.exit(0)
+}
+
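+// The pages table references a subcategories table that is never created, so
+// resolving the schema is expected to fail.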
+test('should fail when an unknown foreign key relationship exists', async ({ pass, rejects, same, teardown }) => {
+ const file = join(tmpdir(), randomUUID())
+ const app = fastify()
+ app.register(sqlMapper, {
+ connectionString: `sqlite://${file}`,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await db.query(sql`
+ CREATE TABLE categories (
+ id INTEGER PRIMARY KEY,
+ name TEXT NOT NULL
+      );`)
+
+ await db.query(sql`
+ CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title TEXT,
+ body TEXT,
+ category_id INTEGER,
+ FOREIGN KEY (category_id) REFERENCES subcategories(id) ON DELETE CASCADE
+      );`)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await rejects(app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ saveCategory(input: { name: "pets" }) {
+ id
+ name
+ }
+ }
+ `
+ }
+ }), new Error('No foreign table named "subcategories" was found'))
+})
diff --git a/packages/sql-graphql/test/simple.test.js b/packages/sql-graphql/test/simple.test.js
new file mode 100644
index 0000000000..daf3e8b940
--- /dev/null
+++ b/packages/sql-graphql/test/simple.test.js
@@ -0,0 +1,708 @@
+'use strict'
+
+const { test } = require('tap')
+const sqlGraphQL = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const fastify = require('fastify')
+const { clear, connInfo, isSQLite, isMysql } = require('./helper')
+
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+}
+
+test('simple db simple graphql schema', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'pages response')
+ }
+})
+
+test('with federationMetadata', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ federationMetadata: true,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { id: 1, title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+})
+
+test('add resolver', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+
+ const schema = `
+ extend type Query {
+ search(title: String!): [Page]
+ }
+ `
+ const resolvers = {
+ Query: {
+ async search (root, args, context, info) {
+ pass('search resolver called')
+ const { db, sql } = context.app.platformatic
+ const res = await db.query(sql`SELECT * FROM pages WHERE title LIKE ${'%' + args.title + '%'}`)
+
+ return res
+ }
+ }
+ }
+
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL, {
+ schema,
+ resolvers
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello World" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello World'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "ABC" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 2,
+ title: 'ABC'
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ search(title: "Hello") {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ search: [{
+ id: 1,
+ title: 'Hello World'
+ }]
+ }
+ }, 'pages response')
+ }
+})
+
+test('override resolver', async ({ pass, teardown, same, equal, plan }) => {
+ plan(3)
+
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL, {
+ resolvers: {
+ Mutation: {
+ async savePage (root, args, context, info) {
+ pass('savePage resolver called')
+ const { db, sql } = context.app.platformatic
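+        // The generated id is retrieved differently per database:
+        // last_insert_rowid() on SQLite, last_insert_id() on MySQL, and
+        // RETURNING on PostgreSQL and MariaDB.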
+ if (isSQLite) {
+ const insert = sql`
+ INSERT INTO pages (title)
+ VALUES (${args.input.title})
+ `
+
+ await db.query(insert)
+
+ const res2 = await db.query(sql`
+ SELECT last_insert_rowid()
+ `)
+
+ const id = res2[0]['last_insert_rowid()']
+ return {
+ ...args.input,
+ id
+ }
+ } else if (isMysql && !db.isMariaDB) {
+ const insert = sql`
+ INSERT INTO pages (title)
+ VALUES (${args.input.title})
+ `
+
+ await db.query(insert)
+
+ const res2 = await db.query(sql`
+ SELECT last_insert_id()
+ `)
+
+ const id = res2[0]['last_insert_id()']
+ return {
+ ...args.input,
+ id
+ }
+ } else {
+ const insert = sql`
+ INSERT INTO pages (title)
+ VALUES (${args.input.title})
+ RETURNING *
+ `
+ const res = await db.query(insert)
+ return res[0]
+ }
+ }
+ }
+ }
+ })
+ teardown(app.close.bind(app))
+
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+})
+
+test('add totally new type and resolver', async ({ pass, teardown, same, equal, plan }) => {
+ plan(4)
+
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL, {
+ schema: `
+ type Category {
+ id: ID!
+ name: String
+ upper: String
+ }
+
+ extend type Query {
+ getCategory: Category
+ }
+ `,
+ resolvers: {
+ Query: {
+ async getCategory (root, args, context, info) {
+ pass('getCategory resolver called')
+ return {
+ id: 1,
+ name: 'Hello'
+ }
+ }
+ },
+ Category: {
+ upper (root, args, context, info) {
+ pass('name resolver called')
+ return root.name.toUpperCase()
+ }
+ }
+ }
+ })
+ teardown(app.close.bind(app))
+
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getCategory {
+ id
+ name
+ upper
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'getCategory status code')
+ same(res.json(), {
+ data: {
+ getCategory: {
+ id: 1,
+ name: 'Hello',
+ upper: 'HELLO'
+ }
+ }
+ }, 'getCategory response')
+})
+
+test('list', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE posts (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE posts (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT
+ );`)
+ }
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const posts = [{
+ title: 'Post 1',
+ longText: 'This is a long text 1'
+ }, {
+ title: 'Post 2',
+ longText: 'This is a long text 2'
+ }, {
+ title: 'Post 3',
+ longText: 'This is a long text 3'
+ }, {
+ title: 'Post 4',
+ longText: 'This is a long text 4'
+ }]
+
+ for (const post of posts) {
+ await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePost(input: { title: "${post.title}", longText: "${post.longText}" }) {
+ id
+ title,
+ longText
+ }
+ }
+ `
+ }
+ })
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ posts {
+ id
+ title
+ longText
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'posts status code')
+ same(res.json(), {
+ data: {
+ posts: posts.map((p, i) => {
+ return { ...p, id: i + 1 + '' }
+ })
+ }
+ }, 'posts response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ posts (limit: 2, offset: 1) {
+ id
+ title
+ longText
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'posts status code')
+ same(res.json(), {
+ data: {
+ posts: posts.map((p, i) => {
+ return { ...p, id: i + 1 + '' }
+ }).slice(1, 3)
+ }
+ }, 'posts response')
+ }
+})
+
+test('not found', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'getPageById status code')
+ same(res.json(), {
+ data: {
+ getPageById: null
+ }
+ }, 'getPageById response')
+ }
+})
+
+test('graphiql is enabled by default', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ const res = await app.inject('/graphiql')
+ equal(res.statusCode, 200)
+})
+
+test('graphiql can be disabled', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlGraphQL, {
+ graphiql: false
+ })
+ teardown(app.close.bind(app))
+
+ const res = await app.inject('/graphiql')
+ equal(res.statusCode, 404)
+})
diff --git a/packages/sql-graphql/test/sqlite.test.js b/packages/sql-graphql/test/sqlite.test.js
new file mode 100644
index 0000000000..d3067c613c
--- /dev/null
+++ b/packages/sql-graphql/test/sqlite.test.js
@@ -0,0 +1,276 @@
+'use strict'
+
+const fastify = require('fastify')
+const sqlGraphQL = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const { isSQLite } = require('./helper')
+const { test, skip } = require('tap')
+const { tmpdir } = require('os')
+const { join } = require('path')
+const { randomUUID } = require('crypto')
+
+if (!isSQLite) {
+ skip('The db is not SQLite')
+ process.exit(0)
+}
+
+test('store, close and load', async ({ pass, same, equal }) => {
+ const file = join(tmpdir(), randomUUID())
+ {
+ const app = fastify()
+ app.register(sqlMapper, {
+ connectionString: `sqlite://${file}`,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+ })
+ app.register(sqlGraphQL)
+
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Hello" }) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'savePage response')
+
+ await app.close()
+ }
+
+ {
+ const app = fastify()
+ app.register(sqlMapper, {
+ connectionString: `sqlite://${file}`
+ })
+ app.register(sqlGraphQL)
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById(id: 1) {
+ id
+ title
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Hello'
+ }
+ }
+ }, 'pages response')
+ await app.close()
+ }
+})
+
+test('demo', async ({ pass, same, equal, teardown }) => {
+ const file = join(tmpdir(), randomUUID())
+ const app = fastify()
+ app.register(sqlMapper, {
+ connectionString: `sqlite://${file}`,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await db.query(sql`
+ CREATE TABLE categories (
+ id INTEGER PRIMARY KEY,
+ name TEXT NOT NULL
+      );`)
+
+ await db.query(sql`
+ CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title TEXT,
+ body TEXT,
+ category_id INTEGER,
+ FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE
+      );`)
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ saveCategory(input: { name: "pets" }) {
+ id
+ name
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'saveCategory status code')
+ same(res.json(), {
+ data: {
+ saveCategory: {
+ id: 1,
+ name: 'pets'
+ }
+ }
+ }, 'saveCategory response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ savePage(input: { title: "Dogs", body: "Dogs are cool", categoryId: "1" }) {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'savePage status code')
+ same(res.json(), {
+ data: {
+ savePage: {
+ id: 1,
+ title: 'Dogs',
+ category: {
+ id: 1,
+ name: 'pets'
+ }
+ }
+ }
+ }, 'savePage response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ getPageById (id: "1") {
+ id
+ title
+ category {
+ id
+ name
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'getPageById status code')
+ same(res.json(), {
+ data: {
+ getPageById: {
+ id: 1,
+ title: 'Dogs',
+ category: {
+ id: 1,
+ name: 'pets'
+ }
+ }
+ }
+ }, 'saveCategory response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query allPages {
+ pages {
+ title
+ category {
+ name
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'pages status code')
+ same(res.json(), {
+ data: {
+ pages: [{
+ title: 'Dogs',
+ category: {
+ name: 'pets'
+ }
+ }]
+ }
+ }, 'pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query allCategories {
+ categories {
+ name
+ pages {
+ title
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'categories status code')
+ same(res.json(), {
+ data: {
+ categories: [{
+ name: 'pets',
+ pages: [{
+ title: 'Dogs'
+ }]
+ }]
+ }
+ }, 'categories response')
+ }
+})
diff --git a/packages/sql-graphql/test/types/index.test-d.ts b/packages/sql-graphql/test/types/index.test-d.ts
new file mode 100644
index 0000000000..7af403f657
--- /dev/null
+++ b/packages/sql-graphql/test/types/index.test-d.ts
@@ -0,0 +1,33 @@
+import { expectType } from 'tsd'
+import { fastify, FastifyInstance } from 'fastify'
+import { MercuriusPlugin } from 'mercurius'
+import plugin, { SQLGraphQLPluginOptions } from '../../index'
+
+const pluginOptions: SQLGraphQLPluginOptions = {
+ graphiql: true,
+ autoTimestamp: true,
+ federationMetadata: true,
+ resolvers: {
+ Mutation: {
+ savePage: false,
+ deletePages: false,
+ insertPages: false
+ },
+ Query: {
+ async getCategory (root, args, context, info) {
+ return { id: 1, name: 'Hello' }
+ }
+ },
+ },
+ schema: `
+ extend type Query {
+ add(a: Int!, b: Int!): Int
+ }
+ `
+}
+
+const instance: FastifyInstance = fastify()
+instance.register(plugin, pluginOptions)
+instance.register(async (instance) => {
+  expectType<MercuriusPlugin>(instance.graphql)
+})
diff --git a/packages/sql-graphql/test/where.test.js b/packages/sql-graphql/test/where.test.js
new file mode 100644
index 0000000000..7d4805fd5c
--- /dev/null
+++ b/packages/sql-graphql/test/where.test.js
@@ -0,0 +1,814 @@
+'use strict'
+
+const { test } = require('tap')
+const sqlGraphQL = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const fastify = require('fastify')
+const { clear, connInfo, isMysql, isSQLite } = require('./helper')
+
+test('list', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE posts (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE posts (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER
+ );`)
+ }
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const posts = [{
+ title: 'Dog',
+ longText: 'Foo',
+ counter: 10
+ }, {
+ title: 'Cat',
+ longText: 'Bar',
+ counter: 20
+ }, {
+ title: 'Mouse',
+ longText: 'Baz',
+ counter: 30
+ }, {
+ title: 'Duck',
+ longText: 'A duck tale',
+ counter: 40
+ }]
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs : [PostInput]!) {
+ insertPosts(inputs: $inputs) {
+ id
+ title
+ }
+ }
+ `,
+ variables: {
+ inputs: posts
+ }
+ }
+ })
+ equal(res.statusCode, 200, 'posts status code')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ posts(where: { title: { eq: "Dog" } }) {
+ id
+ title
+ longText
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'posts status code')
+ same(res.json(), {
+ data: {
+ posts: [{
+ id: '1',
+ title: 'Dog',
+ longText: 'Foo'
+ }]
+ }
+ }, 'posts response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ posts(where: { title: { neq: "Dog" } }) {
+ id
+ title
+ longText
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'posts status code')
+ same(res.json(), {
+ data: {
+ posts: [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }, {
+ id: '4',
+ title: 'Duck',
+ longText: 'A duck tale'
+ }]
+ }
+ }, 'posts response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ posts(where: { counter: { gt: 10 } }) {
+ id
+ title
+ longText
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'posts status code')
+ same(res.json(), {
+ data: {
+ posts: [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }, {
+ id: '4',
+ title: 'Duck',
+ longText: 'A duck tale'
+ }]
+ }
+ }, 'posts response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ posts(where: { counter: { lt: 40 } }) {
+ id
+ title
+ longText
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'posts status code')
+ same(res.json(), {
+ data: {
+ posts: [{
+ id: '1',
+ title: 'Dog',
+ longText: 'Foo'
+ }, {
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }]
+ }
+ }, 'posts response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ posts(where: { counter: { lte: 30 } }) {
+ id
+ title
+ longText
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'posts status code')
+ same(res.json(), {
+ data: {
+ posts: [{
+ id: '1',
+ title: 'Dog',
+ longText: 'Foo'
+ }, {
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }]
+ }
+ }, 'posts response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ posts(where: { counter: { gte: 20 } }) {
+ id
+ title
+ longText
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'posts status code')
+ same(res.json(), {
+ data: {
+ posts: [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }, {
+ id: '4',
+ title: 'Duck',
+ longText: 'A duck tale'
+ }]
+ }
+ }, 'posts response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ posts(where: { counter: { in: [20, 30] } }) {
+ id
+ title
+ longText
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'posts status code')
+ same(res.json(), {
+ data: {
+ posts: [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }]
+ }
+ }, 'posts response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ posts(where: { counter: { nin: [10, 40] } }) {
+ id
+ title
+ longText
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'posts status code')
+ same(res.json(), {
+ data: {
+ posts: [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }]
+ }
+ }, 'posts response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ posts(where: { counter: { gt: 10, lt: 40 } }) {
+ id
+ title
+ longText
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'posts status code')
+ same(res.json(), {
+ data: {
+ posts: [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }]
+ }
+ }, 'posts response')
+ }
+})
+
+test('nested where', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isMysql) {
+ await db.query(sql`
+ CREATE TABLE owners (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ CREATE TABLE posts (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER,
+ owner_id BIGINT UNSIGNED,
+ FOREIGN KEY (owner_id) REFERENCES owners(id) ON DELETE CASCADE
+ );
+ `)
+ } else if (isSQLite) {
+ await db.query(sql`
+ CREATE TABLE owners (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ `)
+
+ await db.query(sql`
+ CREATE TABLE posts (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER,
+ owner_id BIGINT UNSIGNED,
+ FOREIGN KEY (owner_id) REFERENCES owners(id) ON DELETE CASCADE
+ );
+ `)
+ } else {
+ await db.query(sql`
+ CREATE TABLE owners (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ CREATE TABLE posts (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER,
+ owner_id INTEGER REFERENCES owners(id)
+ );`)
+ }
+ }
+ })
+ app.register(sqlGraphQL)
+
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const owners = [{
+ name: 'Matteo'
+ }, {
+ name: 'Luca'
+ }]
+
+ const posts = [{
+ title: 'Dog',
+ longText: 'Foo',
+ counter: 10
+ }, {
+ title: 'Cat',
+ longText: 'Bar',
+ counter: 20
+ }, {
+ title: 'Mouse',
+ longText: 'Baz',
+ counter: 30
+ }, {
+ title: 'Duck',
+ longText: 'A duck tale',
+ counter: 40
+ }]
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs : [OwnerInput]!) {
+ insertOwners(inputs: $inputs) {
+ id
+ name
+ }
+ }
+ `,
+ variables: {
+ inputs: owners
+ }
+ }
+ })
+ const toAssign = [...posts]
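+    // Two posts are assigned to each owner, in insertion order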
+ for (const owner of res.json().data.insertOwners) {
+ toAssign.shift().ownerId = owner.id
+ toAssign.shift().ownerId = owner.id
+ }
+ await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs : [PostInput]!) {
+ insertPosts(inputs: $inputs) {
+ id
+ title
+ }
+ }
+ `,
+ variables: {
+ inputs: posts
+ }
+ }
+ })
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ owners {
+ id
+ name
+ posts(where: { counter: { gte: 20 } }) {
+ id
+ title
+ longText
+ }
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'owners status code')
+ same(res.json(), {
+ data: {
+ owners: [{
+ id: '1',
+ name: 'Matteo',
+ posts: [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }]
+ }, {
+ id: '2',
+ name: 'Luca',
+ posts: [{
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }, {
+ id: '4',
+ title: 'Duck',
+ longText: 'A duck tale'
+ }]
+ }]
+ }
+ }, 'owners response')
+ }
+})
+
+test('delete', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE posts (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE posts (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER
+ );`)
+ }
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const posts = [{
+ title: 'Dog',
+ longText: 'Foo',
+ counter: 10
+ }, {
+ title: 'Cat',
+ longText: 'Bar',
+ counter: 20
+ }, {
+ title: 'Mouse',
+ longText: 'Baz',
+ counter: 30
+ }, {
+ title: 'Duck',
+ longText: 'A duck tale',
+ counter: 40
+ }]
+
+ await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs : [PostInput]!) {
+ insertPosts(inputs: $inputs) {
+ id
+ title
+ }
+ }
+ `,
+ variables: {
+ inputs: posts
+ }
+ }
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ deletePosts(where: { title: { eq: "Dog" } }) {
+ id
+ title
+ longText
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePosts status code')
+ same(res.json(), {
+ data: {
+ deletePosts: [{
+ id: 1,
+ title: 'Dog',
+ longText: 'Foo'
+ }]
+ }
+ }, 'deletePosts response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ posts(where: { title: { eq: "Dog" } }) {
+ id
+ title
+ longText
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'posts status code')
+ same(res.json(), {
+ data: {
+ posts: []
+ }
+ }, 'posts response')
+ }
+})
+
+test('delete all', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE posts (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE posts (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER
+ );`)
+ }
+ }
+ })
+ app.register(sqlGraphQL)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const posts = [{
+ title: 'Dog',
+ longText: 'Foo',
+ counter: 10
+ }, {
+ title: 'Cat',
+ longText: 'Bar',
+ counter: 20
+ }, {
+ title: 'Mouse',
+ longText: 'Baz',
+ counter: 30
+ }, {
+ title: 'Duck',
+ longText: 'A duck tale',
+ counter: 40
+ }]
+
+ await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation batch($inputs : [PostInput]!) {
+ insertPosts(inputs: $inputs) {
+ id
+ title
+ }
+ }
+ `,
+ variables: {
+ inputs: posts
+ }
+ }
+ })
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ mutation {
+ deletePosts {
+ id
+ title
+ longText
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'deletePosts status code')
+ same(res.json(), {
+ data: {
+ deletePosts: [{
+ id: 1,
+ title: 'Dog',
+ longText: 'Foo'
+ }, {
+ id: 2,
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: 3,
+ title: 'Mouse',
+ longText: 'Baz'
+ }, {
+ id: 4,
+ title: 'Duck',
+ longText: 'A duck tale'
+ }]
+ }
+ }, 'deletePosts response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/graphql',
+ body: {
+ query: `
+ query {
+ posts {
+ id
+ title
+ longText
+ }
+ }
+ `
+ }
+ })
+ equal(res.statusCode, 200, 'posts status code')
+ same(res.json(), {
+ data: {
+ posts: []
+ }
+ }, 'posts response')
+ }
+})
diff --git a/packages/sql-json-schema-mapper/LICENSE b/packages/sql-json-schema-mapper/LICENSE
new file mode 100644
index 0000000000..261eeb9e9f
--- /dev/null
+++ b/packages/sql-json-schema-mapper/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/sql-json-schema-mapper/NOTICE b/packages/sql-json-schema-mapper/NOTICE
new file mode 100644
index 0000000000..a7d8a8414a
--- /dev/null
+++ b/packages/sql-json-schema-mapper/NOTICE
@@ -0,0 +1,13 @@
+ Copyright 2022 Platformatic
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/sql-json-schema-mapper/README.md b/packages/sql-json-schema-mapper/README.md
new file mode 100644
index 0000000000..f0c8c9fbba
--- /dev/null
+++ b/packages/sql-json-schema-mapper/README.md
@@ -0,0 +1,13 @@
+# @platformatic/sql-json-schema-mapper
+
+Utility module for Platformatic DB: it maps SQL entities to JSON Schema objects.
+
+## Install
+
+```sh
+npm install @platformatic/sql-json-schema-mapper
+```
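+
+## Example
+
+A minimal sketch of how the exported helpers can be used. The `page` entity below is hand-built for illustration and mirrors the shape produced by `@platformatic/sql-mapper` (each field carries a `sqlType`, a `camelcase` name, an `isNullable` flag and, for the primary key, `primaryKey: true`):
+
+```js
+const { mapSQLTypeToOpenAPIType, mapSQLEntityToJSONSchema } = require('@platformatic/sql-json-schema-mapper')
+
+// Hand-built entity; normally this comes from app.platformatic.entities.page
+const page = {
+  name: 'Page',
+  fields: {
+    id: { sqlType: 'integer', camelcase: 'id', isNullable: false, primaryKey: true },
+    title: { sqlType: 'varchar', camelcase: 'title', isNullable: false },
+    description: { sqlType: 'text', camelcase: 'description', isNullable: true }
+  }
+}
+
+console.log(mapSQLTypeToOpenAPIType('varchar')) // 'string'
+
+console.log(mapSQLEntityToJSONSchema(page))
+// {
+//   $id: 'Page',
+//   title: 'Page',
+//   description: 'A Page',
+//   type: 'object',
+//   properties: {
+//     id: { type: 'integer' },
+//     title: { type: 'string' },
+//     description: { type: 'string', nullable: true }
+//   },
+//   required: ['title']
+// }
+```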
+
+## License
+
+Apache 2.0
diff --git a/packages/sql-json-schema-mapper/index.js b/packages/sql-json-schema-mapper/index.js
new file mode 100644
index 0000000000..53212046af
--- /dev/null
+++ b/packages/sql-json-schema-mapper/index.js
@@ -0,0 +1,82 @@
+'use strict'
+
+function mapSQLTypeToOpenAPIType (sqlType) {
+ // TODO support more types
+ /* istanbul ignore next */
+ switch (sqlType) {
+ case 'int':
+ return 'integer'
+ case 'integer':
+ return 'integer'
+ case 'tinyint':
+ return 'integer'
+ case 'smallint':
+ return 'integer'
+ case 'decimal':
+ return 'integer'
+ case 'bigint':
+ return 'integer'
+ case 'int2':
+ return 'integer'
+ case 'int4':
+ return 'integer'
+ case 'varchar':
+ return 'string'
+ case 'text':
+ return 'string'
+ case 'bool':
+ return 'boolean'
+ case 'real':
+ return 'number'
+ case 'float8':
+ return 'number'
+ case 'double':
+ return 'number'
+ case 'double precision':
+ return 'number'
+ case 'numeric':
+ return 'number'
+ case 'bigint unsigned':
+ return 'integer'
+ case 'float4':
+ return 'number'
+ case 'date':
+ return 'string'
+ case 'time':
+ return 'string'
+ case 'timestamp':
+ return 'string'
+ case 'uuid':
+ return 'string'
+ default:
+ return 'string'
+ }
+}
+
+function mapSQLEntityToJSONSchema (entity) {
+ const fields = entity.fields
+ const properties = {}
+ const required = []
+ for (const name of Object.keys(fields)) {
+ const field = fields[name]
+ const type = mapSQLTypeToOpenAPIType(field.sqlType)
+ properties[field.camelcase] = { type }
+ if (field.isNullable) {
+ properties[field.camelcase].nullable = true
+ }
+ if (!field.isNullable && !field.primaryKey) {
+ // we skip the primary key for creation
+ required.push(field.camelcase)
+ }
+ }
+ return {
+ $id: entity.name,
+ title: entity.name,
+ description: `A ${entity.name}`,
+ type: 'object',
+ properties,
+ required
+ }
+}
+
+module.exports = { mapSQLTypeToOpenAPIType, mapSQLEntityToJSONSchema }
diff --git a/packages/sql-json-schema-mapper/package.json b/packages/sql-json-schema-mapper/package.json
new file mode 100644
index 0000000000..f32f6541a9
--- /dev/null
+++ b/packages/sql-json-schema-mapper/package.json
@@ -0,0 +1,31 @@
+{
+ "name": "@platformatic/sql-json-schema-mapper",
+ "version": "0.0.21",
+ "description": "Map SQL entity to JSON schema",
+ "main": "index.js",
+ "scripts": {
+ "test": "standard | snazzy && npm run test:postgresql && npm run test:mariadb && npm run test:mysql && npm run test:mysql8 && npm run test:sqlite",
+ "test:postgresql": "DB=postgresql tap test/*.test.js",
+ "test:mariadb": "DB=mariadb tap test/*.test.js",
+ "test:mysql": "DB=mysql tap test/*.test.js",
+ "test:mysql8": "DB=mysql8 tap test/*.test.js",
+ "test:sqlite": "DB=sqlite tap test/*.test.js"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/plaformatic/platformatic.git"
+ },
+ "author": "Matteo Collina ",
+ "license": "Apache-2.0",
+ "bugs": {
+ "url": "https://github.com/plaformatic/platformatic/issues"
+ },
+ "homepage": "https://github.com/plaformatic/platformatic#readme",
+ "devDependencies": {
+ "@platformatic/sql-mapper": "workspace:*",
+ "fastify": "^4.6.0",
+ "snazzy": "^9.0.0",
+ "standard": "^17.0.0",
+ "tap": "^16.0.0"
+ }
+}
diff --git a/packages/sql-json-schema-mapper/test/helper.js b/packages/sql-json-schema-mapper/test/helper.js
new file mode 100644
index 0000000000..e277dabbb0
--- /dev/null
+++ b/packages/sql-json-schema-mapper/test/helper.js
@@ -0,0 +1,71 @@
+'use strict'
+
+// Needed to work with dates & postgresql
+// See https://node-postgres.com/features/types/
+process.env.TZ = 'UTC'
+
+const connInfo = {}
+
+if (!process.env.DB || process.env.DB === 'postgresql') {
+ connInfo.connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ module.exports.isPg = true
+} else if (process.env.DB === 'mariadb') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1:3307/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'mysql') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'mysql8') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1:3308/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'sqlite') {
+ connInfo.connectionString = 'sqlite://:memory:'
+ module.exports.isSQLite = true
+}
+
+module.exports.connInfo = connInfo
+
+module.exports.clear = async function (db, sql) {
+ try {
+ await db.query(sql`DROP TABLE pages`)
+ } catch (err) {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE categories`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE posts`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE simple_types`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE owners`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE users`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE versions`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE graphs`)
+ } catch {
+ }
+}
diff --git a/packages/sql-json-schema-mapper/test/simple.test.js b/packages/sql-json-schema-mapper/test/simple.test.js
new file mode 100644
index 0000000000..d39bcf6e8a
--- /dev/null
+++ b/packages/sql-json-schema-mapper/test/simple.test.js
@@ -0,0 +1,56 @@
+'use strict'
+
+const t = require('tap')
+const sqlMapper = require('@platformatic/sql-mapper')
+const fastify = require('fastify')
+const { clear, connInfo, isSQLite } = require('./helper')
+const { mapSQLEntityToJSONSchema } = require('..')
+const { test } = t
+
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42) NOT NULL,
+ description TEXT
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42) NOT NULL,
+ description TEXT
+ );`)
+ }
+}
+
+test('simple db, simple rest API', async (t) => {
+ const { pass, teardown } = t
+
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const page = app.platformatic.entities.page
+ const pageJsonSchema = mapSQLEntityToJSONSchema(page)
+
+ t.equal(pageJsonSchema.$id, 'Page')
+ t.equal(pageJsonSchema.title, 'Page')
+ t.equal(pageJsonSchema.description, 'A Page')
+ t.equal(pageJsonSchema.type, 'object')
+ t.same(pageJsonSchema.properties.id, { type: 'integer' })
+ t.same(pageJsonSchema.properties.title, { type: 'string' })
+ t.same(pageJsonSchema.properties.description, { type: 'string', nullable: true })
+ t.same(pageJsonSchema.required, ['title'])
+ }
+})
diff --git a/packages/sql-mapper/.taprc b/packages/sql-mapper/.taprc
new file mode 100644
index 0000000000..c1917e8701
--- /dev/null
+++ b/packages/sql-mapper/.taprc
@@ -0,0 +1 @@
+jobs: 1
diff --git a/packages/sql-mapper/LICENSE b/packages/sql-mapper/LICENSE
new file mode 100644
index 0000000000..261eeb9e9f
--- /dev/null
+++ b/packages/sql-mapper/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/sql-mapper/NOTICE b/packages/sql-mapper/NOTICE
new file mode 100644
index 0000000000..a7d8a8414a
--- /dev/null
+++ b/packages/sql-mapper/NOTICE
@@ -0,0 +1,13 @@
+ Copyright 2022 Platformatic
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/sql-mapper/README.md b/packages/sql-mapper/README.md
new file mode 100644
index 0000000000..ff986ae1ee
--- /dev/null
+++ b/packages/sql-mapper/README.md
@@ -0,0 +1,13 @@
+# @platformatic/sql-mapper
+
+Check out the full documentation on [our website](https://oss.platformatic.dev/docs/reference/sql-mapper/introduction).
+
+## Install
+
+```sh
+npm install @platformatic/sql-mapper
+```
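+
+## Example
+
+A minimal sketch mirroring the test setup in this repository: the plugin connects to the database on registration, generates one entity per table, and exposes them on the `platformatic` decorator:
+
+```js
+const fastify = require('fastify')
+const sqlMapper = require('@platformatic/sql-mapper')
+
+async function main () {
+  const app = fastify()
+  app.register(sqlMapper, {
+    connectionString: 'sqlite://:memory:',
+    async onDatabaseLoad (db, sql) {
+      // Create a table to map; each table becomes an entity
+      await db.query(sql`CREATE TABLE pages (
+        id INTEGER PRIMARY KEY,
+        title VARCHAR(42)
+      );`)
+    }
+  })
+  await app.ready()
+
+  const { page } = app.platformatic.entities
+  const saved = await page.save({ input: { title: 'Hello' } })
+  const found = await page.find({ where: { title: { eq: 'Hello' } } })
+  console.log(saved, found)
+
+  await app.close()
+}
+
+main()
+```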
+
+## License
+
+Apache 2.0
diff --git a/packages/sql-mapper/lib/entity.js b/packages/sql-mapper/lib/entity.js
new file mode 100644
index 0000000000..830ac3ee63
--- /dev/null
+++ b/packages/sql-mapper/lib/entity.js
@@ -0,0 +1,287 @@
+'use strict'
+
+const camelcase = require('camelcase')
+const { singularize } = require('inflected')
+const {
+ toSingular
+} = require('./utils')
+
+function createMapper (db, sql, log, table, fields, primaryKey, relations, queries, autoTimestamp) {
+ const entityName = toSingular(table)
+
+ // Fields remapping
+ const fieldMapToRetrieve = {}
+ const inputToFieldMap = {}
+ const camelCasedFields = Object.keys(fields).reduce((acc, key) => {
+ const camel = camelcase(key)
+ acc[camel] = fields[key]
+ fieldMapToRetrieve[key] = camel
+ inputToFieldMap[camel] = key
+ fields[key].camelcase = camel
+ return acc
+ }, {})
+ const alwaysRetrieve = relations.map((relation) => relation.column_name)
+
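+  // Map camelCased input keys back to the underlying column names (unknown fields throw)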
+ function fixInput (input) {
+ const newInput = {}
+ for (const key of Object.keys(input)) {
+ const value = input[key]
+ let newKey = inputToFieldMap[key]
+ if (newKey === undefined) {
+ if (fields[key] !== undefined) {
+ newKey = key
+ } else {
+ throw new Error(`Unknown field ${key}`)
+ }
+ }
+ newInput[newKey] = value
+ }
+ return newInput
+ }
+
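+  // Map column names in a database row back to their camelCased form; the primary key is returned as a string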
+ function fixOutput (output) {
+ if (!output) {
+ return output
+ }
+ const newOutput = {}
+ for (const key of Object.keys(output)) {
+ let value = output[key]
+ const newKey = fieldMapToRetrieve[key]
+ if (key === primaryKey && value !== null && value !== undefined) {
+ value = value.toString()
+ }
+ newOutput[newKey] = value
+ }
+ return newOutput
+ }
+
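+  // Insert a new row or update an existing one, depending on whether the primary key is present in the input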
+ async function save (args) {
+ if (args.input === undefined) {
+ throw new Error('Input not provided.')
+ }
+    // args.input is not an array
+ const fieldsToRetrieve = computeFields(args.fields).map((f) => sql.ident(f))
+ const input = fixInput(args.input)
+ let now
+ if (autoTimestamp && fields.updated_at) {
+ now = new Date()
+ input.updated_at = now
+ }
+ if (input[primaryKey]) { // update
+ const res = await queries.updateOne(db, sql, table, input, primaryKey, fieldsToRetrieve)
+ return fixOutput(res)
+ } else { // insert
+ if (autoTimestamp && fields.inserted_at) {
+ /* istanbul ignore next */
+ now = now || new Date()
+ input.inserted_at = now
+ }
+ const res = await queries.insertOne(db, sql, table, input, primaryKey, fields[primaryKey].sqlType.toLowerCase() === 'uuid', fieldsToRetrieve)
+ return fixOutput(res)
+ }
+ }
+
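+  // Batch insert; uses the dialect's insertMany when available, otherwise falls back to one insertOne per input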
+ async function insert (args) {
+ const fieldsToRetrieve = computeFields(args.fields).map((f) => sql.ident(f))
+ const inputs = args.inputs
+ // This else is skipped on MySQL because of https://github.com/ForbesLindesay/atdatabases/issues/221
+ /* istanbul ignore else */
+ if (autoTimestamp) {
+ const now = new Date()
+ for (const input of inputs) {
+ if (fields.inserted_at) {
+ input.insertedAt = now
+ }
+ if (fields.updated_at) {
+ input.updatedAt = now
+ }
+ }
+ }
+ /* istanbul ignore next */
+ if (queries.insertMany) {
+ // We are not fixing the input here because it is done in the query.
+ const res = await queries.insertMany(db, sql, table, inputs, inputToFieldMap, primaryKey, fieldsToRetrieve, fields)
+ return res.map(fixOutput)
+ } else {
+ // TODO this can be optimized, we can still use a batch insert if we do not want any fields
+ const res = []
+ for (let input of inputs) {
+ input = fixInput(input)
+ const resOne = await queries.insertOne(db, sql, table, input, primaryKey, fields[primaryKey].sqlType.toLowerCase() === 'uuid', fieldsToRetrieve)
+ res.push(fixOutput(resOne))
+ }
+
+ return res
+ }
+ }
+
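+  // Compute the set of columns to retrieve; columns used by relations are always included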
+ function computeFields (fields) {
+ if (!fields) {
+ return Object.values(inputToFieldMap)
+ }
+
+ const requestedFields = fields.map((field) => inputToFieldMap[field])
+ const set = new Set([...alwaysRetrieve, ...requestedFields])
+ set.delete(undefined)
+ const fieldsToRetrieve = [...set]
+ return fieldsToRetrieve
+ }
+
+ const whereMap = {
+ eq: '=',
+ in: 'IN',
+ nin: 'NOT IN',
+ neq: '<>',
+ gt: '>',
+ gte: '>=',
+ lt: '<',
+ lte: '<='
+ }
+
+ function computeCriteria (opts) {
+ const where = opts.where || {}
+ const criteria = []
+ for (const key of Object.keys(where)) {
+ const value = where[key]
+ const field = inputToFieldMap[key]
+ for (const key of Object.keys(value)) {
+ const operator = whereMap[key]
+ /* istanbul ignore next */
+ if (!operator) {
+ // This should never happen
+        throw new Error(`Unsupported where clause ${JSON.stringify(value)}`)
+ }
+ const fieldWrap = fields[field]
+ criteria.push(sql`${sql.ident(field)} ${sql.__dangerous__rawValue(operator)} ${computeCriteriaValue(fieldWrap, value[key])}`)
+ }
+ }
+ return criteria
+ }
+
+ function computeCriteriaValue (fieldWrap, value) {
+ if (Array.isArray(value)) {
+ return sql`(${sql.join(
+ value.map((v) => computeCriteriaValue(fieldWrap, v)),
+ sql`, `
+ )})`
+ }
+
+    /* istanbul ignore next */
+    if (fieldWrap.sqlType === 'int4' || fieldWrap.sqlType === 'int2' || fieldWrap.sqlType === 'float8' || fieldWrap.sqlType === 'float4') {
+      // This cast is needed in PostgreSQL
+      return sql`${Number(value)}`
+    } else {
+      return sql`${value}`
+    }
+  }
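+
+  // Illustrative note: with a camel-cased field categoryId mapped to the column category_id
+  // (a hypothetical example), a where clause of { categoryId: { in: [1, 2] } } produces the
+  // criteria fragment `category_id IN (1, 2)`, while { categoryId: { neq: 3 } } produces
+  // `category_id <> 3`. Values are passed as bound parameters by the sql template tag;
+  // only the operator taken from whereMap is inlined as raw SQL.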
+
+ async function find (opts = {}) {
+ const fieldsToRetrieve = computeFields(opts.fields).map((f) => sql.ident(f))
+ const criteria = computeCriteria(opts)
+ let query = sql`
+ SELECT ${sql.join(fieldsToRetrieve, sql`, `)}
+ FROM ${sql.ident(table)}
+ `
+
+ if (criteria.length > 0) {
+ query = sql`${query} WHERE ${sql.join(criteria, sql` AND `)}`
+ }
+
+ if (opts.orderBy && opts.orderBy.length > 0) {
+ const orderBy = opts.orderBy.map((order) => {
+ const field = inputToFieldMap[order.field]
+ return sql`${sql.ident(field)} ${sql.__dangerous__rawValue(order.direction)}`
+ })
+ query = sql`${query} ORDER BY ${sql.join(orderBy, sql`, `)}`
+ }
+
+ if (opts.limit && opts.offset !== undefined) {
+ query = sql`${query} LIMIT ${opts.limit} OFFSET ${opts.offset}`
+ }
+
+ const res = await db.query(query)
+ return res.map(fixOutput)
+ }
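+
+  // Illustrative note: find({ fields: ['id', 'title'], where: { id: { eq: 1 } }, limit: 10, offset: 0 })
+  // builds roughly `SELECT id, title FROM pages WHERE id = ? LIMIT 10 OFFSET 0`, assuming a
+  // hypothetical pages table with no relations (identifier quoting and parameter placeholders
+  // depend on the @databases driver in use).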
+
+ async function _delete (opts) {
+ const fieldsToRetrieve = computeFields(opts.fields).map((f) => sql.ident(f))
+ const criteria = computeCriteria(opts)
+ const res = await queries.deleteAll(db, sql, table, criteria, fieldsToRetrieve)
+ return res.map(fixOutput)
+ }
+
+ return {
+ name: entityName,
+ singularName: camelcase(singularize(table)),
+ pluralName: camelcase(table),
+ primaryKey,
+ table,
+ fields,
+ camelCasedFields,
+ fixInput,
+ fixOutput,
+ find,
+ insert,
+ save,
+ delete: _delete
+ }
+}
+
+async function buildEntity (db, sql, log, table, queries, autoTimestamp, ignore) {
+ // Compute the columns
+ const columns = (await queries.listColumns(db, sql, table)).filter((c) => !ignore[c.column_name])
+ const fields = columns.reduce((acc, column) => {
+ acc[column.column_name] = {
+ sqlType: column.udt_name,
+ isNullable: column.is_nullable === 'YES'
+ }
+ if (autoTimestamp && (column.column_name === 'updated_at' || column.column_name === 'inserted_at')) {
+ acc[column.column_name].autoTimestamp = true
+ }
+ return acc
+ }, {})
+
+ const currentRelations = []
+
+ const constraintsList = await queries.listConstraints(db, sql, table)
+ let primaryKey
+
+ for (const constraint of constraintsList) {
+ const field = fields[constraint.column_name]
+
+ /* istanbul ignore next */
+ if (!field) {
+ // This should never happen
+ log.warn({
+ constraint
+ }, `No field for ${constraint.column_name}`)
+ continue
+ }
+
+ if (constraint.constraint_type === 'PRIMARY KEY') {
+ primaryKey = constraint.column_name
+ // Check for SQLite typeless PK
+ /* istanbul ignore next */
+ if (db.isSQLite) {
+ const validTypes = ['integer', 'uuid', 'serial']
+ const pkType = fields[primaryKey].sqlType.toLowerCase()
+ if (!validTypes.includes(pkType)) {
+ throw new Error(`Invalid Primary Key type. Expected "integer", found "${pkType}"`)
+ }
+ }
+ field.primaryKey = true
+ }
+
+ if (constraint.constraint_type === 'FOREIGN KEY') {
+ field.foreignKey = true
+ currentRelations.push(constraint)
+ }
+ }
+
+ const entity = createMapper(db, sql, log, table, fields, primaryKey, currentRelations, queries, autoTimestamp)
+ entity.relations = currentRelations
+
+ return entity
+}
+
+module.exports = buildEntity
diff --git a/packages/sql-mapper/lib/queries/index.js b/packages/sql-mapper/lib/queries/index.js
new file mode 100644
index 0000000000..dfb17f5e66
--- /dev/null
+++ b/packages/sql-mapper/lib/queries/index.js
@@ -0,0 +1,27 @@
+'use strict'
+
+/* istanbul ignore file */
+
+const obj = {}
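+
+// Each driver module is exposed through a lazy getter so that requiring this file
+// does not load every @databases driver: only the one that is actually accessed
+// (pg, mysql, mariadb or sqlite) gets required at runtime.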
+
+Object.defineProperty(obj, 'pg', {
+ get: () => require('./pg')
+})
+
+Object.defineProperty(obj, 'mysql', {
+ get: () => require('./mysql')
+})
+
+Object.defineProperty(obj, 'mariadb', {
+ get: () => require('./mariadb')
+})
+
+Object.defineProperty(obj, 'sqlite', {
+ get: () => require('./sqlite')
+})
+
+module.exports = obj
diff --git a/packages/sql-mapper/lib/queries/mariadb.js b/packages/sql-mapper/lib/queries/mariadb.js
new file mode 100644
index 0000000000..f06ff3b823
--- /dev/null
+++ b/packages/sql-mapper/lib/queries/mariadb.js
@@ -0,0 +1,11 @@
+'use strict'
+
+const shared = require('./shared')
+const mysql = require('./mysql-shared')
+
+module.exports = {
+ ...mysql,
+ insertOne: shared.insertOne,
+ insertMany: shared.insertMany,
+ deleteAll: shared.deleteAll
+}
diff --git a/packages/sql-mapper/lib/queries/mysql-shared.js b/packages/sql-mapper/lib/queries/mysql-shared.js
new file mode 100644
index 0000000000..f36249bdd4
--- /dev/null
+++ b/packages/sql-mapper/lib/queries/mysql-shared.js
@@ -0,0 +1,62 @@
+'use strict'
+
+async function listTables (db, sql) {
+ const res = await db.query(sql`
+ SELECT TABLE_NAME
+ FROM information_schema.tables
+ WHERE table_schema = (SELECT DATABASE())
+ `)
+ return res.map(r => r.TABLE_NAME)
+}
+
+async function listColumns (db, sql, table) {
+ const res = await db.query(sql`
+ SELECT column_name as column_name, data_type as udt_name, is_nullable as is_nullable
+ FROM information_schema.columns
+ WHERE table_name = ${table}
+ AND table_schema = (SELECT DATABASE())
+ `)
+ return res
+}
+
+async function listConstraints (db, sql, table) {
+ const res = await db.query(sql`
+ SELECT TABLE_NAME as table_name, COLUMN_NAME as column_name, CONSTRAINT_TYPE as constraint_type, referenced_table_name AS foreign_table_name, referenced_column_name AS foreign_column_name
+ FROM information_schema.table_constraints t
+ JOIN information_schema.key_column_usage k
+ USING (constraint_name, table_schema, table_name)
+ WHERE t.table_name = ${table}
+ AND t.table_schema = (SELECT DATABASE())
+ `)
+
+ return res
+}
+
+async function updateOne (db, sql, table, input, primaryKey, fieldsToRetrieve) {
+ const pairs = Object.keys(input).map((key) => {
+ const value = input[key]
+ return sql`${sql.ident(key)} = ${value}`
+ })
+ const update = sql`
+ UPDATE ${sql.ident(table)}
+ SET ${sql.join(pairs, sql`, `)}
+ WHERE ${sql.ident(primaryKey)} = ${sql.value(input[primaryKey])}
+ `
+ await db.query(update)
+
+ const select = sql`
+ SELECT ${sql.join(fieldsToRetrieve, sql`, `)}
+ FROM ${sql.ident(table)}
+ WHERE ${sql.ident(primaryKey)} = ${sql.value(input[primaryKey])}
+ `
+
+ const res = await db.query(select)
+ return res[0]
+}
+
+module.exports = {
+ listTables,
+ listColumns,
+ listConstraints,
+ updateOne
+}
diff --git a/packages/sql-mapper/lib/queries/mysql.js b/packages/sql-mapper/lib/queries/mysql.js
new file mode 100644
index 0000000000..7e51446f60
--- /dev/null
+++ b/packages/sql-mapper/lib/queries/mysql.js
@@ -0,0 +1,104 @@
+'use strict'
+
+const { insertPrep } = require('./shared')
+const shared = require('./mysql-shared')
+
+function insertOne (db, sql, table, input, primaryKey, useUUID, fieldsToRetrieve) {
+ const keysToSql = Object.keys(input).map((key) => sql.ident(key))
+ const keys = sql.join(
+ keysToSql,
+ sql`, `
+ )
+
+ const valuesToSql = Object.keys(input).map((key) => {
+ return sql.value(input[key])
+ })
+ const values = sql.join(
+ valuesToSql,
+ sql`, `
+ )
+
+ return db.tx(async function (db) {
+ const insert = sql`
+ INSERT INTO ${sql.ident(table)} (${keys})
+ VALUES(${values})
+ `
+ await db.query(insert)
+
+ const res2 = await db.query(sql`
+ SELECT ${sql.join(fieldsToRetrieve, sql`, `)}
+ FROM ${sql.ident(table)}
+ WHERE ${sql.ident(primaryKey)} = (
+ SELECT last_insert_id()
+ )
+ `)
+
+ return res2[0]
+ })
+}
+
+function insertMany (db, sql, table, inputs, inputToFieldMap, primaryKey, fieldsToRetrieve, fields) {
+ return db.tx(async function (db) {
+ const { keys, values } = insertPrep(inputs, inputToFieldMap, fields, sql)
+ const insert = sql`
+ insert into ${sql.ident(table)} (${keys})
+ values ${sql.join(values, sql`, `)}
+ `
+
+ await db.query(insert)
+
+ const res = await db.query(sql`
+ SELECT ${sql.join(fieldsToRetrieve, sql`, `)}
+ FROM ${sql.ident(table)}
+ ORDER BY ${sql.ident(primaryKey)} DESC
+ LIMIT ${inputs.length}
+ `)
+
+    // Sort to keep the result order consistent with shared.insertMany
+ res.sort(function (a, b) {
+ return a.id - b.id
+ })
+ return res
+ })
+}
+
+function deleteAll (db, sql, table, criteria, fieldsToRetrieve) {
+ return db.tx(async function (db) {
+ let selectQuery = sql`
+ SELECT ${sql.join(fieldsToRetrieve, sql`, `)}
+ FROM ${sql.ident(table)}
+ `
+ /* istanbul ignore else */
+ if (criteria.length > 0) {
+ selectQuery = sql`
+ ${selectQuery}
+ WHERE ${sql.join(criteria, sql` AND `)}
+ `
+ }
+
+ const res = await db.query(selectQuery)
+
+ let deleteQuery = sql`
+ DELETE FROM ${sql.ident(table)}
+ `
+
+ /* istanbul ignore else */
+ if (criteria.length > 0) {
+ deleteQuery = sql`
+ ${deleteQuery}
+ WHERE ${sql.join(criteria, sql` AND `)}
+ `
+ }
+
+ await db.query(deleteQuery)
+
+ return res
+ })
+}
+
+module.exports = {
+ ...shared,
+ insertOne,
+ insertMany,
+ deleteAll
+}
diff --git a/packages/sql-mapper/lib/queries/pg.js b/packages/sql-mapper/lib/queries/pg.js
new file mode 100644
index 0000000000..5d954a5518
--- /dev/null
+++ b/packages/sql-mapper/lib/queries/pg.js
@@ -0,0 +1,79 @@
+'use strict'
+
+const shared = require('./shared')
+
+async function insertOne (db, sql, table, input, primaryKey, isUuid, fieldsToRetrieve) {
+ const inputKeys = Object.keys(input)
+ if (inputKeys.length === 0) {
+ const insert = sql`
+ INSERT INTO ${sql.ident(table)}
+ DEFAULT VALUES
+ RETURNING ${sql.join(fieldsToRetrieve, sql`, `)}
+ `
+ const res = await db.query(insert)
+ return res[0]
+ }
+
+ return shared.insertOne(db, sql, table, input, primaryKey, isUuid, fieldsToRetrieve)
+}
+
+module.exports.insertOne = insertOne
+module.exports.deleteAll = shared.deleteAll
+module.exports.insertMany = shared.insertMany
+
+async function listTables (db, sql) {
+ return (await db.query(sql`
+ SELECT tablename
+ FROM pg_catalog.pg_tables
+ WHERE
+ schemaname = current_schema()
+ `)).map(t => t.tablename)
+}
+
+module.exports.listTables = listTables
+
+async function listColumns (db, sql, table) {
+ return db.query(sql`
+ SELECT column_name, udt_name, is_nullable
+ FROM information_schema.columns
+ WHERE table_name = ${table}
+ AND table_schema = current_schema()
+ `)
+}
+
+module.exports.listColumns = listColumns
+
+async function listConstraints (db, sql, table) {
+ const query = sql`
+ SELECT constraints.*, usage.*, usage2.table_name AS foreign_table_name, usage2.column_name AS foreign_column_name
+ FROM information_schema.table_constraints constraints
+ JOIN information_schema.key_column_usage usage
+ ON constraints.constraint_name = usage.constraint_name
+ AND constraints.table_name = ${table}
+ JOIN information_schema.constraint_column_usage usage2
+ ON usage.constraint_name = usage2.constraint_name
+ AND usage.table_name = ${table}
+ `
+
+ const constraintsList = await db.query(query)
+ return constraintsList
+}
+
+module.exports.listConstraints = listConstraints
+
+async function updateOne (db, sql, table, input, primaryKey, fieldsToRetrieve) {
+ const pairs = Object.keys(input).map((key) => {
+ const value = input[key]
+ return sql`${sql.ident(key)} = ${value}`
+ })
+ const update = sql`
+ UPDATE ${sql.ident(table)}
+ SET ${sql.join(pairs, sql`, `)}
+ WHERE ${sql.ident(primaryKey)} = ${sql.value(input[primaryKey])}
+ RETURNING ${sql.join(fieldsToRetrieve, sql`, `)}
+ `
+ const res = await db.query(update)
+ return res[0]
+}
+
+module.exports.updateOne = updateOne
diff --git a/packages/sql-mapper/lib/queries/shared.js b/packages/sql-mapper/lib/queries/shared.js
new file mode 100644
index 0000000000..7c66a1f04d
--- /dev/null
+++ b/packages/sql-mapper/lib/queries/shared.js
@@ -0,0 +1,100 @@
+'use strict'
+
+/* istanbul ignore file */
+
+async function insertOne (db, sql, table, input, primaryKey, isUuid, fieldsToRetrieve) {
+ const inputKeys = Object.keys(input)
+ if (inputKeys.length === 0) {
+ const insert = sql`
+ INSERT INTO ${sql.ident(table)}
+ ()
+ VALUES ()
+ RETURNING ${sql.join(fieldsToRetrieve, sql`, `)}
+ `
+ const res = await db.query(insert)
+ return res[0]
+ }
+
+ const keys = sql.join(
+ inputKeys.map((key) => sql.ident(key)),
+ sql`, `
+ )
+ const values = sql.join(
+ Object.keys(input).map((key) => sql.value(input[key])),
+ sql`, `
+ )
+ const insert = sql`
+ INSERT INTO ${sql.ident(table)} (${keys})
+ VALUES (${values})
+ RETURNING ${sql.join(fieldsToRetrieve, sql`, `)}
+ `
+ const res = await db.query(insert)
+ return res[0]
+}
+
+async function deleteAll (db, sql, table, criteria, fieldsToRetrieve) {
+ let query = sql`
+ DELETE FROM ${sql.ident(table)}
+ `
+
+ if (criteria.length > 0) {
+ query = sql`${query} WHERE ${sql.join(criteria, sql` AND `)}`
+ }
+
+ query = sql`${query} RETURNING ${sql.join(fieldsToRetrieve, sql`, `)}`
+ const res = await db.query(query)
+ return res
+}
+
+async function insertMany (db, sql, table, inputs, inputToFieldMap, primaryKey, fieldsToRetrieve, fields) {
+ const { keys, values } = insertPrep(inputs, inputToFieldMap, fields, sql)
+ const insert = sql`
+ insert into ${sql.ident(table)} (${keys})
+ values ${sql.join(values, sql`, `)}
+ returning ${sql.join(fieldsToRetrieve, sql`, `)}
+ `
+
+ const res = await db.query(insert)
+ return res
+}
+
+function insertPrep (inputs, inputToFieldMap, fields, sql) {
+ const inputSet = new Set()
+ const values = []
+ for (const input of inputs) {
+ const inputValues = []
+ for (const key of Object.keys(input)) {
+ let newKey = key
+ if (inputToFieldMap[key] === undefined) {
+ if (fields[key] === undefined) {
+ throw new Error('Unknown field ' + key)
+ }
+ } else {
+ newKey = inputToFieldMap[key]
+ }
+
+ inputSet.add(newKey)
+
+ const value = input[key] || input[newKey]
+ inputValues.push(sql.value(value))
+ }
+
+ values.push(sql` (${sql.join(
+ inputValues,
+ sql`, `
+ )})`)
+ }
+ const inputKeys = Array.from(inputSet)
+ const keys = sql.join(
+ inputKeys.map((key) => sql.ident(key)),
+ sql`, `
+ )
+ return { keys, values }
+}
+
+module.exports = {
+ insertOne,
+ insertPrep,
+ deleteAll,
+ insertMany
+}
diff --git a/packages/sql-mapper/lib/queries/sqlite.js b/packages/sql-mapper/lib/queries/sqlite.js
new file mode 100644
index 0000000000..c8a45f7f9a
--- /dev/null
+++ b/packages/sql-mapper/lib/queries/sqlite.js
@@ -0,0 +1,169 @@
+'use strict'
+
+const { randomUUID } = require('crypto')
+
+async function listTables (db, sql) {
+ const tables = await db.query(sql`
+ SELECT name FROM sqlite_master
+ WHERE type='table'
+ `)
+ return tables.map(t => t.name)
+}
+
+module.exports.listTables = listTables
+
+async function listColumns (db, sql, table) {
+ const columns = await db.query(sql`
+ SELECT * FROM pragma_table_info(${table})
+ `)
+ for (const column of columns) {
+ column.column_name = column.name
+    // convert varchar(42) to varchar
+ column.udt_name = column.type.replace(/^([^(]+).*/, '$1').toLowerCase()
+ // convert is_nullable
+ column.is_nullable = column.notnull === 0 && column.pk === 0 ? 'YES' : 'NO'
+ }
+ return columns
+}
+
+module.exports.listColumns = listColumns
+
+async function listConstraints (db, sql, table) {
+ const constraints = []
+ const pks = await db.query(sql`
+ SELECT *
+ FROM pragma_table_info(${table})
+ WHERE pk > 0
+ `)
+
+ if (pks.length > 1) {
+ throw new Error(`Table ${table} has ${pks.length} primary keys`)
+ }
+
+ if (pks.length === 1) {
+ constraints.push({
+ column_name: pks[0].name,
+ constraint_type: 'PRIMARY KEY'
+ })
+ }
+
+ const foreignKeys = await db.query(sql`
+ SELECT *
+ FROM pragma_foreign_key_list(${table})
+ `)
+
+ for (const foreignKey of foreignKeys) {
+ constraints.push({
+ table_name: table,
+ column_name: foreignKey.from,
+ constraint_type: 'FOREIGN KEY',
+ foreign_table_name: foreignKey.table,
+ foreign_column_name: foreignKey.to
+ })
+ }
+ return constraints
+}
+
+module.exports.listConstraints = listConstraints
+
+async function insertOne (db, sql, table, input, primaryKey, useUUID, fieldsToRetrieve) {
+ const keysToSql = Object.keys(input).map((key) => sql.ident(key))
+ keysToSql.push(sql.ident(primaryKey))
+ const keys = sql.join(
+ keysToSql,
+ sql`, `
+ )
+
+ const valuesToSql = Object.keys(input).map((key) => {
+ return sql.value(input[key])
+ })
+ let primaryKeyValue
+ // TODO add test for this
+ if (useUUID) {
+ primaryKeyValue = randomUUID()
+ valuesToSql.push(sql.value(primaryKeyValue))
+ } else {
+ valuesToSql.push(sql.value(null))
+ }
+ const values = sql.join(
+ valuesToSql,
+ sql`, `
+ )
+
+ const insert = sql`
+ INSERT INTO ${sql.ident(table)} (${keys})
+ VALUES(${values})
+ `
+ await db.query(insert)
+
+ if (!useUUID) {
+ const res2 = await db.query(sql`
+ SELECT last_insert_rowid()
+ `)
+
+ primaryKeyValue = res2[0]['last_insert_rowid()']
+ }
+
+ const res = await db.query(sql`
+ SELECT ${sql.join(fieldsToRetrieve, sql`, `)}
+ FROM ${sql.ident(table)}
+ WHERE ${sql.ident(primaryKey)} = ${sql.value(primaryKeyValue)}
+ `)
+
+ return res[0]
+}
+
+module.exports.insertOne = insertOne
+
+async function updateOne (db, sql, table, input, primaryKey, fieldsToRetrieve) {
+ const pairs = Object.keys(input).map((key) => {
+ const value = input[key]
+ return sql`${sql.ident(key)} = ${value}`
+ })
+
+ const update = sql`
+ UPDATE ${sql.ident(table)}
+ SET ${sql.join(pairs, sql`, `)}
+ WHERE ${sql.ident(primaryKey)} = ${sql.value(input[primaryKey])}
+ `
+ await db.query(update)
+
+ const select = sql`
+ SELECT ${sql.join(fieldsToRetrieve, sql`, `)}
+ FROM ${sql.ident(table)}
+ WHERE ${sql.ident(primaryKey)} = ${sql.value(input[primaryKey])}
+ `
+ const res = await db.query(select)
+ return res[0]
+}
+
+module.exports.updateOne = updateOne
+
+async function deleteAll (db, sql, table, criteria, fieldsToRetrieve) {
+ let query = sql`
+ SELECT ${sql.join(fieldsToRetrieve, sql`, `)}
+ FROM ${sql.ident(table)}
+ `
+
+ /* istanbul ignore else */
+ if (criteria.length > 0) {
+ query = sql`${query} WHERE ${sql.join(criteria, sql` AND `)}`
+ }
+
+ const data = await db.query(query)
+
+ query = sql`
+ DELETE FROM ${sql.ident(table)}
+ `
+
+ /* istanbul ignore else */
+ if (criteria.length > 0) {
+ query = sql`${query} WHERE ${sql.join(criteria, sql` AND `)}`
+ }
+
+ await db.query(query)
+
+ return data
+}
+
+module.exports.deleteAll = deleteAll
diff --git a/packages/sql-mapper/lib/utils.js b/packages/sql-mapper/lib/utils.js
new file mode 100644
index 0000000000..e8d6fd4238
--- /dev/null
+++ b/packages/sql-mapper/lib/utils.js
@@ -0,0 +1,14 @@
+'use strict'
+
+const { singularize } = require('inflected')
+const camelcase = require('camelcase')
+
+function toSingular (str) {
+ str = camelcase(singularize(str))
+ str = str[0].toUpperCase() + str.slice(1)
+ return str
+}
+
+module.exports = {
+ toSingular
+}
diff --git a/packages/sql-mapper/mapper.d.ts b/packages/sql-mapper/mapper.d.ts
new file mode 100644
index 0000000000..b56e08ec75
--- /dev/null
+++ b/packages/sql-mapper/mapper.d.ts
@@ -0,0 +1,324 @@
+import { FastifyPluginAsync } from 'fastify'
+import { SQL, SQLQuery } from '@databases/sql'
+
+interface ILogger {
+ trace(): any,
+ error(): any
+}
+
+export interface Database {
+ /**
+ * An option that is true if a Postgres database is used.
+ */
+ isPg?: boolean,
+ /**
+ * An option that is true if a MariaDB database is used.
+ */
+ isMariaDB?: boolean,
+ /**
+ * An option that is true if a MySQL database is used.
+ */
+ isMySQL?: boolean,
+ /**
+ * An option that is true if a SQLite database is used.
+ */
+ isSQLite?: boolean,
+ /**
+ * Run an SQL Query and get a promise for an array of results. If your query contains multiple statements, only the results of the final statement are returned.
+ */
+  query(query: SQLQuery): Promise<any[]>,
+  /**
+   * Dispose the connection. Once this is called, any subsequent queries will fail.
+   */
+  dispose(): Promise<void>
+}
+
+export interface DBEntityField {
+ /**
+ * Field type in the database.
+ */
+ sqlType: string,
+ /**
+ * Camel cased field name.
+ */
+ camelcase: string,
+  /**
+   * An option that is true if the field is a primary key.
+   */
+  primaryKey?: boolean,
+  /**
+   * An option that is true if the field is a foreign key.
+   */
+  foreignKey?: boolean,
+  /**
+   * An option that is true if the field is nullable.
+   */
+  isNullable: boolean,
+  /**
+   * An option that is true if auto timestamping is enabled for this field.
+   */
+  autoTimestamp?: boolean
+}
+
+export interface WhereCondition {
+ [columnName: string]: {
+ /**
+ * Equal to value.
+ */
+ eq?: string,
+ /**
+ * Not equal to value.
+ */
+ neq?: string,
+ /**
+ * Greater than value.
+ */
+    gt?: any,
+ /**
+ * Greater than or equal to value.
+ */
+ gte?: any,
+ /**
+ * Less than value.
+ */
+ lt?: any,
+ /**
+ * Less than or equal to value.
+ */
+ lte?: any,
+ /**
+ * In values.
+ */
+ in?: any[],
+ /**
+ * Not in values.
+ */
+ nin?: any[]
+ }
+}
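+
+// Illustrative example (hypothetical values): a WhereCondition matching rows whose id
+// is between 1 and 10, excluding 5, could be written as
+//   { id: { gte: 1, lte: 10, nin: [5] } }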
+
+interface Find<EntityFields = any> {
+ (options?: {
+ /**
+ * SQL where condition.
+ */
+ where?: WhereCondition,
+ /**
+ * List of fields to be returned for each object
+ */
+ fields?: string[],
+ /**
+ * Entity fields to order by.
+ */
+ orderBy?: Array<{ field: string, direction: 'asc' | 'desc' }>,
+ /**
+ * Number of entities to select.
+ */
+ limit?: number,
+ /**
+ * Number of entities to skip.
+ */
+ offset?: number,
+  }): Promise<Partial<EntityFields>[]>
+}
+
+interface Insert<EntityFields = any> {
+ (options: {
+ /**
+ * Entities to insert.
+ */
+ inputs: EntityFields[],
+ /**
+ * List of fields to be returned for each object
+ */
+ fields?: string[]
+  }): Promise<Partial<EntityFields>[]>
+}
+
+interface Save<EntityFields = any> {
+ (options: {
+ /**
+ * Entity to save.
+ */
+ input: EntityFields,
+ /**
+ * List of fields to be returned for each object
+ */
+ fields?: string[]
+  }): Promise<Partial<EntityFields>>
+}
+
+interface Delete<EntityFields = any> {
+ (options?: {
+ /**
+ * SQL where condition.
+ */
+ where: WhereCondition,
+ /**
+ * List of fields to be returned for each object
+ */
+ fields: string[]
+  }): Promise<Partial<EntityFields>[]>,
+}
+
+export interface Entity<EntityFields = any> {
+ /**
+   * The singular, capitalized name of the database entity (e.g. `Page`).
+   */
+  name: string,
+  /**
+   * The singular, camel-cased name of the database entity (e.g. `page`).
+ */
+ singularName: string,
+ /**
+ * The plural name of the database entity.
+ */
+ pluralName: string,
+ /**
+ * The primary key of the database entity.
+ */
+ primaryKey: string,
+ /**
+ * The table of the database entity.
+ */
+ table: string,
+ /**
+ * Fields of the database entity.
+ */
+ fields: { [columnName: string]: DBEntityField },
+ /**
+ * Camel cased fields of the database entity.
+ */
+ camelCasedFields: { [columnName: string]: DBEntityField },
+ /**
+ * Relations with other database entities.
+ */
+ relations: any[],
+ /**
+   * Converts entity field names to database column names.
+   */
+  fixInput(input: { [columnName: string]: any }): { [columnName: string]: any },
+  /**
+   * Converts database column names to entity field names.
+ */
+ fixOutput(input: { [columnName: string]: any }): { [columnName: string]: any },
+  /**
+   * Selects matching entities from the database.
+   */
+  find: Find<EntityFields>,
+  /**
+   * Inserts entities into the database.
+   */
+  insert: Insert<EntityFields>,
+  /**
+   * Saves an entity to the database, inserting it or updating it when a primary key is provided.
+   */
+  save: Save<EntityFields>,
+  /**
+   * Deletes entities from the database.
+   */
+  delete: Delete<EntityFields>,
+}
+
+
+export interface EntityHooks {
+ [entityName: string]: {
+ find?: Find,
+ insert?: Insert,
+ save?: Save,
+ delete?: Delete,
+ }
+}
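+
+// Illustrative example (the `page` entity name is hypothetical): each hook receives the
+// original implementation as its first argument and is expected to delegate to it,
+// e.g. when passed through the `hooks` plugin option:
+//   {
+//     page: {
+//       async find (originalFind, opts) {
+//         // adjust opts here, then delegate
+//         return originalFind(opts)
+//       }
+//     }
+//   }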
+
+export interface SQLMapperPluginOptions {
+ /**
+ * Database connection string.
+ */
+ connectionString: string,
+ /**
+ * Set to true to enable auto timestamping for updated_at and inserted_at fields.
+ */
+ autoTimestamp?: boolean,
+ /**
+ * A logger object (like [Pino](https://getpino.io))
+ */
+ log?: ILogger,
+ /**
+   * Database tables (or specific columns) to ignore when mapping to entities.
+ */
+ ignore?: {
+ [tableName: string]: {
+ [columnName: string]: boolean
+ } | boolean
+ },
+ /**
+   * For each entity name (like `Page`) you can customize any of the entity API functions. Your custom function will receive the original function as the first parameter, and then all the other parameters passed to it.
+ */
+ hooks?: EntityHooks,
+ /**
+ * An async function that is called after the connection is established.
+ */
+ onDatabaseLoad?(db: Database, sql: SQL): any,
+}
+
+export interface Entities {
+ [entityName: string]: Entity
+}
+
+export interface SQLMapperPluginInterface {
+ /**
+ * A Database abstraction layer from [@Databases](https://www.atdatabases.org/)
+ */
+ db: Database,
+ /**
+ * The SQL builder from [@Databases](https://www.atdatabases.org/)
+ */
+ sql: SQL,
+ /**
+ * An object containing a key for each table found in the schema, with basic CRUD operations. See [entity.md](./entity.md) for details.
+ */
+ entities: Entities,
+ /**
+ * Adds hooks to the entity.
+ */
+ addEntityHooks(entityName: string, hooks: EntityHooks): any
+}
+
+declare module 'fastify' {
+ interface FastifyInstance {
+ platformatic: SQLMapperPluginInterface
+ }
+}
+
+/**
+ * Connects to the database and maps the tables to entities.
+ */
+export function connect(options: SQLMapperPluginOptions): Promise<SQLMapperPluginInterface>
+/**
+ * Fastify plugin that connects to the database and maps the tables to entities.
+ */
+export const plugin: FastifyPluginAsync<SQLMapperPluginOptions>
+export default plugin
+
+/**
+ * An object that contains utility functions.
+ */
+export module utils {
+ export function toSingular (str: string): string
+}
diff --git a/packages/sql-mapper/mapper.js b/packages/sql-mapper/mapper.js
new file mode 100644
index 0000000000..1ae9ed6555
--- /dev/null
+++ b/packages/sql-mapper/mapper.js
@@ -0,0 +1,168 @@
+'use strict'
+
+const buildEntity = require('./lib/entity')
+const queriesFactory = require('./lib/queries')
+const fp = require('fastify-plugin')
+
+// Ignore the function as it is only used for MySQL and PostgreSQL
+/* istanbul ignore next */
+async function buildConnection (log, createConnectionPool, connectionString) {
+ const db = await createConnectionPool({
+ connectionString,
+ bigIntMode: 'string',
+ onQueryStart: (_query, { text, values }) => {
+ log.trace({
+ query: {
+ text,
+ values
+ }
+ }, 'start query')
+ },
+ onQueryResults: (_query, { text }, results) => {
+ log.trace({
+ query: {
+ text,
+ results: results.length
+ }
+ }, 'end query')
+ },
+ onQueryError: (_query, { text }, err) => {
+ log.error({
+ query: {
+ text,
+ error: err.message
+ }
+ }, 'query error')
+ }
+ })
+ return db
+}
+
+async function connect ({ connectionString, log, onDatabaseLoad, ignore = {}, autoTimestamp = true, hooks = {} }) {
+ // TODO validate config using the schema
+ if (!connectionString) {
+ throw new Error('connectionString is required')
+ }
+
+ let queries
+ let sql
+ let db
+
+ /* istanbul ignore next */
+ if (connectionString.indexOf('postgres') === 0) {
+ const createConnectionPoolPg = require('@databases/pg')
+ db = await buildConnection(log, createConnectionPoolPg, connectionString)
+ sql = createConnectionPoolPg.sql
+ queries = queriesFactory.pg
+ db.isPg = true
+ } else if (connectionString.indexOf('mysql') === 0) {
+ const createConnectionPoolMysql = require('@databases/mysql')
+ db = await buildConnection(log, createConnectionPoolMysql, connectionString)
+ sql = createConnectionPoolMysql.sql
+ const version = (await db.query(sql`SELECT VERSION()`))[0]['VERSION()']
+ db.version = version
+ db.isMariaDB = version.indexOf('maria') !== -1
+ if (db.isMariaDB) {
+ queries = queriesFactory.mariadb
+ } else {
+ db.isMySQL = true
+ queries = queriesFactory.mysql
+ }
+ } else if (connectionString.indexOf('sqlite') === 0) {
+ const sqlite = require('@databases/sqlite')
+ const path = connectionString.replace('sqlite://', '')
+ db = sqlite(connectionString === 'sqlite://:memory:' ? undefined : path)
+ sql = sqlite.sql
+ queries = queriesFactory.sqlite
+ db.isSQLite = true
+ } else {
+ throw new Error('You must specify either postgres, mysql or sqlite as protocols')
+ }
+
+ const entities = {}
+
+ try {
+ /* istanbul ignore else */
+ if (typeof onDatabaseLoad === 'function') {
+ await onDatabaseLoad(db, sql)
+ }
+
+ const tables = await queries.listTables(db, sql)
+
+ for (const table of tables) {
+ // The following line is a safety net when developing this module,
+ // it should never happen.
+ /* istanbul ignore next */
+ if (typeof table !== 'string') {
+ throw new Error(`Table must be a string, got '${table}'`)
+ }
+ if (ignore[table] === true) {
+ continue
+ }
+
+ const entity = await buildEntity(db, sql, log, table, queries, autoTimestamp, ignore[table] || {})
+ // Check for primary key of all entities
+ if (!entity.primaryKey) {
+ throw new Error(`Cannot find primary key for ${entity.name} entity`)
+ }
+ entities[entity.singularName] = entity
+ if (hooks[entity.name]) {
+ addEntityHooks(entity.singularName, hooks[entity.name])
+ } else if (hooks[entity.singularName]) {
+ addEntityHooks(entity.singularName, hooks[entity.singularName])
+ }
+ }
+ } catch (err) /* istanbul ignore next */ {
+ db.dispose()
+ throw err
+ }
+
+ return {
+ db,
+ sql,
+ entities,
+ addEntityHooks
+ }
+
+ function addEntityHooks (entityName, hooks) {
+ const entity = entities[entityName]
+ if (!entity) {
+ throw new Error('Cannot find entity ' + entityName)
+ }
+ for (const key of Object.keys(hooks)) {
+ if (hooks[key] && entity[key]) {
+ entity[key] = hooks[key].bind(null, entity[key])
+ }
+ }
+ }
+}
+
+async function sqlMapper (app, opts) {
+ const mapper = await connect({
+ log: app.log,
+ ...opts
+ })
+
+ app.onClose(() => mapper.db.dispose())
+  // TODO: this will need to be refactored, as other plugins
+  // will need to use this same namespace
+ app.decorate('platformatic', mapper)
+}
+
+module.exports = fp(sqlMapper)
+module.exports.connect = connect
+module.exports.plugin = module.exports
+module.exports.utils = require('./lib/utils')
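+
+// Illustrative usage sketch (the connection string, logger and `pages` table below are
+// assumptions for the example, not part of this module):
+//
+//   const { connect } = require('@platformatic/sql-mapper')
+//   const mapper = await connect({
+//     connectionString: 'sqlite://:memory:',
+//     log: logger,
+//     async onDatabaseLoad (db, sql) {
+//       await db.query(sql`CREATE TABLE pages (id INTEGER PRIMARY KEY, title VARCHAR(42));`)
+//     }
+//   })
+//   const page = await mapper.entities.page.save({ input: { title: 'Hello' } })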
diff --git a/packages/sql-mapper/package.json b/packages/sql-mapper/package.json
new file mode 100644
index 0000000000..3c5b30c593
--- /dev/null
+++ b/packages/sql-mapper/package.json
@@ -0,0 +1,44 @@
+{
+ "name": "@platformatic/sql-mapper",
+ "version": "0.0.21",
+ "description": "A data mapper utility for SQL databases",
+ "main": "mapper.js",
+ "scripts": {
+ "test": "standard | snazzy && npm run test:typescript && npm run test:postgresql && npm run test:mariadb && npm run test:mysql && npm run test:mysql8 && npm run test:sqlite",
+ "test:postgresql": "DB=postgresql tap test/*.test.js",
+ "test:mariadb": "DB=mariadb tap test/*.test.js",
+ "test:mysql": "DB=mysql tap test/*.test.js",
+ "test:mysql8": "DB=mysql8 tap test/*.test.js",
+ "test:sqlite": "DB=sqlite tap test/*.test.js",
+ "test:typescript": "tsd"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/platformatic/platformatic.git"
+ },
+ "author": "Matteo Collina ",
+ "license": "Apache-2.0",
+ "bugs": {
+ "url": "https://github.com/platformatic/platformatic/issues"
+ },
+ "homepage": "https://github.com/platformatic/platformatic#readme",
+ "devDependencies": {
+ "fastify": "^4.5.3",
+ "snazzy": "^9.0.0",
+ "standard": "^17.0.0",
+ "tap": "^16.0.0",
+ "tsd": "^0.23.0"
+ },
+ "dependencies": {
+ "@databases/mysql": "^5.2.0",
+ "@databases/pg": "^5.3.0",
+ "@databases/sql": "^3.2.0",
+ "@databases/sqlite": "^4.0.0",
+ "camelcase": "^6.0.0",
+ "fastify-plugin": "^4.1.0",
+ "inflected": "^2.1.0"
+ },
+ "tsd": {
+ "directory": "test/types"
+ }
+}
diff --git a/packages/sql-mapper/test/entity.test.js b/packages/sql-mapper/test/entity.test.js
new file mode 100644
index 0000000000..29ec08d985
--- /dev/null
+++ b/packages/sql-mapper/test/entity.test.js
@@ -0,0 +1,344 @@
+'use strict'
+
+const { test } = require('tap')
+
+const { clear, connInfo, isSQLite, isMysql } = require('./helper')
+const { connect } = require('..')
+const fakeLogger = {
+ trace: () => {},
+ error: () => {}
+}
+
+test('entity fields', async ({ equal, not, same, teardown }) => {
+ async function onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+ teardown(() => db.dispose())
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(255) NOT NULL
+ );`)
+ }
+ }
+ const mapper = await connect({
+ connectionString: connInfo.connectionString,
+ log: fakeLogger,
+ onDatabaseLoad,
+ ignore: {},
+ hooks: {}
+ })
+ const pageEntity = mapper.entities.page
+ not(pageEntity, undefined)
+ equal(pageEntity.name, 'Page')
+ equal(pageEntity.singularName, 'page')
+ equal(pageEntity.pluralName, 'pages')
+ equal(pageEntity.primaryKey, 'id')
+ equal(pageEntity.table, 'pages')
+ equal(pageEntity.camelCasedFields.id.primaryKey, true)
+})
+
+test('entity API', async ({ equal, same, teardown, rejects }) => {
+ async function onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+ teardown(() => db.dispose())
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ the_title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ the_title VARCHAR(255) NOT NULL
+ );`)
+ }
+ await db.query(sql`INSERT INTO pages (the_title) VALUES ('foo')`)
+ await db.query(sql`INSERT INTO pages (the_title) VALUES ('bar')`)
+ }
+ const mapper = await connect({
+ connectionString: connInfo.connectionString,
+ log: fakeLogger,
+ onDatabaseLoad,
+ ignore: {},
+ hooks: {}
+ })
+ const pageEntity = mapper.entities.page
+ // fixInput
+ const fixedInput = pageEntity.fixInput({ id: 42, theTitle: 'Fixme' })
+ same(fixedInput, { id: 42, the_title: 'Fixme' })
+
+ // fixOutput
+ const fixedOutput = pageEntity.fixOutput({
+ id: 42,
+ the_title: 'Fixme'
+ })
+
+ same(fixedOutput, { id: 42, theTitle: 'Fixme' })
+
+ // empty fixOutput
+ same(pageEntity.fixOutput(undefined), undefined)
+
+ // find
+ const findResult = await pageEntity.find({ fields: ['theTitle'] })
+ same(findResult, [{ theTitle: 'foo' }, { theTitle: 'bar' }])
+
+ // insert - single
+ const insertResult = await pageEntity.insert({
+ inputs: [{ theTitle: 'foobar' }],
+ fields: ['id', 'theTitle']
+ })
+ same(insertResult, [{ id: '3', theTitle: 'foobar' }])
+
+ // insert - multiple
+ const insertMultipleResult = await pageEntity.insert({
+ inputs: [{ theTitle: 'platformatic' }, { theTitle: 'foobar' }],
+ fields: ['id', 'theTitle']
+ })
+ same(insertMultipleResult, [{ id: '4', theTitle: 'platformatic' }, { id: '5', theTitle: 'foobar' }])
+
+ // save - new record
+ same(await pageEntity.save({
+ input: { theTitle: 'fourth page' },
+ fields: ['id', 'theTitle']
+ }), { id: 6, theTitle: 'fourth page' })
+
+ // save - update record
+ same(await pageEntity.save({
+ input: { id: 4, theTitle: 'foofoo' },
+ fields: ['id', 'theTitle']
+ }), { id: '4', theTitle: 'foofoo' })
+
+ // save - empty object
+ rejects(async () => {
+ await pageEntity.save({})
+ }, Error, 'Input not provided.')
+
+ rejects(async () => {
+ await pageEntity.save({ input: { fakeColumn: 'foobar' } })
+ })
+ // delete
+ same(await pageEntity.delete({
+ where: {
+ id: {
+ eq: 2
+ }
+ },
+ fields: ['id', 'theTitle']
+ }), [{ id: '2', theTitle: 'bar' }])
+})
+
+test('empty save', async ({ equal, same, teardown, rejects }) => {
+ async function onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+ teardown(() => db.dispose())
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ the_title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ the_title VARCHAR(255)
+ );`)
+ }
+ }
+ const mapper = await connect({
+ connectionString: connInfo.connectionString,
+ log: fakeLogger,
+ onDatabaseLoad,
+ ignore: {},
+ hooks: {}
+ })
+
+ const insertResult = await mapper.entities.page.save({
+ input: {},
+ fields: ['id', 'theTitle']
+ })
+ same(insertResult, { id: '1', theTitle: null })
+})
+
+test('[SQLite] - UUID', { skip: !isSQLite }, async ({ pass, teardown, same, equal }) => {
+ const mapper = await connect({
+ connectionString: connInfo.connectionString,
+ log: fakeLogger,
+ ignore: {},
+ hooks: {},
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ await db.query(sql`
+ CREATE TABLE pages (
+ id uuid PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+ })
+
+ const pageEntity = mapper.entities.page
+
+ let id
+ {
+ const res = await pageEntity.save({ input: { title: 'Hello' } })
+ id = res.id
+ same(res, {
+ id,
+ title: 'Hello'
+ })
+ }
+
+ {
+ const res = await pageEntity.find({ where: { id: { eq: id } } })
+ same(res, [{
+ id,
+ title: 'Hello'
+ }])
+ }
+
+ {
+ const res = await pageEntity.save({ input: { id, title: 'Hello World' } })
+ same(res, {
+ id,
+ title: 'Hello World'
+ })
+ }
+})
+
+test('[sqlite] throws if PK is not INTEGER', { skip: !isSQLite }, async ({ fail, equal, teardown, rejects }) => {
+ async function onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+ await db.query(sql`CREATE TABLE pages (
+ id int PRIMARY KEY,
+ title varchar(255) NOT NULL,
+ content text NOT NULL
+ );`)
+ }
+ try {
+ await connect({
+ connectionString: connInfo.connectionString,
+ log: fakeLogger,
+ onDatabaseLoad,
+ ignore: {},
+ hooks: {}
+ })
+ fail()
+ } catch (err) {
+ equal(err.message, 'Invalid Primary Key type. Expected "integer", found "int"')
+ }
+})
+
+test('mixing snake and camel case', async ({ pass, teardown, same, equal }) => {
+ async function onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+ teardown(() => db.dispose())
+
+ if (isMysql) {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(255),
+ body_content TEXT,
+ category_id BIGINT UNSIGNED,
+ FOREIGN KEY (category_id) REFERENCES categories(id) ON DELETE CASCADE
+ );
+ `)
+ } else if (isSQLite) {
+ await db.query(sql`
+ CREATE TABLE "categories" (
+ "id" INTEGER PRIMARY KEY,
+ "name" TEXT NOT NULL
+ );
+ `)
+ await db.query(sql`
+ CREATE TABLE "pages" (
+ "id" INTEGER PRIMARY KEY,
+ "title" TEXT NOT NULL,
+ "body_content" TEXT
+ );
+ `)
+ await db.query(sql`
+ ALTER TABLE "pages" ADD COLUMN "category_id" REFERENCES "categories"("id");
+ `)
+ } else {
+ await db.query(sql`
+ CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name varchar(255) NOT NULL
+ );
+
+ CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title varchar(255) NOT NULL,
+ body_content text,
+ category_id int NOT NULL REFERENCES categories(id)
+ );
+ `)
+ }
+ }
+
+ const mapper = await connect({
+ connectionString: connInfo.connectionString,
+ log: fakeLogger,
+ onDatabaseLoad,
+ ignore: {},
+ hooks: {}
+ })
+
+ const pageEntity = mapper.entities.page
+ const categoryEntity = mapper.entities.category
+
+ const [newCategory] = await categoryEntity.insert({
+ fields: ['id', 'name'],
+ inputs: [{ name: 'fiction' }]
+ })
+
+ {
+ const res = await pageEntity.insert({
+ fields: ['id', 'title'],
+ inputs: [
+ {
+ title: 'A fiction', bodyContent: 'This is our first fiction', categoryId: newCategory.id
+ },
+ {
+ title: 'A fiction', body_content: 'This is our first fiction', category_id: newCategory.id
+ }
+
+ ]
+ })
+ same(res, [{
+ id: '1',
+ title: 'A fiction',
+ categoryId: newCategory.id
+ }, {
+ id: '2',
+ title: 'A fiction',
+ categoryId: newCategory.id
+ }])
+ }
+
+ {
+ const res = await pageEntity.save({
+ fields: ['id', 'title'],
+ input: {
+ title: 'A fiction', body_content: 'This is our first fiction', category_id: newCategory.id
+ }
+ })
+ same(res, {
+ id: '3',
+ title: 'A fiction',
+ categoryId: newCategory.id
+ })
+ }
+})
diff --git a/packages/sql-mapper/test/helper.js b/packages/sql-mapper/test/helper.js
new file mode 100644
index 0000000000..a210620ebd
--- /dev/null
+++ b/packages/sql-mapper/test/helper.js
@@ -0,0 +1,66 @@
+'use strict'
+
+// Needed to work with dates & postgresql
+// See https://node-postgres.com/features/types/
+process.env.TZ = 'UTC'
+
+const connInfo = {}
+
+if (!process.env.DB || process.env.DB === 'postgresql') {
+ connInfo.connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ module.exports.isPg = true
+} else if (process.env.DB === 'mariadb') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1:3307/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'mysql') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'mysql8') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1:3308/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'sqlite') {
+ connInfo.connectionString = 'sqlite://:memory:'
+ module.exports.isSQLite = true
+}
+
+module.exports.connInfo = connInfo
+
+module.exports.clear = async function (db, sql) {
+ try {
+ await db.query(sql`DROP TABLE pages`)
+ } catch (err) {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE categories`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE posts`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE simple_types`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE owners`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE users`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE versions`)
+ } catch {
+ }
+}
diff --git a/packages/sql-mapper/test/hooks.test.js b/packages/sql-mapper/test/hooks.test.js
new file mode 100644
index 0000000000..f0f9726f5f
--- /dev/null
+++ b/packages/sql-mapper/test/hooks.test.js
@@ -0,0 +1,325 @@
+'use strict'
+
+const { test } = require('tap')
+const { connect } = require('..')
+const { clear, connInfo, isSQLite } = require('./helper')
+const fakeLogger = {
+ trace: () => {},
+ error: () => {}
+}
+
+test('basic hooks', async ({ pass, teardown, same, equal, plan, fail }) => {
+ plan(14)
+ const mapper = await connect({
+ ...connInfo,
+ log: fakeLogger,
+ async onDatabaseLoad (db, sql) {
+ teardown(() => db.dispose())
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+ },
+ hooks: {
+ Page: {
+ noKey () {
+ fail('noKey should never be called')
+ },
+ async save (original, { input, ctx, fields }) {
+ pass('save called')
+
+ if (!input.id) {
+ same(input, {
+ title: 'Hello'
+ })
+
+ return original({
+ input: {
+ title: 'Hello from hook'
+ },
+ fields
+ })
+ } else {
+ same(input, {
+ id: 1,
+ title: 'Hello World'
+ })
+
+ return original({
+ input: {
+ id: 1,
+ title: 'Hello from hook 2'
+ },
+ fields
+ })
+ }
+ },
+ async find (original, args) {
+ pass('find called')
+
+ same(args.where, {
+ id: {
+ eq: '1'
+ }
+ })
+ args.where = {
+ id: {
+ eq: '2'
+ }
+ }
+ same(args.fields, ['id', 'title'])
+ return original(args)
+ },
+ async insert (original, args) {
+ pass('insert called')
+
+ same(args.inputs, [{
+ title: 'hello'
+ }, {
+ title: 'world'
+ }])
+ same(args.fields, ['id', 'title'])
+ return original(args)
+ }
+ }
+ }
+ })
+
+ const entity = mapper.entities.page
+
+ same(await entity.save({ input: { title: 'Hello' } }), {
+ id: 1,
+ title: 'Hello from hook'
+ })
+
+ same(await entity.find({ where: { id: { eq: 1 } }, fields: ['id', 'title'] }), [])
+
+ same(await entity.save({ input: { id: 1, title: 'Hello World' } }), {
+ id: 1,
+ title: 'Hello from hook 2'
+ })
+
+ await entity.insert({ inputs: [{ title: 'hello' }, { title: 'world' }], fields: ['id', 'title'] })
+})
+
+test('addEntityHooks', async ({ pass, teardown, same, equal, plan, fail, throws }) => {
+ plan(15)
+ const mapper = await connect({
+ ...connInfo,
+ log: fakeLogger,
+ async onDatabaseLoad (db, sql) {
+ teardown(() => db.dispose())
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+ }
+ })
+
+ throws(() => mapper.addEntityHooks('user', {}), 'Cannot find entity user')
+
+ mapper.addEntityHooks('page', {
+ noKey () {
+ fail('noKey should never be called')
+ },
+ async save (original, { input, ctx, fields }) {
+ pass('save called')
+
+ if (!input.id) {
+ same(input, {
+ title: 'Hello'
+ })
+
+ return original({
+ input: {
+ title: 'Hello from hook'
+ },
+ fields
+ })
+ } else {
+ same(input, {
+ id: 1,
+ title: 'Hello World'
+ })
+
+ return original({
+ input: {
+ id: 1,
+ title: 'Hello from hook 2'
+ },
+ fields
+ })
+ }
+ },
+ async find (original, args) {
+ pass('find called')
+
+ same(args.where, {
+ id: {
+ eq: '1'
+ }
+ })
+ args.where = {
+ id: {
+ eq: '2'
+ }
+ }
+ same(args.fields, ['id', 'title'])
+ return original(args)
+ },
+ async insert (original, args) {
+ pass('insert called')
+
+ same(args.inputs, [{
+ title: 'hello'
+ }, {
+ title: 'world'
+ }])
+ same(args.fields, ['id', 'title'])
+ return original(args)
+ }
+ })
+
+ const entity = mapper.entities.page
+
+ same(await entity.save({ input: { title: 'Hello' } }), {
+ id: 1,
+ title: 'Hello from hook'
+ })
+
+ same(await entity.find({ where: { id: { eq: 1 } }, fields: ['id', 'title'] }), [])
+
+ same(await entity.save({ input: { id: 1, title: 'Hello World' } }), {
+ id: 1,
+ title: 'Hello from hook 2'
+ })
+
+ await entity.insert({ inputs: [{ title: 'hello' }, { title: 'world' }], fields: ['id', 'title'] })
+})
+
+test('basic hooks with smaller cap name', async ({ pass, teardown, same, equal, plan, fail }) => {
+ plan(14)
+ const mapper = await connect({
+ ...connInfo,
+ log: fakeLogger,
+ async onDatabaseLoad (db, sql) {
+ teardown(() => db.dispose())
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+ },
+ hooks: {
+ page: {
+ noKey () {
+ fail('noKey should never be called')
+ },
+ async save (original, { input, ctx, fields }) {
+ pass('save called')
+
+ if (!input.id) {
+ same(input, {
+ title: 'Hello'
+ })
+
+ return original({
+ input: {
+ title: 'Hello from hook'
+ },
+ fields
+ })
+ } else {
+ same(input, {
+ id: 1,
+ title: 'Hello World'
+ })
+
+ return original({
+ input: {
+ id: 1,
+ title: 'Hello from hook 2'
+ },
+ fields
+ })
+ }
+ },
+ async find (original, args) {
+ pass('find called')
+
+ same(args.where, {
+ id: {
+ eq: '1'
+ }
+ })
+ args.where = {
+ id: {
+ eq: '2'
+ }
+ }
+ same(args.fields, ['id', 'title'])
+ return original(args)
+ },
+ async insert (original, args) {
+ pass('insert called')
+
+ same(args.inputs, [{
+ title: 'hello'
+ }, {
+ title: 'world'
+ }])
+ same(args.fields, ['id', 'title'])
+ return original(args)
+ }
+ }
+ }
+ })
+
+ const entity = mapper.entities.page
+
+ same(await entity.save({ input: { title: 'Hello' } }), {
+ id: 1,
+ title: 'Hello from hook'
+ })
+
+ same(await entity.find({ where: { id: { eq: 1 } }, fields: ['id', 'title'] }), [])
+
+ same(await entity.save({ input: { id: 1, title: 'Hello World' } }), {
+ id: 1,
+ title: 'Hello from hook 2'
+ })
+
+ await entity.insert({ inputs: [{ title: 'hello' }, { title: 'world' }], fields: ['id', 'title'] })
+})
diff --git a/packages/sql-mapper/test/inserted_at_updated_at.test.js b/packages/sql-mapper/test/inserted_at_updated_at.test.js
new file mode 100644
index 0000000000..1c1276ac3e
--- /dev/null
+++ b/packages/sql-mapper/test/inserted_at_updated_at.test.js
@@ -0,0 +1,132 @@
+'use strict'
+
+const { test } = require('tap')
+const { clear, connInfo, isSQLite, isMysql } = require('./helper')
+const { setTimeout } = require('timers/promises')
+const { connect } = require('..')
+const fakeLogger = {
+ trace: () => {},
+ error: () => {}
+}
+
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ inserted_at TIMESTAMP,
+ updated_at TIMESTAMP
+ );`)
+ } else if (isMysql) {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ inserted_at TIMESTAMP NULL DEFAULT NULL,
+ updated_at TIMESTAMP NULL DEFAULT NULL
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ inserted_at TIMESTAMP,
+ updated_at TIMESTAMP
+ );`)
+ }
+}
+
+test('inserted_at updated_at happy path', async ({ pass, teardown, same, equal, not, comment, notSame }) => {
+ const mapper = await connect({
+ ...connInfo,
+ log: fakeLogger,
+ async onDatabaseLoad (db, sql) {
+ teardown(() => db.dispose())
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+
+ const entity = mapper.entities.page
+
+ equal(entity.fields.inserted_at.autoTimestamp, true)
+ equal(entity.fields.updated_at.autoTimestamp, true)
+
+ const original = await entity.save({
+ input: { title: 'Hello' }
+ })
+ not(original.insertedAt, null, 'insertedAt')
+ not(original.updatedAt, null, 'updatedAt')
+ comment(`insertedAt: ${original.insertedAt}`)
+ comment(`updatedAt: ${original.updatedAt}`)
+
+ {
+ const [data] = await entity.find({ where: { id: { eq: original.id } } })
+ same(data.insertedAt, original.insertedAt, 'insertedAt')
+ same(data.updatedAt, original.updatedAt, 'updatedAt')
+ comment(`insertedAt: ${data.insertedAt}`)
+ comment(`updatedAt: ${data.updatedAt}`)
+ }
+
+ await setTimeout(1000) // await 1s
+
+ let updated
+ {
+ const data = await entity.save({
+ input: { id: original.id, title: 'Hello World' }
+ })
+ same(data.insertedAt, original.insertedAt, 'insertedAt')
+ notSame(data.updatedAt, original.updatedAt, 'updatedAt')
+ updated = data
+ comment(`insertedAt: ${data.insertedAt}`)
+ comment(`updatedAt: ${data.updatedAt}`)
+ }
+
+ {
+ const [data] = await entity.find({ where: { id: { eq: original.id } } })
+ same(data.insertedAt, updated.insertedAt, 'insertedAt')
+ same(data.updatedAt, updated.updatedAt, 'updatedAt')
+ comment(`insertedAt: ${data.insertedAt}`)
+ comment(`updatedAt: ${data.updatedAt}`)
+ }
+})
+
+test('bulk insert adds inserted_at updated_at', async ({ pass, teardown, same, equal, not, comment }) => {
+ const mapper = await connect({
+ ...connInfo,
+ log: fakeLogger,
+ async onDatabaseLoad (db, sql) {
+ teardown(() => db.dispose())
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+
+ const entity = mapper.entities.page
+
+ {
+ const pages = await entity.insert({
+ inputs: [
+ { title: 'Page 1' },
+ { title: 'Page 2' },
+ { title: 'Page 3' }
+ ]
+ })
+ for (const page of pages) {
+ not(page.insertedAt, null, 'insertedAt')
+ not(page.updatedAt, null, 'updatedAt')
+ same(page.insertedAt, page.updatedAt, 'insertedAt === updatedAt')
+ }
+ }
+
+ {
+ const pages = await entity.find()
+ for (const page of pages) {
+ not(page.insertedAt, null, 'insertedAt')
+ not(page.updatedAt, null, 'updatedAt')
+ same(page.insertedAt, page.updatedAt, 'insertedAt === updatedAt')
+ }
+ }
+})
diff --git a/packages/sql-mapper/test/mapper.test.js b/packages/sql-mapper/test/mapper.test.js
new file mode 100644
index 0000000000..7118898261
--- /dev/null
+++ b/packages/sql-mapper/test/mapper.test.js
@@ -0,0 +1,288 @@
+
+const { test } = require('tap')
+const { connect, plugin } = require('..')
+const { clear, connInfo, isPg, isMysql, isSQLite } = require('./helper')
+const fastify = require('fastify')
+
+const fakeLogger = {
+ trace: () => {},
+ error: () => {}
+}
+
+test('should throw if no connection string is provided', async ({ equal }) => {
+ try {
+ await connect({
+ connectionString: false
+ })
+ } catch (err) {
+ equal(err.message, 'connectionString is required')
+ }
+})
+
+test('[PG] return entities', { skip: !isPg }, async ({ pass, teardown, equal }) => {
+ async function onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+ teardown(() => db.dispose())
+
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(255) NOT NULL
+ );`)
+ }
+ const mapper = await connect({
+ connectionString: connInfo.connectionString,
+ log: fakeLogger,
+ onDatabaseLoad,
+ ignore: {},
+ hooks: {}
+ })
+ const pageEntity = mapper.entities.page
+ equal(pageEntity.name, 'Page')
+ equal(pageEntity.singularName, 'page')
+ equal(pageEntity.pluralName, 'pages')
+ pass()
+})
+
+test('[mysql] return entities', { skip: !isMysql }, async ({ pass, teardown, equal }) => {
+ async function onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+ teardown(() => db.dispose())
+
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(255) NOT NULL
+ );`)
+ }
+ const mapper = await connect({
+ connectionString: connInfo.connectionString,
+ log: fakeLogger,
+ onDatabaseLoad,
+ ignore: {},
+ hooks: {}
+ })
+ const pageEntity = mapper.entities.page
+ equal(pageEntity.name, 'Page')
+ equal(pageEntity.singularName, 'page')
+ equal(pageEntity.pluralName, 'pages')
+ pass()
+})
+
+test('[sqlite] return entities', { skip: !isSQLite }, async ({ pass, teardown, equal }) => {
+ async function onDatabaseLoad (db, sql) {
+ teardown(async () => await clear(db, sql))
+ teardown(() => db.dispose())
+
+ await db.query(sql`CREATE TABLE IF NOT EXISTS pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(255) NOT NULL
+ );`)
+ }
+ const mapper = await connect({
+ connectionString: connInfo.connectionString,
+ log: fakeLogger,
+ onDatabaseLoad,
+ ignore: {},
+ hooks: {}
+ })
+ const pageEntity = mapper.entities.page
+ equal(pageEntity.name, 'Page')
+ equal(pageEntity.singularName, 'page')
+ equal(pageEntity.pluralName, 'pages')
+ pass()
+})
+
+test('ignore tables', async ({ teardown, has }) => {
+ async function onDatabaseLoad (db, sql) {
+ teardown(async () => await clear(db, sql))
+ teardown(() => db.dispose())
+
+ await db.query(sql`CREATE TABLE IF NOT EXISTS pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(255) NOT NULL
+ );`)
+
+ await db.query(sql`CREATE TABLE IF NOT EXISTS users (
+ id SERIAL PRIMARY KEY,
+ username VARCHAR(255) NOT NULL
+ );`)
+ }
+ const mapper = await connect({
+ connectionString: connInfo.connectionString,
+ log: fakeLogger,
+ onDatabaseLoad,
+ ignore: { users: true },
+ hooks: {}
+ })
+ has(mapper.entities.users, undefined)
+})
+
+test('[PG] return entities with Fastify', { skip: !isPg }, async ({ pass, teardown, equal }) => {
+ async function onDatabaseLoad (db, sql) {
+ teardown(async () => await clear(db, sql))
+
+ await db.query(sql`CREATE TABLE IF NOT EXISTS pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(255) NOT NULL
+ );`)
+ }
+ const app = fastify()
+ teardown(() => app.close())
+ app.register(plugin, {
+ connectionString: connInfo.connectionString,
+ log: fakeLogger,
+ onDatabaseLoad
+ })
+ await app.ready()
+ const pageEntity = app.platformatic.entities.page
+ equal(pageEntity.name, 'Page')
+ equal(pageEntity.singularName, 'page')
+ equal(pageEntity.pluralName, 'pages')
+ pass()
+})
+
+test('[mysql] return entities', { skip: !isMysql }, async ({ pass, teardown, equal }) => {
+ async function onDatabaseLoad (db, sql) {
+ teardown(async () => await clear(db, sql))
+
+ await db.query(sql`CREATE TABLE IF NOT EXISTS pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(255) NOT NULL
+ );`)
+ }
+ const app = fastify()
+ teardown(() => app.close())
+ app.register(plugin, {
+ connectionString: connInfo.connectionString,
+ onDatabaseLoad
+ })
+ await app.ready()
+ const pageEntity = app.platformatic.entities.page
+ equal(pageEntity.name, 'Page')
+ equal(pageEntity.singularName, 'page')
+ equal(pageEntity.pluralName, 'pages')
+ pass()
+})
+
+test('[sqlite] return entities', { skip: !isSQLite }, async ({ pass, teardown, equal }) => {
+ async function onDatabaseLoad (db, sql) {
+ teardown(async () => await clear(db, sql))
+
+ await db.query(sql`CREATE TABLE IF NOT EXISTS pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(255) NOT NULL
+ );`)
+ }
+ const app = fastify()
+ teardown(() => app.close())
+ app.register(plugin, {
+ connectionString: connInfo.connectionString,
+ onDatabaseLoad
+ })
+ await app.ready()
+ const pageEntity = app.platformatic.entities.page
+ equal(pageEntity.name, 'Page')
+ equal(pageEntity.singularName, 'page')
+ equal(pageEntity.pluralName, 'pages')
+ pass()
+})
+
+test('missing connectionString', async ({ rejects }) => {
+ const app = fastify()
+ app.register(plugin)
+
+ await rejects(app.ready(), /connectionString/)
+})
+
+test('[pg] throws if no primary key', { skip: !isPg }, async ({ pass, teardown, equal }) => {
+ async function onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+ teardown(() => db.dispose())
+
+ await db.query(sql`CREATE TABLE IF NOT EXISTS pages (
+ id SERIAL,
+ title VARCHAR(255) NOT NULL
+ );`)
+ }
+ try {
+ await connect({
+ connectionString: connInfo.connectionString,
+ log: fakeLogger,
+ onDatabaseLoad,
+ ignore: {},
+ hooks: {}
+ })
+ } catch (err) {
+ equal(err.message, 'Cannot find primary key for Page entity')
+ }
+})
+
+test('[mysql] throws if no primary key', { skip: !isMysql }, async ({ pass, teardown, equal }) => {
+ async function onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+ teardown(() => db.dispose())
+
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL,
+ title VARCHAR(255) NOT NULL
+ );`)
+ }
+ try {
+ await connect({
+ connectionString: connInfo.connectionString,
+ log: fakeLogger,
+ onDatabaseLoad,
+ ignore: {},
+ hooks: {}
+ })
+ } catch (err) {
+ equal(err.message, 'Cannot find primary key for Page entity')
+ }
+})
+
+test('[sqlite] throws if primary key is not defined', { skip: !isSQLite }, async ({ pass, teardown, equal }) => {
+ async function onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+
+ await db.query(sql`CREATE TABLE pages(
+ id INTEGER NOT NULL,
+ title TEXT NOT NULL
+ );
+ `)
+ }
+ try {
+ await connect({
+ connectionString: connInfo.connectionString,
+ log: fakeLogger,
+ onDatabaseLoad,
+ ignore: {},
+ hooks: {}
+ })
+ } catch (err) {
+ equal(err.message, 'Cannot find primary key for Page entity')
+ }
+})
+
+test('[sqlite] throws with multiple primary keys', { skip: !isSQLite }, async ({ pass, teardown, equal }) => {
+ async function onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+
+ await db.query(sql`CREATE TABLE pages(
+ id INTEGER NOT NULL,
+ author_id INTEGER NOT NULL,
+ title TEXT NOT NULL,
+ PRIMARY KEY (id, author_id)
+ );
+ `)
+ }
+ try {
+ await connect({
+ connectionString: connInfo.connectionString,
+ log: fakeLogger,
+ onDatabaseLoad,
+ ignore: {},
+ hooks: {}
+ })
+ } catch (err) {
+ equal(err.message, 'Table pages has 2 primary keys')
+ }
+})
diff --git a/packages/sql-mapper/test/types/mapper.test-d.ts b/packages/sql-mapper/test/types/mapper.test-d.ts
new file mode 100644
index 0000000000..5df6e8874f
--- /dev/null
+++ b/packages/sql-mapper/test/types/mapper.test-d.ts
@@ -0,0 +1,64 @@
+import { expectType } from 'tsd'
+import { SQL, SQLQuery } from '@databases/sql'
+import { fastify, FastifyInstance } from 'fastify'
+import {
+ connect,
+ plugin,
+ utils,
+ Entity,
+ DBEntityField,
+ Database,
+ WhereCondition,
+ SQLMapperPluginInterface,
+ EntityHooks,
+} from '../../mapper'
+
+const pluginOptions: SQLMapperPluginInterface = await connect({ connectionString: '' })
+expectType<Database>(pluginOptions.db)
+expectType<SQL>(pluginOptions.sql)
+expectType<(entityName: string, hooks: EntityHooks) => any>(pluginOptions.addEntityHooks)
+expectType<{ [entityName: string]: Entity }>(pluginOptions.entities)
+
+interface EntityFields {
+ id: number,
+ name: string,
+}
+
+const entity: Entity<EntityFields> = pluginOptions.entities.entityName
+expectType<string>(entity.name)
+expectType<string>(entity.singularName)
+expectType<string>(entity.pluralName)
+expectType<string>(entity.primaryKey)
+expectType<string>(entity.table)
+expectType<any[]>(entity.relations)
+expectType<{ [columnName: string]: DBEntityField }>(entity.fields)
+expectType<{ [columnName: string]: DBEntityField }>(entity.camelCasedFields)
+expectType<(input: { [columnName: string]: any }) => { [columnName: string]: any }>(entity.fixInput)
+expectType<(input: { [columnName: string]: any }) => { [columnName: string]: any }>(entity.fixOutput)
+expectType<EntityFields[]>(await entity.find())
+expectType<EntityFields[]>(await entity.insert({ inputs: [{ id: 1, name: 'test' }] }))
+expectType<EntityFields>(await entity.save({ input: { id: 1, name: 'test' } }))
+expectType<EntityFields[]>(await entity.delete())
+
+expectType<SQLMapperPluginInterface>(await connect({ connectionString: '' }))
+expectType<SQLMapperPluginInterface>(await connect({ connectionString: '', autoTimestamp: true }))
+expectType<SQLMapperPluginInterface>(await connect({ connectionString: '', hooks: {} }))
+expectType<SQLMapperPluginInterface>(await connect({ connectionString: '', hooks: {
+  Page: {
+    async find(options: any): Promise<any[]> { return [] },
+    async insert(options: { inputs: any[], fields?: string[] }): Promise<any[]> { return [] },
+    async save(options: { input: any, fields?: string[] }): Promise<any> { return {} },
+    async delete(options?: { where: WhereCondition, fields: string[] }): Promise<any[]> { return [] },
+  }
+}}))
+expectType<SQLMapperPluginInterface>(await connect({ connectionString: '', ignore: {} }))
+expectType<SQLMapperPluginInterface>(await connect({ connectionString: '', onDatabaseLoad(db: Database, sql: SQL) {
+  expectType<(query: SQLQuery) => Promise<any[]>>(db.query)
+  expectType<() => Promise<void>>(db.dispose)
+}}))
+
+const instance: FastifyInstance = fastify()
+instance.register(plugin, { connectionString: '', autoTimestamp: true })
+instance.register((instance) => { expectType<SQLMapperPluginInterface>(instance.platformatic) })
+
+expectType<(str: string) => string>(utils.toSingular)
diff --git a/packages/sql-mapper/test/where.test.js b/packages/sql-mapper/test/where.test.js
new file mode 100644
index 0000000000..74df3a0f01
--- /dev/null
+++ b/packages/sql-mapper/test/where.test.js
@@ -0,0 +1,316 @@
+'use strict'
+
+const { test } = require('tap')
+const { connect } = require('..')
+const { clear, connInfo, isMysql, isSQLite } = require('./helper')
+const fakeLogger = {
+ trace: () => {},
+ error: () => {}
+}
+
+test('list', async ({ pass, teardown, same, equal }) => {
+ const mapper = await connect({
+ ...connInfo,
+ log: fakeLogger,
+ async onDatabaseLoad (db, sql) {
+ teardown(() => db.dispose())
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE posts (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE posts (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER
+ );`)
+ }
+ }
+ })
+
+ const entity = mapper.entities.post
+
+ const posts = [{
+ title: 'Dog',
+ longText: 'Foo',
+ counter: 10
+ }, {
+ title: 'Cat',
+ longText: 'Bar',
+ counter: 20
+ }, {
+ title: 'Mouse',
+ longText: 'Baz',
+ counter: 30
+ }, {
+ title: 'Duck',
+ longText: 'A duck tale',
+ counter: 40
+ }]
+
+ await entity.insert({
+ inputs: posts
+ })
+
+ same(await entity.find({ where: { title: { eq: 'Dog' } }, fields: ['id', 'title', 'longText'] }), [{
+ id: '1',
+ title: 'Dog',
+ longText: 'Foo'
+ }])
+
+ same(await entity.find({ limit: 1, offset: 0, fields: ['id', 'title', 'longText'] }), [{
+ id: '1',
+ title: 'Dog',
+ longText: 'Foo'
+ }])
+
+ same(await entity.find({ limit: 1, offset: 0, orderBy: [{ field: 'id', direction: 'desc' }], fields: ['id', 'title'] }), [{
+ id: '4',
+ title: 'Duck'
+ }])
+
+ same(await entity.find({ where: { title: { neq: 'Dog' } }, fields: ['id', 'title', 'longText'] }), [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }, {
+ id: '4',
+ title: 'Duck',
+ longText: 'A duck tale'
+ }])
+
+ same(await entity.find({ where: { counter: { gt: 10 } }, fields: ['id', 'title', 'longText'] }), [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }, {
+ id: '4',
+ title: 'Duck',
+ longText: 'A duck tale'
+ }])
+
+ same(await entity.find({ where: { counter: { lt: 40 } }, fields: ['id', 'title', 'longText'] }), [{
+ id: '1',
+ title: 'Dog',
+ longText: 'Foo'
+ }, {
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }])
+
+ same(await entity.find({ where: { counter: { lte: 30 } }, fields: ['id', 'title', 'longText'] }), [{
+ id: '1',
+ title: 'Dog',
+ longText: 'Foo'
+ }, {
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }])
+
+ same(await entity.find({ where: { counter: { gte: 20 } }, fields: ['id', 'title', 'longText'] }), [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }, {
+ id: '4',
+ title: 'Duck',
+ longText: 'A duck tale'
+ }])
+
+ same(await entity.find({ where: { counter: { in: [20, 30] } }, fields: ['id', 'title', 'longText'] }), [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }])
+
+ same(await entity.find({ where: { counter: { nin: [10, 40] } }, fields: ['id', 'title', 'longText'] }), [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }])
+
+ same(await entity.find({ where: { counter: { gt: 10, lt: 40 } }, fields: ['id', 'title', 'longText'] }), [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }])
+})
+
+test('foreign keys', async ({ pass, teardown, same, equal }) => {
+ const mapper = await connect({
+ ...connInfo,
+ log: fakeLogger,
+ async onDatabaseLoad (db, sql) {
+ teardown(() => db.dispose())
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isMysql) {
+ await db.query(sql`
+ CREATE TABLE owners (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ CREATE TABLE posts (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER,
+ owner_id BIGINT UNSIGNED,
+ FOREIGN KEY (owner_id) REFERENCES owners(id) ON DELETE CASCADE
+ );
+ `)
+ } else if (isSQLite) {
+ await db.query(sql`
+ CREATE TABLE owners (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ `)
+
+ await db.query(sql`
+ CREATE TABLE posts (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER,
+ owner_id BIGINT UNSIGNED,
+ FOREIGN KEY (owner_id) REFERENCES owners(id) ON DELETE CASCADE
+ );
+ `)
+ } else {
+ await db.query(sql`
+ CREATE TABLE owners (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ CREATE TABLE posts (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER,
+ owner_id INTEGER REFERENCES owners(id)
+ );`)
+ }
+ }
+ })
+
+ const owners = [{
+ name: 'Matteo'
+ }, {
+ name: 'Luca'
+ }]
+
+ const posts = [{
+ title: 'Dog',
+ longText: 'Foo',
+ counter: 10
+ }, {
+ title: 'Cat',
+ longText: 'Bar',
+ counter: 20
+ }, {
+ title: 'Mouse',
+ longText: 'Baz',
+ counter: 30
+ }, {
+ title: 'Duck',
+ longText: 'A duck tale',
+ counter: 40
+ }]
+
+ {
+ const res = await mapper.entities.owner.insert({
+ inputs: owners
+ })
+ const toAssign = [...posts]
+ for (const owner of res) {
+ toAssign.shift().ownerId = owner.id
+ toAssign.shift().ownerId = owner.id
+ }
+ await mapper.entities.post.insert({
+ inputs: posts
+ })
+ }
+
+ {
+ const owners = await mapper.entities.owner.find()
+ equal(owners.length, 2)
+
+ for (const owner of owners) {
+ owner.posts = await mapper.entities.post.find({ where: { ownerId: { eq: owner.id } }, fields: ['id', 'title', 'longText'] })
+ }
+ same(owners, [{
+ id: '1',
+ name: 'Matteo',
+ posts: [{
+ id: '1',
+ title: 'Dog',
+ longText: 'Foo',
+ ownerId: '1'
+ }, {
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar',
+ ownerId: '1'
+ }]
+ }, {
+ id: '2',
+ name: 'Luca',
+ posts: [{
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz',
+ ownerId: '2'
+ }, {
+ id: '4',
+ title: 'Duck',
+ longText: 'A duck tale',
+ ownerId: '2'
+ }]
+ }])
+ }
+})
diff --git a/packages/sql-openapi/.taprc b/packages/sql-openapi/.taprc
new file mode 100644
index 0000000000..c1917e8701
--- /dev/null
+++ b/packages/sql-openapi/.taprc
@@ -0,0 +1 @@
+jobs: 1
diff --git a/packages/sql-openapi/LICENSE b/packages/sql-openapi/LICENSE
new file mode 100644
index 0000000000..261eeb9e9f
--- /dev/null
+++ b/packages/sql-openapi/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/sql-openapi/NOTICE b/packages/sql-openapi/NOTICE
new file mode 100644
index 0000000000..a7d8a8414a
--- /dev/null
+++ b/packages/sql-openapi/NOTICE
@@ -0,0 +1,13 @@
+ Copyright 2022 Platformatic
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/packages/sql-openapi/README.md b/packages/sql-openapi/README.md
new file mode 100644
index 0000000000..590b15aa79
--- /dev/null
+++ b/packages/sql-openapi/README.md
@@ -0,0 +1,13 @@
+# @platformatic/sql-openapi
+
+Check out the full documentation on [our website](https://oss.platformatic.dev/docs/reference/sql-rest/introduction).
+
+## Install
+
+```sh
+npm install @platformatic/sql-openapi
+```
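+
+## Usage
+
+A minimal sketch of how the plugin is typically registered, mirroring the test setup in this repository: `@platformatic/sql-mapper` maps the database first, then this plugin exposes the entities as REST routes. The connection string and port below are placeholders.
+
+```js
+'use strict'
+
+const fastify = require('fastify')
+const sqlMapper = require('@platformatic/sql-mapper')
+const sqlOpenAPI = require('@platformatic/sql-openapi')
+
+async function main () {
+  const app = fastify()
+
+  // Map the database tables to entities...
+  app.register(sqlMapper, {
+    connectionString: 'postgres://postgres:postgres@127.0.0.1/postgres'
+  })
+  // ...then expose each entity as CRUD routes with an OpenAPI schema.
+  app.register(sqlOpenAPI)
+
+  await app.listen({ port: 3042 })
+}
+
+main()
+```
+
+With the default options the generated OpenAPI document is also served at `/documentation/json`, as exercised by the tests in this package.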
+
+## License
+
+Apache 2.0
diff --git a/packages/sql-openapi/index.d.ts b/packages/sql-openapi/index.d.ts
new file mode 100644
index 0000000000..ac969dcbae
--- /dev/null
+++ b/packages/sql-openapi/index.d.ts
@@ -0,0 +1,12 @@
+import { FastifyPluginAsync } from 'fastify'
+import { OpenAPIV3 } from 'openapi-types'
+
+export interface SQLOpenApiPluginOptions extends Partial<OpenAPIV3.Document> {
+  /**
+   * Set to true to expose the documentation route.
+   */
+ exposeRoute?: boolean
+}
+
+declare const plugin: FastifyPluginAsync<SQLOpenApiPluginOptions>
+export default plugin
diff --git a/packages/sql-openapi/index.js b/packages/sql-openapi/index.js
new file mode 100644
index 0000000000..0762d92f27
--- /dev/null
+++ b/packages/sql-openapi/index.js
@@ -0,0 +1,64 @@
+'use strict'
+
+const Swagger = require('@fastify/swagger')
+const deepmerge = require('@fastify/deepmerge')({ all: true })
+const camelcase = require('camelcase')
+const { singularize } = require('inflected')
+const { mapSQLEntityToJSONSchema } = require('@platformatic/sql-json-schema-mapper')
+const entityPlugin = require('./lib/entity-to-routes')
+const fp = require('fastify-plugin')
+
+async function setupOpenAPI (app, opts) {
+ const openapiConfig = deepmerge({
+ exposeRoute: true,
+ info: {
+ title: 'Platformatic DB',
+ description: 'Exposing a SQL database as REST'
+ }
+ }, opts)
+ app.log.trace({ openapi: openapiConfig })
+ await app.register(Swagger, {
+ exposeRoute: openapiConfig.exposeRoute,
+ openapi: {
+ ...openapiConfig
+ },
+ refResolver: {
+ buildLocalReference (json, baseUri, fragment, i) {
+ // TODO figure out if we need def-${i}
+ /* istanbul ignore next */
+ return json.$id || `def-${i}`
+ }
+ }
+ })
+
+ for (const entity of Object.values(app.platformatic.entities)) {
+ const entitySchema = mapSQLEntityToJSONSchema(entity)
+ // TODO remove reverseRelationships from the entity
+ /* istanbul ignore next */
+ entity.reverseRelationships = entity.reverseRelationships || []
+
+ app.addSchema(entitySchema)
+
+ for (const relation of Object.values(entity.relations)) {
+ const targetEntityName = singularize(camelcase(relation.foreign_table_name))
+ const targetEntity = app.platformatic.entities[targetEntityName]
+ const reverseRelationship = {
+ sourceEntity: entity.name,
+ relation
+ }
+ /* istanbul ignore next */
+ targetEntity.reverseRelationships = targetEntity.reverseRelationships || []
+ targetEntity.reverseRelationships.push(reverseRelationship)
+ }
+ }
+
+ for (const entity of Object.values(app.platformatic.entities)) {
+ // TODO support ignore
+ app.register(entityPlugin, {
+ entity,
+ prefix: '/' + entity.pluralName
+ })
+ }
+}
+
+module.exports = fp(setupOpenAPI)
diff --git a/packages/sql-openapi/lib/entity-to-routes.js b/packages/sql-openapi/lib/entity-to-routes.js
new file mode 100644
index 0000000000..fe584486bf
--- /dev/null
+++ b/packages/sql-openapi/lib/entity-to-routes.js
@@ -0,0 +1,286 @@
+'use strict'
+
+const { mapSQLTypeToOpenAPIType } = require('@platformatic/sql-json-schema-mapper')
+const camelcase = require('camelcase')
+const { singularize } = require('inflected')
+
+async function entityPlugin (app, opts) {
+ const entity = opts.entity
+
+ const entitySchema = {
+ $ref: entity.name + '#'
+ }
+ const primaryKeyParams = getPrimaryKeyParams(entity)
+ const entityLinks = {}
+ const primaryKeyCamelcase = camelcase(entity.primaryKey)
+
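+  // Build OpenAPI response links: rows of this entity link to the GET route of
+  // each entity they reference through a foreign key, and (below) to the list
+  // route of the entities that reference them back.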
+ for (const relation of entity.relations) {
+ const ownField = camelcase(relation.column_name)
+ const relatedEntity = app.platformatic.entities[camelcase(singularize(relation.foreign_table_name))]
+ const relatedEntityPrimaryKeyCamelcase = capitalize(camelcase(relatedEntity.primaryKey))
+ const getEntityById = `Get${relatedEntity.name}With${relatedEntityPrimaryKeyCamelcase}`
+ entityLinks[getEntityById] = {
+ operationId: `get${relatedEntity.name}By${relatedEntityPrimaryKeyCamelcase}`,
+ parameters: {
+ [primaryKeyCamelcase]: `$response.body#/${ownField}`
+ }
+ }
+ }
+
+ for (const relationship of entity.reverseRelationships) {
+ const relation = relationship.relation
+ const theirField = camelcase(relation.column_name)
+ const ownField = camelcase(relation.foreign_column_name)
+ const relatedEntity = app.platformatic.entities[camelcase(singularize(relation.table_name))]
+ const getAllEntities = `GetAll${capitalize(relatedEntity.pluralName)}`
+ entityLinks[getAllEntities] = {
+ operationId: `getAll${capitalize(relatedEntity.pluralName)}`,
+ parameters: {
+ [`where.${theirField}.eq`]: `$response.body#/${ownField}`
+ }
+ }
+ }
+
+ const whereArgs = Object.keys(entity.fields).sort().map((name) => {
+ return entity.fields[name]
+ }).reduce((acc, field) => {
+ const baseKey = `where.${field.camelcase}.`
+ for (const modifier of ['eq', 'neq', 'gt', 'gte', 'lt', 'lte']) {
+ const key = baseKey + modifier
+ acc[key] = { type: mapSQLTypeToOpenAPIType(field.sqlType) }
+ }
+
+ for (const modifier of ['in', 'nin']) {
+ const key = baseKey + modifier
+ acc[key] = { type: 'string' }
+ }
+
+ return acc
+ }, {})
+
+  const orderByArgs = Object.keys(entity.fields).sort().map((name) => {
+ return entity.fields[name]
+ }).reduce((acc, field) => {
+ const key = `orderby.${field.camelcase}`
+ acc[key] = { type: 'string', enum: ['asc', 'desc'] }
+ return acc
+ }, {})
+
+ app.addHook('preValidation', async (req) => {
+ if (req.query.fields) {
+ req.query.fields = req.query.fields.split(',')
+ }
+ })
+
+ const fields = {
+ type: 'array',
+ items: {
+ type: 'string',
+ enum: Object.keys(entity.fields).map((field) => entity.fields[field].camelcase).sort()
+ }
+ }
+
+ app.get('/', {
+ schema: {
+ operationId: 'getAll' + entity.name,
+ querystring: {
+ type: 'object',
+ properties: {
+ limit: { type: 'integer' },
+ offset: { type: 'integer' },
+ fields,
+ ...whereArgs,
+          ...orderByArgs
+ },
+ additionalProperties: false
+ },
+ response: {
+ 200: {
+ type: 'array',
+ items: entitySchema
+ }
+ }
+ }
+ }, async function (request, reply) {
+ const query = request.query
+ const { limit, offset, fields } = query
+ // TODO computing this where clause will be slow
+ // refactor to use a barebone for(;;) loop
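+    // Query string keys shaped like 'where.<field>.<modifier>' are folded into
+    // the nested where object expected by the mapper, e.g. ?where.counter.gt=10
+    // becomes { counter: { gt: 10 } }; 'in'/'nin' take comma-separated lists.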
+ const where = Object.keys(query).reduce((acc, key) => {
+ if (key.indexOf('where.') === 0) {
+ const [, field, modifier] = key.split('.')
+ acc[field] = acc[field] || {}
+ let value = query[key]
+ if (modifier === 'in' || modifier === 'nin') {
+ // TODO handle escaping of ,
+ value = query[key].split(',')
+ if (mapSQLTypeToOpenAPIType(entity.fields[field].sqlType) === 'integer') {
+ value = value.map((v) => parseInt(v))
+ }
+ }
+ acc[field][modifier] = value
+ return acc
+ }
+ return acc
+ }, {})
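+    // 'orderby.<field>=asc|desc' keys become { field, direction } entries for entity.find()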
+ const orderBy = Object.keys(query).reduce((acc, key) => {
+ if (key.indexOf('orderby.') === 0) {
+ const [, field] = key.split('.')
+ acc.push({ field, direction: query[key] })
+ }
+ return acc
+ }, [])
+ const ctx = { app: this, reply }
+ const res = await entity.find({ limit, offset, fields, orderBy, where, ctx })
+ return res
+ })
+
+ app.post('/', {
+ schema: {
+ body: entitySchema,
+ response: {
+ 200: entitySchema
+ }
+ },
+ links: {
+ 200: entityLinks
+ }
+ }, async function (request, reply) {
+ const ctx = { app: this, reply }
+ const res = await entity.save({ input: request.body, ctx })
+ reply.header('location', `${app.prefix}/${res.id}`)
+ return res
+ })
+
+ app.get(`/:${primaryKeyCamelcase}`, {
+ schema: {
+ operationId: `get${entity.name}By${capitalize(primaryKeyCamelcase)}`,
+ params: primaryKeyParams,
+ querystring: {
+ type: 'object',
+ properties: {
+ fields
+ }
+ },
+ response: {
+ 200: entitySchema
+ }
+ },
+ links: {
+ 200: entityLinks
+ }
+ }, async function (request, reply) {
+ const ctx = { app: this, reply }
+ const res = await entity.find({
+ ctx,
+ where: {
+ [primaryKeyCamelcase]: {
+ eq: request.params[primaryKeyCamelcase]
+ }
+ },
+ fields: request.query.fields
+ })
+ if (res.length === 0) {
+ return reply.callNotFound()
+ }
+ return res[0]
+ })
+
+ for (const method of ['POST', 'PUT']) {
+ app.route({
+ url: `/:${primaryKeyCamelcase}`,
+ method,
+ schema: {
+ body: entitySchema,
+ params: primaryKeyParams,
+ querystring: {
+ type: 'object',
+ properties: {
+ fields
+ }
+ },
+ response: {
+ 200: entitySchema
+ }
+ },
+ links: {
+ 200: entityLinks
+ },
+ async handler (request, reply) {
+ const id = request.params[primaryKeyCamelcase]
+ const ctx = { app: this, reply }
+ const res = await entity.save({
+ ctx,
+ input: {
+ ...request.body,
+ [primaryKeyCamelcase]: id
+ },
+ where: {
+ [primaryKeyCamelcase]: {
+ eq: id
+ }
+ },
+ fields: request.query.fields
+ })
+ if (!res) {
+ return reply.callNotFound()
+ }
+ reply.header('location', `${app.prefix}/${res[primaryKeyCamelcase]}`)
+ return res
+ }
+ })
+ }
+
+ app.delete(`/:${primaryKeyCamelcase}`, {
+ schema: {
+ params: primaryKeyParams,
+ querystring: {
+ type: 'object',
+ properties: {
+ fields
+ }
+ },
+ response: {
+ 200: entitySchema
+ }
+ }
+ }, async function (request, reply) {
+ const ctx = { app: this, reply }
+ const res = await entity.delete({
+ ctx,
+ where: {
+ [primaryKeyCamelcase]: {
+ eq: request.params[primaryKeyCamelcase]
+ }
+ },
+ fields: request.query.fields
+ })
+ if (res.length === 0) {
+ return reply.callNotFound()
+ }
+ return res[0]
+ })
+}
+
+function getPrimaryKeyParams (entity) {
+ const primaryKey = entity.primaryKey
+ const fields = entity.fields
+ const field = fields[primaryKey]
+ const properties = {
+ [field.camelcase]: { type: mapSQLTypeToOpenAPIType(field.sqlType) }
+ }
+ const required = [field.camelcase]
+
+ return {
+ type: 'object',
+ properties,
+ required
+ }
+}
+
+function capitalize (str) {
+ return str.charAt(0).toUpperCase() + str.slice(1)
+}
+
+module.exports = entityPlugin
diff --git a/packages/sql-openapi/package.json b/packages/sql-openapi/package.json
new file mode 100644
index 0000000000..d05a91e508
--- /dev/null
+++ b/packages/sql-openapi/package.json
@@ -0,0 +1,46 @@
+{
+ "name": "@platformatic/sql-openapi",
+ "version": "0.0.21",
+ "description": "Map a SQL database to OpenAPI, for Fastify",
+ "main": "index.js",
+ "scripts": {
+ "test": "standard | snazzy && npm run test:typescript && npm run test:postgresql && npm run test:mariadb && npm run test:mysql && npm run test:mysql8 && npm run test:sqlite",
+ "test:postgresql": "DB=postgresql tap test/*.test.js",
+ "test:mariadb": "DB=mariadb tap test/*.test.js",
+ "test:mysql": "DB=mysql tap test/*.test.js",
+ "test:mysql8": "DB=mysql8 tap test/*.test.js",
+ "test:sqlite": "DB=sqlite tap test/*.test.js",
+ "test:typescript": "tsd"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/plaformatic/platformatic.git"
+ },
+ "author": "Matteo Collina ",
+ "license": "Apache-2.0",
+ "bugs": {
+ "url": "https://github.com/plaformatic/platformatic/issues"
+ },
+ "homepage": "https://github.com/plaformatic/platformatic#readme",
+ "devDependencies": {
+ "@platformatic/sql-mapper": "workspace:*",
+ "fastify": "^4.6.0",
+ "mercurius": "^11.0.0",
+ "openapi-types": "^12.0.2",
+ "snazzy": "^9.0.0",
+ "standard": "^17.0.0",
+ "tap": "^16.0.0",
+ "tsd": "^0.24.0"
+ },
+ "dependencies": {
+ "@platformatic/sql-json-schema-mapper": "workspace:*",
+ "@fastify/deepmerge": "^1.1.0",
+ "@fastify/swagger": "^7.4.1",
+ "camelcase": "^6.0.0",
+ "fastify-plugin": "^4.1.0",
+ "inflected": "^2.1.0"
+ },
+ "tsd": {
+ "directory": "test/types"
+ }
+}
diff --git a/packages/sql-openapi/test/helper.js b/packages/sql-openapi/test/helper.js
new file mode 100644
index 0000000000..e277dabbb0
--- /dev/null
+++ b/packages/sql-openapi/test/helper.js
@@ -0,0 +1,71 @@
+'use strict'
+
+// Needed to work with dates & postgresql
+// See https://node-postgres.com/features/types/
+process.env.TZ = 'UTC'
+
+const connInfo = {}
+
+if (!process.env.DB || process.env.DB === 'postgresql') {
+ connInfo.connectionString = 'postgres://postgres:postgres@127.0.0.1/postgres'
+ module.exports.isPg = true
+} else if (process.env.DB === 'mariadb') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1:3307/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'mysql') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'mysql8') {
+ connInfo.connectionString = 'mysql://root@127.0.0.1:3308/graph'
+ connInfo.poolSize = 10
+ module.exports.isMysql = true
+} else if (process.env.DB === 'sqlite') {
+ connInfo.connectionString = 'sqlite://:memory:'
+ module.exports.isSQLite = true
+}
+
+module.exports.connInfo = connInfo
+
+module.exports.clear = async function (db, sql) {
+ try {
+ await db.query(sql`DROP TABLE pages`)
+ } catch (err) {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE categories`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE posts`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE simple_types`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE owners`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE users`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE versions`)
+ } catch {
+ }
+
+ try {
+ await db.query(sql`DROP TABLE graphs`)
+ } catch {
+ }
+}
diff --git a/packages/sql-openapi/test/hooks.test.js b/packages/sql-openapi/test/hooks.test.js
new file mode 100644
index 0000000000..e73306e709
--- /dev/null
+++ b/packages/sql-openapi/test/hooks.test.js
@@ -0,0 +1,207 @@
+'use strict'
+
+const { test } = require('tap')
+const sqlOpenAPI = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const fastify = require('fastify')
+const { clear, connInfo, isSQLite } = require('./helper')
+
+test('basic hooks', async ({ pass, teardown, same, equal, plan, not }) => {
+ plan(19)
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+ },
+ hooks: {
+ Page: {
+ noKey () {
+ // This should never be called
+ },
+ async save (original, { input, ctx, fields }) {
+ pass('save called')
+
+ not(ctx.reply, undefined, 'ctx.reply is defined')
+ not(ctx.app, undefined, 'ctx.app is defined')
+ if (!input.id) {
+ same(input, {
+ title: 'Hello'
+ })
+
+ return original({
+ input: {
+ title: 'Hello from hook'
+ },
+ fields
+ })
+ } else {
+ same(input, {
+ id: 1,
+ title: 'Hello World'
+ })
+
+ return original({
+ input: {
+ id: 1,
+ title: 'Hello from hook 2'
+ },
+ fields
+ })
+ }
+ },
+ async find (original, args) {
+ pass('find called')
+
+ not(args.ctx.reply, undefined, 'ctx.reply is defined')
+ not(args.ctx.app, undefined, 'ctx.app is defined')
+ same(args.where, {
+ id: {
+ eq: 1
+ }
+ })
+ args.where = {
+ id: {
+ in: ['2']
+ }
+ }
+ same(args.fields, ['id', 'title'])
+ return original(args)
+ }
+ }
+ }
+ })
+ app.register(sqlOpenAPI)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages',
+ body: {
+ title: 'Hello'
+ }
+ })
+ equal(res.statusCode, 200, 'POST /pages status code')
+ same(res.json(), {
+ id: 1,
+ title: 'Hello from hook'
+ }, 'POST /pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages/1?fields=id,title'
+ })
+ equal(res.statusCode, 404, 'GET /pages/1 status code')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages/1',
+ body: {
+ title: 'Hello World'
+ }
+ })
+ equal(res.statusCode, 200, 'POST /pages/1 status code')
+ same(res.json(), {
+ id: 1,
+ title: 'Hello from hook 2'
+ }, 'POST /pages/1 response')
+ }
+})
+
+test('delete hook', async ({ pass, teardown, same, equal, plan, not }) => {
+ plan(9)
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+ },
+ hooks: {
+ Page: {
+ async delete (original, args) {
+ pass('delete called')
+
+ not(args.ctx.app, undefined, 'ctx.app is defined')
+ same(args.where, {
+ id: {
+ eq: '1'
+ }
+ })
+ same(args.fields, ['id', 'title'])
+ return original(args)
+ }
+ }
+ }
+ })
+ app.register(sqlOpenAPI)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages',
+ body: {
+ title: 'Hello'
+ }
+ })
+ equal(res.statusCode, 200, 'POST /pages status code')
+ same(res.json(), {
+ id: 1,
+ title: 'Hello'
+ }, 'POST /pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'DELETE',
+ url: '/pages/1?fields=id,title'
+ })
+ equal(res.statusCode, 200, 'DELETE /pages/1 status code')
+ same(res.json(), {
+ id: 1,
+ title: 'Hello'
+ }, 'DELETE /pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages/1'
+ })
+ equal(res.statusCode, 404, 'GET /pages/1 status code')
+ }
+})
diff --git a/packages/sql-openapi/test/ignore.test.js b/packages/sql-openapi/test/ignore.test.js
new file mode 100644
index 0000000000..3f05ca7899
--- /dev/null
+++ b/packages/sql-openapi/test/ignore.test.js
@@ -0,0 +1,91 @@
+'use strict'
+
+const { test } = require('tap')
+const fastify = require('fastify')
+const sqlOpenAPI = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const { clear, connInfo, isSQLite } = require('./helper')
+
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ await db.query(sql`CREATE TABLE categories (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ await db.query(sql`CREATE TABLE categories (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(42)
+ );`)
+ }
+}
+
+test('ignore a table', async ({ pass, teardown, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ ignore: {
+ categories: true
+ },
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlOpenAPI)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/documentation/json'
+ })
+ equal(res.statusCode, 200, 'GET /documentation/json status code')
+ const data = res.json()
+    equal(data.paths['/categories/'], undefined, '/categories/ paths are ignored')
+ }
+})
+
+test('ignore a column', async ({ pass, teardown, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ ignore: {
+ categories: {
+ name: true
+ }
+ },
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlOpenAPI)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/documentation/json'
+ })
+ equal(res.statusCode, 200, 'GET /documentation/json status code')
+ const data = res.json()
+ equal(data.components.schemas.Category.properties.name, undefined, 'name property is ignored')
+ }
+})
diff --git a/packages/sql-openapi/test/order_by.test.js b/packages/sql-openapi/test/order_by.test.js
new file mode 100644
index 0000000000..920c172dcb
--- /dev/null
+++ b/packages/sql-openapi/test/order_by.test.js
@@ -0,0 +1,176 @@
+'use strict'
+
+const t = require('tap')
+const fastify = require('fastify')
+const sqlOpenAPI = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const { clear, connInfo, isSQLite } = require('./helper')
+const { resolve } = require('path')
+const { test } = t
+
+Object.defineProperty(t, 'fullname', {
+ value: 'platformatic/db/openapi/orderby'
+})
+
+test('one-level order by', async (t) => {
+ const { pass, teardown, same, equal, matchSnapshot } = t
+ t.snapshotFile = resolve(__dirname, 'tap-snapshots', 'orderby-openapi-1.cjs')
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ counter INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ counter INTEGER
+ );`)
+ }
+ }
+ })
+ app.register(sqlOpenAPI)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const pages = [
+ { title: 'Page 1', counter: 3 },
+ { title: 'Page 2', counter: 2 },
+ { title: 'Page 3', counter: 1 }
+ ]
+ const expected = [
+ { id: 1, title: 'Page 1', counter: 3 },
+ { id: 2, title: 'Page 2', counter: 2 },
+ { id: 3, title: 'Page 3', counter: 1 }
+ ]
+
+ for (const body of pages) {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages',
+ body
+ })
+ equal(res.statusCode, 200, 'POST /pages status code')
+ same(res.json(), expected.shift(), 'POST /pages response')
+ }
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages?orderby.counter=asc&fields=id,title,counter'
+ })
+    equal(res.statusCode, 200, 'GET /pages?orderby.counter=asc status code')
+ same(res.json(), [
+ { id: 3, title: 'Page 3', counter: 1 },
+ { id: 2, title: 'Page 2', counter: 2 },
+ { id: 1, title: 'Page 1', counter: 3 }
+    ], 'GET /pages?orderby.counter=asc response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages?orderby.counter=desc&fields=id,title,counter'
+ })
+    equal(res.statusCode, 200, 'GET /pages?orderby.counter=desc status code')
+ same(res.json(), [
+ { id: 1, title: 'Page 1', counter: 3 },
+ { id: 2, title: 'Page 2', counter: 2 },
+ { id: 3, title: 'Page 3', counter: 1 }
+    ], 'GET /pages?orderby.counter=desc response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/documentation/json'
+ })
+ const json = res.json()
+ // console.log(JSON.stringify(json, null, 2))
+ matchSnapshot(json, 'GET /documentation/json response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages?orderby.counter=xxxx'
+ })
+    equal(res.statusCode, 400, 'GET /pages?orderby.counter=xxxx status code')
+    same(res.json(), {
+      statusCode: 400,
+      error: 'Bad Request',
+      message: 'querystring/orderby.counter must be equal to one of the allowed values'
+    }, 'GET /pages?orderby.counter=xxxx response')
+ }
+})
+
+test('list order by', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ counter INTEGER,
+ counter2 INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ counter INTEGER,
+ counter2 INTEGER
+ );`)
+ }
+ }
+ })
+ app.register(sqlOpenAPI)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.platformatic.entities.page.insert({
+ inputs: [
+ { counter: 3, counter2: 3 },
+ { counter: 3, counter2: 2 },
+ { counter: 1, counter2: 1 }
+ ]
+ })
+ same(res, [
+ { id: 1, counter: 3, counter2: 3 },
+ { id: 2, counter: 3, counter2: 2 },
+ { id: 3, counter: 1, counter2: 1 }
+ ])
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages?orderby.counter=asc&orderby.counter2=desc&fields=id,counter,counter2'
+ })
+    equal(res.statusCode, 200, 'GET /pages?orderby.counter=asc&orderby.counter2=desc status code')
+ same(res.json(), [
+ { id: 3, counter: 1, counter2: 1 },
+ { id: 1, counter: 3, counter2: 3 },
+ { id: 2, counter: 3, counter2: 2 }
+    ], 'GET /pages?orderby.counter=asc&orderby.counter2=desc response')
+ }
+})
diff --git a/packages/sql-openapi/test/simple.test.js b/packages/sql-openapi/test/simple.test.js
new file mode 100644
index 0000000000..077b115388
--- /dev/null
+++ b/packages/sql-openapi/test/simple.test.js
@@ -0,0 +1,444 @@
+'use strict'
+
+const t = require('tap')
+const sqlOpenAPI = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const fastify = require('fastify')
+const { clear, connInfo, isSQLite } = require('./helper')
+const { resolve } = require('path')
+const { test } = t
+
+Object.defineProperty(t, 'fullname', {
+ value: 'platformatic/db/openapi/simple'
+})
+
+async function createBasicPages (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42) NOT NULL
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42) NOT NULL
+ );`)
+ }
+}
+
+test('simple db, simple rest API', async (t) => {
+ const { pass, teardown, same, equal, matchSnapshot } = t
+ t.snapshotFile = resolve(__dirname, 'tap-snapshots', 'simple-openapi-1.cjs')
+
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlOpenAPI)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages',
+ body: {
+ title: 'Hello'
+ }
+ })
+ equal(res.statusCode, 200, 'POST /pages status code')
+    equal(res.headers.location, '/pages/1', 'POST /pages location')
+ same(res.json(), {
+ id: 1,
+ title: 'Hello'
+ }, 'POST /pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages/1'
+ })
+ equal(res.statusCode, 200, 'GET /pages/1 status code')
+ same(res.json(), {
+ id: 1,
+ title: 'Hello'
+ }, 'GET /pages/1 response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages/1',
+ body: {
+ title: 'Hello World'
+ }
+ })
+ equal(res.statusCode, 200, 'POST /pages/1 status code')
+ same(res.json(), {
+ id: 1,
+ title: 'Hello World'
+ }, 'POST /pages/1 response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages/1'
+ })
+ equal(res.statusCode, 200, 'GET /pages/1 status code')
+ same(res.json(), {
+ id: 1,
+ title: 'Hello World'
+ }, 'GET /pages/1 response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages',
+ body: {
+ tilte: 'Hello' // typo, wrong field
+ }
+ })
+ equal(res.statusCode, 400, 'POST /pages status code')
+ equal(res.headers.location, undefined, 'no location header')
+ same(res.json(), {
+ statusCode: 400,
+ error: 'Bad Request',
+ message: "body must have required property 'title'"
+ }, 'POST /pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/documentation/json'
+ })
+ const json = res.json()
+ // console.log(JSON.stringify(json, null, 2))
+ matchSnapshot(json, 'GET /documentation/json response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages/1?fields=title'
+ })
+ same(res.json(), {
+ title: 'Hello World'
+ }, 'GET /pages/1?fields=title response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages/1?fields=title',
+ body: {
+ title: 'Hello fields'
+ }
+ })
+ same(res.json(), {
+ title: 'Hello fields'
+ }, 'POST /pages/1?fields=title response')
+ }
+})
+
+async function createBasicPagesNullable (db, sql) {
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE pages (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE pages (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42)
+ );`)
+ }
+}
+
+test('nullable fields', async (t) => {
+ const { pass, teardown, same, equal, matchSnapshot } = t
+ t.snapshotFile = resolve(__dirname, 'tap-snapshots', 'simple-openapi-2.cjs')
+
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPagesNullable(db, sql)
+ }
+ })
+ app.register(sqlOpenAPI)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages',
+ body: {
+ // empty object
+ }
+ })
+ equal(res.statusCode, 200, 'POST /pages status code')
+    equal(res.headers.location, '/pages/1', 'POST /pages location')
+ same(res.json(), {
+ id: 1,
+ title: null
+ }, 'POST /pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/documentation/json'
+ })
+ const openapi = res.json()
+ // console.log(JSON.stringify(openapi, null, 2))
+ matchSnapshot(openapi, 'GET /documentation/json response')
+ }
+})
+
+test('list', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE posts (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE posts (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT
+ );`)
+ }
+ }
+ })
+ app.register(sqlOpenAPI)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const posts = [{
+ title: 'Post 1',
+ longText: 'This is a long text 1'
+ }, {
+ title: 'Post 2',
+ longText: 'This is a long text 2'
+ }, {
+ title: 'Post 3',
+ longText: 'This is a long text 3'
+ }, {
+ title: 'Post 4',
+ longText: 'This is a long text 4'
+ }]
+
+ for (const body of posts) {
+ await app.inject({
+ method: 'POST',
+ url: '/posts',
+ body
+ })
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/posts'
+ })
+ equal(res.statusCode, 200, '/posts status code')
+ same(res.json(), posts.map((p, i) => {
+ return { ...p, id: i + 1 + '' }
+ }), '/posts response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/posts?limit=2&offset=1'
+ })
+ equal(res.statusCode, 200, 'posts status code')
+ same(res.json(), posts.map((p, i) => {
+ return { ...p, id: i + 1 + '' }
+ }).slice(1, 3), 'posts response')
+ }
+})
+
+test('not found', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlOpenAPI)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages/1'
+ })
+ equal(res.statusCode, 404, 'GET /pages/1 status code')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages/1',
+ body: {
+ title: 'Hello World'
+ }
+ })
+ equal(res.statusCode, 404, 'POST /pages/1 status code')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'DELETE',
+ url: '/pages/1'
+ })
+ equal(res.statusCode, 404, 'DELETE /pages/1 status code')
+ }
+})
+
+test('delete', async ({ pass, teardown, same, equal }) => {
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlOpenAPI)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages/1'
+ })
+ equal(res.statusCode, 404, 'GET /pages/1 status code')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages',
+ body: {
+ title: 'Hello'
+ }
+ })
+ equal(res.statusCode, 200, 'POST /pages status code')
+    equal(res.headers.location, '/pages/1', 'POST /pages location')
+ same(res.json(), {
+ id: 1,
+ title: 'Hello'
+ }, 'POST /pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'DELETE',
+ url: '/pages/1'
+ })
+ equal(res.statusCode, 200, 'DELETE /pages/1 status code')
+ same(res.json(), {
+ id: 1,
+ title: 'Hello'
+ }, 'DELETE /pages response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/pages/1'
+ })
+ equal(res.statusCode, 404, 'GET /pages/1 status code')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/pages',
+ body: {
+ title: 'Hello fields'
+ }
+ })
+ const { id } = res.json()
+
+ const res2 = await app.inject({
+ method: 'DELETE',
+ url: `/pages/${id}?fields=title`
+ })
+ same(res2.json(), {
+ title: 'Hello fields'
+ }, 'DELETE /pages?fields=title response')
+ }
+})
+
+test('simple db, simple rest API', async (t) => {
+ const { pass, teardown, matchSnapshot } = t
+ t.snapshotFile = resolve(__dirname, 'tap-snapshots', 'simple-openapi-3.cjs')
+
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+ await createBasicPages(db, sql)
+ }
+ })
+ app.register(sqlOpenAPI, {
+ info: {
+ title: 'Simple Title',
+ description: 'Simple Description',
+ version: '42.42.42'
+ }
+ })
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ const res = await app.inject({
+ method: 'GET',
+ url: '/documentation/json'
+ })
+ const json = res.json()
+ matchSnapshot(json, 'GET /documentation/json response')
+})
diff --git a/packages/sql-openapi/test/tap-snapshots/orderby-openapi-1.cjs b/packages/sql-openapi/test/tap-snapshots/orderby-openapi-1.cjs
new file mode 100644
index 0000000000..93d7173087
--- /dev/null
+++ b/packages/sql-openapi/test/tap-snapshots/orderby-openapi-1.cjs
@@ -0,0 +1,531 @@
+/* IMPORTANT
+ * This snapshot file is auto-generated, but designed for humans.
+ * It should be checked into source control and tracked carefully.
+ * Re-generate by setting TAP_SNAPSHOT=1 and running tests.
+ * Make sure to inspect the output below. Do not ignore changes!
+ */
+'use strict'
+exports['platformatic/db/openapi/orderby one-level order by > GET /documentation/json response 1'] = `
+Object {
+ "components": Object {
+ "schemas": Object {
+ "Page": Object {
+ "description": "A Page",
+ "properties": Object {
+ "counter": Object {
+ "nullable": true,
+ "type": "integer",
+ },
+ "id": Object {
+ "type": "integer",
+ },
+ "title": Object {
+ "nullable": true,
+ "type": "string",
+ },
+ },
+ "required": Array [],
+ "title": "Page",
+ "type": "object",
+ },
+ },
+ },
+ "info": Object {
+ "description": "Exposing a SQL database as REST",
+ "title": "Platformatic DB",
+ },
+ "openapi": "3.0.3",
+ "paths": Object {
+ "/pages/": Object {
+ "get": Object {
+ "operationId": "getAllPage",
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "limit",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "offset",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "counter",
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.eq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.neq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.gt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.gte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.lt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.lte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.eq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.neq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.gt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.gte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.lt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.lte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.eq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.neq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.gt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.gte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.lt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.lte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.counter",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.id",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.title",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "items": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ "type": "array",
+ },
+ },
+ },
+ "description": "Default Response",
+ },
+ },
+ },
+ "post": Object {
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ },
+ "/pages/{id}": Object {
+ "delete": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "counter",
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ },
+ },
+ },
+ "get": Object {
+ "operationId": "getPageById",
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "counter",
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ "post": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "counter",
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ "put": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "counter",
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ },
+ },
+}
+`
diff --git a/packages/sql-openapi/test/tap-snapshots/simple-openapi-1.cjs b/packages/sql-openapi/test/tap-snapshots/simple-openapi-1.cjs
new file mode 100644
index 0000000000..e187d8a3be
--- /dev/null
+++ b/packages/sql-openapi/test/tap-snapshots/simple-openapi-1.cjs
@@ -0,0 +1,447 @@
+/* IMPORTANT
+ * This snapshot file is auto-generated, but designed for humans.
+ * It should be checked into source control and tracked carefully.
+ * Re-generate by setting TAP_SNAPSHOT=1 and running tests.
+ * Make sure to inspect the output below. Do not ignore changes!
+ */
+'use strict'
+exports['platformatic/db/openapi/simple simple db, simple rest API > GET /documentation/json response 1'] = `
+Object {
+ "components": Object {
+ "schemas": Object {
+ "Page": Object {
+ "description": "A Page",
+ "properties": Object {
+ "id": Object {
+ "type": "integer",
+ },
+ "title": Object {
+ "type": "string",
+ },
+ },
+ "required": Array [
+ "title",
+ ],
+ "title": "Page",
+ "type": "object",
+ },
+ },
+ },
+ "info": Object {
+ "description": "Exposing a SQL database as REST",
+ "title": "Platformatic DB",
+ },
+ "openapi": "3.0.3",
+ "paths": Object {
+ "/pages/": Object {
+ "get": Object {
+ "operationId": "getAllPage",
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "limit",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "offset",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.eq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.neq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.gt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.gte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.lt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.lte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.eq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.neq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.gt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.gte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.lt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.lte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.id",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.title",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "items": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ "type": "array",
+ },
+ },
+ },
+ "description": "Default Response",
+ },
+ },
+ },
+ "post": Object {
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ },
+ "/pages/{id}": Object {
+ "delete": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ },
+ },
+ },
+ "get": Object {
+ "operationId": "getPageById",
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ "post": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ "put": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ },
+ },
+}
+`
diff --git a/packages/sql-openapi/test/tap-snapshots/simple-openapi-2.cjs b/packages/sql-openapi/test/tap-snapshots/simple-openapi-2.cjs
new file mode 100644
index 0000000000..66200437f8
--- /dev/null
+++ b/packages/sql-openapi/test/tap-snapshots/simple-openapi-2.cjs
@@ -0,0 +1,446 @@
+/* IMPORTANT
+ * This snapshot file is auto-generated, but designed for humans.
+ * It should be checked into source control and tracked carefully.
+ * Re-generate by setting TAP_SNAPSHOT=1 and running tests.
+ * Make sure to inspect the output below. Do not ignore changes!
+ */
+'use strict'
+exports['platformatic/db/openapi/simple nullable fields > GET /documentation/json response 1'] = `
+Object {
+ "components": Object {
+ "schemas": Object {
+ "Page": Object {
+ "description": "A Page",
+ "properties": Object {
+ "id": Object {
+ "type": "integer",
+ },
+ "title": Object {
+ "nullable": true,
+ "type": "string",
+ },
+ },
+ "required": Array [],
+ "title": "Page",
+ "type": "object",
+ },
+ },
+ },
+ "info": Object {
+ "description": "Exposing a SQL database as REST",
+ "title": "Platformatic DB",
+ },
+ "openapi": "3.0.3",
+ "paths": Object {
+ "/pages/": Object {
+ "get": Object {
+ "operationId": "getAllPage",
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "limit",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "offset",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.eq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.neq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.gt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.gte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.lt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.lte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.eq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.neq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.gt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.gte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.lt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.lte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.id",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.title",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "items": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ "type": "array",
+ },
+ },
+ },
+ "description": "Default Response",
+ },
+ },
+ },
+ "post": Object {
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ },
+ "/pages/{id}": Object {
+ "delete": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ },
+ },
+ },
+ "get": Object {
+ "operationId": "getPageById",
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ "post": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ "put": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ },
+ },
+}
+`
diff --git a/packages/sql-openapi/test/tap-snapshots/simple-openapi-3.cjs b/packages/sql-openapi/test/tap-snapshots/simple-openapi-3.cjs
new file mode 100644
index 0000000000..16b3abc5c8
--- /dev/null
+++ b/packages/sql-openapi/test/tap-snapshots/simple-openapi-3.cjs
@@ -0,0 +1,448 @@
+/* IMPORTANT
+ * This snapshot file is auto-generated, but designed for humans.
+ * It should be checked into source control and tracked carefully.
+ * Re-generate by setting TAP_SNAPSHOT=1 and running tests.
+ * Make sure to inspect the output below. Do not ignore changes!
+ */
+'use strict'
+exports['platformatic/db/openapi/simple simple db, simple rest API > GET /documentation/json response 1'] = `
+Object {
+ "components": Object {
+ "schemas": Object {
+ "Page": Object {
+ "description": "A Page",
+ "properties": Object {
+ "id": Object {
+ "type": "integer",
+ },
+ "title": Object {
+ "type": "string",
+ },
+ },
+ "required": Array [
+ "title",
+ ],
+ "title": "Page",
+ "type": "object",
+ },
+ },
+ },
+ "info": Object {
+ "description": "Simple Description",
+ "title": "Simple Title",
+ "version": "42.42.42",
+ },
+ "openapi": "3.0.3",
+ "paths": Object {
+ "/pages/": Object {
+ "get": Object {
+ "operationId": "getAllPage",
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "limit",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "offset",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.eq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.neq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.gt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.gte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.lt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.lte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.eq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.neq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.gt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.gte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.lt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.lte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.id",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.title",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "items": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ "type": "array",
+ },
+ },
+ },
+ "description": "Default Response",
+ },
+ },
+ },
+ "post": Object {
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ },
+ "/pages/{id}": Object {
+ "delete": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ },
+ },
+ },
+ "get": Object {
+ "operationId": "getPageById",
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ "post": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ "put": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Page",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ },
+ },
+}
+`
diff --git a/packages/sql-openapi/test/tap-snapshots/where-openapi-1.cjs b/packages/sql-openapi/test/tap-snapshots/where-openapi-1.cjs
new file mode 100644
index 0000000000..49d02d73c8
--- /dev/null
+++ b/packages/sql-openapi/test/tap-snapshots/where-openapi-1.cjs
@@ -0,0 +1,616 @@
+/* IMPORTANT
+ * This snapshot file is auto-generated, but designed for humans.
+ * It should be checked into source control and tracked carefully.
+ * Re-generate by setting TAP_SNAPSHOT=1 and running tests.
+ * Make sure to inspect the output below. Do not ignore changes!
+ */
+'use strict'
+exports['platformatic/db/openapi/where list > matches expected OpenAPI defs 1'] = `
+Object {
+ "components": Object {
+ "schemas": Object {
+ "Post": Object {
+ "description": "A Post",
+ "properties": Object {
+ "counter": Object {
+ "nullable": true,
+ "type": "integer",
+ },
+ "id": Object {
+ "type": "integer",
+ },
+ "longText": Object {
+ "nullable": true,
+ "type": "string",
+ },
+ "title": Object {
+ "nullable": true,
+ "type": "string",
+ },
+ },
+ "required": Array [],
+ "title": "Post",
+ "type": "object",
+ },
+ },
+ },
+ "info": Object {
+ "description": "Exposing a SQL database as REST",
+ "title": "Platformatic DB",
+ },
+ "openapi": "3.0.3",
+ "paths": Object {
+ "/posts/": Object {
+ "get": Object {
+ "operationId": "getAllPost",
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "limit",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "offset",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "counter",
+ "id",
+ "longText",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.eq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.neq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.gt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.gte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.lt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.lte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.eq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.neq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.gt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.gte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.lt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.lte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.eq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.neq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.gt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.gte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.lt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.lte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.eq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.neq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.gt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.gte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.lt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.lte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.counter",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.id",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.longText",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.title",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "items": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ "type": "array",
+ },
+ },
+ },
+ "description": "Default Response",
+ },
+ },
+ },
+ "post": Object {
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ },
+ "/posts/{id}": Object {
+ "delete": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "counter",
+ "id",
+ "longText",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ "description": "Default Response",
+ },
+ },
+ },
+ "get": Object {
+ "operationId": "getPostById",
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "counter",
+ "id",
+ "longText",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ "post": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "counter",
+ "id",
+ "longText",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ "put": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "counter",
+ "id",
+ "longText",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {},
+ },
+ },
+ },
+ },
+ },
+}
+`
diff --git a/packages/sql-openapi/test/tap-snapshots/where-openapi-2.cjs b/packages/sql-openapi/test/tap-snapshots/where-openapi-2.cjs
new file mode 100644
index 0000000000..378e377b46
--- /dev/null
+++ b/packages/sql-openapi/test/tap-snapshots/where-openapi-2.cjs
@@ -0,0 +1,1181 @@
+/* IMPORTANT
+ * This snapshot file is auto-generated, but designed for humans.
+ * It should be checked into source control and tracked carefully.
+ * Re-generate by setting TAP_SNAPSHOT=1 and running tests.
+ * Make sure to inspect the output below. Do not ignore changes!
+ */
+'use strict'
+exports['platformatic/db/openapi/where nested where > matches expected OpenAPI defs 1'] = `
+Object {
+ "components": Object {
+ "schemas": Object {
+ "Owner": Object {
+ "description": "A Owner",
+ "properties": Object {
+ "id": Object {
+ "type": "integer",
+ },
+ "name": Object {
+ "nullable": true,
+ "type": "string",
+ },
+ },
+ "required": Array [],
+ "title": "Owner",
+ "type": "object",
+ },
+ "Post": Object {
+ "description": "A Post",
+ "properties": Object {
+ "counter": Object {
+ "nullable": true,
+ "type": "integer",
+ },
+ "id": Object {
+ "type": "integer",
+ },
+ "longText": Object {
+ "nullable": true,
+ "type": "string",
+ },
+ "ownerId": Object {
+ "nullable": true,
+ "type": "integer",
+ },
+ "title": Object {
+ "nullable": true,
+ "type": "string",
+ },
+ },
+ "required": Array [],
+ "title": "Post",
+ "type": "object",
+ },
+ },
+ },
+ "info": Object {
+ "description": "Exposing a SQL database as REST",
+ "title": "Platformatic DB",
+ },
+ "openapi": "3.0.3",
+ "paths": Object {
+ "/owners/": Object {
+ "get": Object {
+ "operationId": "getAllOwner",
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "limit",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "offset",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "name",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.eq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.neq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.gt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.gte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.lt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.lte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.name.eq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.name.neq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.name.gt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.name.gte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.name.lt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.name.lte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.name.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.name.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.id",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.name",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "items": Object {
+ "$ref": "#/components/schemas/Owner",
+ },
+ "type": "array",
+ },
+ },
+ },
+ "description": "Default Response",
+ },
+ },
+ },
+ "post": Object {
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Owner",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Owner",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {
+ "GetAllPosts": Object {
+ "operationId": "getAllPosts",
+ "parameters": Object {
+ "where.ownerId.eq": "$response.body#/id",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "/owners/{id}": Object {
+ "delete": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "name",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Owner",
+ },
+ },
+ },
+ "description": "Default Response",
+ },
+ },
+ },
+ "get": Object {
+ "operationId": "getOwnerById",
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "name",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Owner",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {
+ "GetAllPosts": Object {
+ "operationId": "getAllPosts",
+ "parameters": Object {
+ "where.ownerId.eq": "$response.body#/id",
+ },
+ },
+ },
+ },
+ },
+ },
+ "post": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "name",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Owner",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Owner",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {
+ "GetAllPosts": Object {
+ "operationId": "getAllPosts",
+ "parameters": Object {
+ "where.ownerId.eq": "$response.body#/id",
+ },
+ },
+ },
+ },
+ },
+ },
+ "put": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "id",
+ "name",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Owner",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Owner",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {
+ "GetAllPosts": Object {
+ "operationId": "getAllPosts",
+ "parameters": Object {
+ "where.ownerId.eq": "$response.body#/id",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "/posts/": Object {
+ "get": Object {
+ "operationId": "getAllPost",
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "limit",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "offset",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "counter",
+ "id",
+ "longText",
+ "ownerId",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.eq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.neq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.gt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.gte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.lt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.lte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.counter.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.eq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.neq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.gt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.gte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.lt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.lte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.id.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.eq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.neq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.gt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.gte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.lt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.lte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.longText.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.ownerId.eq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.ownerId.neq",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.ownerId.gt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.ownerId.gte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.ownerId.lt",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.ownerId.lte",
+ "required": false,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.ownerId.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.ownerId.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.eq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.neq",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.gt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.gte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.lt",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.lte",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.in",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "where.title.nin",
+ "required": false,
+ "schema": Object {
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.counter",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.id",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.longText",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.ownerId",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ Object {
+ "in": "query",
+ "name": "orderby.title",
+ "required": false,
+ "schema": Object {
+ "enum": Array [
+ "asc",
+ "desc",
+ ],
+ "type": "string",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "items": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ "type": "array",
+ },
+ },
+ },
+ "description": "Default Response",
+ },
+ },
+ },
+ "post": Object {
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {
+ "GetOwnerWithId": Object {
+ "operationId": "getOwnerById",
+ "parameters": Object {
+ "id": "$response.body#/ownerId",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ "/posts/{id}": Object {
+ "delete": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "counter",
+ "id",
+ "longText",
+ "ownerId",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ "description": "Default Response",
+ },
+ },
+ },
+ "get": Object {
+ "operationId": "getPostById",
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "counter",
+ "id",
+ "longText",
+ "ownerId",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {
+ "GetOwnerWithId": Object {
+ "operationId": "getOwnerById",
+ "parameters": Object {
+ "id": "$response.body#/ownerId",
+ },
+ },
+ },
+ },
+ },
+ },
+ "post": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "counter",
+ "id",
+ "longText",
+ "ownerId",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {
+ "GetOwnerWithId": Object {
+ "operationId": "getOwnerById",
+ "parameters": Object {
+ "id": "$response.body#/ownerId",
+ },
+ },
+ },
+ },
+ },
+ },
+ "put": Object {
+ "parameters": Array [
+ Object {
+ "in": "query",
+ "name": "fields",
+ "required": false,
+ "schema": Object {
+ "items": Object {
+ "enum": Array [
+ "counter",
+ "id",
+ "longText",
+ "ownerId",
+ "title",
+ ],
+ "type": "string",
+ },
+ "type": "array",
+ },
+ },
+ Object {
+ "in": "path",
+ "name": "id",
+ "required": true,
+ "schema": Object {
+ "type": "integer",
+ },
+ },
+ ],
+ "requestBody": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ },
+ "responses": Object {
+ "200": Object {
+ "content": Object {
+ "application/json": Object {
+ "schema": Object {
+ "$ref": "#/components/schemas/Post",
+ },
+ },
+ },
+ "description": "Default Response",
+ "links": Object {
+ "GetOwnerWithId": Object {
+ "operationId": "getOwnerById",
+ "parameters": Object {
+ "id": "$response.body#/ownerId",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+}
+`
diff --git a/packages/sql-openapi/test/types/index.test-d.ts b/packages/sql-openapi/test/types/index.test-d.ts
new file mode 100644
index 0000000000..af3db7d76c
--- /dev/null
+++ b/packages/sql-openapi/test/types/index.test-d.ts
@@ -0,0 +1,38 @@
+import { fastify, FastifyInstance } from 'fastify'
+import plugin, { SQLOpenApiPluginOptions } from '../../index'
+
+const instance: FastifyInstance = fastify()
+const document: SQLOpenApiPluginOptions = {
+ exposeRoute: true,
+ info: {
+ title: 'Test swagger',
+ description: 'testing the fastify swagger api',
+ version: '0.1.0'
+ },
+ servers: [
+ {
+ url: 'http://localhost'
+ }
+ ],
+ tags: [
+ { name: 'tag' }
+ ],
+ components: {
+ securitySchemes: {
+ apiKey: {
+ type: 'apiKey',
+ name: 'apiKey',
+ in: 'header'
+ }
+ }
+ },
+ security: [{
+ apiKey: []
+ }],
+ externalDocs: {
+ description: 'Find more info here',
+ url: 'https://swagger.io'
+ }
+}
+
+instance.register(plugin, document)
diff --git a/packages/sql-openapi/test/where.test.js b/packages/sql-openapi/test/where.test.js
new file mode 100644
index 0000000000..5fa0f899d3
--- /dev/null
+++ b/packages/sql-openapi/test/where.test.js
@@ -0,0 +1,464 @@
+'use strict'
+
+const t = require('tap')
+const fastify = require('fastify')
+const sqlOpenAPI = require('..')
+const sqlMapper = require('@platformatic/sql-mapper')
+const { clear, connInfo, isSQLite, isMysql } = require('./helper')
+const { resolve } = require('path')
+const { test } = t
+
+Object.defineProperty(t, 'fullname', {
+ value: 'platformatic/db/openapi/where'
+})
+
+test('list', async (t) => {
+ const { pass, teardown, same, equal, matchSnapshot } = t
+ t.snapshotFile = resolve(__dirname, 'tap-snapshots', 'where-openapi-1.cjs')
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isSQLite) {
+ await db.query(sql`CREATE TABLE posts (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER
+ );`)
+ } else {
+ await db.query(sql`CREATE TABLE posts (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER
+ );`)
+ }
+ }
+ })
+ app.register(sqlOpenAPI)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/documentation/json'
+ })
+ const openapi = res.json()
+ // console.log(JSON.stringify(openapi, null, 2))
+ matchSnapshot(openapi, 'matches expected OpenAPI defs')
+ }
+
+ const posts = [{
+ title: 'Dog',
+ longText: 'Foo',
+ counter: 10
+ }, {
+ title: 'Cat',
+ longText: 'Bar',
+ counter: 20
+ }, {
+ title: 'Mouse',
+ longText: 'Baz',
+ counter: 30
+ }, {
+ title: 'Duck',
+ longText: 'A duck tale',
+ counter: 40
+ }]
+
+ for (const body of posts) {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/posts',
+ body
+ })
+ equal(res.statusCode, 200, 'POST /posts status code')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/posts?fields=id,title,longText'
+ })
+ equal(res.statusCode, 200, 'GET /posts?fields=id,title,longText status code')
+ same(res.json(), [{
+ id: '1',
+ title: 'Dog',
+ longText: 'Foo'
+ }, {
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }, {
+ id: '4',
+ title: 'Duck',
+ longText: 'A duck tale'
+ }], 'GET /posts?fields=id,title,longText response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/posts?where.title.eq=Dog&fields=id,title,longText'
+ })
+ equal(res.statusCode, 200, 'GET /posts?where.title.eq=Dog status code')
+ same(res.json(), [{
+ id: '1',
+ title: 'Dog',
+ longText: 'Foo'
+ }], 'GET /posts?where.title.eq=Dog response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/posts?where.title.neq=Dog&fields=id,title,longText'
+ })
+ equal(res.statusCode, 200, 'GET /posts?where.title.neq=Dog status code')
+ same(res.json(), [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }, {
+ id: '4',
+ title: 'Duck',
+ longText: 'A duck tale'
+ }], 'GET /posts?where.title.neq=Dog response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/posts?where.counter.gt=10&fields=id,title,longText'
+ })
+ equal(res.statusCode, 200, 'GET /posts?where.counter.gt=10 status code')
+ same(res.json(), [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }, {
+ id: '4',
+ title: 'Duck',
+ longText: 'A duck tale'
+ }], 'GET /posts?where.counter.gt=10 response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/posts?where.counter.lt=40&fields=id,title,longText'
+ })
+ equal(res.statusCode, 200, 'GET /posts?where.counter.lt=40 status code')
+ same(res.json(), [{
+ id: '1',
+ title: 'Dog',
+ longText: 'Foo'
+ }, {
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }], 'GET /posts?where.counter.lt=40 response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/posts?where.counter.lte=30&fields=id,title,longText'
+ })
+ equal(res.statusCode, 200, 'GET /posts?where.counter.lte=30 status code')
+ same(res.json(), [{
+ id: '1',
+ title: 'Dog',
+ longText: 'Foo'
+ }, {
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }], 'GET /posts?where.counter.lte=30 response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/posts?where.counter.gte=20&fields=id,title,longText'
+ })
+ equal(res.statusCode, 200, 'GET /posts?where.counter.gte=20 status code')
+ same(res.json(), [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }, {
+ id: '4',
+ title: 'Duck',
+ longText: 'A duck tale'
+ }], 'GET /posts?where.counter.gte=20 response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/posts?where.counter.in=20,30&fields=id,title,longText'
+ })
+ equal(res.statusCode, 200, 'GET /posts?where.counter.in=20,30 status code')
+ same(res.json(), [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }], 'GET /posts?where.counter.in=20,30 response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/posts?where.counter.nin=10,40&fields=id,title,longText'
+ })
+ equal(res.statusCode, 200, 'GET /posts?where.counter.nin=10,40 status code')
+ same(res.json(), [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }], 'GET /posts?where.counter.nin=10,40 response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/posts?where.counter.gt=10&where.counter.lt=40&fields=id,title,longText'
+ })
+ equal(res.statusCode, 200, 'GET /posts?where.counter.gt=10&where.counter.lt=40 status code')
+ same(res.json(), [{
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }], 'GET /posts?where.counter.gt=10&where.counter.lt=40 response')
+ }
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/posts?where.title.in=Dog,Cat&fields=id,title,longText'
+ })
+ equal(res.statusCode, 200, 'GET /posts?where.title.in=Dog,Cat status code')
+ same(res.json(), [{
+ id: '1',
+ title: 'Dog',
+ longText: 'Foo'
+ }, {
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }], 'GET /posts?where.title.in=Dog,Cat response')
+ }
+
+ // Unknown where.* properties are currently ignored
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/posts?where.foo.in=Dog,Cat&fields=id,title,longText'
+ })
+ equal(res.statusCode, 200, 'GET /posts?where.foo.in=Dog,Cat status code')
+ same(res.json(), [{
+ id: '1',
+ title: 'Dog',
+ longText: 'Foo'
+ }, {
+ id: '2',
+ title: 'Cat',
+ longText: 'Bar'
+ }, {
+ id: '3',
+ title: 'Mouse',
+ longText: 'Baz'
+ }, {
+ id: '4',
+ title: 'Duck',
+ longText: 'A duck tale'
+ }], 'GET /posts?where.foo.in=Dog,Cat response')
+ }
+})
+
+test('nested where', async (t) => {
+ const { pass, teardown, same, equal, matchSnapshot } = t
+ t.snapshotFile = resolve(__dirname, 'tap-snapshots', 'where-openapi-2.cjs')
+ const app = fastify()
+ app.register(sqlMapper, {
+ ...connInfo,
+ async onDatabaseLoad (db, sql) {
+ pass('onDatabaseLoad called')
+
+ await clear(db, sql)
+
+ if (isMysql) {
+ await db.query(sql`
+ CREATE TABLE owners (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ CREATE TABLE posts (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER,
+ owner_id BIGINT UNSIGNED,
+ FOREIGN KEY (owner_id) REFERENCES owners(id) ON DELETE CASCADE
+ );
+ `)
+ } else if (isSQLite) {
+ await db.query(sql`
+ CREATE TABLE owners (
+ id INTEGER PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ `)
+
+ await db.query(sql`
+ CREATE TABLE posts (
+ id INTEGER PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER,
+ owner_id BIGINT UNSIGNED,
+ FOREIGN KEY (owner_id) REFERENCES owners(id) ON DELETE CASCADE
+ );
+ `)
+ } else {
+ await db.query(sql`
+ CREATE TABLE owners (
+ id SERIAL PRIMARY KEY,
+ name VARCHAR(255)
+ );
+ CREATE TABLE posts (
+ id SERIAL PRIMARY KEY,
+ title VARCHAR(42),
+ long_text TEXT,
+ counter INTEGER,
+ owner_id INTEGER REFERENCES owners(id)
+ );`)
+ }
+ }
+ })
+ app.register(sqlOpenAPI)
+ teardown(app.close.bind(app))
+
+ await app.ready()
+
+ {
+ const res = await app.inject({
+ method: 'GET',
+ url: '/documentation/json'
+ })
+ const openapi = res.json()
+ // console.log(JSON.stringify(openapi, null, 2))
+ matchSnapshot(openapi, 'matches expected OpenAPI defs')
+ }
+
+ const owners = [{
+ name: 'Matteo'
+ }, {
+ name: 'Luca'
+ }]
+
+ const posts = [{
+ title: 'Dog',
+ longText: 'Foo',
+ counter: 10
+ }, {
+ title: 'Cat',
+ longText: 'Bar',
+ counter: 20
+ }, {
+ title: 'Mouse',
+ longText: 'Baz',
+ counter: 30
+ }, {
+ title: 'Duck',
+ longText: 'A duck tale',
+ counter: 40
+ }]
+
+ {
+ const toAssign = [...posts]
+ for (const body of owners) {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/owners',
+ body
+ })
+ equal(res.statusCode, 200, 'POST /owners status code')
+ const ownerId = res.json().id
+ // works because we have 2 owners and 4 posts
+ toAssign.shift().ownerId = ownerId
+ toAssign.shift().ownerId = ownerId
+ }
+
+ for (const body of posts) {
+ const res = await app.inject({
+ method: 'POST',
+ url: '/posts',
+ body
+ })
+ equal(res.statusCode, 200, 'POST /posts status code')
+ }
+ }
+
+ {
+ const res1 = await app.inject({
+ method: 'GET',
+ url: '/owners?fields=id,name'
+ })
+
+ equal(res1.statusCode, 200, 'GET /owners status code')
+ const expected = [...posts]
+ for (const owner of res1.json()) {
+ const res2 = await app.inject({
+ method: 'GET',
+ url: `/posts?where.ownerId.eq=${owner.id}&fields=title,longText,counter`
+ })
+ equal(res2.statusCode, 200, 'GET /posts status code')
+ same(res2.json(), [expected.shift(), expected.shift()], 'GET /posts response')
+ }
+ }
+})
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
new file mode 100644
index 0000000000..8c98610466
--- /dev/null
+++ b/pnpm-lock.yaml
@@ -0,0 +1,7395 @@
+lockfileVersion: 5.4
+
+importers:
+
+ .:
+ specifiers:
+ '@fastify/pre-commit': ^2.0.2
+ desm: ^1.2.0
+ dependencies:
+ desm: 1.3.0
+ devDependencies:
+ '@fastify/pre-commit': 2.0.2
+
+ packages/authenticate:
+ specifiers:
+ '@platformatic/config': workspace:*
+ c8: ^7.12.0
+ colorette: ^2.0.19
+ commist: ^3.0.0
+ es-main: ^1.2.0
+ minimist: ^1.2.6
+ open: ^8.4.0
+ snazzy: ^9.0.0
+ standard: ^17.0.0
+ tap: ^16.3.0
+ undici: ^5.8.2
+ dependencies:
+ '@platformatic/config': link:../config
+ colorette: 2.0.19
+ commist: 3.1.2
+ es-main: 1.2.0
+ minimist: 1.2.6
+ open: 8.4.0
+ undici: 5.10.0
+ devDependencies:
+ c8: 7.12.0
+ snazzy: 9.0.0
+ standard: 17.0.0
+ tap: 16.3.0
+
+ packages/cli:
+ specifiers:
+ '@platformatic/authenticate': workspace:*
+ '@platformatic/db': workspace:*
+ c8: ^7.11.0
+ colorette: ^2.0.19
+ commist: ^3.1.2
+ desm: ^1.2.0
+ execa: ^6.1.0
+ help-me: ^4.1.0
+ minimist: ^1.2.6
+ snazzy: ^9.0.0
+ split2: ^4.1.0
+ standard: ^17.0.0
+ tap: ^16.0.0
+ dependencies:
+ '@platformatic/authenticate': link:../authenticate
+ '@platformatic/db': link:../db
+ colorette: 2.0.19
+ commist: 3.1.2
+ desm: 1.3.0
+ help-me: 4.1.0
+ minimist: 1.2.6
+ devDependencies:
+ c8: 7.12.0
+ execa: 6.1.0
+ snazzy: 9.0.0
+ split2: 4.1.0
+ standard: 17.0.0
+ tap: 16.3.0
+
+ packages/config:
+ specifiers:
+ '@iarna/toml': ^2.2.5
+ ajv: ^8.11.0
+ c8: ^7.11.0
+ dotenv: ^16.0.1
+ json5: ^2.2.1
+ minimatch: ^5.1.0
+ pupa: ^3.1.0
+ snazzy: ^9.0.0
+ standard: ^17.0.0
+ tap: ^16.0.0
+ undici: ^5.8.0
+ yaml: ^2.1.1
+ dependencies:
+ '@iarna/toml': 2.2.5
+ ajv: 8.11.0
+ c8: 7.12.0
+ dotenv: 16.0.2
+ json5: 2.2.1
+ minimatch: 5.1.0
+ pupa: 3.1.0
+ undici: 5.10.0
+ yaml: 2.1.1
+ devDependencies:
+ snazzy: 9.0.0
+ standard: 17.0.0
+ tap: 16.3.0
+
+ packages/db:
+ specifiers:
+ '@databases/pg': ^5.3.0
+ '@databases/sqlite': ^4.0.0
+ '@fastify/basic-auth': ^4.0.0
+ '@fastify/cors': ^8.0.0
+ '@fastify/deepmerge': ^1.1.0
+ '@fastify/restartable': ^1.2.1
+ '@fastify/static': ^6.5.0
+ '@fastify/swagger': ^7.4.1
+ '@fastify/under-pressure': ^8.0.0
+ '@platformatic/config': workspace:*
+ '@platformatic/db-authorization': workspace:*
+ '@platformatic/db-core': workspace:*
+ '@platformatic/db-dashboard': workspace:*
+ '@platformatic/sql-graphql': workspace:*
+ '@platformatic/sql-json-schema-mapper': workspace:*
+ '@platformatic/sql-mapper': workspace:*
+ c8: ^7.11.0
+ close-with-grace: ^1.1.0
+ commist: ^3.1.2
+ desm: ^1.2.0
+ dtsgenerator: ^3.16.1
+ env-schema: ^5.0.0
+ es-main: ^1.2.0
+ execa: ^6.1.0
+ fastify: ^4.6.0
+ fastify-isolate: ^0.7.0
+ fastify-metrics: ^9.2.1
+ fastify-plugin: ^4.1.0
+ fastify-print-routes: ^2.0.0
+ graphql: ^16.6.0
+ help-me: ^4.1.0
+ minimatch: ^5.1.0
+ minimist: ^1.2.6
+ pino: ^8.4.1
+ pino-pretty: ^9.0.0
+ postgrator: ^7.1.0
+ snazzy: ^9.0.0
+ split2: ^4.1.0
+ standard: ^17.0.0
+ strip-ansi: ^7.0.1
+ tap: ^16.0.0
+ tsd: 0.24.1
+ ua-parser-js: ^1.0.2
+ undici: ^5.8.0
+ why-is-node-running: ^2.2.2
+ dependencies:
+ '@fastify/basic-auth': 4.0.0
+ '@fastify/cors': 8.1.0
+ '@fastify/deepmerge': 1.1.0
+ '@fastify/restartable': 1.2.1
+ '@fastify/static': 6.5.0
+ '@fastify/swagger': 7.5.1
+ '@fastify/under-pressure': 8.1.0
+ '@platformatic/config': link:../config
+ '@platformatic/db-authorization': link:../db-authorization
+ '@platformatic/db-core': link:../db-core
+ '@platformatic/db-dashboard': link:../db-dashboard
+ close-with-grace: 1.1.0
+ commist: 3.1.2
+ desm: 1.3.0
+ dtsgenerator: 3.16.1
+ env-schema: 5.0.0
+ es-main: 1.2.0
+ execa: 6.1.0
+ fastify: 4.6.0
+ fastify-isolate: 0.7.0
+ fastify-metrics: 9.2.2_fastify@4.6.0
+ fastify-plugin: 4.2.1
+ fastify-print-routes: 2.0.4
+ graphql: 16.6.0
+ help-me: 4.1.0
+ minimatch: 5.1.0
+ minimist: 1.2.6
+ pino: 8.5.0
+ pino-pretty: 9.1.0
+ postgrator: 7.1.0
+ ua-parser-js: 1.0.2
+ devDependencies:
+ '@databases/pg': 5.4.1
+ '@databases/sqlite': 4.0.1
+ '@platformatic/sql-graphql': link:../sql-graphql
+ '@platformatic/sql-json-schema-mapper': link:../sql-json-schema-mapper
+ '@platformatic/sql-mapper': link:../sql-mapper
+ c8: 7.12.0
+ snazzy: 9.0.0
+ split2: 4.1.0
+ standard: 17.0.0
+ strip-ansi: 7.0.1
+ tap: 16.3.0
+ tsd: 0.24.1
+ undici: 5.10.0
+ why-is-node-running: 2.2.2
+
+ packages/db-authorization:
+ specifiers:
+ '@fastify/cookie': ^8.0.0
+ '@fastify/error': ^3.0.0
+ '@fastify/jwt': ^6.3.1
+ '@fastify/session': ^10.0.0
+ '@platformatic/db-core': workspace:*
+ fast-jwt: ^1.7.1
+ fastify: ^4.6.0
+ fastify-plugin: ^4.1.0
+ get-jwks: ^8.0.0
+ mercurius: ^11.0.0
+ snazzy: ^9.0.0
+ standard: ^17.0.0
+ tap: ^16.0.0
+ undici: ^5.6.1
+ dependencies:
+ '@fastify/error': 3.0.0
+ '@fastify/jwt': 6.3.2
+ fastify-plugin: 4.2.1
+ get-jwks: 8.0.0
+ undici: 5.10.0
+ devDependencies:
+ '@fastify/cookie': 8.1.0
+ '@fastify/session': 10.0.0
+ '@platformatic/db-core': link:../db-core
+ fast-jwt: 1.7.1
+ fastify: 4.6.0
+ mercurius: 11.0.0_graphql@16.6.0
+ snazzy: 9.0.0
+ standard: 17.0.0
+ tap: 16.3.0
+
+ packages/db-core:
+ specifiers:
+ '@platformatic/sql-graphql': workspace:*
+ '@platformatic/sql-mapper': workspace:*
+ '@platformatic/sql-openapi': workspace:*
+ fastify: ^4.6.0
+ fastify-plugin: ^4.1.0
+ mercurius: ^11.0.0
+ snazzy: ^9.0.0
+ standard: ^17.0.0
+ tap: ^16.0.0
+ dependencies:
+ '@platformatic/sql-graphql': link:../sql-graphql
+ '@platformatic/sql-mapper': link:../sql-mapper
+ '@platformatic/sql-openapi': link:../sql-openapi
+ fastify-plugin: 4.2.1
+ devDependencies:
+ fastify: 4.6.0
+ mercurius: 11.0.0_graphql@16.6.0
+ snazzy: 9.0.0
+ standard: 17.0.0
+ tap: 16.3.0
+
+ packages/db-dashboard:
+ specifiers:
+ '@fastify/static': ^6.5.0
+ '@graphiql/toolkit': ^0.8.0
+ '@playwright/test': ^1.24.2
+ '@types/react': ^18.0.17
+ '@types/react-dom': ^18.0.6
+ '@vitejs/plugin-react': ^2.0.0
+ bulma: ^0.9.4
+ graphiql: ^2.0.0
+ happy-dom: ^6.0.4
+ history: ^5.3.0
+ json-format-highlight: ^1.0.4
+ jsoneditor: ^9.9.0
+ playwright: ^1.24.2
+ react: ^18.2.0
+ react-dom: ^18.2.0
+ react-hot-toast: ^2.3.0
+ react-router-dom: ^6.3.0
+ react-test-renderer: ^18.2.0
+ snazzy: ^9.0.0
+ standard: ^17.0.0
+ swagger-ui-react: 4.13.0
+ vite: ^3.0.4
+ vitest: ^0.23.0
+ dependencies:
+ '@fastify/static': 6.5.0
+ devDependencies:
+ '@graphiql/toolkit': 0.8.0_graphql@16.6.0
+ '@playwright/test': 1.25.2
+ '@types/react': 18.0.20
+ '@types/react-dom': 18.0.6
+ '@vitejs/plugin-react': 2.1.0_vite@3.1.1
+ bulma: 0.9.4
+ graphiql: 2.0.7_xst6jk3wj5nubsgv7xxqfuksc4
+ happy-dom: 6.0.4
+ history: 5.3.0
+ json-format-highlight: 1.0.4
+ jsoneditor: 9.9.0
+ playwright: 1.25.2
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ react-hot-toast: 2.4.0_owo25xnefcwdq3zjgtohz6dbju
+ react-router-dom: 6.4.0_biqbaboplfbrettd7655fr4n2y
+ react-test-renderer: 18.2.0_react@18.2.0
+ snazzy: 9.0.0
+ standard: 17.0.0
+ swagger-ui-react: 4.13.0_biqbaboplfbrettd7655fr4n2y
+ vite: 3.1.1
+ vitest: 0.23.2_happy-dom@6.0.4
+
+ packages/sql-graphql:
+ specifiers:
+ '@platformatic/sql-mapper': workspace:*
+ camelcase: ^6.0.0
+ fastify: ^4.6.0
+ fastify-plugin: ^4.1.0
+ graphql: ^16.6.0
+ graphql-scalars: ^1.13.1
+ inflected: ^2.1.0
+ mercurius: ^11.0.0
+ snazzy: ^9.0.0
+ standard: ^17.0.0
+ tap: ^16.0.0
+ tsd: ^0.23.0
+ dependencies:
+ camelcase: 6.3.0
+ fastify-plugin: 4.2.1
+ graphql: 16.6.0
+ graphql-scalars: 1.18.0_graphql@16.6.0
+ inflected: 2.1.0
+ mercurius: 11.0.0_graphql@16.6.0
+ devDependencies:
+ '@platformatic/sql-mapper': link:../sql-mapper
+ fastify: 4.6.0
+ snazzy: 9.0.0
+ standard: 17.0.0
+ tap: 16.3.0
+ tsd: 0.23.0
+
+ packages/sql-json-schema-mapper:
+ specifiers:
+ '@platformatic/sql-mapper': workspace:*
+ fastify: ^4.6.0
+ snazzy: ^9.0.0
+ standard: ^17.0.0
+ tap: ^16.0.0
+ devDependencies:
+ '@platformatic/sql-mapper': link:../sql-mapper
+ fastify: 4.6.0
+ snazzy: 9.0.0
+ standard: 17.0.0
+ tap: 16.3.0
+
+ packages/sql-mapper:
+ specifiers:
+ '@databases/mysql': ^5.2.0
+ '@databases/pg': ^5.3.0
+ '@databases/sql': ^3.2.0
+ '@databases/sqlite': ^4.0.0
+ camelcase: ^6.0.0
+ fastify: ^4.5.3
+ fastify-plugin: ^4.1.0
+ inflected: ^2.1.0
+ snazzy: ^9.0.0
+ standard: ^17.0.0
+ tap: ^16.0.0
+ tsd: ^0.23.0
+ dependencies:
+ '@databases/mysql': 5.2.1
+ '@databases/pg': 5.4.1
+ '@databases/sql': 3.2.0
+ '@databases/sqlite': 4.0.1
+ camelcase: 6.3.0
+ fastify-plugin: 4.2.1
+ inflected: 2.1.0
+ devDependencies:
+ fastify: 4.6.0
+ snazzy: 9.0.0
+ standard: 17.0.0
+ tap: 16.3.0
+ tsd: 0.23.0
+
+ packages/sql-openapi:
+ specifiers:
+ '@fastify/deepmerge': ^1.1.0
+ '@fastify/swagger': ^7.4.1
+ '@platformatic/sql-json-schema-mapper': workspace:*
+ '@platformatic/sql-mapper': workspace:*
+ camelcase: ^6.0.0
+ fastify: ^4.6.0
+ fastify-plugin: ^4.1.0
+ inflected: ^2.1.0
+ mercurius: ^11.0.0
+ openapi-types: ^12.0.2
+ snazzy: ^9.0.0
+ standard: ^17.0.0
+ tap: ^16.0.0
+ tsd: ^0.24.0
+ dependencies:
+ '@fastify/deepmerge': 1.1.0
+ '@fastify/swagger': 7.5.1
+ '@platformatic/sql-json-schema-mapper': link:../sql-json-schema-mapper
+ camelcase: 6.3.0
+ fastify-plugin: 4.2.1
+ inflected: 2.1.0
+ devDependencies:
+ '@platformatic/sql-mapper': link:../sql-mapper
+ fastify: 4.6.0
+ mercurius: 11.0.0_graphql@16.6.0
+ openapi-types: 12.0.2
+ snazzy: 9.0.0
+ standard: 17.0.0
+ tap: 16.3.0
+ tsd: 0.24.1
+
+packages:
+
+ /@ampproject/remapping/2.2.0:
+ resolution: {integrity: sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==}
+ engines: {node: '>=6.0.0'}
+ dependencies:
+ '@jridgewell/gen-mapping': 0.1.1
+ '@jridgewell/trace-mapping': 0.3.15
+ dev: true
+
+ /@babel/code-frame/7.18.6:
+ resolution: {integrity: sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/highlight': 7.18.6
+
+ /@babel/compat-data/7.19.1:
+ resolution: {integrity: sha512-72a9ghR0gnESIa7jBN53U32FOVCEoztyIlKaNoU05zRhEecduGK9L9c3ww7Mp06JiR+0ls0GBPFJQwwtjn9ksg==}
+ engines: {node: '>=6.9.0'}
+ dev: true
+
+ /@babel/core/7.19.1:
+ resolution: {integrity: sha512-1H8VgqXme4UXCRv7/Wa1bq7RVymKOzC7znjyFM8KiEzwFqcKUKYNoQef4GhdklgNvoBXyW4gYhuBNCM5o1zImw==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@ampproject/remapping': 2.2.0
+ '@babel/code-frame': 7.18.6
+ '@babel/generator': 7.19.0
+ '@babel/helper-compilation-targets': 7.19.1_@babel+core@7.19.1
+ '@babel/helper-module-transforms': 7.19.0
+ '@babel/helpers': 7.19.0
+ '@babel/parser': 7.19.1
+ '@babel/template': 7.18.10
+ '@babel/traverse': 7.19.1
+ '@babel/types': 7.19.0
+ convert-source-map: 1.8.0
+ debug: 4.3.4
+ gensync: 1.0.0-beta.2
+ json5: 2.2.1
+ semver: 6.3.0
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /@babel/generator/7.19.0:
+ resolution: {integrity: sha512-S1ahxf1gZ2dpoiFgA+ohK9DIpz50bJ0CWs7Zlzb54Z4sG8qmdIrGrVqmy1sAtTVRb+9CU6U8VqT9L0Zj7hxHVg==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/types': 7.19.0
+ '@jridgewell/gen-mapping': 0.3.2
+ jsesc: 2.5.2
+ dev: true
+
+ /@babel/helper-annotate-as-pure/7.18.6:
+ resolution: {integrity: sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/types': 7.19.0
+ dev: true
+
+ /@babel/helper-compilation-targets/7.19.1_@babel+core@7.19.1:
+ resolution: {integrity: sha512-LlLkkqhCMyz2lkQPvJNdIYU7O5YjWRgC2R4omjCTpZd8u8KMQzZvX4qce+/BluN1rcQiV7BoGUpmQ0LeHerbhg==}
+ engines: {node: '>=6.9.0'}
+ peerDependencies:
+ '@babel/core': ^7.0.0
+ dependencies:
+ '@babel/compat-data': 7.19.1
+ '@babel/core': 7.19.1
+ '@babel/helper-validator-option': 7.18.6
+ browserslist: 4.21.4
+ semver: 6.3.0
+ dev: true
+
+ /@babel/helper-environment-visitor/7.18.9:
+ resolution: {integrity: sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==}
+ engines: {node: '>=6.9.0'}
+ dev: true
+
+ /@babel/helper-function-name/7.19.0:
+ resolution: {integrity: sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/template': 7.18.10
+ '@babel/types': 7.19.0
+ dev: true
+
+ /@babel/helper-hoist-variables/7.18.6:
+ resolution: {integrity: sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/types': 7.19.0
+ dev: true
+
+ /@babel/helper-module-imports/7.18.6:
+ resolution: {integrity: sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/types': 7.19.0
+ dev: true
+
+ /@babel/helper-module-transforms/7.19.0:
+ resolution: {integrity: sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/helper-environment-visitor': 7.18.9
+ '@babel/helper-module-imports': 7.18.6
+ '@babel/helper-simple-access': 7.18.6
+ '@babel/helper-split-export-declaration': 7.18.6
+ '@babel/helper-validator-identifier': 7.19.1
+ '@babel/template': 7.18.10
+ '@babel/traverse': 7.19.1
+ '@babel/types': 7.19.0
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /@babel/helper-plugin-utils/7.19.0:
+ resolution: {integrity: sha512-40Ryx7I8mT+0gaNxm8JGTZFUITNqdLAgdg0hXzeVZxVD6nFsdhQvip6v8dqkRHzsz1VFpFAaOCHNn0vKBL7Czw==}
+ engines: {node: '>=6.9.0'}
+ dev: true
+
+ /@babel/helper-simple-access/7.18.6:
+ resolution: {integrity: sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/types': 7.19.0
+ dev: true
+
+ /@babel/helper-split-export-declaration/7.18.6:
+ resolution: {integrity: sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/types': 7.19.0
+ dev: true
+
+ /@babel/helper-string-parser/7.18.10:
+ resolution: {integrity: sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw==}
+ engines: {node: '>=6.9.0'}
+ dev: true
+
+ /@babel/helper-validator-identifier/7.19.1:
+ resolution: {integrity: sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==}
+ engines: {node: '>=6.9.0'}
+
+ /@babel/helper-validator-option/7.18.6:
+ resolution: {integrity: sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==}
+ engines: {node: '>=6.9.0'}
+ dev: true
+
+ /@babel/helpers/7.19.0:
+ resolution: {integrity: sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/template': 7.18.10
+ '@babel/traverse': 7.19.1
+ '@babel/types': 7.19.0
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /@babel/highlight/7.18.6:
+ resolution: {integrity: sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/helper-validator-identifier': 7.19.1
+ chalk: 2.4.2
+ js-tokens: 4.0.0
+
+ /@babel/parser/7.19.1:
+ resolution: {integrity: sha512-h7RCSorm1DdTVGJf3P2Mhj3kdnkmF/EiysUkzS2TdgAYqyjFdMQJbVuXOBej2SBJaXan/lIVtT6KkGbyyq753A==}
+ engines: {node: '>=6.0.0'}
+ hasBin: true
+ dependencies:
+ '@babel/types': 7.19.0
+ dev: true
+
+ /@babel/plugin-syntax-jsx/7.18.6_@babel+core@7.19.1:
+ resolution: {integrity: sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q==}
+ engines: {node: '>=6.9.0'}
+ peerDependencies:
+ '@babel/core': ^7.0.0-0
+ dependencies:
+ '@babel/core': 7.19.1
+ '@babel/helper-plugin-utils': 7.19.0
+ dev: true
+
+ /@babel/plugin-transform-react-jsx-development/7.18.6_@babel+core@7.19.1:
+ resolution: {integrity: sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA==}
+ engines: {node: '>=6.9.0'}
+ peerDependencies:
+ '@babel/core': ^7.0.0-0
+ dependencies:
+ '@babel/core': 7.19.1
+ '@babel/plugin-transform-react-jsx': 7.19.0_@babel+core@7.19.1
+ dev: true
+
+ /@babel/plugin-transform-react-jsx-self/7.18.6_@babel+core@7.19.1:
+ resolution: {integrity: sha512-A0LQGx4+4Jv7u/tWzoJF7alZwnBDQd6cGLh9P+Ttk4dpiL+J5p7NSNv/9tlEFFJDq3kjxOavWmbm6t0Gk+A3Ig==}
+ engines: {node: '>=6.9.0'}
+ peerDependencies:
+ '@babel/core': ^7.0.0-0
+ dependencies:
+ '@babel/core': 7.19.1
+ '@babel/helper-plugin-utils': 7.19.0
+ dev: true
+
+ /@babel/plugin-transform-react-jsx-source/7.18.6_@babel+core@7.19.1:
+ resolution: {integrity: sha512-utZmlASneDfdaMh0m/WausbjUjEdGrQJz0vFK93d7wD3xf5wBtX219+q6IlCNZeguIcxS2f/CvLZrlLSvSHQXw==}
+ engines: {node: '>=6.9.0'}
+ peerDependencies:
+ '@babel/core': ^7.0.0-0
+ dependencies:
+ '@babel/core': 7.19.1
+ '@babel/helper-plugin-utils': 7.19.0
+ dev: true
+
+ /@babel/plugin-transform-react-jsx/7.19.0_@babel+core@7.19.1:
+ resolution: {integrity: sha512-UVEvX3tXie3Szm3emi1+G63jyw1w5IcMY0FSKM+CRnKRI5Mr1YbCNgsSTwoTwKphQEG9P+QqmuRFneJPZuHNhg==}
+ engines: {node: '>=6.9.0'}
+ peerDependencies:
+ '@babel/core': ^7.0.0-0
+ dependencies:
+ '@babel/core': 7.19.1
+ '@babel/helper-annotate-as-pure': 7.18.6
+ '@babel/helper-module-imports': 7.18.6
+ '@babel/helper-plugin-utils': 7.19.0
+ '@babel/plugin-syntax-jsx': 7.18.6_@babel+core@7.19.1
+ '@babel/types': 7.19.0
+ dev: true
+
+ /@babel/runtime-corejs3/7.19.1:
+ resolution: {integrity: sha512-j2vJGnkopRzH+ykJ8h68wrHnEUmtK//E723jjixiAl/PPf6FhqY/vYRcMVlNydRKQjQsTsYEjpx+DZMIvnGk/g==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ core-js-pure: 3.25.1
+ regenerator-runtime: 0.13.9
+ dev: true
+
+ /@babel/runtime/7.19.0:
+ resolution: {integrity: sha512-eR8Lo9hnDS7tqkO7NsV+mKvCmv5boaXFSZ70DnfhcgiEne8hv9oCEd36Klw74EtizEqLsy4YnW8UWwpBVolHZA==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ regenerator-runtime: 0.13.9
+ dev: true
+
+ /@babel/template/7.18.10:
+ resolution: {integrity: sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/code-frame': 7.18.6
+ '@babel/parser': 7.19.1
+ '@babel/types': 7.19.0
+ dev: true
+
+ /@babel/traverse/7.19.1:
+ resolution: {integrity: sha512-0j/ZfZMxKukDaag2PtOPDbwuELqIar6lLskVPPJDjXMXjfLb1Obo/1yjxIGqqAJrmfaTIY3z2wFLAQ7qSkLsuA==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/code-frame': 7.18.6
+ '@babel/generator': 7.19.0
+ '@babel/helper-environment-visitor': 7.18.9
+ '@babel/helper-function-name': 7.19.0
+ '@babel/helper-hoist-variables': 7.18.6
+ '@babel/helper-split-export-declaration': 7.18.6
+ '@babel/parser': 7.19.1
+ '@babel/types': 7.19.0
+ debug: 4.3.4
+ globals: 11.12.0
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /@babel/types/7.19.0:
+ resolution: {integrity: sha512-YuGopBq3ke25BVSiS6fgF49Ul9gH1x70Bcr6bqRLjWCkcX8Hre1/5+z+IiWOIerRMSSEfGZVB9z9kyq7wVs9YA==}
+ engines: {node: '>=6.9.0'}
+ dependencies:
+ '@babel/helper-string-parser': 7.18.10
+ '@babel/helper-validator-identifier': 7.19.1
+ to-fast-properties: 2.0.0
+ dev: true
+
+ /@bcoe/v8-coverage/0.2.3:
+ resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==}
+
+ /@braintree/sanitize-url/6.0.0:
+ resolution: {integrity: sha512-mgmE7XBYY/21erpzhexk4Cj1cyTQ9LzvnTxtzM17BJ7ERMNE6W72mQRo0I1Ud8eFJ+RVVIcBNhLFZ3GX4XFz5w==}
+ dev: true
+
+ /@codemirror/language/0.20.2:
+ resolution: {integrity: sha512-WB3Bnuusw0xhVvhBocieYKwJm04SOk5bPoOEYksVHKHcGHFOaYaw+eZVxR4gIqMMcGzOIUil0FsCmFk8yrhHpw==}
+ dependencies:
+ '@codemirror/state': 0.20.1
+ '@codemirror/view': 0.20.7
+ '@lezer/common': 0.16.1
+ '@lezer/highlight': 0.16.0
+ '@lezer/lr': 0.16.3
+ style-mod: 4.0.0
+ dev: true
+
+ /@codemirror/state/0.20.1:
+ resolution: {integrity: sha512-ms0tlV5A02OK0pFvTtSUGMLkoarzh1F8mr6jy1cD7ucSC2X/VLHtQCxfhdSEGqTYlQF2hoZtmLv+amqhdgbwjQ==}
+ dev: true
+
+ /@codemirror/view/0.20.7:
+ resolution: {integrity: sha512-pqEPCb9QFTOtHgAH5XU/oVy9UR/Anj6r+tG5CRmkNVcqSKEPmBU05WtN/jxJCFZBXf6HumzWC9ydE4qstO3TxQ==}
+ dependencies:
+ '@codemirror/state': 0.20.1
+ style-mod: 4.0.0
+ w3c-keyname: 2.2.6
+ dev: true
+
+ /@databases/connection-pool/1.1.0:
+ resolution: {integrity: sha512-/12/SNgl0V77mJTo5SX3yGPz4c9XGQwAlCfA0vlfs/0HcaErNpYXpmhj0StET07w6TmTJTnaUgX2EPcQK9ez5A==}
+ dependencies:
+ '@databases/queue': 1.0.1
+ is-promise: 4.0.0
+
+ /@databases/escape-identifier/1.0.3:
+ resolution: {integrity: sha512-Su36iSVzaHxpVdISVMViUX/32sLvzxVgjZpYhzhotxZUuLo11GVWsiHwqkvUZijTLUxcDmUqEwGJO3O/soLuZA==}
+ dependencies:
+ '@databases/validate-unicode': 1.0.0
+
+ /@databases/lock/2.1.0:
+ resolution: {integrity: sha512-ReWnFE5qeCuO2SA5h5fDh/hE/vMolA+Epe6xkAQP1FL2nhnsTCYwN2JACk/kWctR4OQoh0njBjPZ0yfIptclcA==}
+ dependencies:
+ '@databases/queue': 1.0.1
+
+ /@databases/mysql-config/3.1.0:
+ resolution: {integrity: sha512-0WCxSymQgFvR6df/83jmPrP3+aTy/bHjtbgWlKMv4hc2ayT8J9RLArBVx6Fz8o7KK+g+5tyfayaIPDYcEJKhkQ==}
+ dependencies:
+ cosmiconfig: 5.2.1
+ funtypes: 4.2.0
+ dev: false
+
+ /@databases/mysql/5.2.1:
+ resolution: {integrity: sha512-DwH6mJX2K1P2Pf+93BIrhDp89Y/RvmYY74CDcMxj680lk8kV2tUN7udBkMePnDHqRG7wLxQ91deEezMeFMLpJg==}
+ dependencies:
+ '@babel/code-frame': 7.18.6
+ '@databases/escape-identifier': 1.0.3
+ '@databases/mysql-config': 3.1.0
+ '@databases/push-to-async-iterable': 3.0.0
+ '@databases/shared': 3.0.1
+ '@databases/sql': 3.2.0
+ '@types/mysql': 2.15.21
+ mysql2: 2.3.3
+ dev: false
+
+ /@databases/pg-config/3.1.1:
+ resolution: {integrity: sha512-D2hLZMPAJak6CRTiH/Jl30htmoVvDXna7uHcBavadImfBZqLT5vcXFnkrpujSsvka8fSnTOnu3zPCm1tdiGTuA==}
+ dependencies:
+ cosmiconfig: 5.2.1
+ funtypes: 4.2.0
+
+ /@databases/pg-connection-string/1.0.0:
+ resolution: {integrity: sha512-8czOF9jlv7PlS7BPjnL82ynpDs1t8cu+C2jvdtMr37e8daPKMS7n1KfNE9xtr2Gq4QYKjynep097eYa5yIwcLA==}
+
+ /@databases/pg-data-type-id/3.0.0:
+ resolution: {integrity: sha512-VqW1csN8pRsWJxjPsGIC9FQ8wyenfmGv0P//BaeDMAu/giM3IXKxKM8fkScUSQ00uqFK/L1iHS5g6dgodF3XzA==}
+
+ /@databases/pg-errors/1.0.0:
+ resolution: {integrity: sha512-Yz3exbptZwOn4ZD/MSwY6z++XVyOFsMh5DERvSw3awRwJFnfdaqdeiIxxX0MVjM6KPihF0xxp8lPO7vTc5ydpw==}
+
+ /@databases/pg/5.4.1:
+ resolution: {integrity: sha512-V4BvwEwcrpZVEhCuBX4rwIGr1Idk68UMp+7rpqxFsl9SVIvHentX2wq7Nyclp7sxnQddEH8KsTCXt0eleRaihA==}
+ dependencies:
+ '@babel/code-frame': 7.18.6
+ '@databases/escape-identifier': 1.0.3
+ '@databases/pg-config': 3.1.1
+ '@databases/pg-connection-string': 1.0.0
+ '@databases/pg-data-type-id': 3.0.0
+ '@databases/pg-errors': 1.0.0
+ '@databases/push-to-async-iterable': 3.0.0
+ '@databases/shared': 3.0.1
+ '@databases/split-sql-query': 1.0.3_@databases+sql@3.2.0
+ '@databases/sql': 3.2.0
+ '@types/cuid': 1.3.1
+ assert-never: 1.2.1
+ cuid: 2.1.8
+ pg: 8.8.0
+ pg-cursor: 2.7.4_pg@8.8.0
+ transitivePeerDependencies:
+ - pg-native
+
+ /@databases/push-to-async-iterable/3.0.0:
+ resolution: {integrity: sha512-xwu/yNgINdMU+fn6UwFsxh+pa6UrVPafY+0qm0RK0/nKyjllfDqSbwK4gSmdmLEwPYxKwch9CAE3P8NxN1hPSg==}
+ dependencies:
+ '@databases/queue': 1.0.1
+
+ /@databases/queue/1.0.1:
+ resolution: {integrity: sha512-dqRU+/aQ4lhFzjPIkIhjB0+UEKMb76FoBgHOJUTcEblgatr/IhdhHliT3VVwcImXh35Mz297PAXE4yFM4eYWUQ==}
+
+ /@databases/shared/3.0.1:
+ resolution: {integrity: sha512-1e4475XBUuNIoR5zd7MplJ316hqqWRchZDEC0jB5O3D5i4dCTDuQVQuW14FYFyCvOays692BHZf41FUZH4bEyA==}
+ dependencies:
+ '@databases/connection-pool': 1.1.0
+ '@databases/lock': 2.1.0
+ '@databases/queue': 1.0.1
+ '@databases/split-sql-query': 1.0.3_@databases+sql@3.2.0
+ '@databases/sql': 3.2.0
+ cuid: 2.1.8
+
+ /@databases/split-sql-query/1.0.3_@databases+sql@3.2.0:
+ resolution: {integrity: sha512-Q3UYX85e34yE9KXa095AJtJhBQ0NpLfC0kS9ydFKuNB25cto4YddY52RuXN81m2t0pS1Atg31ylNpKfNCnUPdA==}
+ peerDependencies:
+ '@databases/sql': '*'
+ dependencies:
+ '@databases/sql': 3.2.0
+
+ /@databases/sql/3.2.0:
+ resolution: {integrity: sha512-xQZzKIa0lvcdo0MYxnyFMVS1TRla9lpDSCYkobJl19vQEOJ9TqE4o8QBGRJNUfhSkbQIWyvMeBl3KBBbqyUVQQ==}
+
+ /@databases/sqlite/4.0.1:
+ resolution: {integrity: sha512-gOZg9JgIlcA/+J6ZAwp6w4oW95qXWTkrCSFWGaDe25DkKSRzoGrSnCv9bgX78fsecIbA3suACnFSNY3E1hxcbQ==}
+ dependencies:
+ '@databases/escape-identifier': 1.0.3
+ '@databases/sql': 3.2.0
+ '@types/sqlite3': 3.1.8
+ sqlite3: 5.1.1
+ then-queue: 1.3.0
+ transitivePeerDependencies:
+ - bluebird
+ - encoding
+ - supports-color
+
+ /@databases/validate-unicode/1.0.0:
+ resolution: {integrity: sha512-dLKqxGcymeVwEb/6c44KjOnzaAafFf0Wxa8xcfEjx/qOl3rdijsKYBAtIGhtVtOlpPf/PFKfgTuFurSPn/3B/g==}
+
+ /@esbuild/linux-loong64/0.15.7:
+ resolution: {integrity: sha512-IKznSJOsVUuyt7cDzzSZyqBEcZe+7WlBqTVXiF1OXP/4Nm387ToaXZ0fyLwI1iBlI/bzpxVq411QE2/Bt2XWWw==}
+ engines: {node: '>=12'}
+ cpu: [loong64]
+ os: [linux]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /@eslint/eslintrc/1.3.2:
+ resolution: {integrity: sha512-AXYd23w1S/bv3fTs3Lz0vjiYemS08jWkI3hYyS9I1ry+0f+Yjs1wm+sU0BS8qDOPrBIkp4qHYC16I8uVtpLajQ==}
+ engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+ dependencies:
+ ajv: 6.12.6
+ debug: 4.3.4
+ espree: 9.4.0
+ globals: 13.17.0
+ ignore: 5.2.0
+ import-fresh: 3.3.0
+ js-yaml: 4.1.0
+ minimatch: 3.1.2
+ strip-json-comments: 3.1.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /@fastify/accept-negotiator/1.0.0:
+ resolution: {integrity: sha512-4R/N2KfYeld7A5LGkai+iUFMahXcxxYbDp+XS2B1yuL3cdmZLJ9TlCnNzT3q5xFTqsYm0GPpinLUwfSwjcVjyA==}
+ engines: {node: '>=14'}
+
+ /@fastify/ajv-compiler/3.2.0:
+ resolution: {integrity: sha512-JrqgKmZoh1AJojDZk699DupQ9+tz5gSy7/w+5DrkXy5whM5IcqdV3SjG5qnOqgVJT1nPtUMDY0xYus2j6vwJiw==}
+ dependencies:
+ ajv: 8.11.0
+ ajv-formats: 2.1.1_ajv@8.11.0
+ fast-uri: 2.1.0
+
+ /@fastify/basic-auth/4.0.0:
+ resolution: {integrity: sha512-qx8n2BTutuCmzMw/7yQmvr1MOsCmasV/CljYg2CCBgAi4ljps627a6FpFX+xFj5wfjmYrvZQRcCMP5Oa67CJpA==}
+ dependencies:
+ basic-auth: 2.0.1
+ fastify-plugin: 3.0.1
+ http-errors: 2.0.0
+ dev: false
+
+ /@fastify/cookie/8.1.0:
+ resolution: {integrity: sha512-+BxpyK4KLAjDpXdWxOjl8yaKtAoqYZR+CE9+cNtdMDoACb8hcpGx9npkrdINl62EpCu06oIPluq8A4NUsi78ZA==}
+ dependencies:
+ cookie: 0.5.0
+ fastify-plugin: 4.2.1
+ dev: true
+
+ /@fastify/cors/8.1.0:
+ resolution: {integrity: sha512-1OmjwyxQZ8GePxa5t1Rpsn2qS56+1ouKMvZufpgJWhXtoCeM/ffA+PsNW8pyslPr4W0E27gVoFqtvHwhXW1U2w==}
+ dependencies:
+ fastify-plugin: 4.2.1
+ mnemonist: 0.39.2
+ dev: false
+
+ /@fastify/deepmerge/1.1.0:
+ resolution: {integrity: sha512-E8Hfdvs1bG6u0N4vN5Nty6JONUfTdOciyD5rn8KnEsLKIenvOVcr210BQR9t34PRkNyjqnMLGk3e0BsaxRdL+g==}
+
+ /@fastify/error/3.0.0:
+ resolution: {integrity: sha512-dPRyT40GiHRzSCll3/Jn2nPe25+E1VXc9tDwRAIKwFCxd5Np5wzgz1tmooWG3sV0qKgrBibihVoCna2ru4SEFg==}
+
+ /@fastify/fast-json-stringify-compiler/4.1.0:
+ resolution: {integrity: sha512-cTKBV2J9+u6VaKDhX7HepSfPSzw+F+TSd+k0wzifj4rG+4E5PjSFJCk19P8R6tr/72cuzgGd+mbB3jFT6lvAgw==}
+ dependencies:
+ fast-json-stringify: 5.3.0
+
+ /@fastify/jwt/6.3.2:
+ resolution: {integrity: sha512-LD41UNS55o1HyUlCiR1G2fDtqebvsSaxfWsVuerSLmyGWbCjfzNOPjEs4yUvMj2NjbV73ZzR3ZtR64vr1TeFYA==}
+ dependencies:
+ '@fastify/error': 3.0.0
+ '@lukeed/ms': 2.0.0
+ fast-jwt: 1.7.1
+ fastify-plugin: 4.2.1
+ steed: 1.1.3
+ dev: false
+
+ /@fastify/pre-commit/2.0.2:
+ resolution: {integrity: sha512-NCruP+jjsaj+kwDKAS1zQc2XVCVcBPnH2q4cYbgzoUqvblmXE/SD0MaVgZ4o9MUMnCG+z1YLBGJb1eXNi0SEUw==}
+ requiresBuild: true
+ dependencies:
+ cross-spawn: 7.0.3
+ which: 2.0.2
+ dev: true
+
+ /@fastify/restartable/1.2.1:
+ resolution: {integrity: sha512-CxB8qrRn3n6n3cUi27i1oI/MtOXeuZSgXbUNTUZP3t0eYQEsLQTh+WhKH5nfb7dT3Op5BVMefuWpRDUXmmHFhQ==}
+ dependencies:
+ '@fastify/error': 3.0.0
+ fastify: 4.6.0
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@fastify/session/10.0.0:
+ resolution: {integrity: sha512-poeyhxX4UW6w5LDi1sKqhaZDAkMvhuVP5b+KcbTVCdDp91heZGr8inpgE5p5hDx6cSyK8Z7VXyBRbQTwGwulFA==}
+ dependencies:
+ fastify-plugin: 4.2.1
+ safe-stable-stringify: 2.3.1
+ dev: true
+
+ /@fastify/static/6.5.0:
+ resolution: {integrity: sha512-WEk6iqgejA6ivjkvbJ47A+uMci225z5lZwLXCXZS3ZYR/kYje1gzzarkKKGL6TWpBw6smkOzxA7dfEoY0347Nw==}
+ dependencies:
+ '@fastify/accept-negotiator': 1.0.0
+ content-disposition: 0.5.4
+ fastify-plugin: 4.2.1
+ glob: 8.0.3
+ p-limit: 3.1.0
+ readable-stream: 4.1.0
+ send: 0.18.0
+ transitivePeerDependencies:
+ - supports-color
+
+ /@fastify/swagger/7.5.1:
+ resolution: {integrity: sha512-940WBMPF2ocYZrgLGvVFkggeLxq4A4h/0PEkwsSFDIFGk0YyLS1IxhjwifNsltUVi6HVL5y7TDHqX4Rgt8/GsQ==}
+ dependencies:
+ '@fastify/static': 6.5.0
+ fastify-plugin: 4.2.1
+ json-schema-resolver: 1.3.0
+ openapi-types: 12.0.2
+ rfdc: 1.3.0
+ yaml: 2.1.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /@fastify/under-pressure/8.1.0:
+ resolution: {integrity: sha512-2Nm0UyVxTdssvNTbYVZFSH4C2LwgQcphOxgX6hIQce8jH1sm1gx/wusDBr6+0lyzwE06V537kwA7JeAvysK3xw==}
+ dependencies:
+ '@fastify/error': 3.0.0
+ fastify-plugin: 4.2.1
+ dev: false
+
+ /@fastify/websocket/7.0.1:
+ resolution: {integrity: sha512-LHOUKqRr1iV4w47sVQAT5IrVPsTiuEAqOS1QgtRymJYPeVfIC65LTxg22eoR7kyYW4W84oWJk0gtsBwUGhDEVg==}
+ dependencies:
+ fastify-plugin: 4.2.1
+ ws: 8.8.1
+ transitivePeerDependencies:
+ - bufferutil
+ - utf-8-validate
+
+ /@gar/promisify/1.1.3:
+ resolution: {integrity: sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==}
+ optional: true
+
+ /@graphiql/react/0.13.1_xst6jk3wj5nubsgv7xxqfuksc4:
+ resolution: {integrity: sha512-1E5oZJV1Wf5uZJQTKQvhv7/mjpH4RaYasVbPY82ZX1aQ53bf0H9wluHLHW5Q8Wtb/NF72EWr7DdygFIE+JqYBg==}
+ peerDependencies:
+ graphql: ^15.5.0 || ^16.0.0
+ react: ^16.8.0 || ^17.0.0 || ^18.0.0
+ react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0
+ dependencies:
+ '@graphiql/toolkit': 0.8.0_graphql@16.6.0
+ '@reach/combobox': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/dialog': 0.17.0_7ey2zzynotv32rpkwno45fsx4e
+ '@reach/listbox': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/menu-button': 0.17.0_pumtretovylab5lwhztzjp2kuy
+ '@reach/tooltip': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/visually-hidden': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ codemirror: 5.65.8
+ codemirror-graphql: 2.0.0_4qrm3am6isowdlonkgtqtogfxe
+ copy-to-clipboard: 3.3.2
+ graphql: 16.6.0
+ graphql-language-service: 5.1.0_graphql@16.6.0
+ markdown-it: 12.3.2
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ set-value: 4.1.0
+ transitivePeerDependencies:
+ - '@codemirror/language'
+ - '@types/node'
+ - '@types/react'
+ - graphql-ws
+ - react-is
+ dev: true
+
+ /@graphiql/toolkit/0.8.0_graphql@16.6.0:
+ resolution: {integrity: sha512-DbMFhEKejpPzB6k8W3Mj+Rl8geXiw49USDF9Wdi06EEk1XLVh1iebDqveYY+4lViITsV4+BeGikxlqi8umfP4g==}
+ peerDependencies:
+ graphql: ^15.5.0 || ^16.0.0
+ graphql-ws: '>= 4.5.0'
+ peerDependenciesMeta:
+ graphql-ws:
+ optional: true
+ dependencies:
+ '@n1ru4l/push-pull-async-iterable-iterator': 3.2.0
+ graphql: 16.6.0
+ meros: 1.2.0
+ transitivePeerDependencies:
+ - '@types/node'
+ dev: true
+
+ /@graphql-typed-document-node/core/3.1.1_graphql@16.6.0:
+ resolution: {integrity: sha512-NQ17ii0rK1b34VZonlmT2QMJFI70m0TRwbknO/ihlbatXyaktDhN/98vBiUU6kNBPljqGqyIrl2T4nY2RpFANg==}
+ peerDependencies:
+ graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0
+ dependencies:
+ graphql: 16.6.0
+
+ /@humanwhocodes/config-array/0.10.4:
+ resolution: {integrity: sha512-mXAIHxZT3Vcpg83opl1wGlVZ9xydbfZO3r5YfRSH6Gpp2J/PfdBP0wbDa2sO6/qRbcalpoevVyW6A/fI6LfeMw==}
+ engines: {node: '>=10.10.0'}
+ dependencies:
+ '@humanwhocodes/object-schema': 1.2.1
+ debug: 4.3.4
+ minimatch: 3.1.2
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /@humanwhocodes/gitignore-to-minimatch/1.0.2:
+ resolution: {integrity: sha512-rSqmMJDdLFUsyxR6FMtD00nfQKKLFb1kv+qBbOVKqErvloEIJLo5bDTJTQNTYgeyp78JsA7u/NPi5jT1GR/MuA==}
+ dev: true
+
+ /@humanwhocodes/module-importer/1.0.1:
+ resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==}
+ engines: {node: '>=12.22'}
+ dev: true
+
+ /@humanwhocodes/object-schema/1.2.1:
+ resolution: {integrity: sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==}
+ dev: true
+
+ /@iarna/toml/2.2.5:
+ resolution: {integrity: sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg==}
+ dev: false
+
+ /@istanbuljs/load-nyc-config/1.1.0:
+ resolution: {integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==}
+ engines: {node: '>=8'}
+ dependencies:
+ camelcase: 5.3.1
+ find-up: 4.1.0
+ get-package-type: 0.1.0
+ js-yaml: 3.14.1
+ resolve-from: 5.0.0
+ dev: true
+
+ /@istanbuljs/schema/0.1.3:
+ resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==}
+ engines: {node: '>=8'}
+
+ /@jridgewell/gen-mapping/0.1.1:
+ resolution: {integrity: sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==}
+ engines: {node: '>=6.0.0'}
+ dependencies:
+ '@jridgewell/set-array': 1.1.2
+ '@jridgewell/sourcemap-codec': 1.4.14
+ dev: true
+
+ /@jridgewell/gen-mapping/0.3.2:
+ resolution: {integrity: sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==}
+ engines: {node: '>=6.0.0'}
+ dependencies:
+ '@jridgewell/set-array': 1.1.2
+ '@jridgewell/sourcemap-codec': 1.4.14
+ '@jridgewell/trace-mapping': 0.3.15
+ dev: true
+
+ /@jridgewell/resolve-uri/3.1.0:
+ resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==}
+ engines: {node: '>=6.0.0'}
+
+ /@jridgewell/set-array/1.1.2:
+ resolution: {integrity: sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==}
+ engines: {node: '>=6.0.0'}
+ dev: true
+
+ /@jridgewell/sourcemap-codec/1.4.14:
+ resolution: {integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==}
+
+ /@jridgewell/trace-mapping/0.3.15:
+ resolution: {integrity: sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g==}
+ dependencies:
+ '@jridgewell/resolve-uri': 3.1.0
+ '@jridgewell/sourcemap-codec': 1.4.14
+
+ /@lezer/common/0.16.1:
+ resolution: {integrity: sha512-qPmG7YTZ6lATyTOAWf8vXE+iRrt1NJd4cm2nJHK+v7X9TsOF6+HtuU/ctaZy2RCrluxDb89hI6KWQ5LfQGQWuA==}
+ dev: true
+
+ /@lezer/highlight/0.16.0:
+ resolution: {integrity: sha512-iE5f4flHlJ1g1clOStvXNLbORJoiW4Kytso6ubfYzHnaNo/eo5SKhxs4wv/rtvwZQeZrK3we8S9SyA7OGOoRKQ==}
+ dependencies:
+ '@lezer/common': 0.16.1
+ dev: true
+
+ /@lezer/lr/0.16.3:
+ resolution: {integrity: sha512-pau7um4eAw94BEuuShUIeQDTf3k4Wt6oIUOYxMmkZgDHdqtIcxWND4LRxi8nI9KuT4I1bXQv67BCapkxt7Ywqw==}
+ dependencies:
+ '@lezer/common': 0.16.1
+ dev: true
+
+ /@lukeed/ms/2.0.0:
+ resolution: {integrity: sha512-NOlhE40rGptwLwJhE0ZW259hcoa+nkpQRQ1FUKV4Sr2z1Eh2WfkHQ3jjBNF7YEqOrF0TOpqnyU1wClvWBrXByg==}
+ engines: {node: '>=8'}
+ dev: false
+
+ /@mapbox/node-pre-gyp/1.0.10:
+ resolution: {integrity: sha512-4ySo4CjzStuprMwk35H5pPbkymjv1SF3jGLj6rAHp/xT/RF7TL7bd9CTm1xDY49K2qF7jmR/g7k+SkLETP6opA==}
+ hasBin: true
+ dependencies:
+ detect-libc: 2.0.1
+ https-proxy-agent: 5.0.1
+ make-dir: 3.1.0
+ node-fetch: 2.6.7
+ nopt: 5.0.0
+ npmlog: 5.0.1
+ rimraf: 3.0.2
+ semver: 7.3.7
+ tar: 6.1.11
+ transitivePeerDependencies:
+ - encoding
+ - supports-color
+
+ /@matteo.collina/isolates/2.1.0:
+ resolution: {integrity: sha512-jZZCJsPszJJsYtXxzug6ACL+iOzE+9lM27ZDqFBDy1iMl9Q3q/v1R5n+nmJ6TmYVPDVbYij8EFLyXARPBsLkWg==}
+ engines: {node: '>= 15.5.0'}
+ requiresBuild: true
+ dependencies:
+ bindings: 1.5.0
+ dev: false
+ optional: true
+
+ /@n1ru4l/push-pull-async-iterable-iterator/3.2.0:
+ resolution: {integrity: sha512-3fkKj25kEjsfObL6IlKPAlHYPq/oYwUkkQ03zsTTiDjD7vg/RxjdiLeCydqtxHZP0JgsXL3D/X5oAkMGzuUp/Q==}
+ engines: {node: '>=12'}
+ dev: true
+
+ /@nodelib/fs.scandir/2.1.5:
+ resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==}
+ engines: {node: '>= 8'}
+ dependencies:
+ '@nodelib/fs.stat': 2.0.5
+ run-parallel: 1.2.0
+ dev: true
+
+ /@nodelib/fs.stat/2.0.5:
+ resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==}
+ engines: {node: '>= 8'}
+ dev: true
+
+ /@nodelib/fs.walk/1.2.8:
+ resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
+ engines: {node: '>= 8'}
+ dependencies:
+ '@nodelib/fs.scandir': 2.1.5
+ fastq: 1.13.0
+ dev: true
+
+ /@npmcli/fs/1.1.1:
+ resolution: {integrity: sha512-8KG5RD0GVP4ydEzRn/I4BNDuxDtqVbOdm8675T49OIG/NGhaK0pjPX7ZcDlvKYbA+ulvVK3ztfcF4uBdOxuJbQ==}
+ dependencies:
+ '@gar/promisify': 1.1.3
+ semver: 7.3.7
+ optional: true
+
+ /@npmcli/move-file/1.1.2:
+ resolution: {integrity: sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg==}
+ engines: {node: '>=10'}
+ dependencies:
+ mkdirp: 1.0.4
+ rimraf: 3.0.2
+ optional: true
+
+ /@playwright/test/1.25.2:
+ resolution: {integrity: sha512-6qPznIR4Fw02OMbqXUPMG6bFFg1hDVNEdihKy0t9K0dmRbus1DyP5Q5XFQhGwEHQkLG5hrSfBuu9CW/foqhQHQ==}
+ engines: {node: '>=14'}
+ hasBin: true
+ dependencies:
+ '@types/node': 18.7.18
+ playwright-core: 1.25.2
+ dev: true
+
+ /@reach/auto-id/0.17.0_biqbaboplfbrettd7655fr4n2y:
+ resolution: {integrity: sha512-ud8iPwF52RVzEmkHq1twuqGuPA+moreumUHdtgvU3sr3/15BNhwp3KyDLrKKSz0LP1r3V4pSdyF9MbYM8BoSjA==}
+ peerDependencies:
+ react: ^16.8.0 || 17.x
+ react-dom: ^16.8.0 || 17.x
+ dependencies:
+ '@reach/utils': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ tslib: 2.4.0
+ dev: true
+
+ /@reach/combobox/0.17.0_biqbaboplfbrettd7655fr4n2y:
+ resolution: {integrity: sha512-2mYvU5agOBCQBMdlM4cri+P1BbNwp05P1OuDyc33xJSNiBG7BMy4+ZSHJ0X4fyle6rHwSgCAOCLOeWV1XUYjoQ==}
+ peerDependencies:
+ react: ^16.8.0 || 17.x
+ react-dom: ^16.8.0 || 17.x
+ dependencies:
+ '@reach/auto-id': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/descendants': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/popover': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/portal': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/utils': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ prop-types: 15.8.1
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ tiny-warning: 1.0.3
+ tslib: 2.4.0
+ dev: true
+
+ /@reach/descendants/0.17.0_biqbaboplfbrettd7655fr4n2y:
+ resolution: {integrity: sha512-c7lUaBfjgcmKFZiAWqhG+VnXDMEhPkI4kAav/82XKZD6NVvFjsQOTH+v3tUkskrAPV44Yuch0mFW/u5Ntifr7Q==}
+ peerDependencies:
+ react: ^16.8.0 || 17.x
+ react-dom: ^16.8.0 || 17.x
+ dependencies:
+ '@reach/utils': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ tslib: 2.4.0
+ dev: true
+
+ /@reach/dialog/0.17.0_7ey2zzynotv32rpkwno45fsx4e:
+ resolution: {integrity: sha512-AnfKXugqDTGbeG3c8xDcrQDE4h9b/vnc27Sa118oQSquz52fneUeX9MeFb5ZEiBJK8T5NJpv7QUTBIKnFCAH5A==}
+ peerDependencies:
+ react: ^16.8.0 || 17.x
+ react-dom: ^16.8.0 || 17.x
+ dependencies:
+ '@reach/portal': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/utils': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ prop-types: 15.8.1
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ react-focus-lock: 2.9.1_w5j4k42lgipnm43s3brx6h3c34
+ react-remove-scroll: 2.5.5_w5j4k42lgipnm43s3brx6h3c34
+ tslib: 2.4.0
+ transitivePeerDependencies:
+ - '@types/react'
+ dev: true
+
+ /@reach/dropdown/0.17.0_biqbaboplfbrettd7655fr4n2y:
+ resolution: {integrity: sha512-qBTIGInhxtPHtdj4Pl2XZgZMz3e37liydh0xR3qc48syu7g71sL4nqyKjOzThykyfhA3Pb3/wFgsFJKGTSdaig==}
+ peerDependencies:
+ react: ^16.8.0 || 17.x
+ react-dom: ^16.8.0 || 17.x
+ dependencies:
+ '@reach/auto-id': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/descendants': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/popover': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/utils': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ tslib: 2.4.0
+ dev: true
+
+ /@reach/listbox/0.17.0_biqbaboplfbrettd7655fr4n2y:
+ resolution: {integrity: sha512-AMnH1P6/3VKy2V/nPb4Es441arYR+t4YRdh9jdcFVrCOD6y7CQrlmxsYjeg9Ocdz08XpdoEBHM3PKLJqNAUr7A==}
+ peerDependencies:
+ react: ^16.8.0 || 17.x
+ react-dom: ^16.8.0 || 17.x
+ dependencies:
+ '@reach/auto-id': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/descendants': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/machine': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/popover': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/utils': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ prop-types: 15.8.1
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ dev: true
+
+ /@reach/machine/0.17.0_biqbaboplfbrettd7655fr4n2y:
+ resolution: {integrity: sha512-9EHnuPgXzkbRENvRUzJvVvYt+C2jp7PGN0xon7ffmKoK8rTO6eA/bb7P0xgloyDDQtu88TBUXKzW0uASqhTXGA==}
+ peerDependencies:
+ react: ^16.8.0 || 17.x
+ react-dom: ^16.8.0 || 17.x
+ dependencies:
+ '@reach/utils': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@xstate/fsm': 1.4.0
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ tslib: 2.4.0
+ dev: true
+
+ /@reach/menu-button/0.17.0_pumtretovylab5lwhztzjp2kuy:
+ resolution: {integrity: sha512-YyuYVyMZKamPtivoEI6D0UEILYH3qZtg4kJzEAuzPmoR/aHN66NZO75Fx0gtjG1S6fZfbiARaCOZJC0VEiDOtQ==}
+ peerDependencies:
+ react: ^16.8.0 || 17.x
+ react-dom: ^16.8.0 || 17.x
+ react-is: ^16.8.0 || 17.x
+ dependencies:
+ '@reach/dropdown': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/popover': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/utils': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ prop-types: 15.8.1
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ react-is: 17.0.2
+ tiny-warning: 1.0.3
+ tslib: 2.4.0
+ dev: true
+
+ /@reach/observe-rect/1.2.0:
+ resolution: {integrity: sha512-Ba7HmkFgfQxZqqaeIWWkNK0rEhpxVQHIoVyW1YDSkGsGIXzcaW4deC8B0pZrNSSyLTdIk7y+5olKt5+g0GmFIQ==}
+ dev: true
+
+ /@reach/popover/0.17.0_biqbaboplfbrettd7655fr4n2y:
+ resolution: {integrity: sha512-yYbBF4fMz4Ml4LB3agobZjcZ/oPtPsNv70ZAd7lEC2h7cvhF453pA+zOBGYTPGupKaeBvgAnrMjj7RnxDU5hoQ==}
+ peerDependencies:
+ react: ^16.8.0 || 17.x
+ react-dom: ^16.8.0 || 17.x
+ dependencies:
+ '@reach/portal': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/rect': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/utils': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ tabbable: 4.0.0
+ tslib: 2.4.0
+ dev: true
+
+ /@reach/portal/0.17.0_biqbaboplfbrettd7655fr4n2y:
+ resolution: {integrity: sha512-+IxsgVycOj+WOeNPL2NdgooUdHPSY285wCtj/iWID6akyr4FgGUK7sMhRM9aGFyrGpx2vzr+eggbUmAVZwOz+A==}
+ peerDependencies:
+ react: ^16.8.0 || 17.x
+ react-dom: ^16.8.0 || 17.x
+ dependencies:
+ '@reach/utils': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ tiny-warning: 1.0.3
+ tslib: 2.4.0
+ dev: true
+
+ /@reach/rect/0.17.0_biqbaboplfbrettd7655fr4n2y:
+ resolution: {integrity: sha512-3YB7KA5cLjbLc20bmPkJ06DIfXSK06Cb5BbD2dHgKXjUkT9WjZaLYIbYCO8dVjwcyO3GCNfOmPxy62VsPmZwYA==}
+ peerDependencies:
+ react: ^16.8.0 || 17.x
+ react-dom: ^16.8.0 || 17.x
+ dependencies:
+ '@reach/observe-rect': 1.2.0
+ '@reach/utils': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ prop-types: 15.8.1
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ tiny-warning: 1.0.3
+ tslib: 2.4.0
+ dev: true
+
+ /@reach/tooltip/0.17.0_biqbaboplfbrettd7655fr4n2y:
+ resolution: {integrity: sha512-HP8Blordzqb/Cxg+jnhGmWQfKgypamcYLBPlcx6jconyV5iLJ5m93qipr1giK7MqKT2wlsKWy44ZcOrJ+Wrf8w==}
+ peerDependencies:
+ react: ^16.8.0 || 17.x
+ react-dom: ^16.8.0 || 17.x
+ dependencies:
+ '@reach/auto-id': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/portal': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/rect': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/utils': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ '@reach/visually-hidden': 0.17.0_biqbaboplfbrettd7655fr4n2y
+ prop-types: 15.8.1
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ tiny-warning: 1.0.3
+ tslib: 2.4.0
+ dev: true
+
+ /@reach/utils/0.17.0_biqbaboplfbrettd7655fr4n2y:
+ resolution: {integrity: sha512-M5y8fCBbrWeIsxedgcSw6oDlAMQDkl5uv3VnMVJ7guwpf4E48Xlh1v66z/1BgN/WYe2y8mB/ilFD2nysEfdGeA==}
+ peerDependencies:
+ react: ^16.8.0 || 17.x
+ react-dom: ^16.8.0 || 17.x
+ dependencies:
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ tiny-warning: 1.0.3
+ tslib: 2.4.0
+ dev: true
+
+ /@reach/visually-hidden/0.17.0_biqbaboplfbrettd7655fr4n2y:
+ resolution: {integrity: sha512-T6xF3Nv8vVnjVkGU6cm0+kWtvliLqPAo8PcZ+WxkKacZsaHTjaZb4v1PaCcyQHmuTNT/vtTVNOJLG0SjQOIb7g==}
+ peerDependencies:
+ react: ^16.8.0 || 17.x
+ react-dom: ^16.8.0 || 17.x
+ dependencies:
+ prop-types: 15.8.1
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ tslib: 2.4.0
+ dev: true
+
+ /@remix-run/router/1.0.0:
+ resolution: {integrity: sha512-SCR1cxRSMNKjaVYptCzBApPDqGwa3FGdjVHc+rOToocNPHQdIYLZBfv/3f+KvYuXDkUGVIW9IAzmPNZDRL1I4A==}
+ engines: {node: '>=14'}
+ dev: true
+
+ /@sphinxxxx/color-conversion/2.2.2:
+ resolution: {integrity: sha512-XExJS3cLqgrmNBIP3bBw6+1oQ1ksGjFh0+oClDKFYpCCqx/hlqwWO5KO/S63fzUo67SxI9dMrF0y5T/Ey7h8Zw==}
+ dev: true
+
+ /@tootallnate/once/1.1.2:
+ resolution: {integrity: sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==}
+ engines: {node: '>= 6'}
+ optional: true
+
+ /@tsd/typescript/4.8.3:
+ resolution: {integrity: sha512-ytRZWmXF0i4VFSG8NQ67NUDQ3NGe3E4oByFrxH8eJyW5uBOM5juIdXCg81OY/IcK81qHCzrvGEo8tujlIQbexw==}
+ dev: true
+
+ /@types/chai-subset/1.3.3:
+ resolution: {integrity: sha512-frBecisrNGz+F4T6bcc+NLeolfiojh5FxW2klu669+8BARtyQv2C/GkNW6FUodVe4BroGMP/wER/YDGc7rEllw==}
+ dependencies:
+ '@types/chai': 4.3.3
+ dev: true
+
+ /@types/chai/4.3.3:
+ resolution: {integrity: sha512-hC7OMnszpxhZPduX+m+nrx+uFoLkWOMiR4oa/AZF3MuSETYTZmFfJAHqZEM8MVlvfG7BEUcgvtwoCTxBp6hm3g==}
+ dev: true
+
+ /@types/concat-stream/1.6.1:
+ resolution: {integrity: sha512-eHE4cQPoj6ngxBZMvVf6Hw7Mh4jMW4U9lpGmS5GBPB9RYxlFg+CHaVN7ErNY4W9XfLIEn20b4VDYaIrbq0q4uA==}
+ dependencies:
+ '@types/node': 8.10.66
+ dev: true
+
+ /@types/cuid/1.3.1:
+ resolution: {integrity: sha512-LwQOxZtpN3aEGElEicpHx1I6exi+mLBecAdLMWNRjGaYByD2CqGjSH1oVEQGeNSqgYBhLC1pIJQMDgcpxk0t8Q==}
+
+ /@types/eslint/7.29.0:
+ resolution: {integrity: sha512-VNcvioYDH8/FxaeTKkM4/TiTwt6pBV9E3OfGmvaw8tPl0rrHCJ4Ll15HRT+pMiFAf/MLQvAzC+6RzUMEL9Ceng==}
+ dependencies:
+ '@types/estree': 1.0.0
+ '@types/json-schema': 7.0.11
+ dev: true
+
+ /@types/estree/1.0.0:
+ resolution: {integrity: sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ==}
+ dev: true
+
+ /@types/form-data/0.0.33:
+ resolution: {integrity: sha512-8BSvG1kGm83cyJITQMZSulnl6QV8jqAGreJsc5tPu1Jq0vTSOiY/k24Wx82JRpWwZSqrala6sd5rWi6aNXvqcw==}
+ dependencies:
+ '@types/node': 8.10.66
+ dev: true
+
+ /@types/hast/2.3.4:
+ resolution: {integrity: sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g==}
+ dependencies:
+ '@types/unist': 2.0.6
+ dev: true
+
+ /@types/hoist-non-react-statics/3.3.1:
+ resolution: {integrity: sha512-iMIqiko6ooLrTh1joXodJK5X9xeEALT1kM5G3ZLhD3hszxBdIEd5C75U834D9mLcINgD4OyZf5uQXjkuYydWvA==}
+ dependencies:
+ '@types/react': 18.0.20
+ hoist-non-react-statics: 3.3.2
+ dev: true
+
+ /@types/istanbul-lib-coverage/2.0.4:
+ resolution: {integrity: sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==}
+
+ /@types/json-schema/7.0.11:
+ resolution: {integrity: sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==}
+ dev: true
+
+ /@types/json5/0.0.29:
+ resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==}
+ dev: true
+
+ /@types/minimist/1.2.2:
+ resolution: {integrity: sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==}
+ dev: true
+
+ /@types/mysql/2.15.21:
+ resolution: {integrity: sha512-NPotx5CVful7yB+qZbWtXL2fA4e7aEHkihHLjklc6ID8aq7bhguHgeIoC1EmSNTAuCgI6ZXrjt2ZSaXnYX0EUg==}
+ dependencies:
+ '@types/node': 18.7.18
+ dev: false
+
+ /@types/node/10.17.60:
+ resolution: {integrity: sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==}
+ dev: true
+
+ /@types/node/18.7.18:
+ resolution: {integrity: sha512-m+6nTEOadJZuTPkKR/SYK3A2d7FZrgElol9UP1Kae90VVU4a6mxnPuLiIW1m4Cq4gZ/nWb9GrdVXJCoCazDAbg==}
+
+ /@types/node/8.10.66:
+ resolution: {integrity: sha512-tktOkFUA4kXx2hhhrB8bIFb5TbwzS4uOhKEmwiD+NoiL0qtP2OQ9mFldbgD4dV1djrlBYP6eBuQZiWjuHUpqFw==}
+ dev: true
+
+ /@types/normalize-package-data/2.4.1:
+ resolution: {integrity: sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==}
+ dev: true
+
+ /@types/prop-types/15.7.5:
+ resolution: {integrity: sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==}
+ dev: true
+
+ /@types/qs/6.9.7:
+ resolution: {integrity: sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==}
+ dev: true
+
+ /@types/react-dom/18.0.6:
+ resolution: {integrity: sha512-/5OFZgfIPSwy+YuIBP/FgJnQnsxhZhjjrnxudMddeblOouIodEQ75X14Rr4wGSG/bknL+Omy9iWlLo1u/9GzAA==}
+ dependencies:
+ '@types/react': 18.0.20
+ dev: true
+
+ /@types/react-redux/7.1.24:
+ resolution: {integrity: sha512-7FkurKcS1k0FHZEtdbbgN8Oc6b+stGSfZYjQGicofJ0j4U0qIn/jaSvnP2pLwZKiai3/17xqqxkkrxTgN8UNbQ==}
+ dependencies:
+ '@types/hoist-non-react-statics': 3.3.1
+ '@types/react': 18.0.20
+ hoist-non-react-statics: 3.3.2
+ redux: 4.2.0
+ dev: true
+
+ /@types/react/18.0.20:
+ resolution: {integrity: sha512-MWul1teSPxujEHVwZl4a5HxQ9vVNsjTchVA+xRqv/VYGCuKGAU6UhfrTdF5aBefwD1BHUD8i/zq+O/vyCm/FrA==}
+ dependencies:
+ '@types/prop-types': 15.7.5
+ '@types/scheduler': 0.16.2
+ csstype: 3.1.1
+ dev: true
+
+ /@types/scheduler/0.16.2:
+ resolution: {integrity: sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==}
+ dev: true
+
+ /@types/sqlite3/3.1.8:
+ resolution: {integrity: sha512-sQMt/qnyUWnqiTcJXm5ZfNPIBeJ/DVvJDwxw+0tAxPJvadzfiP1QhryO1JOR6t1yfb8NpzQb/Rud06mob5laIA==}
+ dependencies:
+ '@types/node': 18.7.18
+
+ /@types/unist/2.0.6:
+ resolution: {integrity: sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==}
+ dev: true
+
+ /@vitejs/plugin-react/2.1.0_vite@3.1.1:
+ resolution: {integrity: sha512-am6rPyyU3LzUYne3Gd9oj9c4Rzbq5hQnuGXSMT6Gujq45Il/+bunwq3lrB7wghLkiF45ygMwft37vgJ/NE8IAA==}
+ engines: {node: ^14.18.0 || >=16.0.0}
+ peerDependencies:
+ vite: ^3.0.0
+ dependencies:
+ '@babel/core': 7.19.1
+ '@babel/plugin-transform-react-jsx': 7.19.0_@babel+core@7.19.1
+ '@babel/plugin-transform-react-jsx-development': 7.18.6_@babel+core@7.19.1
+ '@babel/plugin-transform-react-jsx-self': 7.18.6_@babel+core@7.19.1
+ '@babel/plugin-transform-react-jsx-source': 7.18.6_@babel+core@7.19.1
+ magic-string: 0.26.3
+ react-refresh: 0.14.0
+ vite: 3.1.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /@xstate/fsm/1.4.0:
+ resolution: {integrity: sha512-uTHDeu2xI5E1IFwf37JFQM31RrH7mY7877RqPBS4ZqSNUwoLDuct8AhBWaXGnVizBAYyimVwgCyGa9z/NiRhXA==}
+ dev: true
+
+ /abbrev/1.1.1:
+ resolution: {integrity: sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==}
+
+ /abort-controller/3.0.0:
+ resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==}
+ engines: {node: '>=6.5'}
+ dependencies:
+ event-target-shim: 5.0.1
+
+ /abstract-logging/2.0.1:
+ resolution: {integrity: sha512-2BjRTZxTPvheOvGbBslFSYOUkr+SjPtOnrLP33f+VIWLzezQpZcqVg7ja3L4dBXmzzgwT+a029jRx5PCi3JuiA==}
+
+ /ace-builds/1.10.1:
+ resolution: {integrity: sha512-w8Xj6lZUtOYAquVYvdpZhb0GxXrZ+qpVfgj5LP2FwUbXE8fPrCmfu86FjwOiSphx/8PMbXXVldFLD2+RIXayyA==}
+ dev: true
+
+ /acorn-jsx/5.3.2_acorn@8.8.0:
+ resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==}
+ peerDependencies:
+ acorn: ^6.0.0 || ^7.0.0 || ^8.0.0
+ dependencies:
+ acorn: 8.8.0
+ dev: true
+
+ /acorn/8.8.0:
+ resolution: {integrity: sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w==}
+ engines: {node: '>=0.4.0'}
+ hasBin: true
+ dev: true
+
+ /acquerello/1.0.9:
+ resolution: {integrity: sha512-LhLS55uO9ZhBw0ESaBSm8hjfhDJiw/7BkfGKOTOHTY2HBabkTcaCPxfrHkvlOaHP/8oe7K8F+sjmyLtQpvTZ4w==}
+ engines: {node: '>=14.15.0'}
+ dev: false
+
+ /agent-base/6.0.2:
+ resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==}
+ engines: {node: '>= 6.0.0'}
+ dependencies:
+ debug: 4.3.4
+ transitivePeerDependencies:
+ - supports-color
+
+ /agentkeepalive/4.2.1:
+ resolution: {integrity: sha512-Zn4cw2NEqd+9fiSVWMscnjyQ1a8Yfoc5oBajLeo5w+YBHgDUcEBY2hS4YpTz6iN5f/2zQiktcuM6tS8x1p9dpA==}
+ engines: {node: '>= 8.0.0'}
+ dependencies:
+ debug: 4.3.4
+ depd: 1.1.2
+ humanize-ms: 1.2.1
+ transitivePeerDependencies:
+ - supports-color
+ optional: true
+
+ /aggregate-error/3.1.0:
+ resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==}
+ engines: {node: '>=8'}
+ dependencies:
+ clean-stack: 2.2.0
+ indent-string: 4.0.0
+
+ /ajv-formats/2.1.1_ajv@8.11.0:
+ resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==}
+ peerDependencies:
+ ajv: ^8.0.0
+ peerDependenciesMeta:
+ ajv:
+ optional: true
+ dependencies:
+ ajv: 8.11.0
+
+ /ajv/6.12.6:
+ resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==}
+ dependencies:
+ fast-deep-equal: 3.1.3
+ fast-json-stable-stringify: 2.1.0
+ json-schema-traverse: 0.4.1
+ uri-js: 4.4.1
+
+ /ajv/8.11.0:
+ resolution: {integrity: sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==}
+ dependencies:
+ fast-deep-equal: 3.1.3
+ json-schema-traverse: 1.0.0
+ require-from-string: 2.0.2
+ uri-js: 4.4.1
+
+ /ansi-escapes/4.3.2:
+ resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==}
+ engines: {node: '>=8'}
+ dependencies:
+ type-fest: 0.21.3
+ dev: true
+
+ /ansi-regex/5.0.1:
+ resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==}
+ engines: {node: '>=8'}
+
+ /ansi-regex/6.0.1:
+ resolution: {integrity: sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==}
+ engines: {node: '>=12'}
+ dev: true
+
+ /ansi-styles/3.2.1:
+ resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==}
+ engines: {node: '>=4'}
+ dependencies:
+ color-convert: 1.9.3
+
+ /ansi-styles/4.3.0:
+ resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==}
+ engines: {node: '>=8'}
+ dependencies:
+ color-convert: 2.0.1
+
+ /anymatch/3.1.2:
+ resolution: {integrity: sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==}
+ engines: {node: '>= 8'}
+ dependencies:
+ normalize-path: 3.0.0
+ picomatch: 2.3.1
+ dev: true
+
+ /append-transform/2.0.0:
+ resolution: {integrity: sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==}
+ engines: {node: '>=8'}
+ dependencies:
+ default-require-extensions: 3.0.0
+ dev: true
+
+ /aproba/2.0.0:
+ resolution: {integrity: sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==}
+
+ /archy/1.0.0:
+ resolution: {integrity: sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw==}
+
+ /are-we-there-yet/2.0.0:
+ resolution: {integrity: sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==}
+ engines: {node: '>=10'}
+ dependencies:
+ delegates: 1.0.0
+ readable-stream: 3.6.0
+
+ /are-we-there-yet/3.0.1:
+ resolution: {integrity: sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==}
+ engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0}
+ dependencies:
+ delegates: 1.0.0
+ readable-stream: 3.6.0
+ optional: true
+
+ /argparse/1.0.10:
+ resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==}
+ dependencies:
+ sprintf-js: 1.0.3
+
+ /argparse/2.0.1:
+ resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}
+
+ /array-includes/3.1.5:
+ resolution: {integrity: sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ call-bind: 1.0.2
+ define-properties: 1.1.4
+ es-abstract: 1.20.2
+ get-intrinsic: 1.1.3
+ is-string: 1.0.7
+ dev: true
+
+ /array-union/2.1.0:
+ resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /array.prototype.flat/1.3.0:
+ resolution: {integrity: sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ call-bind: 1.0.2
+ define-properties: 1.1.4
+ es-abstract: 1.20.2
+ es-shim-unscopables: 1.0.0
+ dev: true
+
+ /array.prototype.flatmap/1.3.0:
+ resolution: {integrity: sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ call-bind: 1.0.2
+ define-properties: 1.1.4
+ es-abstract: 1.20.2
+ es-shim-unscopables: 1.0.0
+ dev: true
+
+ /arrify/1.0.1:
+ resolution: {integrity: sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /asap/1.0.0:
+ resolution: {integrity: sha512-Ej9qjcXY+8Tuy1cNqiwNMwFRXOy9UwgTeMA8LxreodygIPV48lx8PU1ecFxb5ZeU1DpMKxiq6vGLTxcitWZPbA==}
+
+ /asap/2.0.6:
+ resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==}
+ dev: true
+
+ /asn1.js/5.4.1:
+ resolution: {integrity: sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==}
+ dependencies:
+ bn.js: 4.12.0
+ inherits: 2.0.4
+ minimalistic-assert: 1.0.1
+ safer-buffer: 2.1.2
+
+ /assert-never/1.2.1:
+ resolution: {integrity: sha512-TaTivMB6pYI1kXwrFlEhLeGfOqoDNdTxjCdwRfFFkEA30Eu+k48W34nlok2EYWJfFFzqaEmichdNM7th6M5HNw==}
+
+ /assertion-error/1.1.0:
+ resolution: {integrity: sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==}
+ dev: true
+
+ /astral-regex/2.0.0:
+ resolution: {integrity: sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==}
+ engines: {node: '>=8'}
+ dev: false
+
+ /async-hook-domain/2.0.4:
+ resolution: {integrity: sha512-14LjCmlK1PK8eDtTezR6WX8TMaYNIzBIsd2D1sGoGjgx0BuNMMoSdk7i/drlbtamy0AWv9yv2tkB+ASdmeqFIw==}
+ engines: {node: '>=10'}
+ dev: true
+
+ /asynckit/0.4.0:
+ resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
+ dev: true
+
+ /atomic-sleep/1.0.0:
+ resolution: {integrity: sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==}
+ engines: {node: '>=8.0.0'}
+
+ /autolinker/3.16.2:
+ resolution: {integrity: sha512-JiYl7j2Z19F9NdTmirENSUUIIL/9MytEWtmzhfmsKPCp9E+G35Y0UNCMoM9tFigxT59qSc8Ml2dlZXOCVTYwuA==}
+ dependencies:
+ tslib: 2.4.0
+ dev: true
+
+ /avvio/8.2.0:
+ resolution: {integrity: sha512-bbCQdg7bpEv6kGH41RO/3B2/GMMmJSo2iBK+X8AWN9mujtfUipMDfIjsgHCfpnKqoGEQrrmCDKSa5OQ19+fDmg==}
+ dependencies:
+ archy: 1.0.0
+ debug: 4.3.4
+ fastq: 1.13.0
+ transitivePeerDependencies:
+ - supports-color
+
+ /balanced-match/1.0.2:
+ resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
+
+ /base64-js/1.5.1:
+ resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==}
+ dev: true
+
+ /basic-auth/2.0.1:
+ resolution: {integrity: sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==}
+ engines: {node: '>= 0.8'}
+ dependencies:
+ safe-buffer: 5.1.2
+ dev: false
+
+ /binary-extensions/2.2.0:
+ resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /bind-obj-methods/3.0.0:
+ resolution: {integrity: sha512-nLEaaz3/sEzNSyPWRsN9HNsqwk1AUyECtGj+XwGdIi3xABnEqecvXtIJ0wehQXuuER5uZ/5fTs2usONgYjG+iw==}
+ engines: {node: '>=10'}
+ dev: true
+
+ /bindings/1.5.0:
+ resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==}
+ dependencies:
+ file-uri-to-path: 1.0.0
+ dev: false
+ optional: true
+
+ /bintrees/1.0.2:
+ resolution: {integrity: sha512-VOMgTMwjAaUG580SXn3LacVgjurrbMme7ZZNYGSSV7mmtY6QQRh0Eg3pwIcntQ77DErK1L0NxkbetjcoXzVwKw==}
+ dev: false
+
+ /bn.js/4.12.0:
+ resolution: {integrity: sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==}
+
+ /brace-expansion/1.1.11:
+ resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==}
+ dependencies:
+ balanced-match: 1.0.2
+ concat-map: 0.0.1
+
+ /brace-expansion/2.0.1:
+ resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==}
+ dependencies:
+ balanced-match: 1.0.2
+
+ /braces/3.0.2:
+ resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==}
+ engines: {node: '>=8'}
+ dependencies:
+ fill-range: 7.0.1
+ dev: true
+
+ /brorand/1.1.0:
+ resolution: {integrity: sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w==}
+ dev: false
+
+ /browserslist/4.21.4:
+ resolution: {integrity: sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==}
+ engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7}
+ hasBin: true
+ dependencies:
+ caniuse-lite: 1.0.30001402
+ electron-to-chromium: 1.4.253
+ node-releases: 2.0.6
+ update-browserslist-db: 1.0.9_browserslist@4.21.4
+ dev: true
+
+ /buffer-from/1.1.2:
+ resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==}
+ dev: true
+
+ /buffer-writer/2.0.0:
+ resolution: {integrity: sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==}
+ engines: {node: '>=4'}
+
+ /builtins/5.0.1:
+ resolution: {integrity: sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ==}
+ dependencies:
+ semver: 7.3.7
+ dev: true
+
+ /bulma/0.9.4:
+ resolution: {integrity: sha512-86FlT5+1GrsgKbPLRRY7cGDg8fsJiP/jzTqXXVqiUZZ2aZT8uemEOHlU1CDU+TxklPEZ11HZNNWclRBBecP4CQ==}
+ dev: true
+
+ /c8/7.12.0:
+ resolution: {integrity: sha512-CtgQrHOkyxr5koX1wEUmN/5cfDa2ckbHRA4Gy5LAL0zaCFtVWJS5++n+w4/sr2GWGerBxgTjpKeDclk/Qk6W/A==}
+ engines: {node: '>=10.12.0'}
+ hasBin: true
+ dependencies:
+ '@bcoe/v8-coverage': 0.2.3
+ '@istanbuljs/schema': 0.1.3
+ find-up: 5.0.0
+ foreground-child: 2.0.0
+ istanbul-lib-coverage: 3.2.0
+ istanbul-lib-report: 3.0.0
+ istanbul-reports: 3.1.5
+ rimraf: 3.0.2
+ test-exclude: 6.0.0
+ v8-to-istanbul: 9.0.1
+ yargs: 16.2.0
+ yargs-parser: 20.2.9
+
+ /cacache/15.3.0:
+ resolution: {integrity: sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==}
+ engines: {node: '>= 10'}
+ dependencies:
+ '@npmcli/fs': 1.1.1
+ '@npmcli/move-file': 1.1.2
+ chownr: 2.0.0
+ fs-minipass: 2.1.0
+ glob: 7.2.3
+ infer-owner: 1.0.4
+ lru-cache: 6.0.0
+ minipass: 3.3.4
+ minipass-collect: 1.0.2
+ minipass-flush: 1.0.5
+ minipass-pipeline: 1.2.4
+ mkdirp: 1.0.4
+ p-map: 4.0.0
+ promise-inflight: 1.0.1
+ rimraf: 3.0.2
+ ssri: 8.0.1
+ tar: 6.1.11
+ unique-filename: 1.1.1
+ transitivePeerDependencies:
+ - bluebird
+ optional: true
+
+ /caching-transform/4.0.0:
+ resolution: {integrity: sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==}
+ engines: {node: '>=8'}
+ dependencies:
+ hasha: 5.2.2
+ make-dir: 3.1.0
+ package-hash: 4.0.0
+ write-file-atomic: 3.0.3
+ dev: true
+
+ /call-bind/1.0.2:
+ resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==}
+ dependencies:
+ function-bind: 1.1.1
+ get-intrinsic: 1.1.3
+ dev: true
+
+ /caller-callsite/2.0.0:
+ resolution: {integrity: sha512-JuG3qI4QOftFsZyOn1qq87fq5grLIyk1JYd5lJmdA+fG7aQ9pA/i3JIJGcO3q0MrRcHlOt1U+ZeHW8Dq9axALQ==}
+ engines: {node: '>=4'}
+ dependencies:
+ callsites: 2.0.0
+
+ /caller-path/2.0.0:
+ resolution: {integrity: sha512-MCL3sf6nCSXOwCTzvPKhN18TU7AHTvdtam8DAogxcrJ8Rjfbbg7Lgng64H9Iy+vUV6VGFClN/TyxBkAebLRR4A==}
+ engines: {node: '>=4'}
+ dependencies:
+ caller-callsite: 2.0.0
+
+ /callsites/2.0.0:
+ resolution: {integrity: sha512-ksWePWBloaWPxJYQ8TL0JHvtci6G5QTKwQ95RcWAa/lzoAKuAOflGdAK92hpHXjkwb8zLxoLNUoNYZgVsaJzvQ==}
+ engines: {node: '>=4'}
+
+ /callsites/3.1.0:
+ resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
+ engines: {node: '>=6'}
+
+ /camelcase-keys/6.2.2:
+ resolution: {integrity: sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==}
+ engines: {node: '>=8'}
+ dependencies:
+ camelcase: 5.3.1
+ map-obj: 4.3.0
+ quick-lru: 4.0.1
+ dev: true
+
+ /camelcase/5.3.1:
+ resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==}
+ engines: {node: '>=6'}
+ dev: true
+
+ /camelcase/6.3.0:
+ resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==}
+ engines: {node: '>=10'}
+ dev: false
+
+ /caniuse-lite/1.0.30001402:
+ resolution: {integrity: sha512-Mx4MlhXO5NwuvXGgVb+hg65HZ+bhUYsz8QtDGDo2QmaJS2GBX47Xfi2koL86lc8K+l+htXeTEB/Aeqvezoo6Ew==}
+ dev: true
+
+ /caseless/0.12.0:
+ resolution: {integrity: sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==}
+ dev: true
+
+ /chai/4.3.6:
+ resolution: {integrity: sha512-bbcp3YfHCUzMOvKqsztczerVgBKSsEijCySNlHHbX3VG1nskvqjz5Rfso1gGwD6w6oOV3eI60pKuMOV5MV7p3Q==}
+ engines: {node: '>=4'}
+ dependencies:
+ assertion-error: 1.1.0
+ check-error: 1.0.2
+ deep-eql: 3.0.1
+ get-func-name: 2.0.0
+ loupe: 2.3.4
+ pathval: 1.1.1
+ type-detect: 4.0.8
+ dev: true
+
+ /chalk/2.4.2:
+ resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==}
+ engines: {node: '>=4'}
+ dependencies:
+ ansi-styles: 3.2.1
+ escape-string-regexp: 1.0.5
+ supports-color: 5.5.0
+
+ /chalk/4.1.2:
+ resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==}
+ engines: {node: '>=10'}
+ dependencies:
+ ansi-styles: 4.3.0
+ supports-color: 7.2.0
+ dev: true
+
+ /character-entities-legacy/1.1.4:
+ resolution: {integrity: sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==}
+ dev: true
+
+ /character-entities/1.2.4:
+ resolution: {integrity: sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==}
+ dev: true
+
+ /character-reference-invalid/1.1.4:
+ resolution: {integrity: sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==}
+ dev: true
+
+ /check-error/1.0.2:
+ resolution: {integrity: sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA==}
+ dev: true
+
+ /chokidar/3.5.3:
+ resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==}
+ engines: {node: '>= 8.10.0'}
+ dependencies:
+ anymatch: 3.1.2
+ braces: 3.0.2
+ glob-parent: 5.1.2
+ is-binary-path: 2.1.0
+ is-glob: 4.0.3
+ normalize-path: 3.0.0
+ readdirp: 3.6.0
+ optionalDependencies:
+ fsevents: 2.3.2
+ dev: true
+
+ /chownr/2.0.0:
+ resolution: {integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==}
+ engines: {node: '>=10'}
+
+ /classnames/2.3.2:
+ resolution: {integrity: sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw==}
+ dev: true
+
+ /clean-stack/2.2.0:
+ resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==}
+ engines: {node: '>=6'}
+
+ /cliui/6.0.0:
+ resolution: {integrity: sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==}
+ dependencies:
+ string-width: 4.2.3
+ strip-ansi: 6.0.1
+ wrap-ansi: 6.2.0
+ dev: true
+
+ /cliui/7.0.4:
+ resolution: {integrity: sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==}
+ dependencies:
+ string-width: 4.2.3
+ strip-ansi: 6.0.1
+ wrap-ansi: 7.0.0
+
+ /close-with-grace/1.1.0:
+ resolution: {integrity: sha512-6cCp71Y5tKw1o9sGVBOa9OwY4vJ+YoLpFcWiTt9YCBhYlcQi0z68EiiN9mJ6/401Za6TZ5YOZg012IHHZt15lw==}
+ dev: false
+
+ /codemirror-graphql/2.0.0_4qrm3am6isowdlonkgtqtogfxe:
+ resolution: {integrity: sha512-4trIaV9LYo/yRMu3s5qf7ASrKQjcCGrVfqOwaFsdjjcG8koh93gCzZ+csMhe3n6A7lMLWEpPdFWBIepKGV7qQg==}
+ peerDependencies:
+ '@codemirror/language': ^0.20.0
+ codemirror: ^5.65.3
+ graphql: ^15.5.0 || ^16.0.0
+ dependencies:
+ '@codemirror/language': 0.20.2
+ codemirror: 5.65.8
+ graphql: 16.6.0
+ graphql-language-service: 5.1.0_graphql@16.6.0
+ dev: true
+
+ /codemirror/5.65.8:
+ resolution: {integrity: sha512-TNGkSkkoAsmZSf6W6g35LMVQJBHKasc2CKwhr/fTxSYun7cn6J+CbtyNjV/MYlFVkNTsqZoviegyCZimWhoMMA==}
+ dev: true
+
+ /color-convert/1.9.3:
+ resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==}
+ dependencies:
+ color-name: 1.1.3
+
+ /color-convert/2.0.1:
+ resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
+ engines: {node: '>=7.0.0'}
+ dependencies:
+ color-name: 1.1.4
+
+ /color-name/1.1.3:
+ resolution: {integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==}
+
+ /color-name/1.1.4:
+ resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
+
+ /color-support/1.1.3:
+ resolution: {integrity: sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==}
+ hasBin: true
+
+ /colorette/2.0.19:
+ resolution: {integrity: sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==}
+ dev: false
+
+ /combined-stream/1.0.8:
+ resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==}
+ engines: {node: '>= 0.8'}
+ dependencies:
+ delayed-stream: 1.0.0
+ dev: true
+
+ /comma-separated-tokens/1.0.8:
+ resolution: {integrity: sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==}
+ dev: true
+
+ /commander/9.4.0:
+ resolution: {integrity: sha512-sRPT+umqkz90UA8M1yqYfnHlZA7fF6nSphDtxeywPZ49ysjxDQybzk13CL+mXekDRG92skbcqCLVovuCusNmFw==}
+ engines: {node: ^12.20.0 || >=14}
+ dev: false
+
+ /commist/3.1.2:
+ resolution: {integrity: sha512-cyQMbb5GiGHD/MiTqqI4V5fySqVKS8WUtN0iwxZ3sd6GYZ+8gzo6aYiT2EhBuwzVoBjsAu8F9mnlqlnXGqADZg==}
+ dev: false
+
+ /commondir/1.0.1:
+ resolution: {integrity: sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==}
+ dev: true
+
+ /concat-map/0.0.1:
+ resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}
+
+ /concat-stream/1.6.2:
+ resolution: {integrity: sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==}
+ engines: {'0': node >= 0.8}
+ dependencies:
+ buffer-from: 1.1.2
+ inherits: 2.0.4
+ readable-stream: 2.3.7
+ typedarray: 0.0.6
+ dev: true
+
+ /concat-stream/2.0.0:
+ resolution: {integrity: sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==}
+ engines: {'0': node >= 6.0}
+ dependencies:
+ buffer-from: 1.1.2
+ inherits: 2.0.4
+ readable-stream: 3.6.0
+ typedarray: 0.0.6
+ dev: true
+
+ /console-control-strings/1.1.0:
+ resolution: {integrity: sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==}
+
+ /content-disposition/0.5.4:
+ resolution: {integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==}
+ engines: {node: '>= 0.6'}
+ dependencies:
+ safe-buffer: 5.2.1
+
+ /convert-source-map/1.8.0:
+ resolution: {integrity: sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==}
+ dependencies:
+ safe-buffer: 5.1.2
+
+ /cookie/0.5.0:
+ resolution: {integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==}
+ engines: {node: '>= 0.6'}
+
+ /copy-to-clipboard/3.3.2:
+ resolution: {integrity: sha512-Vme1Z6RUDzrb6xAI7EZlVZ5uvOk2F//GaxKUxajDqm9LhOVM1inxNAD2vy+UZDYsd0uyA9s7b3/FVZPSxqrCfg==}
+ dependencies:
+ toggle-selection: 1.0.6
+ dev: true
+
+ /core-js-pure/3.25.1:
+ resolution: {integrity: sha512-7Fr74bliUDdeJCBMxkkIuQ4xfxn/SwrVg+HkJUAoNEXVqYLv55l6Af0dJ5Lq2YBUW9yKqSkLXaS5SYPK6MGa/A==}
+ requiresBuild: true
+ dev: true
+
+ /core-util-is/1.0.3:
+ resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==}
+ dev: true
+
+ /cosmiconfig/5.2.1:
+ resolution: {integrity: sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA==}
+ engines: {node: '>=4'}
+ dependencies:
+ import-fresh: 2.0.0
+ is-directory: 0.3.1
+ js-yaml: 3.14.1
+ parse-json: 4.0.0
+
+ /cross-fetch/3.1.5:
+ resolution: {integrity: sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw==}
+ dependencies:
+ node-fetch: 2.6.7
+ transitivePeerDependencies:
+ - encoding
+
+ /cross-spawn/7.0.3:
+ resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==}
+ engines: {node: '>= 8'}
+ dependencies:
+ path-key: 3.1.1
+ shebang-command: 2.0.0
+ which: 2.0.2
+
+ /css.escape/1.5.1:
+ resolution: {integrity: sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg==}
+ dev: true
+
+ /csstype/3.1.1:
+ resolution: {integrity: sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==}
+ dev: true
+
+ /cuid/2.1.8:
+ resolution: {integrity: sha512-xiEMER6E7TlTPnDxrM4eRiC6TRgjNX9xzEZ5U/Se2YJKr7Mq4pJn/2XEHjl3STcSh96GmkHPcBXLES8M29wyyg==}
+
+ /dateformat/4.6.3:
+ resolution: {integrity: sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==}
+ dev: false
+
+ /debug/2.6.9:
+ resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==}
+ peerDependencies:
+ supports-color: '*'
+ peerDependenciesMeta:
+ supports-color:
+ optional: true
+ dependencies:
+ ms: 2.0.0
+
+ /debug/3.2.7:
+ resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==}
+ peerDependencies:
+ supports-color: '*'
+ peerDependenciesMeta:
+ supports-color:
+ optional: true
+ dependencies:
+ ms: 2.1.3
+ dev: true
+
+ /debug/4.3.4:
+ resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==}
+ engines: {node: '>=6.0'}
+ peerDependencies:
+ supports-color: '*'
+ peerDependenciesMeta:
+ supports-color:
+ optional: true
+ dependencies:
+ ms: 2.1.2
+
+ /decamelize-keys/1.1.0:
+ resolution: {integrity: sha512-ocLWuYzRPoS9bfiSdDd3cxvrzovVMZnRDVEzAs+hWIVXGDbHxWMECij2OBuyB/An0FFW/nLuq6Kv1i/YC5Qfzg==}
+ engines: {node: '>=0.10.0'}
+ dependencies:
+ decamelize: 1.2.0
+ map-obj: 1.0.1
+ dev: true
+
+ /decamelize/1.2.0:
+ resolution: {integrity: sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /deep-eql/3.0.1:
+ resolution: {integrity: sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==}
+ engines: {node: '>=0.12'}
+ dependencies:
+ type-detect: 4.0.8
+ dev: true
+
+ /deep-extend/0.6.0:
+ resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==}
+ engines: {node: '>=4.0.0'}
+ dev: true
+
+ /deep-is/0.1.4:
+ resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==}
+ dev: true
+
+ /deepmerge/4.2.2:
+ resolution: {integrity: sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==}
+ engines: {node: '>=0.10.0'}
+
+ /default-require-extensions/3.0.0:
+ resolution: {integrity: sha512-ek6DpXq/SCpvjhpFsLFRVtIxJCRw6fUR42lYMVZuUMK7n8eMz4Uh5clckdBjEpLhn/gEBZo7hDJnJcwdKLKQjg==}
+ engines: {node: '>=8'}
+ dependencies:
+ strip-bom: 4.0.0
+ dev: true
+
+ /define-lazy-prop/2.0.0:
+ resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==}
+ engines: {node: '>=8'}
+ dev: false
+
+ /define-properties/1.1.4:
+ resolution: {integrity: sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ has-property-descriptors: 1.0.0
+ object-keys: 1.1.1
+ dev: true
+
+ /delayed-stream/1.0.0:
+ resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
+ engines: {node: '>=0.4.0'}
+ dev: true
+
+ /delegates/1.0.0:
+ resolution: {integrity: sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==}
+
+ /denque/2.1.0:
+ resolution: {integrity: sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==}
+ engines: {node: '>=0.10'}
+ dev: false
+
+ /depd/1.1.2:
+ resolution: {integrity: sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==}
+ engines: {node: '>= 0.6'}
+ optional: true
+
+ /depd/2.0.0:
+ resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==}
+ engines: {node: '>= 0.8'}
+
+ /desm/1.3.0:
+ resolution: {integrity: sha512-RvlHN2gfYA0BpCfjpWzCdQeR6p5U+84f5DzcirLow86UA/OcpwuOqXRC4Oz0bG9rzcJPVtMT6ZgNtjp4qh+uqA==}
+ dev: false
+
+ /destroy/1.2.0:
+ resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==}
+ engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}
+
+ /detect-libc/2.0.1:
+ resolution: {integrity: sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==}
+ engines: {node: '>=8'}
+
+ /detect-node-es/1.1.0:
+ resolution: {integrity: sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==}
+ dev: true
+
+ /diff/4.0.2:
+ resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==}
+ engines: {node: '>=0.3.1'}
+ dev: true
+
+ /dir-glob/3.0.1:
+ resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==}
+ engines: {node: '>=8'}
+ dependencies:
+ path-type: 4.0.0
+ dev: true
+
+ /doctrine/2.1.0:
+ resolution: {integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==}
+ engines: {node: '>=0.10.0'}
+ dependencies:
+ esutils: 2.0.3
+ dev: true
+
+ /doctrine/3.0.0:
+ resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==}
+ engines: {node: '>=6.0.0'}
+ dependencies:
+ esutils: 2.0.3
+ dev: true
+
+ /dompurify/2.3.3:
+ resolution: {integrity: sha512-dqnqRkPMAjOZE0FogZ+ceJNM2dZ3V/yNOuFB7+39qpO93hHhfRpHw3heYQC7DPK9FqbQTfBKUJhiSfz4MvXYwg==}
+ dev: true
+
+ /dotenv-expand/8.0.3:
+ resolution: {integrity: sha512-SErOMvge0ZUyWd5B0NXMQlDkN+8r+HhVUsxgOO7IoPDOdDRD2JjExpN6y3KnFR66jsJMwSn1pqIivhU5rcJiNg==}
+ engines: {node: '>=12'}
+ dev: false
+
+ /dotenv/16.0.2:
+ resolution: {integrity: sha512-JvpYKUmzQhYoIFgK2MOnF3bciIZoItIIoryihy0rIA+H4Jy0FmgyKYAHCTN98P5ybGSJcIFbh6QKeJdtZd1qhA==}
+ engines: {node: '>=12'}
+ dev: false
+
+ /drange/1.1.1:
+ resolution: {integrity: sha512-pYxfDYpued//QpnLIm4Avk7rsNtAtQkUES2cwAYSvD/wd2pKD71gN2Ebj3e7klzXwjocvE8c5vx/1fxwpqmSxA==}
+ engines: {node: '>=4'}
+ dev: true
+
+ /dtsgenerator/3.16.1:
+ resolution: {integrity: sha512-nol1a7xCSWZxu5jAYvLDvgEY03W4fjnGy7BJRd4Jc713D/Q4FI42N1NCFVz65yyb479HaDGsHo2kb3Aev92/sA==}
+ engines: {node: '>= 14.0'}
+ hasBin: true
+ dependencies:
+ commander: 9.4.0
+ cross-fetch: 3.1.5
+ debug: 4.3.4
+ glob: 8.0.3
+ https-proxy-agent: 5.0.1
+ js-yaml: 4.1.0
+ tslib: 2.4.0
+ typescript: 4.8.3
+ transitivePeerDependencies:
+ - encoding
+ - supports-color
+ dev: false
+
+ /ecdsa-sig-formatter/1.0.11:
+ resolution: {integrity: sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==}
+ dependencies:
+ safe-buffer: 5.2.1
+
+ /ee-first/1.1.1:
+ resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==}
+
+ /electron-to-chromium/1.4.253:
+ resolution: {integrity: sha512-1pezJ2E1UyBTGbA7fUlHdPSXQw1k+82VhTFLG5G0AUqLGvsZqFzleOblceqegZzxYX4kC7hGEEdzIQI9RZ1Cuw==}
+ dev: true
+
+ /elliptic/6.5.4:
+ resolution: {integrity: sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==}
+ dependencies:
+ bn.js: 4.12.0
+ brorand: 1.1.0
+ hash.js: 1.1.7
+ hmac-drbg: 1.0.1
+ inherits: 2.0.4
+ minimalistic-assert: 1.0.1
+ minimalistic-crypto-utils: 1.0.1
+ dev: false
+
+ /emoji-regex/8.0.0:
+ resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
+
+ /encodeurl/1.0.2:
+ resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==}
+ engines: {node: '>= 0.8'}
+
+ /encoding/0.1.13:
+ resolution: {integrity: sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==}
+ requiresBuild: true
+ dependencies:
+ iconv-lite: 0.6.3
+ optional: true
+
+ /end-of-stream/1.4.4:
+ resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==}
+ dependencies:
+ once: 1.4.0
+ dev: false
+
+ /entities/2.1.0:
+ resolution: {integrity: sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==}
+ dev: true
+
+ /entities/2.2.0:
+ resolution: {integrity: sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==}
+ dev: true
+
+ /env-paths/2.2.1:
+ resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==}
+ engines: {node: '>=6'}
+ optional: true
+
+ /env-schema/5.0.0:
+ resolution: {integrity: sha512-91u95Nlny+LmjF3Mk96j8k6k+GOXcFEdMUv3bWQjtM2l+KTAdW6qITiv8kHYO8vCaCScXpJTDyd1AFnCQTnYaQ==}
+ dependencies:
+ ajv: 8.11.0
+ dotenv: 16.0.2
+ dotenv-expand: 8.0.3
+ dev: false
+
+ /err-code/2.0.3:
+ resolution: {integrity: sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==}
+ optional: true
+
+ /error-ex/1.3.2:
+ resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==}
+ dependencies:
+ is-arrayish: 0.2.1
+
+ /es-abstract/1.20.2:
+ resolution: {integrity: sha512-XxXQuVNrySBNlEkTYJoDNFe5+s2yIOpzq80sUHEdPdQr0S5nTLz4ZPPPswNIpKseDDUS5yghX1gfLIHQZ1iNuQ==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ call-bind: 1.0.2
+ es-to-primitive: 1.2.1
+ function-bind: 1.1.1
+ function.prototype.name: 1.1.5
+ get-intrinsic: 1.1.3
+ get-symbol-description: 1.0.0
+ has: 1.0.3
+ has-property-descriptors: 1.0.0
+ has-symbols: 1.0.3
+ internal-slot: 1.0.3
+ is-callable: 1.2.6
+ is-negative-zero: 2.0.2
+ is-regex: 1.1.4
+ is-shared-array-buffer: 1.0.2
+ is-string: 1.0.7
+ is-weakref: 1.0.2
+ object-inspect: 1.12.2
+ object-keys: 1.1.1
+ object.assign: 4.1.4
+ regexp.prototype.flags: 1.4.3
+ string.prototype.trimend: 1.0.5
+ string.prototype.trimstart: 1.0.5
+ unbox-primitive: 1.0.2
+ dev: true
+
+ /es-main/1.2.0:
+ resolution: {integrity: sha512-A4tCSY43O/mH4rHjG1n0mI4DhK2BmKDr8Lk8PXK/GBB6zxGFGmIW4bbkbTQ2Gi9iNamMZ9vbGrwjZOIeiM7vMw==}
+ dev: false
+
+ /es-shim-unscopables/1.0.0:
+ resolution: {integrity: sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==}
+ dependencies:
+ has: 1.0.3
+ dev: true
+
+ /es-to-primitive/1.2.1:
+ resolution: {integrity: sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ is-callable: 1.2.6
+ is-date-object: 1.0.5
+ is-symbol: 1.0.4
+ dev: true
+
+ /es6-error/4.1.1:
+ resolution: {integrity: sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==}
+ dev: true
+
+ /esbuild-android-64/0.15.7:
+ resolution: {integrity: sha512-p7rCvdsldhxQr3YHxptf1Jcd86dlhvc3EQmQJaZzzuAxefO9PvcI0GLOa5nCWem1AJ8iMRu9w0r5TG8pHmbi9w==}
+ engines: {node: '>=12'}
+ cpu: [x64]
+ os: [android]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-android-arm64/0.15.7:
+ resolution: {integrity: sha512-L775l9ynJT7rVqRM5vo+9w5g2ysbOCfsdLV4CWanTZ1k/9Jb3IYlQ06VCI1edhcosTYJRECQFJa3eAvkx72eyQ==}
+ engines: {node: '>=12'}
+ cpu: [arm64]
+ os: [android]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-darwin-64/0.15.7:
+ resolution: {integrity: sha512-KGPt3r1c9ww009t2xLB6Vk0YyNOXh7hbjZ3EecHoVDxgtbUlYstMPDaReimKe6eOEfyY4hBEEeTvKwPsiH5WZg==}
+ engines: {node: '>=12'}
+ cpu: [x64]
+ os: [darwin]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-darwin-arm64/0.15.7:
+ resolution: {integrity: sha512-kBIHvtVqbSGajN88lYMnR3aIleH3ABZLLFLxwL2stiuIGAjGlQW741NxVTpUHQXUmPzxi6POqc9npkXa8AcSZQ==}
+ engines: {node: '>=12'}
+ cpu: [arm64]
+ os: [darwin]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-freebsd-64/0.15.7:
+ resolution: {integrity: sha512-hESZB91qDLV5MEwNxzMxPfbjAhOmtfsr9Wnuci7pY6TtEh4UDuevmGmkUIjX/b+e/k4tcNBMf7SRQ2mdNuK/HQ==}
+ engines: {node: '>=12'}
+ cpu: [x64]
+ os: [freebsd]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-freebsd-arm64/0.15.7:
+ resolution: {integrity: sha512-dLFR0ChH5t+b3J8w0fVKGvtwSLWCv7GYT2Y2jFGulF1L5HftQLzVGN+6pi1SivuiVSmTh28FwUhi9PwQicXI6Q==}
+ engines: {node: '>=12'}
+ cpu: [arm64]
+ os: [freebsd]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-linux-32/0.15.7:
+ resolution: {integrity: sha512-v3gT/LsONGUZcjbt2swrMjwxo32NJzk+7sAgtxhGx1+ZmOFaTRXBAi1PPfgpeo/J//Un2jIKm/I+qqeo4caJvg==}
+ engines: {node: '>=12'}
+ cpu: [ia32]
+ os: [linux]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-linux-64/0.15.7:
+ resolution: {integrity: sha512-LxXEfLAKwOVmm1yecpMmWERBshl+Kv5YJ/1KnyAr6HRHFW8cxOEsEfisD3sVl/RvHyW//lhYUVSuy9jGEfIRAQ==}
+ engines: {node: '>=12'}
+ cpu: [x64]
+ os: [linux]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-linux-arm/0.15.7:
+ resolution: {integrity: sha512-JKgAHtMR5f75wJTeuNQbyznZZa+pjiUHV7sRZp42UNdyXC6TiUYMW/8z8yIBAr2Fpad8hM1royZKQisqPABPvQ==}
+ engines: {node: '>=12'}
+ cpu: [arm]
+ os: [linux]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-linux-arm64/0.15.7:
+ resolution: {integrity: sha512-P3cfhudpzWDkglutWgXcT2S7Ft7o2e3YDMrP1n0z2dlbUZghUkKCyaWw0zhp4KxEEzt/E7lmrtRu/pGWnwb9vw==}
+ engines: {node: '>=12'}
+ cpu: [arm64]
+ os: [linux]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-linux-mips64le/0.15.7:
+ resolution: {integrity: sha512-T7XKuxl0VpeFLCJXub6U+iybiqh0kM/bWOTb4qcPyDDwNVhLUiPcGdG2/0S7F93czUZOKP57YiLV8YQewgLHKw==}
+ engines: {node: '>=12'}
+ cpu: [mips64el]
+ os: [linux]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-linux-ppc64le/0.15.7:
+ resolution: {integrity: sha512-6mGuC19WpFN7NYbecMIJjeQgvDb5aMuvyk0PDYBJrqAEMkTwg3Z98kEKuCm6THHRnrgsdr7bp4SruSAxEM4eJw==}
+ engines: {node: '>=12'}
+ cpu: [ppc64]
+ os: [linux]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-linux-riscv64/0.15.7:
+ resolution: {integrity: sha512-uUJsezbswAYo/X7OU/P+PuL/EI9WzxsEQXDekfwpQ23uGiooxqoLFAPmXPcRAt941vjlY9jtITEEikWMBr+F/g==}
+ engines: {node: '>=12'}
+ cpu: [riscv64]
+ os: [linux]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-linux-s390x/0.15.7:
+ resolution: {integrity: sha512-+tO+xOyTNMc34rXlSxK7aCwJgvQyffqEM5MMdNDEeMU3ss0S6wKvbBOQfgd5jRPblfwJ6b+bKiz0g5nABpY0QQ==}
+ engines: {node: '>=12'}
+ cpu: [s390x]
+ os: [linux]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-netbsd-64/0.15.7:
+ resolution: {integrity: sha512-yVc4Wz+Pu3cP5hzm5kIygNPrjar/v5WCSoRmIjCPWfBVJkZNb5brEGKUlf+0Y759D48BCWa0WHrWXaNy0DULTQ==}
+ engines: {node: '>=12'}
+ cpu: [x64]
+ os: [netbsd]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-openbsd-64/0.15.7:
+ resolution: {integrity: sha512-GsimbwC4FSR4lN3wf8XmTQ+r8/0YSQo21rWDL0XFFhLHKlzEA4SsT1Tl8bPYu00IU6UWSJ+b3fG/8SB69rcuEQ==}
+ engines: {node: '>=12'}
+ cpu: [x64]
+ os: [openbsd]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-sunos-64/0.15.7:
+ resolution: {integrity: sha512-8CDI1aL/ts0mDGbWzjEOGKXnU7p3rDzggHSBtVryQzkSOsjCHRVe0iFYUuhczlxU1R3LN/E7HgUO4NXzGGP/Ag==}
+ engines: {node: '>=12'}
+ cpu: [x64]
+ os: [sunos]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-windows-32/0.15.7:
+ resolution: {integrity: sha512-cOnKXUEPS8EGCzRSFa1x6NQjGhGsFlVgjhqGEbLTPsA7x4RRYiy2RKoArNUU4iR2vHmzqS5Gr84MEumO/wxYKA==}
+ engines: {node: '>=12'}
+ cpu: [ia32]
+ os: [win32]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-windows-64/0.15.7:
+ resolution: {integrity: sha512-7MI08Ec2sTIDv+zH6StNBKO+2hGUYIT42GmFyW6MBBWWtJhTcQLinKS6ldIN1d52MXIbiJ6nXyCJ+LpL4jBm3Q==}
+ engines: {node: '>=12'}
+ cpu: [x64]
+ os: [win32]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild-windows-arm64/0.15.7:
+ resolution: {integrity: sha512-R06nmqBlWjKHddhRJYlqDd3Fabx9LFdKcjoOy08YLimwmsswlFBJV4rXzZCxz/b7ZJXvrZgj8DDv1ewE9+StMw==}
+ engines: {node: '>=12'}
+ cpu: [arm64]
+ os: [win32]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /esbuild/0.15.7:
+ resolution: {integrity: sha512-7V8tzllIbAQV1M4QoE52ImKu8hT/NLGlGXkiDsbEU5PS6K8Mn09ZnYoS+dcmHxOS9CRsV4IRAMdT3I67IyUNXw==}
+ engines: {node: '>=12'}
+ hasBin: true
+ requiresBuild: true
+ optionalDependencies:
+ '@esbuild/linux-loong64': 0.15.7
+ esbuild-android-64: 0.15.7
+ esbuild-android-arm64: 0.15.7
+ esbuild-darwin-64: 0.15.7
+ esbuild-darwin-arm64: 0.15.7
+ esbuild-freebsd-64: 0.15.7
+ esbuild-freebsd-arm64: 0.15.7
+ esbuild-linux-32: 0.15.7
+ esbuild-linux-64: 0.15.7
+ esbuild-linux-arm: 0.15.7
+ esbuild-linux-arm64: 0.15.7
+ esbuild-linux-mips64le: 0.15.7
+ esbuild-linux-ppc64le: 0.15.7
+ esbuild-linux-riscv64: 0.15.7
+ esbuild-linux-s390x: 0.15.7
+ esbuild-netbsd-64: 0.15.7
+ esbuild-openbsd-64: 0.15.7
+ esbuild-sunos-64: 0.15.7
+ esbuild-windows-32: 0.15.7
+ esbuild-windows-64: 0.15.7
+ esbuild-windows-arm64: 0.15.7
+ dev: true
+
+ /escalade/3.1.1:
+ resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==}
+ engines: {node: '>=6'}
+
+ /escape-goat/4.0.0:
+ resolution: {integrity: sha512-2Sd4ShcWxbx6OY1IHyla/CVNwvg7XwZVoXZHcSu9w9SReNP1EzzD5T8NWKIR38fIqEns9kDWKUQTXXAmlDrdPg==}
+ engines: {node: '>=12'}
+ dev: false
+
+ /escape-html/1.0.3:
+ resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==}
+
+ /escape-string-regexp/1.0.5:
+ resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==}
+ engines: {node: '>=0.8.0'}
+
+ /escape-string-regexp/2.0.0:
+ resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /escape-string-regexp/4.0.0:
+ resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==}
+ engines: {node: '>=10'}
+ dev: true
+
+ /eslint-config-standard-jsx/11.0.0_g6sljrn72fr5r5n2js2jyjg7bi:
+ resolution: {integrity: sha512-+1EV/R0JxEK1L0NGolAr8Iktm3Rgotx3BKwgaX+eAuSX8D952LULKtjgZD3F+e6SvibONnhLwoTi9DPxN5LvvQ==}
+ peerDependencies:
+ eslint: ^8.8.0
+ eslint-plugin-react: ^7.28.0
+ dependencies:
+ eslint: 8.23.1
+ eslint-plugin-react: 7.31.8_eslint@8.23.1
+ dev: true
+
+ /eslint-config-standard/17.0.0_4nulviyjkaspo7v2xlghuwxbf4:
+ resolution: {integrity: sha512-/2ks1GKyqSOkH7JFvXJicu0iMpoojkwB+f5Du/1SC0PtBL+s8v30k9njRZ21pm2drKYm2342jFnGWzttxPmZVg==}
+ peerDependencies:
+ eslint: ^8.0.1
+ eslint-plugin-import: ^2.25.2
+ eslint-plugin-n: ^15.0.0
+ eslint-plugin-promise: ^6.0.0
+ dependencies:
+ eslint: 8.23.1
+ eslint-plugin-import: 2.26.0_eslint@8.23.1
+ eslint-plugin-n: 15.2.5_eslint@8.23.1
+ eslint-plugin-promise: 6.0.1_eslint@8.23.1
+ dev: true
+
+ /eslint-formatter-pretty/4.1.0:
+ resolution: {integrity: sha512-IsUTtGxF1hrH6lMWiSl1WbGaiP01eT6kzywdY1U+zLc0MP+nwEnUiS9UI8IaOTUhTeQJLlCEWIbXINBH4YJbBQ==}
+ engines: {node: '>=10'}
+ dependencies:
+ '@types/eslint': 7.29.0
+ ansi-escapes: 4.3.2
+ chalk: 4.1.2
+ eslint-rule-docs: 1.1.235
+ log-symbols: 4.1.0
+ plur: 4.0.0
+ string-width: 4.2.3
+ supports-hyperlinks: 2.3.0
+ dev: true
+
+ /eslint-import-resolver-node/0.3.6:
+ resolution: {integrity: sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==}
+ dependencies:
+ debug: 3.2.7
+ resolve: 1.22.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /eslint-module-utils/2.7.4_oxfrjumrtiktpkw7r2zaom7f74:
+ resolution: {integrity: sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==}
+ engines: {node: '>=4'}
+ peerDependencies:
+ '@typescript-eslint/parser': '*'
+ eslint: '*'
+ eslint-import-resolver-node: '*'
+ eslint-import-resolver-typescript: '*'
+ eslint-import-resolver-webpack: '*'
+ peerDependenciesMeta:
+ '@typescript-eslint/parser':
+ optional: true
+ eslint:
+ optional: true
+ eslint-import-resolver-node:
+ optional: true
+ eslint-import-resolver-typescript:
+ optional: true
+ eslint-import-resolver-webpack:
+ optional: true
+ dependencies:
+ debug: 3.2.7
+ eslint: 8.23.1
+ eslint-import-resolver-node: 0.3.6
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /eslint-plugin-es/4.1.0_eslint@8.23.1:
+ resolution: {integrity: sha512-GILhQTnjYE2WorX5Jyi5i4dz5ALWxBIdQECVQavL6s7cI76IZTDWleTHkxz/QT3kvcs2QlGHvKLYsSlPOlPXnQ==}
+ engines: {node: '>=8.10.0'}
+ peerDependencies:
+ eslint: '>=4.19.1'
+ dependencies:
+ eslint: 8.23.1
+ eslint-utils: 2.1.0
+ regexpp: 3.2.0
+ dev: true
+
+ /eslint-plugin-import/2.26.0_eslint@8.23.1:
+ resolution: {integrity: sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==}
+ engines: {node: '>=4'}
+ peerDependencies:
+ '@typescript-eslint/parser': '*'
+ eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8
+ peerDependenciesMeta:
+ '@typescript-eslint/parser':
+ optional: true
+ dependencies:
+ array-includes: 3.1.5
+ array.prototype.flat: 1.3.0
+ debug: 2.6.9
+ doctrine: 2.1.0
+ eslint: 8.23.1
+ eslint-import-resolver-node: 0.3.6
+ eslint-module-utils: 2.7.4_oxfrjumrtiktpkw7r2zaom7f74
+ has: 1.0.3
+ is-core-module: 2.10.0
+ is-glob: 4.0.3
+ minimatch: 3.1.2
+ object.values: 1.1.5
+ resolve: 1.22.1
+ tsconfig-paths: 3.14.1
+ transitivePeerDependencies:
+ - eslint-import-resolver-typescript
+ - eslint-import-resolver-webpack
+ - supports-color
+ dev: true
+
+ /eslint-plugin-n/15.2.5_eslint@8.23.1:
+ resolution: {integrity: sha512-8+BYsqiyZfpu6NXmdLOXVUfk8IocpCjpd8nMRRH0A9ulrcemhb2VI9RSJMEy5udx++A/YcVPD11zT8hpFq368g==}
+ engines: {node: '>=12.22.0'}
+ peerDependencies:
+ eslint: '>=7.0.0'
+ dependencies:
+ builtins: 5.0.1
+ eslint: 8.23.1
+ eslint-plugin-es: 4.1.0_eslint@8.23.1
+ eslint-utils: 3.0.0_eslint@8.23.1
+ ignore: 5.2.0
+ is-core-module: 2.10.0
+ minimatch: 3.1.2
+ resolve: 1.22.1
+ semver: 7.3.7
+ dev: true
+
+ /eslint-plugin-promise/6.0.1_eslint@8.23.1:
+ resolution: {integrity: sha512-uM4Tgo5u3UWQiroOyDEsYcVMOo7re3zmno0IZmB5auxoaQNIceAbXEkSt8RNrKtaYehARHG06pYK6K1JhtP0Zw==}
+ engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+ peerDependencies:
+ eslint: ^7.0.0 || ^8.0.0
+ dependencies:
+ eslint: 8.23.1
+ dev: true
+
+ /eslint-plugin-react/7.31.8_eslint@8.23.1:
+ resolution: {integrity: sha512-5lBTZmgQmARLLSYiwI71tiGVTLUuqXantZM6vlSY39OaDSV0M7+32K5DnLkmFrwTe+Ksz0ffuLUC91RUviVZfw==}
+ engines: {node: '>=4'}
+ peerDependencies:
+ eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8
+ dependencies:
+ array-includes: 3.1.5
+ array.prototype.flatmap: 1.3.0
+ doctrine: 2.1.0
+ eslint: 8.23.1
+ estraverse: 5.3.0
+ jsx-ast-utils: 3.3.3
+ minimatch: 3.1.2
+ object.entries: 1.1.5
+ object.fromentries: 2.0.5
+ object.hasown: 1.1.1
+ object.values: 1.1.5
+ prop-types: 15.8.1
+ resolve: 2.0.0-next.4
+ semver: 6.3.0
+ string.prototype.matchall: 4.0.7
+ dev: true
+
+ /eslint-rule-docs/1.1.235:
+ resolution: {integrity: sha512-+TQ+x4JdTnDoFEXXb3fDvfGOwnyNV7duH8fXWTPD1ieaBmB8omj7Gw/pMBBu4uI2uJCCU8APDaQJzWuXnTsH4A==}
+ dev: true
+
+ /eslint-scope/7.1.1:
+ resolution: {integrity: sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==}
+ engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+ dependencies:
+ esrecurse: 4.3.0
+ estraverse: 5.3.0
+ dev: true
+
+ /eslint-utils/2.1.0:
+ resolution: {integrity: sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==}
+ engines: {node: '>=6'}
+ dependencies:
+ eslint-visitor-keys: 1.3.0
+ dev: true
+
+ /eslint-utils/3.0.0_eslint@8.23.1:
+ resolution: {integrity: sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==}
+ engines: {node: ^10.0.0 || ^12.0.0 || >= 14.0.0}
+ peerDependencies:
+ eslint: '>=5'
+ dependencies:
+ eslint: 8.23.1
+ eslint-visitor-keys: 2.1.0
+ dev: true
+
+ /eslint-visitor-keys/1.3.0:
+ resolution: {integrity: sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==}
+ engines: {node: '>=4'}
+ dev: true
+
+ /eslint-visitor-keys/2.1.0:
+ resolution: {integrity: sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==}
+ engines: {node: '>=10'}
+ dev: true
+
+ /eslint-visitor-keys/3.3.0:
+ resolution: {integrity: sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==}
+ engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+ dev: true
+
+ /eslint/8.23.1:
+ resolution: {integrity: sha512-w7C1IXCc6fNqjpuYd0yPlcTKKmHlHHktRkzmBPZ+7cvNBQuiNjx0xaMTjAJGCafJhQkrFJooREv0CtrVzmHwqg==}
+ engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+ hasBin: true
+ dependencies:
+ '@eslint/eslintrc': 1.3.2
+ '@humanwhocodes/config-array': 0.10.4
+ '@humanwhocodes/gitignore-to-minimatch': 1.0.2
+ '@humanwhocodes/module-importer': 1.0.1
+ ajv: 6.12.6
+ chalk: 4.1.2
+ cross-spawn: 7.0.3
+ debug: 4.3.4
+ doctrine: 3.0.0
+ escape-string-regexp: 4.0.0
+ eslint-scope: 7.1.1
+ eslint-utils: 3.0.0_eslint@8.23.1
+ eslint-visitor-keys: 3.3.0
+ espree: 9.4.0
+ esquery: 1.4.0
+ esutils: 2.0.3
+ fast-deep-equal: 3.1.3
+ file-entry-cache: 6.0.1
+ find-up: 5.0.0
+ glob-parent: 6.0.2
+ globals: 13.17.0
+ globby: 11.1.0
+ grapheme-splitter: 1.0.4
+ ignore: 5.2.0
+ import-fresh: 3.3.0
+ imurmurhash: 0.1.4
+ is-glob: 4.0.3
+ js-sdsl: 4.1.4
+ js-yaml: 4.1.0
+ json-stable-stringify-without-jsonify: 1.0.1
+ levn: 0.4.1
+ lodash.merge: 4.6.2
+ minimatch: 3.1.2
+ natural-compare: 1.4.0
+ optionator: 0.9.1
+ regexpp: 3.2.0
+ strip-ansi: 6.0.1
+ strip-json-comments: 3.1.1
+ text-table: 0.2.0
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /espree/9.4.0:
+ resolution: {integrity: sha512-DQmnRpLj7f6TgN/NYb0MTzJXL+vJF9h3pHy4JhCIs3zwcgez8xmGg3sXHcEO97BrmO2OSvCwMdfdlyl+E9KjOw==}
+ engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+ dependencies:
+ acorn: 8.8.0
+ acorn-jsx: 5.3.2_acorn@8.8.0
+ eslint-visitor-keys: 3.3.0
+ dev: true
+
+ /esprima/4.0.1:
+ resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==}
+ engines: {node: '>=4'}
+ hasBin: true
+
+ /esquery/1.4.0:
+ resolution: {integrity: sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==}
+ engines: {node: '>=0.10'}
+ dependencies:
+ estraverse: 5.3.0
+ dev: true
+
+ /esrecurse/4.3.0:
+ resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==}
+ engines: {node: '>=4.0'}
+ dependencies:
+ estraverse: 5.3.0
+ dev: true
+
+ /estraverse/5.3.0:
+ resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==}
+ engines: {node: '>=4.0'}
+ dev: true
+
+ /esutils/2.0.3:
+ resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /etag/1.8.1:
+ resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==}
+ engines: {node: '>= 0.6'}
+
+ /event-target-shim/5.0.1:
+ resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==}
+ engines: {node: '>=6'}
+
+ /events-to-array/1.1.2:
+ resolution: {integrity: sha512-inRWzRY7nG+aXZxBzEqYKB3HPgwflZRopAjDCHv0whhRx+MTUr1ei0ICZUypdyE0HRm4L2d5VEcIqLD6yl+BFA==}
+ dev: true
+
+ /events.on/1.0.1:
+ resolution: {integrity: sha512-yT4htzImIQAf7mFV3heqTRNVwysZIgQjrribiCYQk152gcG6shz/WU/6xVGr0oDzkzcDPhMcCYy4lEKBiadSRA==}
+
+ /execa/6.1.0:
+ resolution: {integrity: sha512-QVWlX2e50heYJcCPG0iWtf8r0xjEYfz/OYLGDYH+IyjWezzPNxz63qNFOu0l4YftGWuizFVZHHs8PrLU5p2IDA==}
+ engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+ dependencies:
+ cross-spawn: 7.0.3
+ get-stream: 6.0.1
+ human-signals: 3.0.1
+ is-stream: 3.0.0
+ merge-stream: 2.0.0
+ npm-run-path: 5.1.0
+ onetime: 6.0.0
+ signal-exit: 3.0.7
+ strip-final-newline: 3.0.0
+
+ /fast-copy/2.1.3:
+ resolution: {integrity: sha512-LDzYKNTHhD+XOp8wGMuCkY4eTxFZOOycmpwLBiuF3r3OjOmZnURRD8t2dUAbmKuXGbo/MGggwbSjcBdp8QT0+g==}
+ dev: false
+
+ /fast-deep-equal/3.1.3:
+ resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==}
+
+ /fast-glob/3.2.12:
+ resolution: {integrity: sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==}
+ engines: {node: '>=8.6.0'}
+ dependencies:
+ '@nodelib/fs.stat': 2.0.5
+ '@nodelib/fs.walk': 1.2.8
+ glob-parent: 5.1.2
+ merge2: 1.4.1
+ micromatch: 4.0.5
+ dev: true
+
+ /fast-json-patch/3.1.1:
+ resolution: {integrity: sha512-vf6IHUX2SBcA+5/+4883dsIjpBTqmfBjmYiWK1savxQmFk4JfBMLa7ynTYOs1Rolp/T1betJxHiGD3g1Mn8lUQ==}
+ dev: true
+
+ /fast-json-stable-stringify/2.1.0:
+ resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==}
+
+ /fast-json-stringify/1.21.0:
+ resolution: {integrity: sha512-xY6gyjmHN3AK1Y15BCbMpeO9+dea5ePVsp3BouHCdukcx0hOHbXwFhRodhcI0NpZIgDChSeAKkHW9YjKvhwKBA==}
+ dependencies:
+ ajv: 6.12.6
+ deepmerge: 4.2.2
+ string-similarity: 4.0.4
+
+ /fast-json-stringify/5.3.0:
+ resolution: {integrity: sha512-jTlJV/VAaYMtYl5G41uEL8UQT7/fT5W6LuxKxIS/Lpm6bXxmR+reF3m3WgP/WwxXybH61O+xhWK7n9uAsY6zGA==}
+ dependencies:
+ '@fastify/deepmerge': 1.1.0
+ ajv: 8.11.0
+ ajv-formats: 2.1.1_ajv@8.11.0
+ fast-deep-equal: 3.1.3
+ fast-uri: 2.1.0
+ rfdc: 1.3.0
+
+ /fast-jwt/1.7.1:
+ resolution: {integrity: sha512-HdDR/k2d3qgUHDVwyhzYdHG/EK+IGhDGZFNidlSvCpVIhWMce8UYXUjx8hWEDhvOA032qoCtO1f/d/VcuzWhzA==}
+ engines: {node: ^18 || ^17 || ^16 || ^14 || ^12}
+ dependencies:
+ asn1.js: 5.4.1
+ ecdsa-sig-formatter: 1.0.11
+ mnemonist: 0.39.2
+
+ /fast-levenshtein/2.0.6:
+ resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==}
+ dev: true
+
+ /fast-redact/3.1.2:
+ resolution: {integrity: sha512-+0em+Iya9fKGfEQGcd62Yv6onjBmmhV1uh86XVfOU8VwAe6kaFdQCWI9s0/Nnugx5Vd9tdbZ7e6gE2tR9dzXdw==}
+ engines: {node: '>=6'}
+
+ /fast-safe-stringify/2.1.1:
+ resolution: {integrity: sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==}
+ dev: false
+
+ /fast-uri/2.1.0:
+ resolution: {integrity: sha512-qKRta6N7BWEFVlyonVY/V+BMLgFqktCUV0QjT259ekAIlbVrMaFnFLxJ4s/JPl4tou56S1BzPufI60bLe29fHA==}
+
+ /fastfall/1.5.1:
+ resolution: {integrity: sha512-KH6p+Z8AKPXnmA7+Iz2Lh8ARCMr+8WNPVludm1LGkZoD2MjY6LVnRMtTKhkdzI+jr0RzQWXKzKyBJm1zoHEL4Q==}
+ engines: {node: '>=0.10.0'}
+ dependencies:
+ reusify: 1.0.4
+ dev: false
+
+ /fastify-isolate/0.7.0:
+ resolution: {integrity: sha512-Fih0xKwd60QudKxcpdX5blzl2j5tfraFg4atSoGPPiMDpfF4iG6KGvX96SjLnSkFt0AUtzYWXHr71WFmYbMemw==}
+ dependencies:
+ import-fresh: 3.3.0
+ optionalDependencies:
+ '@matteo.collina/isolates': 2.1.0
+ dev: false
+
+ /fastify-metrics/9.2.2_fastify@4.6.0:
+ resolution: {integrity: sha512-67PsMy33zCvZk9juIY79YXO/dSLnVhYAV4510uiHfoa/z1VwTIoSkbsEa17JB+VL+KNW3seaSCDacjxOO/OINg==}
+ peerDependencies:
+ fastify: ^4.0.0
+ dependencies:
+ fastify: 4.6.0
+ fastify-plugin: 4.2.1
+ prom-client: 14.1.0
+ dev: false
+
+ /fastify-plugin/3.0.1:
+ resolution: {integrity: sha512-qKcDXmuZadJqdTm6vlCqioEbyewF60b/0LOFCcYN1B6BIZGlYJumWWOYs70SFYLDAH4YqdE1cxH/RKMG7rFxgA==}
+ dev: false
+
+ /fastify-plugin/4.2.1:
+ resolution: {integrity: sha512-dlGKiwLzRBKkEf5J5ho0uAD/Jdv8GQVUbriB3tAX3ehRUXE4gTV3lRd5inEg9li1aLzb0EGj8y2K4/8g1TN06g==}
+
+ /fastify-print-routes/2.0.4:
+ resolution: {integrity: sha512-QbTUJ33GdS9u3uV90kAG9s/bqVcVprFYaunBADWUelQkhUGId15rjZAH7v3EW4G56TM6FLt6dlgmdsISongtcg==}
+ engines: {node: '>=14.15.0'}
+ dependencies:
+ acquerello: 1.0.9
+ fastify-plugin: 4.2.1
+ table: 6.8.0
+ dev: false
+
+ /fastify/4.6.0:
+ resolution: {integrity: sha512-EgWUvcJNvsql1R4g5/ce866BYk8SgJKjGh6AI0e9BR+NidP7hqX1ObiwHEVbkR15A9XwMtkKd3TE/tFZCjsqnA==}
+ dependencies:
+ '@fastify/ajv-compiler': 3.2.0
+ '@fastify/error': 3.0.0
+ '@fastify/fast-json-stringify-compiler': 4.1.0
+ abstract-logging: 2.0.1
+ avvio: 8.2.0
+ find-my-way: 7.1.0
+ light-my-request: 5.6.0
+ pino: 8.5.0
+ process-warning: 2.0.0
+ proxy-addr: 2.0.7
+ rfdc: 1.3.0
+ secure-json-parse: 2.5.0
+ semver: 7.3.7
+ tiny-lru: 8.0.2
+ transitivePeerDependencies:
+ - supports-color
+
+ /fastparallel/2.4.1:
+ resolution: {integrity: sha512-qUmhxPgNHmvRjZKBFUNI0oZuuH9OlSIOXmJ98lhKPxMZZ7zS/Fi0wRHOihDSz0R1YiIOjxzOY4bq65YTcdBi2Q==}
+ dependencies:
+ reusify: 1.0.4
+ xtend: 4.0.2
+
+ /fastq/1.13.0:
+ resolution: {integrity: sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==}
+ dependencies:
+ reusify: 1.0.4
+
+ /fastseries/1.7.2:
+ resolution: {integrity: sha512-dTPFrPGS8SNSzAt7u/CbMKCJ3s01N04s4JFbORHcmyvVfVKmbhMD1VtRbh5enGHxkaQDqWyLefiKOGGmohGDDQ==}
+ dependencies:
+ reusify: 1.0.4
+ xtend: 4.0.2
+ dev: false
+
+ /fault/1.0.4:
+ resolution: {integrity: sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA==}
+ dependencies:
+ format: 0.2.2
+ dev: true
+
+ /file-entry-cache/6.0.1:
+ resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==}
+ engines: {node: ^10.12.0 || >=12.0.0}
+ dependencies:
+ flat-cache: 3.0.4
+ dev: true
+
+ /file-uri-to-path/1.0.0:
+ resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==}
+ dev: false
+ optional: true
+
+ /fill-range/7.0.1:
+ resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==}
+ engines: {node: '>=8'}
+ dependencies:
+ to-regex-range: 5.0.1
+ dev: true
+
+ /find-cache-dir/3.3.2:
+ resolution: {integrity: sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==}
+ engines: {node: '>=8'}
+ dependencies:
+ commondir: 1.0.1
+ make-dir: 3.1.0
+ pkg-dir: 4.2.0
+ dev: true
+
+ /find-my-way/7.1.0:
+ resolution: {integrity: sha512-yQYjxgcZmo6SQ1bRPr9ToMcCyzBOZ3L1cbDYTNCHRq7XfQPLSDhbywUAsQCLWlL3uuOUAKvTxeJ2V2i+Z9YqGA==}
+ engines: {node: '>=14'}
+ dependencies:
+ fast-deep-equal: 3.1.3
+ safe-regex2: 2.0.0
+
+ /find-up/3.0.0:
+ resolution: {integrity: sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==}
+ engines: {node: '>=6'}
+ dependencies:
+ locate-path: 3.0.0
+ dev: true
+
+ /find-up/4.1.0:
+ resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==}
+ engines: {node: '>=8'}
+ dependencies:
+ locate-path: 5.0.0
+ path-exists: 4.0.0
+ dev: true
+
+ /find-up/5.0.0:
+ resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==}
+ engines: {node: '>=10'}
+ dependencies:
+ locate-path: 6.0.0
+ path-exists: 4.0.0
+
+ /findit/2.0.0:
+ resolution: {integrity: sha512-ENZS237/Hr8bjczn5eKuBohLgaD0JyUd0arxretR1f9RO46vZHA1b2y0VorgGV3WaOT3c+78P8h7v4JGJ1i/rg==}
+ dev: true
+
+ /flat-cache/3.0.4:
+ resolution: {integrity: sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==}
+ engines: {node: ^10.12.0 || >=12.0.0}
+ dependencies:
+ flatted: 3.2.7
+ rimraf: 3.0.2
+ dev: true
+
+ /flatted/3.2.7:
+ resolution: {integrity: sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==}
+ dev: true
+
+ /focus-lock/0.11.2:
+ resolution: {integrity: sha512-pZ2bO++NWLHhiKkgP1bEXHhR1/OjVcSvlCJ98aNJDFeb7H5OOQaO+SKOZle6041O9rv2tmbrO4JzClAvDUHf0g==}
+ engines: {node: '>=10'}
+ dependencies:
+ tslib: 2.4.0
+ dev: true
+
+ /foreground-child/2.0.0:
+ resolution: {integrity: sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==}
+ engines: {node: '>=8.0.0'}
+ dependencies:
+ cross-spawn: 7.0.3
+ signal-exit: 3.0.7
+
+ /form-data-encoder/1.7.2:
+ resolution: {integrity: sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==}
+ dev: true
+
+ /form-data/2.5.1:
+ resolution: {integrity: sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==}
+ engines: {node: '>= 0.12'}
+ dependencies:
+ asynckit: 0.4.0
+ combined-stream: 1.0.8
+ mime-types: 2.1.35
+ dev: true
+
+ /format/0.2.2:
+ resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==}
+ engines: {node: '>=0.4.x'}
+ dev: true
+
+ /formdata-node/4.4.1:
+ resolution: {integrity: sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==}
+ engines: {node: '>= 12.20'}
+ dependencies:
+ node-domexception: 1.0.0
+ web-streams-polyfill: 4.0.0-beta.3
+ dev: true
+
+ /forwarded/0.2.0:
+ resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==}
+ engines: {node: '>= 0.6'}
+
+ /fresh/0.5.2:
+ resolution: {integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==}
+ engines: {node: '>= 0.6'}
+
+ /fromentries/1.3.2:
+ resolution: {integrity: sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==}
+ dev: true
+
+ /fs-exists-cached/1.0.0:
+ resolution: {integrity: sha512-kSxoARUDn4F2RPXX48UXnaFKwVU7Ivd/6qpzZL29MCDmr9sTvybv4gFCp+qaI4fM9m0z9fgz/yJvi56GAz+BZg==}
+ dev: true
+
+ /fs-minipass/2.1.0:
+ resolution: {integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==}
+ engines: {node: '>= 8'}
+ dependencies:
+ minipass: 3.3.4
+
+ /fs.realpath/1.0.0:
+ resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==}
+
+ /fsevents/2.3.2:
+ resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==}
+ engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
+ os: [darwin]
+ requiresBuild: true
+ dev: true
+ optional: true
+
+ /function-bind/1.1.1:
+ resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==}
+ dev: true
+
+ /function-loop/2.0.1:
+ resolution: {integrity: sha512-ktIR+O6i/4h+j/ZhZJNdzeI4i9lEPeEK6UPR2EVyTVBqOwcU3Za9xYKLH64ZR9HmcROyRrOkizNyjjtWJzDDkQ==}
+ dev: true
+
+ /function.prototype.name/1.1.5:
+ resolution: {integrity: sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ call-bind: 1.0.2
+ define-properties: 1.1.4
+ es-abstract: 1.20.2
+ functions-have-names: 1.2.3
+ dev: true
+
+ /functions-have-names/1.2.3:
+ resolution: {integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==}
+ dev: true
+
+ /funtypes/4.2.0:
+ resolution: {integrity: sha512-DvOtjiKvkeuXGV0O8LQh9quUP3bSOTEQPGv537Sao8kDq2rDbg48UsSJ7wlBLPzR2Mn0pV7cyAiq5pYG1oUyCQ==}
+
+ /gauge/3.0.2:
+ resolution: {integrity: sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==}
+ engines: {node: '>=10'}
+ dependencies:
+ aproba: 2.0.0
+ color-support: 1.1.3
+ console-control-strings: 1.1.0
+ has-unicode: 2.0.1
+ object-assign: 4.1.1
+ signal-exit: 3.0.7
+ string-width: 4.2.3
+ strip-ansi: 6.0.1
+ wide-align: 1.1.5
+
+ /gauge/4.0.4:
+ resolution: {integrity: sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==}
+ engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0}
+ dependencies:
+ aproba: 2.0.0
+ color-support: 1.1.3
+ console-control-strings: 1.1.0
+ has-unicode: 2.0.1
+ signal-exit: 3.0.7
+ string-width: 4.2.3
+ strip-ansi: 6.0.1
+ wide-align: 1.1.5
+ optional: true
+
+ /generate-function/2.3.1:
+ resolution: {integrity: sha512-eeB5GfMNeevm/GRYq20ShmsaGcmI81kIX2K9XQx5miC8KdHaC6Jm0qQ8ZNeGOi7wYB8OsdxKs+Y2oVuTFuVwKQ==}
+ dependencies:
+ is-property: 1.0.2
+
+ /gensync/1.0.0-beta.2:
+ resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==}
+ engines: {node: '>=6.9.0'}
+ dev: true
+
+ /get-caller-file/2.0.5:
+ resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==}
+ engines: {node: 6.* || 8.* || >= 10.*}
+
+ /get-func-name/2.0.0:
+ resolution: {integrity: sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig==}
+ dev: true
+
+ /get-intrinsic/1.1.3:
+ resolution: {integrity: sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==}
+ dependencies:
+ function-bind: 1.1.1
+ has: 1.0.3
+ has-symbols: 1.0.3
+ dev: true
+
+ /get-jwks/8.0.0:
+ resolution: {integrity: sha512-KNA0tPT505uCPeNzwBzdF2s0pa9a4EeZJGRsVLIfAu8C8BIwxvM0iAq9vD1CswejRjP04ZIIpqzcZ73BmGu5cA==}
+ engines: {node: '>=14'}
+ dependencies:
+ jwk-to-pem: 2.0.5
+ lru-cache: 7.14.0
+ node-fetch: 2.6.7
+ transitivePeerDependencies:
+ - encoding
+ dev: false
+
+ /get-nonce/1.0.1:
+ resolution: {integrity: sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==}
+ engines: {node: '>=6'}
+ dev: true
+
+ /get-package-type/0.1.0:
+ resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==}
+ engines: {node: '>=8.0.0'}
+ dev: true
+
+ /get-port/3.2.0:
+ resolution: {integrity: sha512-x5UJKlgeUiNT8nyo/AcnwLnZuZNcSjSw0kogRB+Whd1fjjFq4B1hySFxSFWWSn4mIBzg3sRNUDFYc4g5gjPoLg==}
+ engines: {node: '>=4'}
+ dev: true
+
+ /get-stdin/8.0.0:
+ resolution: {integrity: sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg==}
+ engines: {node: '>=10'}
+ dev: true
+
+ /get-stream/6.0.1:
+ resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==}
+ engines: {node: '>=10'}
+
+ /get-symbol-description/1.0.0:
+ resolution: {integrity: sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ call-bind: 1.0.2
+ get-intrinsic: 1.1.3
+ dev: true
+
+ /glob-parent/5.1.2:
+ resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
+ engines: {node: '>= 6'}
+ dependencies:
+ is-glob: 4.0.3
+ dev: true
+
+ /glob-parent/6.0.2:
+ resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==}
+ engines: {node: '>=10.13.0'}
+ dependencies:
+ is-glob: 4.0.3
+ dev: true
+
+ /glob/7.2.3:
+ resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==}
+ dependencies:
+ fs.realpath: 1.0.0
+ inflight: 1.0.6
+ inherits: 2.0.4
+ minimatch: 3.1.2
+ once: 1.4.0
+ path-is-absolute: 1.0.1
+
+ /glob/8.0.3:
+ resolution: {integrity: sha512-ull455NHSHI/Y1FqGaaYFaLGkNMMJbavMrEGFXG/PGrg6y7sutWHUHrz6gy6WEBH6akM1M414dWKCNs+IhKdiQ==}
+ engines: {node: '>=12'}
+ dependencies:
+ fs.realpath: 1.0.0
+ inflight: 1.0.6
+ inherits: 2.0.4
+ minimatch: 5.1.0
+ once: 1.4.0
+
+ /globals/11.12.0:
+ resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==}
+ engines: {node: '>=4'}
+ dev: true
+
+ /globals/13.17.0:
+ resolution: {integrity: sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==}
+ engines: {node: '>=8'}
+ dependencies:
+ type-fest: 0.20.2
+ dev: true
+
+ /globby/11.1.0:
+ resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==}
+ engines: {node: '>=10'}
+ dependencies:
+ array-union: 2.1.0
+ dir-glob: 3.0.1
+ fast-glob: 3.2.12
+ ignore: 5.2.0
+ merge2: 1.4.1
+ slash: 3.0.0
+ dev: true
+
+ /goober/2.1.11_csstype@3.1.1:
+ resolution: {integrity: sha512-5SS2lmxbhqH0u9ABEWq7WPU69a4i2pYcHeCxqaNq6Cw3mnrF0ghWNM4tEGid4dKy8XNIAUbuThuozDHHKJVh3A==}
+ peerDependencies:
+ csstype: ^3.0.10
+ dependencies:
+ csstype: 3.1.1
+ dev: true
+
+ /graceful-fs/4.2.10:
+ resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==}
+
+ /grapheme-splitter/1.0.4:
+ resolution: {integrity: sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==}
+ dev: true
+
+ /graphiql/2.0.7_xst6jk3wj5nubsgv7xxqfuksc4:
+ resolution: {integrity: sha512-vHZeCS+KKbMQAWJ0CDdnCPmVOG0d48BeoZJjBlm2H5WlCpNjVMm0WVpfL7dG3aP4k+eF+800sMpUx3VVFt7LYw==}
+ peerDependencies:
+ graphql: ^15.5.0 || ^16.0.0
+ react: ^16.8.0 || ^17.0.0 || ^18.0.0
+ react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0
+ dependencies:
+ '@graphiql/react': 0.13.1_xst6jk3wj5nubsgv7xxqfuksc4
+ '@graphiql/toolkit': 0.8.0_graphql@16.6.0
+ entities: 2.2.0
+ graphql: 16.6.0
+ graphql-language-service: 5.1.0_graphql@16.6.0
+ markdown-it: 12.3.2
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ transitivePeerDependencies:
+ - '@codemirror/language'
+ - '@types/node'
+ - '@types/react'
+ - graphql-ws
+ - react-is
+ dev: true
+
+ /graphql-jit/0.7.4_graphql@16.6.0:
+ resolution: {integrity: sha512-kWyHmsQtKMD6xcKDgf4dgPLyIZhviqA6IWGdnA0ElL9wgrIOTxf3eI4c0/U3tnoAU3t09zliVCfDkfIptzYjIA==}
+ peerDependencies:
+ graphql: '>=15'
+ dependencies:
+ '@graphql-typed-document-node/core': 3.1.1_graphql@16.6.0
+ fast-json-stringify: 1.21.0
+ generate-function: 2.3.1
+ graphql: 16.6.0
+ json-schema: 0.4.0
+ lodash.memoize: 4.1.2
+ lodash.merge: 4.6.2
+ lodash.mergewith: 4.6.2
+
+ /graphql-language-service/5.1.0_graphql@16.6.0:
+ resolution: {integrity: sha512-APffigZ/l2me6soek+Yq5Us3HBwmfw4vns4QoqsTePXkK3knVO8rn0uAC6PmTyglb1pmFFPbYaRIzW4wmcnnGQ==}
+ hasBin: true
+ peerDependencies:
+ graphql: ^15.5.0 || ^16.0.0
+ dependencies:
+ graphql: 16.6.0
+ nullthrows: 1.1.1
+ vscode-languageserver-types: 3.17.2
+ dev: true
+
+ /graphql-scalars/1.18.0_graphql@16.6.0:
+ resolution: {integrity: sha512-XrMwSim4xJ5n1UdT3YMJh9uT3oB/th5jR5bIMJvYxmgq/rGDkfXNtCRSL/+dLMHxGM0thYPfIZDua1+aQlKBMA==}
+ engines: {node: '>=10'}
+ peerDependencies:
+ graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0
+ dependencies:
+ graphql: 16.6.0
+ tslib: 2.4.0
+ dev: false
+
+ /graphql/16.6.0:
+ resolution: {integrity: sha512-KPIBPDlW7NxrbT/eh4qPXz5FiFdL5UbaA0XUNz2Rp3Z3hqBSkbj0GVjwFDztsWVauZUWsbKHgMg++sk8UX0bkw==}
+ engines: {node: ^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0}
+
+ /happy-dom/6.0.4:
+ resolution: {integrity: sha512-b+ID23Ms0BY08UNLymsOMG7EI2jSlwEt4cbJs938GZfeNAg+fqgkSO3TokQMgSOFoHznpjWmpVjBUL5boJ9PWw==}
+ dependencies:
+ css.escape: 1.5.1
+ he: 1.2.0
+ node-fetch: 2.6.7
+ sync-request: 6.1.0
+ webidl-conversions: 7.0.0
+ whatwg-encoding: 2.0.0
+ whatwg-mimetype: 3.0.0
+ transitivePeerDependencies:
+ - encoding
+ dev: true
+
+ /hard-rejection/2.1.0:
+ resolution: {integrity: sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==}
+ engines: {node: '>=6'}
+ dev: true
+
+ /has-bigints/1.0.2:
+ resolution: {integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==}
+ dev: true
+
+ /has-flag/3.0.0:
+ resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==}
+ engines: {node: '>=4'}
+
+ /has-flag/4.0.0:
+ resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
+ engines: {node: '>=8'}
+
+ /has-property-descriptors/1.0.0:
+ resolution: {integrity: sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==}
+ dependencies:
+ get-intrinsic: 1.1.3
+ dev: true
+
+ /has-symbols/1.0.3:
+ resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==}
+ engines: {node: '>= 0.4'}
+ dev: true
+
+ /has-tostringtag/1.0.0:
+ resolution: {integrity: sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ has-symbols: 1.0.3
+ dev: true
+
+ /has-unicode/2.0.1:
+ resolution: {integrity: sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==}
+
+ /has/1.0.3:
+ resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==}
+ engines: {node: '>= 0.4.0'}
+ dependencies:
+ function-bind: 1.1.1
+ dev: true
+
+ /hash.js/1.1.7:
+ resolution: {integrity: sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==}
+ dependencies:
+ inherits: 2.0.4
+ minimalistic-assert: 1.0.1
+ dev: false
+
+ /hasha/5.2.2:
+ resolution: {integrity: sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==}
+ engines: {node: '>=8'}
+ dependencies:
+ is-stream: 2.0.1
+ type-fest: 0.8.1
+ dev: true
+
+ /hast-util-parse-selector/2.2.5:
+ resolution: {integrity: sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==}
+ dev: true
+
+ /hastscript/6.0.0:
+ resolution: {integrity: sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w==}
+ dependencies:
+ '@types/hast': 2.3.4
+ comma-separated-tokens: 1.0.8
+ hast-util-parse-selector: 2.2.5
+ property-information: 5.6.0
+ space-separated-tokens: 1.1.5
+ dev: true
+
+ /he/1.2.0:
+ resolution: {integrity: sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==}
+ hasBin: true
+ dev: true
+
+ /help-me/4.1.0:
+ resolution: {integrity: sha512-5HMrkOks2j8Fpu2j5nTLhrBhT7VwHwELpqnSnx802ckofys5MO2SkLpgSz3dgNFHV7IYFX2igm5CM75SmuYidw==}
+ dependencies:
+ glob: 8.0.3
+ readable-stream: 3.6.0
+ dev: false
+
+ /highlight.js/10.7.3:
+ resolution: {integrity: sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==}
+ dev: true
+
+ /history/5.3.0:
+ resolution: {integrity: sha512-ZqaKwjjrAYUYfLG+htGaIIZ4nioX2L70ZUMIFysS3xvBsSG4x/n1V6TXV3N8ZYNuFGlDirFg32T7B6WOUPDYcQ==}
+ dependencies:
+ '@babel/runtime': 7.19.0
+ dev: true
+
+ /hmac-drbg/1.0.1:
+ resolution: {integrity: sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg==}
+ dependencies:
+ hash.js: 1.1.7
+ minimalistic-assert: 1.0.1
+ minimalistic-crypto-utils: 1.0.1
+ dev: false
+
+ /hoist-non-react-statics/3.3.2:
+ resolution: {integrity: sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==}
+ dependencies:
+ react-is: 16.13.1
+ dev: true
+
+ /hosted-git-info/2.8.9:
+ resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==}
+ dev: true
+
+ /hosted-git-info/4.1.0:
+ resolution: {integrity: sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==}
+ engines: {node: '>=10'}
+ dependencies:
+ lru-cache: 6.0.0
+ dev: true
+
+ /html-escaper/2.0.2:
+ resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==}
+
+ /http-basic/8.1.3:
+ resolution: {integrity: sha512-/EcDMwJZh3mABI2NhGfHOGOeOZITqfkEO4p/xK+l3NpyncIHUQBoMvCSF/b5GqvKtySC2srL/GGG3+EtlqlmCw==}
+ engines: {node: '>=6.0.0'}
+ dependencies:
+ caseless: 0.12.0
+ concat-stream: 1.6.2
+ http-response-object: 3.0.2
+ parse-cache-control: 1.0.1
+ dev: true
+
+ /http-cache-semantics/4.1.0:
+ resolution: {integrity: sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==}
+ optional: true
+
+ /http-errors/2.0.0:
+ resolution: {integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==}
+ engines: {node: '>= 0.8'}
+ dependencies:
+ depd: 2.0.0
+ inherits: 2.0.4
+ setprototypeof: 1.2.0
+ statuses: 2.0.1
+ toidentifier: 1.0.1
+
+ /http-proxy-agent/4.0.1:
+ resolution: {integrity: sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==}
+ engines: {node: '>= 6'}
+ dependencies:
+ '@tootallnate/once': 1.1.2
+ agent-base: 6.0.2
+ debug: 4.3.4
+ transitivePeerDependencies:
+ - supports-color
+ optional: true
+
+ /http-response-object/3.0.2:
+ resolution: {integrity: sha512-bqX0XTF6fnXSQcEJ2Iuyr75yVakyjIDCqroJQ/aHfSdlM743Cwqoi2nDYMzLGWUcuTWGWy8AAvOKXTfiv6q9RA==}
+ dependencies:
+ '@types/node': 10.17.60
+ dev: true
+
+ /https-proxy-agent/5.0.1:
+ resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==}
+ engines: {node: '>= 6'}
+ dependencies:
+ agent-base: 6.0.2
+ debug: 4.3.4
+ transitivePeerDependencies:
+ - supports-color
+
+ /human-signals/3.0.1:
+ resolution: {integrity: sha512-rQLskxnM/5OCldHo+wNXbpVgDn5A17CUoKX+7Sokwaknlq7CdSnphy0W39GU8dw59XiCXmFXDg4fRuckQRKewQ==}
+ engines: {node: '>=12.20.0'}
+
+ /humanize-ms/1.2.1:
+ resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==}
+ dependencies:
+ ms: 2.1.3
+ optional: true
+
+ /iconv-lite/0.6.3:
+ resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==}
+ engines: {node: '>=0.10.0'}
+ dependencies:
+ safer-buffer: 2.1.2
+
+ /ieee754/1.2.1:
+ resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==}
+ dev: true
+
+ /ignore/5.2.0:
+ resolution: {integrity: sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==}
+ engines: {node: '>= 4'}
+ dev: true
+
+ /immutable/3.8.2:
+ resolution: {integrity: sha512-15gZoQ38eYjEjxkorfbcgBKBL6R7T459OuK+CpcWt7O3KF4uPCx2tD0uFETlUDIyo+1789crbMhTvQBSR5yBMg==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /import-fresh/2.0.0:
+ resolution: {integrity: sha512-eZ5H8rcgYazHbKC3PG4ClHNykCSxtAhxSSEM+2mb+7evD2CKF5V7c0dNum7AdpDh0ZdICwZY9sRSn8f+KH96sg==}
+ engines: {node: '>=4'}
+ dependencies:
+ caller-path: 2.0.0
+ resolve-from: 3.0.0
+
+ /import-fresh/3.3.0:
+ resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==}
+ engines: {node: '>=6'}
+ dependencies:
+ parent-module: 1.0.1
+ resolve-from: 4.0.0
+
+ /imurmurhash/0.1.4:
+ resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==}
+ engines: {node: '>=0.8.19'}
+
+ /indent-string/4.0.0:
+ resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==}
+ engines: {node: '>=8'}
+
+ /infer-owner/1.0.4:
+ resolution: {integrity: sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==}
+ optional: true
+
+ /inflected/2.1.0:
+ resolution: {integrity: sha512-hAEKNxvHf2Iq3H60oMBHkB4wl5jn3TPF3+fXek/sRwAB5gP9xWs4r7aweSF95f99HFoz69pnZTcu8f0SIHV18w==}
+ dev: false
+
+ /inflight/1.0.6:
+ resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==}
+ dependencies:
+ once: 1.4.0
+ wrappy: 1.0.2
+
+ /inherits/2.0.4:
+ resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==}
+
+ /internal-slot/1.0.3:
+ resolution: {integrity: sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ get-intrinsic: 1.1.3
+ has: 1.0.3
+ side-channel: 1.0.4
+ dev: true
+
+ /invariant/2.2.4:
+ resolution: {integrity: sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==}
+ dependencies:
+ loose-envify: 1.4.0
+ dev: true
+
+ /ip/2.0.0:
+ resolution: {integrity: sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==}
+ optional: true
+
+ /ipaddr.js/1.9.1:
+ resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==}
+ engines: {node: '>= 0.10'}
+
+ /irregular-plurals/3.3.0:
+ resolution: {integrity: sha512-MVBLKUTangM3EfRPFROhmWQQKRDsrgI83J8GS3jXy+OwYqiR2/aoWndYQ5416jLE3uaGgLH7ncme3X9y09gZ3g==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /is-alphabetical/1.0.4:
+ resolution: {integrity: sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==}
+ dev: true
+
+ /is-alphanumerical/1.0.4:
+ resolution: {integrity: sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==}
+ dependencies:
+ is-alphabetical: 1.0.4
+ is-decimal: 1.0.4
+ dev: true
+
+ /is-arrayish/0.2.1:
+ resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==}
+
+ /is-bigint/1.0.4:
+ resolution: {integrity: sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==}
+ dependencies:
+ has-bigints: 1.0.2
+ dev: true
+
+ /is-binary-path/2.1.0:
+ resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
+ engines: {node: '>=8'}
+ dependencies:
+ binary-extensions: 2.2.0
+ dev: true
+
+ /is-boolean-object/1.1.2:
+ resolution: {integrity: sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ call-bind: 1.0.2
+ has-tostringtag: 1.0.0
+ dev: true
+
+ /is-callable/1.2.6:
+ resolution: {integrity: sha512-krO72EO2NptOGAX2KYyqbP9vYMlNAXdB53rq6f8LXY6RY7JdSR/3BD6wLUlPHSAesmY9vstNrjvqGaCiRK/91Q==}
+ engines: {node: '>= 0.4'}
+ dev: true
+
+ /is-core-module/2.10.0:
+ resolution: {integrity: sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg==}
+ dependencies:
+ has: 1.0.3
+ dev: true
+
+ /is-date-object/1.0.5:
+ resolution: {integrity: sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ has-tostringtag: 1.0.0
+ dev: true
+
+ /is-decimal/1.0.4:
+ resolution: {integrity: sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==}
+ dev: true
+
+ /is-directory/0.3.1:
+ resolution: {integrity: sha512-yVChGzahRFvbkscn2MlwGismPO12i9+znNruC5gVEntG3qu0xQMzsGg/JFbrsqDOHtHFPci+V5aP5T9I+yeKqw==}
+ engines: {node: '>=0.10.0'}
+
+ /is-docker/2.2.1:
+ resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==}
+ engines: {node: '>=8'}
+ hasBin: true
+ dev: false
+
+ /is-dom/1.1.0:
+ resolution: {integrity: sha512-u82f6mvhYxRPKpw8V1N0W8ce1xXwOrQtgGcxl6UCL5zBmZu3is/18K0rR7uFCnMDuAsS/3W54mGL4vsaFUQlEQ==}
+ dependencies:
+ is-object: 1.0.2
+ is-window: 1.0.2
+ dev: true
+
+ /is-extglob/2.1.1:
+ resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /is-fullwidth-code-point/3.0.0:
+ resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==}
+ engines: {node: '>=8'}
+
+ /is-glob/4.0.3:
+ resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
+ engines: {node: '>=0.10.0'}
+ dependencies:
+ is-extglob: 2.1.1
+ dev: true
+
+ /is-hexadecimal/1.0.4:
+ resolution: {integrity: sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==}
+ dev: true
+
+ /is-lambda/1.0.1:
+ resolution: {integrity: sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==}
+ optional: true
+
+ /is-negative-zero/2.0.2:
+ resolution: {integrity: sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==}
+ engines: {node: '>= 0.4'}
+ dev: true
+
+ /is-number-object/1.0.7:
+ resolution: {integrity: sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ has-tostringtag: 1.0.0
+ dev: true
+
+ /is-number/7.0.0:
+ resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
+ engines: {node: '>=0.12.0'}
+ dev: true
+
+ /is-object/1.0.2:
+ resolution: {integrity: sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA==}
+ dev: true
+
+ /is-plain-obj/1.1.0:
+ resolution: {integrity: sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /is-plain-object/2.0.4:
+ resolution: {integrity: sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==}
+ engines: {node: '>=0.10.0'}
+ dependencies:
+ isobject: 3.0.1
+ dev: true
+
+ /is-plain-object/5.0.0:
+ resolution: {integrity: sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /is-primitive/3.0.1:
+ resolution: {integrity: sha512-GljRxhWvlCNRfZyORiH77FwdFwGcMO620o37EOYC0ORWdq+WYNVqW0w2Juzew4M+L81l6/QS3t5gkkihyRqv9w==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /is-promise/4.0.0:
+ resolution: {integrity: sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==}
+
+ /is-property/1.0.2:
+ resolution: {integrity: sha512-Ks/IoX00TtClbGQr4TWXemAnktAQvYB7HzcCxDGqEZU6oCmb2INHuOoKxbtR+HFkmYWBKv/dOZtGRiAjDhj92g==}
+
+ /is-regex/1.1.4:
+ resolution: {integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ call-bind: 1.0.2
+ has-tostringtag: 1.0.0
+ dev: true
+
+ /is-shared-array-buffer/1.0.2:
+ resolution: {integrity: sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==}
+ dependencies:
+ call-bind: 1.0.2
+ dev: true
+
+ /is-stream/2.0.1:
+ resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /is-stream/3.0.0:
+ resolution: {integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==}
+ engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+
+ /is-string/1.0.7:
+ resolution: {integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ has-tostringtag: 1.0.0
+ dev: true
+
+ /is-symbol/1.0.4:
+ resolution: {integrity: sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ has-symbols: 1.0.3
+ dev: true
+
+ /is-typedarray/1.0.0:
+ resolution: {integrity: sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==}
+ dev: true
+
+ /is-unicode-supported/0.1.0:
+ resolution: {integrity: sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==}
+ engines: {node: '>=10'}
+ dev: true
+
+ /is-weakref/1.0.2:
+ resolution: {integrity: sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==}
+ dependencies:
+ call-bind: 1.0.2
+ dev: true
+
+ /is-window/1.0.2:
+ resolution: {integrity: sha512-uj00kdXyZb9t9RcAUAwMZAnkBUwdYGhYlt7djMXhfyhUCzwNba50tIiBKR7q0l7tdoBtFVw/3JmLY6fI3rmZmg==}
+ dev: true
+
+ /is-windows/1.0.2:
+ resolution: {integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /is-wsl/2.2.0:
+ resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==}
+ engines: {node: '>=8'}
+ dependencies:
+ is-docker: 2.2.1
+ dev: false
+
+ /isarray/1.0.0:
+ resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==}
+ dev: true
+
+ /isexe/2.0.0:
+ resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==}
+
+ /isobject/3.0.1:
+ resolution: {integrity: sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /istanbul-lib-coverage/3.2.0:
+ resolution: {integrity: sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==}
+ engines: {node: '>=8'}
+
+ /istanbul-lib-hook/3.0.0:
+ resolution: {integrity: sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==}
+ engines: {node: '>=8'}
+ dependencies:
+ append-transform: 2.0.0
+ dev: true
+
+ /istanbul-lib-instrument/4.0.3:
+ resolution: {integrity: sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==}
+ engines: {node: '>=8'}
+ dependencies:
+ '@babel/core': 7.19.1
+ '@istanbuljs/schema': 0.1.3
+ istanbul-lib-coverage: 3.2.0
+ semver: 6.3.0
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /istanbul-lib-processinfo/2.0.3:
+ resolution: {integrity: sha512-NkwHbo3E00oybX6NGJi6ar0B29vxyvNwoC7eJ4G4Yq28UfY758Hgn/heV8VRFhevPED4LXfFz0DQ8z/0kw9zMg==}
+ engines: {node: '>=8'}
+ dependencies:
+ archy: 1.0.0
+ cross-spawn: 7.0.3
+ istanbul-lib-coverage: 3.2.0
+ p-map: 3.0.0
+ rimraf: 3.0.2
+ uuid: 8.3.2
+ dev: true
+
+ /istanbul-lib-report/3.0.0:
+ resolution: {integrity: sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==}
+ engines: {node: '>=8'}
+ dependencies:
+ istanbul-lib-coverage: 3.2.0
+ make-dir: 3.1.0
+ supports-color: 7.2.0
+
+ /istanbul-lib-source-maps/4.0.1:
+ resolution: {integrity: sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==}
+ engines: {node: '>=10'}
+ dependencies:
+ debug: 4.3.4
+ istanbul-lib-coverage: 3.2.0
+ source-map: 0.6.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /istanbul-reports/3.1.5:
+ resolution: {integrity: sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==}
+ engines: {node: '>=8'}
+ dependencies:
+ html-escaper: 2.0.2
+ istanbul-lib-report: 3.0.0
+
+ /jackspeak/1.4.1:
+ resolution: {integrity: sha512-npN8f+M4+IQ8xD3CcWi3U62VQwKlT3Tj4GxbdT/fYTmeogD9eBF9OFdpoFG/VPNoshRjPUijdkp/p2XrzUHaVg==}
+ engines: {node: '>=8'}
+ dependencies:
+ cliui: 7.0.4
+ dev: true
+
+ /javascript-natural-sort/0.7.1:
+ resolution: {integrity: sha512-nO6jcEfZWQXDhOiBtG2KvKyEptz7RVbpGP4vTD2hLBdmNQSsCiicO2Ioinv6UI4y9ukqnBpy+XZ9H6uLNgJTlw==}
+ dev: true
+
+ /jmespath/0.16.0:
+ resolution: {integrity: sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==}
+ engines: {node: '>= 0.6.0'}
+ dev: true
+
+ /joycon/3.1.1:
+ resolution: {integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==}
+ engines: {node: '>=10'}
+ dev: false
+
+ /js-file-download/0.4.12:
+ resolution: {integrity: sha512-rML+NkoD08p5Dllpjo0ffy4jRHeY6Zsapvr/W86N7E0yuzAO6qa5X9+xog6zQNlH102J7IXljNY2FtS6Lj3ucg==}
+ dev: true
+
+ /js-sdsl/4.1.4:
+ resolution: {integrity: sha512-Y2/yD55y5jteOAmY50JbUZYwk3CP3wnLPEZnlR1w9oKhITrBEtAxwuWKebFf8hMrPMgbYwFoWK/lH2sBkErELw==}
+ dev: true
+
+ /js-tokens/4.0.0:
+ resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==}
+
+ /js-yaml/3.14.1:
+ resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==}
+ hasBin: true
+ dependencies:
+ argparse: 1.0.10
+ esprima: 4.0.1
+
+ /js-yaml/4.1.0:
+ resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==}
+ hasBin: true
+ dependencies:
+ argparse: 2.0.1
+
+ /jsesc/2.5.2:
+ resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==}
+ engines: {node: '>=4'}
+ hasBin: true
+ dev: true
+
+ /json-format-highlight/1.0.4:
+ resolution: {integrity: sha512-RqenIjKr1I99XfXPAml9G7YlEZg/GnsH7emWyWJh2yuGXqHW8spN7qx6/ME+MoIBb35/fxrMC9Jauj6nvGe4Mg==}
+ dev: true
+
+ /json-parse-better-errors/1.0.2:
+ resolution: {integrity: sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==}
+
+ /json-parse-even-better-errors/2.3.1:
+ resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==}
+ dev: true
+
+ /json-schema-resolver/1.3.0:
+ resolution: {integrity: sha512-EX7W1r8aZ/T3j8GbbBxPXi60bnsELfT90OiA1QrbGMvwzVSbyMNOAzvMFcFb8m7gKCXZLJpGe+cJOvWgoFl29A==}
+ engines: {node: '>=10'}
+ dependencies:
+ debug: 4.3.4
+ rfdc: 1.3.0
+ uri-js: 4.4.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: false
+
+ /json-schema-traverse/0.4.1:
+ resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==}
+
+ /json-schema-traverse/1.0.0:
+ resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==}
+
+ /json-schema/0.4.0:
+ resolution: {integrity: sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==}
+
+ /json-source-map/0.6.1:
+ resolution: {integrity: sha512-1QoztHPsMQqhDq0hlXY5ZqcEdUzxQEIxgFkKl4WUp2pgShObl+9ovi4kRh2TfvAfxAoHOJ9vIMEqk3k4iex7tg==}
+ dev: true
+
+ /json-stable-stringify-without-jsonify/1.0.1:
+ resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==}
+ dev: true
+
+ /json5/1.0.1:
+ resolution: {integrity: sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==}
+ hasBin: true
+ dependencies:
+ minimist: 1.2.6
+ dev: true
+
+ /json5/2.2.1:
+ resolution: {integrity: sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==}
+ engines: {node: '>=6'}
+ hasBin: true
+
+ /jsoneditor/9.9.0:
+ resolution: {integrity: sha512-NHJhyaqcc5U33ah6dEcd0S9b14Auocpe9nydvC9ui7Uq/vjEFnsd7ot6O9Jqwv53B7DmHFUWq5cT4qeWh4MEoA==}
+ dependencies:
+ ace-builds: 1.10.1
+ ajv: 6.12.6
+ javascript-natural-sort: 0.7.1
+ jmespath: 0.16.0
+ json-source-map: 0.6.1
+ jsonrepair: 2.2.1
+ mobius1-selectr: 2.4.13
+ picomodal: 3.0.0
+ vanilla-picker: 2.12.1
+ dev: true
+
+ /jsonrepair/2.2.1:
+ resolution: {integrity: sha512-o9Je8TceILo872uQC9fIBJm957j1Io7z8Ca1iWIqY6S5S65HGE9XN7XEEw7+tUviB9Vq4sygV89MVTxl+rhZyg==}
+ hasBin: true
+ dev: true
+
+ /jsx-ast-utils/3.3.3:
+ resolution: {integrity: sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw==}
+ engines: {node: '>=4.0'}
+ dependencies:
+ array-includes: 3.1.5
+ object.assign: 4.1.4
+ dev: true
+
+ /jwk-to-pem/2.0.5:
+ resolution: {integrity: sha512-L90jwellhO8jRKYwbssU9ifaMVqajzj3fpRjDKcsDzrslU9syRbFqfkXtT4B89HYAap+xsxNcxgBSB09ig+a7A==}
+ dependencies:
+ asn1.js: 5.4.1
+ elliptic: 6.5.4
+ safe-buffer: 5.2.1
+ dev: false
+
+ /kind-of/6.0.3:
+ resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /levn/0.4.1:
+ resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==}
+ engines: {node: '>= 0.8.0'}
+ dependencies:
+ prelude-ls: 1.2.1
+ type-check: 0.4.0
+ dev: true
+
+ /libtap/1.4.0:
+ resolution: {integrity: sha512-STLFynswQ2A6W14JkabgGetBNk6INL1REgJ9UeNKw5llXroC2cGLgKTqavv0sl8OLVztLLipVKMcQ7yeUcqpmg==}
+ engines: {node: '>=10'}
+ dependencies:
+ async-hook-domain: 2.0.4
+ bind-obj-methods: 3.0.0
+ diff: 4.0.2
+ function-loop: 2.0.1
+ minipass: 3.3.4
+ own-or: 1.0.0
+ own-or-env: 1.0.2
+ signal-exit: 3.0.7
+ stack-utils: 2.0.5
+ tap-parser: 11.0.1
+ tap-yaml: 1.0.0
+ tcompare: 5.0.7
+ trivial-deferred: 1.0.1
+ dev: true
+
+ /light-my-request/5.6.0:
+ resolution: {integrity: sha512-xd9HY7p8t+j5bzP5Waqe4+0hluDnupzhLNu7Lk3l6wUbq3MFmTO0xykd0xwd7yhq7nMXfRPnDEurqYEkcJ3Q5g==}
+ dependencies:
+ cookie: 0.5.0
+ process-warning: 2.0.0
+ set-cookie-parser: 2.5.1
+
+ /lines-and-columns/1.2.4:
+ resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
+ dev: true
+
+ /linkify-it/3.0.3:
+ resolution: {integrity: sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==}
+ dependencies:
+ uc.micro: 1.0.6
+ dev: true
+
+ /load-json-file/5.3.0:
+ resolution: {integrity: sha512-cJGP40Jc/VXUsp8/OrnyKyTZ1y6v/dphm3bioS+RrKXjK2BB6wHUd6JptZEFDGgGahMT+InnZO5i1Ei9mpC8Bw==}
+ engines: {node: '>=6'}
+ dependencies:
+ graceful-fs: 4.2.10
+ parse-json: 4.0.0
+ pify: 4.0.1
+ strip-bom: 3.0.0
+ type-fest: 0.3.1
+ dev: true
+
+ /local-pkg/0.4.2:
+ resolution: {integrity: sha512-mlERgSPrbxU3BP4qBqAvvwlgW4MTg78iwJdGGnv7kibKjWcJksrG3t6LB5lXI93wXRDvG4NpUgJFmTG4T6rdrg==}
+ engines: {node: '>=14'}
+ dev: true
+
+ /locate-path/3.0.0:
+ resolution: {integrity: sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==}
+ engines: {node: '>=6'}
+ dependencies:
+ p-locate: 3.0.0
+ path-exists: 3.0.0
+ dev: true
+
+ /locate-path/5.0.0:
+ resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==}
+ engines: {node: '>=8'}
+ dependencies:
+ p-locate: 4.1.0
+ dev: true
+
+ /locate-path/6.0.0:
+ resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==}
+ engines: {node: '>=10'}
+ dependencies:
+ p-locate: 5.0.0
+
+ /lodash.debounce/4.0.8:
+ resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==}
+ dev: true
+
+ /lodash.flattendeep/4.4.0:
+ resolution: {integrity: sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ==}
+ dev: true
+
+ /lodash.memoize/4.1.2:
+ resolution: {integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==}
+
+ /lodash.merge/4.6.2:
+ resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==}
+
+ /lodash.mergewith/4.6.2:
+ resolution: {integrity: sha512-GK3g5RPZWTRSeLSpgP8Xhra+pnjBC56q9FZYe1d5RN3TJ35dbkGy3YqBSMbyCrlbi+CM9Z3Jk5yTL7RCsqboyQ==}
+
+ /lodash.truncate/4.4.2:
+ resolution: {integrity: sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==}
+ dev: false
+
+ /lodash/4.17.21:
+ resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==}
+ dev: true
+
+ /log-symbols/4.1.0:
+ resolution: {integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==}
+ engines: {node: '>=10'}
+ dependencies:
+ chalk: 4.1.2
+ is-unicode-supported: 0.1.0
+ dev: true
+
+ /long/4.0.0:
+ resolution: {integrity: sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==}
+ dev: false
+
+ /loose-envify/1.4.0:
+ resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==}
+ hasBin: true
+ dependencies:
+ js-tokens: 4.0.0
+ dev: true
+
+ /loupe/2.3.4:
+ resolution: {integrity: sha512-OvKfgCC2Ndby6aSTREl5aCCPTNIzlDfQZvZxNUrBrihDhL3xcrYegTblhmEiCrg2kKQz4XsFIaemE5BF4ybSaQ==}
+ dependencies:
+ get-func-name: 2.0.0
+ dev: true
+
+ /lowlight/1.20.0:
+ resolution: {integrity: sha512-8Ktj+prEb1RoCPkEOrPMYUN/nCggB7qAWe3a7OpMjWQkh3l2RD5wKRQ+o8Q8YuI9RG/xs95waaI/E6ym/7NsTw==}
+ dependencies:
+ fault: 1.0.4
+ highlight.js: 10.7.3
+ dev: true
+
+ /lru-cache/4.1.5:
+ resolution: {integrity: sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==}
+ dependencies:
+ pseudomap: 1.0.2
+ yallist: 2.1.2
+ dev: false
+
+ /lru-cache/6.0.0:
+ resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==}
+ engines: {node: '>=10'}
+ dependencies:
+ yallist: 4.0.0
+
+ /lru-cache/7.14.0:
+ resolution: {integrity: sha512-EIRtP1GrSJny0dqb50QXRUNBxHJhcpxHC++M5tD7RYbvLLn5KVWKsbyswSSqDuU15UFi3bgTQIY8nhDMeF6aDQ==}
+ engines: {node: '>=12'}
+ dev: false
+
+ /magic-string/0.26.3:
+ resolution: {integrity: sha512-u1Po0NDyFcwdg2nzHT88wSK0+Rih0N1M+Ph1Sp08k8yvFFU3KR72wryS7e1qMPJypt99WB7fIFVCA92mQrMjrg==}
+ engines: {node: '>=12'}
+ dependencies:
+ sourcemap-codec: 1.4.8
+ dev: true
+
+ /make-dir/3.1.0:
+ resolution: {integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==}
+ engines: {node: '>=8'}
+ dependencies:
+ semver: 6.3.0
+
+ /make-fetch-happen/9.1.0:
+ resolution: {integrity: sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg==}
+ engines: {node: '>= 10'}
+ dependencies:
+ agentkeepalive: 4.2.1
+ cacache: 15.3.0
+ http-cache-semantics: 4.1.0
+ http-proxy-agent: 4.0.1
+ https-proxy-agent: 5.0.1
+ is-lambda: 1.0.1
+ lru-cache: 6.0.0
+ minipass: 3.3.4
+ minipass-collect: 1.0.2
+ minipass-fetch: 1.4.1
+ minipass-flush: 1.0.5
+ minipass-pipeline: 1.2.4
+ negotiator: 0.6.3
+ promise-retry: 2.0.1
+ socks-proxy-agent: 6.2.1
+ ssri: 8.0.1
+ transitivePeerDependencies:
+ - bluebird
+ - supports-color
+ optional: true
+
+ /map-obj/1.0.1:
+ resolution: {integrity: sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /map-obj/4.3.0:
+ resolution: {integrity: sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /markdown-it/12.3.2:
+ resolution: {integrity: sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==}
+ hasBin: true
+ dependencies:
+ argparse: 2.0.1
+ entities: 2.1.0
+ linkify-it: 3.0.3
+ mdurl: 1.0.1
+ uc.micro: 1.0.6
+ dev: true
+
+ /mdurl/1.0.1:
+ resolution: {integrity: sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==}
+ dev: true
+
+ /meow/9.0.0:
+ resolution: {integrity: sha512-+obSblOQmRhcyBt62furQqRAQpNyWXo8BuQ5bN7dG8wmwQ+vwHKp/rCFD4CrTP8CsDQD1sjoZ94K417XEUk8IQ==}
+ engines: {node: '>=10'}
+ dependencies:
+ '@types/minimist': 1.2.2
+ camelcase-keys: 6.2.2
+ decamelize: 1.2.0
+ decamelize-keys: 1.1.0
+ hard-rejection: 2.1.0
+ minimist-options: 4.1.0
+ normalize-package-data: 3.0.3
+ read-pkg-up: 7.0.1
+ redent: 3.0.0
+ trim-newlines: 3.0.1
+ type-fest: 0.18.1
+ yargs-parser: 20.2.9
+ dev: true
+
+ /mercurius/11.0.0_graphql@16.6.0:
+ resolution: {integrity: sha512-7t++ALNt9agIrc2hLhGes4ZIEwQ9zAz8hUmmTQojLNhuGCLDU7NKzcxKsX7UClBJewaQCORqlepE+k11Dsq6lg==}
+ engines: {node: '>=14.19.3'}
+ peerDependencies:
+ graphql: ^16.0.0
+ dependencies:
+ '@fastify/error': 3.0.0
+ '@fastify/static': 6.5.0
+ '@fastify/websocket': 7.0.1
+ events.on: 1.0.1
+ fastify-plugin: 4.2.1
+ graphql: 16.6.0
+ graphql-jit: 0.7.4_graphql@16.6.0
+ mqemitter: 5.0.0
+ p-map: 4.0.0
+ readable-stream: 4.1.0
+ safe-stable-stringify: 2.3.1
+ secure-json-parse: 2.5.0
+ single-user-cache: 0.6.0
+ tiny-lru: 8.0.2
+ undici: 5.10.0
+ ws: 8.8.1
+ transitivePeerDependencies:
+ - bufferutil
+ - supports-color
+ - utf-8-validate
+
+ /merge-stream/2.0.0:
+ resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==}
+
+ /merge2/1.4.1:
+ resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==}
+ engines: {node: '>= 8'}
+ dev: true
+
+ /meros/1.2.0:
+ resolution: {integrity: sha512-3QRZIS707pZQnijHdhbttXRWwrHhZJ/gzolneoxKVz9N/xmsvY/7Ls8lpnI9gxbgxjcHsAVEW3mgwiZCo6kkJQ==}
+ engines: {node: '>=12'}
+ peerDependencies:
+ '@types/node': '>=12'
+ peerDependenciesMeta:
+ '@types/node':
+ optional: true
+ dev: true
+
+ /micromatch/4.0.5:
+ resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==}
+ engines: {node: '>=8.6'}
+ dependencies:
+ braces: 3.0.2
+ picomatch: 2.3.1
+ dev: true
+
+ /mime-db/1.52.0:
+ resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==}
+ engines: {node: '>= 0.6'}
+ dev: true
+
+ /mime-types/2.1.35:
+ resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
+ engines: {node: '>= 0.6'}
+ dependencies:
+ mime-db: 1.52.0
+ dev: true
+
+ /mime/1.6.0:
+ resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==}
+ engines: {node: '>=4'}
+ hasBin: true
+
+ /mimic-fn/4.0.0:
+ resolution: {integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==}
+ engines: {node: '>=12'}
+
+ /min-indent/1.0.1:
+ resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==}
+ engines: {node: '>=4'}
+ dev: true
+
+ /minimalistic-assert/1.0.1:
+ resolution: {integrity: sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==}
+
+ /minimalistic-crypto-utils/1.0.1:
+ resolution: {integrity: sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==}
+ dev: false
+
+ /minimatch/3.1.2:
+ resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==}
+ dependencies:
+ brace-expansion: 1.1.11
+
+ /minimatch/5.1.0:
+ resolution: {integrity: sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg==}
+ engines: {node: '>=10'}
+ dependencies:
+ brace-expansion: 2.0.1
+
+ /minimist-options/4.1.0:
+ resolution: {integrity: sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==}
+ engines: {node: '>= 6'}
+ dependencies:
+ arrify: 1.0.1
+ is-plain-obj: 1.1.0
+ kind-of: 6.0.3
+ dev: true
+
+ /minimist/1.2.6:
+ resolution: {integrity: sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==}
+
+ /minipass-collect/1.0.2:
+ resolution: {integrity: sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==}
+ engines: {node: '>= 8'}
+ dependencies:
+ minipass: 3.3.4
+ optional: true
+
+ /minipass-fetch/1.4.1:
+ resolution: {integrity: sha512-CGH1eblLq26Y15+Azk7ey4xh0J/XfJfrCox5LDJiKqI2Q2iwOLOKrlmIaODiSQS8d18jalF6y2K2ePUm0CmShw==}
+ engines: {node: '>=8'}
+ dependencies:
+ minipass: 3.3.4
+ minipass-sized: 1.0.3
+ minizlib: 2.1.2
+ optionalDependencies:
+ encoding: 0.1.13
+ optional: true
+
+ /minipass-flush/1.0.5:
+ resolution: {integrity: sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==}
+ engines: {node: '>= 8'}
+ dependencies:
+ minipass: 3.3.4
+ optional: true
+
+ /minipass-pipeline/1.2.4:
+ resolution: {integrity: sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==}
+ engines: {node: '>=8'}
+ dependencies:
+ minipass: 3.3.4
+ optional: true
+
+ /minipass-sized/1.0.3:
+ resolution: {integrity: sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==}
+ engines: {node: '>=8'}
+ dependencies:
+ minipass: 3.3.4
+ optional: true
+
+ /minipass/3.3.4:
+ resolution: {integrity: sha512-I9WPbWHCGu8W+6k1ZiGpPu0GkoKBeorkfKNuAFBNS1HNFJvke82sxvI5bzcCNpWPorkOO5QQ+zomzzwRxejXiw==}
+ engines: {node: '>=8'}
+ dependencies:
+ yallist: 4.0.0
+
+ /minizlib/2.1.2:
+ resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==}
+ engines: {node: '>= 8'}
+ dependencies:
+ minipass: 3.3.4
+ yallist: 4.0.0
+
+ /mkdirp/1.0.4:
+ resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==}
+ engines: {node: '>=10'}
+ hasBin: true
+
+ /mnemonist/0.39.2:
+ resolution: {integrity: sha512-n3ZCEosuMH03DVivZ9N0fcXPWiZrBLEdfSlEJ+S/mJxmk3zuo1ur0dj9URDczFyP1VS3wfiyKzqLLDXoPJ6rPA==}
+ dependencies:
+ obliterator: 2.0.4
+
+ /mobius1-selectr/2.4.13:
+ resolution: {integrity: sha512-Mk9qDrvU44UUL0EBhbAA1phfQZ7aMZPjwtL7wkpiBzGh8dETGqfsh50mWoX9EkjDlkONlErWXArHCKfoxVg0Bw==}
+ dev: true
+
+ /mqemitter/5.0.0:
+ resolution: {integrity: sha512-rqNRQhGgl0W/NV+Zrx0rpAUTZcSlAtivCVUmXBUPcFYt+AeDEpoJgy5eKlFWJP6xnatONL59WIFdV0W6niOMhw==}
+ engines: {node: '>=10'}
+ dependencies:
+ fastparallel: 2.4.1
+ qlobber: 7.0.1
+
+ /ms/2.0.0:
+ resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==}
+
+ /ms/2.1.2:
+ resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==}
+
+ /ms/2.1.3:
+ resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
+
+ /mysql2/2.3.3:
+ resolution: {integrity: sha512-wxJUev6LgMSgACDkb/InIFxDprRa6T95+VEoR+xPvtngtccNH2dGjEB/fVZ8yg1gWv1510c9CvXuJHi5zUm0ZA==}
+ engines: {node: '>= 8.0'}
+ dependencies:
+ denque: 2.1.0
+ generate-function: 2.3.1
+ iconv-lite: 0.6.3
+ long: 4.0.0
+ lru-cache: 6.0.0
+ named-placeholders: 1.1.2
+ seq-queue: 0.0.5
+ sqlstring: 2.3.3
+ dev: false
+
+ /named-placeholders/1.1.2:
+ resolution: {integrity: sha512-wiFWqxoLL3PGVReSZpjLVxyJ1bRqe+KKJVbr4hGs1KWfTZTQyezHFBbuKj9hsizHyGV2ne7EMjHdxEGAybD5SA==}
+ engines: {node: '>=6.0.0'}
+ dependencies:
+ lru-cache: 4.1.5
+ dev: false
+
+ /nanoid/3.3.4:
+ resolution: {integrity: sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==}
+ engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
+ hasBin: true
+ dev: true
+
+ /natural-compare/1.4.0:
+ resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==}
+ dev: true
+
+ /negotiator/0.6.3:
+ resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==}
+ engines: {node: '>= 0.6'}
+ optional: true
+
+ /node-addon-api/4.3.0:
+ resolution: {integrity: sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ==}
+
+ /node-domexception/1.0.0:
+ resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==}
+ engines: {node: '>=10.5.0'}
+ dev: true
+
+ /node-fetch/2.6.7:
+ resolution: {integrity: sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==}
+ engines: {node: 4.x || >=6.0.0}
+ peerDependencies:
+ encoding: ^0.1.0
+ peerDependenciesMeta:
+ encoding:
+ optional: true
+ dependencies:
+ whatwg-url: 5.0.0
+
+ /node-gyp/8.4.1:
+ resolution: {integrity: sha512-olTJRgUtAb/hOXG0E93wZDs5YiJlgbXxTwQAFHyNlRsXQnYzUaF2aGgujZbw+hR8aF4ZG/rST57bWMWD16jr9w==}
+ engines: {node: '>= 10.12.0'}
+ hasBin: true
+ requiresBuild: true
+ dependencies:
+ env-paths: 2.2.1
+ glob: 7.2.3
+ graceful-fs: 4.2.10
+ make-fetch-happen: 9.1.0
+ nopt: 5.0.0
+ npmlog: 6.0.2
+ rimraf: 3.0.2
+ semver: 7.3.7
+ tar: 6.1.11
+ which: 2.0.2
+ transitivePeerDependencies:
+ - bluebird
+ - supports-color
+ optional: true
+
+ /node-preload/0.2.1:
+ resolution: {integrity: sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==}
+ engines: {node: '>=8'}
+ dependencies:
+ process-on-spawn: 1.0.0
+ dev: true
+
+ /node-releases/2.0.6:
+ resolution: {integrity: sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==}
+ dev: true
+
+ /nopt/5.0.0:
+ resolution: {integrity: sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==}
+ engines: {node: '>=6'}
+ hasBin: true
+ dependencies:
+ abbrev: 1.1.1
+
+ /normalize-package-data/2.5.0:
+ resolution: {integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==}
+ dependencies:
+ hosted-git-info: 2.8.9
+ resolve: 1.22.1
+ semver: 5.7.1
+ validate-npm-package-license: 3.0.4
+ dev: true
+
+ /normalize-package-data/3.0.3:
+ resolution: {integrity: sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==}
+ engines: {node: '>=10'}
+ dependencies:
+ hosted-git-info: 4.1.0
+ is-core-module: 2.10.0
+ semver: 7.3.7
+ validate-npm-package-license: 3.0.4
+ dev: true
+
+ /normalize-path/3.0.0:
+ resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /npm-run-path/5.1.0:
+ resolution: {integrity: sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q==}
+ engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
+ dependencies:
+ path-key: 4.0.0
+
+ /npmlog/5.0.1:
+ resolution: {integrity: sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==}
+ dependencies:
+ are-we-there-yet: 2.0.0
+ console-control-strings: 1.1.0
+ gauge: 3.0.2
+ set-blocking: 2.0.0
+
+ /npmlog/6.0.2:
+ resolution: {integrity: sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==}
+ engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0}
+ dependencies:
+ are-we-there-yet: 3.0.1
+ console-control-strings: 1.1.0
+ gauge: 4.0.4
+ set-blocking: 2.0.0
+ optional: true
+
+ /nullthrows/1.1.1:
+ resolution: {integrity: sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw==}
+ dev: true
+
+ /nyc/15.1.0:
+ resolution: {integrity: sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==}
+ engines: {node: '>=8.9'}
+ hasBin: true
+ dependencies:
+ '@istanbuljs/load-nyc-config': 1.1.0
+ '@istanbuljs/schema': 0.1.3
+ caching-transform: 4.0.0
+ convert-source-map: 1.8.0
+ decamelize: 1.2.0
+ find-cache-dir: 3.3.2
+ find-up: 4.1.0
+ foreground-child: 2.0.0
+ get-package-type: 0.1.0
+ glob: 7.2.3
+ istanbul-lib-coverage: 3.2.0
+ istanbul-lib-hook: 3.0.0
+ istanbul-lib-instrument: 4.0.3
+ istanbul-lib-processinfo: 2.0.3
+ istanbul-lib-report: 3.0.0
+ istanbul-lib-source-maps: 4.0.1
+ istanbul-reports: 3.1.5
+ make-dir: 3.1.0
+ node-preload: 0.2.1
+ p-map: 3.0.0
+ process-on-spawn: 1.0.0
+ resolve-from: 5.0.0
+ rimraf: 3.0.2
+ signal-exit: 3.0.7
+ spawn-wrap: 2.0.0
+ test-exclude: 6.0.0
+ yargs: 15.4.1
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /object-assign/4.1.1:
+ resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==}
+ engines: {node: '>=0.10.0'}
+
+ /object-inspect/1.12.2:
+ resolution: {integrity: sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==}
+ dev: true
+
+ /object-keys/1.1.1:
+ resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==}
+ engines: {node: '>= 0.4'}
+ dev: true
+
+ /object.assign/4.1.4:
+ resolution: {integrity: sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ call-bind: 1.0.2
+ define-properties: 1.1.4
+ has-symbols: 1.0.3
+ object-keys: 1.1.1
+ dev: true
+
+ /object.entries/1.1.5:
+ resolution: {integrity: sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ call-bind: 1.0.2
+ define-properties: 1.1.4
+ es-abstract: 1.20.2
+ dev: true
+
+ /object.fromentries/2.0.5:
+ resolution: {integrity: sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ call-bind: 1.0.2
+ define-properties: 1.1.4
+ es-abstract: 1.20.2
+ dev: true
+
+ /object.hasown/1.1.1:
+ resolution: {integrity: sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A==}
+ dependencies:
+ define-properties: 1.1.4
+ es-abstract: 1.20.2
+ dev: true
+
+ /object.values/1.1.5:
+ resolution: {integrity: sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ call-bind: 1.0.2
+ define-properties: 1.1.4
+ es-abstract: 1.20.2
+ dev: true
+
+ /obliterator/2.0.4:
+ resolution: {integrity: sha512-lgHwxlxV1qIg1Eap7LgIeoBWIMFibOjbrYPIPJZcI1mmGAI2m3lNYpK12Y+GBdPQ0U1hRwSord7GIaawz962qQ==}
+
+ /on-exit-leak-free/2.1.0:
+ resolution: {integrity: sha512-VuCaZZAjReZ3vUwgOB8LxAosIurDiAW0s13rI1YwmaP++jvcxP77AWoQvenZebpCA2m8WC1/EosPYPMjnRAp/w==}
+
+ /on-finished/2.4.1:
+ resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==}
+ engines: {node: '>= 0.8'}
+ dependencies:
+ ee-first: 1.1.1
+
+ /once/1.4.0:
+ resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==}
+ dependencies:
+ wrappy: 1.0.2
+
+ /onetime/6.0.0:
+ resolution: {integrity: sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==}
+ engines: {node: '>=12'}
+ dependencies:
+ mimic-fn: 4.0.0
+
+ /open/8.4.0:
+ resolution: {integrity: sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q==}
+ engines: {node: '>=12'}
+ dependencies:
+ define-lazy-prop: 2.0.0
+ is-docker: 2.2.1
+ is-wsl: 2.2.0
+ dev: false
+
+ /openapi-types/12.0.2:
+ resolution: {integrity: sha512-GuTo7FyZjOIWVhIhQSWJVaws6A82sWIGyQogxxYBYKZ0NBdyP2CYSIgOwFfSB+UVoPExk/YzFpyYitHS8KVZtA==}
+
+ /opener/1.5.2:
+ resolution: {integrity: sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==}
+ hasBin: true
+ dev: true
+
+ /optionator/0.9.1:
+ resolution: {integrity: sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==}
+ engines: {node: '>= 0.8.0'}
+ dependencies:
+ deep-is: 0.1.4
+ fast-levenshtein: 2.0.6
+ levn: 0.4.1
+ prelude-ls: 1.2.1
+ type-check: 0.4.0
+ word-wrap: 1.2.3
+ dev: true
+
+ /own-or-env/1.0.2:
+ resolution: {integrity: sha512-NQ7v0fliWtK7Lkb+WdFqe6ky9XAzYmlkXthQrBbzlYbmFKoAYbDDcwmOm6q8kOuwSRXW8bdL5ORksploUJmWgw==}
+ dependencies:
+ own-or: 1.0.0
+ dev: true
+
+ /own-or/1.0.0:
+ resolution: {integrity: sha512-NfZr5+Tdf6MB8UI9GLvKRs4cXY8/yB0w3xtt84xFdWy8hkGjn+JFc60VhzS/hFRfbyxFcGYMTjnF4Me+RbbqrA==}
+ dev: true
+
+ /p-limit/2.3.0:
+ resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==}
+ engines: {node: '>=6'}
+ dependencies:
+ p-try: 2.2.0
+ dev: true
+
+ /p-limit/3.1.0:
+ resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==}
+ engines: {node: '>=10'}
+ dependencies:
+ yocto-queue: 0.1.0
+
+ /p-locate/3.0.0:
+ resolution: {integrity: sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==}
+ engines: {node: '>=6'}
+ dependencies:
+ p-limit: 2.3.0
+ dev: true
+
+ /p-locate/4.1.0:
+ resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==}
+ engines: {node: '>=8'}
+ dependencies:
+ p-limit: 2.3.0
+ dev: true
+
+ /p-locate/5.0.0:
+ resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==}
+ engines: {node: '>=10'}
+ dependencies:
+ p-limit: 3.1.0
+
+ /p-map/3.0.0:
+ resolution: {integrity: sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==}
+ engines: {node: '>=8'}
+ dependencies:
+ aggregate-error: 3.1.0
+ dev: true
+
+ /p-map/4.0.0:
+ resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==}
+ engines: {node: '>=10'}
+ dependencies:
+ aggregate-error: 3.1.0
+
+ /p-try/2.2.0:
+ resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==}
+ engines: {node: '>=6'}
+ dev: true
+
+ /package-hash/4.0.0:
+ resolution: {integrity: sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==}
+ engines: {node: '>=8'}
+ dependencies:
+ graceful-fs: 4.2.10
+ hasha: 5.2.2
+ lodash.flattendeep: 4.4.0
+ release-zalgo: 1.0.0
+ dev: true
+
+ /packet-reader/1.0.0:
+ resolution: {integrity: sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==}
+
+ /parent-module/1.0.1:
+ resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==}
+ engines: {node: '>=6'}
+ dependencies:
+ callsites: 3.1.0
+
+ /parse-cache-control/1.0.1:
+ resolution: {integrity: sha512-60zvsJReQPX5/QP0Kzfd/VrpjScIQ7SHBW6bFCYfEP+fp0Eppr1SHhIO5nd1PjZtvclzSzES9D/p5nFJurwfWg==}
+ dev: true
+
+ /parse-entities/2.0.0:
+ resolution: {integrity: sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==}
+ dependencies:
+ character-entities: 1.2.4
+ character-entities-legacy: 1.1.4
+ character-reference-invalid: 1.1.4
+ is-alphanumerical: 1.0.4
+ is-decimal: 1.0.4
+ is-hexadecimal: 1.0.4
+ dev: true
+
+ /parse-json/4.0.0:
+ resolution: {integrity: sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==}
+ engines: {node: '>=4'}
+ dependencies:
+ error-ex: 1.3.2
+ json-parse-better-errors: 1.0.2
+
+ /parse-json/5.2.0:
+ resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==}
+ engines: {node: '>=8'}
+ dependencies:
+ '@babel/code-frame': 7.18.6
+ error-ex: 1.3.2
+ json-parse-even-better-errors: 2.3.1
+ lines-and-columns: 1.2.4
+ dev: true
+
+ /path-exists/3.0.0:
+ resolution: {integrity: sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==}
+ engines: {node: '>=4'}
+ dev: true
+
+ /path-exists/4.0.0:
+ resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==}
+ engines: {node: '>=8'}
+
+ /path-is-absolute/1.0.1:
+ resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==}
+ engines: {node: '>=0.10.0'}
+
+ /path-key/3.1.1:
+ resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==}
+ engines: {node: '>=8'}
+
+ /path-key/4.0.0:
+ resolution: {integrity: sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==}
+ engines: {node: '>=12'}
+
+ /path-parse/1.0.7:
+ resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==}
+ dev: true
+
+ /path-type/4.0.0:
+ resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /pathval/1.1.1:
+ resolution: {integrity: sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==}
+ dev: true
+
+ /pg-connection-string/2.5.0:
+ resolution: {integrity: sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==}
+
+ /pg-cursor/2.7.4_pg@8.8.0:
+ resolution: {integrity: sha512-CNWwOzTTZ9QvphoOL+Wg/7pmVr9GnAWBjPbuK2FRclrB4A/WRO/ssCJ9BlkzIGmmofK2M/LyokNHgsLSn+fMHA==}
+ peerDependencies:
+ pg: ^8
+ dependencies:
+ pg: 8.8.0
+
+ /pg-int8/1.0.1:
+ resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==}
+ engines: {node: '>=4.0.0'}
+
+ /pg-pool/3.5.2_pg@8.8.0:
+ resolution: {integrity: sha512-His3Fh17Z4eg7oANLob6ZvH8xIVen3phEZh2QuyrIl4dQSDVEabNducv6ysROKpDNPSD+12tONZVWfSgMvDD9w==}
+ peerDependencies:
+ pg: '>=8.0'
+ dependencies:
+ pg: 8.8.0
+
+ /pg-protocol/1.5.0:
+ resolution: {integrity: sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==}
+
+ /pg-types/2.2.0:
+ resolution: {integrity: sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==}
+ engines: {node: '>=4'}
+ dependencies:
+ pg-int8: 1.0.1
+ postgres-array: 2.0.0
+ postgres-bytea: 1.0.0
+ postgres-date: 1.0.7
+ postgres-interval: 1.2.0
+
+ /pg/8.8.0:
+ resolution: {integrity: sha512-UXYN0ziKj+AeNNP7VDMwrehpACThH7LUl/p8TDFpEUuSejCUIwGSfxpHsPvtM6/WXFy6SU4E5RG4IJV/TZAGjw==}
+ engines: {node: '>= 8.0.0'}
+ peerDependencies:
+ pg-native: '>=3.0.1'
+ peerDependenciesMeta:
+ pg-native:
+ optional: true
+ dependencies:
+ buffer-writer: 2.0.0
+ packet-reader: 1.0.0
+ pg-connection-string: 2.5.0
+ pg-pool: 3.5.2_pg@8.8.0
+ pg-protocol: 1.5.0
+ pg-types: 2.2.0
+ pgpass: 1.0.5
+
+ /pgpass/1.0.5:
+ resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==}
+ dependencies:
+ split2: 4.1.0
+
+ /picocolors/1.0.0:
+ resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==}
+ dev: true
+
+ /picomatch/2.3.1:
+ resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
+ engines: {node: '>=8.6'}
+ dev: true
+
+ /picomodal/3.0.0:
+ resolution: {integrity: sha512-FoR3TDfuLlqUvcEeK5ifpKSVVns6B4BQvc8SDF6THVMuadya6LLtji0QgUDSStw0ZR2J7I6UGi5V2V23rnPWTw==}
+ dev: true
+
+ /pify/4.0.1:
+ resolution: {integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==}
+ engines: {node: '>=6'}
+ dev: true
+
+ /pino-abstract-transport/1.0.0:
+ resolution: {integrity: sha512-c7vo5OpW4wIS42hUVcT5REsL8ZljsUfBjqV/e2sFxmFEFZiq1XLUp5EYLtuDH6PEHq9W1egWqRbnLUP5FuZmOA==}
+ dependencies:
+ readable-stream: 4.1.0
+ split2: 4.1.0
+
+ /pino-pretty/9.1.0:
+ resolution: {integrity: sha512-IM6NY9LLo/dVgY7/prJhCh4rAJukafdt0ibxeNOWc2fxKMyTk90SOB9Ao2HfbtShT9QPeP0ePpJktksMhSQMYA==}
+ hasBin: true
+ dependencies:
+ colorette: 2.0.19
+ dateformat: 4.6.3
+ fast-copy: 2.1.3
+ fast-safe-stringify: 2.1.1
+ help-me: 4.1.0
+ joycon: 3.1.1
+ minimist: 1.2.6
+ on-exit-leak-free: 2.1.0
+ pino-abstract-transport: 1.0.0
+ pump: 3.0.0
+ readable-stream: 4.1.0
+ secure-json-parse: 2.5.0
+ sonic-boom: 3.2.0
+ strip-json-comments: 3.1.1
+ dev: false
+
+ /pino-std-serializers/6.0.0:
+ resolution: {integrity: sha512-mMMOwSKrmyl+Y12Ri2xhH1lbzQxwwpuru9VjyJpgFIH4asSj88F2csdMwN6+M5g1Ll4rmsYghHLQJw81tgZ7LQ==}
+
+ /pino/8.5.0:
+ resolution: {integrity: sha512-PuD6sOti8Y+p9zRoNB5dibmfjfM/OU2tEtJFICxw5ulXi1d0qnq/Rt3CsR6aBEAOeyCXP+ZUfiNWW+tt55pNzg==}
+ hasBin: true
+ dependencies:
+ atomic-sleep: 1.0.0
+ fast-redact: 3.1.2
+ on-exit-leak-free: 2.1.0
+ pino-abstract-transport: 1.0.0
+ pino-std-serializers: 6.0.0
+ process-warning: 2.0.0
+ quick-format-unescaped: 4.0.4
+ real-require: 0.2.0
+ safe-stable-stringify: 2.3.1
+ sonic-boom: 3.2.0
+ thread-stream: 2.2.0
+
+ /pkg-conf/3.1.0:
+ resolution: {integrity: sha512-m0OTbR/5VPNPqO1ph6Fqbj7Hv6QU7gR/tQW40ZqrL1rjgCU85W6C1bJn0BItuJqnR98PWzw7Z8hHeChD1WrgdQ==}
+ engines: {node: '>=6'}
+ dependencies:
+ find-up: 3.0.0
+ load-json-file: 5.3.0
+ dev: true
+
+ /pkg-dir/4.2.0:
+ resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==}
+ engines: {node: '>=8'}
+ dependencies:
+ find-up: 4.1.0
+ dev: true
+
+ /playwright-core/1.25.2:
+ resolution: {integrity: sha512-0yTbUE9lIddkEpLHL3u8PoCL+pWiZtj5A/j3U7YoNjcmKKDGBnCrgHJMzwd2J5vy6l28q4ki3JIuz7McLHhl1A==}
+ engines: {node: '>=14'}
+ hasBin: true
+ dev: true
+
+ /playwright/1.25.2:
+ resolution: {integrity: sha512-RwMB5SFRV/8wSfK+tK8ycpqdzORvoqUNz9DUeRfSgZFrZej5uuBl9wFjWcc+OkXFEtaPmx1acAVGG7hA4IJ1kg==}
+ engines: {node: '>=14'}
+ hasBin: true
+ requiresBuild: true
+ dependencies:
+ playwright-core: 1.25.2
+ dev: true
+
+ /plur/4.0.0:
+ resolution: {integrity: sha512-4UGewrYgqDFw9vV6zNV+ADmPAUAfJPKtGvb/VdpQAx25X5f3xXdGdyOEVFwkl8Hl/tl7+xbeHqSEM+D5/TirUg==}
+ engines: {node: '>=10'}
+ dependencies:
+ irregular-plurals: 3.3.0
+ dev: true
+
+ /postcss/8.4.16:
+ resolution: {integrity: sha512-ipHE1XBvKzm5xI7hiHCZJCSugxvsdq2mPnsq5+UF+VHCjiBvtDrlxJfMBToWaP9D5XlgNmcFGqoHmUn0EYEaRQ==}
+ engines: {node: ^10 || ^12 || >=14}
+ dependencies:
+ nanoid: 3.3.4
+ picocolors: 1.0.0
+ source-map-js: 1.0.2
+ dev: true
+
+ /postgrator/7.1.0:
+ resolution: {integrity: sha512-twbg6EBhC/GYpRV75ZHzWUcztjfmRQf+rGTHm9r8epCTaWbDBplbjh5keruUJkWP4Lb3UeHnHrDVuFl6BRS3Wg==}
+ engines: {node: '>=14.0.0'}
+ dependencies:
+ glob: 7.2.3
+ dev: false
+
+ /postgres-array/2.0.0:
+ resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==}
+ engines: {node: '>=4'}
+
+ /postgres-bytea/1.0.0:
+ resolution: {integrity: sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==}
+ engines: {node: '>=0.10.0'}
+
+ /postgres-date/1.0.7:
+ resolution: {integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==}
+ engines: {node: '>=0.10.0'}
+
+ /postgres-interval/1.2.0:
+ resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==}
+ engines: {node: '>=0.10.0'}
+ dependencies:
+ xtend: 4.0.2
+
+ /prelude-ls/1.2.1:
+ resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==}
+ engines: {node: '>= 0.8.0'}
+ dev: true
+
+ /prismjs/1.27.0:
+ resolution: {integrity: sha512-t13BGPUlFDR7wRB5kQDG4jjl7XeuH6jbJGt11JHPL96qwsEHNX2+68tFXqc1/k+/jALsbSWJKUOT/hcYAZ5LkA==}
+ engines: {node: '>=6'}
+ dev: true
+
+ /prismjs/1.29.0:
+ resolution: {integrity: sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==}
+ engines: {node: '>=6'}
+ dev: true
+
+ /process-nextick-args/2.0.1:
+ resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==}
+ dev: true
+
+ /process-on-spawn/1.0.0:
+ resolution: {integrity: sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==}
+ engines: {node: '>=8'}
+ dependencies:
+ fromentries: 1.3.2
+ dev: true
+
+ /process-warning/2.0.0:
+ resolution: {integrity: sha512-+MmoAXoUX+VTHAlwns0h+kFUWFs/3FZy+ZuchkgjyOu3oioLAo2LB5aCfKPh2+P9O18i3m43tUEv3YqttSy0Ww==}
+
+ /prom-client/14.1.0:
+ resolution: {integrity: sha512-iFWCchQmi4170omLpFXbzz62SQTmPhtBL35v0qGEVRHKcqIeiexaoYeP0vfZTujxEq3tA87iqOdRbC9svS1B9A==}
+ engines: {node: '>=10'}
+ dependencies:
+ tdigest: 0.1.2
+ dev: false
+
+ /promise-inflight/1.0.1:
+ resolution: {integrity: sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==}
+ peerDependencies:
+ bluebird: '*'
+ peerDependenciesMeta:
+ bluebird:
+ optional: true
+ optional: true
+
+ /promise-retry/2.0.1:
+ resolution: {integrity: sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==}
+ engines: {node: '>=10'}
+ dependencies:
+ err-code: 2.0.3
+ retry: 0.12.0
+ optional: true
+
+ /promise/6.1.0:
+ resolution: {integrity: sha512-O+uwGKreKNKkshzZv2P7N64lk6EP17iXBn0PbUnNQhk+Q0AHLstiTrjkx3v5YBd3cxUe7Sq6KyRhl/A0xUjk7Q==}
+ dependencies:
+ asap: 1.0.0
+
+ /promise/8.2.0:
+ resolution: {integrity: sha512-+CMAlLHqwRYwBMXKCP+o8ns7DN+xHDUiI+0nArsiJ9y+kJVPLFxEaSw6Ha9s9H0tftxg2Yzl25wqj9G7m5wLZg==}
+ dependencies:
+ asap: 2.0.6
+ dev: true
+
+ /prop-types/15.8.1:
+ resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==}
+ dependencies:
+ loose-envify: 1.4.0
+ object-assign: 4.1.1
+ react-is: 16.13.1
+ dev: true
+
+ /property-information/5.6.0:
+ resolution: {integrity: sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==}
+ dependencies:
+ xtend: 4.0.2
+ dev: true
+
+ /proxy-addr/2.0.7:
+ resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==}
+ engines: {node: '>= 0.10'}
+ dependencies:
+ forwarded: 0.2.0
+ ipaddr.js: 1.9.1
+
+ /pseudomap/1.0.2:
+ resolution: {integrity: sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==}
+ dev: false
+
+ /pump/3.0.0:
+ resolution: {integrity: sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==}
+ dependencies:
+ end-of-stream: 1.4.4
+ once: 1.4.0
+ dev: false
+
+ /punycode/1.3.2:
+ resolution: {integrity: sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==}
+ dev: true
+
+ /punycode/2.1.1:
+ resolution: {integrity: sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==}
+ engines: {node: '>=6'}
+
+ /pupa/3.1.0:
+ resolution: {integrity: sha512-FLpr4flz5xZTSJxSeaheeMKN/EDzMdK7b8PTOC6a5PYFKTucWbdqjgqaEyH0shFiSJrVB1+Qqi4Tk19ccU6Aug==}
+ engines: {node: '>=12.20'}
+ dependencies:
+ escape-goat: 4.0.0
+ dev: false
+
+ /qlobber/7.0.1:
+ resolution: {integrity: sha512-FsFg9lMuMEFNKmTO9nV7tlyPhx8BmskPPjH2akWycuYVTtWaVwhW5yCHLJQ6Q+3mvw5cFX2vMfW2l9z2SiYAbg==}
+ engines: {node: '>= 14'}
+
+ /qs/6.11.0:
+ resolution: {integrity: sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==}
+ engines: {node: '>=0.6'}
+ dependencies:
+ side-channel: 1.0.4
+ dev: true
+
+ /querystring/0.2.0:
+ resolution: {integrity: sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==}
+ engines: {node: '>=0.4.x'}
+ deprecated: The querystring API is considered Legacy. new code should use the URLSearchParams API instead.
+ dev: true
+
+ /querystringify/2.2.0:
+ resolution: {integrity: sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==}
+ dev: true
+
+ /queue-microtask/1.2.3:
+ resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==}
+ dev: true
+
+ /quick-format-unescaped/4.0.4:
+ resolution: {integrity: sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==}
+
+ /quick-lru/4.0.1:
+ resolution: {integrity: sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /randexp/0.5.3:
+ resolution: {integrity: sha512-U+5l2KrcMNOUPYvazA3h5ekF80FHTUG+87SEAmHZmolh1M+i/WyTCxVzmi+tidIa1tM4BSe8g2Y/D3loWDjj+w==}
+ engines: {node: '>=4'}
+ dependencies:
+ drange: 1.1.1
+ ret: 0.2.2
+ dev: true
+
+ /randombytes/2.1.0:
+ resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==}
+ dependencies:
+ safe-buffer: 5.2.1
+ dev: true
+
+ /range-parser/1.2.1:
+ resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==}
+ engines: {node: '>= 0.6'}
+
+ /react-clientside-effect/1.2.6_react@18.2.0:
+ resolution: {integrity: sha512-XGGGRQAKY+q25Lz9a/4EPqom7WRjz3z9R2k4jhVKA/puQFH/5Nt27vFZYql4m4NVNdUvX8PS3O7r/Zzm7cjUlg==}
+ peerDependencies:
+ react: ^15.3.0 || ^16.0.0 || ^17.0.0 || ^18.0.0
+ dependencies:
+ '@babel/runtime': 7.19.0
+ react: 18.2.0
+ dev: true
+
+ /react-copy-to-clipboard/5.0.4_react@18.2.0:
+ resolution: {integrity: sha512-IeVAiNVKjSPeGax/Gmkqfa/+PuMTBhutEvFUaMQLwE2tS0EXrAdgOpWDX26bWTXF3HrioorR7lr08NqeYUWQCQ==}
+ peerDependencies:
+ react: ^15.3.0 || ^16.0.0 || ^17.0.0
+ dependencies:
+ copy-to-clipboard: 3.3.2
+ prop-types: 15.8.1
+ react: 18.2.0
+ dev: true
+
+ /react-debounce-input/3.2.4_react@18.2.0:
+ resolution: {integrity: sha512-fX70bNj0fLEYO2Zcvuh7eh9wOUQ29GIx6r8IxIJlc0i0mpUH++9ax0BhfAYfzndADli3RAMROrZQ014J01owrg==}
+ peerDependencies:
+ react: ^15.3.0 || ^16.0.0 || ^17.0.0
+ dependencies:
+ lodash.debounce: 4.0.8
+ prop-types: 15.8.1
+ react: 18.2.0
+ dev: true
+
+ /react-dom/18.2.0_react@18.2.0:
+ resolution: {integrity: sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==}
+ peerDependencies:
+ react: ^18.2.0
+ dependencies:
+ loose-envify: 1.4.0
+ react: 18.2.0
+ scheduler: 0.23.0
+ dev: true
+
+ /react-focus-lock/2.9.1_w5j4k42lgipnm43s3brx6h3c34:
+ resolution: {integrity: sha512-pSWOQrUmiKLkffPO6BpMXN7SNKXMsuOakl652IBuALAu1esk+IcpJyM+ALcYzPTTFz1rD0R54aB9A4HuP5t1Wg==}
+ peerDependencies:
+ '@types/react': ^16.8.0 || ^17.0.0 || ^18.0.0
+ react: ^16.8.0 || ^17.0.0 || ^18.0.0
+ peerDependenciesMeta:
+ '@types/react':
+ optional: true
+ dependencies:
+ '@babel/runtime': 7.19.0
+ '@types/react': 18.0.20
+ focus-lock: 0.11.2
+ prop-types: 15.8.1
+ react: 18.2.0
+ react-clientside-effect: 1.2.6_react@18.2.0
+ use-callback-ref: 1.3.0_w5j4k42lgipnm43s3brx6h3c34
+ use-sidecar: 1.1.2_w5j4k42lgipnm43s3brx6h3c34
+ dev: true
+
+ /react-hot-toast/2.4.0_owo25xnefcwdq3zjgtohz6dbju:
+ resolution: {integrity: sha512-qnnVbXropKuwUpriVVosgo8QrB+IaPJCpL8oBI6Ov84uvHZ5QQcTp2qg6ku2wNfgJl6rlQXJIQU5q+5lmPOutA==}
+ engines: {node: '>=10'}
+ peerDependencies:
+ react: '>=16'
+ react-dom: '>=16'
+ dependencies:
+ goober: 2.1.11_csstype@3.1.1
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ transitivePeerDependencies:
+ - csstype
+ dev: true
+
+ /react-immutable-proptypes/2.2.0_immutable@3.8.2:
+ resolution: {integrity: sha512-Vf4gBsePlwdGvSZoLSBfd4HAP93HDauMY4fDjXhreg/vg6F3Fj/MXDNyTbltPC/xZKmZc+cjLu3598DdYK6sgQ==}
+ peerDependencies:
+ immutable: '>=3.6.2'
+ dependencies:
+ immutable: 3.8.2
+ invariant: 2.2.4
+ dev: true
+
+ /react-immutable-pure-component/2.2.2_lqollyv6thcj6icxhf5vjqu2mi:
+ resolution: {integrity: sha512-vkgoMJUDqHZfXXnjVlG3keCxSO/U6WeDQ5/Sl0GK2cH8TOxEzQ5jXqDXHEL/jqk6fsNxV05oH5kD7VNMUE2k+A==}
+ peerDependencies:
+ immutable: '>= 2 || >= 4.0.0-rc'
+ react: '>= 16.6'
+ react-dom: '>= 16.6'
+ dependencies:
+ immutable: 3.8.2
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ dev: true
+
+ /react-inspector/5.1.1_react@18.2.0:
+ resolution: {integrity: sha512-GURDaYzoLbW8pMGXwYPDBIv6nqei4kK7LPRZ9q9HCZF54wqXz/dnylBp/kfE9XmekBhHvLDdcYeyIwSrvtOiWg==}
+ peerDependencies:
+ react: ^16.8.4 || ^17.0.0
+ dependencies:
+ '@babel/runtime': 7.19.0
+ is-dom: 1.1.0
+ prop-types: 15.8.1
+ react: 18.2.0
+ dev: true
+
+ /react-is/16.13.1:
+ resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==}
+ dev: true
+
+ /react-is/17.0.2:
+ resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==}
+ dev: true
+
+ /react-is/18.2.0:
+ resolution: {integrity: sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==}
+ dev: true
+
+ /react-redux/7.2.8_biqbaboplfbrettd7655fr4n2y:
+ resolution: {integrity: sha512-6+uDjhs3PSIclqoCk0kd6iX74gzrGc3W5zcAjbrFgEdIjRSQObdIwfx80unTkVUYvbQ95Y8Av3OvFHq1w5EOUw==}
+ peerDependencies:
+ react: ^16.8.3 || ^17 || ^18
+ react-dom: '*'
+ react-native: '*'
+ peerDependenciesMeta:
+ react-dom:
+ optional: true
+ react-native:
+ optional: true
+ dependencies:
+ '@babel/runtime': 7.19.0
+ '@types/react-redux': 7.1.24
+ hoist-non-react-statics: 3.3.2
+ loose-envify: 1.4.0
+ prop-types: 15.8.1
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ react-is: 17.0.2
+ dev: true
+
+ /react-refresh/0.14.0:
+ resolution: {integrity: sha512-wViHqhAd8OHeLS/IRMJjTSDHF3U9eWi62F/MledQGPdJGDhodXJ9PBLNGr6WWL7qlH12Mt3TyTpbS+hGXMjCzQ==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /react-remove-scroll-bar/2.3.3_w5j4k42lgipnm43s3brx6h3c34:
+ resolution: {integrity: sha512-i9GMNWwpz8XpUpQ6QlevUtFjHGqnPG4Hxs+wlIJntu/xcsZVEpJcIV71K3ZkqNy2q3GfgvkD7y6t/Sv8ofYSbw==}
+ engines: {node: '>=10'}
+ peerDependencies:
+ '@types/react': ^16.8.0 || ^17.0.0 || ^18.0.0
+ react: ^16.8.0 || ^17.0.0 || ^18.0.0
+ peerDependenciesMeta:
+ '@types/react':
+ optional: true
+ dependencies:
+ '@types/react': 18.0.20
+ react: 18.2.0
+ react-style-singleton: 2.2.1_w5j4k42lgipnm43s3brx6h3c34
+ tslib: 2.4.0
+ dev: true
+
+ /react-remove-scroll/2.5.5_w5j4k42lgipnm43s3brx6h3c34:
+ resolution: {integrity: sha512-ImKhrzJJsyXJfBZ4bzu8Bwpka14c/fQt0k+cyFp/PBhTfyDnU5hjOtM4AG/0AMyy8oKzOTR0lDgJIM7pYXI0kw==}
+ engines: {node: '>=10'}
+ peerDependencies:
+ '@types/react': ^16.8.0 || ^17.0.0 || ^18.0.0
+ react: ^16.8.0 || ^17.0.0 || ^18.0.0
+ peerDependenciesMeta:
+ '@types/react':
+ optional: true
+ dependencies:
+ '@types/react': 18.0.20
+ react: 18.2.0
+ react-remove-scroll-bar: 2.3.3_w5j4k42lgipnm43s3brx6h3c34
+ react-style-singleton: 2.2.1_w5j4k42lgipnm43s3brx6h3c34
+ tslib: 2.4.0
+ use-callback-ref: 1.3.0_w5j4k42lgipnm43s3brx6h3c34
+ use-sidecar: 1.1.2_w5j4k42lgipnm43s3brx6h3c34
+ dev: true
+
+ /react-router-dom/6.4.0_biqbaboplfbrettd7655fr4n2y:
+ resolution: {integrity: sha512-4Aw1xmXKeleYYQ3x0Lcl2undHR6yMjXZjd9DKZd53SGOYqirrUThyUb0wwAX5VZAyvSuzjNJmZlJ3rR9+/vzqg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ react: '>=16.8'
+ react-dom: '>=16.8'
+ dependencies:
+ react: 18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ react-router: 6.4.0_react@18.2.0
+ dev: true
+
+ /react-router/6.4.0_react@18.2.0:
+ resolution: {integrity: sha512-B+5bEXFlgR1XUdHYR6P94g299SjrfCBMmEDJNcFbpAyRH1j1748yt9NdDhW3++nw1lk3zQJ6aOO66zUx3KlTZg==}
+ engines: {node: '>=14'}
+ peerDependencies:
+ react: '>=16.8'
+ dependencies:
+ '@remix-run/router': 1.0.0
+ react: 18.2.0
+ dev: true
+
+ /react-shallow-renderer/16.15.0_react@18.2.0:
+ resolution: {integrity: sha512-oScf2FqQ9LFVQgA73vr86xl2NaOIX73rh+YFqcOp68CWj56tSfgtGKrEbyhCj0rSijyG9M1CYprTh39fBi5hzA==}
+ peerDependencies:
+ react: ^16.0.0 || ^17.0.0 || ^18.0.0
+ dependencies:
+ object-assign: 4.1.1
+ react: 18.2.0
+ react-is: 18.2.0
+ dev: true
+
+ /react-style-singleton/2.2.1_w5j4k42lgipnm43s3brx6h3c34:
+ resolution: {integrity: sha512-ZWj0fHEMyWkHzKYUr2Bs/4zU6XLmq9HsgBURm7g5pAVfyn49DgUiNgY2d4lXRlYSiCif9YBGpQleewkcqddc7g==}
+ engines: {node: '>=10'}
+ peerDependencies:
+ '@types/react': ^16.8.0 || ^17.0.0 || ^18.0.0
+ react: ^16.8.0 || ^17.0.0 || ^18.0.0
+ peerDependenciesMeta:
+ '@types/react':
+ optional: true
+ dependencies:
+ '@types/react': 18.0.20
+ get-nonce: 1.0.1
+ invariant: 2.2.4
+ react: 18.2.0
+ tslib: 2.4.0
+ dev: true
+
+ /react-syntax-highlighter/15.5.0_react@18.2.0:
+ resolution: {integrity: sha512-+zq2myprEnQmH5yw6Gqc8lD55QHnpKaU8TOcFeC/Lg/MQSs8UknEA0JC4nTZGFAXC2J2Hyj/ijJ7NlabyPi2gg==}
+ peerDependencies:
+ react: '>= 0.14.0'
+ dependencies:
+ '@babel/runtime': 7.19.0
+ highlight.js: 10.7.3
+ lowlight: 1.20.0
+ prismjs: 1.29.0
+ react: 18.2.0
+ refractor: 3.6.0
+ dev: true
+
+ /react-test-renderer/18.2.0_react@18.2.0:
+ resolution: {integrity: sha512-JWD+aQ0lh2gvh4NM3bBM42Kx+XybOxCpgYK7F8ugAlpaTSnWsX+39Z4XkOykGZAHrjwwTZT3x3KxswVWxHPUqA==}
+ peerDependencies:
+ react: ^18.2.0
+ dependencies:
+ react: 18.2.0
+ react-is: 18.2.0
+ react-shallow-renderer: 16.15.0_react@18.2.0
+ scheduler: 0.23.0
+ dev: true
+
+ /react/18.2.0:
+ resolution: {integrity: sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==}
+ engines: {node: '>=0.10.0'}
+ dependencies:
+ loose-envify: 1.4.0
+ dev: true
+
+ /read-pkg-up/7.0.1:
+ resolution: {integrity: sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==}
+ engines: {node: '>=8'}
+ dependencies:
+ find-up: 4.1.0
+ read-pkg: 5.2.0
+ type-fest: 0.8.1
+ dev: true
+
+ /read-pkg/5.2.0:
+ resolution: {integrity: sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==}
+ engines: {node: '>=8'}
+ dependencies:
+ '@types/normalize-package-data': 2.4.1
+ normalize-package-data: 2.5.0
+ parse-json: 5.2.0
+ type-fest: 0.6.0
+ dev: true
+
+ /readable-stream/2.3.7:
+ resolution: {integrity: sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==}
+ dependencies:
+ core-util-is: 1.0.3
+ inherits: 2.0.4
+ isarray: 1.0.0
+ process-nextick-args: 2.0.1
+ safe-buffer: 5.1.2
+ string_decoder: 1.1.1
+ util-deprecate: 1.0.2
+ dev: true
+
+ /readable-stream/3.6.0:
+ resolution: {integrity: sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==}
+ engines: {node: '>= 6'}
+ dependencies:
+ inherits: 2.0.4
+ string_decoder: 1.3.0
+ util-deprecate: 1.0.2
+
+ /readable-stream/4.1.0:
+ resolution: {integrity: sha512-sVisi3+P2lJ2t0BPbpK629j8wRW06yKGJUcaLAGXPAUhyUxVJm7VsCTit1PFgT4JHUDMrGNR+ZjSKpzGaRF3zw==}
+ engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+ dependencies:
+ abort-controller: 3.0.0
+
+ /readdirp/3.6.0:
+ resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
+ engines: {node: '>=8.10.0'}
+ dependencies:
+ picomatch: 2.3.1
+ dev: true
+
+ /real-require/0.2.0:
+ resolution: {integrity: sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==}
+ engines: {node: '>= 12.13.0'}
+
+ /redent/3.0.0:
+ resolution: {integrity: sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==}
+ engines: {node: '>=8'}
+ dependencies:
+ indent-string: 4.0.0
+ strip-indent: 3.0.0
+ dev: true
+
+ /redux-immutable/4.0.0_immutable@3.8.2:
+ resolution: {integrity: sha512-SchSn/DWfGb3oAejd+1hhHx01xUoxY+V7TeK0BKqpkLKiQPVFf7DYzEaKmrEVxsWxielKfSK9/Xq66YyxgR1cg==}
+ peerDependencies:
+ immutable: ^3.8.1 || ^4.0.0-rc.1
+ dependencies:
+ immutable: 3.8.2
+ dev: true
+
+ /redux/4.2.0:
+ resolution: {integrity: sha512-oSBmcKKIuIR4ME29/AeNUnl5L+hvBq7OaJWzaptTQJAntaPvxIJqfnjbaEiCzzaIz+XmVILfqAM3Ob0aXLPfjA==}
+ dependencies:
+ '@babel/runtime': 7.19.0
+ dev: true
+
+ /refractor/3.6.0:
+ resolution: {integrity: sha512-MY9W41IOWxxk31o+YvFCNyNzdkc9M20NoZK5vq6jkv4I/uh2zkWcfudj0Q1fovjUQJrNewS9NMzeTtqPf+n5EA==}
+ dependencies:
+ hastscript: 6.0.0
+ parse-entities: 2.0.0
+ prismjs: 1.27.0
+ dev: true
+
+ /regenerator-runtime/0.13.9:
+ resolution: {integrity: sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==}
+ dev: true
+
+ /regexp.prototype.flags/1.4.3:
+ resolution: {integrity: sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==}
+ engines: {node: '>= 0.4'}
+ dependencies:
+ call-bind: 1.0.2
+ define-properties: 1.1.4
+ functions-have-names: 1.2.3
+ dev: true
+
+ /regexpp/3.2.0:
+ resolution: {integrity: sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /release-zalgo/1.0.0:
+ resolution: {integrity: sha512-gUAyHVHPPC5wdqX/LG4LWtRYtgjxyX78oanFNTMMyFEfOqdC54s3eE82imuWKbOeqYht2CrNf64Qb8vgmmtZGA==}
+ engines: {node: '>=4'}
+ dependencies:
+ es6-error: 4.1.1
+ dev: true
+
+ /remarkable/2.0.1:
+ resolution: {integrity: sha512-YJyMcOH5lrR+kZdmB0aJJ4+93bEojRZ1HGDn9Eagu6ibg7aVZhc3OWbbShRid+Q5eAfsEqWxpe+g5W5nYNfNiA==}
+ engines: {node: '>= 6.0.0'}
+ hasBin: true
+ dependencies:
+ argparse: 1.0.10
+ autolinker: 3.16.2
+ dev: true
+
+ /repeat-string/1.6.1:
+ resolution: {integrity: sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==}
+ engines: {node: '>=0.10'}
+ dev: true
+
+ /require-directory/2.1.1:
+ resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==}
+ engines: {node: '>=0.10.0'}
+
+ /require-from-string/2.0.2:
+ resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==}
+ engines: {node: '>=0.10.0'}
+
+ /require-main-filename/2.0.0:
+ resolution: {integrity: sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==}
+ dev: true
+
+ /requires-port/1.0.0:
+ resolution: {integrity: sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==}
+ dev: true
+
+ /reselect/4.1.6:
+ resolution: {integrity: sha512-ZovIuXqto7elwnxyXbBtCPo9YFEr3uJqj2rRbcOOog1bmu2Ag85M4hixSwFWyaBMKXNgvPaJ9OSu9SkBPIeJHQ==}
+ dev: true
+
+ /resolve-from/3.0.0:
+ resolution: {integrity: sha512-GnlH6vxLymXJNMBo7XP1fJIzBFbdYt49CuTwmB/6N53t+kMPRMFKz783LlQ4tv28XoQfMWinAJX6WCGf2IlaIw==}
+ engines: {node: '>=4'}
+
+ /resolve-from/4.0.0:
+ resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==}
+ engines: {node: '>=4'}
+
+ /resolve-from/5.0.0:
+ resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /resolve/1.22.1:
+ resolution: {integrity: sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==}
+ hasBin: true
+ dependencies:
+ is-core-module: 2.10.0
+ path-parse: 1.0.7
+ supports-preserve-symlinks-flag: 1.0.0
+ dev: true
+
+ /resolve/2.0.0-next.4:
+ resolution: {integrity: sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==}
+ hasBin: true
+ dependencies:
+ is-core-module: 2.10.0
+ path-parse: 1.0.7
+ supports-preserve-symlinks-flag: 1.0.0
+ dev: true
+
+ /ret/0.2.2:
+ resolution: {integrity: sha512-M0b3YWQs7R3Z917WRQy1HHA7Ba7D8hvZg6UE5mLykJxQVE2ju0IXbGlaHPPlkY+WN7wFP+wUMXmBFA0aV6vYGQ==}
+ engines: {node: '>=4'}
+
+ /retry/0.12.0:
+ resolution: {integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==}
+ engines: {node: '>= 4'}
+ optional: true
+
+ /reusify/1.0.4:
+ resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==}
+ engines: {iojs: '>=1.0.0', node: '>=0.10.0'}
+
+ /rfdc/1.3.0:
+ resolution: {integrity: sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==}
+
+ /rimraf/3.0.2:
+ resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==}
+ hasBin: true
+ dependencies:
+ glob: 7.2.3
+
+ /rollup/2.78.1:
+ resolution: {integrity: sha512-VeeCgtGi4P+o9hIg+xz4qQpRl6R401LWEXBmxYKOV4zlF82lyhgh2hTZnheFUbANE8l2A41F458iwj2vEYaXJg==}
+ engines: {node: '>=10.0.0'}
+ hasBin: true
+ optionalDependencies:
+ fsevents: 2.3.2
+ dev: true
+
+ /run-parallel/1.2.0:
+ resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==}
+ dependencies:
+ queue-microtask: 1.2.3
+ dev: true
+
+ /safe-buffer/5.1.2:
+ resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==}
+
+ /safe-buffer/5.2.1:
+ resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}
+
+ /safe-regex2/2.0.0:
+ resolution: {integrity: sha512-PaUSFsUaNNuKwkBijoAPHAK6/eM6VirvyPWlZ7BAQy4D+hCvh4B6lIG+nPdhbFfIbP+gTGBcrdsOaUs0F+ZBOQ==}
+ dependencies:
+ ret: 0.2.2
+
+ /safe-stable-stringify/2.3.1:
+ resolution: {integrity: sha512-kYBSfT+troD9cDA85VDnHZ1rpHC50O0g1e6WlGHVCz/g+JS+9WKLj+XwFYyR8UbrZN8ll9HUpDAAddY58MGisg==}
+ engines: {node: '>=10'}
+
+ /safer-buffer/2.1.2:
+ resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
+
+ /scheduler/0.23.0:
+ resolution: {integrity: sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==}
+ dependencies:
+ loose-envify: 1.4.0
+ dev: true
+
+ /secure-json-parse/2.5.0:
+ resolution: {integrity: sha512-ZQruFgZnIWH+WyO9t5rWt4ZEGqCKPwhiw+YbzTwpmT9elgLrLcfuyUiSnwwjUiVy9r4VM3urtbNF1xmEh9IL2w==}
+
+ /semver/5.7.1:
+ resolution: {integrity: sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==}
+ hasBin: true
+ dev: true
+
+ /semver/6.3.0:
+ resolution: {integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==}
+ hasBin: true
+
+ /semver/7.3.7:
+ resolution: {integrity: sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==}
+ engines: {node: '>=10'}
+ hasBin: true
+ dependencies:
+ lru-cache: 6.0.0
+
+ /send/0.18.0:
+ resolution: {integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==}
+ engines: {node: '>= 0.8.0'}
+ dependencies:
+ debug: 2.6.9
+ depd: 2.0.0
+ destroy: 1.2.0
+ encodeurl: 1.0.2
+ escape-html: 1.0.3
+ etag: 1.8.1
+ fresh: 0.5.2
+ http-errors: 2.0.0
+ mime: 1.6.0
+ ms: 2.1.3
+ on-finished: 2.4.1
+ range-parser: 1.2.1
+ statuses: 2.0.1
+ transitivePeerDependencies:
+ - supports-color
+
+ /seq-queue/0.0.5:
+ resolution: {integrity: sha512-hr3Wtp/GZIc/6DAGPDcV4/9WoZhjrkXsi5B/07QgX8tsdc6ilr7BFM6PM6rbdAX1kFSDYeZGLipIZZKyQP0O5Q==}
+ dev: false
+
+ /serialize-error/8.1.0:
+ resolution: {integrity: sha512-3NnuWfM6vBYoy5gZFvHiYsVbafvI9vZv/+jlIigFn4oP4zjNPK3LhcY0xSCgeb1a5L8jO71Mit9LlNoi2UfDDQ==}
+ engines: {node: '>=10'}
+ dependencies:
+ type-fest: 0.20.2
+ dev: true
+
+ /set-blocking/2.0.0:
+ resolution: {integrity: sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==}
+
+ /set-cookie-parser/2.5.1:
+ resolution: {integrity: sha512-1jeBGaKNGdEq4FgIrORu/N570dwoPYio8lSoYLWmX7sQ//0JY08Xh9o5pBcgmHQ/MbsYp/aZnOe1s1lIsbLprQ==}
+
+ /set-value/4.1.0:
+ resolution: {integrity: sha512-zTEg4HL0RwVrqcWs3ztF+x1vkxfm0lP+MQQFPiMJTKVceBwEV0A569Ou8l9IYQG8jOZdMVI1hGsc0tmeD2o/Lw==}
+ engines: {node: '>=11.0'}
+ dependencies:
+ is-plain-object: 2.0.4
+ is-primitive: 3.0.1
+ dev: true
+
+ /setprototypeof/1.2.0:
+ resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==}
+
+ /sha.js/2.4.11:
+ resolution: {integrity: sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==}
+ hasBin: true
+ dependencies:
+ inherits: 2.0.4
+ safe-buffer: 5.2.1
+ dev: true
+
+ /shebang-command/2.0.0:
+ resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==}
+ engines: {node: '>=8'}
+ dependencies:
+ shebang-regex: 3.0.0
+
+ /shebang-regex/3.0.0:
+ resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==}
+ engines: {node: '>=8'}
+
+ /side-channel/1.0.4:
+ resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==}
+ dependencies:
+ call-bind: 1.0.2
+ get-intrinsic: 1.1.3
+ object-inspect: 1.12.2
+ dev: true
+
+ /siginfo/2.0.0:
+ resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==}
+ dev: true
+
+ /signal-exit/3.0.7:
+ resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==}
+
+ /single-user-cache/0.6.0:
+ resolution: {integrity: sha512-uMrANoiybpbsrVDbZ2M7GPzxeqZiirwkVnsDAre1zGhXAAw+2dImTxu7h0l1sIVtwGeJnVsRxgG4I5rZrUX0rw==}
+ dependencies:
+ safe-stable-stringify: 2.3.1
+
+ /slash/3.0.0:
+ resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /slice-ansi/4.0.0:
+ resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==}
+ engines: {node: '>=10'}
+ dependencies:
+ ansi-styles: 4.3.0
+ astral-regex: 2.0.0
+ is-fullwidth-code-point: 3.0.0
+ dev: false
+
+ /smart-buffer/4.2.0:
+ resolution: {integrity: sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==}
+ engines: {node: '>= 6.0.0', npm: '>= 3.0.0'}
+ optional: true
+
+ /snazzy/9.0.0:
+ resolution: {integrity: sha512-8QZmJb11OiYaUP90Nnjqcj/LEpO8CLgChnP87Wqjv5tNB4djwHaz27VO2usSRR0NmViapeGW04p0aWAMhxxLXg==}
+ hasBin: true
+ dependencies:
+ chalk: 4.1.2
+ inherits: 2.0.4
+ minimist: 1.2.6
+ readable-stream: 3.6.0
+ standard-json: 1.1.0
+ strip-ansi: 6.0.1
+ text-table: 0.2.0
+ dev: true
+
+ /socks-proxy-agent/6.2.1:
+ resolution: {integrity: sha512-a6KW9G+6B3nWZ1yB8G7pJwL3ggLy1uTzKAgCb7ttblwqdz9fMGJUuTy3uFzEP48FAs9FLILlmzDlE2JJhVQaXQ==}
+ engines: {node: '>= 10'}
+ dependencies:
+ agent-base: 6.0.2
+ debug: 4.3.4
+ socks: 2.7.0
+ transitivePeerDependencies:
+ - supports-color
+ optional: true
+
+ /socks/2.7.0:
+ resolution: {integrity: sha512-scnOe9y4VuiNUULJN72GrM26BNOjVsfPXI+j+98PkyEfsIXroa5ofyjT+FzGvn/xHs73U2JtoBYAVx9Hl4quSA==}
+ engines: {node: '>= 10.13.0', npm: '>= 3.0.0'}
+ dependencies:
+ ip: 2.0.0
+ smart-buffer: 4.2.0
+ optional: true
+
+ /sonic-boom/3.2.0:
+ resolution: {integrity: sha512-SbbZ+Kqj/XIunvIAgUZRlqd6CGQYq71tRRbXR92Za8J/R3Yh4Av+TWENiSiEgnlwckYLyP0YZQWVfyNC0dzLaA==}
+ dependencies:
+ atomic-sleep: 1.0.0
+
+ /source-map-js/1.0.2:
+ resolution: {integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /source-map-support/0.5.21:
+ resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==}
+ dependencies:
+ buffer-from: 1.1.2
+ source-map: 0.6.1
+ dev: true
+
+ /source-map/0.6.1:
+ resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /sourcemap-codec/1.4.8:
+ resolution: {integrity: sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==}
+ dev: true
+
+ /space-separated-tokens/1.1.5:
+ resolution: {integrity: sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==}
+ dev: true
+
+ /spawn-wrap/2.0.0:
+ resolution: {integrity: sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==}
+ engines: {node: '>=8'}
+ dependencies:
+ foreground-child: 2.0.0
+ is-windows: 1.0.2
+ make-dir: 3.1.0
+ rimraf: 3.0.2
+ signal-exit: 3.0.7
+ which: 2.0.2
+ dev: true
+
+ /spdx-correct/3.1.1:
+ resolution: {integrity: sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==}
+ dependencies:
+ spdx-expression-parse: 3.0.1
+ spdx-license-ids: 3.0.12
+ dev: true
+
+ /spdx-exceptions/2.3.0:
+ resolution: {integrity: sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==}
+ dev: true
+
+ /spdx-expression-parse/3.0.1:
+ resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==}
+ dependencies:
+ spdx-exceptions: 2.3.0
+ spdx-license-ids: 3.0.12
+ dev: true
+
+ /spdx-license-ids/3.0.12:
+ resolution: {integrity: sha512-rr+VVSXtRhO4OHbXUiAF7xW3Bo9DuuF6C5jH+q/x15j2jniycgKbxU09Hr0WqlSLUs4i4ltHGXqTe7VHclYWyA==}
+ dev: true
+
+ /split2/4.1.0:
+ resolution: {integrity: sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==}
+ engines: {node: '>= 10.x'}
+
+ /sprintf-js/1.0.3:
+ resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==}
+
+ /sqlite3/5.1.1:
+ resolution: {integrity: sha512-mMinkrQr/LKJqFiFF+AF7imPSzRCCpTCreusZO3D/ssJHVjZOrbu2Caz+zPH5KTmGGXBxXMGSRDssL+44CLxvg==}
+ requiresBuild: true
+ peerDependenciesMeta:
+ node-gyp:
+ optional: true
+ dependencies:
+ '@mapbox/node-pre-gyp': 1.0.10
+ node-addon-api: 4.3.0
+ tar: 6.1.11
+ optionalDependencies:
+ node-gyp: 8.4.1
+ transitivePeerDependencies:
+ - bluebird
+ - encoding
+ - supports-color
+
+ /sqlstring/2.3.3:
+ resolution: {integrity: sha512-qC9iz2FlN7DQl3+wjwn3802RTyjCx7sDvfQEXchwa6CWOx07/WVfh91gBmQ9fahw8snwGEWU3xGzOt4tFyHLxg==}
+ engines: {node: '>= 0.6'}
+ dev: false
+
+ /ssri/8.0.1:
+ resolution: {integrity: sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ==}
+ engines: {node: '>= 8'}
+ dependencies:
+ minipass: 3.3.4
+ optional: true
+
+ /stack-utils/2.0.5:
+ resolution: {integrity: sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA==}
+ engines: {node: '>=10'}
+ dependencies:
+ escape-string-regexp: 2.0.0
+ dev: true
+
+ /stackback/0.0.2:
+ resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==}
+ dev: true
+
+ /standard-engine/15.0.0:
+ resolution: {integrity: sha512-4xwUhJNo1g/L2cleysUqUv7/btn7GEbYJvmgKrQ2vd/8pkTmN8cpqAZg+BT8Z1hNeEH787iWUdOpL8fmApLtxA==}
+ engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+ dependencies:
+ get-stdin: 8.0.0
+ minimist: 1.2.6
+ pkg-conf: 3.1.0
+ xdg-basedir: 4.0.0
+ dev: true
+
+ /standard-json/1.1.0:
+ resolution: {integrity: sha512-nkonX+n5g3pyVBvJZmvRlFtT/7JyLbNh4CtrYC3Qfxihgs8PKX52f6ONKQXORStuBWJ5PI83EUrNXme7LKfiTQ==}
+ hasBin: true
+ dependencies:
+ concat-stream: 2.0.0
+ dev: true
+
+ /standard/17.0.0:
+ resolution: {integrity: sha512-GlCM9nzbLUkr+TYR5I2WQoIah4wHA2lMauqbyPLV/oI5gJxqhHzhjl9EG2N0lr/nRqI3KCbCvm/W3smxvLaChA==}
+ engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
+ hasBin: true
+ dependencies:
+ eslint: 8.23.1
+ eslint-config-standard: 17.0.0_4nulviyjkaspo7v2xlghuwxbf4
+ eslint-config-standard-jsx: 11.0.0_g6sljrn72fr5r5n2js2jyjg7bi
+ eslint-plugin-import: 2.26.0_eslint@8.23.1
+ eslint-plugin-n: 15.2.5_eslint@8.23.1
+ eslint-plugin-promise: 6.0.1_eslint@8.23.1
+ eslint-plugin-react: 7.31.8_eslint@8.23.1
+ standard-engine: 15.0.0
+ transitivePeerDependencies:
+ - '@typescript-eslint/parser'
+ - eslint-import-resolver-typescript
+ - eslint-import-resolver-webpack
+ - supports-color
+ dev: true
+
+ /statuses/2.0.1:
+ resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==}
+ engines: {node: '>= 0.8'}
+
+ /steed/1.1.3:
+ resolution: {integrity: sha512-EUkci0FAUiE4IvGTSKcDJIQ/eRUP2JJb56+fvZ4sdnguLTqIdKjSxUe138poW8mkvKWXW2sFPrgTsxqoISnmoA==}
+ dependencies:
+ fastfall: 1.5.1
+ fastparallel: 2.4.1
+ fastq: 1.13.0
+ fastseries: 1.7.2
+ reusify: 1.0.4
+ dev: false
+
+ /string-similarity/4.0.4:
+ resolution: {integrity: sha512-/q/8Q4Bl4ZKAPjj8WerIBJWALKkaPRfrvhfF8k/B23i4nzrlRj2/go1m90In7nG/3XDSbOo0+pu6RvCTM9RGMQ==}
+
+ /string-width/4.2.3:
+ resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==}
+ engines: {node: '>=8'}
+ dependencies:
+ emoji-regex: 8.0.0
+ is-fullwidth-code-point: 3.0.0
+ strip-ansi: 6.0.1
+
+ /string.prototype.matchall/4.0.7:
+ resolution: {integrity: sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg==}
+ dependencies:
+ call-bind: 1.0.2
+ define-properties: 1.1.4
+ es-abstract: 1.20.2
+ get-intrinsic: 1.1.3
+ has-symbols: 1.0.3
+ internal-slot: 1.0.3
+ regexp.prototype.flags: 1.4.3
+ side-channel: 1.0.4
+ dev: true
+
+ /string.prototype.trimend/1.0.5:
+ resolution: {integrity: sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==}
+ dependencies:
+ call-bind: 1.0.2
+ define-properties: 1.1.4
+ es-abstract: 1.20.2
+ dev: true
+
+ /string.prototype.trimstart/1.0.5:
+ resolution: {integrity: sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==}
+ dependencies:
+ call-bind: 1.0.2
+ define-properties: 1.1.4
+ es-abstract: 1.20.2
+ dev: true
+
+ /string_decoder/1.1.1:
+ resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==}
+ dependencies:
+ safe-buffer: 5.1.2
+ dev: true
+
+ /string_decoder/1.3.0:
+ resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==}
+ dependencies:
+ safe-buffer: 5.2.1
+
+ /strip-ansi/6.0.1:
+ resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
+ engines: {node: '>=8'}
+ dependencies:
+ ansi-regex: 5.0.1
+
+ /strip-ansi/7.0.1:
+ resolution: {integrity: sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw==}
+ engines: {node: '>=12'}
+ dependencies:
+ ansi-regex: 6.0.1
+ dev: true
+
+ /strip-bom/3.0.0:
+ resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==}
+ engines: {node: '>=4'}
+ dev: true
+
+ /strip-bom/4.0.0:
+ resolution: {integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /strip-final-newline/3.0.0:
+ resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==}
+ engines: {node: '>=12'}
+
+ /strip-indent/3.0.0:
+ resolution: {integrity: sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==}
+ engines: {node: '>=8'}
+ dependencies:
+ min-indent: 1.0.1
+ dev: true
+
+ /strip-json-comments/3.1.1:
+ resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==}
+ engines: {node: '>=8'}
+
+ /strip-literal/0.4.1:
+ resolution: {integrity: sha512-z+F/xmDM8GOdvA5UoZXFxEnxdvMOZ+XEBIwjfLfc8hMSuHpGxjXAUCfuEo+t1GOHSb8+qgI/IBRpxXVMaABYWA==}
+ dependencies:
+ acorn: 8.8.0
+ dev: true
+
+ /style-mod/4.0.0:
+ resolution: {integrity: sha512-OPhtyEjyyN9x3nhPsu76f52yUGXiZcgvsrFVtvTkyGRQJ0XK+GPc6ov1z+lRpbeabka+MYEQxOYRnt5nF30aMw==}
+ dev: true
+
+ /supports-color/5.5.0:
+ resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==}
+ engines: {node: '>=4'}
+ dependencies:
+ has-flag: 3.0.0
+
+ /supports-color/7.2.0:
+ resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==}
+ engines: {node: '>=8'}
+ dependencies:
+ has-flag: 4.0.0
+
+ /supports-hyperlinks/2.3.0:
+ resolution: {integrity: sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==}
+ engines: {node: '>=8'}
+ dependencies:
+ has-flag: 4.0.0
+ supports-color: 7.2.0
+ dev: true
+
+ /supports-preserve-symlinks-flag/1.0.0:
+ resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==}
+ engines: {node: '>= 0.4'}
+ dev: true
+
+ /swagger-client/3.18.5:
+ resolution: {integrity: sha512-c0txGDtfQTJnaIBaEKCwtRNcUaaAfj+RXI4QVV9p3WW+AUCQqp4naCjaDNNsOfMkE4ySyhnblbL+jGqAVC7snw==}
+ dependencies:
+ '@babel/runtime-corejs3': 7.19.1
+ cookie: 0.5.0
+ cross-fetch: 3.1.5
+ deepmerge: 4.2.2
+ fast-json-patch: 3.1.1
+ form-data-encoder: 1.7.2
+ formdata-node: 4.4.1
+ is-plain-object: 5.0.0
+ js-yaml: 4.1.0
+ lodash: 4.17.21
+ qs: 6.11.0
+ traverse: 0.6.6
+ url: 0.11.0
+ transitivePeerDependencies:
+ - encoding
+ dev: true
+
+ /swagger-ui-react/4.13.0_biqbaboplfbrettd7655fr4n2y:
+ resolution: {integrity: sha512-SNAByPvnpFKXUnrH6+V2TjVrbilftyVLWK+7K73tBX3uRNAYv0hzNs5Q6xPIekq4iq7xRtuUhVA7Qxn9vK4C+w==}
+ peerDependencies:
+ react: '>=17.0.0'
+ react-dom: '>=17.0.0'
+ dependencies:
+ '@babel/runtime-corejs3': 7.19.1
+ '@braintree/sanitize-url': 6.0.0
+ base64-js: 1.5.1
+ classnames: 2.3.2
+ css.escape: 1.5.1
+ deep-extend: 0.6.0
+ dompurify: 2.3.3
+ ieee754: 1.2.1
+ immutable: 3.8.2
+ js-file-download: 0.4.12
+ js-yaml: 4.1.0
+ lodash: 4.17.21
+ prop-types: 15.8.1
+ randexp: 0.5.3
+ randombytes: 2.1.0
+ react: 18.2.0
+ react-copy-to-clipboard: 5.0.4_react@18.2.0
+ react-debounce-input: 3.2.4_react@18.2.0
+ react-dom: 18.2.0_react@18.2.0
+ react-immutable-proptypes: 2.2.0_immutable@3.8.2
+ react-immutable-pure-component: 2.2.2_lqollyv6thcj6icxhf5vjqu2mi
+ react-inspector: 5.1.1_react@18.2.0
+ react-redux: 7.2.8_biqbaboplfbrettd7655fr4n2y
+ react-syntax-highlighter: 15.5.0_react@18.2.0
+ redux: 4.2.0
+ redux-immutable: 4.0.0_immutable@3.8.2
+ remarkable: 2.0.1
+ reselect: 4.1.6
+ serialize-error: 8.1.0
+ sha.js: 2.4.11
+ swagger-client: 3.18.5
+ url-parse: 1.5.10
+ xml: 1.0.1
+ xml-but-prettier: 1.0.1
+ zenscroll: 4.0.2
+ transitivePeerDependencies:
+ - encoding
+ - react-native
+ dev: true
+
+ /sync-request/6.1.0:
+ resolution: {integrity: sha512-8fjNkrNlNCrVc/av+Jn+xxqfCjYaBoHqCsDz6mt030UMxJGr+GSfCV1dQt2gRtlL63+VPidwDVLr7V2OcTSdRw==}
+ engines: {node: '>=8.0.0'}
+ dependencies:
+ http-response-object: 3.0.2
+ sync-rpc: 1.3.6
+ then-request: 6.0.2
+ dev: true
+
+ /sync-rpc/1.3.6:
+ resolution: {integrity: sha512-J8jTXuZzRlvU7HemDgHi3pGnh/rkoqR/OZSjhTyyZrEkkYQbk7Z33AXp37mkPfPpfdOuj7Ex3H/TJM1z48uPQw==}
+ dependencies:
+ get-port: 3.2.0
+ dev: true
+
+ /tabbable/4.0.0:
+ resolution: {integrity: sha512-H1XoH1URcBOa/rZZWxLxHCtOdVUEev+9vo5YdYhC9tCY4wnybX+VQrCYuy9ubkg69fCBxCONJOSLGfw0DWMffQ==}
+ dev: true
+
+ /table/6.8.0:
+ resolution: {integrity: sha512-s/fitrbVeEyHKFa7mFdkuQMWlH1Wgw/yEXMt5xACT4ZpzWFluehAxRtUUQKPuWhaLAWhFcVx6w3oC8VKaUfPGA==}
+ engines: {node: '>=10.0.0'}
+ dependencies:
+ ajv: 8.11.0
+ lodash.truncate: 4.4.2
+ slice-ansi: 4.0.0
+ string-width: 4.2.3
+ strip-ansi: 6.0.1
+ dev: false
+
+ /tap-mocha-reporter/5.0.3:
+ resolution: {integrity: sha512-6zlGkaV4J+XMRFkN0X+yuw6xHbE9jyCZ3WUKfw4KxMyRGOpYSRuuQTRJyWX88WWuLdVTuFbxzwXhXuS2XE6o0g==}
+ engines: {node: '>= 8'}
+ hasBin: true
+ dependencies:
+ color-support: 1.1.3
+ debug: 4.3.4
+ diff: 4.0.2
+ escape-string-regexp: 2.0.0
+ glob: 7.2.3
+ tap-parser: 11.0.1
+ tap-yaml: 1.0.0
+ unicode-length: 2.1.0
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+
+ /tap-parser/11.0.1:
+ resolution: {integrity: sha512-5ow0oyFOnXVSALYdidMX94u0GEjIlgc/BPFYLx0yRh9hb8+cFGNJqJzDJlUqbLOwx8+NBrIbxCWkIQi7555c0w==}
+ engines: {node: '>= 8'}
+ hasBin: true
+ dependencies:
+ events-to-array: 1.1.2
+ minipass: 3.3.4
+ tap-yaml: 1.0.0
+ dev: true
+
+ /tap-yaml/1.0.0:
+ resolution: {integrity: sha512-Rxbx4EnrWkYk0/ztcm5u3/VznbyFJpyXO12dDBHKWiDVxy7O2Qw6MRrwO5H6Ww0U5YhRY/4C/VzWmFPhBQc4qQ==}
+ dependencies:
+ yaml: 1.10.2
+ dev: true
+
+ /tap/16.3.0:
+ resolution: {integrity: sha512-J9GffPUAbX6FnWbQ/jj7ktzd9nnDFP1fH44OzidqOmxUfZ1hPLMOvpS99LnDiP0H2mO8GY3kGN5XoY0xIKbNFA==}
+ engines: {node: '>=12'}
+ hasBin: true
+ peerDependencies:
+ coveralls: ^3.1.1
+ flow-remove-types: '>=2.112.0'
+ ts-node: '>=8.5.2'
+ typescript: '>=3.7.2'
+ peerDependenciesMeta:
+ coveralls:
+ optional: true
+ flow-remove-types:
+ optional: true
+ ts-node:
+ optional: true
+ typescript:
+ optional: true
+ dependencies:
+ chokidar: 3.5.3
+ findit: 2.0.0
+ foreground-child: 2.0.0
+ fs-exists-cached: 1.0.0
+ glob: 7.2.3
+ isexe: 2.0.0
+ istanbul-lib-processinfo: 2.0.3
+ jackspeak: 1.4.1
+ libtap: 1.4.0
+ minipass: 3.3.4
+ mkdirp: 1.0.4
+ nyc: 15.1.0
+ opener: 1.5.2
+ rimraf: 3.0.2
+ signal-exit: 3.0.7
+ source-map-support: 0.5.21
+ tap-mocha-reporter: 5.0.3
+ tap-parser: 11.0.1
+ tap-yaml: 1.0.0
+ tcompare: 5.0.7
+ which: 2.0.2
+ transitivePeerDependencies:
+ - supports-color
+ dev: true
+ bundledDependencies:
+ - ink
+ - treport
+ - '@types/react'
+ - '@isaacs/import-jsx'
+ - react
+
+ /tar/6.1.11:
+ resolution: {integrity: sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==}
+ engines: {node: '>= 10'}
+ dependencies:
+ chownr: 2.0.0
+ fs-minipass: 2.1.0
+ minipass: 3.3.4
+ minizlib: 2.1.2
+ mkdirp: 1.0.4
+ yallist: 4.0.0
+
+ /tcompare/5.0.7:
+ resolution: {integrity: sha512-d9iddt6YYGgyxJw5bjsN7UJUO1kGOtjSlNy/4PoGYAjQS5pAT/hzIoLf1bZCw+uUxRmZJh7Yy1aA7xKVRT9B4w==}
+ engines: {node: '>=10'}
+ dependencies:
+ diff: 4.0.2
+ dev: true
+
+ /tdigest/0.1.2:
+ resolution: {integrity: sha512-+G0LLgjjo9BZX2MfdvPfH+MKLCrxlXSYec5DaPYP1fe6Iyhf0/fSmJ0bFiZ1F8BT6cGXl2LpltQptzjXKWEkKA==}
+ dependencies:
+ bintrees: 1.0.2
+ dev: false
+
+ /test-exclude/6.0.0:
+ resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==}
+ engines: {node: '>=8'}
+ dependencies:
+ '@istanbuljs/schema': 0.1.3
+ glob: 7.2.3
+ minimatch: 3.1.2
+
+ /text-table/0.2.0:
+ resolution: {integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==}
+ dev: true
+
+ /then-queue/1.3.0:
+ resolution: {integrity: sha512-5p1q2me8gQFl+GBHoFh0M8Y56XUV0Xl44f71X4HAzCZZI92V1BiBhYDAD4qudC04ZxYoaYCFjOrRoPkO/qzEng==}
+ dependencies:
+ promise: 6.1.0
+
+ /then-request/6.0.2:
+ resolution: {integrity: sha512-3ZBiG7JvP3wbDzA9iNY5zJQcHL4jn/0BWtXIkagfz7QgOL/LqjCEOBQuJNZfu0XYnv5JhKh+cDxCPM4ILrqruA==}
+ engines: {node: '>=6.0.0'}
+ dependencies:
+ '@types/concat-stream': 1.6.1
+ '@types/form-data': 0.0.33
+ '@types/node': 8.10.66
+ '@types/qs': 6.9.7
+ caseless: 0.12.0
+ concat-stream: 1.6.2
+ form-data: 2.5.1
+ http-basic: 8.1.3
+ http-response-object: 3.0.2
+ promise: 8.2.0
+ qs: 6.11.0
+ dev: true
+
+ /thread-stream/2.2.0:
+ resolution: {integrity: sha512-rUkv4/fnb4rqy/gGy7VuqK6wE1+1DOCOWy4RMeaV69ZHMP11tQKZvZSip1yTgrKCMZzEMcCL/bKfHvSfDHx+iQ==}
+ dependencies:
+ real-require: 0.2.0
+
+ /tiny-lru/8.0.2:
+ resolution: {integrity: sha512-ApGvZ6vVvTNdsmt676grvCkUCGwzG9IqXma5Z07xJgiC5L7akUMof5U8G2JTI9Rz/ovtVhJBlY6mNhEvtjzOIg==}
+ engines: {node: '>=6'}
+
+ /tiny-warning/1.0.3:
+ resolution: {integrity: sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==}
+ dev: true
+
+ /tinybench/2.1.5:
+ resolution: {integrity: sha512-ak+PZZEuH3mw6CCFOgf5S90YH0MARnZNhxjhjguAmoJimEMAJuNip/rJRd6/wyylHItomVpKTzZk9zrhTrQCoQ==}
+ dev: true
+
+ /tinypool/0.3.0:
+ resolution: {integrity: sha512-NX5KeqHOBZU6Bc0xj9Vr5Szbb1j8tUHIeD18s41aDJaPeC5QTdEhK0SpdpUrZlj2nv5cctNcSjaKNanXlfcVEQ==}
+ engines: {node: '>=14.0.0'}
+ dev: true
+
+ /tinyspy/1.0.2:
+ resolution: {integrity: sha512-bSGlgwLBYf7PnUsQ6WOc6SJ3pGOcd+d8AA6EUnLDDM0kWEstC1JIlSZA3UNliDXhd9ABoS7hiRBDCu+XP/sf1Q==}
+ engines: {node: '>=14.0.0'}
+ dev: true
+
+ /to-fast-properties/2.0.0:
+ resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==}
+ engines: {node: '>=4'}
+ dev: true
+
+ /to-regex-range/5.0.1:
+ resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
+ engines: {node: '>=8.0'}
+ dependencies:
+ is-number: 7.0.0
+ dev: true
+
+ /toggle-selection/1.0.6:
+ resolution: {integrity: sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ==}
+ dev: true
+
+ /toidentifier/1.0.1:
+ resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==}
+ engines: {node: '>=0.6'}
+
+ /tr46/0.0.3:
+ resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==}
+
+ /traverse/0.6.6:
+ resolution: {integrity: sha512-kdf4JKs8lbARxWdp7RKdNzoJBhGUcIalSYibuGyHJbmk40pOysQ0+QPvlkCOICOivDWU2IJo2rkrxyTK2AH4fw==}
+ dev: true
+
+ /trim-newlines/3.0.1:
+ resolution: {integrity: sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /trivial-deferred/1.0.1:
+ resolution: {integrity: sha512-dagAKX7vaesNNAwOc9Np9C2mJ+7YopF4lk+jE2JML9ta4kZ91Y6UruJNH65bLRYoUROD8EY+Pmi44qQWwXR7sw==}
+ dev: true
+
+ /tsconfig-paths/3.14.1:
+ resolution: {integrity: sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==}
+ dependencies:
+ '@types/json5': 0.0.29
+ json5: 1.0.1
+ minimist: 1.2.6
+ strip-bom: 3.0.0
+ dev: true
+
+ /tsd/0.23.0:
+ resolution: {integrity: sha512-dY4p7LbshRQ1hizr+xlbebgkfB0kT8wnQZW7LjBrOsmbws5mt1YYY4VSKoLYXyzYxObIOSQ3qns+tX8tP0Mz6g==}
+ engines: {node: '>=14.16'}
+ hasBin: true
+ dependencies:
+ '@tsd/typescript': 4.8.3
+ eslint-formatter-pretty: 4.1.0
+ globby: 11.1.0
+ meow: 9.0.0
+ path-exists: 4.0.0
+ read-pkg-up: 7.0.1
+ dev: true
+
+ /tsd/0.24.1:
+ resolution: {integrity: sha512-sD+s81/2aM4RRhimCDttd4xpBNbUFWnoMSHk/o8kC8Ek23jljeRNWjsxFJmOmYLuLTN9swRt1b6iXfUXTcTiIA==}
+ engines: {node: '>=14.16'}
+ hasBin: true
+ dependencies:
+ '@tsd/typescript': 4.8.3
+ eslint-formatter-pretty: 4.1.0
+ globby: 11.1.0
+ meow: 9.0.0
+ path-exists: 4.0.0
+ read-pkg-up: 7.0.1
+ dev: true
+
+ /tslib/2.4.0:
+ resolution: {integrity: sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==}
+
+ /type-check/0.4.0:
+ resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==}
+ engines: {node: '>= 0.8.0'}
+ dependencies:
+ prelude-ls: 1.2.1
+ dev: true
+
+ /type-detect/4.0.8:
+ resolution: {integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==}
+ engines: {node: '>=4'}
+ dev: true
+
+ /type-fest/0.18.1:
+ resolution: {integrity: sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==}
+ engines: {node: '>=10'}
+ dev: true
+
+ /type-fest/0.20.2:
+ resolution: {integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==}
+ engines: {node: '>=10'}
+ dev: true
+
+ /type-fest/0.21.3:
+ resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==}
+ engines: {node: '>=10'}
+ dev: true
+
+ /type-fest/0.3.1:
+ resolution: {integrity: sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ==}
+ engines: {node: '>=6'}
+ dev: true
+
+ /type-fest/0.6.0:
+ resolution: {integrity: sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /type-fest/0.8.1:
+ resolution: {integrity: sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /typedarray-to-buffer/3.1.5:
+ resolution: {integrity: sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==}
+ dependencies:
+ is-typedarray: 1.0.0
+ dev: true
+
+ /typedarray/0.0.6:
+ resolution: {integrity: sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==}
+ dev: true
+
+ /typescript/4.8.3:
+ resolution: {integrity: sha512-goMHfm00nWPa8UvR/CPSvykqf6dVV8x/dp0c5mFTMTIu0u0FlGWRioyy7Nn0PGAdHxpJZnuO/ut+PpQ8UiHAig==}
+ engines: {node: '>=4.2.0'}
+ hasBin: true
+ dev: false
+
+ /ua-parser-js/1.0.2:
+ resolution: {integrity: sha512-00y/AXhx0/SsnI51fTc0rLRmafiGOM4/O+ny10Ps7f+j/b8p/ZY11ytMgznXkOVo4GQ+KwQG5UQLkLGirsACRg==}
+ dev: false
+
+ /uc.micro/1.0.6:
+ resolution: {integrity: sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==}
+ dev: true
+
+ /unbox-primitive/1.0.2:
+ resolution: {integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==}
+ dependencies:
+ call-bind: 1.0.2
+ has-bigints: 1.0.2
+ has-symbols: 1.0.3
+ which-boxed-primitive: 1.0.2
+ dev: true
+
+ /undici/5.10.0:
+ resolution: {integrity: sha512-c8HsD3IbwmjjbLvoZuRI26TZic+TSEe8FPMLLOkN1AfYRhdjnKBU6yL+IwcSCbdZiX4e5t0lfMDLDCqj4Sq70g==}
+ engines: {node: '>=12.18'}
+
+ /unicode-length/2.1.0:
+ resolution: {integrity: sha512-4bV582zTV9Q02RXBxSUMiuN/KHo5w4aTojuKTNT96DIKps/SIawFp7cS5Mu25VuY1AioGXrmYyzKZUzh8OqoUw==}
+ dependencies:
+ punycode: 2.1.1
+ dev: true
+
+ /unique-filename/1.1.1:
+ resolution: {integrity: sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==}
+ dependencies:
+ unique-slug: 2.0.2
+ optional: true
+
+ /unique-slug/2.0.2:
+ resolution: {integrity: sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w==}
+ dependencies:
+ imurmurhash: 0.1.4
+ optional: true
+
+ /update-browserslist-db/1.0.9_browserslist@4.21.4:
+ resolution: {integrity: sha512-/xsqn21EGVdXI3EXSum1Yckj3ZVZugqyOZQ/CxYPBD/R+ko9NSUScf8tFF4dOKY+2pvSSJA/S+5B8s4Zr4kyvg==}
+ hasBin: true
+ peerDependencies:
+ browserslist: '>= 4.21.0'
+ dependencies:
+ browserslist: 4.21.4
+ escalade: 3.1.1
+ picocolors: 1.0.0
+ dev: true
+
+ /uri-js/4.4.1:
+ resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==}
+ dependencies:
+ punycode: 2.1.1
+
+ /url-parse/1.5.10:
+ resolution: {integrity: sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==}
+ dependencies:
+ querystringify: 2.2.0
+ requires-port: 1.0.0
+ dev: true
+
+ /url/0.11.0:
+ resolution: {integrity: sha512-kbailJa29QrtXnxgq+DdCEGlbTeYM2eJUxsz6vjZavrCYPMIFHMKQmSKYAIuUK2i7hgPm28a8piX5NTUtM/LKQ==}
+ dependencies:
+ punycode: 1.3.2
+ querystring: 0.2.0
+ dev: true
+
+ /use-callback-ref/1.3.0_w5j4k42lgipnm43s3brx6h3c34:
+ resolution: {integrity: sha512-3FT9PRuRdbB9HfXhEq35u4oZkvpJ5kuYbpqhCfmiZyReuRgpnhDlbr2ZEnnuS0RrJAPn6l23xjFg9kpDM+Ms7w==}
+ engines: {node: '>=10'}
+ peerDependencies:
+ '@types/react': ^16.8.0 || ^17.0.0 || ^18.0.0
+ react: ^16.8.0 || ^17.0.0 || ^18.0.0
+ peerDependenciesMeta:
+ '@types/react':
+ optional: true
+ dependencies:
+ '@types/react': 18.0.20
+ react: 18.2.0
+ tslib: 2.4.0
+ dev: true
+
+ /use-sidecar/1.1.2_w5j4k42lgipnm43s3brx6h3c34:
+ resolution: {integrity: sha512-epTbsLuzZ7lPClpz2TyryBfztm7m+28DlEv2ZCQ3MDr5ssiwyOwGH/e5F9CkfWjJ1t4clvI58yF822/GUkjjhw==}
+ engines: {node: '>=10'}
+ peerDependencies:
+ '@types/react': ^16.9.0 || ^17.0.0 || ^18.0.0
+ react: ^16.8.0 || ^17.0.0 || ^18.0.0
+ peerDependenciesMeta:
+ '@types/react':
+ optional: true
+ dependencies:
+ '@types/react': 18.0.20
+ detect-node-es: 1.1.0
+ react: 18.2.0
+ tslib: 2.4.0
+ dev: true
+
+ /util-deprecate/1.0.2:
+ resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
+
+ /uuid/8.3.2:
+ resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==}
+ hasBin: true
+ dev: true
+
+ /v8-to-istanbul/9.0.1:
+ resolution: {integrity: sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w==}
+ engines: {node: '>=10.12.0'}
+ dependencies:
+ '@jridgewell/trace-mapping': 0.3.15
+ '@types/istanbul-lib-coverage': 2.0.4
+ convert-source-map: 1.8.0
+
+ /validate-npm-package-license/3.0.4:
+ resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==}
+ dependencies:
+ spdx-correct: 3.1.1
+ spdx-expression-parse: 3.0.1
+ dev: true
+
+ /vanilla-picker/2.12.1:
+ resolution: {integrity: sha512-2qrEP9VYylKXbyzXKsbu2dferBTvqnlsr29XjHwFE+/MEp0VNj6oEUESLDtKZ7DWzGdSv1x/+ujqFZF+KsO3cg==}
+ dependencies:
+ '@sphinxxxx/color-conversion': 2.2.2
+ dev: true
+
+ /vite/3.1.1:
+ resolution: {integrity: sha512-hgxQWev/AL7nWYrqByYo8nfcH9n97v6oFsta9+JX8h6cEkni7nHKP2kJleNYV2kcGhE8jsbaY1aStwPZXzPbgA==}
+ engines: {node: ^14.18.0 || >=16.0.0}
+ hasBin: true
+ peerDependencies:
+ less: '*'
+ sass: '*'
+ stylus: '*'
+ terser: ^5.4.0
+ peerDependenciesMeta:
+ less:
+ optional: true
+ sass:
+ optional: true
+ stylus:
+ optional: true
+ terser:
+ optional: true
+ dependencies:
+ esbuild: 0.15.7
+ postcss: 8.4.16
+ resolve: 1.22.1
+ rollup: 2.78.1
+ optionalDependencies:
+ fsevents: 2.3.2
+ dev: true
+
+ /vitest/0.23.2_happy-dom@6.0.4:
+ resolution: {integrity: sha512-kTBKp3ROPDkYC+x2zWt4znkDtnT08W1FQ6ngRFuqxpBGNuNVS+eWZKfffr8y2JGvEzZ9EzMAOcNaiqMj/FZqMw==}
+ engines: {node: '>=v14.16.0'}
+ hasBin: true
+ peerDependencies:
+ '@edge-runtime/vm': '*'
+ '@vitest/browser': '*'
+ '@vitest/ui': '*'
+ happy-dom: '*'
+ jsdom: '*'
+ peerDependenciesMeta:
+ '@edge-runtime/vm':
+ optional: true
+ '@vitest/browser':
+ optional: true
+ '@vitest/ui':
+ optional: true
+ happy-dom:
+ optional: true
+ jsdom:
+ optional: true
+ dependencies:
+ '@types/chai': 4.3.3
+ '@types/chai-subset': 1.3.3
+ '@types/node': 18.7.18
+ chai: 4.3.6
+ debug: 4.3.4
+ happy-dom: 6.0.4
+ local-pkg: 0.4.2
+ strip-literal: 0.4.1
+ tinybench: 2.1.5
+ tinypool: 0.3.0
+ tinyspy: 1.0.2
+ vite: 3.1.1
+ transitivePeerDependencies:
+ - less
+ - sass
+ - stylus
+ - supports-color
+ - terser
+ dev: true
+
+ /vscode-languageserver-types/3.17.2:
+ resolution: {integrity: sha512-zHhCWatviizPIq9B7Vh9uvrH6x3sK8itC84HkamnBWoDFJtzBf7SWlpLCZUit72b3os45h6RWQNC9xHRDF8dRA==}
+ dev: true
+
+ /w3c-keyname/2.2.6:
+ resolution: {integrity: sha512-f+fciywl1SJEniZHD6H+kUO8gOnwIr7f4ijKA6+ZvJFjeGi1r4PDLl53Ayud9O/rk64RqgoQine0feoeOU0kXg==}
+ dev: true
+
+ /web-streams-polyfill/4.0.0-beta.3:
+ resolution: {integrity: sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==}
+ engines: {node: '>= 14'}
+ dev: true
+
+ /webidl-conversions/3.0.1:
+ resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==}
+
+ /webidl-conversions/7.0.0:
+ resolution: {integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==}
+ engines: {node: '>=12'}
+ dev: true
+
+ /whatwg-encoding/2.0.0:
+ resolution: {integrity: sha512-p41ogyeMUrw3jWclHWTQg1k05DSVXPLcVxRTYsXUk+ZooOCZLcoYgPZ/HL/D/N+uQPOtcp1me1WhBEaX02mhWg==}
+ engines: {node: '>=12'}
+ dependencies:
+ iconv-lite: 0.6.3
+ dev: true
+
+ /whatwg-mimetype/3.0.0:
+ resolution: {integrity: sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==}
+ engines: {node: '>=12'}
+ dev: true
+
+ /whatwg-url/5.0.0:
+ resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==}
+ dependencies:
+ tr46: 0.0.3
+ webidl-conversions: 3.0.1
+
+ /which-boxed-primitive/1.0.2:
+ resolution: {integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==}
+ dependencies:
+ is-bigint: 1.0.4
+ is-boolean-object: 1.1.2
+ is-number-object: 1.0.7
+ is-string: 1.0.7
+ is-symbol: 1.0.4
+ dev: true
+
+ /which-module/2.0.0:
+ resolution: {integrity: sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==}
+ dev: true
+
+ /which/2.0.2:
+ resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==}
+ engines: {node: '>= 8'}
+ hasBin: true
+ dependencies:
+ isexe: 2.0.0
+
+ /why-is-node-running/2.2.2:
+ resolution: {integrity: sha512-6tSwToZxTOcotxHeA+qGCq1mVzKR3CwcJGmVcY+QE8SHy6TnpFnh8PAvPNHYr7EcuVeG0QSMxtYCuO1ta/G/oA==}
+ engines: {node: '>=8'}
+ hasBin: true
+ dependencies:
+ siginfo: 2.0.0
+ stackback: 0.0.2
+ dev: true
+
+ /wide-align/1.1.5:
+ resolution: {integrity: sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==}
+ dependencies:
+ string-width: 4.2.3
+
+ /word-wrap/1.2.3:
+ resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==}
+ engines: {node: '>=0.10.0'}
+ dev: true
+
+ /wrap-ansi/6.2.0:
+ resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==}
+ engines: {node: '>=8'}
+ dependencies:
+ ansi-styles: 4.3.0
+ string-width: 4.2.3
+ strip-ansi: 6.0.1
+ dev: true
+
+ /wrap-ansi/7.0.0:
+ resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==}
+ engines: {node: '>=10'}
+ dependencies:
+ ansi-styles: 4.3.0
+ string-width: 4.2.3
+ strip-ansi: 6.0.1
+
+ /wrappy/1.0.2:
+ resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}
+
+ /write-file-atomic/3.0.3:
+ resolution: {integrity: sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==}
+ dependencies:
+ imurmurhash: 0.1.4
+ is-typedarray: 1.0.0
+ signal-exit: 3.0.7
+ typedarray-to-buffer: 3.1.5
+ dev: true
+
+ /ws/8.8.1:
+ resolution: {integrity: sha512-bGy2JzvzkPowEJV++hF07hAD6niYSr0JzBNo/J29WsB57A2r7Wlc1UFcTR9IzrPvuNVO4B8LGqF8qcpsVOhJCA==}
+ engines: {node: '>=10.0.0'}
+ peerDependencies:
+ bufferutil: ^4.0.1
+ utf-8-validate: ^5.0.2
+ peerDependenciesMeta:
+ bufferutil:
+ optional: true
+ utf-8-validate:
+ optional: true
+
+ /xdg-basedir/4.0.0:
+ resolution: {integrity: sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==}
+ engines: {node: '>=8'}
+ dev: true
+
+ /xml-but-prettier/1.0.1:
+ resolution: {integrity: sha512-C2CJaadHrZTqESlH03WOyw0oZTtoy2uEg6dSDF6YRg+9GnYNub53RRemLpnvtbHDFelxMx4LajiFsYeR6XJHgQ==}
+ dependencies:
+ repeat-string: 1.6.1
+ dev: true
+
+ /xml/1.0.1:
+ resolution: {integrity: sha512-huCv9IH9Tcf95zuYCsQraZtWnJvBtLVE0QHMOs8bWyZAFZNDcYjsPq1nEx8jKA9y+Beo9v+7OBPRisQTjinQMw==}
+ dev: true
+
+ /xtend/4.0.2:
+ resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==}
+ engines: {node: '>=0.4'}
+
+ /y18n/4.0.3:
+ resolution: {integrity: sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==}
+ dev: true
+
+ /y18n/5.0.8:
+ resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==}
+ engines: {node: '>=10'}
+
+ /yallist/2.1.2:
+ resolution: {integrity: sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==}
+ dev: false
+
+ /yallist/4.0.0:
+ resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==}
+
+ /yaml/1.10.2:
+ resolution: {integrity: sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==}
+ engines: {node: '>= 6'}
+ dev: true
+
+ /yaml/2.1.1:
+ resolution: {integrity: sha512-o96x3OPo8GjWeSLF+wOAbrPfhFOGY0W00GNaxCDv+9hkcDJEnev1yh8S7pgHF0ik6zc8sQLuL8hjHjJULZp8bw==}
+ engines: {node: '>= 14'}
+ dev: false
+
+ /yargs-parser/18.1.3:
+ resolution: {integrity: sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==}
+ engines: {node: '>=6'}
+ dependencies:
+ camelcase: 5.3.1
+ decamelize: 1.2.0
+ dev: true
+
+ /yargs-parser/20.2.9:
+ resolution: {integrity: sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==}
+ engines: {node: '>=10'}
+
+ /yargs/15.4.1:
+ resolution: {integrity: sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==}
+ engines: {node: '>=8'}
+ dependencies:
+ cliui: 6.0.0
+ decamelize: 1.2.0
+ find-up: 4.1.0
+ get-caller-file: 2.0.5
+ require-directory: 2.1.1
+ require-main-filename: 2.0.0
+ set-blocking: 2.0.0
+ string-width: 4.2.3
+ which-module: 2.0.0
+ y18n: 4.0.3
+ yargs-parser: 18.1.3
+ dev: true
+
+ /yargs/16.2.0:
+ resolution: {integrity: sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==}
+ engines: {node: '>=10'}
+ dependencies:
+ cliui: 7.0.4
+ escalade: 3.1.1
+ get-caller-file: 2.0.5
+ require-directory: 2.1.1
+ string-width: 4.2.3
+ y18n: 5.0.8
+ yargs-parser: 20.2.9
+
+ /yocto-queue/0.1.0:
+ resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==}
+ engines: {node: '>=10'}
+
+ /zenscroll/4.0.2:
+ resolution: {integrity: sha512-jEA1znR7b4C/NnaycInCU6h/d15ZzCd1jmsruqOKnZP6WXQSMH3W2GL+OXbkruslU4h+Tzuos0HdswzRUk/Vgg==}
+ dev: true
diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml
new file mode 100644
index 0000000000..067a01bf0f
--- /dev/null
+++ b/pnpm-workspace.yaml
@@ -0,0 +1,3 @@
+packages:
+ # all packages in direct subdirs of packages/
+ - 'packages/*'
diff --git a/renovate.json b/renovate.json
new file mode 100644
index 0000000000..4065820345
--- /dev/null
+++ b/renovate.json
@@ -0,0 +1,8 @@
+{
+ "$schema": "https://docs.renovatebot.com/renovate-schema.json",
+ "extends": [
+ "config:base"
+ ],
+ "rangeStrategy": "update-lockfile",
+ "ignoreDeps": ["camelcase", "mysql", "swagger-ui-react"]
+}
diff --git a/scripts/copy-license.sh b/scripts/copy-license.sh
new file mode 100644
index 0000000000..10882314f2
--- /dev/null
+++ b/scripts/copy-license.sh
@@ -0,0 +1,13 @@
+#!/bin/sh
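+# Copy the repository LICENSE and NOTICE files into every package and set each package.json "license" field to Apache-2.0.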
+FOLDERS=`ls -d packages/*`
+
+for i in $FOLDERS; do
+ echo "copying license to $i"
+ cp LICENSE $i
+ cp NOTICE $i
+ echo "adjusting SPDX in $i"
+ cd $i
+ node -e 'const fs = require("fs"); const pkg = JSON.parse(fs.readFileSync("package.json")); pkg.license = "Apache-2.0"; fs.writeFileSync("package.json", JSON.stringify(pkg, null, 2));'
+ cd ../..
+done
+
diff --git a/scripts/db-proxy.mjs b/scripts/db-proxy.mjs
new file mode 100644
index 0000000000..1456f98990
--- /dev/null
+++ b/scripts/db-proxy.mjs
@@ -0,0 +1,33 @@
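+// Minimal TCP proxy: listen locally on the usual Postgres/MySQL ports and forward each connection to the same port on the host given as the first CLI argument.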
+import net from 'net'
+import { pipeline } from 'stream'
+
+const addr = process.argv[2]
+console.log(addr)
+if (!addr) {
+ console.log('Usage: node scripts/db-proxy.mjs <host>')
+ process.exit(1)
+}
+
+const ports = [
+ 5432,
+ 3307,
+ 3306,
+ 3308
+]
+
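+// One TCP server per port; pipeline(from, to, from) pipes both directions and tears everything down when either side closes.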
+for (const port of ports) {
+ net.createServer(function (from) {
+ console.log('connecting to', port)
+ const to = net.createConnection({
+ host: addr,
+ port
+ })
+ pipeline(from, to, from, (err) => {
+ if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
+ console.error(err)
+ } else {
+ console.log('connection closed')
+ }
+ })
+ }).listen(port)
+}
diff --git a/scripts/gen-cli-doc.mjs b/scripts/gen-cli-doc.mjs
new file mode 100644
index 0000000000..e6aceeb9dd
--- /dev/null
+++ b/scripts/gen-cli-doc.mjs
@@ -0,0 +1,119 @@
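+// Regenerate docs/reference/cli.md from the help text files in packages/cli/help and packages/db/help.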
+import { readFile, readdir, writeFile } from 'fs/promises'
+import { join } from 'desm'
+import path from 'path'
+
+let out = `---
+toc_max_heading_level: 4
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+import TOCInline from '@theme/TOCInline';
+
+# Platformatic CLI
+
+## Installation and usage
+
+Install the Platformatic CLI as a dependency for your project:
+
+<Tabs groupId="package-manager">
+<TabItem value="npm" label="npm">
+
+\`\`\`bash
+npm install platformatic
+\`\`\`
+
+</TabItem>
+<TabItem value="yarn" label="yarn">
+
+\`\`\`bash
+yarn add platformatic
+\`\`\`
+
+</TabItem>
+<TabItem value="pnpm" label="pnpm">
+
+\`\`\`bash
+pnpm add platformatic
+\`\`\`
+
+</TabItem>
+</Tabs>
+
+Once it's installed you can run it with:
+
+<Tabs groupId="package-manager">
+<TabItem value="npm" label="npm">
+
+\`\`\`bash
+npx platformatic
+\`\`\`
+
+</TabItem>
+<TabItem value="yarn" label="yarn">
+
+\`\`\`bash
+yarn platformatic
+\`\`\`
+
+</TabItem>
+<TabItem value="pnpm" label="pnpm">
+
+\`\`\`bash
+pnpm platformatic
+\`\`\`
+
+</TabItem>
+</Tabs>
+
+:::info
+
+The \`platformatic\` package can be installed globally, but installing it as a
+project dependency ensures that everyone working on the project is using the
+same version of the Platformatic CLI.
+
+:::
+
+## Commands
+
+The Platformatic CLI provides the following commands:
+
+<TOCInline toc={toc} />
+
+### help
+
+`
+
+const cliHelpDir = join(import.meta.url, '../packages/cli/help')
+const cliHelp = path.join(cliHelpDir, 'help.txt')
+
+const mainHelp = await readFile(cliHelp, 'utf8')
+
+out += `
+\`\`\`
+${mainHelp.trim()}
+\`\`\`
+
+
+### db
+
+\`\`\`bash
+platformatic db
+\`\`\`
+
+`
+
+const dbHelpsDir = join(import.meta.url, '../packages/db/help')
+const dbHelps = await readdir(dbHelpsDir)
+
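+// Add one subsection per db sub-command, taken from packages/db/help/<command>.txt.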
+for (const dbHelp of dbHelps) {
+ const dbHelpPath = path.join(dbHelpsDir, dbHelp)
+ const content = await readFile(dbHelpPath)
+ out += `
+#### ${dbHelp.replace('.txt', '')}
+
+${content}
+ `
+}
+
+await writeFile(join(import.meta.url, '..', 'docs', 'reference', 'cli.md'), out)
diff --git a/scripts/postinstall.js b/scripts/postinstall.js
new file mode 100644
index 0000000000..6fa785d4f4
--- /dev/null
+++ b/scripts/postinstall.js
@@ -0,0 +1,8 @@
+'use strict'
+const { exec } = require('child_process')
+
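+// Build the dashboard bundle after a local install; skipped on CI, where the CI env var is set.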
+if (!process.env.CI) {
+ console.log('Running dashboard:build script')
+ const child = exec('pnpm run dashboard:build')
+ child.stdout.pipe(process.stdout)
+}
diff --git a/scripts/sync-version.sh b/scripts/sync-version.sh
new file mode 100755
index 0000000000..1960df776c
--- /dev/null
+++ b/scripts/sync-version.sh
@@ -0,0 +1,12 @@
+#!/bin/sh
+
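+# Sync the "version" field of every packages/*/package.json to the version in the root package.json.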
+VERSION=`node -e "console.log(require('./package.json').version)"`
+
+echo Synchronizing all modules to version $VERSION
+
+for FILE in `ls packages/*/package.json`
+do
+ echo editing $FILE
+ node -e "const meta = require('./$FILE'); meta.version = '$VERSION'; console.log(JSON.stringify(meta, null, 2))" > $FILE.tmp
+ mv $FILE.tmp $FILE
+done