From c3fc8974f3ff2bf49c2aadb9250b8c6b9e6aaedd Mon Sep 17 00:00:00 2001 From: Jarrod Lowe Date: Thu, 15 Aug 2024 18:50:14 +1200 Subject: [PATCH] Add a mutation to graphql --- .codacy.yml | 4 + .gitignore | 1 + Makefile | 14 +- README.md | 4 +- graphql/graphql.mk | 20 + graphql/mutation/createGame/appsync.js | 38 + graphql/mutation/createGame/appsync.ts | 48 ++ graphql/node_modules/.package-lock.json | 13 + .../@aws-appsync/utils/CHANGELOG.md | 137 ++++ .../node_modules/@aws-appsync/utils/LICENSE | 175 ++++ .../@aws-appsync/utils/LICENSE-THIRD-PARTY | 0 .../node_modules/@aws-appsync/utils/README.md | 39 + .../@aws-appsync/utils/dynamodb.d.ts | 1 + .../@aws-appsync/utils/dynamodb.js | 1 + .../node_modules/@aws-appsync/utils/index.js | 1 + .../utils/lib/dynamodb-helpers.d.ts | 756 ++++++++++++++++++ .../utils/lib/dynamodb-helpers.js | 3 + .../utils/lib/dynamodb-helpers.js.map | 1 + .../utils/lib/dynamodb-utils.d.ts | 309 +++++++ .../@aws-appsync/utils/lib/dynamodb-utils.js | 15 + .../utils/lib/dynamodb-utils.js.map | 1 + .../@aws-appsync/utils/lib/http-utils.d.ts | 38 + .../@aws-appsync/utils/lib/http-utils.js | 3 + .../@aws-appsync/utils/lib/http-utils.js.map | 1 + .../@aws-appsync/utils/lib/index.d.ts | 482 +++++++++++ .../@aws-appsync/utils/lib/index.js | 25 + .../@aws-appsync/utils/lib/index.js.map | 1 + .../@aws-appsync/utils/lib/math-utils.d.ts | 41 + .../@aws-appsync/utils/lib/math-utils.js | 3 + .../@aws-appsync/utils/lib/math-utils.js.map | 1 + .../@aws-appsync/utils/lib/rds-helpers.d.ts | 376 +++++++++ .../@aws-appsync/utils/lib/rds-helpers.js | 3 + .../@aws-appsync/utils/lib/rds-helpers.js.map | 1 + .../@aws-appsync/utils/lib/rds-utils.d.ts | 120 +++ .../@aws-appsync/utils/lib/rds-utils.js | 3 + .../@aws-appsync/utils/lib/rds-utils.js.map | 1 + .../utils/lib/resolver-return-types.d.ts | 349 ++++++++ .../utils/lib/resolver-return-types.js | 3 + .../utils/lib/resolver-return-types.js.map | 1 + .../@aws-appsync/utils/lib/string-utils.d.ts | 13 + .../@aws-appsync/utils/lib/string-utils.js | 3 + .../utils/lib/string-utils.js.map | 1 + .../utils/lib/subscription-filter-types.d.ts | 38 + .../utils/lib/subscription-filter-types.js | 3 + .../lib/subscription-filter-types.js.map | 1 + .../@aws-appsync/utils/lib/time-utils.d.ts | 85 ++ .../@aws-appsync/utils/lib/time-utils.js | 3 + .../@aws-appsync/utils/lib/time-utils.js.map | 1 + .../utils/lib/transform-utils.d.ts | 244 ++++++ .../@aws-appsync/utils/lib/transform-utils.js | 3 + .../utils/lib/transform-utils.js.map | 1 + .../@aws-appsync/utils/lib/type-utils.d.ts | 8 + .../@aws-appsync/utils/lib/type-utils.js | 3 + .../@aws-appsync/utils/lib/type-utils.js.map | 1 + .../@aws-appsync/utils/lib/xml-utils.d.ts | 46 ++ .../@aws-appsync/utils/lib/xml-utils.js | 3 + .../@aws-appsync/utils/lib/xml-utils.js.map | 1 + .../@aws-appsync/utils/package.json | 31 + .../node_modules/@aws-appsync/utils/rds.d.ts | 1 + .../node_modules/@aws-appsync/utils/rds.js | 1 + graphql/package-lock.json | 27 + graphql/package.json | 7 + graphql/schema.graphql | 10 +- .../wildsea-dev/.terraform.lock.hcl | 38 + terraform/environment/wildsea-dev/plan | Bin 20576 -> 0 bytes terraform/module/iac-roles/policy.tf | 10 +- terraform/module/state-bucket/s3.tf | 31 +- terraform/module/wildsea/cognito.tf | 2 +- terraform/module/wildsea/graphql.tf | 59 ++ 69 files changed, 3691 insertions(+), 17 deletions(-) create mode 100644 .codacy.yml create mode 100644 graphql/graphql.mk create mode 100644 graphql/mutation/createGame/appsync.js create mode 100644 
graphql/mutation/createGame/appsync.ts create mode 100644 graphql/node_modules/.package-lock.json create mode 100644 graphql/node_modules/@aws-appsync/utils/CHANGELOG.md create mode 100644 graphql/node_modules/@aws-appsync/utils/LICENSE create mode 100644 graphql/node_modules/@aws-appsync/utils/LICENSE-THIRD-PARTY create mode 100644 graphql/node_modules/@aws-appsync/utils/README.md create mode 100644 graphql/node_modules/@aws-appsync/utils/dynamodb.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/dynamodb.js create mode 100644 graphql/node_modules/@aws-appsync/utils/index.js create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/dynamodb-helpers.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/dynamodb-helpers.js create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/dynamodb-helpers.js.map create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/dynamodb-utils.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/dynamodb-utils.js create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/dynamodb-utils.js.map create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/http-utils.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/http-utils.js create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/http-utils.js.map create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/index.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/index.js create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/index.js.map create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/math-utils.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/math-utils.js create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/math-utils.js.map create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/rds-helpers.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/rds-helpers.js create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/rds-helpers.js.map create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/rds-utils.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/rds-utils.js create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/rds-utils.js.map create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/resolver-return-types.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/resolver-return-types.js create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/resolver-return-types.js.map create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/string-utils.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/string-utils.js create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/string-utils.js.map create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/subscription-filter-types.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/subscription-filter-types.js create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/subscription-filter-types.js.map create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/time-utils.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/time-utils.js create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/time-utils.js.map create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/transform-utils.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/transform-utils.js create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/transform-utils.js.map 
create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/type-utils.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/type-utils.js create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/type-utils.js.map create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/xml-utils.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/xml-utils.js create mode 100644 graphql/node_modules/@aws-appsync/utils/lib/xml-utils.js.map create mode 100644 graphql/node_modules/@aws-appsync/utils/package.json create mode 100644 graphql/node_modules/@aws-appsync/utils/rds.d.ts create mode 100644 graphql/node_modules/@aws-appsync/utils/rds.js create mode 100644 graphql/package-lock.json create mode 100644 graphql/package.json delete mode 100644 terraform/environment/wildsea-dev/plan diff --git a/.codacy.yml b/.codacy.yml new file mode 100644 index 00000000..28bf9da6 --- /dev/null +++ b/.codacy.yml @@ -0,0 +1,4 @@ +--- +exclude_paths: + - "graphql/mutation/*/appsync.js" + - "graphql/query/*/appsync.js" diff --git a/.gitignore b/.gitignore index 7551e3cb..4a96fb25 100644 --- a/.gitignore +++ b/.gitignore @@ -32,6 +32,7 @@ override.tf.json # Ignore CLI configuration files .terraformrc terraform.rc + .validate .apply plan.tfplan diff --git a/Makefile b/Makefile index 1c0e8f87..b3f87132 100644 --- a/Makefile +++ b/Makefile @@ -10,6 +10,8 @@ RW_ROLE = arn:aws:iam::$(ACCOUNT_ID):role/GitHubAction-Wildsea-rw-dev all: $(TERRAFOM_VALIDATE) +include graphql/graphql.mk + .PHONY: terraform-format terraform-format: $(addprefix terraform-format-environment-,$(TERRAFORM_ENVIRONMENTS)) $(addprefix terraform-format-module-,$(TERRAFORM_MODULES)) @true @@ -35,13 +37,16 @@ terraform/environment/aws-dev/.apply: terraform/environment/aws-dev/*.tf terrafo ./terraform/environment/aws-dev/deploy.sh $(ACCOUNT_ID) dev touch $@ -terraform/environment/wildsea-dev/plan.tfplan: terraform/environment/wildsea-dev/*.tf terraform/module/wildsea/*.tf terraform/environment/wildsea-dev/.terraform +terraform/environment/wildsea-dev/plan.tfplan: terraform/environment/wildsea-dev/*.tf terraform/module/wildsea/*.tf terraform/environment/wildsea-dev/.terraform $(GRAPHQL) cd terraform/environment/wildsea-dev ; ../../../scripts/run-as.sh $(RO_ROLE) \ terraform plan -out=./plan.tfplan -terraform/environment/wildsea-dev/.apply: terraform/environment/wildsea-dev/plan.tfplan +terraform/environment/wildsea-dev/.apply: terraform/environment/wildsea-dev/plan.tfplan $(GRAPHQL) cd terraform/environment/wildsea-dev ; ../../../scripts/run-as.sh $(RW_ROLE) \ - terraform apply ./plan.tfplan + terraform apply ./plan.tfplan ; \ + status=$$? 
; \ + rm -f $< ; \ + [ "$$status" -eq 0 ] touch $@ terraform/environment/wildsea-dev/.terraform: terraform/environment/wildsea-dev/*.tf terraform/module/wildsea/*.tf @@ -54,3 +59,6 @@ terraform/environment/wildsea-dev/.terraform: terraform/environment/wildsea-dev/ clean: rm -f terraform/environment/*/.validate rm -f terraform/environment/*/plan.tfplan + rm -f graphql/mutation/*/appsync.js + rm -f graphql/query/*/appsync.js + rm -rf graphql/node_modules diff --git a/README.md b/README.md index f7650631..fcaaad67 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ Wildsea companion app * Log into Codacy, and connect the repo * Configure the rule to maximum * In Codacy, in the repo, go to code patterns, and edit the coding standard: - * Set the languages to: CSS, Go, JSON, Javascript, Markdown, Python, Shell, Terraform, Typescript, XML, YAML + * Set the languages to: CSS, Go, JSON, Markdown, Python, Shell, Terraform, Typescript, XML, YAML * Select every tool that is: * NOT client-side * NOT deprecated @@ -86,6 +86,8 @@ If you do not set the secret, then Cognito will be used as the identity source. ## Development Environment +Install esbuild with `sudo npm install -g --save-exact --save-dev esbuild` + After having set up the AWS Account, use `AWS_PROFILE= make dev` to deploy a development version. If this is a different AWS Account from the real deployment, you will need to create an S3 bucket for the state, in the same way diff --git a/graphql/graphql.mk b/graphql/graphql.mk new file mode 100644 index 00000000..93755da3 --- /dev/null +++ b/graphql/graphql.mk @@ -0,0 +1,20 @@ +graphql/%/appsync.js: graphql/node_modules graphql/%/appsync.ts + cd graphql && \ + esbuild $*/*.ts \ + --bundle \ + --external:"@aws-appsync/utils" \ + --format=esm \ + --platform=node \ + --target=esnext \ + --sourcemap=inline \ + --sources-content=false \ + --outdir=$* + +graphql/node_modules: graphql/package.json + cd graphql && npm install + +GRAPHQL := $(patsubst %.ts,%.js,$(wildcard graphql/*/*/appsync.ts)) + +.PHONY: graphql +graphql: $(GRAPHQL) + echo $(GRAPHQL) diff --git a/graphql/mutation/createGame/appsync.js b/graphql/mutation/createGame/appsync.js new file mode 100644 index 00000000..f57a83e6 --- /dev/null +++ b/graphql/mutation/createGame/appsync.js @@ -0,0 +1,38 @@ +// mutation/createGame/appsync.ts +import { util } from "@aws-appsync/utils"; +function request(context) { + if (!context.identity) { + util.error("Unauthorized: Identity information is missing."); + } + const identity = context.identity; + if (!identity.sub) { + util.error("Unauthorized: User ID is missing."); + } + const input = context.arguments.input; + const id = util.autoId(); + const timestamp = util.time.nowISO8601(); + return { + operation: "PutItem", + key: util.dynamodb.toMapValues({ PK: "GAME#" + id, SK: "GAME" }), + attributeValues: util.dynamodb.toMapValues({ + name: input.name, + description: input.description, + id, + fireflyUserId: identity.sub, + createdAt: timestamp, + updatedAt: timestamp + }) + }; +} +function response(context) { + if (context.error) { + util.appendError(context.error.message, context.error.type, context.result); + return; + } + return context.result; +} +export { + request, + response +}; +//# 
sourceMappingURL=data:application/json;base64,ewogICJ2ZXJzaW9uIjogMywKICAic291cmNlcyI6IFsiYXBwc3luYy50cyJdLAogICJtYXBwaW5ncyI6ICI7QUFBQSxTQUFTLFlBQStGO0FBY2pHLFNBQVMsUUFBUSxTQUFzRTtBQUMxRixNQUFJLENBQUMsUUFBUSxVQUFVO0FBQ25CLFNBQUssTUFBTSxnREFBMEQ7QUFBQSxFQUN6RTtBQUVBLFFBQU0sV0FBVyxRQUFRO0FBQ3pCLE1BQUksQ0FBQyxTQUFTLEtBQUs7QUFDZixTQUFLLE1BQU0sbUNBQTZDO0FBQUEsRUFDNUQ7QUFFQSxRQUFNLFFBQVEsUUFBUSxVQUFVO0FBQ2hDLFFBQU0sS0FBSyxLQUFLLE9BQU87QUFDdkIsUUFBTSxZQUFZLEtBQUssS0FBSyxXQUFXO0FBQ3ZDLFNBQU87QUFBQSxJQUNILFdBQVc7QUFBQSxJQUNYLEtBQUssS0FBSyxTQUFTLFlBQVksRUFBRSxJQUFJLFVBQVEsSUFBSSxJQUFJLE9BQU8sQ0FBQztBQUFBLElBQzdELGlCQUFpQixLQUFLLFNBQVMsWUFBWTtBQUFBLE1BQ3ZDLE1BQU0sTUFBTTtBQUFBLE1BQ1osYUFBYSxNQUFNO0FBQUEsTUFDbkI7QUFBQSxNQUNBLGVBQWUsU0FBUztBQUFBLE1BQ3hCLFdBQVc7QUFBQSxNQUNYLFdBQVc7QUFBQSxJQUNmLENBQUM7QUFBQSxFQUNMO0FBQ0o7QUFFTyxTQUFTLFNBQVMsU0FBMkI7QUFDaEQsTUFBSSxRQUFRLE9BQU87QUFDZixTQUFLLFlBQVksUUFBUSxNQUFNLFNBQVMsUUFBUSxNQUFNLE1BQU0sUUFBUSxNQUFNO0FBQzFFO0FBQUEsRUFDSjtBQUNBLFNBQU8sUUFBUTtBQUNuQjsiLAogICJuYW1lcyI6IFtdCn0K diff --git a/graphql/mutation/createGame/appsync.ts b/graphql/mutation/createGame/appsync.ts new file mode 100644 index 00000000..67b92a83 --- /dev/null +++ b/graphql/mutation/createGame/appsync.ts @@ -0,0 +1,48 @@ +import { util, Context, DynamoDBPutItemRequest, AppSyncIdentityCognito, PutItemInputAttributeMap } from '@aws-appsync/utils'; + +/** + * A CreateGameInput creates a Game. + * They are stored in DynamoDB with a PK of `GAME#` and an SK of `GAME`. + * The ID is a UUID + * The fireflyUserId is the Cognito ID of the user + */ + +interface CreateGameInput { + name: string; + description?: string; +} + +export function request(context: Context<{ input: CreateGameInput }>): DynamoDBPutItemRequest { + if (!context.identity) { + util.error('Unauthorized: Identity information is missing.' as string); + } + + const identity = context.identity as AppSyncIdentityCognito; + if (!identity.sub) { + util.error('Unauthorized: User ID is missing.' 
as string); + } + + const input = context.arguments.input; + const id = util.autoId(); + const timestamp = util.time.nowISO8601(); + return { + operation: 'PutItem', + key: util.dynamodb.toMapValues({ PK: "GAME#"+id, SK: 'GAME' }), + attributeValues: util.dynamodb.toMapValues({ + name: input.name, + description: input.description, + id: id, + fireflyUserId: identity.sub, + createdAt: timestamp, + updatedAt: timestamp + }) as PutItemInputAttributeMap + }; +} + +export function response(context: Context): unknown { + if (context.error) { + util.appendError(context.error.message, context.error.type, context.result); + return; + } + return context.result; +} diff --git a/graphql/node_modules/.package-lock.json b/graphql/node_modules/.package-lock.json new file mode 100644 index 00000000..715e477d --- /dev/null +++ b/graphql/node_modules/.package-lock.json @@ -0,0 +1,13 @@ +{ + "name": "graphql", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "node_modules/@aws-appsync/utils": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@aws-appsync/utils/-/utils-1.9.0.tgz", + "integrity": "sha512-TAZNHiMpJKafrur6sE0Ou48gbuRL1oJXUDfrnBbYAXDVF3+mzD/0QCRp91F/dcAUbbNkFaq8pAwLEBTmpvmm9g==" + } + } +} diff --git a/graphql/node_modules/@aws-appsync/utils/CHANGELOG.md b/graphql/node_modules/@aws-appsync/utils/CHANGELOG.md new file mode 100644 index 00000000..61813f74 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/CHANGELOG.md @@ -0,0 +1,137 @@ +# Change Log + +All notable changes to this project will be documented in this file. +See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. + +# [1.9.0](https://github.com/aws/aws-appsync-toolkit/compare/v1.8.0...v1.9.0) (2024-07-17) + + +### Features + +* add from field ([e907050](https://github.com/aws/aws-appsync-toolkit/commit/e90705070fa225409109a435507a11d0fc2d2031)) + + + + + +# [1.8.0](https://github.com/aws/aws-appsync-toolkit/compare/v1.7.0...v1.8.0) (2024-05-30) + + +### Bug Fixes + +* Move BatchGetItem projection to table ([f14122b](https://github.com/aws/aws-appsync-toolkit/commit/f14122bad14d44cb86b1a9d58ad756e1da2d1550)) + + +### Features + +* add invocationType to LambdaRequest for async lambda ([6978e0f](https://github.com/aws/aws-appsync-toolkit/commit/6978e0feb67bb0dc7475634e9618ef7309cac779)) + + + + + +# [1.7.0](https://github.com/aws/aws-appsync-toolkit/compare/v1.6.0...v1.7.0) (2024-02-06) + + +### Features + +* extend type definition for env variables ([babbc0f](https://github.com/aws/aws-appsync-toolkit/commit/babbc0fe366916a105dc97e476a06800de1a2f84)) + + + + + +# [1.6.0](https://github.com/aws/aws-appsync-toolkit/compare/v1.5.0...v1.6.0) (2023-11-27) + + +### Features + +* add rds helper ([#189](https://github.com/aws/aws-appsync-toolkit/issues/189)) ([45f5575](https://github.com/aws/aws-appsync-toolkit/commit/45f557520ad324479c0d1c92e8d6d0e0eee0d12a)) + + + + + +# [1.5.0](https://github.com/aws/aws-appsync-toolkit/compare/v1.4.0...v1.5.0) (2023-09-18) + + +### Features + +* add projection expression support ([#154](https://github.com/aws/aws-appsync-toolkit/issues/154)) ([5256d72](https://github.com/aws/aws-appsync-toolkit/commit/5256d7274e3e9ffbda5dcbb74f8d98436c49f6ac)) + + + + + +## [1.3.1](https://github.com/aws/aws-appsync-toolkit/compare/v1.3.0...v1.3.1) (2023-08-30) + +**Note:** Version bump only for package @aws-appsync/utils + + + + + +# [1.3.0](https://github.com/aws/aws-appsync-toolkit/compare/v1.2.6...v1.3.0) (2023-08-30) + + +### Features 
+ +* Add type definition for DynamoDB helper ([#118](https://github.com/aws/aws-appsync-toolkit/issues/118)) ([a3a38d1](https://github.com/aws/aws-appsync-toolkit/commit/a3a38d10972ed332d7073c5b3d928cc8db90fe73)) + + + + + +## [1.2.6](https://github.com/aws/aws-appsync-toolkit/compare/v1.2.5...v1.2.6) (2023-08-15) + +### Bug Fixes + +- change the return type of toElasticsearchQueryDSL to string ([#128](https://github.com/aws/aws-appsync-toolkit/issues/128)) ([a327297](https://github.com/aws/aws-appsync-toolkit/commit/a32729726fba4b9ec4335c446872c0d9aa142e15)) +- make open search request params optional ([#117](https://github.com/aws/aws-appsync-toolkit/issues/117)) ([4a764ef](https://github.com/aws/aws-appsync-toolkit/commit/4a764ef0a1d74e981d7181eb9cc57c3acdad0ef1)) +- return never for function that throw errors ([#122](https://github.com/aws/aws-appsync-toolkit/issues/122)) ([6c350e3](https://github.com/aws/aws-appsync-toolkit/commit/6c350e306e7e6be083e7e14a83ddfa667c27edf4)) +- update generics to support autocompletion for nullable fields ([#129](https://github.com/aws/aws-appsync-toolkit/issues/129)) ([960209f](https://github.com/aws/aws-appsync-toolkit/commit/960209fa08f81070c6e326f62208fff68f2f8f7d)) + +## [1.2.5](https://github.com/aws/aws-appsync-toolkit/compare/v1.2.4...v1.2.5) (2023-04-17) + +### Bug Fixes + +- use shallow filter type to prevent recursion ([a98e5f8](https://github.com/aws/aws-appsync-toolkit/commit/a98e5f880051f7099a4d5b7adbf63d499d47f530)) + +## [1.2.4](https://github.com/aws/aws-appsync-toolkit/compare/v1.2.3...v1.2.4) (2023-04-06) + +**Note:** Version bump only for package @aws-appsync/utils + +## [1.2.3](https://github.com/aws/aws-appsync-toolkit/compare/v1.2.2...v1.2.3) (2023-04-06) + +### Bug Fixes + +- change default type in `toSubscriptionFilter` ([325e1e2](https://github.com/aws/aws-appsync-toolkit/commit/325e1e2ce0886ecc730ed410920dd464d8bcadfd)) + +## [1.2.2](https://github.com/aws/aws-appsync-toolkit/compare/v1.2.1...v1.2.2) (2023-04-04) + +### Bug Fixes + +- update the return type of transform subscription filter ([#102](https://github.com/aws/aws-appsync-toolkit/issues/102)) ([db11c5d](https://github.com/aws/aws-appsync-toolkit/commit/db11c5ddeb465f649ff00589a4b9c339233087c1)) + +## [1.2.1](https://github.com/aws/aws-appsync-toolkit/compare/v1.2.0...v1.2.1) (2023-03-31) + +**Note:** Version bump only for package @aws-appsync/utils + +# [1.2.0](https://github.com/aws/aws-appsync-toolkit/compare/v1.1.0...v1.2.0) (2023-03-31) + +### Bug Fixes + +- rename the DynamoDBGetItem to DynamoDBGetItemRequest ([#68](https://github.com/aws/aws-appsync-toolkit/issues/68)) ([09adf56](https://github.com/aws/aws-appsync-toolkit/commit/09adf566d8c0f3cbb258bc767aa11241ebdfce06)) +- update order of generics ([#67](https://github.com/aws/aws-appsync-toolkit/issues/67)) ([cc74b50](https://github.com/aws/aws-appsync-toolkit/commit/cc74b502dc3d8603480f1d2af8098ee22356638d)) + +### Features + +- add runtime namespace and declare globals ([9705388](https://github.com/aws/aws-appsync-toolkit/commit/9705388e4c055ec5f25684f5c946f8bd20f2df4b)) +- add typedefinition to utils ([#73](https://github.com/aws/aws-appsync-toolkit/issues/73)) ([172455c](https://github.com/aws/aws-appsync-toolkit/commit/172455ccf8cd3413f441abeefd81612e16b78a5a)) +- add typescript generic support to Context ([#56](https://github.com/aws/aws-appsync-toolkit/issues/56)) ([2ccb9b8](https://github.com/aws/aws-appsync-toolkit/commit/2ccb9b85d0333442e4ec34a6539f23f260421088)) + +# 
[1.1.0](https://github.com/aws/aws-appsync-toolkit/compare/v1.0.1...v1.1.0) (2023-01-31) + +### Bug Fixes + +- expose base64 encode and decode functions in util ([5a2b737](https://github.com/aws/aws-appsync-toolkit/commit/5a2b73792df61bc92e3013f6ab9a5129ddf86629)) diff --git a/graphql/node_modules/@aws-appsync/utils/LICENSE b/graphql/node_modules/@aws-appsync/utils/LICENSE new file mode 100644 index 00000000..67db8588 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/LICENSE @@ -0,0 +1,175 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
diff --git a/graphql/node_modules/@aws-appsync/utils/LICENSE-THIRD-PARTY b/graphql/node_modules/@aws-appsync/utils/LICENSE-THIRD-PARTY new file mode 100644 index 00000000..e69de29b diff --git a/graphql/node_modules/@aws-appsync/utils/README.md b/graphql/node_modules/@aws-appsync/utils/README.md new file mode 100644 index 00000000..d517b140 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/README.md @@ -0,0 +1,39 @@ +# Type definition for @aws-appsync/utils + +This project contains utility function definitions and type definitions for working with AWS AppSync Resolvers written in JavaScript using the APPSYNC_JS runtime. This includes the `util` and `extensions` utilities. For more information on these utilities, see the AppSync [documentation](https://docs.aws.amazon.com/appsync/latest/devguide/resolver-util-reference-js.html). + +## Usage + +Install the type definition by running + +```bash +npm install @aws-appsync/utils +``` + +In your AppSync function code definition: + +```js +import { util } from '@aws-appsync/utils'; +import * as ddb from '@aws-appsync/utils/dynamodb'; + +/** + * Creates a new item in a DynamoDB table + * @param ctx contextual information about the request + */ +export function request(ctx) { + const item = ctx.arguments.input; + return ddb.put({ key: { id: util.autoId() }, item }); +} + +/** + * Returns the result + * @param ctx contextual information about the request + */ +export function response(ctx) { + const { error, result } = ctx; + if (error) { + return util.appendError(error.message, error.type, result); + } + return ctx.result; +} +``` diff --git a/graphql/node_modules/@aws-appsync/utils/dynamodb.d.ts b/graphql/node_modules/@aws-appsync/utils/dynamodb.d.ts new file mode 100644 index 00000000..754d6ba4 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/dynamodb.d.ts @@ -0,0 +1 @@ +export * from './lib/dynamodb-helpers'; \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/dynamodb.js b/graphql/node_modules/@aws-appsync/utils/dynamodb.js new file mode 100644 index 00000000..754d6ba4 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/dynamodb.js @@ -0,0 +1 @@ +export * from './lib/dynamodb-helpers'; \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/index.js b/graphql/node_modules/@aws-appsync/utils/index.js new file mode 100644 index 00000000..11aece60 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/index.js @@ -0,0 +1 @@ +export * from './lib/index'; diff --git a/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-helpers.d.ts b/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-helpers.d.ts new file mode 100644 index 00000000..bc18d22b --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-helpers.d.ts @@ -0,0 +1,756 @@ +import { DynamoDBScanRequest, DynamoDBQueryRequest, DynamoDBPutItemRequest, DynamoDBSyncRequest, DynamoDBDeleteItemRequest, DynamoDBUpdateItemRequest, DynamoDBGetItemRequest } from './resolver-return-types'; +import { DynamoDBFilterObject, DynamoDBEqualityOperators } from './transform-utils'; +import { Decrement, Prettify } from './type-utils'; +type DynamoDBSelectAttributes = 'ALL_ATTRIBUTES' | 'ALL_PROJECTED_ATTRIBUTES' | 'SPECIFIC_ATTRIBUTES'; +type DynamoDBKeyPrimitives = string | number; +type DynamoDBPrimitiveFields = { + [K in keyof T]: T[K] extends DynamoDBKeyPrimitives ? K : never; +}[keyof T]; +export type DynamoDBKey = T extends Record ? 
Prettify>> : {}; +type DynamoDBKeyConditionScalarNumberOperators = DynamoDBEqualityOperators & { + le?: T; + lt?: T; + ge?: T; + gt?: T; +}; +type DynamoDBKeyConditionNumberOperators = DynamoDBKeyConditionScalarNumberOperators & { + between?: [T, T]; +}; +type DynamoDBKeyConditionStringOperators = DynamoDBKeyConditionNumberOperators & { + beginsWith?: T; +}; +type DynamoDBKeyConditionExpressionOperation = TOperand extends number ? DynamoDBKeyConditionNumberOperators> : TOperand extends string ? DynamoDBKeyConditionStringOperators> : never; +type DynamoDBKeyCondition = T extends Record ? Prettify : never; +}, DynamoDBPrimitiveFields>> : {}; +type KeyStringLiteral = `${Literal & string}`; +type DynamoDBProjectionKey = (Depth extends never ? {} : { + [K in keyof T]: NonNullable extends Record ? `${KeyStringLiteral}${KeyStringLiteral}` | DynamoDBProjectionKey, `${KeyStringLiteral}${KeyStringLiteral}.`, Decrement> : `${KeyStringLiteral}${KeyStringLiteral}`; +})[keyof T]; +export type DynamoDBProjection = T extends Record ? DynamoDBProjectionKey> : string; +export type ScanInput = { + /** + * optional name of the index to scan + */ + index?: string | null; + /** + * optional max number of results to return + */ + limit?: number | null; + /** + * optional filter to apply to the results after retrieving it from the table + */ + filter?: DynamoDBFilterObject | null; + /** + * Optional pagination token to continue a previous query. This would have been obtained from a previous query + */ + nextToken?: string | null; + /** + * an optional boolean to indicate consistent reads when querying DynamoDB defaults to false. + */ + consistentRead?: boolean | null; + /** + * optional list of attributes to return from DynamoDB + */ + projection?: DynamoDBProjection[] | string[]; + totalSegments?: number; + segment?: number; + /** + * attributes to return from DynamoDB. By default, the AWS AppSync DynamoDB resolver only returns + * attributes that are projected into the index. The supported values are + * `ALL_ATTRIBUTES` + * - Returns all of the item attributes from the specified table or index. + * If you query a local secondary index, DynamoDB fetches the entire item from + * the parent table for each matching item in the index. If the index is + * configured to project all item attributes, all of the data can be obtained from + * the local secondary index and no fetching is required. + * + * `ALL_PROJECTED_ATTRIBUTES` + * - Returns all attributes that have been projected into the index. + * If the index is configured to project all attributes, this return value is + * equivalent to specifying `ALL_ATTRIBUTES`. + * + * `SPECIFIC_ATTRIBUTES` + * - Returns only the attributes listed in `ProjectionExpression`. + * This return value is equivalent to specifying `ProjectionExpression` without + * specifying any value for `AttributesToGet`. + * + */ + select?: DynamoDBSelectAttributes; + /** + * optional boolean to indicate whether the query is performed in ascending or descending order. + * + * @default true + */ + scanIndexForward?: boolean | null; +}; +export type QueryInput = ScanInput & { + /** + * specify a key condition that describes items to query. For a given index the + * the condition for partition key should be an equality and sort key can be + * a comparison or a beginsWith (when its a string). Only number and string + * types are supported for partition key and sort key. 
+ * + * @example + * If there is an UserType + * ```typescript + * type UserType = { + * id: string; + * name: string; + * age: number; + * isVerified: boolean; + * friendsIds: string[] + * } + * ``` + * The query can only include the following fields: + * `id`, `name` and `age` + * ```typescript + * + * const query: QueryInput = { + * query: { + * name: { eq: 'John' }, + * age: { gt: 20 }, + * } + * } + ``` + * @See [DynamoDB Key Conditions](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LegacyConditionalParameters.KeyConditions.html) for details + */ + query: DynamoDBKeyCondition>; +}; +export type RemoveInput = { + /** + * A required parameter that specifies the key of the item in DynamoDB that is being removed. + * DynamoDB items may have a single hash key, or a hash key and sort key. + * @example + * If a table user has only hash key with user id then key would look like this + * ```typescript + * type User = { + * id: number; + * name: string; + * age: number; + * isVerified: boolean; + * } + * const key: DynamoDBKey = { + * id: 1, + * } + * ``` + * + * If the table user has a hash key (id) and sort key(name) then key would + * look like this + * ```typescript + * type User = { + * id: number; + * name: string; + * age: number; + * isVerified: boolean; + * friendsIds: string[] + * } + * const key: DynamoDBKey = { + * id: 1, + * name: 'XXXXXXXXXX', + * } + * ``` + */ + key: DynamoDBKey; + /** + * When you remove an object in DynamoDB by using the remove, you can optionally + * specify a condition expression that controls whether the request should succeed + * or not, based on the state of the object already in DynamoDB before the operation + * is performed. + * + * @See [Condition expression](https://docs.aws.amazon.com/appsync/latest/devguide/resolver-mapping-template-reference-dynamodb.html#aws-appsync-resolver-mapping-template-reference-dynamodb-condition-expressions) + * @See [Condition expression syntax](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.SpecifyingConditions.html) + * @example + * The following is a DeleteItem expression containing a condition that allows the operation succeed only if the owner of the document matches + * the user making the request. + * ```typescript + * type Task = { + * id: string; + * title: string; + * description: string; + * owner: string; + * isComplete: boolean; + * } + * const condition: DynamoDBFilterObject = { + * owner: { eq: 'XXXXXXXXXXXXXXXX' }, + * } + * remove({ + * key: { + * id: 'XXXXXXXXXXXXXXXX', + * }, + * condition, + * }); + ``` + */ + condition?: DynamoDBFilterObject; + /** + * When enabled, customPartitionKey value modifies the format of the ds_sk and ds_pk + * records used by the delta sync table when versioning has been enabled. + * When enabled, the processing of the populateIndexFields entry is also enabled. + * @see[Conflict detection and sync](https://docs.aws.amazon.com/appsync/latest/devguide/conflict-detection-and-sync.html) + */ + customPartitionKey?: string; + /** + * A boolean value that, when enabled along with the customPartitionKey, + * creates new entries for each record in the delta sync table, specifically + * in the gsi_ds_pk and gsi_ds_sk columns. 
For more information, + * @see[Conflict detection and sync](https://docs.aws.amazon.com/appsync/latest/devguide/conflict-detection-and-sync.html) + */ + populateIndexFields?: boolean; + _version?: number; +}; +export type PutInput = { + /** + * A required parameter that specifies the key of the item in DynamoDB that is being put. + * DynamoDB items may have a single hash key, or a hash key and sort key. + * @example + * If a table user has only hash key with user id then key would look like this + * ```typescript + * type User = { + * id: number; + * name: string; + * age: number; + * isVerified: boolean; + * } + * const key: DynamoDBKey = { + * id: 1, + * } + * ``` + * + * If the table user has a hash key (id) and sort key(name) then key would + * look like this + * ```typescript + * type User = { + * id: number; + * name: string; + * age: number; + * isVerified: boolean; + * friendsIds: string[] + * } + * const key: DynamoDBKey = { + * id: 1, + * name: 'XXXXXXXX', + * } + * ``` + */ + key: DynamoDBKey; + /** + * The rest of the attributes of the item to be put into DynamoDB. + */ + item: Partial; + /** + * When you put an objects in DynamoDB by using the put, you can optionally + * specify a condition expression that controls whether the request should succeed + * or not, based on the state of the object already in DynamoDB before the operation + * is performed. + * + * @See [Condition expression](https://docs.aws.amazon.com/appsync/latest/devguide/resolver-mapping-template-reference-dynamodb.html#aws-appsync-resolver-mapping-template-reference-dynamodb-condition-expressions) + * @See [Condition expression syntax](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.SpecifyingConditions.html) + * @example + * The following put condition expression that allows the operation succeed only if the owner of the document matches + * the user making the request. + * ```typescript + * type Task = { + * id: string; + * title: string; + * description: string; + * owner: string; + * isComplete: boolean; + * } + * const condition: DynamoDBFilterObject = { + * owner: { eq: 'XXXXXXXXXXXXXXXX' }, + * } + * put({ + * key: { + * id: 'XXXXXXXXXXXXXXXX', + * }, + * condition, + * item: { + * title: 'New Task', + * description: 'New Task Description', + * owner: 'XXXXXXXXXXXXXXXX', + * isComplete: false, + * } + * }); + ``` + */ + condition?: DynamoDBFilterObject | null; + /** + * When enabled, this string value modifies the format of the ds_sk and ds_pk + * records used by the delta sync table when versioning has been enabled. + * When enabled, the processing of the populateIndexFields entry is also enabled. + * @see[Conflict detection and sync](https://docs.aws.amazon.com/appsync/latest/devguide/conflict-detection-and-sync.html) + */ + customPartitionKey?: string; + /** + * A boolean value that, when enabled along with the customPartitionKey, + * creates new entries for each record in the delta sync table, specifically + * in the gsi_ds_pk and gsi_ds_sk columns. For more information, + * @see[Conflict detection and sync](https://docs.aws.amazon.com/appsync/latest/devguide/conflict-detection-and-sync.html) + */ + populateIndexFields?: boolean; + _version?: number; +}; +export type GetInput = { + /** + * A required parameter that specifies the key of the item in DynamoDB. + * DynamoDB items may have a single hash key, or a hash key and sort key. 
+ * @example + * If a table user has only hash key with user id then key would look like this + * ```typescript + * type User = { + * id: number; + * name: string; + * age: number; + * isVerified: boolean; + * } + * const key: DynamoDBKey = { + * id: 1, + * } + * ``` + * + * If the table user has a hash key (id) and sort key(name) then key would + * look like this + * ```typescript + * type User = { + * id: number; + * name: string; + * age: number; + * isVerified: boolean; + * friendsIds: string[] + * } + * const key: DynamoDBKey = { + * id: 1, + * name: 'XXXXXXXXXX', + * } + * ``` + */ + key: DynamoDBKey; + /** + * Optional boolean to specify if you want to perform a strongly consistent read with DynamoDB + * @default false + */ + consistentRead?: boolean; + /** + * optional list of attributes to return from DynamoDB + */ + projection?: DynamoDBProjection[] | string[]; +}; +export interface DynamoDBOperationAdd { + _type: 'add'; +} +export interface DynamoDBOperationRemove { + _type: 'remove'; +} +export interface DynamoDBOperationReplace { + _type: 'replace'; +} +export interface DynamoDBOperationIncrement { + _type: 'increment'; +} +export interface DynamoDBOperationDecrement { + _type: 'decrement'; +} +export interface DynamoDBOperationAppend { + _type: 'append'; +} +export interface DynamoDBOperationPrepend { + _type: 'prepend'; +} +export interface DynamoDBOperationUpdateListItem { + _type: 'updateListItem'; +} +type DynamoDBOperation = { + /** + * Helper function to add a new attribute item when updating DynamoDB. + * @param payload + * @example + * ```typescript + * import { update, operations } from '@aws-appsync/utils/dynamodb'; + * export function request(ctx) { + * const updateObj = { + * address: operations.add({ + * street1: '123 Main St', + * city: 'New York', + * zip: '10001', + * }), + * }; + * return update({ key: { id: 1 }, update: updateObj }); + * } + * ``` + */ + add(payload: T): DynamoDBOperationAdd; + /** + * Helper function to remove an attribute from the item when updating DynamoDB. + */ + remove(): DynamoDBOperationRemove; + /** + * Helper function to replace an existing attribute when updating an item in DynamoDB. + * This is useful when you want to update entire object or sub object in the attribute and not just + * the keys in the payload + * @param payload + */ + replace(payload: T): DynamoDBOperationReplace; + /** + * Helper function to increment the existing attribute value in the item when updating DynamoDB. + * @optional @param by number to add to the existing attribute value. + */ + increment(by?: number): DynamoDBOperationIncrement; + /** + * Helper function to decrement the existing attribute value in the item when updating DynamoDB. + * @optional @param by number to subtract to the existing attribute value. + */ + decrement(by?: number): DynamoDBOperationDecrement; + /** + * Helper function to append to the existing list in DynamoDB + * @param payload array of items to append to the existing attribute value. + */ + append(payload: T[]): DynamoDBOperationAppend; + /** + * Helper function to prepend to the existing list in DynamoDB + * @param payload array of items to prepend to the existing attribute value. 
+ */ + prepend(payload: T[]): DynamoDBOperationPrepend; + /** + * Helper function to replace an item in list + * @param payload item to update in the list in DynamoDB + * @param index index of the item to update in the list + */ + updateListItem(payload: T, index: number): DynamoDBOperationUpdateListItem; +}; +export type DynamoDBUpdateArrayOperators = DynamoDBOperationAdd | DynamoDBOperationReplace | DynamoDBOperationRemove | DynamoDBOperationAppend | DynamoDBOperationPrepend | DynamoDBOperationUpdateListItem[] | T[]; +export type RemoveOpOnOptional = T[K] extends {} ? never : DynamoDBOperationRemove; +export type DynamoDBUpdateObjectShallow = T extends Record ? { + [k in keyof T]?: DynamoDBUpdateObjectShallow> | RemoveOpOnOptional; +} | DynamoDBOperationAdd | DynamoDBOperationReplace : T extends (infer U)[] ? DynamoDBUpdateArrayOperators | T : T extends number ? T | DynamoDBOperationIncrement | DynamoDBOperationDecrement : T | DynamoDBOperationAdd | DynamoDBOperationReplace; +export type DynamoDBUpdateObject = T extends Record ? { + [k in keyof T]?: DynamoDBUpdateObjectShallow> | RemoveOpOnOptional; +} : {}; +export type DynamoDBUpdateInput = { + /** + * A required parameter that specifies the key of the item in DynamoDB that is being updated. + * DynamoDB items may have a single hash key, or a hash key and sort key. + * @example + * If a table user has only hash key with user id then key would look like this + * ```typescript + * type User = { + * id: number; + * name: string; + * age: number; + * isVerified: boolean; + * } + * const key: DynamoDBKey = { + * id: 1, + * } + * ``` + * + * If the table user has a hash key (id) and sort key(name) then key would + * look like this + * ```typescript + * type User = { + * id: number; + * name: string; + * age: number; + * isVerified: boolean; + * friendsIds: string[] + * } + * const key: DynamoDBKey = { + * id: 1, + * name: 'XXXXXXXXX', + * } + * ``` + */ + key: DynamoDBKey; + /** + * An object that specifies the attributes to be updated and the new values for them. + * The update object can be used `add`, `remove`,`replace`, `increment`, `decrement`, `append`, `prepend`, `updateListItem` + * @example + * Given the following User table + * ```typescript + * type User = { + * id: string; + * name: string; + * age?: number; + * address?: { + * street1: string; + * street2?: string; + * city: string; + * zip: string; + * }; + * friendsCount: number; + * isVerified: boolean; + * friendsIds: number[]; + * } + * ``` + * ---------- + * To add a address to the table the update object would look like this + * ```typescript + * import { update, operations } from '@aws-appsync/utils/dynamodb'; + * const updateObj: DynamoDBUpdateObject = { + * address: operations.add({ + * street1: '123 Main St', + * city: 'New York', + * zip: '10001', + * }), + * }; + * update({key: { id: 1 }, update: updateObj}); + * ``` + * ---------- + * to remove address + * ```typescript + * import { update, operations } from '@aws-appsync/utils/dynamodb'; + * const updateObj: DynamoDBUpdateObject = { + * address: operations.remove(), + * }; + * update({key: { id: 1 }, update: updateObj}); * ``` + * ---------- + * to replace address + * ```typescript + * import { update, operations } from '@aws-appsync/utils/dynamodb'; + * const updateObj: DynamoDBUpdateObject = { + * address: operations.replace({ + * street1: '123 Main St', + * street2: 'Apt. 
1', + * city: 'New York', + * zip: '10001', + * }), + * }; + * update({key: { id: 1 }, update: updateObj}); + * ``` + * ---------- + * to increment friendsCount by 10 + * ```typescript + * import { update, operations } from '@aws-appsync/utils/dynamodb'; + * const updateObj: DynamoDBUpdateObject = { + * friendsCount: operations.increment(10), + * }; + * update({key: { id: 1 }, update: updateObj}); + * ``` + * ---------- + * to decrement friendsCount by 10 + * ```typescript + * import { update, operations } from '@aws-appsync/utils/dynamodb'; + * const updateObj: DynamoDBUpdateObject = { + * friendsCount: operations.decrement(10), + * }; + * update({key: { id: 1 }, update: updateObj}); + * ``` + * ---------- + * to append friendsIds with friendId + * ```typescript + * import { update, operations } from '@aws-appsync/utils/dynamodb'; + * const newFriendIds = [101, 104, 111]; + * const updateObj: DynamoDBUpdateObject = { + * friendsIds: operations.append(newFriendIds), + * }; + * update({key: { id: 1 }, update: updateObj}); + *``` + * ---------- + * to prepend friendsIds with friendId + * ```typescript + * import { update, operations } from '@aws-appsync/utils/dynamodb'; + * const newFriendIds = [101, 104, 111]; + * const updateObj: DynamoDBUpdateObject = { + * friendsIds: operations.prepend(newFriendIds), + * }; + * update({key: { id: 1 }, update: updateObj}); + *``` + * ---------- + * to to update 2nd and 3rd item in the friends list + * ```typescript + * import { update, operations } from '@aws-appsync/utils/dynamodb'; + * const newFriendIds = [ + * operations.updateListItem('102', 1), + * operations.updateListItem('112', 2), + * ]; + * const updateObj: DynamoDBUpdateObject = { + * friendsIds: newFriendIds + * } + * update({key: { id: 1 }, update: updateObj}); + * ``` + */ + update: DynamoDBUpdateObject; + /** + * When you update an objects in DynamoDB by using the update method, you can optionally + * specify a condition expression that controls whether the request should succeed + * or not, based on the state of the object already in DynamoDB before the operation + * is performed. + * + * @See [Condition expression](https://docs.aws.amazon.com/appsync/latest/devguide/resolver-mapping-template-reference-dynamodb.html#aws-appsync-resolver-mapping-template-reference-dynamodb-condition-expressions) + * @See [Condition expression syntax](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.SpecifyingConditions.html) + * @example + * The following DeleteItem a condition expression that allows the operation succeed only if the owner of the document matches + * the user making the request. + * ```typescript + * type User = { + * id: string; + * name: string; + * age?: number; + * address?: { + * street1: string; + * street2?: string; + * city: string; + * zip: string; + * }; + * friendsCount: number; + * isVerified: boolean; + * friendsIds: number[]; + * } + * + * const condition: DynamoDBFilterObject = { + * name: { eq: 'XXXXXXXXXXXXXXXX' }, + * } + * ddbHelper.update({ + * key: { + * id: 'XXXXXXXXXXXXXXXX', + * }, + * condition, + * update: { + * isVerified: true, + * } + * }); + *``` + */ + condition?: DynamoDBFilterObject; + /** + * When enabled, customPartitionKey value modifies the format of the ds_sk and ds_pk + * records used by the delta sync table when versioning has been enabled. + * When enabled, the processing of the populateIndexFields entry is also enabled. 
+ * @see[Conflict detection and sync](https://docs.aws.amazon.com/appsync/latest/devguide/conflict-detection-and-sync.html) + */ + customPartitionKey?: string; + /** + * A boolean value that, when enabled along with the customPartitionKey, + * creates new entries for each record in the delta sync table, specifically + * in the gsi_ds_pk and gsi_ds_sk columns. For more information, + * @see[Conflict detection and sync](https://docs.aws.amazon.com/appsync/latest/devguide/conflict-detection-and-sync.html) + */ + populateIndexFields?: boolean; + _version?: number; +}; +export type DynamoDBSyncInput = { + /** + * The partition key of the Base table used when performing a Sync operation. + * This field allows a Sync operation to be performed when the table utilizes a + * custom partition key. This is an optional field. + */ + basePartitionKey?: string; + /** + * The index used for the Sync operation. This index is required to enable a Sync operation on the + * whole delta store table when the table uses a custom partition key. + * The Sync operation will be performed on the GSI (created on gsi_ds_pk and gsi_ds_sk). + * This field is optional. + */ + deltaIndexName?: string; + /** + * optional maximum number of items to evaluate at a single time. + * If omitted, the default limit will be set to 100 items. + * The maximum value for this field is 1000 items. + */ + limit?: number | null; + nextToken?: string | null; + /** + * The moment, in epoch milliseconds, when the last successful Sync operation started. + * If specified, only items that have changed after lastSync are returned. + * This field is optional, and should only be populated after retrieving all pages + * from an initial Sync operation. If omitted, results from the Base table will be + * returned, otherwise, results from the Delta table will be returned. + */ + lastSync?: number; + /** + * optional filter to apply to the results after retrieving it from the table + */ + filter?: DynamoDBFilterObject | null; +}; +/** + * Generates DynamoDBQueryRequest object to make a [Query](https://docs.aws.amazon.com/appsync/latest/devguide/js-resolver-reference-dynamodb.html#js-aws-appsync-resolver-reference-dynamodb-query) + * request to DynamoDB. + * @param payload {QueryInput} - Query input object + * @returns {DynamoDBQueryRequest} + * @example + * ```typescript + * import * as ddb from '@aws-appsync/utils/dynamodb'; + * + * export function request(ctx) { + * return ddb.query({ query: { id: { eq: ctx.args.id } } }); + * } + * ``` + */ +export declare function query(payload: QueryInput): DynamoDBQueryRequest; +/** + * Generates DynamoDBScanRequest to make a [Scan](https://docs.aws.amazon.com/appsync/latest/devguide/js-resolver-reference-dynamodb.html#js-aws-appsync-resolver-reference-dynamodb-scan) request + * to DynamoDB. + * @param payload lets you specify + * @returns { DynamoDBScanRequest } + */ +export declare function scan(payload: ScanInput): DynamoDBScanRequest; +/** + * Generates a DynamoDBUpdateItemRequest to make a [UpdateItem](https://docs.aws.amazon.com/appsync/latest/devguide/js-resolver-reference-dynamodb.html#js-aws-appsync-resolver-reference-dynamodb-updateitem) + * request to DynamoDB. 
+ * @param payload
+ */
+export declare function update(payload: DynamoDBUpdateInput): DynamoDBUpdateItemRequest;
+/**
+ * Generates a DynamoDBDeleteItemRequest object to make a [DeleteItem](https://docs.aws.amazon.com/appsync/latest/devguide/js-resolver-reference-dynamodb.html#js-aws-appsync-resolver-reference-dynamodb-deleteitem)
+ * request to DynamoDB.
+ * @param payload
+ * @returns { DynamoDBDeleteItemRequest }
+ * @example
+ * ```typescript
+ * import * as ddb from '@aws-appsync/utils/dynamodb';
+ *
+ * export function request(ctx) {
+ *   return ddb.remove({ key: { id: ctx.args.id } });
+ * }
+ * ```
+ */
+export declare function remove(payload: RemoveInput): DynamoDBDeleteItemRequest;
+/**
+ * Generates a DynamoDBPutItemRequest object to make a [PutItem](https://docs.aws.amazon.com/appsync/latest/devguide/js-resolver-reference-dynamodb.html#js-aws-appsync-resolver-reference-dynamodb-putitem)
+ * request to DynamoDB.
+ * @param payload
+ * @returns DynamoDBPutItemRequest
+ * @example
+ * ```typescript
+ * import * as ddb from '@aws-appsync/utils/dynamodb';
+ *
+ * export function request(ctx) {
+ *   return ddb.put({ key: { id: util.autoId() }, item: ctx.args });
+ * }
+ * ```
+ */
+export declare function put(payload: PutInput): DynamoDBPutItemRequest;
+/**
+ * Generates a DynamoDBGetItemRequest object to make a [GetItem](https://docs.aws.amazon.com/appsync/latest/devguide/js-resolver-reference-dynamodb.html#js-aws-appsync-resolver-reference-dynamodb-getitem)
+ * request to DynamoDB.
+ * @param payload
+ * @returns DynamoDBGetItemRequest
+ * @example
+ * ```typescript
+ * import { get } from '@aws-appsync/utils/dynamodb';
+ * export function request(ctx) {
+ *   return get({ key: { id: ctx.args.id } });
+ * }
+ * ```
+ */
+export declare function get(payload: GetInput): DynamoDBGetItemRequest;
+/**
+ * Generates a DynamoDBSyncRequest object to make a [Sync](https://docs.aws.amazon.com/appsync/latest/devguide/js-resolver-reference-dynamodb.html#js-aws-appsync-resolver-reference-dynamodb-sync)
+ * request and receive only data altered since the last query (the delta updates). Requests can only be made to versioned DynamoDB data sources.
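Building on the `put` declaration above, a common pattern is a create-if-not-exists write that rejects the request when the key is already taken. The sketch below is an assumption-level example (the `createdAt` attribute and the error handling are not mandated by the library).

```typescript
// Sketch: create-if-not-exists; attribute names are illustrative only.
import { util, Context } from '@aws-appsync/utils';
import * as ddb from '@aws-appsync/utils/dynamodb';

export function request(ctx: Context) {
  const id = util.autoId();
  return ddb.put({
    key: { id },
    item: { ...ctx.args, createdAt: util.time.nowISO8601() },
    // Reject the write if an item with this key already exists.
    condition: { id: { attributeExists: false } },
  });
}

export function response(ctx: Context) {
  if (ctx.error) {
    util.error(ctx.error.message, ctx.error.type);
  }
  return ctx.result;
}
```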
+ * @param payload + * @returns DynamoDBGetItemRequest + * @example + * ```typescript + * import * as ddb from '@aws-appsync/utils/dynamodb'; + * + * export function request(ctx) { + * const { limit = 10, nextToken, lastSync } = ctx.args; + * return ddb.sync({ limit, nextToken, lastSync }); + * } + * ``` + */ +export declare function sync(payload: DynamoDBSyncInput): DynamoDBSyncRequest; +export declare const operations: DynamoDBOperation; +export {}; diff --git a/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-helpers.js b/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-helpers.js new file mode 100644 index 00000000..63e1e405 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-helpers.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=dynamodb-helpers.js.map \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-helpers.js.map b/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-helpers.js.map new file mode 100644 index 00000000..3c20a62e --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-helpers.js.map @@ -0,0 +1 @@ +{"version":3,"file":"dynamodb-helpers.js","sourceRoot":"","sources":["../src/dynamodb-helpers.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-utils.d.ts b/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-utils.d.ts new file mode 100644 index 00000000..e0fe5ce2 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-utils.d.ts @@ -0,0 +1,309 @@ +export type DynamoDBStringResult = { + S: string; +}; +export type DynamoDBStringSetResult = { + SS: string[]; +}; +export type DynamoDBNumberResult = { + N: string; +}; +export type DynamoDBNumberSetResult = { + NS: string[]; +}; +export type DynamoDBBinaryResult = { + B: string; +}; +export type DynamoDBBinarySetResult = { + BS: string[]; +}; +export type DynamoDBBooleanResult = { + BOOL: boolean; +}; +export type DynamoDBNullResult = { + NULL: null; +}; +export type DynamoDBReturnType = T extends string ? DynamoDBStringResult : T extends number ? DynamoDBNumberResult : T extends boolean ? DynamoDBBooleanResult : T extends null ? DynamoDBNullResult : T extends Record ? DynamoDBMapResult : T extends Array ? DynamoDBListResult : null; +export type DynamoDBMapResult> = { + M: { + [K in keyof T]: DynamoDBReturnType; + }; +}; +export type DynamoDBListResult = { + L: DynamoDBReturnType[]; +}; +export type OptionalInputType = T | null | undefined; +export type DynamodbUtil = { + /** + * General object conversion tool for DynamoDB that converts input objects to the appropriate + * DynamoDB representation. It's opinionated about how it represents some types: for instance it will + * use lists ("L") rather than sets ("SS", "NS", "BS"). This returns an object that describes + * the DynamoDB attribute value. 
+ * + * String example: + * Input: + * ``` + * util.dynamodb.toDynamoDB("foo") + `* `` + * Output: + * ``` + * { "S" : "foo" } + * ``` + * + * Object example: + * Input: + * ``` + * util.dynamodb.toDynamoDB({ "foo": "bar", "baz" : 1234, "beep": [ "boop" ] }) + * ``` + * Output: + * ``` + * { + * "M": { + * "foo": { "S": "bar" }, + * "baz": { "N": 1234 }, + * "beep": { + * "L": [{ "S": "boop" }] + * } + * } + * } + * ``` + * @param obj + * @returns {object|null} - DynamoDB attribute object + */ + toDynamoDB(obj: T): DynamoDBReturnType; + /** + * Convert an input string to the DynamoDB string format. This returns an object that describes + * the DynamoDB attribute value. + * @param {string|null|undefined} obj Object to convert to DynamoDB attribute value + * @returns {DynamoDBReturnType} - DynamoDB attribute object as string + */ + toString(obj: OptionalInputType): DynamoDBStringResult | null; + /** + * Converts a lists with Strings to the DynamoDB string set format. This returns an object that + * describes the DynamoDB attribute value. + + * Input: + * ``` + * util.dynamodb.toStringSet([ "foo", "bar", "baz" ]) + * ``` + * Output: + * ``` + * { "SS" : [ "foo", "bar", "baz" ] } + * ``` + * @param list - List to convert to DynamoDB attribute value + * @returns {any} - DynamoDB attribute object + */ + toStringSet(list: OptionalInputType): DynamoDBStringSetResult | null; + /** + * Converts a number to the DynamoDB number format. This returns an object that describes the + * DynamoDB attribute value. + * Input: + * ``` + * util.dynamodb.toNumber(12345) + * ``` + * Output: + * ```` + * { "N" : 12345 } + * ``` + * @param {number} num - Number to convert to DynamoDB attribute value + * @returns {DynamoDBNumberResult} - DynamoDB attribute object + */ + toNumber(num: OptionalInputType): DynamoDBNumberResult | null; + /** + * Converts a list of numbers to the DynamoDB number set format. This returns an object that + * describes the DynamoDB attribute value. + * ``` + * Input: util.dynamodb.toNumberSet([ 1, 23, 4.56 ]) + * Output: { "NS" : [ 1, 23, 4.56 ] } + * ``` + * @param {number[]} numbers - Numbers to convert to DynamoDB number set + * @returns {DynamoDBNumberSetResult} - DynamoDB attribute object + */ + toNumberSet(numbers: OptionalInputType): DynamoDBNumberSetResult | null; + /** + * Converts binary data encoded as a base64 string to DynamoDB binary format. This returns an + * object that describes the DynamoDB attribute value. + * Input: + * ``` + * util.dynamodb.toBinary("foo") + * ``` + * Output: + * ``` + * { "B" : "foo" } + * ``` + * @param {string} value - Base64 encoded string + * @returns {DynamoDBBinaryResult} - DynamoDB attribute object + */ + toBinary(value: OptionalInputType): DynamoDBBinaryResult | null; + /** + * Converts a list of binary data encoded as base64 strings to DynamoDB binary set format. This + * returns an object that describes the DynamoDB attribute value. + * Input: + * ``` + * util.dynamodb.toBinarySet([ "foo", "bar", "baz" ]) + * ``` + * Output: + * ``` + * { "BS" : [ "foo", "bar", "baz" ] } + * ``` + * @param {string[]} values - Base64 encoded string array + * @returns {DynamoDBBinarySetResult} - DynamoDB attribute object + */ + toBinarySet(values: OptionalInputType): DynamoDBBinarySetResult | null; + /** + * Converts a Boolean to the appropriate DynamoDB Boolean format. This returns an object that + * describes the DynamoDB attribute value. 
+ * Input: + * ``` + * util.dynamodb.toBoolean(true) + * ``` + * Output: + * ``` + * { "BOOL" : true } + * ``` + * @param {boolean} value - value to convert to DynamoDB attribute + * @returns {DynamoDBBooleanResult} - DynamoDB attribute object + */ + toBoolean(value: OptionalInputType): DynamoDBBooleanResult | null; + /** + * Converts a Boolean to the appropriate DynamoDB Boolean format. This returns an object that + * describes the DynamoDB attribute value. + * Input: + * ``` + * util.dynamodb.toNull() + * ``` + * Output: + * ``` + * { "NULL" : null } + * ``` + * @returns {DynamoDBNullResult} - DynamoDB attribute object + */ + toNull(): DynamoDBNullResult; + /** + * Converts a list of object to DynamoDB list format. Each item in the list is also converted + * to its appropriate DynamoDB format. It's opinionated about how it represents some of the + * nested objects: e.g., it will use lists ("L") rather than sets ("SS", "NS", "BS"). This + * returns an object that describes the DynamoDB attribute value. + * Input: + * ```util.dynamodb.toList([ "foo", 123, { "bar" : "baz" } ])``` + * Output: + * ``` + * { + * "L": [ + * { "S": "foo" }, + * { "N": 123 }, + * { + * "M": { + * "bar": { "S": "baz" } + * } + * } + * ] + * } + * ``` + * @param {unknown[]} value - value to convert to DynamoDB attribute + * @returns {DynamoDBListResult} - DynamoDB attribute object + */ + toList(value: OptionalInputType): T extends Array ? DynamoDBListResult : null; + /** + * Converts a map to DynamoDB map format. Each value in the map is also converted to its + * appropriate DynamoDB format. It's opinionated about how it represents some of the nested + * objects: e.g., it will use lists ("L") rather than sets ("SS", "NS", "BS"). This returns + * an object that describes the DynamoDB attribute value. + * Input: + * ``` + * util.dynamodb.toMap({ "foo": "bar", "baz" : 1234, "beep": [ "boop"] }) + * ``` + * Output: + * ``` + * { + * "M": { + * "foo": { "S": "bar" }, + * "baz": { "N": 1234 }, + * "beep": { + * "L": [{ "S": "boop" }] + * } + * } + * } + * ``` + * @param {Record} value - value to convert to DynamoDB attribute + * @returns {{ M: Record}} - DynamoDB attribute object + */ + toMap(value: T): T extends Record ? DynamoDBMapResult : null; + /** + * Creates a copy of the map where each value has been converted to its appropriate DynamoDB + * format. It's opinionated about how it represents some of the nested objects: for instance it will + * use lists ("L") rather than sets ("SS", "NS", "BS"). + * ``` + * Input: + * ``` + * util.dynamodb.toMapValues({ "foo": "bar", "baz" : 1234, "beep": [ "boop"] }) + * ``` + * Output: + * ``` + * { + * "foo": { "S": "bar" }, + * "baz": { "N": 1234 }, + * "beep": { + * "L": [{ "S": "boop" }] + * } + * } + * ``` + * Note: this is slightly different to `util.dynamodb.toMap(Map)` as it returns only the + * contents of the DynamoDB attribute value, but not the whole attribute value itself. For + * example, the following statements are exactly the same: + * ``` + * util.dynamodb.toMapValues(obj) + * util.dynamodb.toMap(obj)["M"] + * ``` + * + * @param {Readonly>} value - value to convert to DynamoDB attribute + * @returns {Record } - DynamoDB attribute object + */ + toMapValues(value: T): T extends Record ? { + [K in keyof T]: DynamoDBReturnType; + } : null; + /** + * Converts the key, bucket and region into the DynamoDB S3 Object representation. This returns + * an object that describes the DynamoDB attribute value. 
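The converters described above (`toMap`, `toMapValues`, and friends) are what you reach for when hand-building a raw DynamoDB request instead of using the `@aws-appsync/utils/dynamodb` helpers. A minimal sketch follows; the `name` attribute is a hypothetical example field.

```typescript
// Sketch: a raw PutItem request built with util.dynamodb converters.
import { util, Context, DynamoDBPutItemRequest } from '@aws-appsync/utils';

export function request(ctx: Context): DynamoDBPutItemRequest {
  return {
    operation: 'PutItem',
    // toMapValues returns the attribute map contents, which is exactly
    // the shape that `key` and `attributeValues` expect.
    key: util.dynamodb.toMapValues({ id: util.autoId() }),
    attributeValues: util.dynamodb.toMapValues({
      name: ctx.args.name,
      createdAt: util.time.nowISO8601(),
    }),
  };
}
```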
+ * Input: + * ``` + * util.dynamodb.toS3Object("foo", "bar", "baz") + * ``` + * Output: + * ``` + * { "S" : "{ \"s3\" : { \"key\" : \"foo\", \"bucket\" : \"bar", \"region\" : \"baz" } }" } + * ``` + * @param {string} key - S3 object key + * @param {string} bucket - S3 bucket + * @param {string} region - AWS Region + * @returns {DynamoDBStringResult} - DynamoDB attribute object + */ + toS3Object(key: string, bucket: string, region: string): DynamoDBStringResult | null; + /** + * Converts the key, bucket, region and optional version into the DynamoDB S3 Object + * representation. This returns an object that describes the DynamoDB attribute value. + * Input: + * ``` + * util.dynamodb.toS3Object("foo", "bar", "baz", "beep") + * ``` + * Output: + * ``` + * { + * "S": "{ \"s3\" : { \"key\" : \"foo\", \"bucket\" : \"bar\", \"region\" : \"baz\", \"version\" = \"beep\" } }" + * } + * + * ``` + * @param {string} key - S3 object key + * @param {string} bucket - S3 bucket + * @param {string} region - AWS Region + * @param {string} version - S3 object version + * @returns {DynamoDBStringResult} - DynamoDB attribute object + */ + toS3Object(key: string, bucket: string, region: string, version: string): DynamoDBStringResult | null; + /** + * Accepts the string value of a DynamoDB S3 Object and returns a map that contains the key, + * bucket, region and optional version. + * @param s3ObjectString S3 object key + * @returns {any} DynamoDB attribute object + */ + fromS3ObjectJson(s3ObjectString: string): any; +}; diff --git a/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-utils.js b/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-utils.js new file mode 100644 index 00000000..a46c52a9 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-utils.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const ATTRIBUTE_TYPE_MAP = { + _null: 'NULL', + string: 'S', + stringSet: 'SS', + number: 'N', + numberSet: 'NS', + binary: 'B', + binarySet: 'BS', + boolean: 'BOOL', + list: 'L', + map: 'M', +}; +//# sourceMappingURL=dynamodb-utils.js.map \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-utils.js.map b/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-utils.js.map new file mode 100644 index 00000000..6ec0f58c --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/dynamodb-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"dynamodb-utils.js","sourceRoot":"","sources":["../src/dynamodb-utils.ts"],"names":[],"mappings":";;AAAA,MAAM,kBAAkB,GAAG;IACzB,KAAK,EAAE,MAAM;IACb,MAAM,EAAE,GAAG;IACX,SAAS,EAAE,IAAI;IACf,MAAM,EAAE,GAAG;IACX,SAAS,EAAE,IAAI;IACf,MAAM,EAAE,GAAG;IACX,SAAS,EAAE,IAAI;IACf,OAAO,EAAE,MAAM;IACf,IAAI,EAAE,GAAG;IACT,GAAG,EAAE,GAAG;CACA,CAAC"} \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/http-utils.d.ts b/graphql/node_modules/@aws-appsync/utils/lib/http-utils.d.ts new file mode 100644 index 00000000..5fba5ac3 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/http-utils.d.ts @@ -0,0 +1,38 @@ +export type HttpUtils = { + /** + * Copies the header from the map without the restricted set of HTTP headers. You can use this + * to forward request headers to your downstream HTTP endpoint. + * @param {any} headers - Headers object + * @returns {any} - Copy of headers minus restricted HTTP headers + */ + copyHeaders(headers: T): any; + /** + * Adds a single custom header with the name (String) and value (Object) of the response. 
The + * following limitations apply: + * * Header names can't match any of the existing or restricted AWS or AWS AppSync headers. + * * Header names can't start with restricted prefixes, such as `x-amzn-` or `x-amz-`. + * * The size of custom response headers can't exceed 4 KB. This includes header names and values. + * * You should define each response header once per GraphQL operation. However, if you define a + * custom header with the same name multiple times, the most recent definition appears in the + * response. All headers count towards the header size limit regardless of naming. + * ``` + * util.http.addResponseHeader("itemsCount", 7) + * util.http.addResponseHeader("render", context.args.render) + * ``` + * @param {string} name - Header name + * @param {any} value - Header value + */ + addResponseHeader(name: string, value: any): void; + /** + * Adds multiple response headers to the response from the specified map of names (String) and + * values (Object). + @see {@link HttpUtils.addResponseHeader} for quick reference to the restrictions. + * also apply to this method. + * ``` + * const headersMap = {headerInt: 12, headerString: 'stringValue', headerObject: {field1: 7, field2: 'string'}} + * util.http.addResponseHeaders(headersMap) + * ``` + * @param {any} headers - Headers map + */ + addResponseHeaders(headers: Record): void; +}; diff --git a/graphql/node_modules/@aws-appsync/utils/lib/http-utils.js b/graphql/node_modules/@aws-appsync/utils/lib/http-utils.js new file mode 100644 index 00000000..a72d3b73 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/http-utils.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=http-utils.js.map \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/http-utils.js.map b/graphql/node_modules/@aws-appsync/utils/lib/http-utils.js.map new file mode 100644 index 00000000..4a65ba4f --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/http-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"http-utils.js","sourceRoot":"","sources":["../src/http-utils.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/index.d.ts b/graphql/node_modules/@aws-appsync/utils/lib/index.d.ts new file mode 100644 index 00000000..fa213f73 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/index.d.ts @@ -0,0 +1,482 @@ +export * from './resolver-return-types'; +import { DynamodbUtil } from './dynamodb-utils'; +import { RdsUtil } from './rds-utils'; +import { StringUtils } from './string-utils'; +import { TimeUtils } from './time-utils'; +import { HttpUtils } from './http-utils'; +import { XmlUtils } from './xml-utils'; +import { MathUtils } from './math-utils'; +import { TransformUtils } from './transform-utils'; +import { SubscriptionFilter } from './subscription-filter-types'; +type SubscriptionInvalidationObject = { + subscriptionField: string; + payload: Record; +}; +/** + * The Util object contains general utility methods to help you work with data. + */ +export type Util = { + /** + * Returns the input string as a JavaScript escaped string. + * @param {string} value - String value to escape + * @returns {string} - JavaScript escaped string + */ + escapeJavaScript(value: string): string; + /** + * Returns the input string as an `application/x-www-form-urlencoded` encoded string. 
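In practice the header helpers described above are called from a response handler. A short sketch, subject to the limits listed above; the header names and the shape of `ctx.result` are arbitrary examples, not part of the library.

```typescript
// Sketch: attaching custom response headers from a response handler.
import { util, Context } from '@aws-appsync/utils';

export function response(ctx: Context) {
  util.http.addResponseHeader('x-request-origin', 'appsync');
  const items = ctx.result && ctx.result.items ? ctx.result.items : [];
  util.http.addResponseHeaders({
    'x-items-count': String(items.length),
  });
  return ctx.result;
}
```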
+ * @param {string} value - string value to encode + * @returns {string} - Url encoded string + */ + urlEncode(value: string): string; + /** + * Decodes an `application/x-www-form-urlencoded` encoded string back to its non-encoded form. + * @param {string} value - String value to decode + * @returns {string} - Url decoded string + */ + urlDecode(value: string): string; + /** + * Encodes string to a base64 string + * @param {string} value - string to be encoded + * @returns {string} - base64 encode string + */ + base64Encode(bytes: string): string; + /** + * Decodes a base64 encoded string + * @param {string} value - base64 encoded string + * @returns {string} - base64 decoded string + */ + base64Decode(value: string): string; + /** + * Returns a 128-bit randomly generated UUID. + * @returns {string} - Randomly generated UUID + */ + autoId(): string; + /** + * Returns a 128-bit randomly generated ULID (Universally Unique Lexicographically Sortable + * Identifier). + * @returns {string} - Randomly generated UUID + */ + autoUlid(): string; + /** + * Returns a 128-bit randomly generated KSUID (K-Sortable Unique Identifier) base62 encoded as + * a String with a length of 27. + * @returns {string} - Randomly generated UUID + */ + autoKsuid(): string; + /** + * Throws Unauthorized for the field being resolved. Use this in request or response mapping + * templates to determine whether to allow the caller to resolve the field. + */ + unauthorized(): never; + /** + * Throws a custom error. Use this in request or response mapping templates to detect an error + * with the request or with the invocation result. You can also specify an `errorType` and a + * `data` field, and an `errorInfo` field. The `data` value will be added to the corresponding + * `error` block inside `errors` in the GraphQL response. Note: `data` will be filtered based + * on the query selection set. The `errorInfo` value will be added to the corresponding `error` + * block inside `errors` in the GraphQL response. Note: `errorInfo` will NOT be filtered based + * on the query selection set. + * @param {string} msg - Custom error message + * @param {string} errorType? - Custom error type + * @param {any} data? - Custom data object + * @param {any} errorInfo? - Error info object + */ + error(msg: string, errorType?: string, data?: any, errorInfo?: any): never; + /** + * Appends a custom error. Use this in request or response mapping templates to detect an error + * with the request or with the invocation result. You can also specify an `errorType` and a + * `data` field, and an `errorInfo` field. The `data` value will be added to the corresponding + * `error` block inside `errors` in the GraphQL response. Note: `data` will be filtered based + * on the query selection set. The `errorInfo` value will be added to the corresponding `error` + * block inside `errors` in the GraphQL response. Note: `errorInfo` will NOT be filtered based + * on the query selection set. Unlike `Util.error`, the template evaluation will not be + * interuppted, so that data can be returned to the caller. + * @param {string} msg - Custom error message + * @param {string} errorType? - Custom error type + * @param {any} data? - Custom data object + * @param {any} errorInfo? - Error info object + * @returns void + */ + appendError(msg: string, errorType?: string, data?: any, errorInfo?: any): void; + /** + * Returns true if the specified pattern in the first argument matches the supplied data in the + * second argument. 
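The difference between `util.error` and `util.appendError` described above is easiest to see side by side: the first aborts evaluation of the field, the second records the error but still lets data flow back. A minimal sketch, with a hypothetical "NotFound" error type:

```typescript
// Sketch: fail hard on data source errors, fail soft on missing data.
import { util, Context } from '@aws-appsync/utils';

export function response(ctx: Context) {
  if (ctx.error) {
    // Halts evaluation; message and type appear in the GraphQL `errors` block.
    util.error(ctx.error.message, ctx.error.type);
  }
  if (!ctx.result) {
    // Records an error but continues, so partial data can still be returned.
    util.appendError('Item not found', 'NotFound');
  }
  return ctx.result;
}
```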
The pattern must be a regular expression such as `Util.matches("a*b", + * "aaaaab")`. The functionality is based on Pattern, which you can reference for further + * documentation. + * @param {string} pattern - Regex pattern to match + * @param {string} value - Value to match pattern against + * @returns {boolean} - Indicates match was found + */ + matches(pattern: string, value: string): boolean; + /** + * Returns a String describing the multi-auth type being used by a request, returning back + * either "IAM Authorization", "User Pool Authorization", "Open ID Connect Authorization", or + * "API Key Authorization". + * @returns {string} - Auth type + */ + authType(): string; + /** + * The `util.time` variable contains datetime methods to help generate timestamps, convert + * between datetime formats, and parse datetime strings. The syntax for datetime formats is + * based on DateTimeFormatter which you can reference for further documentation. Below we + * provide some examples, as well as a list of available methods and descriptions. + */ + time: TimeUtils; + /** + * `util.dynamodb` contains helper methods that make it easier to write and read data to Amazon + * DynamoDB, such as automatic type mapping and formatting. These methods are designed to make + * mapping primitive types and Lists to the proper DynamoDB input format automatically, which + * is a Map of the format `{ "TYPE" : VALUE }`. + */ + dynamodb: DynamodbUtil; + /** + * `util.rds` contains helper methods that makes it easier to write and read data from Amazon + * RDS. + */ + rds: RdsUtil; + /** + * The `util.http` utility provides helper methods that you can use to manage HTTP request + * parameters and to add response headers. + */ + http: HttpUtils; + /** + * `util.xml` contains helper methods that can make it easier to translate XML responses + * to JSON or a Dictionary. + */ + xml: XmlUtils; + /** + * `util.transform` contains helper methods that make it easier to perform complex operations + * against data sources, such as Amazon DynamoDB filter operations. + */ + transform: TransformUtils; + /** + * `util.math1 contains methods to help with common Math operations. + */ + math: MathUtils; + /** + * `util.str` contains methods to help with common String operations. + */ + str: StringUtils; +}; +type Context | unknown = any, TStash extends Record = Record, TPrev extends Record | undefined = any, TSource extends Record | undefined = any, TResult extends any = any> = { + /** + * A map that contains env variables for GraphQLApi. + */ + readonly env: Record; + /** + * A map that contains all GraphQL arguments for this field. + */ + arguments: TArgs; + /** + * A map that contains all GraphQL arguments for this field. + */ + args: TArgs; + /** + * An object that contains information about the caller. For more information about the + * structure of this field, see Identity. + */ + identity: Identity; + /** + * A map that contains the resolution of the parent field. + */ + source?: TSource; + /** + * Contains potential error generated by a request. + */ + error?: { + /** + * Details about the message + */ + message: string; + /** + * type of error + */ + type: string; + }; + /** + * The stash is a map that is made available inside each resolver and function mapping + * template. The same stash instance lives through a single resolver execution. This means + * that you can use the stash to pass arbitrary data across request and response mapping + * templates, and across functions in a pipeline resolver. 
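The `Context` type declared above is generic over the argument, stash, previous-result, source, and result shapes (in that order, as reconstructed from the declaration). A sketch of how a resolver might use it; the `GetGameArgs` and `Game` shapes are hypothetical and only illustrate the typing, not this project's schema.

```typescript
// Sketch: typing ctx.args and ctx.result via the Context generics.
import { Context } from '@aws-appsync/utils';
import * as ddb from '@aws-appsync/utils/dynamodb';

type GetGameArgs = { id: string };
type Game = { id: string; name: string; createdAt: string };

export function request(ctx: Context<GetGameArgs>) {
  return ddb.get({ key: { id: ctx.args.id } });
}

// Generic order assumed: TArgs, TStash, TPrev, TSource, TResult.
export function response(
  ctx: Context<GetGameArgs, Record<string, any>, any, any, Game>
): Game {
  return ctx.result;
}
```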
The stash exposes the same + * methods as the Java Map data structure. + */ + stash: TStash; + /** + * A container for the results of this resolver. This field is available only to response + * mapping templates. + * + * For example, if you're resolving the author field of the following query: + * ``` + * query { + * getPost(id: 1234) { + * postId + * title + * content + * author { + * id + * name + * } + * } + * } + * ``` + * + * Then the full context variable that is available when processing a response mapping template might be: + * ``` + * { + * "arguments" : { + * id: "1234" + * }, + * "source": {}, + * "result" : { + * "postId": "1234", + * "title": "Some title", + * "content": "Some content", + * "author": { + * "id": "5678", + * "name": "Author Name" + * } + * }, + * "identity" : { + * "sourceIp" : ["x.x.x.x"], + * "userArn" : "arn:aws:iam::123456789012:user/appsync", + * "accountId" : "666666666666", + * "user" : "AIDAAAAAAAAAAAAAAAAAA" + * } + * } + * ``` + */ + result: TResult; + /** + * The result of whatever previous operation was executed in a pipeline resolver. If the + * previous operation * was the pipeline resolver request mapping template, then + * `context.prev.result` represents the output of the evaluation of the template, and is made + * available to the first function in the pipeline. If the previous operation was the first + * function, then context.prev.result represents the output of the first function, and is made + * available to the second function in the pipeline. If the previous operation was the last + * function, then context.prev.result represents the output of the first function, and is made + * available to the second function in the pipeline. If the previous operation was the last + * function, then context.prev.result represents the output of the last function, and is made + * available to the pipeline resolver response mapping template. + */ + prev: TPrev; + /** + * AWS AppSync supports passing custom headers from clients and accessing them in your GraphQL + * resolvers by using context.request.headers. You can then use the header values for actions + * such as inserting data into a data source or authorization checks. + */ + request: Request; + /** + * An object that contains information about the GraphQL request. For the structure of this + * field, see Info. + */ + info: Info; +}; +export type Identity = AppSyncIdentityIAM | AppSyncIdentityCognito | AppSyncIdentityOIDC | AppSyncIdentityLambda | undefined | null; +export type AppSyncIdentityIAM = { + /** + * The AWS account ID of the caller. + */ + accountId: string; + /** + * The Amazon Cognito identity pool ID associated with the caller. + */ + cognitoIdentityPoolId: string; + /** + * The Amazon Cognito identity ID of the caller. + */ + cognitoIdentityId: string; + /** + * The source IP address of the caller that AWS AppSync receives. If the request doesn't + * include the `x-forwarded-for` header, the source IP value contains only a single IP address + * from the TCP connection. If the request includes a `x-forwarded-for` header, the source IP + * is a list of IP addresses from the `x-forwarded-for` header, in addition to the IP address + * from the TCP connection. + */ + sourceIp: string[]; + /** + * The user name of the authenticated user. In the case of `AMAZON_COGNITO_USER_POOLS` + * authorization, the value of username is the value of attribute `cognito:username`. In the + * case of `AWS_IAM` authorization, the value of username is the value of the AWS user + * principal. 
If you're using IAM authorization with credentials vended from Amazon Cognito + * identity pools, we recommend that you use `cognitoIdentityId`. + */ + username: string; + /** + * The Amazon Resource Name (ARN) of the IAM user. + */ + userArn: string; + /** + * Either authenticated or unauthenticated based on the identity type. + */ + cognitoIdentityAuthType: string; + /** + * A comma-separated list of external identity provider information used in obtaining the + * credentials used to sign the request. + */ + cognitoIdentityAuthProvider: string; +}; +export type AppSyncIdentityCognito = { + /** + * The source IP address of the caller that AWS AppSync receives. If the request doesn't + * include the `x-forwarded-for` header, the source IP value contains only a single IP address + * from the TCP connection. If the request includes a `x-forwarded-for` header, the source IP + * is a list of IP addresses from the `x-forwarded-for` header, in addition to the IP address + * from the TCP connection. + */ + sourceIp: string[]; + /** + * The user name of the authenticated user. In the case of `AMAZON_COGNITO_USER_POOLS` + * authorization, the value of username is the value of attribute `cognito:username`. In the + * case of `AWS_IAM` authorization, the value of username is the value of the AWS user + * principal. If you're using IAM authorization with credentials vended from Amazon Cognito + * identity pools, we recommend that you use `cognitoIdentityId`. + */ + username: string; + /** + * The groups the authenticated user belongs to. + */ + groups: string[] | null; + /** + * The UUID of the authenticated user. + */ + sub: string; + /** + * The token issuer. + */ + issuer: string; + /** + * The claims that the user has. + */ + claims: any; + /** + * The default authorization strategy for this caller (ALLOW or DENY). + */ + defaultAuthStrategy: string; +}; +export type AppSyncIdentityOIDC = { + /** + * The UUID of the authenticated user. + */ + sub: string; + /** + * The token issuer. + */ + issuer: string; + /** + * The claims that the user has. + */ + claims: any; +}; +export type AppSyncIdentityLambda = { + /** + * content returned by the Lambda function authorizing the request. + */ + resolverContext: any; +}; +export type Extensions = { + /** + * Evicts an item from the AWS AppSync server-side cache. The first argument is the type name. + * The second argument is the field name. The third argument is an object containing key-value pair + * items that specify the caching key value. You must put the items in the object in the same order + * as the caching keys in the cached resolver's cachingKey. + * __Note:__ This utility works only for mutations, not queries. + */ + evictFromApiCache(typeName: string, fieldName: string, keyValuePair: Record): void; + /** + * Defines enhanced subscription filters. Each subscription notification event is + * evaluated against provided subscription filters and delivers notifications to + * clients if all filters evaluate to true. + * @param filter + */ + setSubscriptionFilter(filter: SubscriptionFilter): void; + /** + * Defines subscription invalidation filters. Subscription filters are evaluated + * against the invalidation payload, then invalidate a given subscription if the + * filters evaluate to true + * @param filter + */ + setSubscriptionInvalidationFilter(filter: SubscriptionFilter): void; + /** + * Used to initiate a subscription invalidation from a mutation. 
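When the API uses Cognito user pools, the identity fields documented above support simple group-based checks before a request is built. A sketch, assuming `AMAZON_COGNITO_USER_POOLS` auth; the `admin` group name is hypothetical.

```typescript
// Sketch: Cognito group check before a delete; group name is illustrative.
import { util, Context, AppSyncIdentityCognito } from '@aws-appsync/utils';
import * as ddb from '@aws-appsync/utils/dynamodb';

export function request(ctx: Context) {
  const identity = ctx.identity as AppSyncIdentityCognito;
  const groups = identity.groups || [];
  if (!groups.includes('admin')) {
    util.unauthorized(); // halts evaluation with an Unauthorized error
  }
  return ddb.remove({ key: { id: ctx.args.id } });
}

export function response(ctx: Context) {
  return ctx.result;
}
```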
+ * + * The InvalidationObject defines the following: + * * subscriptionField – The GraphQL schema subscription to invalidate. A single subscription, + * defined as a string in the subscriptionField, is considered for invalidation. + * * payload – A key-value pair list that's used as the input for invalidating subscriptions + * if the invalidation filter evaluates to true against their values. + * + * @param obj + */ + invalidateSubscriptions(obj: SubscriptionInvalidationObject): void; +}; +export type Request = { + /** + * AWS AppSync supports passing custom headers from clients and accessing them in your GraphQL + * resolvers by using context.request.headers. You can then use the header values for actions + * such as inserting data into a data source or authorization checks. + */ + headers: any; + /** + * AWS AppSync supports configuring a custom domain that you can use to access your GraphQL and + * real-time endpoints for your APIs. When making a request with a custom domain name, you can + * get the domain name using context.request.domainName. + */ + domainName: string | null; +}; +export type Info = { + /** + * The name of the field that is currently being resolved. + */ + fieldName: string; + /** + * The name of the parent type for the field that is currently being resolved. + */ + parentTypeName: string; + /** + * A map which holds all variables that are passed into the GraphQL request. + */ + variables: Record; + /** + * A list representation of the fields in the GraphQL selection set. Fields that are aliased + * are referenced only by the alias name, not the field name. The following example shows this + * in detail. + */ + selectionSetList: string[]; + /** + * A string representation of the selection set, formatted as GraphQL schema definition + * language (SDL). Although fragments aren't merged into the selection set, inline fragments + * are preserved, as shown in the following example. + */ + selectionSetGraphQL: string; +}; +/** + * The runtime object provides information and control over the current execution AppSync function + * or resolver. + */ +type Runtime = { + /** + * Invoking this function will halt execution of the current function (AppSync Function) or + * resolver (Unit or Pipeline Resolver) depending on the current context and return the specified + * object as the result. + * * When called in an AppSync function request handler, the data Source and response handler + * are skipped and the next function request handler (or the pipeline resolver response + * handler if this was the last AppSync function) is called. + * * When called in an AppSync pipeline resolver request handler, the pipeline execution is + * skipped, and the pipeline resolver response handler is called immediately. 
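The `extensions.invalidateSubscriptions` call described above is only meaningful from a mutation's response handler. A minimal sketch; the subscription field name (`onGameUpdated`) and payload key (`gameId`) are hypothetical.

```typescript
// Sketch: a mutation response handler that invalidates matching subscriptions.
import { util, Context, extensions } from '@aws-appsync/utils';

export function response(ctx: Context) {
  if (ctx.error) {
    util.error(ctx.error.message, ctx.error.type);
  }
  extensions.invalidateSubscriptions({
    subscriptionField: 'onGameUpdated',
    payload: { gameId: ctx.result.id },
  });
  return ctx.result;
}
```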
+ * + * @param obj Optional return value + */ + earlyReturn(obj?: unknown): never; +}; +declare global { + var util: Util; + var extensions: Extensions; + var runtime: Runtime; +} +declare const util: Util; +declare const extensions: Extensions; +declare const runtime: Runtime; +export { DynamoDBBinaryResult, DynamoDBBooleanResult, DynamoDBBinarySetResult, DynamoDBListResult, DynamoDBMapResult, DynamoDBNullResult, DynamoDBNumberResult, DynamoDBNumberSetResult, DynamoDBStringResult, DynamoDBStringSetResult, DynamoDBReturnType, } from './dynamodb-utils'; +export { DynamoDBExpressionOperation, DynamoDBFilterObject, OpenSearchQueryObject, OpenSearchQueryOperation, ShallowSubscriptionFilterObject, SubscriptionFilterExcludeKeysType, SubscriptionFilterObject, SubscriptionFilterOperation, SubscriptionFilterRuleObject, } from './transform-utils'; +export { SubscriptionFilter } from './subscription-filter-types'; +export { util, extensions, runtime, Context }; diff --git a/graphql/node_modules/@aws-appsync/utils/lib/index.js b/graphql/node_modules/@aws-appsync/utils/lib/index.js new file mode 100644 index 00000000..fe132f79 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/index.js @@ -0,0 +1,25 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.runtime = exports.extensions = exports.util = void 0; +__exportStar(require("./resolver-return-types"), exports); +const util = {}; +exports.util = util; +const extensions = {}; +exports.extensions = extensions; +const runtime = {}; +exports.runtime = runtime; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/index.js.map b/graphql/node_modules/@aws-appsync/utils/lib/index.js.map new file mode 100644 index 00000000..cebd04a2 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;AAAA,0DAAwC;AAojBxC,MAAM,IAAI,GAAS,EAAU,CAAC;AAgCrB,oBAAI;AA/Bb,MAAM,UAAU,GAAe,EAAgB,CAAC;AA+BjC,gCAAU;AA9BzB,MAAM,OAAO,GAAY,EAAa,CAAC;AA8BZ,0BAAO"} \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/math-utils.d.ts b/graphql/node_modules/@aws-appsync/utils/lib/math-utils.d.ts new file mode 100644 index 00000000..3e12f272 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/math-utils.d.ts @@ -0,0 +1,41 @@ +export type MathUtils = { + /** + * Takes a double and rounds it to the nearest integer. + * @param {number} input + * @returns {number} - Rounded integer + */ + roundNum: (input: number) => number; + /** + * Takes two numbers and returns the minimum value between the two numbers. 
+ * @param {number} input1 + * @param {number} input2 + * @returns {number} - Minimum value + */ + minVal: (input1: number, input2: number) => number; + /** + * Takes two numbers and returns the maximum value between the two numbers. + * @param {number} input1 + * @param {number} input2 + * @returns {number} - Maximum value + */ + maxVal: (input1: number, input2: number) => number; + /** + * Returns a random double between 0 and 1. + * + * This function shouldn't be used for anything that needs high entropy + * randomness (for example, cryptography). + * @returns {number} - Random double between 0 and 1 + */ + randomDouble: () => number; + /** + * Returns a random integer value within the specified range, + * with the first argument specifying the lower value of the range and the second argument specifying the upper value of the range. + * + * This function shouldn't be used for anything that needs high entropy + * randomness (for example, cryptography). + * @param {number} start - lower value of the range + * @param {number} end - upper value of the range + * @returns {number} Random integer value within the specified range + */ + randomWithinRange: (start: number, end: number) => number; +}; diff --git a/graphql/node_modules/@aws-appsync/utils/lib/math-utils.js b/graphql/node_modules/@aws-appsync/utils/lib/math-utils.js new file mode 100644 index 00000000..55972df3 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/math-utils.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=math-utils.js.map \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/math-utils.js.map b/graphql/node_modules/@aws-appsync/utils/lib/math-utils.js.map new file mode 100644 index 00000000..2e131703 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/math-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"math-utils.js","sourceRoot":"","sources":["../src/math-utils.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/rds-helpers.d.ts b/graphql/node_modules/@aws-appsync/utils/lib/rds-helpers.d.ts new file mode 100644 index 00000000..3dd67ff4 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/rds-helpers.d.ts @@ -0,0 +1,376 @@ +import { Prettify } from './type-utils'; +import { RDSRequest, RDSResponse } from './resolver-return-types'; +interface RDSTypeHint { +} +/** + * A helper type that provides methods to create type hints for different SQL data types. + * This helper facilitates the declaration of specific SQL types like DECIMAL, JSON, TIMESTAMP, etc. + * Each method returns an instance of RDSTypeHint which represents the SQL type hint for a value. + */ +export declare type RDSTypeHintHelper = { + DECIMAL(value: string | number): RDSTypeHint; + JSON(value: any): RDSTypeHint; + TIMESTAMP(value: string): RDSTypeHint; + TIME(value: string): RDSTypeHint; + UUID(value: string): RDSTypeHint; + DATE(value: string): RDSTypeHint; +}; +/** + * helper methods to assign a SQL type hint to a value, + * such as indicating a value should be treated as a TIMESTAMP, + * JSON, UUID, etc., in SQL operations. 
+ */ +export declare const typeHint: RDSTypeHintHelper; +type RDSEqualityOperator = { + ne?: T | null; + eq?: T | null; +}; +type RDSScalarOperator = Prettify & { + le?: T | null; + lt?: T | null; + gt?: T | null; + ge?: T | null; +}>; +type RDSNumberOperator = Prettify & { + between?: [T, T] | null; +}>; +type RDSStringOperator = Prettify & { + beginsWith?: T | null; + contains?: T | null; + size?: RDSNumberOperator; +}>; +type RDSAttributeExistsOperator = { + attributeExists?: boolean | null; +}; +type RDSBooleanOperator = RDSEqualityOperator; +type RDSExpressionOperation = TOperand extends boolean ? Prettify> & RDSAttributeExistsOperator> : TOperand extends number ? Prettify> & RDSAttributeExistsOperator> : TOperand extends string ? Prettify> & RDSAttributeExistsOperator> : never; +type RDSConditionShallowObject = T extends Record ? Prettify<{ + [P in keyof T]?: Prettify>; +}> : any; +type RDSConditionObject = T extends Record ? Prettify> & { + and?: RDSConditionObject>[]; + or?: RDSConditionObject>[]; + not?: RDSConditionObject>[]; +}> : {}; +type ColumnName = T extends Record ? keyof T : string; +export type OrderBy = { + column: ColumnName; + dir?: 'ASC' | 'DESC' | 'asc' | 'desc'; +}; +type RDSAliasObject = { + [key: string]: SQLStatement | SelectStatement | string; +}; +type RDSBasePayloadWithTable = { + /** + * The name of the database table involved in the SQL operation. This is a required field if + * from is not included. + */ + table: string; + from?: never; +}; +type RDSBasePayloadWithFrom = { + /** + * The name of the database table involved in the SQL operation. This is a required field if + * table is not included. + */ + from: string | RDSAliasObject; + table?: never; +}; +type RDSBasePayload = RDSBasePayloadWithTable | RDSBasePayloadWithFrom; +type RDSBaseConditionalPayload = Prettify = { + * customerId: { + * eq: 12123 + * }, + * or: [ + * { + * status: { eq: "PENDING"}, + * }, + * { + * status: { eq: "SHIPPING"}, + * } + * ] + * } + * ``` + * This will generate the following WHERE clause + * `WHERE customerId = 12123 OR (status = "PENDING" OR status = "SHIPPING")` + */ + where?: RDSConditionObject; +}>; +export type SelectPayload = Prettify & { + /** + * Specifies the list of columns to be returned in the result. Defaults to '*' (all columns). + */ + columns?: ColumnName[] | '*'; + /** + * Specifies the order by which the results should be sorted. + */ + orderBy?: OrderBy[]; + /** + * Limits the number of results returned. + */ + limit?: number; + /** + * Specifies the offset from where to start returning results. + */ + offset?: number; +}>; +export type InsertPayload = Prettify> & { + /** + * required parameter values that contains data to be inserted + */ + values: T extends Record ? T : Record; +}>; +export type UpdatePayload = Prettify & Partial> & { + /** + * required parameter values that contains values to be updated in the record + */ + values: T extends Record ? Partial : Record; +}>; +export type RemovePayload = Prettify> & RDSBaseConditionalPayload>; +type PGReturningPayload = { + /** + * a list of columns that should be returned. Accepts `*` to return all the columns. 
+ * Only supported in Postgres + */ + returning: ColumnName[] | '*'; +}; +export interface SQLStatement { +} +export interface SelectStatement extends SQLStatement { +} +export interface InsertStatement extends SQLStatement { +} +export interface UpdateStatement extends SQLStatement { +} +export interface RemoveStatement extends SQLStatement { +} +/** + * Generates select SQL statement + * @example + * ```typescript + * import { select, createMySQLStatement } from '@aws-appsync/utils/rds'; + * export function request(ctx) { + * const statement1 = select({ + * table: 'test', + * where: { + * name: { + * contains: 'son' + * } + * } + * }); + * return createMySQLStatement(statement1); + * } + * ``` + * @param payload + */ +export declare function select(payload: SelectPayload): SelectStatement; +/** + * Generates insert SQL statement + * @example + * ```typescript + * import { insert, createMySQLStatement } from '@aws-appsync/utils/rds'; + * export function request(ctx) { + * const statement1 = insert({ + * table: 'test', + * values: { + * name: "Luke Skywalker" + * }, + * }); + * return createMySQLStatement(statement1); + * } + * ``` + * @param payload + */ +export declare function insert(payload: InsertPayload): InsertStatement; +/** + * Generates update SQL statement + * @example + * ```typescript + * import { update, createMySQLStatement } from '@aws-appsync/utils/rds'; + * export function request(ctx) { + * const statement1 = update({ + * table: 'test', + * values: { + * name: "Luke Skywalker" + * }, + * where: { + * id: { + * eq: 12123 + * } + * } + * }); + * return createMySQLStatement(statement1); + * } + * ``` + * @param payload + */ +export declare function update(payload: UpdatePayload): UpdateStatement; +/** + * A helper to generate DELETE SQL statement to delete records from table + * @example + * ```typescript + * import { remove, createMySQLStatement } from '@aws-appsync/utils/rds'; + * export function request(ctx) { + * const statement1 = remove({ + * table: 'test', + * where: { id: { eq: 112321 }} + * }); + * return createMySQLStatement(statement1); + * } + * ``` + * @param payload + */ +export declare function remove(payload: RemovePayload): RemoveStatement; +type Statement = SQLStatement | string; +/** + * Generates an RDSRequest to interacts with Postgres database. The generated + * statements are quoted and escaped using `"` as quoting character. + * + * @param statement1 required statement + * @param statement2 optional second statement + */ +export declare function createPgStatement(statement1: Statement, statement2?: Statement | null): RDSRequest; +/** + * Generates an RDSRequest to interacts with MySQL RDS database. The generated + * statements are quoted and escaped using backtick as quoting character. + * + * @param statement1 required statement + * @param statement2 optional second statement + */ +export declare function createMySQLStatement(statement1: Statement, statement2?: Statement | null): RDSRequest; +/** + * The tagged template allows writing a static statement that can receive dynamic values at runtime through + * template expressions. AWS AppSync builds a variable map from the expression values to construct a + * SqlParameterized query sent to the Amazon Aurora Serverless Data API. + */ +export declare function sql(literal: TemplateStringsArray, ...args: any[]): SQLStatement; +/** + * Returns a object by transforming the stringified raw Amazon Relational Database Service + * (Amazon RDS) Data API operation result format to a more concise object. 
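The statement builders and helpers declared above (`select`, `insert`, `update`, `remove`, `createPgStatement`, `createMySQLStatement`, the `sql` tagged template, and `toJsonObject`, whose description continues below) fit together as in the following sketch for a Postgres data source. The table and column names are illustrative, and the `[0]` index assumes one result set per statement.

```typescript
// Sketch: parameterized Postgres query with a type hint, decoded in the response.
import { util, Context } from '@aws-appsync/utils';
import { sql, createPgStatement, toJsonObject, typeHint } from '@aws-appsync/utils/rds';

export function request(ctx: Context) {
  const id = typeHint.UUID(ctx.args.id); // send the value as a UUID parameter
  return createPgStatement(sql`SELECT * FROM games WHERE id = ${id}`);
}

export function response(ctx: Context) {
  if (ctx.error) {
    util.error(ctx.error.message, ctx.error.type);
  }
  // toJsonObject returns one list of rows per statement; take the first statement's rows.
  const rows = toJsonObject(ctx.result)[0];
  return rows.length ? rows[0] : null;
}
```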
+ * The returned object is a serialized list of SQL records of the result set. + * Every record is represented as a collection of key-value pairs. The keys are + * the corresponding column names. + + * If the corresponding statement in the input was a SQL query that causes a mutation + * (for example INSERT, UPDATE, DELETE), then an empty list is returned. + * For example, the query `select * from Books limit 2` provides the raw result + * from the Amazon RDS Data operation: + + * ``` + * { + * "sqlStatementResults": [ + * { + * "numberOfRecordsUpdated": 0, + * "records": [ + * [ + * { + * "stringValue": "Mark Twain" + * }, + * { + * "stringValue": "Adventures of Huckleberry Finn" + * }, + * { + * "stringValue": "978-1948132817" + * } + * ], + * [ + * { + * "stringValue": "Jack London" + * }, + * { + * "stringValue": "The Call of the Wild" + * }, + * { + * "stringValue": "978-1948132275" + * } + * ] + * ], + * "columnMetadata": [ + * { + * "isSigned": false, + * "isCurrency": false, + * "label": "author", + * "precision": 200, + * "typeName": "VARCHAR", + * "scale": 0, + * "isAutoIncrement": false, + * "isCaseSensitive": false, + * "schemaName": "", + * "tableName": "Books", + * "type": 12, + * "nullable": 0, + * "arrayBaseColumnType": 0, + * "name": "author" + * }, + * { + * "isSigned": false, + * "isCurrency": false, + * "label": "title", + * "precision": 200, + * "typeName": "VARCHAR", + * "scale": 0, + * "isAutoIncrement": false, + * "isCaseSensitive": false, + * "schemaName": "", + * "tableName": "Books", + * "type": 12, + * "nullable": 0, + * "arrayBaseColumnType": 0, + * "name": "title" + * }, + * { + * "isSigned": false, + * "isCurrency": false, + * "label": "ISBN-13", + * "precision": 15, + * "typeName": "VARCHAR", + * "scale": 0, + * "isAutoIncrement": false, + * "isCaseSensitive": false, + * "schemaName": "", + * "tableName": "Books", + * "type": 12, + * "nullable": 0, + * "arrayBaseColumnType": 0, + * "name": "ISBN-13" + * } + * ] + * } + * ] + * } + ``` + + * The util.rds.toJson of this JSON block is: + + ``` + * [ + * { + * "author": "Mark Twain", + * "title": "Adventures of Huckleberry Finn", + * "ISBN-13": "978-1948132817" + * }, + * { + * "author": "Jack London", + * "title": "The Call of the Wild", + * "ISBN-13": "978-1948132275" + * } + * ] + * + ``` + @param data - Serialized SQL result + @returns Array>> - Object representing SQL results + */ +export declare function toJsonObject(data: string | RDSResponse): Array>>; +export {}; diff --git a/graphql/node_modules/@aws-appsync/utils/lib/rds-helpers.js b/graphql/node_modules/@aws-appsync/utils/lib/rds-helpers.js new file mode 100644 index 00000000..9c8a0f13 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/rds-helpers.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=rds-helpers.js.map \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/rds-helpers.js.map b/graphql/node_modules/@aws-appsync/utils/lib/rds-helpers.js.map new file mode 100644 index 00000000..dc646a93 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/rds-helpers.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rds-helpers.js","sourceRoot":"","sources":["../src/rds-helpers.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/rds-utils.d.ts b/graphql/node_modules/@aws-appsync/utils/lib/rds-utils.d.ts new file mode 100644 index 00000000..030f63e7 --- /dev/null +++ 
b/graphql/node_modules/@aws-appsync/utils/lib/rds-utils.d.ts @@ -0,0 +1,120 @@ +import { RDSResponse } from "."; +export type RdsUtil = { + /** + * Returns a object by transforming the stringified raw Amazon Relational Database Service + * (Amazon RDS) Data API operation result format to a more concise object. + * The returned object is a serialized list of SQL records of the result set. + * Every record is represented as a collection of key-value pairs. The keys are + * the corresponding column names. + + * If the corresponding statement in the input was a SQL query that causes a mutation + * (for example INSERT, UPDATE, DELETE), then an empty list is returned. + * For example, the query select * from Books limit 2 provides the raw result + * from the Amazon RDS Data operation: + + * ``` + * { + * "sqlStatementResults": [ + * { + * "numberOfRecordsUpdated": 0, + * "records": [ + * [ + * { + * "stringValue": "Mark Twain" + * }, + * { + * "stringValue": "Adventures of Huckleberry Finn" + * }, + * { + * "stringValue": "978-1948132817" + * } + * ], + * [ + * { + * "stringValue": "Jack London" + * }, + * { + * "stringValue": "The Call of the Wild" + * }, + * { + * "stringValue": "978-1948132275" + * } + * ] + * ], + * "columnMetadata": [ + * { + * "isSigned": false, + * "isCurrency": false, + * "label": "author", + * "precision": 200, + * "typeName": "VARCHAR", + * "scale": 0, + * "isAutoIncrement": false, + * "isCaseSensitive": false, + * "schemaName": "", + * "tableName": "Books", + * "type": 12, + * "nullable": 0, + * "arrayBaseColumnType": 0, + * "name": "author" + * }, + * { + * "isSigned": false, + * "isCurrency": false, + * "label": "title", + * "precision": 200, + * "typeName": "VARCHAR", + * "scale": 0, + * "isAutoIncrement": false, + * "isCaseSensitive": false, + * "schemaName": "", + * "tableName": "Books", + * "type": 12, + * "nullable": 0, + * "arrayBaseColumnType": 0, + * "name": "title" + * }, + * { + * "isSigned": false, + * "isCurrency": false, + * "label": "ISBN-13", + * "precision": 15, + * "typeName": "VARCHAR", + * "scale": 0, + * "isAutoIncrement": false, + * "isCaseSensitive": false, + * "schemaName": "", + * "tableName": "Books", + * "type": 12, + * "nullable": 0, + * "arrayBaseColumnType": 0, + * "name": "ISBN-13" + * } + * ] + * } + * ] + * } + ``` + + * The util.rds.toJson of this JSON block is: + + ``` + * [ + * { + * "author": "Mark Twain", + * "title": "Adventures of Huckleberry Finn", + * "ISBN-13": "978-1948132817" + * }, + * { + * "author": "Jack London", + * "title": "The Call of the Wild", + * "ISBN-13": "978-1948132275" + * } + * ] + * + ``` + * @param serializedSQLResult - Serialized SQL result + * @returns {Record | null} - Object representing SQL results + */ + toJsonObject(serializedSQLResult: string | RDSResponse): Record | null; +}; diff --git a/graphql/node_modules/@aws-appsync/utils/lib/rds-utils.js b/graphql/node_modules/@aws-appsync/utils/lib/rds-utils.js new file mode 100644 index 00000000..149ee279 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/rds-utils.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=rds-utils.js.map \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/rds-utils.js.map b/graphql/node_modules/@aws-appsync/utils/lib/rds-utils.js.map new file mode 100644 index 00000000..b2c4ee0e --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/rds-utils.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"rds-utils.js","sourceRoot":"","sources":["../src/rds-utils.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/resolver-return-types.d.ts b/graphql/node_modules/@aws-appsync/utils/lib/resolver-return-types.d.ts new file mode 100644 index 00000000..f9843022 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/resolver-return-types.d.ts @@ -0,0 +1,349 @@ +export type Key = { + [key: string]: AttributeValue; +}; +export type AttributeValue = unknown; +export type AttributeValueList = AttributeValue[]; +export type ConsistentRead = boolean; +export type PutItemInputAttributeMap = { + [key: string]: AttributeValue; +}; +export type AttributeMap = { + [key: string]: AttributeValue; +}; +export type AttributeName = string; +export type AttributeNameList = AttributeName[]; +export type ExpressionAttributeNameMap = { + [key: string]: AttributeName; +}; +export type ExpressionAttributeNameVariable = string; +export type ExpressionAttributeValueMap = { + [key: string]: AttributeValue; +}; +export type ExpressionAttributeValueVariable = string; +export type ConditionCheckExpression = { + expression: string; + expressionNames?: ExpressionAttributeNameMap; + expressionValues?: ExpressionAttributeValueMap; + equalsIgnore?: string[]; + consistentRead?: boolean; + conditionalCheckFailedHandler?: { + strategy: 'Custom' | 'Reject'; + lambdaArn?: string; + }; +}; +export type TransactConditionCheckExpression = { + expression: string; + expressionNames?: ExpressionAttributeNameMap; + expressionValues?: ExpressionAttributeValueMap; + returnValuesOnConditionCheckFailure: boolean; +}; +export type DynamoDBExpression = { + expression: string; + expressionNames?: ExpressionAttributeNameMap; + expressionValues?: ExpressionAttributeValueMap; +}; +export type DynamoDBProjectionExpression = { + expression: string; + expressionNames?: Record; +}; +export type DynamoDBGetItemRequest = { + operation: 'GetItem'; + key: Key; + consistentRead?: ConsistentRead; + projection?: DynamoDBProjectionExpression; +}; +/** + * @deprecated Use DynamoDBGetItemRequest + */ +export type DynamoDBGetItem = DynamoDBGetItemRequest; +export type DynamoDBPutItemRequest = { + operation: 'PutItem'; + key: Key; + attributeValues: PutItemInputAttributeMap; + condition?: ConditionCheckExpression; + customPartitionKey?: string; + populateIndexFields?: boolean; + _version?: number; +}; +export type DynamoDBUpdateItemRequest = { + operation: 'UpdateItem'; + key: Key; + update: DynamoDBExpression; + condition?: ConditionCheckExpression; + customPartitionKey?: string; + populateIndexFields?: boolean; + _version?: number; +}; +export type DynamoDBDeleteItemRequest = { + operation: 'DeleteItem'; + key: Key; + condition?: ConditionCheckExpression; + customPartitionKey?: string; + populateIndexFields?: boolean; + _version?: number; +}; +export type DynamoDBQueryRequest = { + operation: 'Query'; + query: DynamoDBExpression; + index?: string; + nextToken?: string; + limit?: number; + scanIndexForward?: boolean; + consistentRead?: boolean; + select?: 'ALL_ATTRIBUTES' | 'ALL_PROJECTED_ATTRIBUTES'; + filter?: DynamoDBExpression; + projection?: DynamoDBProjectionExpression; +}; +export type DynamoDBScanRequest = { + operation: 'Scan'; + index?: string; + limit?: number; + consistentRead?: boolean; + nextToken?: string; + totalSegments?: number; + segment?: number; + filter?: DynamoDBExpression; + projection?: DynamoDBProjectionExpression; +}; +export type 
DynamoDBSyncRequest = { + operation: 'Sync'; + basePartitionKey?: string; + deltaIndexName?: string; + limit?: number; + nextToken?: string; + lastSync?: number; + filter?: DynamoDBExpression; +}; +export type DynamoDBBatchGetItemRequest = { + operation: 'BatchGetItem'; + tables: { + [tableName: string]: { + keys: Key[]; + consistentRead?: boolean; + projection?: DynamoDBProjectionExpression; + }; + }; +}; +export type DynamoDBBatchDeleteItemRequest = { + operation: 'BatchDeleteItem'; + tables: { + [tableName: string]: Key[]; + }; +}; +export type DynamoDBBatchPutItemRequest = { + operation: 'BatchPutItem'; + tables: { + [tableName: string]: PutItemInputAttributeMap[]; + }; +}; +export type DynamoDBTransactGetItemsRequest = { + operation: 'TransactGetItems'; + transactItems: { + table: string; + key: Key; + }[]; +}; +export type DynamoDBTransactWriteItemsRequest = { + operation: 'TransactWriteItems'; + transactItems: TransactItem[]; +}; +type TransactItem = TransactWritePutItem | TransactWriteUpdateItem | TransactWriteDeleteItem | TransactWriteConditionCheckItem; +type TransactWritePutItem = { + table: string; + operation: 'PutItem'; + key: Key; + attributeValues: PutItemInputAttributeMap; + condition?: TransactConditionCheckExpression; +}; +type TransactWriteUpdateItem = { + table: string; + operation: 'UpdateItem'; + key: Key; + update: DynamoDBExpression; + condition?: TransactConditionCheckExpression; +}; +type TransactWriteDeleteItem = { + table: string; + operation: 'DeleteItem'; + key: Key; + condition?: TransactConditionCheckExpression; +}; +type TransactWriteConditionCheckItem = { + table: string; + operation: 'ConditionCheck'; + key: Key; + condition?: TransactConditionCheckExpression; +}; +export type HTTPRequest> = { + method: 'PUT' | 'POST' | 'GET' | 'DELETE' | 'PATCH'; + params?: { + query?: { + [key: string]: any; + }; + headers?: { + [key: string]: string; + }; + body?: T; + }; + resourcePath: string; +}; +export type HTTPResult = { + statusCode: number; + headers?: Record; + body: string; +}; +export type RDSRequest = { + statements: string[]; + variableMap: Record; + variableTypeHintMap: Record; +}; +type RDSColumnMetadata = { + /** + * Name of the column + */ + name?: string; + /** + * The label for the column. + */ + label?: string; + /** + * The database-specific data type of the column. + */ + typeName?: string; + /** + * The name of the schema that owns the table that includes the column. + */ + schemaName?: string; + /** + * The name of the table that includes the column. + */ + tableName?: string; + /** + * A value that indicates whether the column is nullable. + */ + nullable?: number; + /** + * A value that indicates whether the column increments automatically. + */ + isAutoIncrement?: boolean; + /** + * A value that indicates whether the column is case-sensitive. + */ + isCaseSensitive?: boolean; + /** + * The type of the column. + */ + type?: number; + /** + * A value that indicates whether an integer column is signed. + */ + isSigned?: boolean; + /** + * A value that indicates whether the column contains currency values. + */ + isCurrency?: boolean; + /** + * The precision value of a decimal number column. + */ + precision?: number; + /** + * The scale value of a decimal number column. + */ + scale?: number; + /** + * The type of the column. + */ + arrayBaseColumnType?: number; +}; +type RDSStructValue = { + attributes: RDSRecordColumnValue[]; +}; +type RDSRecordColumnValue = { + /** + * A NULL value. 
+ */ + isNull: boolean; + /** + * A value for a column of BIT data type. + */ + bitValue: boolean; + /** + * A value for a column of big integer data type. + */ + bigIntValue: number; + /** + * A value for a column of integer data type. + */ + intValue: number; + /** + * A value for a column of double data type. + */ + doubleValue: number; + /** + * A value for a column of real data type. + */ + realValue: number; + /** + * A value for a column of string data type. + */ + stringValue: string; + /** + * A value for a column of BLOB data type. + */ + blobValue: ArrayBuffer; + /** + * An array of column values. + */ + arrayValues: RDSRecordColumnValue[]; + /** + * A value for a column of STRUCT data type. + */ + structValue: RDSStructValue; +}; +type RDSSqlStatementResult = { + numberOfRecordsUpdated: number; + records: Array; + columnMetadata: RDSColumnMetadata[]; +}; +export type RDSResponse = { + sqlStatementResults: RDSSqlStatementResult[]; +}; +export type OpenSearchRequest = { + operation: 'GET' | 'POST' | ' PUT' | 'HEAD' | 'DELETE'; + path: string; + params?: Partial<{ + headers: unknown; + queryString: unknown; + body: unknown; + }>; +}; +export type LambdaRequest = { + operation: 'Invoke' | 'BatchInvoke'; + invocationType?: 'RequestResponse' | 'Event'; + payload: unknown; +}; +export type NONERequest = { + payload: unknown; +}; +export type PutEventsRequest = { + operation: 'PutEvents'; + events: { + source: string; + detail: { + [key: string]: any; + }; + detailType: string; + resources?: string[]; + time?: string; + }[]; +}; +export type PutEventsResult = { + Entries: Array<{ + EventId: string; + } | { + ErrorCode: string; + ErrorMessage: string; + }>; + FailedEntryCount: number; +}; +export {}; diff --git a/graphql/node_modules/@aws-appsync/utils/lib/resolver-return-types.js b/graphql/node_modules/@aws-appsync/utils/lib/resolver-return-types.js new file mode 100644 index 00000000..999aec45 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/resolver-return-types.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=resolver-return-types.js.map \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/resolver-return-types.js.map b/graphql/node_modules/@aws-appsync/utils/lib/resolver-return-types.js.map new file mode 100644 index 00000000..5117f80f --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/resolver-return-types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"resolver-return-types.js","sourceRoot":"","sources":["../src/resolver-return-types.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/string-utils.d.ts b/graphql/node_modules/@aws-appsync/utils/lib/string-utils.d.ts new file mode 100644 index 00000000..b07163fb --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/string-utils.d.ts @@ -0,0 +1,13 @@ +export type NormalizationType = 'nfc' | 'nfd' | 'nfkc' | 'nfkd'; +export type StringUtils = { + /** + * Normalizes a string using one of the four unicode normalization forms: NFC, NFD, NFKC, or + * NFKD. The first argument is the string to normalize. The second argument is either "nfc", + * "nfd", "nfkc", or "nfkd" specifying the normalization type to use for the normalization + * process. 
+ * @param {string} value - Value to be normalized + * @param {('nfc' | 'nfd' | 'nfkc' | 'nfkd')} normalizationType Normalization type + * @returns {string} Normalized string + */ + normalize(value: string, normalizationType: NormalizationType): string; +}; diff --git a/graphql/node_modules/@aws-appsync/utils/lib/string-utils.js b/graphql/node_modules/@aws-appsync/utils/lib/string-utils.js new file mode 100644 index 00000000..57fce8f1 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/string-utils.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=string-utils.js.map \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/string-utils.js.map b/graphql/node_modules/@aws-appsync/utils/lib/string-utils.js.map new file mode 100644 index 00000000..b700d760 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/string-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"string-utils.js","sourceRoot":"","sources":["../src/string-utils.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/subscription-filter-types.d.ts b/graphql/node_modules/@aws-appsync/utils/lib/subscription-filter-types.d.ts new file mode 100644 index 00000000..71eb78c4 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/subscription-filter-types.d.ts @@ -0,0 +1,38 @@ +type SubscriptionFilterValue = string | number; +export interface SubscriptionFilterGeneric { + fieldName: string; + operator: string; + value: SubscriptionFilterValue | boolean | SubscriptionFilterValue[]; +} +export interface SubscriptionFilterEquality extends SubscriptionFilterGeneric { + operator: 'eq' | 'ne'; + value: SubscriptionFilterValue | boolean; +} +export interface SubscriptionFilterComparator extends SubscriptionFilterGeneric { + operator: 'le' | 'lt' | 'ge' | 'gt'; + value: SubscriptionFilterValue; +} +export interface SubscriptionFilterContains extends SubscriptionFilterGeneric { + operator: 'contains' | 'notContains'; + value: SubscriptionFilterValue; +} +export interface SubscriptionFilterBeginsWith extends SubscriptionFilterGeneric { + operator: 'beginsWith'; + value: string; +} +export interface SubscriptionFilterIn extends SubscriptionFilterGeneric { + operator: 'in' | 'notIn'; + value: SubscriptionFilterValue[]; +} +export interface SubscriptionFilterBetween extends SubscriptionFilterGeneric { + operator: 'between'; + value: [SubscriptionFilterValue, SubscriptionFilterValue]; +} +export type SubscriptionFilterEntry = SubscriptionFilterEquality | SubscriptionFilterComparator | SubscriptionFilterContains | SubscriptionFilterBeginsWith | SubscriptionFilterIn | SubscriptionFilterBetween; +export type SubscriptionFilterGroup = { + filters: SubscriptionFilterEntry[]; +}; +export type SubscriptionFilter = { + filterGroup: SubscriptionFilterGroup[]; +}; +export {}; diff --git a/graphql/node_modules/@aws-appsync/utils/lib/subscription-filter-types.js b/graphql/node_modules/@aws-appsync/utils/lib/subscription-filter-types.js new file mode 100644 index 00000000..2c361eef --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/subscription-filter-types.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=subscription-filter-types.js.map \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/subscription-filter-types.js.map 
b/graphql/node_modules/@aws-appsync/utils/lib/subscription-filter-types.js.map new file mode 100644 index 00000000..894e5f01 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/subscription-filter-types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"subscription-filter-types.js","sourceRoot":"","sources":["../src/subscription-filter-types.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/time-utils.d.ts b/graphql/node_modules/@aws-appsync/utils/lib/time-utils.d.ts new file mode 100644 index 00000000..f0ae43e3 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/time-utils.d.ts @@ -0,0 +1,85 @@ +export type TimeUtils = { + /** + * Returns a String representation of UTC in ISO8601 format. + * @returns {string} - Current time formatted ISO8601 + */ + nowISO8601(): string; + /** + * Returns the number of seconds from the epoch of 1970-01-01T00:00:00Z to now. + * @returns {number} - Current time in seconds + */ + nowEpochSeconds(): number; + /** + * Returns the number of milliseconds from the epoch of 1970-01-01T00:00:00Z to now. + * @returns {number} - Current time in milliseconds + */ + nowEpochMilliSeconds(): number; + /** + * Returns a string of the current timestamp in UTC using the specified format from a String + * input type. + * @param {string} formatString - Date format string + * @returns {string} - Current time formatted + */ + nowFormatted(formatString: string): string; + /** + * Returns a string of the current timestamp for a timezone using the specified format and + * timezone from String input types. + * @param {string} formatString - Date format string + * @param {string} timezone - Timezone + * @returns {string} - Current time formatted + */ + nowFormatted(formatString: string, timezone: string): string; + /** + * Parses a timestamp passed as a String, along with a format, and return the timestamp as + * milliseconds since epoch. + * @param {string} timestamp - Formatted timestamp + * @param {string} formatString - Date format string + * @returns {number} - Parsed time + */ + parseFormattedToEpochMilliSeconds(timestamp: string, formatString: string): number; + /** + * Parses a timestamp passed as a String, along with a format and time zone, and return the + * timestamp as milliseconds since epoch. + * @param {string} timestamp - Formatted timestamp + * @param {string} formatString - Date format string + * @param {string} timezone - Timezone + * @returns {number} - Parsed time + */ + parseFormattedToEpochMilliSeconds(timestamp: string, formatString: string, timezone: string): number; + /** + * Parses an ISO8601 timestamp, passed as a String, and return the timestamp as milliseconds + * since epoch. + * @param {string} timestamp - ISO 8601 timestamp + * @returns {number} - Parsed timestamp in milliseconds + */ + parseISO8601ToEpochMilliSeconds(timestamp: string): number; + /** + * Converts an epoch milliseconds timestamp to an epoch seconds timestamp. + * @param {number} milliseconds - Milliseconds since epoch + * @returns {number} - Seconds since epoch + */ + epochMilliSecondsToSeconds(milliseconds: number): number; + /** + * Converts a epoch milliseconds timestamp to an ISO8601 timestamp. + * @param {number} milliseconds - Milliseconds since epoch + * @returns {string} - Date in ISO 8601 format + */ + epochMilliSecondsToISO8601(milliseconds: number): string; + /** + * Converts a epoch milliseconds timestamp, passed as long, to a timestamp formatted according + * to the supplied format in UTC. 
+ * @param {number} milliseconds - Milliseconds since epoch + * @param {string} formatString - Date format string + * @returns {string} - Formatted timestamp + */ + epochMilliSecondsToFormatted(milliseconds: number, formatString: string): string; + /** + * Converts a epoch milliseconds timestamp, passed as long, to a timestamp formatted according + * to the supplied format in UTC. + * @param {number} milliseconds - Milliseconds since epoch + * @param {string} formatString - Date format string + * @param {string} timezone - Timezone + * @returns {string} - Formatted timestamp + */ + epochMilliSecondsToFormatted(milliseconds: number, formatString: string, timezone: string): string; +}; diff --git a/graphql/node_modules/@aws-appsync/utils/lib/time-utils.js b/graphql/node_modules/@aws-appsync/utils/lib/time-utils.js new file mode 100644 index 00000000..e12dd28c --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/time-utils.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=time-utils.js.map \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/time-utils.js.map b/graphql/node_modules/@aws-appsync/utils/lib/time-utils.js.map new file mode 100644 index 00000000..7daf91dc --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/time-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"time-utils.js","sourceRoot":"","sources":["../src/time-utils.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/transform-utils.d.ts b/graphql/node_modules/@aws-appsync/utils/lib/transform-utils.d.ts new file mode 100644 index 00000000..1525229d --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/transform-utils.d.ts @@ -0,0 +1,244 @@ +import { SubscriptionFilter } from './subscription-filter-types'; +import { Prettify } from './type-utils'; +export type DynamoDBAttributeTypeValues = '_null' | 'string' | 'stringSet' | 'number' | 'numberSet' | 'binary' | 'binarySet' | 'boolean' | 'list' | 'map'; +type DynamoDBAttributeOperator = { + attributeType?: DynamoDBAttributeTypeValues | null; + attributeExists?: boolean | null; +}; +type DynamoDBOperator = DynamoDBAttributeOperator & T; +export type DynamoDBEqualityOperators = { + ne?: T | null; + eq?: T | null; +}; +type DynamoDBScalarNumberOperators = DynamoDBEqualityOperators & { + le?: T | null; + lt?: T | null; + ge?: T | null; + gt?: T | null; + in?: T[] | null; +}; +type DynamoDBNumberOperators = DynamoDBScalarNumberOperators & { + between?: [T, T] | null; +}; +type DynamoDBStringOperators = DynamoDBScalarNumberOperators & { + beginsWith?: T | null; + contains?: T | null; + notContains?: T | null; +}; +type DynamoDBBooleanOperators = DynamoDBEqualityOperators; +type DynamoDBArrayOperators = { + contains?: T | null; + notContains?: T | null; +}; +export type DynamoDBExpressionOperation = TOperand extends (infer U)[] ? DynamoDBOperator>> : TOperand extends boolean ? DynamoDBOperator>> : TOperand extends number ? DynamoDBOperator>> : TOperand extends string ? DynamoDBOperator>> : TOperand extends Record ? ShallowDynamoDBFilterObject : any; +export type ShallowDynamoDBFilterObject = T extends Record ? Prettify<{ + [k in keyof T]?: DynamoDBExpressionOperation> | null; +}> : any; +export type DynamoDBFilterObject = T extends Record ? 
Prettify> & { + and?: DynamoDBFilterObject | DynamoDBFilterObject[] | null; + or?: DynamoDBFilterObject | DynamoDBFilterObject[] | null; + not?: DynamoDBFilterObject | null; +}> : {}; +type OpenSearchBaseOperators = { + exists?: boolean; +}; +type OpenSearchEqualityOperators = { + eq?: T; + ne?: T; +}; +type OpenSearchScalarNumberOperators = OpenSearchEqualityOperators & { + gt?: T; + gte?: T; + lt?: T; + lte?: T; +}; +type OpenSearchNumberOperators = OpenSearchScalarNumberOperators & { + range?: [T, T]; +}; +type OpenSearchStringOperators = OpenSearchEqualityOperators & { + match?: T; + matchPhrase?: T; + matchPhrasePrefix?: T; + wildcard?: T; + regexp?: T; + multiMatch?: T; +}; +type OpenSearchBooleanOperators = OpenSearchEqualityOperators; +export type OpenSearchQueryOperation = TOperand extends boolean ? OpenSearchBaseOperators & OpenSearchBooleanOperators : TOperand extends number ? OpenSearchBaseOperators & OpenSearchNumberOperators : TOperand extends string ? OpenSearchBaseOperators & OpenSearchStringOperators : TOperand extends boolean[] ? OpenSearchBaseOperators & OpenSearchBooleanOperators : TOperand extends number[] ? OpenSearchBaseOperators & OpenSearchNumberOperators : TOperand extends string[] ? OpenSearchBaseOperators & OpenSearchStringOperators : any; +export type ShallowOpenSearchQueryObject = T extends Record ? Prettify<{ + [k in keyof T]?: OpenSearchQueryOperation>; +}> : any; +export type OpenSearchQueryObject = T extends Record ? Prettify> & { + and?: OpenSearchQueryObject[]; + or?: OpenSearchQueryObject[]; + not?: OpenSearchQueryObject; +}> : {}; +export type SubscriptionFilterEqualityOperators = { + eq?: T; + ne?: T; +}; +export type SubscriptionFilterScalarNumberOperators = SubscriptionFilterEqualityOperators & { + gt?: T; + ge?: T; + lt?: T; + le?: T; + in?: T[]; +}; +export type SubscriptionFilterNumberOperators = SubscriptionFilterScalarNumberOperators & { + between?: [T, T]; +}; +export type SubscriptionFilterStringOperators = SubscriptionFilterScalarNumberOperators & { + beginsWith?: T; + contains?: T; + notContains?: T; + between?: [T, T]; +}; +export type SubscriptionFilterBooleanOperators = SubscriptionFilterEqualityOperators; +export type SubscriptionFilterArrayOperators = { + contains?: T; + notContains?: T; + containsAny?: T[]; +}; +export type SubscriptionFilterOperation = TOperand extends boolean ? SubscriptionFilterBooleanOperators : TOperand extends number ? SubscriptionFilterNumberOperators : TOperand extends string ? SubscriptionFilterStringOperators : TOperand extends boolean[] ? SubscriptionFilterArrayOperators : TOperand extends number[] ? SubscriptionFilterArrayOperators : TOperand extends string[] ? SubscriptionFilterArrayOperators : any; +export type ShallowSubscriptionFilterObject = T extends Record ? Prettify<{ + [k in keyof T]?: SubscriptionFilterOperation>; +}> : any; +export type SubscriptionFilterObject = T extends Record ? Prettify> & { + and?: SubscriptionFilterObject[]; + or?: SubscriptionFilterObject[]; +}> : {}; +export type SubscriptionFilterRuleObject = T extends Record ? ShallowSubscriptionFilterObject : {}; +type ExcludeConditions = Exclude; +export type SubscriptionFilterExcludeKeysType = T extends Record ? ExcludeConditions[] : string[]; +export type TransformUtils = { + /** + * Converts an input string to a filter expression for use with DynamoDB. 
+ * Input: + * ``` + * util.transform.toDynamoDBFilterExpression({ + * "title":{ + * "contains":"Hello World" + * } + * }) + * ``` + * Output: + * ``` + * { + * "expression" : "contains(#title, :title_contains)" + * "expressionNames" : { + * "#title" : "title", + * }, + * "expressionValues" : { + * ":title_contains" : { "S" : "Hello World" } + * }, + * } + * ``` + * @param {DynamoDBFilterObject} filterObject - Object representing DynamoDB filter + * @returns {string} - DynamoDB filter expression stringified object + */ + toDynamoDBFilterExpression]: any; + } = Record>(filterObject: DynamoDBFilterObject): string; + /** + * Converts the given input into its equivalent DynamoDB condition expression, returning it as a Json string. + * + * The default Operator is assumed to be `AND`. The method behaves similarly as toDynamoDBFilterExpression, except + * that it supports the remaining method that Dynamo condition expression supports, such as size, attribute_exists. + * @param {any} conditionObject Object representing DynamoDB condition + * @returns string the evaluated DynamoDB condition + */ + toDynamoDBConditionExpression]: any; + } = Record>(conditionObject: DynamoDBFilterObject): string; + /** + * Converts the given input into its equivalent OpenSearch Query DSL expression, returning it + * as a JSON string. + * Input: + * ``` + * util.transform.toElasticsearchQueryDSL({ + * "upvotes":{ + * "ne":15, + * "range":[ + * 10, + * 20 + * ] + * }, + * "title":{ + * "eq":"hihihi", + * "wildcard":"h*i" + * } + * }) + * ``` + + * Output: + * ``` + * { + * "bool":{ + * "must":[ + * { + * "bool":{ + * "must":[ + * { + * "bool":{ + * "must_not":{ + * "term":{ + * "upvotes":15 + * } + * } + * } + * }, + * { + * "range":{ + * "upvotes":{ + * "gte":10, + * "lte":20 + * } + * } + * } + * ] + * } + * }, + * { + * "bool":{ + * "must":[ + * { + * "term":{ + * "title":"hihihi" + * } + * }, + * { + * "wildcard":{ + * "title":"h*i" + * } + * } + * ] + * } + * } + * ] + * } + * } + * ``` + * The default operator is assumed to be AND. + * @param {any} obj - Object representing OpenSearch Query + * @returns {string} - JSON string of an OpenSearch Query DSL expression + */ + toElasticsearchQueryDSL]: any; + } = Record>(obj: OpenSearchQueryObject): string; + /** + * Converts a Map input object to a `SubscriptionFilter` expression object. The + * `util.transform.toSubscriptionFilter` method is used as an input to the + * `extensions.setSubscriptionFilter()` extension. + * For more information, @see {@link https://docs.aws.amazon.com/appsync/latest/devguide/extensions.html#extensions-setSubscriptionInvalidationFilter|extensions.setSubscriptionFilter} + + * @param {Record} obj - a Map input object that's converted to the `SubscriptionFilter` expression object + * @param {string[]} ignoredFields - a List of field names that will be ignored in the first obj + * @param {Record} rules - a Map input object of strict rules that's + * included while constructing the `SubscriptionFilter` expression object. These strict rules are + * included in the `SubscriptionFilter` expression object in such a way that at least one of the + * rules will be satisfied to pass the subscription filter. 
+ * @returns {SubscriptionFilter} Subscription Filter expression object + */ + toSubscriptionFilter>(obj: SubscriptionFilterObject, ignoredFields?: SubscriptionFilterExcludeKeysType, rules?: SubscriptionFilterRuleObject): SubscriptionFilter; +}; +export {}; diff --git a/graphql/node_modules/@aws-appsync/utils/lib/transform-utils.js b/graphql/node_modules/@aws-appsync/utils/lib/transform-utils.js new file mode 100644 index 00000000..40ad0367 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/transform-utils.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=transform-utils.js.map \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/transform-utils.js.map b/graphql/node_modules/@aws-appsync/utils/lib/transform-utils.js.map new file mode 100644 index 00000000..04e4b544 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/transform-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"transform-utils.js","sourceRoot":"","sources":["../src/transform-utils.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/type-utils.d.ts b/graphql/node_modules/@aws-appsync/utils/lib/type-utils.d.ts new file mode 100644 index 00000000..f772531e --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/type-utils.d.ts @@ -0,0 +1,8 @@ +export type Prettify = { + [k in keyof T]: T[k]; +} & {}; +/** + * Util type decrement the number. Supports number upto 9. Helps in preventing + * excessive recursion when using recursive types and no type information is passed + */ +export type Decrement = S extends '9' ? '8' : S extends '8' ? '7' : S extends '7' ? '6' : S extends '6' ? '5' : S extends '5' ? '4' : S extends '4' ? '3' : S extends '3' ? '2' : S extends '2' ? '1' : never; diff --git a/graphql/node_modules/@aws-appsync/utils/lib/type-utils.js b/graphql/node_modules/@aws-appsync/utils/lib/type-utils.js new file mode 100644 index 00000000..3cc79891 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/type-utils.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=type-utils.js.map \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/type-utils.js.map b/graphql/node_modules/@aws-appsync/utils/lib/type-utils.js.map new file mode 100644 index 00000000..a029f162 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/type-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"type-utils.js","sourceRoot":"","sources":["../src/type-utils.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/xml-utils.d.ts b/graphql/node_modules/@aws-appsync/utils/lib/xml-utils.d.ts new file mode 100644 index 00000000..f39aed94 --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/xml-utils.d.ts @@ -0,0 +1,46 @@ +export type XmlUtils = { + /** + * Converts an XML string to a Dictionary. + * ``` + * Input: + * + * + * + * + * 1 + * Getting started with GraphQL + * + * + * + * Output (JSON representation): + * + * { + * "posts":{ + * "post":{ + * "id":1, + * "title":"Getting started with GraphQL" + * } + * } + * } + * ``` + * @param {string} xml - XML string + * @returns Record - Object representation of XML + */ + toMap(xml: string): Record | null; + /** + * Converts an XML string to a JSON string. This is similar to toMap, except that the output is + * a string. 
This is useful if you want to directly convert and return the XML response from an + * HTTP object to JSON. + * @param {string} xml - XML string + * @returns {string} - JSON representation of XML + */ + toJsonString(xml: string): string; + /** + * Converts an XML string to a JSON string with an optional Boolean parameter to determine if + * you want to string-encode the JSON. + * @param {string} xml - XML string + * @param {boolean} stringEncode - Boolean to determine if you want to string-encode the JSON + * @returns {string} - String encoded JSON representation of XML + */ + toJsonString(xml: string, stringEncode: boolean): string; +}; diff --git a/graphql/node_modules/@aws-appsync/utils/lib/xml-utils.js b/graphql/node_modules/@aws-appsync/utils/lib/xml-utils.js new file mode 100644 index 00000000..05cca43e --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/xml-utils.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=xml-utils.js.map \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/lib/xml-utils.js.map b/graphql/node_modules/@aws-appsync/utils/lib/xml-utils.js.map new file mode 100644 index 00000000..a2150dcf --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/lib/xml-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"xml-utils.js","sourceRoot":"","sources":["../src/xml-utils.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/package.json b/graphql/node_modules/@aws-appsync/utils/package.json new file mode 100644 index 00000000..aa4f8fbc --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/package.json @@ -0,0 +1,31 @@ +{ + "name": "@aws-appsync/utils", + "version": "1.9.0", + "description": "This project contains type definitions for AppSync resolver types.", + "main": "lib/index.js", + "license": "Apache-2.0", + "types": "./lib/index.d.ts", + "author": "Amazon Web Services", + "keywords": [ + "graphql", + "appsync", + "aws", + "types" + ], + "publishConfig": { + "access": "public" + }, + "devDependencies": { + "tsd": "^0.29.0", + "typescript": "^5.1.0" + }, + "scripts": { + "clean": "rimraf lib", + "build": "tsc", + "test": "tsd" + }, + "tsd": { + "directory": "tests-d" + }, + "gitHead": "56168d0b2066ff174113c1e0132c0fe42b7f6cdc" +} diff --git a/graphql/node_modules/@aws-appsync/utils/rds.d.ts b/graphql/node_modules/@aws-appsync/utils/rds.d.ts new file mode 100644 index 00000000..551fa30d --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/rds.d.ts @@ -0,0 +1 @@ +export * from './lib/rds-helpers'; \ No newline at end of file diff --git a/graphql/node_modules/@aws-appsync/utils/rds.js b/graphql/node_modules/@aws-appsync/utils/rds.js new file mode 100644 index 00000000..551fa30d --- /dev/null +++ b/graphql/node_modules/@aws-appsync/utils/rds.js @@ -0,0 +1 @@ +export * from './lib/rds-helpers'; \ No newline at end of file diff --git a/graphql/package-lock.json b/graphql/package-lock.json new file mode 100644 index 00000000..d45b4c7b --- /dev/null +++ b/graphql/package-lock.json @@ -0,0 +1,27 @@ +{ + "name": "graphql", + "version": "1.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "graphql", + "version": "1.0.0", + "dependencies": { + "@aws-appsync/utils": "^1.7.0" + } + }, + "node_modules/@aws-appsync/utils": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@aws-appsync/utils/-/utils-1.9.0.tgz", + "integrity": 
"sha512-TAZNHiMpJKafrur6sE0Ou48gbuRL1oJXUDfrnBbYAXDVF3+mzD/0QCRp91F/dcAUbbNkFaq8pAwLEBTmpvmm9g==" + } + }, + "dependencies": { + "@aws-appsync/utils": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@aws-appsync/utils/-/utils-1.9.0.tgz", + "integrity": "sha512-TAZNHiMpJKafrur6sE0Ou48gbuRL1oJXUDfrnBbYAXDVF3+mzD/0QCRp91F/dcAUbbNkFaq8pAwLEBTmpvmm9g==" + } + } +} diff --git a/graphql/package.json b/graphql/package.json new file mode 100644 index 00000000..fa3dfb01 --- /dev/null +++ b/graphql/package.json @@ -0,0 +1,7 @@ +{ + "name": "graphql", + "version": "1.0.0", + "dependencies": { + "@aws-appsync/utils": "^1.7.0" + } +} diff --git a/graphql/schema.graphql b/graphql/schema.graphql index 16067205..bfb4a720 100644 --- a/graphql/schema.graphql +++ b/graphql/schema.graphql @@ -1,21 +1,17 @@ type Mutation { - createGame(input: CreateGameInput!): Game! + createGame(input: CreateGameInput!): Game! @aws_cognito_user_pools } type Query { - getGame(input: ID!): Game! + getGame(input: ID!): Game! @aws_cognito_user_pools } input CreateGameInput { name: String! description: String - publicNotes: String - privateNotes: String - fireflyUserId: ID! - players: [ID!]! } -type Game { +type Game @aws_cognito_user_pools { id: ID! name: String! description: String diff --git a/terraform/environment/wildsea-dev/.terraform.lock.hcl b/terraform/environment/wildsea-dev/.terraform.lock.hcl index 169a35a3..4360bd0c 100644 --- a/terraform/environment/wildsea-dev/.terraform.lock.hcl +++ b/terraform/environment/wildsea-dev/.terraform.lock.hcl @@ -22,3 +22,41 @@ provider "registry.terraform.io/hashicorp/aws" { "zh:ffb40a66b4d000a8ee4c54227eeb998f887ad867419c3af7d3981587788de074", ] } + +provider "registry.terraform.io/hashicorp/local" { + version = "2.5.1" + hashes = [ + "h1:8oTPe2VUL6E2d3OcrvqyjI4Nn/Y/UEQN26WLk5O/B0g=", + "zh:0af29ce2b7b5712319bf6424cb58d13b852bf9a777011a545fac99c7fdcdf561", + "zh:126063ea0d79dad1f68fa4e4d556793c0108ce278034f101d1dbbb2463924561", + "zh:196bfb49086f22fd4db46033e01655b0e5e036a5582d250412cc690fa7995de5", + "zh:37c92ec084d059d37d6cffdb683ccf68e3a5f8d2eb69dd73c8e43ad003ef8d24", + "zh:4269f01a98513651ad66763c16b268f4c2da76cc892ccfd54b401fff6cc11667", + "zh:51904350b9c728f963eef0c28f1d43e73d010333133eb7f30999a8fb6a0cc3d8", + "zh:73a66611359b83d0c3fcba2984610273f7954002febb8a57242bbb86d967b635", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:7ae387993a92bcc379063229b3cce8af7eaf082dd9306598fcd42352994d2de0", + "zh:9e0f365f807b088646db6e4a8d4b188129d9ebdbcf2568c8ab33bddd1b82c867", + "zh:b5263acbd8ae51c9cbffa79743fbcadcb7908057c87eb22fd9048268056efbc4", + "zh:dfcd88ac5f13c0d04e24be00b686d069b4879cc4add1b7b1a8ae545783d97520", + ] +} + +provider "registry.terraform.io/hashicorp/null" { + version = "3.2.2" + hashes = [ + "h1:zT1ZbegaAYHwQa+QwIFugArWikRJI9dqohj8xb0GY88=", + "zh:3248aae6a2198f3ec8394218d05bd5e42be59f43a3a7c0b71c66ec0df08b69e7", + "zh:32b1aaa1c3013d33c245493f4a65465eab9436b454d250102729321a44c8ab9a", + "zh:38eff7e470acb48f66380a73a5c7cdd76cc9b9c9ba9a7249c7991488abe22fe3", + "zh:4c2f1faee67af104f5f9e711c4574ff4d298afaa8a420680b0cb55d7bbc65606", + "zh:544b33b757c0b954dbb87db83a5ad921edd61f02f1dc86c6186a5ea86465b546", + "zh:696cf785090e1e8cf1587499516b0494f47413b43cb99877ad97f5d0de3dc539", + "zh:6e301f34757b5d265ae44467d95306d61bef5e41930be1365f5a8dcf80f59452", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:913a929070c819e59e94bb37a2a253c228f83921136ff4a7aa1a178c7cce5422", + 
"zh:aa9015926cd152425dbf86d1abdbc74bfe0e1ba3d26b3db35051d7b9ca9f72ae", + "zh:bb04798b016e1e1d49bcc76d62c53b56c88c63d6f2dfe38821afef17c416a0e1", + "zh:c23084e1b23577de22603cff752e59128d83cfecc2e6819edadd8cf7a10af11e", + ] +} diff --git a/terraform/environment/wildsea-dev/plan b/terraform/environment/wildsea-dev/plan deleted file mode 100644 index 43dadce6d187308d3ccc29cf1314a4f517378156..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 20576 zcmd4XQ;;ZKmoDnEZQHi(T4md|ZQHhOyH?q@ZQD3s_aEK+-}^-GIF~0g=1tyY=7^YM z=6v#%mjVVs0f6}DTndEO0Qk293;+_q+0@?Jz*a>W6aX+ju*Y=C%gS17+~u#y$t6r-|0ApHg-89+p@c4i3}QY#kB~?lL;PuLZfkIo{Tfz+_P9W=T{sxTtm(o zZSoQ_%h6ck$kut^+)NRGSVRi#q9ehc)(}ald`SwTKPRa&sluLA#_8(e@AnZ;_I{q# zznw9(<=n%CmZETBG i#{~VU$?M@8;d_Pd9+J|E;D`u`O$UdUNT)-at6>eGQK(5% z)Jy}V?=yxQ4$_wTtl}ndHo_yBHmfZ3uIkb4WX&=jv^MU42nLPmG6ojLiteOsIAe3Z zNR<>~za|T|q0ctKwfJ~2-uLzvef}1TK5++xOU#d;5od%NCUO`Fk#5==P;ySUBlSMU zn%=d{FRq%*2jEA4?jH8PDu!l~wK$C#f~1Ns))mbMFtV`Ozijor4V8gN&%wBHBrioQmkS!elsA{i*{Bp)6kGZ!70)P$A95jy_!;$)FZJC#IZUce@F0pA*l4#&~<1 zK`BF;kjD0EJO6`{m|_+~ z$gcb2MpM}UJg@AAy6h5uv!x)}>q$cu&7+Qi>JL3}WlohDuG{m5&H6+?2LUf)^YKgk z`+jiaa??fMI@OKwUWA@^!~Fq(4d!HnX$TD&pSaPn-gX;G0v*lMT2b$v)2Rd?+k^rE z1g+$>v!Coh6(ytl=(%=vlmR>DS_5-ezxo57uD&FO_ZUy@PaL$ak(uRl%50r!4P|rN zXWjH>9JB#SN&_9-$0>kBBfjr!3YvuijWVI0+)T@fy@nKYY)z#RjZ{{^sUXD}opHUK zGiUrIl9z-htqEUGjR`fYrfYxFx>M7L^k?EV@!r^sW(Qk}gzR9u!dT|$m5H(RF zoB>+N%eE#oA_E^s_EiR^m&-m*AOLHRdF0shA_IG8Zn%PJZnYVr@^iYkP4l_Nn5*r^ z1w|j(pW8M8vGv-t!b+(*mBE_;qGgn8(<*%p&XAVbbP#MC zqlxI&bGs0-uWL`TuQ1KoNz)DHv;a;d=BiE|i!vYr!`5^SB@$e66^euJc=et*@fJ9Q z%=q=!Tf{NL)nvZ03|EILAL2oB@X?w4#~|do{N9A0fK6SeKL6)RxnxSBq>Use-#UQf zOO-r>I?>@(eKP(#E>-Hg2NvKE9P>v?@gHY+!u#E8yZ6oSguJWY6I@m|{a;FrN|umG zF#Gd{Nu@hj+;0=kCQ>3x)M49+D{+!}?)4q?G5YrycNunQfruMdb6Qx;(j+`e-Vks<&=ZF8L8wGC|+SG7*}3HaIg9vPhE95CF>5 z2?{HHCfkphk60#{`yCOYog4}0A?)E9EzvWt0Qp2ed-vDGubqNaKW>co)gZao?*}e6 z7=(e<0fEcc8Vf%t5XO>g3aduS`k+WZ1onV^^6hPwvp>4-N~^MEp2kqPjOo>Xv+PJ z;2rDxx1N!y)&0$*T*{_vZurGv zp<**d4FO3;z=w7D5ebXHgA_it2dIDdx8C|Nuw>%m^6)V)S7EJCVGoUpGF!8OvD?jF zm+tfaS#FYoedaGFo}8u00?0kdA31H0x9FnwRQ|L=dCwbjo=jkQZ=Yn~apFKlE`FRb z$b^7w0)S*Q)19O~n-nh8JB~RbywghkT$l~GENz296pC=7XovtN4qwUe_qD6>LUpjE z7@C@0v>ptfp?YyXKqvg-zYC!%7H&#%FNxSmmq%QJyVXVT+uFV-os1=&*lj-psr%Fv zgpQ0HPOHY9ZJ^S%P0>OfG{C8CU+-6r&V({r%~gnPWfJw|Y_A)W4{x zMeg6!#*&Z)@6F_f0Sa3}+-_^}@naXVCS;d`Lb^`sr%X z>D$wA2JH3x$h%h62G{!K=5)lLnRoM9nEJMQ0Ug$4d0cKlabrlR4IO`Racldy-C2mB zSiIobhrwKc)+Nr_0(+s){Li9^3$B}=tLU}hsSWz1b ztwNwP%VY~clplYED;$EdO7V`v?UA)X))JI7TdlD+!U=(dx=}FA*VNx@wts#HB=F_c zuh;!G+=KPt1!o_1aSk%UpmsWYnw$6DdA#wI2(5`eA~w-!zkfwt2N(C$ePYyI12=-2 zY>C;HcLjL$yS4QYH=W=I(ZACJUY|wt>IZjxuRMCMT((g*g)yZ6eZP+cm08d z+~fKD`1C}AkfTPxY8YvzqLG7gdnG~YR^7N6ScH3iaYNh?n%PkVQ4{dzhJc{5$i5EN zB}Wz>{>k|h>a|1+3O!hgfBUbO$)>gCyM@w`U1o2?D)hsjXHbv0CK?Nqmij|^ZuZB9 z4t^BmnIR_15q0{q*sXq|0fqtz_U8#d9@tR0nT-W`s}hnrw^*5B-My!$1Bk@Mp_6%SJ% zDXdZQI%Q*kp3_se)%rt%<;6))hq3;rtw6&#E~2Jeo|bF`)5D$oCAZWS(mNV_r+C zncze8PIn_)IG8dy7}%%P)2qO7N5V%uC*M(fP4u36a@+WQm_UoUw_9 zpl_9JR+r1*9|@N!9K&iEz_!68()B9`yK8dH& z?jJHbAoPkAcCgR5UJbsujf@=>SZ=#*?dzK59rbA&p9)L03f1Rd1<~!!ig=b>@c2kR z&7UXka2}T@w@MeDw~DjS@n}ibV!3bm9N_oFX6csF*|=9DNMvQnvX*79jGY}v)`u^l z=S#Y>gPg3gk%gkVY}^eTcwI;PGzwS5*zKqxx}y${4)v?d;KJ_UP47vhK{%Uiw^tQw zUuyzh7Djhd{l|giwf!xedddn=Ze8Q7gd7bDc|qo-;zswwrMa<)r?~}|(c${`+J4Z@ zT~H{T#EszJ&oMvN>m6dBwWS9^r6b-X0v=2mqmQfr5v~mW{ey$Pc;Z z*mpQ0$8`kpLhwe3&6gL{PT>RY!D*}I>UT`wB zL@Xc%_J@8dtDQ^7P^CoZNC#826vC=-Nw&Y*dB6>kN_LbBl@HIA-vJNLJ0E-pfwl0Q z>1*(;s8Y=8!8X)_y9~-6t-+18G`cUsOFye&i)LaUa&KcXtPJMIm_S~x_qXukFHy0n 
diff --git a/terraform/module/iac-roles/policy.tf b/terraform/module/iac-roles/policy.tf index e1c8ffd7..ca5d900b 100644 --- a/terraform/module/iac-roles/policy.tf +++ b/terraform/module/iac-roles/policy.tf @@ -46,9 +46,9 @@ data "aws_iam_policy_document" "ro" { sid = "CognitoIdpGlobal" actions = [ "cognito-idp:DescribeUserPoolDomain", - "appsync:SetWebACL", "wafv2:GetWebACLForResource", "wafv2:GetWebAcl", + "appsync:GetResolver", ] resources = [ "*" @@ -194,6 +194,10 @@ data "aws_iam_policy_document" "rw" { "cognito-identity:SetIdentityPoolRoles", "cognito-identity:TagResource", "cognito-identity:UntagResource", + "appsync:CreateResolver", + "appsync:DeleteResolver", + "appsync:UpdateResolver", + "appsync:SetWebACL", ] resources = [ "*" @@ -389,6 +393,10 @@ data "aws_iam_policy_document" "rw_boundary" { "appsync:SetWebACL", "wafv2:GetWebACLForResource", "wafv2:GetWebAcl", + "appsync:CreateResolver", + "appsync:DeleteResolver", + "appsync:UpdateResolver", + "appsync:GetResolver", ] resources = [ "*" diff --git a/terraform/module/state-bucket/s3.tf b/terraform/module/state-bucket/s3.tf index ebcb36bc..41276820 100644 --- a/terraform/module/state-bucket/s3.tf +++ b/terraform/module/state-bucket/s3.tf @@ -1,8 +1,4 @@ resource "aws_s3_bucket" "state" { - # checkov:skip=CKV_AWS_18:Access logging is overkill for us - # checkov:skip=CKV_AWS_144:Cross-Region replication not required - # checkov:skip=CKV2_AWS_62:Event notifications not required - # checkov:skip=CKV_AWS_145:AWS Key is sufficient bucket = "terraform-state-${data.aws_caller_identity.current.account_id}" force_destroy = false @@ -55,6 +51,33 @@ resource "aws_s3_bucket_versioning" "state" { } } +resource "aws_s3_bucket_policy" "state" { + bucket = aws_s3_bucket.state.id + + policy = jsonencode({ + Version = "2012-10-17" + Id = "disallow-http" + Statement = [ + { + Sid = "HTTPSOnly" + Effect = "Deny" + Principal = "*" + Action = "s3:*" + Resource = [ + aws_s3_bucket.state.arn, + "${aws_s3_bucket.state.arn}/*", + ] + Condition = { + Bool = { + "aws:SecureTransport" = "false"
+ } + } + }, + ] + }) +} + + output "arn" { description = "ARN of the state bucket" value = aws_s3_bucket.state.arn diff --git a/terraform/module/wildsea/cognito.tf b/terraform/module/wildsea/cognito.tf index bc16d35f..d4166544 100644 --- a/terraform/module/wildsea/cognito.tf +++ b/terraform/module/wildsea/cognito.tf @@ -27,7 +27,7 @@ resource "aws_cognito_identity_provider" "idp" { resource "aws_cognito_user_pool_client" "cognito" { name = var.prefix user_pool_id = aws_cognito_user_pool.cognito.id - generate_secret = true + generate_secret = false explicit_auth_flows = ["ALLOW_REFRESH_TOKEN_AUTH", "ALLOW_USER_PASSWORD_AUTH", "ALLOW_USER_SRP_AUTH"] allowed_oauth_flows_user_pool_client = true callback_urls = ["https://TODO"] diff --git a/terraform/module/wildsea/graphql.tf b/terraform/module/wildsea/graphql.tf index 0ff576bc..cef72559 100644 --- a/terraform/module/wildsea/graphql.tf +++ b/terraform/module/wildsea/graphql.tf @@ -226,3 +226,62 @@ resource "aws_iam_role_policy_attachment" "graphql_datasource" { role = aws_iam_role.graphql_datasource.name policy_arn = aws_iam_policy.graphql_datasource.arn } + +locals { + mutations = distinct([for d in fileset("${path.module}/../../../graphql/mutation", "**") : dirname(d)]) + queries = distinct([for d in fileset("${path.module}/../../../graphql/query", "**") : dirname(d)]) + + mutations_map = { + for mutation in local.mutations : replace(mutation, "../../../graphql/mutation/", "") => { + "type" : "Mutation", + "path" : "../../../graphql/mutation/${mutation}/appsync.js", + "make" : "graphql/mutation/${mutation}/appsync.js", + "source" : "../../../graphql/mutation/${mutation}/appsync.ts" + } + } + + queries_map = { + for query in local.queries : replace(query, "../../../graphql/query/", "") => { + "type" : "Query", + "path" : "../../../graphql/query/${query}/appsync.js", + "make" : "graphql/query/${query}/appsync.js", + "source" : "../../../graphql/query/${query}/appsync.ts" + } + } + + all = merge(local.mutations_map, local.queries_map) +} + +resource "aws_appsync_resolver" "resolver" { + for_each = local.all + + api_id = aws_appsync_graphql_api.graphql.id + type = each.value.type + field = each.key + data_source = aws_appsync_datasource.graphql.name + code = data.local_file.graphql_code[each.key].content + + runtime { + name = "APPSYNC_JS" + runtime_version = "1.0.0" + } +} + +resource "null_resource" "graphql_compile" { + for_each = local.all + + provisioner "local-exec" { + command = "cd ${path.module}/../../.. && make ${each.value.make}" + } + + triggers = { + source_change = filesha256(each.value.source) + dest_file = each.value.path + } +} + +data "local_file" "graphql_code" { + for_each = local.all + + filename = null_resource.graphql_compile[each.key].triggers.dest_file +}
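
For readers following the resolver wiring in graphql.tf above: each entry in local.all points at a graphql/<mutation|query>/<field>/appsync.ts source that make compiles to appsync.js before Terraform loads it into an APPSYNC_JS resolver. A minimal sketch of such a handler against the DynamoDB data source is shown below. It is illustrative only, not the createGame resolver shipped in this patch; the PK/SK key layout and attribute names are assumptions.

```typescript
// Illustrative APPSYNC_JS handler sketch, not the patch's actual createGame code.
// Assumed key layout (PK/SK) and attribute names; adjust to the real table schema.
import { util, Context } from '@aws-appsync/utils';

export function request(ctx: Context) {
  const id = util.autoId();                      // generate a new game id
  return {
    operation: 'PutItem',                        // DynamoDBPutItemRequest shape (see resolver-return-types.d.ts)
    key: util.dynamodb.toMapValues({ PK: `GAME#${id}`, SK: 'GAME' }),
    attributeValues: util.dynamodb.toMapValues({
      id,
      name: ctx.arguments.input.name,
      description: ctx.arguments.input.description,
      createdAt: util.time.nowISO8601(),         // TimeUtils.nowISO8601 documented above
    }),
  };
}

export function response(ctx: Context) {
  return ctx.result;                             // the stored item becomes the Game payload
}
```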
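The TransformUtils helpers documented above slot into a Query request in the same style. The sketch below assumes a hypothetical list field with an optional filter argument and an assumed partition key value; neither is part of this patch.

```typescript
// Illustrative Query handler sketch; the list field, its filter argument and the
// PK value are assumptions for the example.
import { util, Context } from '@aws-appsync/utils';

export function request(ctx: Context) {
  return {
    operation: 'Query',
    query: {
      expression: 'PK = :pk',
      expressionValues: util.dynamodb.toMapValues({ ':pk': 'GAME' }),
    },
    // toDynamoDBFilterExpression returns a stringified expression object (see TransformUtils above),
    // so it is parsed back into the { expression, expressionNames, expressionValues } shape.
    filter: ctx.arguments.filter
      ? JSON.parse(util.transform.toDynamoDBFilterExpression(ctx.arguments.filter))
      : undefined,
  };
}

export function response(ctx: Context) {
  return ctx.result.items;
}
```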
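Finally, the SubscriptionFilter produced by util.transform.toSubscriptionFilter (documented above) is consumed by extensions.setSubscriptionFilter in a subscription response handler. The schema in this patch does not yet define subscriptions, so the onGameUpdate field and gameId argument below are assumptions.

```typescript
// Illustrative enhanced-subscription-filter sketch; onGameUpdate and gameId are
// assumptions, since this patch's schema does not define a Subscription type yet.
import { util, extensions, Context } from '@aws-appsync/utils';

export function request(ctx: Context) {
  return { payload: null };                      // no data source work for the subscription itself
}

export function response(ctx: Context) {
  extensions.setSubscriptionFilter(
    util.transform.toSubscriptionFilter({ gameId: { eq: ctx.arguments.gameId } }),
  );
  return null;
}
```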