diff --git a/.env.example b/.env.example
index ef95f7528..b8122675c 100644
--- a/.env.example
+++ b/.env.example
@@ -20,6 +20,9 @@ DB_PAGE_LIMIT=50
 DB_ENCRYPTION_KEY=
 # Uncomment below if you wish to run DB migrations manually.
 #DB_MANUAL_MIGRATION=true
+# Specify postgres schema to use. In production, you will need to create the schema first
+# to use this option. Jackson will not create it for you.
+POSTGRES_SCHEMA=
 
 # Admin Portal settings
 # SMTP details for Magic Links
diff --git a/lib/env.ts b/lib/env.ts
index 80aa50aec..af2c3786f 100644
--- a/lib/env.ts
+++ b/lib/env.ts
@@ -50,6 +50,9 @@ const db: DatabaseOption = {
     writeCapacityUnits: process.env.DB_DYNAMODB_RCUS ? Number(process.env.DB_DYNAMODB_WCUS) : undefined,
   },
   manualMigration: process.env.DB_MANUAL_MIGRATION === 'true',
+  postgres: {
+    schema: process.env.POSTGRES_SCHEMA,
+  },
 };
 
 /** Indicates if the Jackson instance is hosted (i.e. not self-hosted) */
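Note on the two changes above: they only thread the new environment variable into the existing db options object. A minimal sketch of the relevant slice of that object when the variable is set (the value 'sso' is a made-up example; all other DatabaseOption fields are unchanged):

  // Illustration only: the shape produced by lib/env.ts when POSTGRES_SCHEMA=sso is set
  const db = {
    manualMigration: process.env.DB_MANUAL_MIGRATION === 'true',
    postgres: {
      schema: process.env.POSTGRES_SCHEMA, // 'sso' here; undefined when the variable is unset
    },
  };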
diff --git a/npm/migration/postgres/1640877103193-Initial.ts b/npm/migration/postgres/1640877103193-Initial.ts
index 3eb48d67f..3e61b53d2 100644
--- a/npm/migration/postgres/1640877103193-Initial.ts
+++ b/npm/migration/postgres/1640877103193-Initial.ts
@@ -1,26 +1,30 @@
 import {MigrationInterface, QueryRunner} from "typeorm";
 
+const schema = process.env.POSTGRES_SCHEMA || "public";
+const jacksonStoreTableName = `${schema}.jackson_store`;
+const jacksonIndexTableName = `${schema}.jackson_index`;
+const jacksonTTLTableName = `${schema}.jackson_ttl`;
+
 export class Initial1640877103193 implements MigrationInterface {
     name = 'Initial1640877103193'
 
     public async up(queryRunner: QueryRunner): Promise<void> {
-        await queryRunner.query(`CREATE TABLE "jackson_store" ("key" character varying(1500) NOT NULL, "value" text NOT NULL, "iv" character varying(64), "tag" character varying(64), CONSTRAINT "PK_87b6fc1475fbd1228d2f53c6f4a" PRIMARY KEY ("key"))`);
-        await queryRunner.query(`CREATE TABLE "jackson_index" ("id" SERIAL NOT NULL, "key" character varying(1500) NOT NULL, "storeKey" character varying(1500) NOT NULL, CONSTRAINT "PK_a95aa83f01e3c73e126856b7820" PRIMARY KEY ("id"))`);
-        await queryRunner.query(`CREATE INDEX "_jackson_index_key" ON "jackson_index" ("key") `);
-        await queryRunner.query(`CREATE INDEX "_jackson_index_key_store" ON "jackson_index" ("key", "storeKey") `);
-        await queryRunner.query(`CREATE TABLE "jackson_ttl" ("key" character varying(1500) NOT NULL, "expiresAt" bigint NOT NULL, CONSTRAINT "PK_7c9bcdfb4d82e873e19935ec806" PRIMARY KEY ("key"))`);
-        await queryRunner.query(`CREATE INDEX "_jackson_ttl_expires_at" ON "jackson_ttl" ("expiresAt") `);
-        await queryRunner.query(`ALTER TABLE "jackson_index" ADD CONSTRAINT "FK_937b040fb2592b4671cbde09e83" FOREIGN KEY ("storeKey") REFERENCES "jackson_store"("key") ON DELETE CASCADE ON UPDATE NO ACTION`);
+        await queryRunner.query(`CREATE TABLE ${jacksonStoreTableName} ("key" character varying(1500) NOT NULL, "value" text NOT NULL, "iv" character varying(64), "tag" character varying(64), CONSTRAINT "PK_87b6fc1475fbd1228d2f53c6f4a" PRIMARY KEY ("key"))`);
+        await queryRunner.query(`CREATE TABLE ${jacksonIndexTableName} ("id" SERIAL NOT NULL, "key" character varying(1500) NOT NULL, "storeKey" character varying(1500) NOT NULL, CONSTRAINT "PK_a95aa83f01e3c73e126856b7820" PRIMARY KEY ("id"))`);
+        await queryRunner.query(`CREATE INDEX "_jackson_index_key" ON ${jacksonIndexTableName} ("key") `);
+        await queryRunner.query(`CREATE INDEX "_jackson_index_key_store" ON ${jacksonIndexTableName} ("key", "storeKey") `);
+        await queryRunner.query(`CREATE TABLE ${jacksonTTLTableName} ("key" character varying(1500) NOT NULL, "expiresAt" bigint NOT NULL, CONSTRAINT "PK_7c9bcdfb4d82e873e19935ec806" PRIMARY KEY ("key"))`);
+        await queryRunner.query(`CREATE INDEX "_jackson_ttl_expires_at" ON ${jacksonTTLTableName} ("expiresAt") `);
+        await queryRunner.query(`ALTER TABLE ${jacksonIndexTableName} ADD CONSTRAINT "FK_937b040fb2592b4671cbde09e83" FOREIGN KEY ("storeKey") REFERENCES ${jacksonStoreTableName}("key") ON DELETE CASCADE ON UPDATE NO ACTION`);
     }
 
     public async down(queryRunner: QueryRunner): Promise<void> {
-        await queryRunner.query(`ALTER TABLE "jackson_index" DROP CONSTRAINT "FK_937b040fb2592b4671cbde09e83"`);
-        await queryRunner.query(`DROP INDEX "public"."_jackson_ttl_expires_at"`);
-        await queryRunner.query(`DROP TABLE "jackson_ttl"`);
-        await queryRunner.query(`DROP INDEX "public"."_jackson_index_key_store"`);
-        await queryRunner.query(`DROP INDEX "public"."_jackson_index_key"`);
-        await queryRunner.query(`DROP TABLE "jackson_index"`);
-        await queryRunner.query(`DROP TABLE "jackson_store"`);
+        await queryRunner.query(`ALTER TABLE ${jacksonIndexTableName} DROP CONSTRAINT "FK_937b040fb2592b4671cbde09e83"`);
+        await queryRunner.query(`DROP INDEX ${schema}."_jackson_ttl_expires_at"`);
+        await queryRunner.query(`DROP TABLE ${jacksonTTLTableName}`);
+        await queryRunner.query(`DROP INDEX ${schema}."_jackson_index_key_store"`);
+        await queryRunner.query(`DROP INDEX ${schema}."_jackson_index_key"`);
+        await queryRunner.query(`DROP TABLE ${jacksonIndexTableName}`);
+        await queryRunner.query(`DROP TABLE ${jacksonStoreTableName}`);
     }
-
 }
diff --git a/npm/migration/postgres/1644332647279-createdAt.ts b/npm/migration/postgres/1644332647279-createdAt.ts
index 94da61d2a..fbb77c51b 100644
--- a/npm/migration/postgres/1644332647279-createdAt.ts
+++ b/npm/migration/postgres/1644332647279-createdAt.ts
@@ -1,16 +1,19 @@
 import {MigrationInterface, QueryRunner} from "typeorm";
 
+const schema = process.env.POSTGRES_SCHEMA || "public";
+const jacksonStoreTableName = `${schema}.jackson_store`;
+
 export class createdAt1644332647279 implements MigrationInterface {
     name = 'createdAt1644332647279'
 
     public async up(queryRunner: QueryRunner): Promise<void> {
-        await queryRunner.query(`ALTER TABLE "jackson_store" ADD "createdAt" TIMESTAMP NOT NULL DEFAULT now()`);
-        await queryRunner.query(`ALTER TABLE "jackson_store" ADD "modifiedAt" TIMESTAMP`);
+        await queryRunner.query(`ALTER TABLE ${jacksonStoreTableName} ADD "createdAt" TIMESTAMP NOT NULL DEFAULT now()`);
+        await queryRunner.query(`ALTER TABLE ${jacksonStoreTableName} ADD "modifiedAt" TIMESTAMP`);
     }
 
     public async down(queryRunner: QueryRunner): Promise<void> {
-        await queryRunner.query(`ALTER TABLE "jackson_store" DROP COLUMN "modifiedAt"`);
-        await queryRunner.query(`ALTER TABLE "jackson_store" DROP COLUMN "createdAt"`);
+        await queryRunner.query(`ALTER TABLE ${jacksonStoreTableName} DROP COLUMN "modifiedAt"`);
+        await queryRunner.query(`ALTER TABLE ${jacksonStoreTableName} DROP COLUMN "createdAt"`);
     }
 
 }
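Aside on the identifiers above (values are illustrative, not part of the patch): the table names are interpolated unquoted, so with POSTGRES_SCHEMA=sso the statements run against sso.jackson_store, sso.jackson_index and sso.jackson_ttl. Index names stay unqualified in CREATE INDEX because PostgreSQL always creates an index in the same schema as its table, while DROP INDEX takes a qualified name, which is why down() switches to the ${schema}."_jackson_index_key" form. The expansion itself is plain template-literal interpolation:

  // With POSTGRES_SCHEMA=sso the first statement of up() expands to:
  //   CREATE TABLE sso.jackson_store ("key" character varying(1500) NOT NULL, ...)
  const schema = process.env.POSTGRES_SCHEMA || 'public';  // 'sso' in this example
  const jacksonStoreTableName = `${schema}.jackson_store`; // 'sso.jackson_store'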
diff --git a/npm/migration/postgres/1692767993709-pg_namespace.ts b/npm/migration/postgres/1692767993709-pg_namespace.ts
index c14d3abea..e42b268f6 100644
--- a/npm/migration/postgres/1692767993709-pg_namespace.ts
+++ b/npm/migration/postgres/1692767993709-pg_namespace.ts
@@ -1,16 +1,19 @@
 import { MigrationInterface, QueryRunner } from "typeorm";
 
+const schema = process.env.POSTGRES_SCHEMA || "public";
+const jacksonStoreTableName = `${schema}.jackson_store`;
+
 export class PgNamespace1692767993709 implements MigrationInterface {
     name = 'PgNamespace1692767993709'
 
     public async up(queryRunner: QueryRunner): Promise<void> {
-        await queryRunner.query(`ALTER TABLE "jackson_store" ADD "namespace" character varying(64)`);
-        await queryRunner.query(`CREATE INDEX "_jackson_store_namespace" ON "jackson_store" ("namespace") `);
+        await queryRunner.query(`ALTER TABLE ${jacksonStoreTableName} ADD "namespace" character varying(64)`);
+        await queryRunner.query(`CREATE INDEX "_jackson_store_namespace" ON ${jacksonStoreTableName} ("namespace") `);
     }
 
     public async down(queryRunner: QueryRunner): Promise<void> {
-        await queryRunner.query(`DROP INDEX "public"."_jackson_store_namespace"`);
-        await queryRunner.query(`ALTER TABLE "jackson_store" DROP COLUMN "namespace"`);
+        await queryRunner.query(`DROP INDEX ${schema}."_jackson_store_namespace"`);
+        await queryRunner.query(`ALTER TABLE ${jacksonStoreTableName} DROP COLUMN "namespace"`);
     }
 
 }
diff --git a/npm/migration/postgres/1692817789888-namespace.ts b/npm/migration/postgres/1692817789888-namespace.ts
new file mode 100644
index 000000000..94c110411
--- /dev/null
+++ b/npm/migration/postgres/1692817789888-namespace.ts
@@ -0,0 +1,31 @@
+import { MigrationInterface, QueryRunner } from "typeorm"
+
+// This file is the same as npm/migration/sql/1692817789888-namespace.ts,
+// but with the Postgres schema name added.
+
+const schema = process.env.POSTGRES_SCHEMA || "public";
+const jacksonStoreTableName = `${schema}.jackson_store`;
+
+export class namespace1692817789888 implements MigrationInterface {
+    name = 'namespace1692817789888'
+
+    public async up(queryRunner: QueryRunner): Promise<void> {
+        const response = await queryRunner.query(`select jackson.key from ${jacksonStoreTableName} jackson`);
+        const searchTerm = ':';
+        for (const k in response) {
+            const key = response[k].key;
+            const tokens2 = key.split(searchTerm).slice(0, 2);
+            const value = tokens2.join(searchTerm);
+            await queryRunner.query(`update ${jacksonStoreTableName} set namespace = '${value}' where ${jacksonStoreTableName}.key = '${key}'`);
+        }
+    }
+
+    public async down(queryRunner: QueryRunner): Promise<void> {
+        const response = await queryRunner.query(`select jackson.key from ${jacksonStoreTableName} jackson`);
+        for (const k in response) {
+            const key = response[k].key;
+            await queryRunner.query(`update ${jacksonStoreTableName} set namespace = NULL where ${jacksonStoreTableName}.key = '${key}'`);
+        }
+    }
+
+}
diff --git a/npm/src/db/constants.ts b/npm/src/db/constants.ts
new file mode 100644
index 000000000..93bed1e31
--- /dev/null
+++ b/npm/src/db/constants.ts
@@ -0,0 +1 @@
+export const DEFAULT_POSTGRES_SCHEMA = 'public';
diff --git a/npm/src/db/defaultDb.ts b/npm/src/db/defaultDb.ts
index 6b935d726..aab4d1e2c 100644
--- a/npm/src/db/defaultDb.ts
+++ b/npm/src/db/defaultDb.ts
@@ -1,4 +1,5 @@
 import { JacksonOption } from '../typings';
+import { DEFAULT_POSTGRES_SCHEMA } from './constants';
 
 export default function defaultDb(opts: JacksonOption) {
   opts.db = opts.db || {};
@@ -12,6 +13,8 @@ export default function defaultDb(opts: JacksonOption) {
   opts.db.dynamodb.readCapacityUnits = opts.db.dynamodb.readCapacityUnits || 5;
   opts.db.dynamodb.writeCapacityUnits = opts.db.dynamodb.writeCapacityUnits || 5;
   opts.db.manualMigration = opts.db.manualMigration || false;
+  opts.db.postgres = opts.db.postgres || {};
+  opts.db.postgres.schema = opts.db.postgres.schema || DEFAULT_POSTGRES_SCHEMA;
 
   return opts;
 }
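For consumers of the npm package, the option surfaces as db.postgres.schema on JacksonOption and falls back to DEFAULT_POSTGRES_SCHEMA through defaultDb(). A minimal sketch of a Postgres configuration that opts into a non-default schema (connection URL, schema name and the import path are placeholders chosen for illustration):

  import { DatabaseOption } from '../typings'; // path as seen from npm/src/db

  const db: DatabaseOption = {
    engine: 'sql',
    type: 'postgres',
    url: 'postgresql://postgres:postgres@localhost:5432/postgres',
    postgres: {
      // Must already exist in production; Jackson only creates it when TypeORM
      // synchronization runs (see sql.ts below). Defaults to 'public'.
      schema: 'custom_schema',
    },
  };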
diff --git a/npm/src/db/sql/sql.ts b/npm/src/db/sql/sql.ts
index eee5a494e..bc5990ae2 100644
--- a/npm/src/db/sql/sql.ts
+++ b/npm/src/db/sql/sql.ts
@@ -6,8 +6,9 @@ import { DatabaseDriver, DatabaseOption, Index, Encrypted, Records, SortOrder }
 import { DataSource, DataSourceOptions, In, IsNull } from 'typeorm';
 import * as dbutils from '../utils';
 import * as mssql from './mssql';
+import { DEFAULT_POSTGRES_SCHEMA } from '../constants';
 
-class Sql implements DatabaseDriver {
+export class Sql implements DatabaseDriver {
   private options: DatabaseOption;
   private dataSource!: DataSource;
   private storeRepository;
@@ -26,6 +27,7 @@ class Sql implements DatabaseDriver {
   async init({ JacksonStore, JacksonIndex, JacksonTTL }): Promise<void> {
     const sqlType = this.options.engine === 'planetscale' ? 'mysql' : this.options.type!;
+    const postgresSchema = this.options.postgres?.schema || DEFAULT_POSTGRES_SCHEMA;
 
     // Synchronize by default for non-planetscale engines only if migrations are not set to run
     let synchronize = !this.options.manualMigration;
     if (this.options.engine === 'planetscale') {
@@ -53,14 +55,30 @@
           ...baseOpts,
         });
       } else {
-        this.dataSource = new DataSource({
+        const dataSourceOptions = {
          url: this.options.url,
          ssl: this.options.ssl,
          ...baseOpts,
-        });
+        };
+
+        if (sqlType === 'postgres') {
+          dataSourceOptions['synchronize'] = false;
+          dataSourceOptions['schema'] = postgresSchema;
+        }
+        this.dataSource = new DataSource(dataSourceOptions);
       }
+
       await this.dataSource.initialize();
 
+      if (sqlType === 'postgres' && synchronize) {
+        // TypeORM does not create the Postgres schema if it does not exist, so
+        // synchronization was disabled on the DataSource above; create the
+        // schema here and run synchronize manually when it is enabled.
+        const queryRunner = this.dataSource.createQueryRunner();
+        await queryRunner.query(`CREATE SCHEMA IF NOT EXISTS ${postgresSchema}`);
+        await this.dataSource.synchronize();
+      }
+
       break;
     } catch (err) {
       console.error(`error connecting to engine: ${this.options.engine}, type: ${sqlType} db: ${err}`);
diff --git a/npm/src/typings.ts b/npm/src/typings.ts
index 4835d23f0..469c7f638 100644
--- a/npm/src/typings.ts
+++ b/npm/src/typings.ts
@@ -418,6 +418,9 @@ export interface DatabaseOption {
     writeCapacityUnits?: number;
   };
   manualMigration?: boolean;
+  postgres?: {
+    schema?: string;
+  };
 }
 
 export interface JacksonOption {
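The driver change above leans on TypeORM's schema option for the Postgres driver. As a standalone illustration of that option outside Jackson (connection details and the schema name are placeholders, not Jackson's configuration):

  import { DataSource } from 'typeorm';

  // Minimal Postgres DataSource pinned to a non-default schema. With synchronize
  // disabled, table creation is left to migrations or to an explicit
  // dataSource.synchronize() call, which is what Jackson's Sql driver does above.
  const dataSource = new DataSource({
    type: 'postgres',
    url: 'postgresql://postgres:postgres@localhost:5432/postgres',
    schema: 'custom_schema',
    synchronize: false,
  });

  async function connect() {
    await dataSource.initialize();
    // The schema is not created automatically, so create it before any DDL runs.
    await dataSource.query(`CREATE SCHEMA IF NOT EXISTS custom_schema`);
  }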
diff --git a/npm/test/db/db.test.ts b/npm/test/db/db.test.ts
index fe376b463..008cc6de3 100644
--- a/npm/test/db/db.test.ts
+++ b/npm/test/db/db.test.ts
@@ -9,6 +9,7 @@ const dbObjs: { [key: string]: DatabaseDriver } = {};
 const connectionStores: Storable[] = [];
 const ttlStores: Storable[] = [];
 const ttl = 2;
+const non_default_schema = 'non_default';
 
 const record1 = {
   id: '1',
@@ -130,6 +131,12 @@
   {
     ...postgresDbConfig,
     encryptionKey,
   },
+  {
+    ...postgresDbConfig,
+    postgres: {
+      schema: non_default_schema,
+    },
+  },
   {
     ...mongoDbConfig,
   },
@@ -188,7 +195,11 @@ tap.before(async () => {
   for (const idx in dbs) {
     const opts = dbs[idx];
     const db = await DB.new(opts, true);
-    dbObjs[opts.engine! + (opts.type ? opts.type : '')] = db;
+    if (opts.type === 'postgres' && opts.postgres?.schema === non_default_schema) {
+      dbObjs[non_default_schema + opts.engine! + (opts.type ? opts.type : '')] = db;
+    } else {
+      dbObjs[opts.engine! + (opts.type ? opts.type : '')] = db;
+    }
 
     const randomSession = Date.now();
     connectionStores.push(db.store('saml:config:' + randomSession + randomBytes(4).toString('hex')));
@@ -201,15 +212,32 @@ tap.teardown(async () => {
 });
 
 tap.test('dbs', async () => {
+  // We need this to ensure that the non-default schema assertion runs at least once.
+  // It would be easy to skip it by mistake in the future if one of the
+  // conditions below changes and the omission goes unnoticed.
+  let has_non_default_postgres_schema_test_ran = false;
   for (const idx in connectionStores) {
     const connectionStore = connectionStores[idx];
     const ttlStore = ttlStores[idx];
     const dbEngine = dbs[idx].engine!;
-    let dbType = dbEngine;
+    let dbType = dbEngine.toString();
     if (dbs[idx].type) {
       dbType += ': ' + dbs[idx].type;
     }
 
+    tap.test('Test non-default postgres schema', (t) => {
+      if (dbType === 'sql: postgres' && dbs[idx].postgres?.schema === non_default_schema) {
+        t.same(
+          connectionStore['db']['db']['dataSource']['createQueryBuilder']()['connection']['options'][
+            'schema'
+          ],
+          non_default_schema
+        );
+        has_non_default_postgres_schema_test_ran = true;
+      }
+      t.end();
+    });
+
     tap.test('put(): ' + dbType, async () => {
       await connectionStore.put(
         record1.id,
@@ -527,4 +555,9 @@
       await value.close();
     }
   });
+
+  tap.test('Ensure the non-default postgres schema assertion has run at least once', (t) => {
+    t.same(has_non_default_postgres_schema_test_ran, true);
+    t.end();
+  });
 });
diff --git a/npm/typeorm.ts b/npm/typeorm.ts
index 7d8f495a8..6c3464088 100644
--- a/npm/typeorm.ts
+++ b/npm/typeorm.ts
@@ -1,6 +1,7 @@
 require('reflect-metadata');
 import { DataSource, DatabaseType, DataSourceOptions } from 'typeorm';
 import * as mssql from './src/db/sql/mssql';
+import { DEFAULT_POSTGRES_SCHEMA } from './src/db/constants';
 
 const type =
   process.env.DB_ENGINE === 'planetscale'
@@ -45,7 +46,7 @@ const baseOpts = {
   logging: 'all',
   entities: [`src/db/${entitiesDir}/entity/**/*.ts`],
   migrations:
-    type === 'mssql'
+    type === 'mssql' || type === 'postgres'
       ? [`migration/${migrationsDir}/**/*.ts`]
       : [`migration/${migrationsDir}/**/*.ts`, `migration/sql/**/*.ts`],
 };
@@ -62,14 +63,18 @@ if (type === 'mssql') {
     ...baseOpts,
   });
 } else {
-  AppDataSource = new DataSource({
+  const dataSourceOptions = {
     url:
       process.env.DB_URL ||
       process.env.DATABASE_URL ||
       'postgresql://postgres:postgres@localhost:5432/postgres',
     ssl,
     ...baseOpts,
-  });
+  };
+  if (type === 'postgres') {
+    dataSourceOptions['schema'] = process.env.POSTGRES_SCHEMA || DEFAULT_POSTGRES_SCHEMA;
+  }
+  AppDataSource = new DataSource(dataSourceOptions);
 }
 
 export default AppDataSource;
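Deployment note (an observation on the changes as a whole, not additional patch content): every Postgres migration above resolves the schema at module load, so POSTGRES_SCHEMA has to be present in the environment that runs the migrations (for example when DB_MANUAL_MIGRATION=true), not only in the Jackson runtime, and in production the schema itself must be created beforehand as the .env.example comment states. In short:

  // The postgres migrations capture the schema once, when the file is imported:
  const schema = process.env.POSTGRES_SCHEMA || 'public';
  // If the migration process starts without POSTGRES_SCHEMA set, the tables land
  // in 'public' even though the app may later be pointed at another schema.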