diff --git a/.babelrc b/.babelrc deleted file mode 100644 index e22cfb0e..00000000 --- a/.babelrc +++ /dev/null @@ -1,14 +0,0 @@ -{ - "presets": [ - [ - "next/babel", - { - "preset-env": { - "targets": { - "esmodules": true - } - } - } - ] - ] -} diff --git a/.eslintignore b/.eslintignore deleted file mode 100644 index e3b3fe77..00000000 --- a/.eslintignore +++ /dev/null @@ -1,34 +0,0 @@ -# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. - -# dependencies -/node_modules -/.pnp -.pnp.js - -# testing -/coverage - -# next.js -/.next/ -/out/ - -# production -/build - -# misc -.DS_Store -*.pem - -# debug -npm-debug.log* -yarn-debug.log* -yarn-error.log* - -# local env files -.env.local -.env.development.local -.env.test.local -.env.production.local - -# vercel -.vercel \ No newline at end of file diff --git a/.eslintrc b/.eslintrc deleted file mode 100644 index f518d6f0..00000000 --- a/.eslintrc +++ /dev/null @@ -1,51 +0,0 @@ -{ - "env": { - "browser": true, - "node": true - }, - "parser": "@typescript-eslint/parser", - "parserOptions": { - "ecmaVersion": 12, - "sourceType": "module", - "project": "./tsconfig.json" - }, - "extends": [ - "eslint:recommended", - "plugin:@typescript-eslint/recommended", - "next", - "prettier" - ], - "rules": { - "@typescript-eslint/no-floating-promises": "error", - "@typescript-eslint/naming-convention": [ - "error", - { - "selector": "memberLike", - "modifiers": ["public"], - "format": ["camelCase"], - "leadingUnderscore": "forbid" - } - ], - "eqeqeq": "error", - "no-var": "error", - "object-shorthand": "error", - "prefer-arrow-callback": "error", - "prefer-destructuring": [ - "error", - { - "VariableDeclarator": { - "object": true - } - }, - { - "enforceForRenamedProperties": false - } - ], - "no-unused-vars": "off", - "@typescript-eslint/no-unused-vars": [ - "error", - { "argsIgnorePattern": "^_" } - ] - }, - "plugins": ["react", "@typescript-eslint"] -} diff --git a/.gitignore b/.gitignore index e0a4dc30..7ba0f0b3 100644 --- a/.gitignore +++ b/.gitignore @@ -1,46 +1,109 @@ -# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
+# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +vite.config.ts.* -# dependencies -/node_modules -/.pnp -.pnp.js +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json -# testing -/coverage +# Runtime data +pids +*.pid +*.seed +*.pid.lock -# next.js -/.next/ -/out/ +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov -# production -/build +# Coverage directory used by tools like istanbul +coverage +*.lcov -# misc -.DS_Store -*.pem +# nyc test coverage +.nyc_output -# debug -npm-debug.log* -yarn-debug.log* -yarn-error.log* +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt -# local env files -.env.local -.env.development.local -.env.test.local -.env.production.local +# Bower dependency directory (https://bower.io/) +bower_components -# vercel -.vercel +# node-waf configuration +.lock-wscript -# react-designer -lib +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release -#tsc +# Dependency directories +node_modules/ +jspm_packages/ + +# TypeScript v1 declaration files +typings/ + +# TypeScript cache *.tsbuildinfo -# Supabase -**/supabase/.branches -**/supabase/.temp -**/supabase/.env +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Microbundle cache +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variables file .env +.env.test + +# parcel-bundler cache (https://parceljs.org/) +.cache + +# Next.js build output +.next + +# Nuxt.js build / generate output +.nuxt +dist + +# Gatsby files +.cache/ +# Comment in the public line in if your project uses Gatsby and *not* Next.js +# https://nextjs.org/blog/next-9-1#public-directory-support +# public + +# vuepress build output +.vuepress/dist + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# TernJS port file +.tern-port + + +.DS_Store +archive.sh \ No newline at end of file diff --git a/.jshintrc b/.jshintrc deleted file mode 100644 index d9fb1898..00000000 --- a/.jshintrc +++ /dev/null @@ -1,6 +0,0 @@ -{ - "node": true, - "browser": true, - "esnext": true, - "newcap": false -} diff --git a/.npmrc b/.npmrc deleted file mode 100644 index 37cfe26a..00000000 --- a/.npmrc +++ /dev/null @@ -1 +0,0 @@ -unsafe-perm = true \ No newline at end of file diff --git a/.prettierignore b/.prettierignore deleted file mode 100644 index e3b3fe77..00000000 --- a/.prettierignore +++ /dev/null @@ -1,34 +0,0 @@ -# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
- -# dependencies -/node_modules -/.pnp -.pnp.js - -# testing -/coverage - -# next.js -/.next/ -/out/ - -# production -/build - -# misc -.DS_Store -*.pem - -# debug -npm-debug.log* -yarn-debug.log* -yarn-error.log* - -# local env files -.env.local -.env.development.local -.env.test.local -.env.production.local - -# vercel -.vercel \ No newline at end of file diff --git a/.prettierrc.json b/.prettierrc.json deleted file mode 100644 index b0646c18..00000000 --- a/.prettierrc.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "singleQuote": false, - "trailingComma": "es5", - "arrowParens": "always", - "bracketSpacing": true, - "tabWidth": 2, - "useTabs": false -} diff --git a/.vscode b/.vscode deleted file mode 100644 index 57810d34..00000000 --- a/.vscode +++ /dev/null @@ -1,9 +0,0 @@ -{ - "editor.formatOnPaste": true, - "editor.formatOnSave": true, - "editor.defaultFormatter": "esbenp.prettier-vscode", - "editor.codeActionsOnSave": { - "source.fixAll.eslint": true, - "source.fixAll.format": true - } -} diff --git a/LICENSE b/LICENSE index 9cf10627..163dc218 100644 --- a/LICENSE +++ b/LICENSE @@ -1,19 +1,13 @@ -MIT License +Copyright 2022 Rocicorp LLC -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. +http://www.apache.org/licenses/LICENSE-2.0 -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. \ No newline at end of file diff --git a/README.md b/README.md index 383c667f..ab7a1cfe 100644 --- a/README.md +++ b/README.md @@ -1,39 +1,55 @@ -# Repliear +![Replicache logo](https://uploads-ssl.webflow.com/623a2f46e064937599256c2d/6269e72c61073c3d561a5015_Lockup%20v2.svg) -A high-performance issue tracker in the style of [Linear](https://linear.app/). +# repliear-row-versioning -Built with [Replicache](https://replicache.dev), [Next.js](https://nextjs.org/), -[Pusher](https://pusher.com/), and [Postgres](https://www.postgresql.org/). +This is a demonstration of the [Row Version Strategy](https://doc.replicache.dev/strategies/row-version). -Running at [repliear.herokuapp.com](https://repliear.herokuapp.com/). +A high-performance issue tracker in the style of Linear. 
-# Prerequisites +Built with [Replicache](https://replicache.dev), [ViteJS](https://vitejs.dev/), +and [Postgres](https://www.postgresql.org/). -1. [Get a Replicache license key](https://doc.replicache.dev/licensing) -2. Install PostgreSQL. On MacOS, we recommend using [Postgres.app](https://postgresapp.com/). For other OSes and options, see [Postgres Downloads](https://www.postgresql.org/download/). -3. [Sign up for a free pusher.com account](https://pusher.com/) and create a new "channels" app. -# To run locally +## 1. Setup -Get the Pusher environment variables from the ["App Keys" section](https://i.imgur.com/7DNmTKZ.png) of the Pusher App UI. +#### Get your Replicache License Key -**Note:** These instructions assume you installed PostgreSQL via Postgres.app on MacOS. If you installed some other way, or configured PostgreSQL specially, you may additionally need to set the `PGUSER` and `PGPASSWORD` environment variables.

+```bash +$ npx replicache get-license +``` + +#### Set your `VITE_REPLICACHE_LICENSE_KEY` environment variable + +```bash +$ export VITE_REPLICACHE_LICENSE_KEY="" +``` + +#### Install Postgres + +Install PostgreSQL. On MacOS, we recommend using [Postgres.app](https://postgresapp.com/). For other OSes and options, see [Postgres Downloads](https://www.postgresql.org/download/). + +Once installed, set your database URL: +```bash +$ export DATABASE_URL="postgresql://localhost/repliear" ``` -export PGDATABASE="repliear" -export NEXT_PUBLIC_REPLICACHE_LICENSE_KEY="" -export NEXT_PUBLIC_PUSHER_APP_ID= -export NEXT_PUBLIC_PUSHER_KEY= -export NEXT_PUBLIC_PUSHER_SECRET= -export NEXT_PUBLIC_PUSHER_CLUSTER= - -# Create a new database for Repliear -psql -d postgres -c 'create database repliear' - -npm install -npm run dev + +and create a Postgres DB: + +```bash +$ psql -d postgres -c 'create database repliear' ``` -## Credits + +#### Install and Build + +```bash +$ npm install; npm run build; +``` + +## 2. Start frontend and backend watcher + +```bash +$ npm run watch --ws +``` -We started this project by forking [linear_clone](https://github.com/tuan3w/linearapp_clone). This enabled us to get the visual styling right much faster than we otherwise could have. +Provides an example of integrating Replicache with React in a high-performance issue tracker. diff --git a/backend/comments-react.d.ts b/backend/comments-react.d.ts deleted file mode 100644 index 43fb9936..00000000 --- a/backend/comments-react.d.ts +++ /dev/null @@ -1,16 +0,0 @@ -declare module "*comments-react.js.gz" { - const gitHubComments: { - number: number; - // eslint-disable-next-line @typescript-eslint/naming-convention - comment_id: string; - body: string | null; - // eslint-disable-next-line @typescript-eslint/naming-convention - updated_at: string; - // eslint-disable-next-line @typescript-eslint/naming-convention - created_at: string; - - // eslint-disable-next-line @typescript-eslint/naming-convention - creator_user_login: string; - }[]; - export default gitHubComments; -} diff --git a/backend/data.test.ts b/backend/data.test.ts deleted file mode 100644 index 6d7706cc..00000000 --- a/backend/data.test.ts +++ /dev/null @@ -1,390 +0,0 @@ -import { expect } from "chai"; -import { Issue, Priority, Status, Comment } from "../frontend/issue"; -import { setup, teardown, test } from "mocha"; -import type { JSONValue } from "replicache"; -import { - createDatabase, - delEntries, - getEntry, - getVersion, - initSpace, - putEntries, - SampleData, - BASE_SPACE_ID, - getIssueEntries, - getNonIssueEntriesInSyncOrder, -} from "./data"; -import { transact, withExecutor } from "./pg"; - -const i1: Issue = { - priority: Priority.HIGH, - id: "1", - title: "Issue 1", - status: Status.IN_PROGRESS, - modified: 0, - created: 0, - creator: "testUser1", - kanbanOrder: "1", -}; - -const comment1i1: Comment = { - id: "1", - issueID: "1", - created: 0, - body: "Comment 1", - creator: "testUser1", -}; - -const comment2i1: Comment = { - id: "2", - issueID: "1", - created: 0, - body: "Comment 2", - creator: "testUser2", -}; - -const i2: Issue = { - priority: Priority.MEDIUM, - id: "2", - title: "Issue 2", - status: Status.IN_PROGRESS, - modified: 0, - created: 0, - creator: "testUser2", - kanbanOrder: "2", -}; - -const comment1i2: Comment = { - id: "1", - issueID: "2", - created: 0, - body: "Comment 1", - creator: "testUser1", -}; - -const i3: Issue = { - priority: Priority.LOW, - id: "3", - title: "Issue 3", - status: Status.TODO, - modified: 0, - created: 0, - creator: 
"testUser3", - kanbanOrder: "3", -}; - -export const testSampleData: SampleData = [ - { - issue: i1, - description: "Description 1", - comments: [comment1i1, comment2i1], - }, - { issue: i2, description: "Description 2", comments: [comment1i2] }, - { issue: i3, description: "Description 3", comments: [] }, -]; - -function getTestSyncOrder(key: string) { - return `${key}-testSyncOrder`; -} - -setup(async () => { - // TODO: This is a very expensive way to unit test :). - // Is there an in-memory postgres or something? - await transact((executor) => createDatabase(executor)); -}); - -teardown(async () => { - await withExecutor(async (executor) => { - await executor(`delete from entry where spaceid = $1`, [BASE_SPACE_ID]); - await executor(`delete from space where id = $1`, [BASE_SPACE_ID]); - await executor(`delete from entry where spaceid like 'test-s-%'`); - await executor(`delete from space where id like 'test-s-%'`); - }); -}); - -test("getEntry", async () => { - type Case = { - name: string; - exists: boolean; - deleted: boolean; - validJSON: boolean; - }; - const cases: Case[] = [ - { - name: "does not exist", - exists: false, - deleted: false, - validJSON: false, - }, - { - name: "exists, deleted", - exists: true, - deleted: true, - validJSON: true, - }, - { - name: "exists, not deleted, invalid JSON", - exists: true, - deleted: false, - validJSON: false, - }, - { - name: "exists, not deleted, valid JSON", - exists: true, - deleted: false, - validJSON: true, - }, - ]; - - await withExecutor(async (executor) => { - for (const c of cases) { - await executor( - `delete from entry where spaceid = 'test-s-s1' and key = 'foo'` - ); - if (c.exists) { - await executor( - `insert into entry (spaceid, key, value, syncorder, deleted, version, lastmodified) values ('test-s-s1', 'foo', $1, '${getTestSyncOrder( - "foo" - )}',$2, 1, now())`, - [c.validJSON ? 
JSON.stringify(42) : "not json", c.deleted] - ); - } - - const promise = getEntry(executor, "test-s-s1", "foo"); - let result: JSONValue | undefined; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let error: any | undefined; - await promise.then( - (r) => (result = r), - (e) => (error = String(e)) - ); - if (!c.exists) { - expect(result, c.name).undefined; - expect(error, c.name).undefined; - } else if (c.deleted) { - expect(result, c.name).undefined; - expect(error, c.name).undefined; - } else if (!c.validJSON) { - expect(result, c.name).undefined; - expect(error, c.name).contains("SyntaxError"); - } else { - expect(result, c.name).eq(42); - expect(error, c.name).undefined; - } - } - }); -}); - -test("getEntry RoundTrip types", async () => { - await withExecutor(async (executor) => { - await putEntries( - executor, - "test-s-s1", - [ - ["boolean", true, getTestSyncOrder("boolean")], - ["number", 42, getTestSyncOrder("number")], - ["string", "foo", getTestSyncOrder("string")], - ["array", [1, 2, 3], getTestSyncOrder("array")], - ["object", { a: 1, b: 2 }, getTestSyncOrder("object")], - ], - 1 - ); - expect(await getEntry(executor, "test-s-s1", "boolean")).eq(true); - expect(await getEntry(executor, "test-s-s1", "number")).eq(42); - expect(await getEntry(executor, "test-s-s1", "string")).eq("foo"); - expect(await getEntry(executor, "test-s-s1", "array")).deep.equal([ - 1, - 2, - 3, - ]); - expect(await getEntry(executor, "test-s-s1", "object")).deep.equal({ - a: 1, - b: 2, - }); - }); -}); - -test("putEntries", async () => { - type Case = { - name: string; - duplicate: boolean; - deleted: boolean; - }; - - const cases: Case[] = [ - { - name: "no duplicate", - duplicate: false, - deleted: false, - }, - { - name: "duplicate", - duplicate: true, - deleted: false, - }, - { - name: "deleted", - duplicate: true, - deleted: true, - }, - ]; - - await withExecutor(async (executor) => { - for (const c of cases) { - await executor( - `delete from entry where spaceid = 'test-s-s1' and key = 'bar'` - ); - await executor( - `delete from entry where spaceid = 'test-s-s1' and key = 'foo'` - ); - - if (c.duplicate) { - await putEntries( - executor, - "test-s-s1", - [["foo", 41, getTestSyncOrder("foo")]], - 1 - ); - if (c.deleted) { - await delEntries(executor, "test-s-s1", ["foo"], 1); - } - } - const res: Promise = putEntries( - executor, - "test-s-s1", - [ - ["bar", 100, getTestSyncOrder("bar")], - ["foo", 42, getTestSyncOrder("foo")], - ], - 2 - ); - await res.catch(() => ({})); - - const qr = await executor( - `select spaceid, key, value, deleted, version - from entry where spaceid = 'test-s-s1' and key in ('bar', 'foo') order by key` - ); - const [barRow, fooRow] = qr.rows; - - expect(fooRow, c.name).not.undefined; - { - const { spaceid, key, value, deleted, version } = fooRow; - expect(spaceid, c.name).eq("test-s-s1"); - expect(key, c.name).eq("foo"); - expect(value, c.name).eq("42"); - expect(deleted, c.name).false; - expect(version, c.name).eq(2); - } - { - const { spaceid, key, value, deleted, version } = barRow; - expect(spaceid, c.name).eq("test-s-s1"); - expect(key, c.name).eq("bar"); - expect(value, c.name).eq("100"); - expect(deleted, c.name).false; - expect(version, c.name).eq(2); - } - } - }); -}); - -test("delEntries", async () => { - type Case = { - name: string; - exists: boolean; - }; - const cases: Case[] = [ - { - name: "does not exist", - exists: false, - }, - { - name: "exists", - exists: true, - }, - ]; - for (const c of cases) { - await withExecutor(async 
(executor) => { - await executor( - `delete from entry where spaceid = 'test-s-s1' and key = 'bar'` - ); - await executor( - `delete from entry where spaceid = 'test-s-s1' and key = 'foo'` - ); - await executor( - `insert into entry (spaceid, key, value, syncorder, deleted, version, lastmodified) values ('test-s-s1', 'bar', '100', '${getTestSyncOrder( - "bar" - )}', false, 1, now())` - ); - if (c.exists) { - await executor( - `insert into entry (spaceid, key, value, syncorder, deleted, version, lastmodified) values ('test-s-s1', 'foo', '42', '${getTestSyncOrder( - "foo" - )}',false, 1, now())` - ); - } - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let error: any | undefined; - await delEntries(executor, "test-s-s1", ["bar", "foo"], 2).catch( - (e) => (error = String(e)) - ); - - const qr = await executor( - ` - select spaceid, key, value, deleted, version from entry - where spaceid = 'test-s-s1' and key in ('bar', 'foo') order by key - ` - ); - const [barRow, fooRow] = qr.rows; - - expect(barRow, c.name).not.undefined; - const { spaceid, key, value, deleted, version } = barRow; - expect(spaceid, c.name).eq("test-s-s1"); - expect(key, c.name).eq("bar"); - expect(value, c.name).eq("100"); - expect(deleted, c.name).true; - expect(version, c.name).eq(2); - if (c.exists) { - expect(fooRow, c.name).not.undefined; - const { spaceid, key, value, deleted, version } = fooRow; - expect(spaceid, c.name).eq("test-s-s1"); - expect(key, c.name).eq("foo"); - expect(value, c.name).eq("42"); - expect(deleted, c.name).true; - expect(version, c.name).eq(2); - } else { - expect(fooRow, c.name).undefined; - expect(error, c.name).undefined; - } - }); - } -}); - -test("initSpace", async () => { - await withExecutor(async (executor) => { - await executor(`delete from entry where spaceid = $1`, [BASE_SPACE_ID]); - await executor(`delete from space where id = $1`, [BASE_SPACE_ID]); - const testSpaceID1 = await initSpace(executor, () => - Promise.resolve(testSampleData) - ); - expect(await getVersion(executor, testSpaceID1)).eq(1); - // 3 issues - expect((await getIssueEntries(executor, testSpaceID1)).length).eq(3); - // 3 descriptions, and 3 comments - expect( - (await getNonIssueEntriesInSyncOrder(executor, testSpaceID1, "", 10)) - .entries.length - ).eq(6); - const testSpaceID2 = await initSpace(executor, () => { - throw new Error("unexpected call to getSampleIssues on subsequent calls"); - }); - expect(await getVersion(executor, testSpaceID2)).eq(1); - // 3 issues - expect((await getIssueEntries(executor, testSpaceID2)).length).eq(3); - // 3 descriptions, and 3 comments - expect( - (await getNonIssueEntriesInSyncOrder(executor, testSpaceID2, "", 10)) - .entries.length - ).eq(6); - }); -}); diff --git a/backend/data.ts b/backend/data.ts deleted file mode 100644 index 0733537a..00000000 --- a/backend/data.ts +++ /dev/null @@ -1,457 +0,0 @@ -import type { JSONValue } from "replicache"; -import { z } from "zod"; -import type { Executor } from "./pg"; -import { ReplicacheTransaction } from "./replicache-transaction"; -import type { Issue, Comment, Description } from "../frontend/issue"; -import { mutators } from "../frontend/mutators"; -import { flatten } from "lodash"; -import { getSyncOrder } from "./sync-order"; -import { nanoid } from "nanoid"; - -export type SampleData = { - issue: Issue; - description: Description; - comments: Comment[]; -}[]; - -export async function createDatabase(executor: Executor) { - const schemaVersion = await getSchemaVersion(executor); - if (schemaVersion < 0 || 
schemaVersion > 2) { - throw new Error("Unexpected schema version: " + schemaVersion); - } - - if (schemaVersion === 2) { - console.log("schemaVersion is 2 - nothing to do"); - return; - } - - console.log("creating schema"); - await executor("drop schema if exists public cascade"); - await executor("create schema public"); - await executor("grant all on schema public to postgres"); - await executor("grant all on schema public to public"); - await createSchema(executor); -} - -async function getSchemaVersion(executor: Executor) { - const metaExists = await executor(`select exists( - select from pg_tables where schemaname = 'public' and tablename = 'meta')`); - if (!metaExists.rows[0].exists) { - return 0; - } - - const qr = await executor( - `select value from meta where key = 'schemaVersion'` - ); - return qr.rows[0].value; -} - -// nanoid's don't include $, so cannot collide with other space ids. -export const BASE_SPACE_ID = "$base-space-id"; - -export async function createSchema(executor: Executor) { - await executor(`create table meta (key text primary key, value json)`); - await executor(`insert into meta (key, value) values ('schemaVersion', '2')`); - - await executor(`create table space ( - id text primary key not null, - version integer not null, - lastmodified timestamp(6) not null - )`); - - // lastpullid is null until the client has pulled for the first time. - await executor(`create table clientgroup ( - id text primary key not null, - lastpullid integer null - )`); - - await executor(`create table client ( - id text primary key not null, - lastmutationid integer not null, - version integer not null, - clientgroupid text not null, - lastmodified timestamp(6) not null - )`); - - await executor(`create table entry ( - spaceid text not null, - key text not null, - value text not null, - syncorder text not null, - deleted boolean not null, - version integer not null, - lastmodified timestamp(6) not null - )`); - - await executor( - `create unique index idx_entry_spaceid_key on entry (spaceid, key)` - ); - await executor( - `create index idx_entry_spaceid_syncorder on entry (spaceid, syncorder)` - ); - await executor(`create index - on entry (spaceid, deleted) - include (key, value, deleted) - where key like 'issue/%'`); - await executor(`create index on entry (spaceid)`); - await executor(`create index on entry (deleted)`); - await executor(`create index on entry (version)`); - await executor(`create index on client (clientgroupid, version)`); -} - -const INITIAL_SPACE_VERSION = 1; -const INITIAL_SPACE_MUTATION_ID = 0; -export async function initSpace( - executor: Executor, - getSampleData: () => Promise -): Promise { - const { - rows: baseSpaceRows, - } = await executor(`select version from space where id = $1`, [ - BASE_SPACE_ID, - ]); - - if (baseSpaceRows.length === 0) { - console.log("Initializing base space", BASE_SPACE_ID); - await insertSpace(executor, BASE_SPACE_ID, INITIAL_SPACE_VERSION); - const start = Date.now(); - // We have to batch insertions to work around postgres command size limits - const sampleData = await getSampleData(); - const sampleDataBatchs: SampleData[] = []; - for (let i = 0; i < sampleData.length; i++) { - if (i % 1000 === 0) { - sampleDataBatchs.push([]); - } - sampleDataBatchs[sampleDataBatchs.length - 1].push(sampleData[i]); - } - for (const sampleDataBatch of sampleDataBatchs) { - const tx = new ReplicacheTransaction( - executor, - BASE_SPACE_ID, - "fake-client-id-for-server-init", - INITIAL_SPACE_MUTATION_ID, - INITIAL_SPACE_VERSION, - 
getSyncOrder - ); - for (const { issue, description, comments } of sampleDataBatch) { - await mutators.putIssue(tx, { issue, description }); - for (const comment of comments) { - await mutators.putIssueComment(tx, comment, false); - } - } - await tx.flush(); - } - console.log("Initing base space took " + (Date.now() - start) + "ms"); - } - const spaceID = nanoid(10); - await insertSpace(executor, spaceID, INITIAL_SPACE_VERSION); - return spaceID; -} - -async function insertSpace( - executor: Executor, - spaceID: string, - version: number -) { - await executor( - `insert into space (id, version, lastmodified) values ($1, $2, now())`, - [spaceID, version] - ); -} - -export async function getEntry( - executor: Executor, - spaceID: string, - key: string -): Promise { - const { rows } = await executor( - ` - with overlayentry as ( - select key, value, deleted from entry where spaceid = $1 and key = $3 - ), baseentry as ( - select key, value from entry where spaceid = $2 and key = $3 - ) - select coalesce(overlayentry.key, baseentry.key), - coalesce(overlayentry.value, baseentry.value) as value, - overlayentry.deleted as deleted - from overlayentry full join baseentry on overlayentry.key = baseentry.key - `, - [spaceID, BASE_SPACE_ID, key] - ); - const value = rows[0]?.value; - if (value === undefined || rows[0]?.deleted) { - return undefined; - } - return JSON.parse(value); -} - -export async function putEntries( - executor: Executor, - spaceID: string, - entries: [key: string, value: JSONValue, syncOrder: string][], - version: number -): Promise { - if (entries.length === 0) { - return; - } - const valuesSql = Array.from( - { length: entries.length }, - (_, i) => - `($1, $${i * 3 + 3}, $${i * 3 + 4}, $${i * 3 + 5}, false, $2, now())` - ).join(); - - await executor( - ` - insert into entry ( - spaceid, key, value, syncOrder, deleted, version, lastmodified - ) values ${valuesSql} - on conflict (spaceid, key) do - update set value = excluded.value, syncorder = excluded.syncorder, - deleted = false, version = excluded.version, lastmodified = now() - `, - [ - spaceID, - version, - ...flatten( - entries.map(([key, value, syncOrder]) => [ - key, - JSON.stringify(value), - syncOrder, - ]) - ), - ] - ); -} - -export async function delEntries( - executor: Executor, - spaceID: string, - keys: string[], - version: number -): Promise { - if (keys.length === 0) { - return; - } - const keyParamsSQL = keys.map((_, i) => `$${i + 3}`).join(","); - await executor( - ` - update entry set deleted = true, version = $2 - where spaceid = $1 and key in(${keyParamsSQL}) - `, - [spaceID, version, ...keys] - ); -} - -export async function getIssueEntries( - executor: Executor, - spaceID: string -): Promise<[key: string, value: string][]> { - const { rows } = await executor( - ` - with overlayentry as ( - select key, value, deleted from entry - where spaceid = $1 and key like 'issue/%' - ), baseentry as ( - select key, value from entry where spaceid = $2 and key like 'issue/%' - ) - select coalesce(overlayentry.key, baseentry.key) as key, - coalesce(overlayentry.value, baseentry.value) as value, - overlayentry.deleted as deleted - from overlayentry full join baseentry on overlayentry.key = baseentry.key - `, - [spaceID, BASE_SPACE_ID] - ); - const startFilter = Date.now(); - const filtered: [key: string, value: string][] = rows - .filter((row) => !row.deleted) - .map((row) => [row.key, row.value]); - - console.log("getIssueEntries filter took " + (Date.now() - startFilter)); - return filtered; -} - -export async function 
getNonIssueEntriesInSyncOrder( - executor: Executor, - spaceID: string, - startSyncOrderExclusive: string, - limit: number -): Promise<{ - entries: [key: string, value: string][]; - endSyncOrder: string | undefined; -}> { - // All though it complicates the query, we do the deleted filtering - // in the query so that we can correctly limit the results. - const { rows } = await executor( - ` - with overlayentry as ( - select key, value, syncorder, deleted from entry - where spaceid = $1 and key not like 'issue/%' and syncorder > $3 - order by syncorder limit $4 - ), baseentry as ( - select key, value, syncorder from entry - where spaceid = $2 and key not like 'issue/%' and syncorder > $3 - order by syncorder limit $4 - ) - select key, value, syncorder from ( - select coalesce(overlayentry.key, baseentry.key) as key, - coalesce(overlayentry.value, baseentry.value) as value, - coalesce(overlayentry.syncorder, baseentry.syncorder) as syncorder, - overlayentry.deleted as deleted - from overlayentry full join baseentry on overlayentry.key = baseentry.key - ) as merged where deleted = false or deleted is null - order by syncorder - limit $4 - `, - [spaceID, BASE_SPACE_ID, startSyncOrderExclusive, limit] - ); - return { - entries: rows.map((row) => [row.key, row.value]), - endSyncOrder: rows[rows.length - 1]?.syncorder, - }; -} - -export async function getChangedEntries( - executor: Executor, - spaceID: string, - prevVersion: number -): Promise<[key: string, value: string, deleted: boolean][]> { - // changes are only in the onverlay space, so we do not need to - // query the base space. - const { - rows, - } = await executor( - `select key, value, deleted from entry where spaceid = $1 and version > $2`, - [spaceID, prevVersion] - ); - return rows.map((row) => [row.key, row.value, row.deleted]); -} - -export async function getVersion( - executor: Executor, - spaceID: string -): Promise { - const { rows } = await executor(`select version from space where id = $1`, [ - spaceID, - ]); - const value = rows[0]?.version; - if (value === undefined) { - return undefined; - } - return z.number().parse(value); -} - -export async function setVersion( - executor: Executor, - spaceID: string, - version: number -): Promise { - await executor( - `update space set version = $2, lastmodified = now() where id = $1`, - [spaceID, version] - ); -} - -export async function getLastMutationID( - executor: Executor, - clientID: string -): Promise { - const { - rows, - } = await executor(`select lastmutationid from client where id = $1`, [ - clientID, - ]); - const value = rows[0]?.lastmutationid; - if (value === undefined) { - return undefined; - } - return z.number().parse(value); -} - -export async function getLastMutationIDs( - executor: Executor, - clientIDs: string[] -) { - return Object.fromEntries( - await Promise.all( - clientIDs.map(async (cid) => { - const lmid = await getLastMutationID(executor, cid); - return [cid, lmid ?? 
0] as const; - }) - ) - ); -} - -export async function getLastMutationIDsSince( - executor: Executor, - clientGroupID: string, - sinceVersion: number -) { - const { - rows, - } = await executor( - `select id, clientgroupid, lastmutationid from client where clientgroupid = $1 and version > $2`, - [clientGroupID, sinceVersion] - ); - return Object.fromEntries( - rows.map((r) => [r.id as string, r.lastmutationid as number] as const) - ); -} - -export async function incrementPullID( - executor: Executor, - clientGroupID: string -) { - const { - rows, - } = await executor(`select lastpullid from clientgroup where id = $1`, [ - clientGroupID, - ]); - if (rows.length === 0) { - await executor(`insert into clientgroup (id, lastpullid) values ($1, 1)`, [ - clientGroupID, - ]); - return 1; - } - const [prev] = rows; - const { lastpullid } = prev; - const nextPullID = lastpullid + 1; - await executor(`update clientgroup set lastpullid = $1`, [nextPullID]); - return nextPullID; -} - -export async function setLastMutationID( - executor: Executor, - clientID: string, - clientGroupID: string, - lastMutationID: number, - version: number -): Promise { - await executor( - ` - insert into clientgroup (id, lastpullid) values ($1, null) - on conflict (id) do nothing - `, - [clientGroupID] - ); - await executor( - ` - insert into client (id, clientgroupid, lastmutationid, version, lastmodified) - values ($1, $2, $3, $4, now()) - on conflict (id) do update set lastmutationid = $3, version = $4, lastmodified = now() - `, - [clientID, clientGroupID, lastMutationID, version] - ); -} - -export async function setLastMutationIDs( - executor: Executor, - clientGroupID: string, - lmids: Record, - version: number -) { - return await Promise.all( - [...Object.entries(lmids)].map(([clientID, lmid]) => - setLastMutationID(executor, clientID, clientGroupID, lmid, version) - ) - ); -} diff --git a/backend/issues-react.d.ts b/backend/issues-react.d.ts deleted file mode 100644 index 08853228..00000000 --- a/backend/issues-react.d.ts +++ /dev/null @@ -1,15 +0,0 @@ -declare module "*issues-react.js.gz" { - const gitHubIssues: { - number: number; - title: string; - body: string | null; - state: "open" | "closed"; - // eslint-disable-next-line @typescript-eslint/naming-convention - updated_at: string; - // eslint-disable-next-line @typescript-eslint/naming-convention - created_at: string; - // eslint-disable-next-line @typescript-eslint/naming-convention - creator_user_login: string; - }[]; - export default gitHubIssues; -} diff --git a/backend/issues-react.js.gz b/backend/issues-react.js.gz deleted file mode 100644 index 190aff55..00000000 Binary files a/backend/issues-react.js.gz and /dev/null differ diff --git a/backend/pg.ts b/backend/pg.ts deleted file mode 100644 index 999a7252..00000000 --- a/backend/pg.ts +++ /dev/null @@ -1,117 +0,0 @@ -// Low-level config and utilities for Postgres. - -import { Pool, QueryResult } from "pg"; - -const pool = new Pool( - process.env.DATABASE_URL - ? { - connectionString: process.env.DATABASE_URL, - ssl: - process.env.NODE_ENV === "production" - ? 
{ - rejectUnauthorized: false, - } - : undefined, - } - : undefined -); - -// the pool will emit an error on behalf of any idle clients -// it contains if a backend error or network partition happens -pool.on("error", (err) => { - console.error("Unexpected error on idle client", err); - process.exit(-1); -}); - -pool.on("connect", (client) => { - // eslint-disable-next-line @typescript-eslint/no-floating-promises - client.query( - "SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL SERIALIZABLE" - ); -}); - -export async function withExecutor( - f: (executor: Executor) => R -): Promise { - const startConnect = Date.now(); - const client = await pool.connect(); - console.log("pool.connect took", Date.now() - startConnect); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const executor = async (sql: string, params?: any[]) => { - try { - const start = Date.now(); - const result = await client.query(sql, params); - console.log( - "Db query took " + - (Date.now() - start) + - "ms. SQL: " + - sql.substring(0, Math.min(sql.length, 150)) - ); - return result; - } catch (e) { - throw new Error( - // eslint-disable-next-line @typescript-eslint/no-explicit-any - `Error executing SQL: ${sql}: ${((e as unknown) as any).toString()}` - ); - } - }; - - try { - return await f(executor); - } finally { - const startRelease = Date.now(); - client.release(); - console.log("client.release took", Date.now() - startRelease); - } -} - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -export type Executor = (sql: string, params?: any[]) => Promise; -export type TransactionBodyFn = (executor: Executor) => Promise; - -/** - * Invokes a supplied function within an RDS transaction. - * @param body Function to invoke. If this throws, the transaction will be rolled - * back. The thrown error will be re-thrown. - */ -export async function transact(body: TransactionBodyFn) { - return await withExecutor(async (executor) => { - return await transactWithExecutor(executor, body); - }); -} - -async function transactWithExecutor( - executor: Executor, - body: TransactionBodyFn -) { - for (let i = 0; i < 10; i++) { - try { - await executor("begin"); - try { - const r = await body(executor); - await executor("commit"); - return r; - } catch (e) { - console.log("caught error", e, "rolling back"); - await executor("rollback"); - throw e; - } - } catch (e) { - if (shouldRetryTransaction(e)) { - console.log( - `Retrying transaction due to error ${e} - attempt number ${i}` - ); - continue; - } - throw e; - } - } - throw new Error("Tried to execute transacation too many times. Giving up."); -} - -//stackoverflow.com/questions/60339223/node-js-transaction-coflicts-in-postgresql-optimistic-concurrency-control-and -function shouldRetryTransaction(err: unknown) { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const code = typeof err === "object" ? 
String((err as any).code) : null; - return code === "40001" || code === "40P01"; -} diff --git a/backend/replicache-transaction.test.ts b/backend/replicache-transaction.test.ts deleted file mode 100644 index 792afc9a..00000000 --- a/backend/replicache-transaction.test.ts +++ /dev/null @@ -1,104 +0,0 @@ -import { ReplicacheTransaction } from "./replicache-transaction"; -import { expect } from "chai"; -import { test, teardown, setup } from "mocha"; -import { transact, withExecutor } from "./pg"; -import { createDatabase, getEntry } from "./data"; -import type { JSONValue, ReadTransaction } from "replicache"; - -setup(async () => { - await transact((executor) => createDatabase(executor)); -}); - -teardown(async () => { - await withExecutor(async (executor) => { - await executor(`delete from entry where spaceid like 'test-s-%'`); - await executor(`delete from space where id like 'test-s-%'`); - }); -}); - -async function getTestSyncOrder( - _: ReadTransaction, - entry: [key: string, _: JSONValue] -) { - return entry[0]; -} - -test("ReplicacheTransaction", async () => { - await withExecutor(async (executor) => { - const t1 = new ReplicacheTransaction( - executor, - "test-s-s1", - "c1", - 1, - getTestSyncOrder - ); - - expect(t1.clientID).equal("c1"); - expect(await t1.has("foo")).false; - expect(await t1.get("foo")).undefined; - - await t1.put("foo", "bar"); - expect(await t1.has("foo")).true; - expect(await t1.get("foo")).equal("bar"); - - await t1.flush(); - - expect(await getEntry(executor, "test-s-s1", "foo")).equal("bar"); - - const t2 = new ReplicacheTransaction( - executor, - "test-s-s1", - "c1", - 2, - getTestSyncOrder - ); - await t2.del("foo"); - await t2.flush(); - - expect(await getEntry(executor, "test-s-s1", "foo")).equal(undefined); - const qr = await executor( - `select value, deleted, version - from entry where spaceid = 'test-s-s1' and key = 'foo'` - ); - const [row] = qr.rows; - expect(row).deep.equal({ - value: `"bar"`, - deleted: true, - version: 2, - }); - }); -}); - -test("ReplicacheTransaction overlap", async () => { - await withExecutor(async (executor) => { - const t1 = new ReplicacheTransaction( - executor, - "test-s-s1", - "c1", - 1, - getTestSyncOrder - ); - await t1.put("foo", "bar"); - - const t2 = new ReplicacheTransaction( - executor, - "test-s-s1", - "c1", - 1, - getTestSyncOrder - ); - expect(await t2.has("foo")).false; - - await t1.flush(); - expect(await t2.has("foo")).false; - - const t3 = new ReplicacheTransaction( - executor, - "test-s-s1", - "c1", - 1, - getTestSyncOrder - ); - expect(await t3.has("foo")).true; - }); -}); diff --git a/backend/replicache-transaction.ts b/backend/replicache-transaction.ts deleted file mode 100644 index a3c1e32b..00000000 --- a/backend/replicache-transaction.ts +++ /dev/null @@ -1,143 +0,0 @@ -import type { - DeepReadonly, - IndexKey, - JSONValue, - ReadTransaction, - ReadonlyJSONValue, - ScanIndexOptions, - ScanNoIndexOptions, - ScanOptions, - ScanResult, - TransactionEnvironment, - TransactionReason, - WriteTransaction, -} from "replicache"; -import { delEntries, getEntry, putEntries } from "./data"; -import type { Executor } from "./pg"; - -export type SyncOrderFn = ( - tx: ReadTransaction, - entry: [key: string, value: JSONValue] -) => Promise; - -/** - * Implements Replicache's WriteTransaction interface in terms of a Postgres - * transaction. 
- */ -export class ReplicacheTransaction implements WriteTransaction { - private readonly _spaceID: string; - private readonly _clientID: string; - private readonly _version: number; - private readonly _mutationID: number; - private readonly _executor: Executor; - private readonly _getSyncOrder: SyncOrderFn; - private readonly _cache: Map< - string, - { value: JSONValue | undefined; dirty: boolean } - > = new Map(); - - constructor( - executor: Executor, - spaceID: string, - clientID: string, - version: number, - mutationId: number, - getSyncOrder: SyncOrderFn - ) { - this._spaceID = spaceID; - this._clientID = clientID; - this._version = version; - this._mutationID = mutationId; - this._executor = executor; - this._getSyncOrder = getSyncOrder; - } - - get reason(): TransactionReason { - return "authoritative"; - } - - get environment(): TransactionEnvironment { - return "server"; - } - - get location() { - return this.environment; - } - - get mutationID(): number { - return this._mutationID; - } - - get clientID(): string { - return this._clientID; - } - - async put(key: string, value: JSONValue): Promise { - await this.set(key, value); - } - async set(key: string, value: JSONValue): Promise { - this._cache.set(key, { value, dirty: true }); - } - async del(key: string): Promise { - const had = await this.has(key); - this._cache.set(key, { value: undefined, dirty: true }); - return had; - } - async get(key: string): Promise { - const entry = this._cache.get(key); - if (entry) { - return entry.value; - } - const value = await getEntry(this._executor, this._spaceID, key); - this._cache.set(key, { value, dirty: false }); - return value; - } - async has(key: string): Promise { - const val = await this.get(key); - return val !== undefined; - } - - // TODO! 
- async isEmpty(): Promise { - throw new Error("Method isEmpty not implemented"); - } - - scan(options: ScanIndexOptions): ScanResult; - scan(options?: ScanNoIndexOptions): ScanResult; - scan(options?: ScanOptions): ScanResult; - scan( - options: ScanIndexOptions - ): ScanResult>; - scan( - options?: ScanNoIndexOptions - ): ScanResult>; - scan( - options?: ScanOptions - ): ScanResult>; - scan(): ScanResult { - throw new Error("Method scan not implemented."); - } - - async flush(): Promise { - const dirtyEntries = [...this._cache.entries()].filter( - ([, { dirty }]) => dirty - ); - const entriesToPut: [string, JSONValue, string][] = []; - for (const dirtyEntry of dirtyEntries) { - if (dirtyEntry[1].value !== undefined) { - entriesToPut.push([ - dirtyEntry[0], - dirtyEntry[1].value, - await this._getSyncOrder(this, [dirtyEntry[0], dirtyEntry[1].value]), - ]); - } - } - const keysToDel = dirtyEntries - .filter(([, { value }]) => value === undefined) - .map(([key]) => key); - await Promise.all([ - delEntries(this._executor, this._spaceID, keysToDel, this._version), - putEntries(this._executor, this._spaceID, entriesToPut, this._version), - ]); - } -} diff --git a/backend/sample-issues.ts b/backend/sample-issues.ts deleted file mode 100644 index 41c61832..00000000 --- a/backend/sample-issues.ts +++ /dev/null @@ -1,136 +0,0 @@ -import { Priority, Status } from "../frontend/issue"; -import type { SampleData } from "./data"; -import { generateNKeysBetween } from "fractional-indexing"; -import { sortBy } from "lodash"; - -export async function getReactSampleData(): Promise { - const issuesDefault = (await import("./issues-react.js.gz")).default; - const sortedIssues = sortBy( - issuesDefault, - (reactIssue) => - Number.MAX_SAFE_INTEGER - - Date.parse(reactIssue.updated_at) + - "-" + - reactIssue.number - ); - - const issuesCount = issuesDefault.length; - const kanbanOrderKeys = generateNKeysBetween(null, null, issuesCount); - const issues: SampleData = sortedIssues.map((reactIssue, idx) => ({ - issue: { - id: reactIssue.number.toString(), - title: reactIssue.title, - priority: getPriority(reactIssue), - status: getStatus(reactIssue), - modified: Date.parse(reactIssue.updated_at), - created: Date.parse(reactIssue.created_at), - creator: reactIssue.creator_user_login, - kanbanOrder: kanbanOrderKeys[idx], - }, - description: reactIssue.body || "", - comments: [], - })); - - const comments = (await import("./comments-react.js.gz")).default.map( - (reactComment) => ({ - id: reactComment.comment_id, - issueID: reactComment.number.toString(), - created: Date.parse(reactComment.created_at), - body: reactComment.body || "", - creator: reactComment.creator_user_login, - }) - ); - for (const comment of comments) { - const issue = issues.find((issue) => issue.issue.id === comment.issueID); - if (issue) { - issue.comments.push(comment); - } - } - issues; - - // Can use this to generate artifically larger datasets for stress testing. 
- const multiplied: SampleData = []; - for (let i = 0; i < 1; i++) { - multiplied.push( - ...issues.map((issue) => ({ - ...issue, - issue: { - ...issue.issue, - id: issue.issue.id + "-" + i, - }, - comments: issue.comments.map((comment) => ({ - ...comment, - issueID: comment.issueID + "-" + i, - })), - })) - ); - } - - return multiplied; -} - -function getStatus({ - number, - created_at, -}: { - number: number; - state: "open" | "closed"; - // eslint-disable-next-line @typescript-eslint/naming-convention - created_at: string; -}): Status { - const stableRandom = number + Date.parse(created_at); - // 90% closed, 10% open - if (stableRandom % 10 < 8) { - // 2/3's done, 1/3 cancelled - switch (stableRandom % 3) { - case 0: - case 1: - return Status.DONE; - case 2: - return Status.CANCELED; - } - } - switch (stableRandom % 6) { - // 2/6 backlog, 3/6 todo, 1/6 in progress - case 0: - case 1: - return Status.BACKLOG; - case 2: - case 3: - case 4: - return Status.TODO; - case 5: - return Status.IN_PROGRESS; - } - return Status.TODO; -} - -function getPriority({ - number, - created_at, -}: { - number: number; - // eslint-disable-next-line @typescript-eslint/naming-convention - created_at: string; -}): Priority { - const stableRandom = number + Date.parse(created_at); - // bell curve priorities - switch (stableRandom % 10) { - case 0: - return Priority.NONE; - case 1: - case 2: - return Priority.LOW; - case 3: - case 4: - case 5: - case 6: - return Priority.MEDIUM; - case 7: - case 8: - return Priority.HIGH; - case 9: - return Priority.URGENT; - } - return Priority.NONE; -} diff --git a/backend/sync-order.ts b/backend/sync-order.ts deleted file mode 100644 index 23ca8124..00000000 --- a/backend/sync-order.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { - commentSchema, - COMMENT_KEY_PREFIX, - DESCRIPTION_KEY_PREFIX, - getDescriptionIssueId, - getIssue, - issueSchema, - ISSUE_KEY_PREFIX, - reverseTimestampSortKey, -} from "../frontend/issue"; -import type { JSONValue, ReadTransaction } from "replicache"; -import { assertNotUndefined } from "../util/asserts"; - -export async function getSyncOrder( - tx: ReadTransaction, - entry: [key: string, value: JSONValue] -): Promise { - // The default view is a list of issues in reverse modified order, so it is - // preferable to sync entries in reverse modified order of their - // corresponding issue, so that if a user clicks on an issue near the top - // of the default initial list view the entries needed for displaying the - // detail view is available as soon as possible. - const [key, value] = entry; - let issue; - if (key.startsWith(ISSUE_KEY_PREFIX)) { - // Note as an optimization we return all of the issue entries in the - // first pull response regardless of sync order, but we still need - // to assign them a sync order despite it being unused. 
- issue = issueSchema.parse(value); - } else if (key.startsWith(COMMENT_KEY_PREFIX)) { - const comment = commentSchema.parse(value); - issue = await getIssue(tx, comment.issueID); - } else if (key.startsWith(DESCRIPTION_KEY_PREFIX)) { - issue = await getIssue(tx, getDescriptionIssueId(key)); - } - assertNotUndefined(issue); - return reverseTimestampSortKey(issue.modified, issue.id) + "-" + key; -} diff --git a/client/.env.example b/client/.env.example new file mode 100644 index 00000000..23e6b829 --- /dev/null +++ b/client/.env.example @@ -0,0 +1 @@ +VITE_REPLICACHE_LICENSE_KEY= diff --git a/client/.eslintignore b/client/.eslintignore new file mode 100644 index 00000000..7758a6ad --- /dev/null +++ b/client/.eslintignore @@ -0,0 +1,9 @@ +node_modules +out +tool +bin +.eslintrc.cjs +dist +lib +env.d.ts +vite.config.ts \ No newline at end of file diff --git a/client/.npmignore b/client/.npmignore new file mode 100644 index 00000000..7ceb59f8 --- /dev/null +++ b/client/.npmignore @@ -0,0 +1,25 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +node_modules +dist +dist-ssr +*.local + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? +.env diff --git a/client/.prettierignore b/client/.prettierignore new file mode 100644 index 00000000..f596c21d --- /dev/null +++ b/client/.prettierignore @@ -0,0 +1,4 @@ +node_modules +dist +lib +*.log \ No newline at end of file diff --git a/client/env.d.ts b/client/env.d.ts new file mode 100644 index 00000000..6523c5e0 --- /dev/null +++ b/client/env.d.ts @@ -0,0 +1,11 @@ +/// +/// + +interface ImportMetaEnv { + readonly VITE_REPLICACHE_LICENSE_KEY: string; + // more env variables... +} + +interface ImportMeta { + readonly env: ImportMetaEnv; +} diff --git a/client/index.html b/client/index.html new file mode 100644 index 00000000..7b901446 --- /dev/null +++ b/client/index.html @@ -0,0 +1,17 @@ + + + + + + + + Repliear + + + +
+
+ + + + \ No newline at end of file diff --git a/client/package.json b/client/package.json new file mode 100644 index 00000000..39271370 --- /dev/null +++ b/client/package.json @@ -0,0 +1,65 @@ +{ + "name": "client", + "private": true, + "version": "0.1.0", + "type": "module", + "scripts": { + "lint": "eslint --ext .ts,.tsx,.js,.jsx .", + "check-types": "tsc --noEmit", + "dev": "vite", + "build": "tsc && vite build", + "build:server": "cd ../server && npm run build", + "preview": "vite preview", + "format": "prettier --write './src/**/*.{js,jsx,json,ts,tsx,html,css,md}' '*.{cjs,js,jsx,json,ts,tsx,html,css,md}'", + "check-format": "prettier --check './src/**/*.{js,jsx,json,ts,tsx,html,css,md}' '*.{cjs,js,jsx,json,ts,tsx,html,css,md}'", + "clean": "rm -rf ./dist; mkdir -p ./dist", + "prod": "cp -r ./dist/ ../server/dist/; cd ../server; npm run prod", + "server": "cd ../server && npm run dev", + "watch": "concurrently --kill-others 'npm run server' 'npm run check-types -- --watch --preserveWatchOutput' 'sleep 3; npm run dev'" + }, + "dependencies": { + "@mui/icons-material": "^5.14.16", + "@rocicorp/undo": "^0.2.0", + "classnames": "^2.3.1", + "lodash": "^4.17.21", + "navigo": "^8.11.1", + "qs": "^6.11.0", + "react": "^18.2.0", + "react-beautiful-dnd": "^13.1.1", + "react-dom": "^18.2.0", + "react-hotkeys": "^2.0.0", + "react-popper": "^2.3.0", + "react-remark": "^2.1.0", + "react-virtualized-auto-sizer": "^1.0.20", + "react-window": "^1.8.9", + "replicache-react": "5.0.1", + "shared": "^0.1.0", + "todomvc-app-css": "^2.4.2" + }, + "devDependencies": { + "@rocicorp/eslint-config": "^0.1.2", + "@rocicorp/prettier-config": "^0.1.1", + "@tailwindcss/forms": "^0.5.6", + "@tailwindcss/line-clamp": "^0.4.4", + "@types/lodash": "^4.14.201", + "@types/react": "^18.0.17", + "@types/react-beautiful-dnd": "^13.1.7", + "@types/react-dom": "^18.0.6", + "@types/react-window": "^1.8.8", + "@vitejs/plugin-react": "^2.0.1", + "autoprefixer": "^10.4.16", + "concurrently": "^7.4.0", + "postcss": "^8.4.31", + "postcss-preset-env": "^9.3.0", + "prettier": "^2.2.1", + "tailwindcss": "^3.3.5", + "typescript": "^4.7.4", + "use-debounce": "^9.0.4", + "vite": "^3.0.7", + "vite-plugin-svgr": "^4.1.0" + }, + "eslintConfig": { + "extends": "@rocicorp/eslint-config" + }, + "prettier": "@rocicorp/prettier-config" +} diff --git a/client/postcss.config.js b/client/postcss.config.js new file mode 100644 index 00000000..5fa0ad15 --- /dev/null +++ b/client/postcss.config.js @@ -0,0 +1,7 @@ +export default { + plugins: { + 'tailwindcss': {}, + 'autoprefixer': {}, + 'postcss-preset-env': {}, + }, +}; diff --git a/public/static/replicache-logo-96.png b/client/replicache-logo-96.png similarity index 100% rename from public/static/replicache-logo-96.png rename to client/replicache-logo-96.png diff --git a/frontend/about-modal.tsx b/client/src/about-modal.tsx similarity index 81% rename from frontend/about-modal.tsx rename to client/src/about-modal.tsx index 9d23868a..550b9cef 100644 --- a/frontend/about-modal.tsx +++ b/client/src/about-modal.tsx @@ -1,18 +1,18 @@ -import CloseIcon from "./assets/icons/close.svg"; -import Modal from "./modal"; -import React from "react"; -import classNames from "classnames"; +import CloseIcon from './assets/icons/close.svg?react'; +import Modal from './layout/modal'; +import React from 'react'; +import classNames from 'classnames'; interface Props { isOpen: boolean; onDismiss?: () => void; } -function Title({ children }: { children: string }) { +function Title({children}: {children: string}) { return
{children}
; } -function H1({ children }: { children: string }) { +function H1({children}: {children: string}) { return
{children}
; } @@ -25,9 +25,9 @@ function P({ }) { return (
{children} @@ -35,7 +35,7 @@ function P({ ); } -function Feature({ title, children }: { title: string; children: string }) { +function Feature({title, children}: {title: string; children: string}) { return (
  • {title}: {children} @@ -43,7 +43,7 @@ function Feature({ title, children }: { title: string; children: string }) { ); } -function A({ href, children }: { href: string; children: string }) { +function A({href, children}: {href: string; children: string}) { return ( @@ -53,7 +53,7 @@ function A({ href, children }: { href: string; children: string }) { ); } -export default function AboutModal({ isOpen, onDismiss }: Props) { +export default function AboutModal({isOpen, onDismiss}: Props) { const handleClickCloseBtn = () => { if (onDismiss) onDismiss(); }; @@ -71,8 +71,8 @@ export default function AboutModal({ isOpen, onDismiss }: Props) {
  • - Repliear is a loving ♥ tribute to the{" "} - Linear issue tracker built with{" "} + Repliear is a loving ♥ tribute to the{' '} + Linear issue tracker built with{' '} Replicache.

    @@ -86,7 +86,7 @@ export default function AboutModal({ isOpen, onDismiss }: Props) {
      @@ -113,7 +113,7 @@ export default function AboutModal({ isOpen, onDismiss }: Props) {

      - Check out the source for this demo at{" "} + Check out the source for this demo at{' '} github.com/rocicorp/repliear diff --git a/client/src/app.tsx b/client/src/app.tsx new file mode 100644 index 00000000..aed0b084 --- /dev/null +++ b/client/src/app.tsx @@ -0,0 +1,225 @@ +import {useCallback, useEffect, useReducer} from 'react'; +import type {ReadTransaction, Replicache} from 'replicache'; +import type {M} from './model/mutators'; +import {useState} from 'react'; +import {minBy, pickBy} from 'lodash'; +import {generateKeyBetween} from 'fractional-indexing'; +import type {UndoManager} from '@rocicorp/undo'; +import {HotKeys} from 'react-hotkeys'; +import { + useIssueDetailState, + useOrderByState, + usePriorityFilterState, + useStatusFilterState, + useViewState, +} from './hooks/query-state-hooks'; +import {useSubscribe} from 'replicache-react'; +import {getPartialSyncState} from './model/control'; +import { + Comment, + Description, + Issue, + IssueUpdate, + IssueUpdateWithID, + ISSUE_KEY_PREFIX, +} from 'shared'; +import {getFilters, getIssueOrder} from './filters'; +import {Layout} from './layout/layout'; +import {timedReducer} from './reducer'; + +type AppProps = { + rep: Replicache; + undoManager: UndoManager; +}; + +const App = ({rep, undoManager}: AppProps) => { + const [view] = useViewState(); + const [priorityFilter] = usePriorityFilterState(); + const [statusFilter] = useStatusFilterState(); + const [orderBy] = useOrderByState(); + const [detailIssueID, setDetailIssueID] = useIssueDetailState(); + const [menuVisible, setMenuVisible] = useState(false); + + const [state, dispatch] = useReducer(timedReducer, { + allIssuesMap: new Map(), + viewIssueCount: 0, + filteredIssues: [], + filters: getFilters(view, priorityFilter, statusFilter), + issueOrder: getIssueOrder(view, orderBy), + }); + + const partialSync = useSubscribe( + rep, + async (tx: ReadTransaction) => { + return (await getPartialSyncState(tx)) || 'NOT_RECEIVED_FROM_SERVER'; + }, + {default: 'NOT_RECEIVED_FROM_SERVER'}, + ); + const partialSyncComplete = partialSync === 'COMPLETE'; + useEffect(() => { + console.log('partialSync', partialSync); + if (!partialSyncComplete) { + void rep.pull(); + } + }, [rep, partialSync, partialSyncComplete]); + + useEffect(() => { + const ev = new EventSource(`/api/replicache/poke?channel=poke`); + ev.onmessage = async () => { + console.log('Receive poke. Pulling'); + void rep.pull(); + }; + return () => ev.close(); + }, []); + + useEffect(() => { + return rep.experimentalWatch( + diff => { + dispatch({ + type: 'diff', + diff, + }); + }, + {prefix: ISSUE_KEY_PREFIX, initialValuesInFirstDiff: true}, + ); + }, [rep]); + + useEffect(() => { + dispatch({ + type: 'setFilters', + filters: getFilters(view, priorityFilter, statusFilter), + }); + }, [view, priorityFilter?.join(), statusFilter?.join()]); + + useEffect(() => { + dispatch({ + type: 'setIssueOrder', + issueOrder: getIssueOrder(view, orderBy), + }); + }, [view, orderBy]); + + const handleCreateIssue = useCallback( + async (issue: Omit, description: Description) => { + const minKanbanOrderIssue = minBy( + [...state.allIssuesMap.values()], + issue => issue.kanbanOrder, + ); + const minKanbanOrder = minKanbanOrderIssue + ? 
+        ? minKanbanOrderIssue.kanbanOrder
+        : null;
+
+      await rep.mutate.putIssue({
+        issue: {
+          ...issue,
+          kanbanOrder: generateKeyBetween(null, minKanbanOrder),
+        },
+        description,
+      });
+    },
+    [rep.mutate, state.allIssuesMap],
+  );
+  const handleCreateComment = useCallback(
+    async (comment: Comment) => {
+      await undoManager.add({
+        execute: () => rep.mutate.putIssueComment(comment),
+        undo: () => rep.mutate.deleteIssueComment(comment),
+      });
+    },
+    [rep.mutate, undoManager],
+  );
+
+  const handleUpdateIssues = useCallback(
+    async (issueUpdates: Array) => {
+      const uChanges: Array =
+        issueUpdates.map(issueUpdate => {
+          const undoChanges = pickBy(
+            issueUpdate.issue,
+            (_, key) => key in issueUpdate.issueChanges,
+          );
+          const rv: IssueUpdateWithID = {
+            id: issueUpdate.issue.id,
+            issueChanges: undoChanges,
+          };
+          const {descriptionUpdate} = issueUpdate;
+          if (descriptionUpdate) {
+            return {
+              ...rv,
+              descriptionChange: descriptionUpdate.description,
+            };
+          }
+          return rv;
+        });
+      await undoManager.add({
+        execute: () =>
+          rep.mutate.updateIssues(
+            issueUpdates.map(({issue, issueChanges, descriptionUpdate}) => {
+              const rv: IssueUpdateWithID = {
+                id: issue.id,
+                issueChanges,
+              };
+              if (descriptionUpdate) {
+                return {
+                  ...rv,
+                  descriptionChange: descriptionUpdate.description,
+                };
+              }
+              return rv;
+            }),
+          ),
+        undo: () => rep.mutate.updateIssues(uChanges),
+      });
+    },
+    [rep.mutate, undoManager],
+  );
+
+  const handleOpenDetail = useCallback(
+    async (issue: Issue) => {
+      await setDetailIssueID(issue.id);
+    },
+    [setDetailIssueID],
+  );
+  const handleCloseMenu = useCallback(
+    () => setMenuVisible(false),
+    [setMenuVisible],
+  );
+  const handleToggleMenu = useCallback(
+    () => setMenuVisible(!menuVisible),
+    [setMenuVisible, menuVisible],
+  );
+
+  const handlers = {
+    undo: () => undoManager.undo(),
+    redo: () => undoManager.redo(),
+  };
+
+  return (
+
+
+  );
+};
+
+const keyMap = {
+  undo: ['ctrl+z', 'command+z'],
+  redo: ['ctrl+y', 'command+shift+z', 'ctrl+shift+z'],
+};
+
+export default App;
diff --git a/frontend/assets/fonts/27237475-28043385 b/client/src/assets/fonts/27237475-28043385
similarity index 100%
rename from frontend/assets/fonts/27237475-28043385
rename to client/src/assets/fonts/27237475-28043385
diff --git a/frontend/assets/fonts/Inter-UI-ExtraBold.woff b/client/src/assets/fonts/Inter-UI-ExtraBold.woff
similarity index 100%
rename from frontend/assets/fonts/Inter-UI-ExtraBold.woff
rename to client/src/assets/fonts/Inter-UI-ExtraBold.woff
diff --git a/frontend/assets/fonts/Inter-UI-ExtraBold.woff2 b/client/src/assets/fonts/Inter-UI-ExtraBold.woff2
similarity index 100%
rename from frontend/assets/fonts/Inter-UI-ExtraBold.woff2
rename to client/src/assets/fonts/Inter-UI-ExtraBold.woff2
diff --git a/frontend/assets/fonts/Inter-UI-Medium.woff b/client/src/assets/fonts/Inter-UI-Medium.woff
similarity index 100%
rename from frontend/assets/fonts/Inter-UI-Medium.woff
rename to client/src/assets/fonts/Inter-UI-Medium.woff
diff --git a/frontend/assets/fonts/Inter-UI-Medium.woff2 b/client/src/assets/fonts/Inter-UI-Medium.woff2
similarity index 100%
rename from frontend/assets/fonts/Inter-UI-Medium.woff2
rename to client/src/assets/fonts/Inter-UI-Medium.woff2
diff --git a/frontend/assets/fonts/Inter-UI-Regular.woff b/client/src/assets/fonts/Inter-UI-Regular.woff
similarity index 100%
rename from frontend/assets/fonts/Inter-UI-Regular.woff
rename to client/src/assets/fonts/Inter-UI-Regular.woff
diff --git a/frontend/assets/fonts/Inter-UI-Regular.woff2
b/client/src/assets/fonts/Inter-UI-Regular.woff2 similarity index 100% rename from frontend/assets/fonts/Inter-UI-Regular.woff2 rename to client/src/assets/fonts/Inter-UI-Regular.woff2 diff --git a/frontend/assets/fonts/Inter-UI-SemiBold.woff b/client/src/assets/fonts/Inter-UI-SemiBold.woff similarity index 100% rename from frontend/assets/fonts/Inter-UI-SemiBold.woff rename to client/src/assets/fonts/Inter-UI-SemiBold.woff diff --git a/frontend/assets/fonts/Inter-UI-SemiBold.woff2 b/client/src/assets/fonts/Inter-UI-SemiBold.woff2 similarity index 100% rename from frontend/assets/fonts/Inter-UI-SemiBold.woff2 rename to client/src/assets/fonts/Inter-UI-SemiBold.woff2 diff --git a/frontend/assets/icons/add-subissue.svg b/client/src/assets/icons/add-subissue.svg similarity index 100% rename from frontend/assets/icons/add-subissue.svg rename to client/src/assets/icons/add-subissue.svg diff --git a/frontend/assets/icons/add.svg b/client/src/assets/icons/add.svg similarity index 100% rename from frontend/assets/icons/add.svg rename to client/src/assets/icons/add.svg diff --git a/frontend/assets/icons/archive.svg b/client/src/assets/icons/archive.svg similarity index 100% rename from frontend/assets/icons/archive.svg rename to client/src/assets/icons/archive.svg diff --git a/frontend/assets/icons/arrow.svg b/client/src/assets/icons/arrow.svg similarity index 100% rename from frontend/assets/icons/arrow.svg rename to client/src/assets/icons/arrow.svg diff --git a/frontend/assets/icons/assignee.svg b/client/src/assets/icons/assignee.svg similarity index 100% rename from frontend/assets/icons/assignee.svg rename to client/src/assets/icons/assignee.svg diff --git a/frontend/assets/icons/attachment.svg b/client/src/assets/icons/attachment.svg similarity index 100% rename from frontend/assets/icons/attachment.svg rename to client/src/assets/icons/attachment.svg diff --git a/frontend/assets/icons/avatar.svg b/client/src/assets/icons/avatar.svg similarity index 100% rename from frontend/assets/icons/avatar.svg rename to client/src/assets/icons/avatar.svg diff --git a/frontend/assets/icons/cancel.svg b/client/src/assets/icons/cancel.svg similarity index 100% rename from frontend/assets/icons/cancel.svg rename to client/src/assets/icons/cancel.svg diff --git a/frontend/assets/icons/chat.svg b/client/src/assets/icons/chat.svg similarity index 100% rename from frontend/assets/icons/chat.svg rename to client/src/assets/icons/chat.svg diff --git a/frontend/assets/icons/circle-dot.svg b/client/src/assets/icons/circle-dot.svg similarity index 100% rename from frontend/assets/icons/circle-dot.svg rename to client/src/assets/icons/circle-dot.svg diff --git a/frontend/assets/icons/circle.svg b/client/src/assets/icons/circle.svg similarity index 100% rename from frontend/assets/icons/circle.svg rename to client/src/assets/icons/circle.svg diff --git a/frontend/assets/icons/claim.svg b/client/src/assets/icons/claim.svg similarity index 100% rename from frontend/assets/icons/claim.svg rename to client/src/assets/icons/claim.svg diff --git a/frontend/assets/icons/close.svg b/client/src/assets/icons/close.svg similarity index 100% rename from frontend/assets/icons/close.svg rename to client/src/assets/icons/close.svg diff --git a/frontend/assets/icons/delete.svg b/client/src/assets/icons/delete.svg similarity index 100% rename from frontend/assets/icons/delete.svg rename to client/src/assets/icons/delete.svg diff --git a/frontend/assets/icons/done.svg b/client/src/assets/icons/done.svg similarity index 100% rename from 
frontend/assets/icons/done.svg rename to client/src/assets/icons/done.svg diff --git a/frontend/assets/icons/dots.svg b/client/src/assets/icons/dots.svg similarity index 100% rename from frontend/assets/icons/dots.svg rename to client/src/assets/icons/dots.svg diff --git a/frontend/assets/icons/due-date.svg b/client/src/assets/icons/due-date.svg similarity index 100% rename from frontend/assets/icons/due-date.svg rename to client/src/assets/icons/due-date.svg diff --git a/frontend/assets/icons/dupplication.svg b/client/src/assets/icons/dupplication.svg similarity index 100% rename from frontend/assets/icons/dupplication.svg rename to client/src/assets/icons/dupplication.svg diff --git a/frontend/assets/icons/filter.svg b/client/src/assets/icons/filter.svg similarity index 100% rename from frontend/assets/icons/filter.svg rename to client/src/assets/icons/filter.svg diff --git a/frontend/assets/icons/git-issue.svg b/client/src/assets/icons/git-issue.svg similarity index 100% rename from frontend/assets/icons/git-issue.svg rename to client/src/assets/icons/git-issue.svg diff --git a/frontend/assets/icons/guide.svg b/client/src/assets/icons/guide.svg similarity index 100% rename from frontend/assets/icons/guide.svg rename to client/src/assets/icons/guide.svg diff --git a/frontend/assets/icons/half-circle.svg b/client/src/assets/icons/half-circle.svg similarity index 100% rename from frontend/assets/icons/half-circle.svg rename to client/src/assets/icons/half-circle.svg diff --git a/frontend/assets/icons/help.svg b/client/src/assets/icons/help.svg similarity index 100% rename from frontend/assets/icons/help.svg rename to client/src/assets/icons/help.svg diff --git a/frontend/assets/icons/inbox.svg b/client/src/assets/icons/inbox.svg similarity index 100% rename from frontend/assets/icons/inbox.svg rename to client/src/assets/icons/inbox.svg diff --git a/frontend/assets/icons/issue.svg b/client/src/assets/icons/issue.svg similarity index 100% rename from frontend/assets/icons/issue.svg rename to client/src/assets/icons/issue.svg diff --git a/frontend/assets/icons/label.svg b/client/src/assets/icons/label.svg similarity index 100% rename from frontend/assets/icons/label.svg rename to client/src/assets/icons/label.svg diff --git a/frontend/assets/icons/menu.svg b/client/src/assets/icons/menu.svg similarity index 100% rename from frontend/assets/icons/menu.svg rename to client/src/assets/icons/menu.svg diff --git a/frontend/assets/icons/parent-issue.svg b/client/src/assets/icons/parent-issue.svg similarity index 100% rename from frontend/assets/icons/parent-issue.svg rename to client/src/assets/icons/parent-issue.svg diff --git a/frontend/assets/icons/plus.svg b/client/src/assets/icons/plus.svg similarity index 100% rename from frontend/assets/icons/plus.svg rename to client/src/assets/icons/plus.svg diff --git a/frontend/assets/icons/project.svg b/client/src/assets/icons/project.svg similarity index 100% rename from frontend/assets/icons/project.svg rename to client/src/assets/icons/project.svg diff --git a/frontend/assets/icons/question.svg b/client/src/assets/icons/question.svg similarity index 100% rename from frontend/assets/icons/question.svg rename to client/src/assets/icons/question.svg diff --git a/frontend/assets/icons/relationship.svg b/client/src/assets/icons/relationship.svg similarity index 100% rename from frontend/assets/icons/relationship.svg rename to client/src/assets/icons/relationship.svg diff --git a/frontend/assets/icons/rounded-claim.svg 
b/client/src/assets/icons/rounded-claim.svg
similarity index 100%
rename from frontend/assets/icons/rounded-claim.svg
rename to client/src/assets/icons/rounded-claim.svg
diff --git a/frontend/assets/icons/search.svg b/client/src/assets/icons/search.svg
similarity index 100%
rename from frontend/assets/icons/search.svg
rename to client/src/assets/icons/search.svg
diff --git a/frontend/assets/icons/signal-medium.svg b/client/src/assets/icons/signal-medium.svg
similarity index 100%
rename from frontend/assets/icons/signal-medium.svg
rename to client/src/assets/icons/signal-medium.svg
diff --git a/frontend/assets/icons/signal-strong.svg b/client/src/assets/icons/signal-strong.svg
similarity index 100%
rename from frontend/assets/icons/signal-strong.svg
rename to client/src/assets/icons/signal-strong.svg
diff --git a/frontend/assets/icons/signal-strong.xsd b/client/src/assets/icons/signal-strong.xsd
similarity index 100%
rename from frontend/assets/icons/signal-strong.xsd
rename to client/src/assets/icons/signal-strong.xsd
diff --git a/frontend/assets/icons/signal-weak.svg b/client/src/assets/icons/signal-weak.svg
similarity index 100%
rename from frontend/assets/icons/signal-weak.svg
rename to client/src/assets/icons/signal-weak.svg
diff --git a/frontend/assets/icons/slack.svg b/client/src/assets/icons/slack.svg
similarity index 100%
rename from frontend/assets/icons/slack.svg
rename to client/src/assets/icons/slack.svg
diff --git a/frontend/assets/icons/view.svg b/client/src/assets/icons/view.svg
similarity index 100%
rename from frontend/assets/icons/view.svg
rename to client/src/assets/icons/view.svg
diff --git a/frontend/assets/icons/zoom.svg b/client/src/assets/icons/zoom.svg
similarity index 100%
rename from frontend/assets/icons/zoom.svg
rename to client/src/assets/icons/zoom.svg
diff --git a/frontend/assets/images/logo.svg b/client/src/assets/images/logo.svg
similarity index 100%
rename from frontend/assets/images/logo.svg
rename to client/src/assets/images/logo.svg
diff --git a/client/src/filters.ts b/client/src/filters.ts
new file mode 100644
index 00000000..5b6f8646
--- /dev/null
+++ b/client/src/filters.ts
@@ -0,0 +1,99 @@
+import {isEqual} from 'lodash';
+import {Issue, Order, Priority, Status} from 'shared';
+
+export class Filters {
+  private readonly _viewStatuses: Set | undefined;
+  private readonly _issuesStatuses: Set | undefined;
+  private readonly _issuesPriorities: Set | undefined;
+  readonly hasNonViewFilters: boolean;
+  constructor(
+    view: string | null,
+    priorityFilter: Priority[] | null,
+    statusFilter: Status[] | null,
+  ) {
+    this._viewStatuses = undefined;
+    switch (view?.toLowerCase()) {
+      case 'active':
+        this._viewStatuses = new Set(['IN_PROGRESS', 'TODO']);
+        break;
+      case 'backlog':
+        this._viewStatuses = new Set(['BACKLOG']);
+        break;
+      default:
+        this._viewStatuses = undefined;
+    }
+
+    this._issuesStatuses = undefined;
+    this._issuesPriorities = undefined;
+    this.hasNonViewFilters = false;
+    if (statusFilter) {
+      this._issuesStatuses = new Set();
+      for (const s of statusFilter) {
+        if (!this._viewStatuses || this._viewStatuses.has(s)) {
+          this.hasNonViewFilters = true;
+          this._issuesStatuses.add(s);
+        }
+      }
+    }
+    if (!this.hasNonViewFilters) {
+      this._issuesStatuses = this._viewStatuses;
+    }
+
+    if (priorityFilter) {
+      this._issuesPriorities = new Set();
+      for (const p of priorityFilter) {
+        this.hasNonViewFilters = true;
+        this._issuesPriorities.add(p);
+      }
+      if (this._issuesPriorities.size === 0) {
+        this._issuesPriorities = undefined;
+      }
+    }
+  }
+
+  viewFilter(issue: Issue): boolean {
+    return this._viewStatuses ? this._viewStatuses.has(issue.status) : true;
+  }
+
+  issuesFilter(issue: Issue): boolean {
+    if (this._issuesStatuses) {
+      if (!this._issuesStatuses.has(issue.status)) {
+        return false;
+      }
+    }
+    if (this._issuesPriorities) {
+      if (!this._issuesPriorities.has(issue.priority)) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  equals(other: Filters): boolean {
+    return (
+      this === other ||
+      (isEqual(this._viewStatuses, other._viewStatuses) &&
+        isEqual(this._issuesStatuses, other._issuesStatuses) &&
+        isEqual(this._issuesPriorities, other._issuesPriorities) &&
+        isEqual(this.hasNonViewFilters, other.hasNonViewFilters))
+    );
+  }
+}
+
+export function getFilters(
+  view: string | null,
+  priorityFilter: Priority[] | null,
+  statusFilter: Status[] | null,
+): Filters {
+  return new Filters(view, priorityFilter, statusFilter);
+}
+
+export function getIssueOrder(
+  view: string | null,
+  orderBy: Order | null,
+): Order {
+  if (view === 'board') {
+    return 'KANBAN';
+  }
+  return orderBy ?? 'MODIFIED';
+}
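For reference, a rough usage sketch of the new Filters helpers (sketch only, not part of the patch; visibleIssues and allIssues are hypothetical names, and the status/order literals are the ones the class itself uses):

// Sketch: how the reducer-side filtering could consume getFilters/getIssueOrder.
import {Issue, Status} from 'shared';
import {getFilters, getIssueOrder} from './filters';

function visibleIssues(allIssues: Issue[]): Issue[] {
  // 'active' view keeps IN_PROGRESS and TODO; adding a TODO status filter
  // narrows it further and flips hasNonViewFilters to true.
  const statusFilter: Status[] = ['TODO'];
  const filters = getFilters('active', null, statusFilter);
  return allIssues.filter(issue => filters.issuesFilter(issue));
}

// 'board' always forces kanban ordering; otherwise orderBy wins,
// falling back to 'MODIFIED' when no explicit order is set.
const boardOrder = getIssueOrder('board', null); // 'KANBAN'
const defaultOrder = getIssueOrder(null, null); // 'MODIFIED'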
diff --git a/client/src/hooks/query-state-hooks.ts b/client/src/hooks/query-state-hooks.ts
new file mode 100644
index 00000000..3b882be8
--- /dev/null
+++ b/client/src/hooks/query-state-hooks.ts
@@ -0,0 +1,42 @@
+import useQueryState, {
+  identityProcessor,
+  QueryStateProcessor,
+} from './useQueryState';
+import {Order, Priority, Status} from 'shared';
+
+const processOrderBy: QueryStateProcessor = {
+  toString: (value: Order) => value,
+  fromString: (value: string | null) => (value ?? 'MODIFIED') as Order,
+};
+
+const processStatuFilter: QueryStateProcessor = {
+  toString: (value: Status[]) => value.join(','),
+  fromString: (value: string | null) =>
+    value === null ? null : (value.split(',') as Status[]),
+};
+
+const processPriorityFilter: QueryStateProcessor = {
+  toString: (value: Priority[]) => value.join(','),
+  fromString: (value: string | null) =>
+    value === null ? null : (value.split(',') as Priority[]),
+};
+
+export function useOrderByState() {
+  return useQueryState('orderBy', processOrderBy);
+}
+
+export function useStatusFilterState() {
+  return useQueryState('statusFilter', processStatuFilter);
+}
+
+export function usePriorityFilterState() {
+  return useQueryState('priorityFilter', processPriorityFilter);
+}
+
+export function useViewState() {
+  return useQueryState('view', identityProcessor);
+}
+
+export function useIssueDetailState() {
+  return useQueryState('iss', identityProcessor);
+}
diff --git a/frontend/hooks/useClickOutside.ts b/client/src/hooks/useClickOutside.ts
similarity index 100%
rename from frontend/hooks/useClickOutside.ts
rename to client/src/hooks/useClickOutside.ts
diff --git a/frontend/hooks/useKeyPressed.ts b/client/src/hooks/useKeyPressed.ts
similarity index 100%
rename from frontend/hooks/useKeyPressed.ts
rename to client/src/hooks/useKeyPressed.ts
diff --git a/frontend/hooks/useLockBodyScroll.ts b/client/src/hooks/useLockBodyScroll.ts
similarity index 100%
rename from frontend/hooks/useLockBodyScroll.ts
rename to client/src/hooks/useLockBodyScroll.ts
diff --git a/client/src/hooks/useQueryState.ts b/client/src/hooks/useQueryState.ts
new file mode 100644
index 00000000..9720ef0f
--- /dev/null
+++ b/client/src/hooks/useQueryState.ts
@@ -0,0 +1,83 @@
+import {useState, useEffect, useCallback} from 'react';
+
+export type QueryStateProcessor = {
+  toString: (value: T) => string;
+  fromString: (value: string | null) => T | null;
+};
+
+export const identityProcessor = {
+  toString: (value: string) => value,
+  fromString: (value: string | null) => value,
+};
+
+const queryStateListenres = new Set<() => void>();
+
+export function useQueryState(
+  key: string,
+  processor: QueryStateProcessor,
+) {
+  function getQueryValue() {
+    const searchParams = new URLSearchParams(window.location.search);
+    const param = searchParams.get(key);
+    return param === null ? null : decodeURIComponent(param);
+  }
+  function processQueryValue(queryValue: string | null) {
+    return queryValue === null ? null : processor.fromString(queryValue);
+  }
+  // Initialize state from the current URL
+  const [value, setValue] = useState(getQueryValue);
+
+  // Update URL when state changes
+  useEffect(() => {
+    const searchParams = new URLSearchParams(window.location.search);
+    const oldRelativePathQuery = `${
+      window.location.pathname
+    }?${searchParams.toString()}`;
+    if (value === null) {
+      searchParams.delete(key);
+    } else {
+      searchParams.set(key, encodeURIComponent(value));
+    }
+    const newRelativePathQuery = `${
+      window.location.pathname
+    }?${searchParams.toString()}`;
+    if (oldRelativePathQuery === newRelativePathQuery) {
+      return;
+    }
+    history.pushState(null, '', newRelativePathQuery);
+    for (const listener of queryStateListenres) {
+      listener();
+    }
+  }, [key, value, processor]);
+
+  useEffect(() => {
+    const handlePopState = () => {
+      console.log('pop state event...');
+      const encoded = getQueryValue();
+      setValue(encoded);
+    };
+
+    // Subscribe to popstate event
+    window.addEventListener('popstate', handlePopState);
+    queryStateListenres.add(handlePopState);
+
+    // Cleanup listener
+    return () => {
+      window.removeEventListener('popstate', handlePopState);
+      queryStateListenres.delete(handlePopState);
+    };
+  }, [key]);
+
+  // Wrap setValue with a callback that ensures a new function is not created on every render
+  const setQueryState = useCallback(
+    (newValue: T | null) => {
+      const encoded = newValue === null ? null : processor.toString(newValue);
+      setValue(encoded);
+    },
+    [setValue],
+  );
+
+  return [processQueryValue(value), setQueryState] as const;
+}
+
+export default useQueryState;
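The useQueryState hook above mirrors a single query-string parameter into React state: setting it pushes a new history entry, and the module-level listener set plus the popstate handler keep every mounted instance in sync. A minimal consumer might look like this (sketch only, not part of the patch; the 'tab' parameter and its values are invented for illustration):

// Sketch: a component whose state lives entirely in ?tab=...
import useQueryState, {identityProcessor} from './useQueryState';

export function TabSwitcher() {
  // Reads ?tab=... from the URL; setting it pushes a new history entry and
  // notifies every other mounted useQueryState instance.
  const [tab, setTab] = useQueryState('tab', identityProcessor);
  return (
    <button onClick={() => setTab(tab === 'board' ? null : 'board')}>
      {tab ?? 'list'}
    </button>
  );
}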
diff --git a/client/src/index.css b/client/src/index.css
new file mode 100644
index 00000000..87e8c777
--- /dev/null
+++ b/client/src/index.css
@@ -0,0 +1,56 @@
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
+
+body {
+  font-size: 12px;
+  color-scheme: dark;
+  @apply font-medium text-white bg-gray;
+}
+
+@font-face {
+  font-family: 'Inter UI';
+  font-style: normal;
+  font-weight: 400;
+  font-display: swap;
+  src: url('./assets/fonts/Inter-UI-Regular.woff2') format('woff2'),
+    url('./assets/fonts/Inter-UI-Regular.woff') format('woff');
+}
+
+@font-face {
+  font-family: 'Inter UI';
+  font-style: normal;
+  font-weight: 500;
+  font-display: swap;
+  src: url('./assets/fonts/Inter-UI-Medium.woff2') format('woff2'),
+    url('./assets/fonts/Inter-UI-Medium.woff') format('woff');
+}
+
+@font-face {
+  font-family: 'Inter UI';
+  font-style: normal;
+  font-weight: 600;
+  font-display: swap;
+  src: url('./assets/fonts/Inter-UI-SemiBold.woff2') format('woff2'),
+    url('./assets/fonts/Inter-UI-SemiBold.woff') format('woff');
+}
+
+@font-face {
+  font-family: 'Inter UI';
+  font-style: normal;
+  font-weight: 800;
+  font-display: swap;
+  src: url('./assets/fonts/Inter-UI-ExtraBold.woff2') format('woff2'),
+    url('./assets/fonts/Inter-UI-ExtraBold.woff') format('woff');
+}
+
+.modal {
+  max-width: calc(100vw - 32px);
+  max-height: calc(100vh - 32px);
+}
+
+#root,
+body,
+html {
+  height: 100%;
+}
diff --git a/client/src/index.tsx b/client/src/index.tsx
new file mode 100644
index 00000000..ad7ea10b
--- /dev/null
+++ b/client/src/index.tsx
@@ -0,0 +1,38 @@
+import ReactDOM from 'react-dom/client';
+import './index.css';
+import {mutators} from './model/mutators';
+import {Replicache} from 'replicache';
+import {UndoManager} from '@rocicorp/undo';
+import App from './app';
+
+async function init() {
+  // See https://doc.replicache.dev/licensing for how to get a license key.
+  const licenseKey = import.meta.env.VITE_REPLICACHE_LICENSE_KEY;
+  if (!licenseKey) {
+    throw new Error('Missing VITE_REPLICACHE_LICENSE_KEY');
+  }
+
+  const r = new Replicache({
+    name: 'anon',
+    licenseKey,
+    mutators,
+    logLevel: 'debug',
+    pushURL: `/api/replicache/push`,
+    pullURL: `/api/replicache/pull`,
+  });
+  const undoManager = new UndoManager();
+
+  function Root() {
+    return (

      + +
      + ); + } + + ReactDOM.createRoot(document.getElementById('root') as HTMLElement).render( + , + ); +} + +await init(); diff --git a/frontend/issue-board.tsx b/client/src/issue/issue-board.tsx similarity index 66% rename from frontend/issue-board.tsx rename to client/src/issue/issue-board.tsx index 8c0810cb..5618aa90 100644 --- a/frontend/issue-board.tsx +++ b/client/src/issue/issue-board.tsx @@ -1,36 +1,36 @@ -import { generateNKeysBetween } from "fractional-indexing"; -import { groupBy, indexOf } from "lodash"; -import React, { memo, useCallback } from "react"; -import { DragDropContext, DropResult } from "react-beautiful-dnd"; - -import { Status, Issue, IssueUpdate, Priority } from "./issue"; -import IssueCol from "./issue-col"; +/* eslint-disable @typescript-eslint/naming-convention */ +import {generateNKeysBetween} from 'fractional-indexing'; +import {groupBy, indexOf} from 'lodash'; +import {memo, useCallback} from 'react'; +import {DragDropContext, DropResult} from 'react-beautiful-dnd'; +import {Issue, IssueUpdate, Priority, Status} from 'shared'; +import IssueCol from './issue-col'; export type IssuesByStatusType = { - [Status.BACKLOG]: Issue[]; - [Status.TODO]: Issue[]; - [Status.IN_PROGRESS]: Issue[]; - [Status.DONE]: Issue[]; - [Status.CANCELED]: Issue[]; + BACKLOG: Issue[]; + TODO: Issue[]; + IN_PROGRESS: Issue[]; + DONE: Issue[]; + CANCELED: Issue[]; }; export const getIssueByType = (allIssues: Issue[]): IssuesByStatusType => { - const issuesBySType = groupBy(allIssues, "status"); + const issuesBySType = groupBy(allIssues, 'status'); const defaultIssueByType = { - [Status.BACKLOG]: [], - [Status.TODO]: [], - [Status.IN_PROGRESS]: [], - [Status.DONE]: [], - [Status.CANCELED]: [], + BACKLOG: [], + TODO: [], + IN_PROGRESS: [], + DONE: [], + CANCELED: [], }; - const result = { ...defaultIssueByType, ...issuesBySType }; + const result = {...defaultIssueByType, ...issuesBySType}; return result; }; export function getKanbanOrderIssueUpdates( issueToMove: Issue, issueToInsertBefore: Issue, - issues: Issue[] + issues: Issue[], ): IssueUpdate[] { const indexInKanbanOrder = indexOf(issues, issueToInsertBefore); let beforeKey: string | null = null; @@ -52,19 +52,19 @@ export function getKanbanOrderIssueUpdates( const newKanbanOrderKeys = generateNKeysBetween( beforeKey, afterKey, - issuesToReKey.length + 1 // +1 for the dragged issue + issuesToReKey.length + 1, // +1 for the dragged issue ); const issueUpdates = [ { issue: issueToMove, - issueChanges: { kanbanOrder: newKanbanOrderKeys[0] }, + issueChanges: {kanbanOrder: newKanbanOrderKeys[0]}, }, ]; for (let i = 0; i < issuesToReKey.length; i++) { issueUpdates.push({ issue: issuesToReKey[i], - issueChanges: { kanbanOrder: newKanbanOrderKeys[i + 1] }, + issueChanges: {kanbanOrder: newKanbanOrderKeys[i + 1]}, }); } return issueUpdates; @@ -76,11 +76,13 @@ interface Props { onOpenDetail: (issue: Issue) => void; } -function IssueBoard({ issues, onUpdateIssues, onOpenDetail }: Props) { +function IssueBoard({issues, onUpdateIssues, onOpenDetail}: Props) { + const start = performance.now(); const issuesByType = getIssueByType(issues); + console.log(`Issues by type duration: ${performance.now() - start}ms`); const handleDragEnd = useCallback( - ({ source, destination }: DropResult) => { + ({source, destination}: DropResult) => { if (!destination) { return; } @@ -100,7 +102,7 @@ function IssueBoard({ issues, onUpdateIssues, onOpenDetail }: Props) { } const issueUpdates = issueToInsertBefore ? 
getKanbanOrderIssueUpdates(draggedIssue, issueToInsertBefore, issues) - : [{ issue: draggedIssue, issueChanges: {} }]; + : [{issue: draggedIssue, issueChanges: {}}]; if (newStatus !== sourceStatus) { issueUpdates[0] = { ...issueUpdates[0], @@ -112,7 +114,7 @@ function IssueBoard({ issues, onUpdateIssues, onOpenDetail }: Props) { } onUpdateIssues(issueUpdates); }, - [issues, issuesByType, onUpdateIssues] + [issues, issuesByType, onUpdateIssues], ); const handleChangePriority = useCallback( @@ -120,48 +122,48 @@ function IssueBoard({ issues, onUpdateIssues, onOpenDetail }: Props) { onUpdateIssues([ { issue, - issueChanges: { priority }, + issueChanges: {priority}, }, ]); }, - [onUpdateIssues] + [onUpdateIssues], ); return (
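The drag-and-drop reordering above, like handleCreateIssue in app.tsx, leans on fractional indexing: getKanbanOrderIssueUpdates generates new kanbanOrder strings between two stable neighbours so only the dragged issue and any colliding issues are re-keyed. A standalone sketch of the primitives (illustrative only, not part of the patch; 'a0' and 'a1' are just example keys):

// Sketch: the fractional-indexing calls the board code builds on.
import {generateKeyBetween, generateNKeysBetween} from 'fractional-indexing';

// A key that sorts before every existing key (what handleCreateIssue does
// when it inserts a new issue at the top of the kanban order).
const first = generateKeyBetween(null, 'a0');

// Three keys that sort strictly between two existing kanbanOrder values,
// so every other issue's key can stay untouched.
const between = generateNKeysBetween('a0', 'a1', 3);
console.log(first, between);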
      diff --git a/frontend/issue-col.tsx b/client/src/issue/issue-col.tsx similarity index 84% rename from frontend/issue-col.tsx rename to client/src/issue/issue-col.tsx index 19a339c2..ed2dc022 100644 --- a/frontend/issue-col.tsx +++ b/client/src/issue/issue-col.tsx @@ -1,16 +1,16 @@ -import StatusIcon from "./status-icon"; -import React, { CSSProperties, memo, useMemo } from "react"; +import StatusIcon from '../widgets/status-icon'; +import React, {CSSProperties, memo, useMemo} from 'react'; import { Draggable, DraggableProvided, Droppable, DroppableProvided, DroppableStateSnapshot, -} from "react-beautiful-dnd"; -import type { Issue, Priority, Status } from "./issue"; -import IssueItem from "./issue-item"; -import { FixedSizeList } from "react-window"; -import AutoSizer from "react-virtualized-auto-sizer"; +} from 'react-beautiful-dnd'; +import IssueItem from './issue-item'; +import {FixedSizeList} from 'react-window'; +import AutoSizer from 'react-virtualized-auto-sizer'; +import {Issue, Priority, Status} from 'shared'; interface Props { status: Status; @@ -30,7 +30,7 @@ interface RowProps { style: CSSProperties; } -const RowPreMemo = ({ data, index, style }: RowProps) => { +const RowPreMemo = ({data, index, style}: RowProps) => { const issue = data.issues[index]; // We are rendering an extra item for the placeholder. // To do this we increased our data set size to include one 'fake' item. @@ -42,7 +42,6 @@ const RowPreMemo = ({ data, index, style }: RowProps) => { {(provided: DraggableProvided) => { return ( - // @ts-expect-error @types/react@17 are wrong but react 18 does not work with next
      ; return ( @@ -103,7 +102,6 @@ function IssueCol({ renderClone={(provided, _snapshot, rubric) => { const issue = issues[rubric.source.index]; return ( - // @ts-expect-error @types/react@17 are wrong but react 18 does not work with nextjs
      - {({ height, width }) => { + {({height, width}: {width: number; height: number}) => { return ( void; @@ -36,22 +36,20 @@ interface Props { } const CommentsList = (comments: Comment[], isLoading: boolean) => { - const elements = sortBy(comments, (comment) => comment.created).map( - (comment) => ( -
      -
      - - {comment.creator} {timeAgo(comment.created)} -
      -
      - {comment.body} -
      + const elements = sortBy(comments, comment => comment.created).map(comment => ( +
      +
      + + {comment.creator} {timeAgo(comment.created)}
      - ) - ); +
      + {comment.body} +
      +
      + )); if (isLoading) { elements.push(
      { className=" max-w-[85vw] mx-3 bg-gray-400 mt-0 mb-5 border-transparent rounded py-3 px-3 relative whitespace-pre-wrap overflow-auto" > Loading... -
      +
      , ); } return elements; @@ -72,55 +70,59 @@ export default function IssueDetail({ issues, isLoading, }: Props) { - const [detailIssueID, setDetailIssueID] = useQueryState("iss", { - history: "push", - }); + const [detailIssueID, setDetailIssueID] = useIssueDetailState(); const [editMode, setEditMode] = useState(false); const [currentIssueIdx, setCurrentIssueIdx] = useState(-1); - const [commentText, setCommentText] = useState(""); - const [titleText, setTitleText] = useState(""); - const [descriptionText, setDescriptionText] = useState(""); + const [commentText, setCommentText] = useState(''); + const [titleText, setTitleText] = useState(''); + const [descriptionText, setDescriptionText] = useState(''); useEffect(() => { if (detailIssueID) { - const index = issues.findIndex((issue) => issue.id === detailIssueID); + const index = issues.findIndex(issue => issue.id === detailIssueID); setCurrentIssueIdx(index); } }, [issues, detailIssueID]); const issue = useSubscribe( rep, - async (tx) => { + async tx => { if (detailIssueID) { return (await getIssue(tx, detailIssueID)) || null; } return null; }, - { default: null, dependencies: [detailIssueID] } + {default: null, dependencies: [detailIssueID]}, ); const description = useSubscribe( rep, - async (tx) => { + async tx => { if (detailIssueID) { - return (await getIssueDescription(tx, detailIssueID)) || null; + return (await getDescription(tx, detailIssueID)) || null; } return null; }, - { default: null, dependencies: [detailIssueID] } + { + default: null, + dependencies: [detailIssueID], + }, ); const comments = useSubscribe( rep, - async (tx) => { + async tx => { if (detailIssueID) { return (await getIssueComments(tx, detailIssueID)) || []; } return []; }, - { default: [], dependencies: [detailIssueID] } + { + default: [], + dependencies: [detailIssueID], + }, ); const handleClose = useCallback(async () => { @@ -129,38 +131,38 @@ export default function IssueDetail({ const handleChangePriority = useCallback( (priority: Priority) => { - issue && onUpdateIssues([{ issue, issueChanges: { priority } }]); + issue && onUpdateIssues([{issue, issueChanges: {priority}}]); }, - [onUpdateIssues, issue] + [onUpdateIssues, issue], ); const handleChangeStatus = useCallback( (status: Status) => { - issue && onUpdateIssues([{ issue, issueChanges: { status } }]); + issue && onUpdateIssues([{issue, issueChanges: {status}}]); }, - [onUpdateIssues, issue] + [onUpdateIssues, issue], ); const handleAddComment = useCallback(() => { - if (commentText !== "") { + if (commentText !== '') { onAddComment({ - id: nanoid(), + id: `${issue?.id as string}/${nanoid()}`, issueID: issue?.id as string, created: Date.now(), - creator: "Me", + creator: 'Me', body: commentText, }); - setCommentText(""); + setCommentText(''); } }, [onAddComment, commentText, issue]); const handleFwdPrev = useCallback( - async (direction: "prev" | "fwd") => { + async (direction: 'prev' | 'fwd') => { if (currentIssueIdx === undefined) { return; } let newIss = undefined; - if (direction === "prev") { + if (direction === 'prev') { if (currentIssueIdx === 0) { return; } @@ -172,28 +174,25 @@ export default function IssueDetail({ newIss = issues[currentIssueIdx + 1].id; } - await setDetailIssueID(newIss, { - scroll: false, - shallow: true, - }); + await setDetailIssueID(newIss); }, - [currentIssueIdx, issues, setDetailIssueID] + [currentIssueIdx, issues, setDetailIssueID], ); const handleFwd = useCallback(async () => { - await handleFwdPrev("fwd"); + await handleFwdPrev('fwd'); }, [handleFwdPrev]); 
const handlePrev = useCallback(async () => { - await handleFwdPrev("prev"); + await handleFwdPrev('prev'); }, [handleFwdPrev]); - useKeyPressed("j", handleFwd); - useKeyPressed("k", handlePrev); + useKeyPressed('j', handleFwd); + useKeyPressed('k', handlePrev); const handleEdit = () => { - setTitleText(issue?.title || ""); - setDescriptionText(description || ""); + setTitleText(issue?.title || ''); + setDescriptionText(description?.body || ''); setEditMode(true); }; @@ -204,9 +203,9 @@ export default function IssueDetail({ const handleSave = () => { if (issue) { const descriptionUpdate = - descriptionText !== description + descriptionText !== description?.body ? { - description: description || "", + description: description?.body || '', descriptionChange: descriptionText, } : undefined; @@ -244,10 +243,10 @@ export default function IssueDetail({
      handleFwdPrev("fwd")} + onMouseDown={() => handleFwdPrev('fwd')} disabled={currentIssueIdx === issues.length - 1} > @@ -275,13 +274,13 @@ export default function IssueDetail({
      {editMode ? ( @@ -313,7 +312,7 @@ export default function IssueDetail({ {editMode ? ( setTitleText(e.target.value)} + onChange={e => setTitleText(e.target.value)} value={titleText} /> ) : ( @@ -324,13 +323,13 @@ export default function IssueDetail({ {editMode ? (