diff --git a/.babelrc b/.babelrc
deleted file mode 100644
index e22cfb0e..00000000
--- a/.babelrc
+++ /dev/null
@@ -1,14 +0,0 @@
-{
- "presets": [
- [
- "next/babel",
- {
- "preset-env": {
- "targets": {
- "esmodules": true
- }
- }
- }
- ]
- ]
-}
diff --git a/.eslintignore b/.eslintignore
deleted file mode 100644
index e3b3fe77..00000000
--- a/.eslintignore
+++ /dev/null
@@ -1,34 +0,0 @@
-# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
-
-# dependencies
-/node_modules
-/.pnp
-.pnp.js
-
-# testing
-/coverage
-
-# next.js
-/.next/
-/out/
-
-# production
-/build
-
-# misc
-.DS_Store
-*.pem
-
-# debug
-npm-debug.log*
-yarn-debug.log*
-yarn-error.log*
-
-# local env files
-.env.local
-.env.development.local
-.env.test.local
-.env.production.local
-
-# vercel
-.vercel
\ No newline at end of file
diff --git a/.eslintrc b/.eslintrc
deleted file mode 100644
index f518d6f0..00000000
--- a/.eslintrc
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "env": {
- "browser": true,
- "node": true
- },
- "parser": "@typescript-eslint/parser",
- "parserOptions": {
- "ecmaVersion": 12,
- "sourceType": "module",
- "project": "./tsconfig.json"
- },
- "extends": [
- "eslint:recommended",
- "plugin:@typescript-eslint/recommended",
- "next",
- "prettier"
- ],
- "rules": {
- "@typescript-eslint/no-floating-promises": "error",
- "@typescript-eslint/naming-convention": [
- "error",
- {
- "selector": "memberLike",
- "modifiers": ["public"],
- "format": ["camelCase"],
- "leadingUnderscore": "forbid"
- }
- ],
- "eqeqeq": "error",
- "no-var": "error",
- "object-shorthand": "error",
- "prefer-arrow-callback": "error",
- "prefer-destructuring": [
- "error",
- {
- "VariableDeclarator": {
- "object": true
- }
- },
- {
- "enforceForRenamedProperties": false
- }
- ],
- "no-unused-vars": "off",
- "@typescript-eslint/no-unused-vars": [
- "error",
- { "argsIgnorePattern": "^_" }
- ]
- },
- "plugins": ["react", "@typescript-eslint"]
-}
diff --git a/.gitignore b/.gitignore
index e0a4dc30..7ba0f0b3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,46 +1,109 @@
-# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+vite.config.ts.*
-# dependencies
-/node_modules
-/.pnp
-.pnp.js
+# Diagnostic reports (https://nodejs.org/api/report.html)
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
-# testing
-/coverage
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
-# next.js
-/.next/
-/out/
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
-# production
-/build
+# Coverage directory used by tools like istanbul
+coverage
+*.lcov
-# misc
-.DS_Store
-*.pem
+# nyc test coverage
+.nyc_output
-# debug
-npm-debug.log*
-yarn-debug.log*
-yarn-error.log*
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
-# local env files
-.env.local
-.env.development.local
-.env.test.local
-.env.production.local
+# Bower dependency directory (https://bower.io/)
+bower_components
-# vercel
-.vercel
+# node-waf configuration
+.lock-wscript
-# react-designer
-lib
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
-#tsc
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# TypeScript v1 declaration files
+typings/
+
+# TypeScript cache
*.tsbuildinfo
-# Supabase
-**/supabase/.branches
-**/supabase/.temp
-**/supabase/.env
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Microbundle cache
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variables file
.env
+.env.test
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+
+# Next.js build output
+.next
+
+# Nuxt.js build / generate output
+.nuxt
+dist
+
+# Gatsby files
+.cache/
+# Comment in the public line in if your project uses Gatsby and *not* Next.js
+# https://nextjs.org/blog/next-9-1#public-directory-support
+# public
+
+# vuepress build output
+.vuepress/dist
+
+# Serverless directories
+.serverless/
+
+# FuseBox cache
+.fusebox/
+
+# DynamoDB Local files
+.dynamodb/
+
+# TernJS port file
+.tern-port
+
+
+.DS_Store
+archive.sh
\ No newline at end of file
diff --git a/.jshintrc b/.jshintrc
deleted file mode 100644
index d9fb1898..00000000
--- a/.jshintrc
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "node": true,
- "browser": true,
- "esnext": true,
- "newcap": false
-}
diff --git a/.npmrc b/.npmrc
deleted file mode 100644
index 37cfe26a..00000000
--- a/.npmrc
+++ /dev/null
@@ -1 +0,0 @@
-unsafe-perm = true
\ No newline at end of file
diff --git a/.prettierignore b/.prettierignore
deleted file mode 100644
index e3b3fe77..00000000
--- a/.prettierignore
+++ /dev/null
@@ -1,34 +0,0 @@
-# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
-
-# dependencies
-/node_modules
-/.pnp
-.pnp.js
-
-# testing
-/coverage
-
-# next.js
-/.next/
-/out/
-
-# production
-/build
-
-# misc
-.DS_Store
-*.pem
-
-# debug
-npm-debug.log*
-yarn-debug.log*
-yarn-error.log*
-
-# local env files
-.env.local
-.env.development.local
-.env.test.local
-.env.production.local
-
-# vercel
-.vercel
\ No newline at end of file
diff --git a/.prettierrc.json b/.prettierrc.json
deleted file mode 100644
index b0646c18..00000000
--- a/.prettierrc.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "singleQuote": false,
- "trailingComma": "es5",
- "arrowParens": "always",
- "bracketSpacing": true,
- "tabWidth": 2,
- "useTabs": false
-}
diff --git a/.vscode b/.vscode
deleted file mode 100644
index 57810d34..00000000
--- a/.vscode
+++ /dev/null
@@ -1,9 +0,0 @@
-{
- "editor.formatOnPaste": true,
- "editor.formatOnSave": true,
- "editor.defaultFormatter": "esbenp.prettier-vscode",
- "editor.codeActionsOnSave": {
- "source.fixAll.eslint": true,
- "source.fixAll.format": true
- }
-}
diff --git a/LICENSE b/LICENSE
index 9cf10627..163dc218 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,19 +1,13 @@
-MIT License
+Copyright 2022 Rocicorp LLC
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
+http://www.apache.org/licenses/LICENSE-2.0
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
\ No newline at end of file
diff --git a/README.md b/README.md
index 383c667f..ab7a1cfe 100644
--- a/README.md
+++ b/README.md
@@ -1,39 +1,55 @@
-# Repliear
+![Replicache logo](https://uploads-ssl.webflow.com/623a2f46e064937599256c2d/6269e72c61073c3d561a5015_Lockup%20v2.svg)
-A high-performance issue tracker in the style of [Linear](https://linear.app/).
+# repliear-row-versioning
-Built with [Replicache](https://replicache.dev), [Next.js](https://nextjs.org/),
-[Pusher](https://pusher.com/), and [Postgres](https://www.postgresql.org/).
+This is a demonstration of the [Row Version Strategy](https://doc.replicache.dev/strategies/row-version).
-Running at [repliear.herokuapp.com](https://repliear.herokuapp.com/).
+A high-performance issue tracker in the style of Linear.
-# Prerequisites
+Built with [Replicache](https://replicache.dev), [ViteJS](https://vitejs.dev/),
+and [Postgres](https://www.postgresql.org/).
-1. [Get a Replicache license key](https://doc.replicache.dev/licensing)
-2. Install PostgreSQL. On MacOS, we recommend using [Postgres.app](https://postgresapp.com/). For other OSes and options, see [Postgres Downloads](https://www.postgresql.org/download/).
-3. [Sign up for a free pusher.com account](https://pusher.com/) and create a new "channels" app.
-# To run locally
+## 1. Setup
-Get the Pusher environment variables from the ["App Keys" section](https://i.imgur.com/7DNmTKZ.png) of the Pusher App UI.
+#### Get your Replicache License Key
-**Note:** These instructions assume you installed PostgreSQL via Postgres.app on MacOS. If you installed some other way, or configured PostgreSQL specially, you may additionally need to set the `PGUSER` and `PGPASSWORD` environment variables.
+```bash
+$ npx replicache get-license
+```
+
+#### Set your `VITE_REPLICACHE_LICENSE_KEY` environment variable
+
+```bash
+$ export VITE_REPLICACHE_LICENSE_KEY=""
+```
+
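+Alternatively, copy `client/.env.example` to `client/.env` and set the key there; Vite picks it up automatically.
+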
+#### Install Postgres
+
+Install PostgreSQL. On MacOS, we recommend using [Postgres.app](https://postgresapp.com/). For other OSes and options, see [Postgres Downloads](https://www.postgresql.org/download/).
+
+Once installed, set your database URL:
+```bash
+$ export DATABASE_URL="postgresql://localhost/repliear"
```
-export PGDATABASE="repliear"
-export NEXT_PUBLIC_REPLICACHE_LICENSE_KEY=""
-export NEXT_PUBLIC_PUSHER_APP_ID=
-export NEXT_PUBLIC_PUSHER_KEY=
-export NEXT_PUBLIC_PUSHER_SECRET=
-export NEXT_PUBLIC_PUSHER_CLUSTER=
-
-# Create a new database for Repliear
-psql -d postgres -c 'create database repliear'
-
-npm install
-npm run dev
+
+and create a Postgres database:
+
+```bash
+$ psql -d postgres -c 'create database repliear'
```
-## Credits
+#### Install and Build
+
+```bash
+$ npm install; npm run build;
+```
+
+## 2. Start frontend and backend watcher
+
+```bash
+$ npm run watch --ws
+```
-We started this project by forking [linear_clone](https://github.com/tuan3w/linearapp_clone). This enabled us to get the visual styling right much faster than we otherwise could have.
+Provides an example of integrating Replicache with React in a Linear-style issue tracker.
diff --git a/backend/comments-react.d.ts b/backend/comments-react.d.ts
deleted file mode 100644
index 43fb9936..00000000
--- a/backend/comments-react.d.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-declare module "*comments-react.js.gz" {
- const gitHubComments: {
- number: number;
- // eslint-disable-next-line @typescript-eslint/naming-convention
- comment_id: string;
- body: string | null;
- // eslint-disable-next-line @typescript-eslint/naming-convention
- updated_at: string;
- // eslint-disable-next-line @typescript-eslint/naming-convention
- created_at: string;
-
- // eslint-disable-next-line @typescript-eslint/naming-convention
- creator_user_login: string;
- }[];
- export default gitHubComments;
-}
diff --git a/backend/data.test.ts b/backend/data.test.ts
deleted file mode 100644
index 6d7706cc..00000000
--- a/backend/data.test.ts
+++ /dev/null
@@ -1,390 +0,0 @@
-import { expect } from "chai";
-import { Issue, Priority, Status, Comment } from "../frontend/issue";
-import { setup, teardown, test } from "mocha";
-import type { JSONValue } from "replicache";
-import {
- createDatabase,
- delEntries,
- getEntry,
- getVersion,
- initSpace,
- putEntries,
- SampleData,
- BASE_SPACE_ID,
- getIssueEntries,
- getNonIssueEntriesInSyncOrder,
-} from "./data";
-import { transact, withExecutor } from "./pg";
-
-const i1: Issue = {
- priority: Priority.HIGH,
- id: "1",
- title: "Issue 1",
- status: Status.IN_PROGRESS,
- modified: 0,
- created: 0,
- creator: "testUser1",
- kanbanOrder: "1",
-};
-
-const comment1i1: Comment = {
- id: "1",
- issueID: "1",
- created: 0,
- body: "Comment 1",
- creator: "testUser1",
-};
-
-const comment2i1: Comment = {
- id: "2",
- issueID: "1",
- created: 0,
- body: "Comment 2",
- creator: "testUser2",
-};
-
-const i2: Issue = {
- priority: Priority.MEDIUM,
- id: "2",
- title: "Issue 2",
- status: Status.IN_PROGRESS,
- modified: 0,
- created: 0,
- creator: "testUser2",
- kanbanOrder: "2",
-};
-
-const comment1i2: Comment = {
- id: "1",
- issueID: "2",
- created: 0,
- body: "Comment 1",
- creator: "testUser1",
-};
-
-const i3: Issue = {
- priority: Priority.LOW,
- id: "3",
- title: "Issue 3",
- status: Status.TODO,
- modified: 0,
- created: 0,
- creator: "testUser3",
- kanbanOrder: "3",
-};
-
-export const testSampleData: SampleData = [
- {
- issue: i1,
- description: "Description 1",
- comments: [comment1i1, comment2i1],
- },
- { issue: i2, description: "Description 2", comments: [comment1i2] },
- { issue: i3, description: "Description 3", comments: [] },
-];
-
-function getTestSyncOrder(key: string) {
- return `${key}-testSyncOrder`;
-}
-
-setup(async () => {
- // TODO: This is a very expensive way to unit test :).
- // Is there an in-memory postgres or something?
- await transact((executor) => createDatabase(executor));
-});
-
-teardown(async () => {
- await withExecutor(async (executor) => {
- await executor(`delete from entry where spaceid = $1`, [BASE_SPACE_ID]);
- await executor(`delete from space where id = $1`, [BASE_SPACE_ID]);
- await executor(`delete from entry where spaceid like 'test-s-%'`);
- await executor(`delete from space where id like 'test-s-%'`);
- });
-});
-
-test("getEntry", async () => {
- type Case = {
- name: string;
- exists: boolean;
- deleted: boolean;
- validJSON: boolean;
- };
- const cases: Case[] = [
- {
- name: "does not exist",
- exists: false,
- deleted: false,
- validJSON: false,
- },
- {
- name: "exists, deleted",
- exists: true,
- deleted: true,
- validJSON: true,
- },
- {
- name: "exists, not deleted, invalid JSON",
- exists: true,
- deleted: false,
- validJSON: false,
- },
- {
- name: "exists, not deleted, valid JSON",
- exists: true,
- deleted: false,
- validJSON: true,
- },
- ];
-
- await withExecutor(async (executor) => {
- for (const c of cases) {
- await executor(
- `delete from entry where spaceid = 'test-s-s1' and key = 'foo'`
- );
- if (c.exists) {
- await executor(
- `insert into entry (spaceid, key, value, syncorder, deleted, version, lastmodified) values ('test-s-s1', 'foo', $1, '${getTestSyncOrder(
- "foo"
- )}',$2, 1, now())`,
- [c.validJSON ? JSON.stringify(42) : "not json", c.deleted]
- );
- }
-
- const promise = getEntry(executor, "test-s-s1", "foo");
- let result: JSONValue | undefined;
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- let error: any | undefined;
- await promise.then(
- (r) => (result = r),
- (e) => (error = String(e))
- );
- if (!c.exists) {
- expect(result, c.name).undefined;
- expect(error, c.name).undefined;
- } else if (c.deleted) {
- expect(result, c.name).undefined;
- expect(error, c.name).undefined;
- } else if (!c.validJSON) {
- expect(result, c.name).undefined;
- expect(error, c.name).contains("SyntaxError");
- } else {
- expect(result, c.name).eq(42);
- expect(error, c.name).undefined;
- }
- }
- });
-});
-
-test("getEntry RoundTrip types", async () => {
- await withExecutor(async (executor) => {
- await putEntries(
- executor,
- "test-s-s1",
- [
- ["boolean", true, getTestSyncOrder("boolean")],
- ["number", 42, getTestSyncOrder("number")],
- ["string", "foo", getTestSyncOrder("string")],
- ["array", [1, 2, 3], getTestSyncOrder("array")],
- ["object", { a: 1, b: 2 }, getTestSyncOrder("object")],
- ],
- 1
- );
- expect(await getEntry(executor, "test-s-s1", "boolean")).eq(true);
- expect(await getEntry(executor, "test-s-s1", "number")).eq(42);
- expect(await getEntry(executor, "test-s-s1", "string")).eq("foo");
- expect(await getEntry(executor, "test-s-s1", "array")).deep.equal([
- 1,
- 2,
- 3,
- ]);
- expect(await getEntry(executor, "test-s-s1", "object")).deep.equal({
- a: 1,
- b: 2,
- });
- });
-});
-
-test("putEntries", async () => {
- type Case = {
- name: string;
- duplicate: boolean;
- deleted: boolean;
- };
-
- const cases: Case[] = [
- {
- name: "no duplicate",
- duplicate: false,
- deleted: false,
- },
- {
- name: "duplicate",
- duplicate: true,
- deleted: false,
- },
- {
- name: "deleted",
- duplicate: true,
- deleted: true,
- },
- ];
-
- await withExecutor(async (executor) => {
- for (const c of cases) {
- await executor(
- `delete from entry where spaceid = 'test-s-s1' and key = 'bar'`
- );
- await executor(
- `delete from entry where spaceid = 'test-s-s1' and key = 'foo'`
- );
-
- if (c.duplicate) {
- await putEntries(
- executor,
- "test-s-s1",
- [["foo", 41, getTestSyncOrder("foo")]],
- 1
- );
- if (c.deleted) {
- await delEntries(executor, "test-s-s1", ["foo"], 1);
- }
- }
- const res: Promise<void> = putEntries(
- executor,
- "test-s-s1",
- [
- ["bar", 100, getTestSyncOrder("bar")],
- ["foo", 42, getTestSyncOrder("foo")],
- ],
- 2
- );
- await res.catch(() => ({}));
-
- const qr = await executor(
- `select spaceid, key, value, deleted, version
- from entry where spaceid = 'test-s-s1' and key in ('bar', 'foo') order by key`
- );
- const [barRow, fooRow] = qr.rows;
-
- expect(fooRow, c.name).not.undefined;
- {
- const { spaceid, key, value, deleted, version } = fooRow;
- expect(spaceid, c.name).eq("test-s-s1");
- expect(key, c.name).eq("foo");
- expect(value, c.name).eq("42");
- expect(deleted, c.name).false;
- expect(version, c.name).eq(2);
- }
- {
- const { spaceid, key, value, deleted, version } = barRow;
- expect(spaceid, c.name).eq("test-s-s1");
- expect(key, c.name).eq("bar");
- expect(value, c.name).eq("100");
- expect(deleted, c.name).false;
- expect(version, c.name).eq(2);
- }
- }
- });
-});
-
-test("delEntries", async () => {
- type Case = {
- name: string;
- exists: boolean;
- };
- const cases: Case[] = [
- {
- name: "does not exist",
- exists: false,
- },
- {
- name: "exists",
- exists: true,
- },
- ];
- for (const c of cases) {
- await withExecutor(async (executor) => {
- await executor(
- `delete from entry where spaceid = 'test-s-s1' and key = 'bar'`
- );
- await executor(
- `delete from entry where spaceid = 'test-s-s1' and key = 'foo'`
- );
- await executor(
- `insert into entry (spaceid, key, value, syncorder, deleted, version, lastmodified) values ('test-s-s1', 'bar', '100', '${getTestSyncOrder(
- "bar"
- )}', false, 1, now())`
- );
- if (c.exists) {
- await executor(
- `insert into entry (spaceid, key, value, syncorder, deleted, version, lastmodified) values ('test-s-s1', 'foo', '42', '${getTestSyncOrder(
- "foo"
- )}',false, 1, now())`
- );
- }
-
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- let error: any | undefined;
- await delEntries(executor, "test-s-s1", ["bar", "foo"], 2).catch(
- (e) => (error = String(e))
- );
-
- const qr = await executor(
- `
- select spaceid, key, value, deleted, version from entry
- where spaceid = 'test-s-s1' and key in ('bar', 'foo') order by key
- `
- );
- const [barRow, fooRow] = qr.rows;
-
- expect(barRow, c.name).not.undefined;
- const { spaceid, key, value, deleted, version } = barRow;
- expect(spaceid, c.name).eq("test-s-s1");
- expect(key, c.name).eq("bar");
- expect(value, c.name).eq("100");
- expect(deleted, c.name).true;
- expect(version, c.name).eq(2);
- if (c.exists) {
- expect(fooRow, c.name).not.undefined;
- const { spaceid, key, value, deleted, version } = fooRow;
- expect(spaceid, c.name).eq("test-s-s1");
- expect(key, c.name).eq("foo");
- expect(value, c.name).eq("42");
- expect(deleted, c.name).true;
- expect(version, c.name).eq(2);
- } else {
- expect(fooRow, c.name).undefined;
- expect(error, c.name).undefined;
- }
- });
- }
-});
-
-test("initSpace", async () => {
- await withExecutor(async (executor) => {
- await executor(`delete from entry where spaceid = $1`, [BASE_SPACE_ID]);
- await executor(`delete from space where id = $1`, [BASE_SPACE_ID]);
- const testSpaceID1 = await initSpace(executor, () =>
- Promise.resolve(testSampleData)
- );
- expect(await getVersion(executor, testSpaceID1)).eq(1);
- // 3 issues
- expect((await getIssueEntries(executor, testSpaceID1)).length).eq(3);
- // 3 descriptions, and 3 comments
- expect(
- (await getNonIssueEntriesInSyncOrder(executor, testSpaceID1, "", 10))
- .entries.length
- ).eq(6);
- const testSpaceID2 = await initSpace(executor, () => {
- throw new Error("unexpected call to getSampleIssues on subsequent calls");
- });
- expect(await getVersion(executor, testSpaceID2)).eq(1);
- // 3 issues
- expect((await getIssueEntries(executor, testSpaceID2)).length).eq(3);
- // 3 descriptions, and 3 comments
- expect(
- (await getNonIssueEntriesInSyncOrder(executor, testSpaceID2, "", 10))
- .entries.length
- ).eq(6);
- });
-});
diff --git a/backend/data.ts b/backend/data.ts
deleted file mode 100644
index 0733537a..00000000
--- a/backend/data.ts
+++ /dev/null
@@ -1,457 +0,0 @@
-import type { JSONValue } from "replicache";
-import { z } from "zod";
-import type { Executor } from "./pg";
-import { ReplicacheTransaction } from "./replicache-transaction";
-import type { Issue, Comment, Description } from "../frontend/issue";
-import { mutators } from "../frontend/mutators";
-import { flatten } from "lodash";
-import { getSyncOrder } from "./sync-order";
-import { nanoid } from "nanoid";
-
-export type SampleData = {
- issue: Issue;
- description: Description;
- comments: Comment[];
-}[];
-
-export async function createDatabase(executor: Executor) {
- const schemaVersion = await getSchemaVersion(executor);
- if (schemaVersion < 0 || schemaVersion > 2) {
- throw new Error("Unexpected schema version: " + schemaVersion);
- }
-
- if (schemaVersion === 2) {
- console.log("schemaVersion is 2 - nothing to do");
- return;
- }
-
- console.log("creating schema");
- await executor("drop schema if exists public cascade");
- await executor("create schema public");
- await executor("grant all on schema public to postgres");
- await executor("grant all on schema public to public");
- await createSchema(executor);
-}
-
-async function getSchemaVersion(executor: Executor) {
- const metaExists = await executor(`select exists(
- select from pg_tables where schemaname = 'public' and tablename = 'meta')`);
- if (!metaExists.rows[0].exists) {
- return 0;
- }
-
- const qr = await executor(
- `select value from meta where key = 'schemaVersion'`
- );
- return qr.rows[0].value;
-}
-
-// nanoids don't include $, so they cannot collide with other space ids.
-export const BASE_SPACE_ID = "$base-space-id";
-
-export async function createSchema(executor: Executor) {
- await executor(`create table meta (key text primary key, value json)`);
- await executor(`insert into meta (key, value) values ('schemaVersion', '2')`);
-
- await executor(`create table space (
- id text primary key not null,
- version integer not null,
- lastmodified timestamp(6) not null
- )`);
-
- // lastpullid is null until the client has pulled for the first time.
- await executor(`create table clientgroup (
- id text primary key not null,
- lastpullid integer null
- )`);
-
- await executor(`create table client (
- id text primary key not null,
- lastmutationid integer not null,
- version integer not null,
- clientgroupid text not null,
- lastmodified timestamp(6) not null
- )`);
-
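- // Replicated key/value rows for every space. syncorder determines the
- // order in which non-issue entries are paged to clients during initial sync.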
- await executor(`create table entry (
- spaceid text not null,
- key text not null,
- value text not null,
- syncorder text not null,
- deleted boolean not null,
- version integer not null,
- lastmodified timestamp(6) not null
- )`);
-
- await executor(
- `create unique index idx_entry_spaceid_key on entry (spaceid, key)`
- );
- await executor(
- `create index idx_entry_spaceid_syncorder on entry (spaceid, syncorder)`
- );
- await executor(`create index
- on entry (spaceid, deleted)
- include (key, value, deleted)
- where key like 'issue/%'`);
- await executor(`create index on entry (spaceid)`);
- await executor(`create index on entry (deleted)`);
- await executor(`create index on entry (version)`);
- await executor(`create index on client (clientgroupid, version)`);
-}
-
-const INITIAL_SPACE_VERSION = 1;
-const INITIAL_SPACE_MUTATION_ID = 0;
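-// Creates a new per-user space. On the first call this also populates the
-// shared base space with sample data; user spaces act as overlays on top of it.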
-export async function initSpace(
- executor: Executor,
- getSampleData: () => Promise<SampleData>
-): Promise<string> {
- const {
- rows: baseSpaceRows,
- } = await executor(`select version from space where id = $1`, [
- BASE_SPACE_ID,
- ]);
-
- if (baseSpaceRows.length === 0) {
- console.log("Initializing base space", BASE_SPACE_ID);
- await insertSpace(executor, BASE_SPACE_ID, INITIAL_SPACE_VERSION);
- const start = Date.now();
- // We have to batch insertions to work around postgres command size limits
- const sampleData = await getSampleData();
- const sampleDataBatchs: SampleData[] = [];
- for (let i = 0; i < sampleData.length; i++) {
- if (i % 1000 === 0) {
- sampleDataBatchs.push([]);
- }
- sampleDataBatchs[sampleDataBatchs.length - 1].push(sampleData[i]);
- }
- for (const sampleDataBatch of sampleDataBatchs) {
- const tx = new ReplicacheTransaction(
- executor,
- BASE_SPACE_ID,
- "fake-client-id-for-server-init",
- INITIAL_SPACE_MUTATION_ID,
- INITIAL_SPACE_VERSION,
- getSyncOrder
- );
- for (const { issue, description, comments } of sampleDataBatch) {
- await mutators.putIssue(tx, { issue, description });
- for (const comment of comments) {
- await mutators.putIssueComment(tx, comment, false);
- }
- }
- await tx.flush();
- }
- console.log("Initing base space took " + (Date.now() - start) + "ms");
- }
- const spaceID = nanoid(10);
- await insertSpace(executor, spaceID, INITIAL_SPACE_VERSION);
- return spaceID;
-}
-
-async function insertSpace(
- executor: Executor,
- spaceID: string,
- version: number
-) {
- await executor(
- `insert into space (id, version, lastmodified) values ($1, $2, now())`,
- [spaceID, version]
- );
-}
-
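-// Reads merge the per-user overlay space with the shared base space:
-// overlay rows win, and an overlay row marked deleted hides the base row.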
-export async function getEntry(
- executor: Executor,
- spaceID: string,
- key: string
-): Promise<JSONValue | undefined> {
- const { rows } = await executor(
- `
- with overlayentry as (
- select key, value, deleted from entry where spaceid = $1 and key = $3
- ), baseentry as (
- select key, value from entry where spaceid = $2 and key = $3
- )
- select coalesce(overlayentry.key, baseentry.key),
- coalesce(overlayentry.value, baseentry.value) as value,
- overlayentry.deleted as deleted
- from overlayentry full join baseentry on overlayentry.key = baseentry.key
- `,
- [spaceID, BASE_SPACE_ID, key]
- );
- const value = rows[0]?.value;
- if (value === undefined || rows[0]?.deleted) {
- return undefined;
- }
- return JSON.parse(value);
-}
-
-export async function putEntries(
- executor: Executor,
- spaceID: string,
- entries: [key: string, value: JSONValue, syncOrder: string][],
- version: number
-): Promise<void> {
- if (entries.length === 0) {
- return;
- }
- const valuesSql = Array.from(
- { length: entries.length },
- (_, i) =>
- `($1, $${i * 3 + 3}, $${i * 3 + 4}, $${i * 3 + 5}, false, $2, now())`
- ).join();
-
- await executor(
- `
- insert into entry (
- spaceid, key, value, syncOrder, deleted, version, lastmodified
- ) values ${valuesSql}
- on conflict (spaceid, key) do
- update set value = excluded.value, syncorder = excluded.syncorder,
- deleted = false, version = excluded.version, lastmodified = now()
- `,
- [
- spaceID,
- version,
- ...flatten(
- entries.map(([key, value, syncOrder]) => [
- key,
- JSON.stringify(value),
- syncOrder,
- ])
- ),
- ]
- );
-}
-
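-// Deletes are soft: rows are tombstoned (deleted = true) and stamped with
-// the new version so subsequent pulls can report the removal to clients.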
-export async function delEntries(
- executor: Executor,
- spaceID: string,
- keys: string[],
- version: number
-): Promise<void> {
- if (keys.length === 0) {
- return;
- }
- const keyParamsSQL = keys.map((_, i) => `$${i + 3}`).join(",");
- await executor(
- `
- update entry set deleted = true, version = $2
- where spaceid = $1 and key in(${keyParamsSQL})
- `,
- [spaceID, version, ...keys]
- );
-}
-
-export async function getIssueEntries(
- executor: Executor,
- spaceID: string
-): Promise<[key: string, value: string][]> {
- const { rows } = await executor(
- `
- with overlayentry as (
- select key, value, deleted from entry
- where spaceid = $1 and key like 'issue/%'
- ), baseentry as (
- select key, value from entry where spaceid = $2 and key like 'issue/%'
- )
- select coalesce(overlayentry.key, baseentry.key) as key,
- coalesce(overlayentry.value, baseentry.value) as value,
- overlayentry.deleted as deleted
- from overlayentry full join baseentry on overlayentry.key = baseentry.key
- `,
- [spaceID, BASE_SPACE_ID]
- );
- const startFilter = Date.now();
- const filtered: [key: string, value: string][] = rows
- .filter((row) => !row.deleted)
- .map((row) => [row.key, row.value]);
-
- console.log("getIssueEntries filter took " + (Date.now() - startFilter));
- return filtered;
-}
-
-export async function getNonIssueEntriesInSyncOrder(
- executor: Executor,
- spaceID: string,
- startSyncOrderExclusive: string,
- limit: number
-): Promise<{
- entries: [key: string, value: string][];
- endSyncOrder: string | undefined;
-}> {
- // Although it complicates the query, we do the deleted filtering
- // in the query so that we can correctly limit the results.
- const { rows } = await executor(
- `
- with overlayentry as (
- select key, value, syncorder, deleted from entry
- where spaceid = $1 and key not like 'issue/%' and syncorder > $3
- order by syncorder limit $4
- ), baseentry as (
- select key, value, syncorder from entry
- where spaceid = $2 and key not like 'issue/%' and syncorder > $3
- order by syncorder limit $4
- )
- select key, value, syncorder from (
- select coalesce(overlayentry.key, baseentry.key) as key,
- coalesce(overlayentry.value, baseentry.value) as value,
- coalesce(overlayentry.syncorder, baseentry.syncorder) as syncorder,
- overlayentry.deleted as deleted
- from overlayentry full join baseentry on overlayentry.key = baseentry.key
- ) as merged where deleted = false or deleted is null
- order by syncorder
- limit $4
- `,
- [spaceID, BASE_SPACE_ID, startSyncOrderExclusive, limit]
- );
- return {
- entries: rows.map((row) => [row.key, row.value]),
- endSyncOrder: rows[rows.length - 1]?.syncorder,
- };
-}
-
-export async function getChangedEntries(
- executor: Executor,
- spaceID: string,
- prevVersion: number
-): Promise<[key: string, value: string, deleted: boolean][]> {
- // Changes are only in the overlay space, so we do not need to
- // query the base space.
- const {
- rows,
- } = await executor(
- `select key, value, deleted from entry where spaceid = $1 and version > $2`,
- [spaceID, prevVersion]
- );
- return rows.map((row) => [row.key, row.value, row.deleted]);
-}
-
-export async function getVersion(
- executor: Executor,
- spaceID: string
-): Promise<number | undefined> {
- const { rows } = await executor(`select version from space where id = $1`, [
- spaceID,
- ]);
- const value = rows[0]?.version;
- if (value === undefined) {
- return undefined;
- }
- return z.number().parse(value);
-}
-
-export async function setVersion(
- executor: Executor,
- spaceID: string,
- version: number
-): Promise<void> {
- await executor(
- `update space set version = $2, lastmodified = now() where id = $1`,
- [spaceID, version]
- );
-}
-
-export async function getLastMutationID(
- executor: Executor,
- clientID: string
-): Promise<number | undefined> {
- const {
- rows,
- } = await executor(`select lastmutationid from client where id = $1`, [
- clientID,
- ]);
- const value = rows[0]?.lastmutationid;
- if (value === undefined) {
- return undefined;
- }
- return z.number().parse(value);
-}
-
-export async function getLastMutationIDs(
- executor: Executor,
- clientIDs: string[]
-) {
- return Object.fromEntries(
- await Promise.all(
- clientIDs.map(async (cid) => {
- const lmid = await getLastMutationID(executor, cid);
- return [cid, lmid ?? 0] as const;
- })
- )
- );
-}
-
-export async function getLastMutationIDsSince(
- executor: Executor,
- clientGroupID: string,
- sinceVersion: number
-) {
- const {
- rows,
- } = await executor(
- `select id, clientgroupid, lastmutationid from client where clientgroupid = $1 and version > $2`,
- [clientGroupID, sinceVersion]
- );
- return Object.fromEntries(
- rows.map((r) => [r.id as string, r.lastmutationid as number] as const)
- );
-}
-
-export async function incrementPullID(
- executor: Executor,
- clientGroupID: string
-) {
- const {
- rows,
- } = await executor(`select lastpullid from clientgroup where id = $1`, [
- clientGroupID,
- ]);
- if (rows.length === 0) {
- await executor(`insert into clientgroup (id, lastpullid) values ($1, 1)`, [
- clientGroupID,
- ]);
- return 1;
- }
- const [prev] = rows;
- const { lastpullid } = prev;
- const nextPullID = lastpullid + 1;
- await executor(`update clientgroup set lastpullid = $1`, [nextPullID]);
- return nextPullID;
-}
-
-export async function setLastMutationID(
- executor: Executor,
- clientID: string,
- clientGroupID: string,
- lastMutationID: number,
- version: number
-): Promise<void> {
- await executor(
- `
- insert into clientgroup (id, lastpullid) values ($1, null)
- on conflict (id) do nothing
- `,
- [clientGroupID]
- );
- await executor(
- `
- insert into client (id, clientgroupid, lastmutationid, version, lastmodified)
- values ($1, $2, $3, $4, now())
- on conflict (id) do update set lastmutationid = $3, version = $4, lastmodified = now()
- `,
- [clientID, clientGroupID, lastMutationID, version]
- );
-}
-
-export async function setLastMutationIDs(
- executor: Executor,
- clientGroupID: string,
- lmids: Record<string, number>,
- version: number
-) {
- return await Promise.all(
- [...Object.entries(lmids)].map(([clientID, lmid]) =>
- setLastMutationID(executor, clientID, clientGroupID, lmid, version)
- )
- );
-}
diff --git a/backend/issues-react.d.ts b/backend/issues-react.d.ts
deleted file mode 100644
index 08853228..00000000
--- a/backend/issues-react.d.ts
+++ /dev/null
@@ -1,15 +0,0 @@
-declare module "*issues-react.js.gz" {
- const gitHubIssues: {
- number: number;
- title: string;
- body: string | null;
- state: "open" | "closed";
- // eslint-disable-next-line @typescript-eslint/naming-convention
- updated_at: string;
- // eslint-disable-next-line @typescript-eslint/naming-convention
- created_at: string;
- // eslint-disable-next-line @typescript-eslint/naming-convention
- creator_user_login: string;
- }[];
- export default gitHubIssues;
-}
diff --git a/backend/issues-react.js.gz b/backend/issues-react.js.gz
deleted file mode 100644
index 190aff55..00000000
Binary files a/backend/issues-react.js.gz and /dev/null differ
diff --git a/backend/pg.ts b/backend/pg.ts
deleted file mode 100644
index 999a7252..00000000
--- a/backend/pg.ts
+++ /dev/null
@@ -1,117 +0,0 @@
-// Low-level config and utilities for Postgres.
-
-import { Pool, QueryResult } from "pg";
-
-const pool = new Pool(
- process.env.DATABASE_URL
- ? {
- connectionString: process.env.DATABASE_URL,
- ssl:
- process.env.NODE_ENV === "production"
- ? {
- rejectUnauthorized: false,
- }
- : undefined,
- }
- : undefined
-);
-
-// the pool will emit an error on behalf of any idle clients
-// it contains if a backend error or network partition happens
-pool.on("error", (err) => {
- console.error("Unexpected error on idle client", err);
- process.exit(-1);
-});
-
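-// All transactions run at SERIALIZABLE isolation; transact() retries on
-// serialization failures (see shouldRetryTransaction below).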
-pool.on("connect", (client) => {
- // eslint-disable-next-line @typescript-eslint/no-floating-promises
- client.query(
- "SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL SERIALIZABLE"
- );
-});
-
-export async function withExecutor<R>(
- f: (executor: Executor) => R
-): Promise<R> {
- const startConnect = Date.now();
- const client = await pool.connect();
- console.log("pool.connect took", Date.now() - startConnect);
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const executor = async (sql: string, params?: any[]) => {
- try {
- const start = Date.now();
- const result = await client.query(sql, params);
- console.log(
- "Db query took " +
- (Date.now() - start) +
- "ms. SQL: " +
- sql.substring(0, Math.min(sql.length, 150))
- );
- return result;
- } catch (e) {
- throw new Error(
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- `Error executing SQL: ${sql}: ${((e as unknown) as any).toString()}`
- );
- }
- };
-
- try {
- return await f(executor);
- } finally {
- const startRelease = Date.now();
- client.release();
- console.log("client.release took", Date.now() - startRelease);
- }
-}
-
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-export type Executor = (sql: string, params?: any[]) => Promise<QueryResult>;
-export type TransactionBodyFn<R> = (executor: Executor) => Promise<R>;
-
-/**
- * Invokes a supplied function within an RDS transaction.
- * @param body Function to invoke. If this throws, the transaction will be rolled
- * back. The thrown error will be re-thrown.
- */
-export async function transact<R>(body: TransactionBodyFn<R>) {
- return await withExecutor(async (executor) => {
- return await transactWithExecutor(executor, body);
- });
-}
-
-async function transactWithExecutor<R>(
- executor: Executor,
- body: TransactionBodyFn<R>
-) {
- for (let i = 0; i < 10; i++) {
- try {
- await executor("begin");
- try {
- const r = await body(executor);
- await executor("commit");
- return r;
- } catch (e) {
- console.log("caught error", e, "rolling back");
- await executor("rollback");
- throw e;
- }
- } catch (e) {
- if (shouldRetryTransaction(e)) {
- console.log(
- `Retrying transaction due to error ${e} - attempt number ${i}`
- );
- continue;
- }
- throw e;
- }
- }
- throw new Error("Tried to execute transacation too many times. Giving up.");
-}
-
-//stackoverflow.com/questions/60339223/node-js-transaction-coflicts-in-postgresql-optimistic-concurrency-control-and
-function shouldRetryTransaction(err: unknown) {
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- const code = typeof err === "object" ? String((err as any).code) : null;
- return code === "40001" || code === "40P01";
-}
diff --git a/backend/replicache-transaction.test.ts b/backend/replicache-transaction.test.ts
deleted file mode 100644
index 792afc9a..00000000
--- a/backend/replicache-transaction.test.ts
+++ /dev/null
@@ -1,104 +0,0 @@
-import { ReplicacheTransaction } from "./replicache-transaction";
-import { expect } from "chai";
-import { test, teardown, setup } from "mocha";
-import { transact, withExecutor } from "./pg";
-import { createDatabase, getEntry } from "./data";
-import type { JSONValue, ReadTransaction } from "replicache";
-
-setup(async () => {
- await transact((executor) => createDatabase(executor));
-});
-
-teardown(async () => {
- await withExecutor(async (executor) => {
- await executor(`delete from entry where spaceid like 'test-s-%'`);
- await executor(`delete from space where id like 'test-s-%'`);
- });
-});
-
-async function getTestSyncOrder(
- _: ReadTransaction,
- entry: [key: string, _: JSONValue]
-) {
- return entry[0];
-}
-
-test("ReplicacheTransaction", async () => {
- await withExecutor(async (executor) => {
- const t1 = new ReplicacheTransaction(
- executor,
- "test-s-s1",
- "c1",
- 1,
- getTestSyncOrder
- );
-
- expect(t1.clientID).equal("c1");
- expect(await t1.has("foo")).false;
- expect(await t1.get("foo")).undefined;
-
- await t1.put("foo", "bar");
- expect(await t1.has("foo")).true;
- expect(await t1.get("foo")).equal("bar");
-
- await t1.flush();
-
- expect(await getEntry(executor, "test-s-s1", "foo")).equal("bar");
-
- const t2 = new ReplicacheTransaction(
- executor,
- "test-s-s1",
- "c1",
- 2,
- getTestSyncOrder
- );
- await t2.del("foo");
- await t2.flush();
-
- expect(await getEntry(executor, "test-s-s1", "foo")).equal(undefined);
- const qr = await executor(
- `select value, deleted, version
- from entry where spaceid = 'test-s-s1' and key = 'foo'`
- );
- const [row] = qr.rows;
- expect(row).deep.equal({
- value: `"bar"`,
- deleted: true,
- version: 2,
- });
- });
-});
-
-test("ReplicacheTransaction overlap", async () => {
- await withExecutor(async (executor) => {
- const t1 = new ReplicacheTransaction(
- executor,
- "test-s-s1",
- "c1",
- 1,
- getTestSyncOrder
- );
- await t1.put("foo", "bar");
-
- const t2 = new ReplicacheTransaction(
- executor,
- "test-s-s1",
- "c1",
- 1,
- getTestSyncOrder
- );
- expect(await t2.has("foo")).false;
-
- await t1.flush();
- expect(await t2.has("foo")).false;
-
- const t3 = new ReplicacheTransaction(
- executor,
- "test-s-s1",
- "c1",
- 1,
- getTestSyncOrder
- );
- expect(await t3.has("foo")).true;
- });
-});
diff --git a/backend/replicache-transaction.ts b/backend/replicache-transaction.ts
deleted file mode 100644
index a3c1e32b..00000000
--- a/backend/replicache-transaction.ts
+++ /dev/null
@@ -1,143 +0,0 @@
-import type {
- DeepReadonly,
- IndexKey,
- JSONValue,
- ReadTransaction,
- ReadonlyJSONValue,
- ScanIndexOptions,
- ScanNoIndexOptions,
- ScanOptions,
- ScanResult,
- TransactionEnvironment,
- TransactionReason,
- WriteTransaction,
-} from "replicache";
-import { delEntries, getEntry, putEntries } from "./data";
-import type { Executor } from "./pg";
-
-export type SyncOrderFn = (
- tx: ReadTransaction,
- entry: [key: string, value: JSONValue]
-) => Promise<string>;
-
-/**
- * Implements Replicache's WriteTransaction interface in terms of a Postgres
- * transaction.
- */
-export class ReplicacheTransaction implements WriteTransaction {
- private readonly _spaceID: string;
- private readonly _clientID: string;
- private readonly _version: number;
- private readonly _mutationID: number;
- private readonly _executor: Executor;
- private readonly _getSyncOrder: SyncOrderFn;
- private readonly _cache: Map<
- string,
- { value: JSONValue | undefined; dirty: boolean }
- > = new Map();
-
- constructor(
- executor: Executor,
- spaceID: string,
- clientID: string,
- version: number,
- mutationId: number,
- getSyncOrder: SyncOrderFn
- ) {
- this._spaceID = spaceID;
- this._clientID = clientID;
- this._version = version;
- this._mutationID = mutationId;
- this._executor = executor;
- this._getSyncOrder = getSyncOrder;
- }
-
- get reason(): TransactionReason {
- return "authoritative";
- }
-
- get environment(): TransactionEnvironment {
- return "server";
- }
-
- get location() {
- return this.environment;
- }
-
- get mutationID(): number {
- return this._mutationID;
- }
-
- get clientID(): string {
- return this._clientID;
- }
-
- async put(key: string, value: JSONValue): Promise<void> {
- await this.set(key, value);
- }
- async set(key: string, value: JSONValue): Promise<void> {
- this._cache.set(key, { value, dirty: true });
- }
- async del(key: string): Promise<boolean> {
- const had = await this.has(key);
- this._cache.set(key, { value: undefined, dirty: true });
- return had;
- }
- async get(key: string): Promise<JSONValue | undefined> {
- const entry = this._cache.get(key);
- if (entry) {
- return entry.value;
- }
- const value = await getEntry(this._executor, this._spaceID, key);
- this._cache.set(key, { value, dirty: false });
- return value;
- }
- async has(key: string): Promise<boolean> {
- const val = await this.get(key);
- return val !== undefined;
- }
-
- // TODO!
- async isEmpty(): Promise<boolean> {
- throw new Error("Method isEmpty not implemented");
- }
-
- scan(options: ScanIndexOptions): ScanResult<IndexKey, ReadonlyJSONValue>;
- scan(options?: ScanNoIndexOptions): ScanResult<string, ReadonlyJSONValue>;
- scan(options?: ScanOptions): ScanResult<IndexKey | string, ReadonlyJSONValue>;
- scan<V extends ReadonlyJSONValue>(
- options: ScanIndexOptions
- ): ScanResult<IndexKey, DeepReadonly<V>>;
- scan<V extends ReadonlyJSONValue>(
- options?: ScanNoIndexOptions
- ): ScanResult<string, DeepReadonly<V>>;
- scan<V extends ReadonlyJSONValue>(
- options?: ScanOptions
- ): ScanResult<IndexKey | string, DeepReadonly<V>>;
- scan(): ScanResult<IndexKey | string, ReadonlyJSONValue> {
- throw new Error("Method scan not implemented.");
- }
-
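- // Writes are buffered in _cache; flush() persists the dirty entries to
- // Postgres as puts and soft-deletes at the end of the mutation.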
- async flush(): Promise<void> {
- const dirtyEntries = [...this._cache.entries()].filter(
- ([, { dirty }]) => dirty
- );
- const entriesToPut: [string, JSONValue, string][] = [];
- for (const dirtyEntry of dirtyEntries) {
- if (dirtyEntry[1].value !== undefined) {
- entriesToPut.push([
- dirtyEntry[0],
- dirtyEntry[1].value,
- await this._getSyncOrder(this, [dirtyEntry[0], dirtyEntry[1].value]),
- ]);
- }
- }
- const keysToDel = dirtyEntries
- .filter(([, { value }]) => value === undefined)
- .map(([key]) => key);
- await Promise.all([
- delEntries(this._executor, this._spaceID, keysToDel, this._version),
- putEntries(this._executor, this._spaceID, entriesToPut, this._version),
- ]);
- }
-}
diff --git a/backend/sample-issues.ts b/backend/sample-issues.ts
deleted file mode 100644
index 41c61832..00000000
--- a/backend/sample-issues.ts
+++ /dev/null
@@ -1,136 +0,0 @@
-import { Priority, Status } from "../frontend/issue";
-import type { SampleData } from "./data";
-import { generateNKeysBetween } from "fractional-indexing";
-import { sortBy } from "lodash";
-
-export async function getReactSampleData(): Promise<SampleData> {
- const issuesDefault = (await import("./issues-react.js.gz")).default;
- const sortedIssues = sortBy(
- issuesDefault,
- (reactIssue) =>
- Number.MAX_SAFE_INTEGER -
- Date.parse(reactIssue.updated_at) +
- "-" +
- reactIssue.number
- );
-
- const issuesCount = issuesDefault.length;
- const kanbanOrderKeys = generateNKeysBetween(null, null, issuesCount);
- const issues: SampleData = sortedIssues.map((reactIssue, idx) => ({
- issue: {
- id: reactIssue.number.toString(),
- title: reactIssue.title,
- priority: getPriority(reactIssue),
- status: getStatus(reactIssue),
- modified: Date.parse(reactIssue.updated_at),
- created: Date.parse(reactIssue.created_at),
- creator: reactIssue.creator_user_login,
- kanbanOrder: kanbanOrderKeys[idx],
- },
- description: reactIssue.body || "",
- comments: [],
- }));
-
- const comments = (await import("./comments-react.js.gz")).default.map(
- (reactComment) => ({
- id: reactComment.comment_id,
- issueID: reactComment.number.toString(),
- created: Date.parse(reactComment.created_at),
- body: reactComment.body || "",
- creator: reactComment.creator_user_login,
- })
- );
- for (const comment of comments) {
- const issue = issues.find((issue) => issue.issue.id === comment.issueID);
- if (issue) {
- issue.comments.push(comment);
- }
- }
- issues;
-
- // Can use this to generate artificially larger datasets for stress testing.
- const multiplied: SampleData = [];
- for (let i = 0; i < 1; i++) {
- multiplied.push(
- ...issues.map((issue) => ({
- ...issue,
- issue: {
- ...issue.issue,
- id: issue.issue.id + "-" + i,
- },
- comments: issue.comments.map((comment) => ({
- ...comment,
- issueID: comment.issueID + "-" + i,
- })),
- }))
- );
- }
-
- return multiplied;
-}
-
-function getStatus({
- number,
- created_at,
-}: {
- number: number;
- state: "open" | "closed";
- // eslint-disable-next-line @typescript-eslint/naming-convention
- created_at: string;
-}): Status {
- const stableRandom = number + Date.parse(created_at);
- // 90% closed, 10% open
- if (stableRandom % 10 < 8) {
- // 2/3's done, 1/3 cancelled
- switch (stableRandom % 3) {
- case 0:
- case 1:
- return Status.DONE;
- case 2:
- return Status.CANCELED;
- }
- }
- switch (stableRandom % 6) {
- // 2/6 backlog, 3/6 todo, 1/6 in progress
- case 0:
- case 1:
- return Status.BACKLOG;
- case 2:
- case 3:
- case 4:
- return Status.TODO;
- case 5:
- return Status.IN_PROGRESS;
- }
- return Status.TODO;
-}
-
-function getPriority({
- number,
- created_at,
-}: {
- number: number;
- // eslint-disable-next-line @typescript-eslint/naming-convention
- created_at: string;
-}): Priority {
- const stableRandom = number + Date.parse(created_at);
- // bell curve priorities
- switch (stableRandom % 10) {
- case 0:
- return Priority.NONE;
- case 1:
- case 2:
- return Priority.LOW;
- case 3:
- case 4:
- case 5:
- case 6:
- return Priority.MEDIUM;
- case 7:
- case 8:
- return Priority.HIGH;
- case 9:
- return Priority.URGENT;
- }
- return Priority.NONE;
-}
diff --git a/backend/sync-order.ts b/backend/sync-order.ts
deleted file mode 100644
index 23ca8124..00000000
--- a/backend/sync-order.ts
+++ /dev/null
@@ -1,38 +0,0 @@
-import {
- commentSchema,
- COMMENT_KEY_PREFIX,
- DESCRIPTION_KEY_PREFIX,
- getDescriptionIssueId,
- getIssue,
- issueSchema,
- ISSUE_KEY_PREFIX,
- reverseTimestampSortKey,
-} from "../frontend/issue";
-import type { JSONValue, ReadTransaction } from "replicache";
-import { assertNotUndefined } from "../util/asserts";
-
-export async function getSyncOrder(
- tx: ReadTransaction,
- entry: [key: string, value: JSONValue]
-): Promise<string> {
- // The default view is a list of issues in reverse modified order, so it is
- // preferable to sync entries in reverse modified order of their
- // corresponding issue, so that if a user clicks on an issue near the top
- // of the default initial list view the entries needed for displaying the
- // detail view is available as soon as possible.
- const [key, value] = entry;
- let issue;
- if (key.startsWith(ISSUE_KEY_PREFIX)) {
- // Note as an optimization we return all of the issue entries in the
- // first pull response regardless of sync order, but we still need
- // to assign them a sync order despite it being unused.
- issue = issueSchema.parse(value);
- } else if (key.startsWith(COMMENT_KEY_PREFIX)) {
- const comment = commentSchema.parse(value);
- issue = await getIssue(tx, comment.issueID);
- } else if (key.startsWith(DESCRIPTION_KEY_PREFIX)) {
- issue = await getIssue(tx, getDescriptionIssueId(key));
- }
- assertNotUndefined(issue);
- return reverseTimestampSortKey(issue.modified, issue.id) + "-" + key;
-}
diff --git a/client/.env.example b/client/.env.example
new file mode 100644
index 00000000..23e6b829
--- /dev/null
+++ b/client/.env.example
@@ -0,0 +1 @@
+VITE_REPLICACHE_LICENSE_KEY=
diff --git a/client/.eslintignore b/client/.eslintignore
new file mode 100644
index 00000000..7758a6ad
--- /dev/null
+++ b/client/.eslintignore
@@ -0,0 +1,9 @@
+node_modules
+out
+tool
+bin
+.eslintrc.cjs
+dist
+lib
+env.d.ts
+vite.config.ts
\ No newline at end of file
diff --git a/client/.npmignore b/client/.npmignore
new file mode 100644
index 00000000..7ceb59f8
--- /dev/null
+++ b/client/.npmignore
@@ -0,0 +1,25 @@
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+pnpm-debug.log*
+lerna-debug.log*
+
+node_modules
+dist
+dist-ssr
+*.local
+
+# Editor directories and files
+.vscode/*
+!.vscode/extensions.json
+.idea
+.DS_Store
+*.suo
+*.ntvs*
+*.njsproj
+*.sln
+*.sw?
+.env
diff --git a/client/.prettierignore b/client/.prettierignore
new file mode 100644
index 00000000..f596c21d
--- /dev/null
+++ b/client/.prettierignore
@@ -0,0 +1,4 @@
+node_modules
+dist
+lib
+*.log
\ No newline at end of file
diff --git a/client/env.d.ts b/client/env.d.ts
new file mode 100644
index 00000000..6523c5e0
--- /dev/null
+++ b/client/env.d.ts
@@ -0,0 +1,11 @@
+/// <reference types="vite/client" />
+/// <reference types="vite-plugin-svgr/client" />
+
+interface ImportMetaEnv {
+ readonly VITE_REPLICACHE_LICENSE_KEY: string;
+ // more env variables...
+}
+
+interface ImportMeta {
+ readonly env: ImportMetaEnv;
+}
diff --git a/client/index.html b/client/index.html
new file mode 100644
index 00000000..7b901446
--- /dev/null
+++ b/client/index.html
@@ -0,0 +1,17 @@
+<!DOCTYPE html>
+<html lang="en">
+
+  <head>
+    <meta charset="UTF-8" />
+    <link rel="icon" type="image/png" href="/replicache-logo-96.png" />
+    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
+    <title>Repliear</title>
+  </head>
+
+  <body>
+    <div id="root"></div>
+
+    <script type="module" src="/src/index.tsx"></script>
+
+  </body>
+</html>
\ No newline at end of file
diff --git a/client/package.json b/client/package.json
new file mode 100644
index 00000000..39271370
--- /dev/null
+++ b/client/package.json
@@ -0,0 +1,65 @@
+{
+ "name": "client",
+ "private": true,
+ "version": "0.1.0",
+ "type": "module",
+ "scripts": {
+ "lint": "eslint --ext .ts,.tsx,.js,.jsx .",
+ "check-types": "tsc --noEmit",
+ "dev": "vite",
+ "build": "tsc && vite build",
+ "build:server": "cd ../server && npm run build",
+ "preview": "vite preview",
+ "format": "prettier --write './src/**/*.{js,jsx,json,ts,tsx,html,css,md}' '*.{cjs,js,jsx,json,ts,tsx,html,css,md}'",
+ "check-format": "prettier --check './src/**/*.{js,jsx,json,ts,tsx,html,css,md}' '*.{cjs,js,jsx,json,ts,tsx,html,css,md}'",
+ "clean": "rm -rf ./dist; mkdir -p ./dist",
+ "prod": "cp -r ./dist/ ../server/dist/; cd ../server; npm run prod",
+ "server": "cd ../server && npm run dev",
+ "watch": "concurrently --kill-others 'npm run server' 'npm run check-types -- --watch --preserveWatchOutput' 'sleep 3; npm run dev'"
+ },
+ "dependencies": {
+ "@mui/icons-material": "^5.14.16",
+ "@rocicorp/undo": "^0.2.0",
+ "classnames": "^2.3.1",
+ "lodash": "^4.17.21",
+ "navigo": "^8.11.1",
+ "qs": "^6.11.0",
+ "react": "^18.2.0",
+ "react-beautiful-dnd": "^13.1.1",
+ "react-dom": "^18.2.0",
+ "react-hotkeys": "^2.0.0",
+ "react-popper": "^2.3.0",
+ "react-remark": "^2.1.0",
+ "react-virtualized-auto-sizer": "^1.0.20",
+ "react-window": "^1.8.9",
+ "replicache-react": "5.0.1",
+ "shared": "^0.1.0",
+ "todomvc-app-css": "^2.4.2"
+ },
+ "devDependencies": {
+ "@rocicorp/eslint-config": "^0.1.2",
+ "@rocicorp/prettier-config": "^0.1.1",
+ "@tailwindcss/forms": "^0.5.6",
+ "@tailwindcss/line-clamp": "^0.4.4",
+ "@types/lodash": "^4.14.201",
+ "@types/react": "^18.0.17",
+ "@types/react-beautiful-dnd": "^13.1.7",
+ "@types/react-dom": "^18.0.6",
+ "@types/react-window": "^1.8.8",
+ "@vitejs/plugin-react": "^2.0.1",
+ "autoprefixer": "^10.4.16",
+ "concurrently": "^7.4.0",
+ "postcss": "^8.4.31",
+ "postcss-preset-env": "^9.3.0",
+ "prettier": "^2.2.1",
+ "tailwindcss": "^3.3.5",
+ "typescript": "^4.7.4",
+ "use-debounce": "^9.0.4",
+ "vite": "^3.0.7",
+ "vite-plugin-svgr": "^4.1.0"
+ },
+ "eslintConfig": {
+ "extends": "@rocicorp/eslint-config"
+ },
+ "prettier": "@rocicorp/prettier-config"
+}
diff --git a/client/postcss.config.js b/client/postcss.config.js
new file mode 100644
index 00000000..5fa0ad15
--- /dev/null
+++ b/client/postcss.config.js
@@ -0,0 +1,7 @@
+export default {
+ plugins: {
+ 'tailwindcss': {},
+ 'autoprefixer': {},
+ 'postcss-preset-env': {},
+ },
+};
diff --git a/public/static/replicache-logo-96.png b/client/replicache-logo-96.png
similarity index 100%
rename from public/static/replicache-logo-96.png
rename to client/replicache-logo-96.png
diff --git a/frontend/about-modal.tsx b/client/src/about-modal.tsx
similarity index 81%
rename from frontend/about-modal.tsx
rename to client/src/about-modal.tsx
index 9d23868a..550b9cef 100644
--- a/frontend/about-modal.tsx
+++ b/client/src/about-modal.tsx
@@ -1,18 +1,18 @@
-import CloseIcon from "./assets/icons/close.svg";
-import Modal from "./modal";
-import React from "react";
-import classNames from "classnames";
+import CloseIcon from './assets/icons/close.svg?react';
+import Modal from './layout/modal';
+import React from 'react';
+import classNames from 'classnames';
interface Props {
isOpen: boolean;
onDismiss?: () => void;
}
-function Title({ children }: { children: string }) {
+function Title({children}: {children: string}) {
return
diff --git a/frontend/issue-col.tsx b/client/src/issue/issue-col.tsx
similarity index 84%
rename from frontend/issue-col.tsx
rename to client/src/issue/issue-col.tsx
index 19a339c2..ed2dc022 100644
--- a/frontend/issue-col.tsx
+++ b/client/src/issue/issue-col.tsx
@@ -1,16 +1,16 @@
-import StatusIcon from "./status-icon";
-import React, { CSSProperties, memo, useMemo } from "react";
+import StatusIcon from '../widgets/status-icon';
+import React, {CSSProperties, memo, useMemo} from 'react';
import {
Draggable,
DraggableProvided,
Droppable,
DroppableProvided,
DroppableStateSnapshot,
-} from "react-beautiful-dnd";
-import type { Issue, Priority, Status } from "./issue";
-import IssueItem from "./issue-item";
-import { FixedSizeList } from "react-window";
-import AutoSizer from "react-virtualized-auto-sizer";
+} from 'react-beautiful-dnd';
+import IssueItem from './issue-item';
+import {FixedSizeList} from 'react-window';
+import AutoSizer from 'react-virtualized-auto-sizer';
+import {Issue, Priority, Status} from 'shared';
interface Props {
status: Status;
@@ -30,7 +30,7 @@ interface RowProps {
style: CSSProperties;
}
-const RowPreMemo = ({ data, index, style }: RowProps) => {
+const RowPreMemo = ({data, index, style}: RowProps) => {
const issue = data.issues[index];
// We are rendering an extra item for the placeholder.
// To do this we increased our data set size to include one 'fake' item.
@@ -42,7 +42,6 @@ const RowPreMemo = ({ data, index, style }: RowProps) => {
{(provided: DraggableProvided) => {
return (
- // @ts-expect-error @types/react@17 are wrong but react 18 does not work with next
;
return (
@@ -103,7 +102,6 @@ function IssueCol({
renderClone={(provided, _snapshot, rubric) => {
const issue = issues[rubric.source.index];
return (
- // @ts-expect-error @types/react@17 are wrong but react 18 does not work with nextjs