diff --git a/.dockerignore b/.dockerignore index c9299b335..2ae712321 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,7 +1,12 @@ # big-AGI non-code files /docs/ +/dist/ README.md +# Ignore build and log files +Dockerfile +/.dockerignore + # Node build artifacts /node_modules /.pnp diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index c3236638b..dd81efd26 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -57,4 +57,5 @@ jobs: file: Dockerfile push: true tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} \ No newline at end of file + labels: ${{ steps.meta.outputs.labels }} + build-args: NEXT_PUBLIC_GA4_MEASUREMENT_ID=${{ secrets.GA4_MEASUREMENT_ID }} \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index e38991207..f4c60af0d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,6 +2,7 @@ FROM node:18-alpine AS base ENV NEXT_TELEMETRY_DISABLED 1 + # Dependencies FROM base AS deps WORKDIR /app @@ -14,10 +15,15 @@ COPY src/server/prisma ./src/server/prisma ENV NODE_ENV development RUN npm ci + # Builder FROM base AS builder WORKDIR /app +# Optional argument to configure GA4 at build time (see: docs/deploy-analytics.md) +ARG NEXT_PUBLIC_GA4_MEASUREMENT_ID +ENV NEXT_PUBLIC_GA4_MEASUREMENT_ID=${NEXT_PUBLIC_GA4_MEASUREMENT_ID} + # Copy development deps and source COPY --from=deps /app/node_modules ./node_modules COPY . . @@ -29,6 +35,7 @@ RUN npm run build # Reduce installed packages to production-only RUN npm prune --production + # Runner FROM base AS runner WORKDIR /app diff --git a/docs/config-feature-browse.md b/docs/config-feature-browse.md index 7c0d6ccef..0787b59b7 100644 --- a/docs/config-feature-browse.md +++ b/docs/config-feature-browse.md @@ -3,11 +3,16 @@ Allows users to load web pages across various components of `big-AGI`. 
This feature is supported by Puppeteer-based browsing services, which are the most common way to render web pages in a headless environment. -Once configured, the Browsing service provides this functionality: +Once configured, the Browsing service provides the following functionality: -- **Paste a URL**: Simply paste/drag a URL into the chat, and `big-AGI` will load and attach the page (very effective) -- **Use /browse**: Type `/browse [URL]` in the chat to command `big-AGI` to load the specified web page -- **ReAct**: ReAct will automatically use the `loadURL()` function whenever a URL is encountered +- ✅ **Paste a URL**: Simply paste/drag a URL into the chat, and `big-AGI` will load and attach the page (very effective) +- ✅ **Use /browse**: Type `/browse [URL]` in the chat to command `big-AGI` to load the specified web page +- ✅ **ReAct**: ReAct will automatically use the `loadURL()` function whenever a URL is encountered + +It does not yet support the following functionality: + +- ✖️ **Auto-browsing by LLMs**: if an LLM encounters a URL, it will NOT load the page and will likely respond + that it cannot browse the web - No technical limitation, just haven't gotten to implement this outside of `/react` yet First of all, you need to procure a Puppteer web browsing service endpoint. `big-AGI` supports services like: @@ -109,3 +114,5 @@ If you encounter any issues or have questions about configuring the browse funct --- Enjoy the enhanced browsing experience within `big-AGI` and explore the web without ever leaving your chat! 
+ +Last updated on Feb 27, 2024 ([edit on GitHub](https://github.com/enricoros/big-AGI/edit/main/docs/config-feature-browse.md)) \ No newline at end of file diff --git a/docs/deploy-analytics.md b/docs/deploy-analytics.md new file mode 100644 index 000000000..87c4852d9 --- /dev/null +++ b/docs/deploy-analytics.md @@ -0,0 +1,63 @@ +# big-AGI Analytics + +The open-source big-AGI project provides support for the following analytics services: + +- **Vercel Analytics**: automatic when deployed to Vercel +- **Google Analytics 4**: manual setup required + +The following is a quick overview of the Analytics options for the deployers of this open-source project. +big-AGI is deployed to many large-scale and enterprise environments through various ways (custom builds, Docker, Vercel, Cloudflare, etc.), +and this guide is for its customization. + +## Service Configuration + +### Vercel Analytics + +- Why: understand coarse traction, and identify deployment issues - all without tracking individual users +- What: top pages, top referrers, country of origin, operating system, browser, and page speed metrics + +Vercel Analytics and Speed Insights are local API endpoints deployed to your domain, so everything stays within your +domain. Furthermore, the Vercel Analytics service is privacy-friendly, and does not track individual users. + +This service is available to system administrators and is automatically enabled when deploying to Vercel. +The code that activates Vercel Analytics is located in the `src/pages/_app.tsx` file: + +```tsx +const MyApp = ({ Component, emotionCache, pageProps }: MyAppProps) => <> + ... + {isVercelFromFrontend && <VercelAnalytics />} + {isVercelFromFrontend && <VercelSpeedInsights />} + ... +</>; +``` + +When big-AGI is served on Vercel hosts, the ```process.env.NEXT_PUBLIC_VERCEL_URL``` environment variable is trueish, and +analytics will be sent by default to the Vercel Analytics service which is deployed by Vercel IF configured from the +Vercel project dashboard. 
+ +In summary: to turn it on, activate the `Analytics` service in the Vercel project dashboard. + +### Google Analytics 4 + +- Why: user engagement and retention, performance insights, personalization, content optimization +- What: https://support.google.com/analytics/answer/11593727 + +Google Analytics 4 (GA4) is a powerful tool for understanding user behavior and engagement. +This can help optimize big-AGI, understanding which features are needed/used and which aren't. + +To enable Google Analytics 4, you need to set the `NEXT_PUBLIC_GA4_MEASUREMENT_ID` environment variable +before starting the local build or the docker build (i.e. at build time), at which point the +server/container will be able to report analytics to your Google Analytics 4 property. + +As of Feb 27, 2024, this feature is in development. + +## Configurations + +| Scope | Default | Description / Instructions | +|-----------------------------------------------------------------------------------------|------------------|-------------------------------------------------------------------------------------------------------------------------| +| Your source builds of big-AGI | None | **Vercel**: enable Vercel Analytics from the dashboard. · **Google Analytics**: set environment variable at build time. | +| Your docker builds of big-AGI | None | **Vercel**: n/a. · **Google Analytics**: set environment variable at `docker build` time. | +| [big-agi.com](https://big-agi.com) | Vercel + Google | The main website ([privacy policy](https://big-agi.com/privacy)) hosted for free for anyone. | +| [official Docker packages](https://github.com/enricoros/big-AGI/pkgs/container/big-agi) | Google Analytics | **Vercel**: n/a · **Google Analytics**: set to the big-agi.com Google Analytics for analytics and improvements. | + +Note: this information is updated as of Feb 27, 2024 and can change at any time. 
\ No newline at end of file diff --git a/docs/environment-variables.md b/docs/environment-variables.md index d83bbeb34..a2ec79674 100644 --- a/docs/environment-variables.md +++ b/docs/environment-variables.md @@ -28,6 +28,8 @@ AZURE_OPENAI_API_KEY= ANTHROPIC_API_KEY= ANTHROPIC_API_HOST= GEMINI_API_KEY= +LOCALAI_API_HOST= +LOCALAI_API_KEY= MISTRAL_API_KEY= OLLAMA_API_HOST= OPENROUTER_API_KEY= @@ -55,15 +57,22 @@ BACKEND_ANALYTICS= # Backend HTTP Basic Authentication (see `deploy-authentication.md` for turning on authentication) HTTP_BASIC_AUTH_USERNAME= HTTP_BASIC_AUTH_PASSWORD= + +# Frontend variables +NEXT_PUBLIC_GA4_MEASUREMENT_ID= +NEXT_PUBLIC_PLANTUML_SERVER_URL= ``` -## Variables Documentation +## Backend Variables + +These variables are used only by the server-side code, at runtime. Define them before running the nextjs local server (in development or +cloud deployment), or pass them to Docker (--env-file or -e) when starting the container. ### Database -For Database configuration see [deploy-database.md](deploy-database.md). +To enable Chat Link Sharing, you need to connect the backend to a database. We currently support Postgres and MongoDB. -To enable features such as Chat Link Sharing, you need to connect the backend to a database. We currently support Postgres and MongoDB. +For Database configuration see [deploy-database.md](deploy-database.md). 
### LLMs @@ -80,13 +89,15 @@ requiring the user to enter an API key | `ANTHROPIC_API_KEY` | The API key for Anthropic | Optional | | `ANTHROPIC_API_HOST` | Changes the backend host for the Anthropic vendor, to enable platforms such as [config-aws-bedrock.md](config-aws-bedrock.md) | Optional | | `GEMINI_API_KEY` | The API key for Google AI's Gemini | Optional | +| `LOCALAI_API_HOST` | Sets the URL of the LocalAI server, or defaults to http://127.0.0.1:8080 | Optional | +| `LOCALAI_API_KEY` | The (Optional) API key for LocalAI | Optional | | `MISTRAL_API_KEY` | The API key for Mistral | Optional | | `OLLAMA_API_HOST` | Changes the backend host for the Ollama vendor. See [config-local-ollama.md](config-local-ollama) | | | `OPENROUTER_API_KEY` | The API key for OpenRouter | Optional | | `PERPLEXITY_API_KEY` | The API key for Perplexity | Optional | | `TOGETHERAI_API_KEY` | The API key for Together AI | Optional | -### Model Observability: Helicone +### LLM Observability: Helicone Helicone provides observability to your LLM calls. It is a paid service, with a generous free tier. It is currently supported for: @@ -98,7 +109,7 @@ It is currently supported for: |--------------------|--------------------------| | `HELICONE_API_KEY` | The API key for Helicone | -### Specials +### Features Enable the app to Talk, Draw, and Google things up. @@ -108,16 +119,31 @@ Enable the app to Talk, Draw, and Google things up. | `ELEVENLABS_API_KEY` | ElevenLabs API Key - used for calls, etc. | | `ELEVENLABS_API_HOST` | Custom host for ElevenLabs | | `ELEVENLABS_VOICE_ID` | Default voice ID for ElevenLabs | +| **Text-To-Image** | [Prodia](https://prodia.com/) is a reliable image generation service | +| `PRODIA_API_KEY` | Prodia API Key - used with '/imagine ...' 
| | **Google Custom Search** | [Google Programmable Search Engine](https://programmablesearchengine.google.com/about/) produces links to pages | | `GOOGLE_CLOUD_API_KEY` | Google Cloud API Key, used with the '/react' command - [Link to GCP](https://console.cloud.google.com/apis/credentials) | | `GOOGLE_CSE_ID` | Google Custom/Programmable Search Engine ID - [Link to PSE](https://programmablesearchengine.google.com/) | -| **Text-To-Image** | [Prodia](https://prodia.com/) is a reliable image generation service | -| `PRODIA_API_KEY` | Prodia API Key - used with '/imagine ...' | | **Browse** | | -| `PUPPETEER_WSS_ENDPOINT` | Puppeteer WebSocket endpoint - used for browsing, etc. | +| `PUPPETEER_WSS_ENDPOINT` | Puppeteer WebSocket endpoint - used for browsing (page downloading), etc. | | **Backend** | | | `BACKEND_ANALYTICS` | Semicolon-separated list of analytics flags (see backend.analytics.ts). Flags: `domain` logs the responding domain. | | `HTTP_BASIC_AUTH_USERNAME` | See the [Authentication](deploy-authentication.md) guide. Username for HTTP Basic Authentication. | | `HTTP_BASIC_AUTH_PASSWORD` | Password for HTTP Basic Authentication. | +### Frontend Variables + +The values of these variables are passed to the frontend (Web UI) - make sure they do not contain secrets. + +| Variable | Description | +|:----------------------------------|:-----------------------------------------------------------------------------------------| +| `NEXT_PUBLIC_GA4_MEASUREMENT_ID` | The measurement ID for Google Analytics 4. (see [deploy-analytics](deploy-analytics.md)) | +| `NEXT_PUBLIC_PLANTUML_SERVER_URL` | The URL of the PlantUML server, used for rendering UML diagrams. (code in RenderCode.tsx) | + +> Important: these variables must be set at build time, which is required by Next.js to pass them to the frontend. +> This is in contrast to the backend variables, which can be set when starting the local server/container. 
+ --- + +For a higher level overview of backend code and environemnt customization, +see the [big-AGI Customization](customizations.md) guide. diff --git a/package-lock.json b/package-lock.json index 8b59934a2..993911d28 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13,10 +13,11 @@ "@emotion/react": "^11.11.3", "@emotion/server": "^11.11.0", "@emotion/styled": "^11.11.0", - "@mui/icons-material": "^5.15.10", - "@mui/joy": "^5.0.0-beta.28", + "@mui/icons-material": "^5.15.11", + "@mui/joy": "^5.0.0-beta.29", "@next/bundle-analyzer": "^14.1.0", - "@prisma/client": "^5.10.1", + "@next/third-parties": "^14.1.0", + "@prisma/client": "^5.10.2", "@sanity/diff-match-patch": "^3.1.1", "@t3-oss/env-nextjs": "^0.9.2", "@tanstack/react-query": "~4.36.1", @@ -41,12 +42,12 @@ "react-katex": "^3.0.1", "react-markdown": "^9.0.1", "react-player": "^2.14.1", - "react-resizable-panels": "^2.0.9", + "react-resizable-panels": "^2.0.11", "react-timeago": "^7.2.0", "remark-gfm": "^4.0.0", "sharp": "^0.33.2", "superjson": "^2.2.1", - "tesseract.js": "^5.0.4", + "tesseract.js": "^5.0.5", "tiktoken": "^1.0.13", "uuid": "^9.0.1", "zod": "^3.22.4", @@ -54,21 +55,21 @@ }, "devDependencies": { "@cloudflare/puppeteer": "^0.0.5", - "@types/node": "^20.11.19", + "@types/node": "^20.11.20", "@types/nprogress": "^0.2.3", "@types/plantuml-encoder": "^1.4.2", "@types/prismjs": "^1.26.3", - "@types/react": "^18.2.57", + "@types/react": "^18.2.59", "@types/react-beautiful-dnd": "^13.1.8", "@types/react-csv": "^1.1.10", "@types/react-dom": "^18.2.19", "@types/react-katex": "^3.0.4", "@types/react-timeago": "^4.1.7", "@types/uuid": "^9.0.8", - "eslint": "^8.56.0", + "eslint": "^8.57.0", "eslint-config-next": "^14.1.0", "prettier": "^3.2.5", - "prisma": "^5.10.1", + "prisma": "^5.10.2", "typescript": "^5.3.3" }, "engines": { @@ -522,9 +523,9 @@ } }, "node_modules/@eslint/js": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.56.0.tgz", - "integrity": 
"sha512-gMsVel9D7f2HLkBma9VbtzZRehRogVRfbr++f06nL2vnCGCNlzOD+/MUov/F4p8myyAHspEhVobgjpX64q5m6A==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz", + "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -1093,14 +1094,14 @@ } }, "node_modules/@mui/base": { - "version": "5.0.0-beta.36", - "resolved": "https://registry.npmjs.org/@mui/base/-/base-5.0.0-beta.36.tgz", - "integrity": "sha512-6A8fYiXgjqTO6pgj31Hc8wm1M3rFYCxDRh09dBVk0L0W4cb2lnurRJa3cAyic6hHY+we1S58OdGYRbKmOsDpGQ==", + "version": "5.0.0-beta.37", + "resolved": "https://registry.npmjs.org/@mui/base/-/base-5.0.0-beta.37.tgz", + "integrity": "sha512-/o3anbb+DeCng8jNsd3704XtmmLDZju1Fo8R2o7ugrVtPQ/QpcqddwKNzKPZwa0J5T8YNW3ZVuHyQgbTnQLisQ==", "dependencies": { "@babel/runtime": "^7.23.9", "@floating-ui/react-dom": "^2.0.8", "@mui/types": "^7.2.13", - "@mui/utils": "^5.15.9", + "@mui/utils": "^5.15.11", "@popperjs/core": "^2.11.8", "clsx": "^2.1.0", "prop-types": "^15.8.1" @@ -1124,18 +1125,18 @@ } }, "node_modules/@mui/core-downloads-tracker": { - "version": "5.15.10", - "resolved": "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-5.15.10.tgz", - "integrity": "sha512-qPv7B+LeMatYuzRjB3hlZUHqinHx/fX4YFBiaS19oC02A1e9JFuDKDvlyRQQ5oRSbJJt0QlaLTlr0IcauVcJRQ==", + "version": "5.15.11", + "resolved": "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-5.15.11.tgz", + "integrity": "sha512-JVrJ9Jo4gyU707ujnRzmE8ABBWpXd6FwL9GYULmwZRtfPg89ggXs/S3MStQkpJ1JRWfdLL6S5syXmgQGq5EDAw==", "funding": { "type": "opencollective", "url": "https://opencollective.com/mui-org" } }, "node_modules/@mui/icons-material": { - "version": "5.15.10", - "resolved": "https://registry.npmjs.org/@mui/icons-material/-/icons-material-5.15.10.tgz", - "integrity": 
"sha512-9cF8oUHZKo9oQ7EQ3pxPELaZuZVmphskU4OI6NiJNDVN7zcuvrEsuWjYo1Zh4fLiC39Nrvm30h/B51rcUjvSGA==", + "version": "5.15.11", + "resolved": "https://registry.npmjs.org/@mui/icons-material/-/icons-material-5.15.11.tgz", + "integrity": "sha512-R5ZoQqnKpd+5Ew7mBygTFLxgYsQHPhgR3TDXSgIHYIjGzYuyPLmGLSdcPUoMdi6kxiYqHlpPj4NJxlbaFD0UHA==", "dependencies": { "@babel/runtime": "^7.23.9" }, @@ -1158,16 +1159,16 @@ } }, "node_modules/@mui/joy": { - "version": "5.0.0-beta.28", - "resolved": "https://registry.npmjs.org/@mui/joy/-/joy-5.0.0-beta.28.tgz", - "integrity": "sha512-K3a2GhYix+oOGkJwDn3+SLL1+Z8ZXwJyQaEZPzQllMvU8fn4JyfFN4lGukJScVzTR0mWDG7MPxHsP0ozVNi8Mg==", + "version": "5.0.0-beta.29", + "resolved": "https://registry.npmjs.org/@mui/joy/-/joy-5.0.0-beta.29.tgz", + "integrity": "sha512-23bU7NOSRa6BiWqExEA0p72ttMbzhoISX3qjKI8gcX+gAnk6lKKuhApBc/rD3ORdDQfiPx5LuxxLW0V73atQEg==", "dependencies": { "@babel/runtime": "^7.23.9", - "@mui/base": "5.0.0-beta.36", - "@mui/core-downloads-tracker": "^5.15.10", - "@mui/system": "^5.15.9", + "@mui/base": "5.0.0-beta.37", + "@mui/core-downloads-tracker": "^5.15.11", + "@mui/system": "^5.15.11", "@mui/types": "^7.2.13", - "@mui/utils": "^5.15.9", + "@mui/utils": "^5.15.11", "clsx": "^2.1.0", "prop-types": "^15.8.1" }, @@ -1198,17 +1199,17 @@ } }, "node_modules/@mui/material": { - "version": "5.15.10", - "resolved": "https://registry.npmjs.org/@mui/material/-/material-5.15.10.tgz", - "integrity": "sha512-YJJGHjwDOucecjDEV5l9ISTCo+l9YeWrho623UajzoHRYxuKUmwrGVYOW4PKwGvCx9SU9oklZnbbi2Clc5XZHw==", + "version": "5.15.11", + "resolved": "https://registry.npmjs.org/@mui/material/-/material-5.15.11.tgz", + "integrity": "sha512-FA3eEuEZaDaxgN3CgfXezMWbCZ4VCeU/sv0F0/PK5n42qIgsPVD6q+j71qS7/62sp6wRFMHtDMpXRlN+tT/7NA==", "peer": true, "dependencies": { "@babel/runtime": "^7.23.9", - "@mui/base": "5.0.0-beta.36", - "@mui/core-downloads-tracker": "^5.15.10", - "@mui/system": "^5.15.9", + "@mui/base": "5.0.0-beta.37", + "@mui/core-downloads-tracker": 
"^5.15.11", + "@mui/system": "^5.15.11", "@mui/types": "^7.2.13", - "@mui/utils": "^5.15.9", + "@mui/utils": "^5.15.11", "@types/react-transition-group": "^4.4.10", "clsx": "^2.1.0", "csstype": "^3.1.3", @@ -1243,12 +1244,12 @@ } }, "node_modules/@mui/private-theming": { - "version": "5.15.9", - "resolved": "https://registry.npmjs.org/@mui/private-theming/-/private-theming-5.15.9.tgz", - "integrity": "sha512-/aMJlDOxOTAXyp4F2rIukW1O0anodAMCkv1DfBh/z9vaKHY3bd5fFf42wmP+0GRmwMinC5aWPpNfHXOED1fEtg==", + "version": "5.15.11", + "resolved": "https://registry.npmjs.org/@mui/private-theming/-/private-theming-5.15.11.tgz", + "integrity": "sha512-jY/696SnSxSzO1u86Thym7ky5T9CgfidU3NFJjguldqK4f3Z5S97amZ6nffg8gTD0HBjY9scB+4ekqDEUmxZOA==", "dependencies": { "@babel/runtime": "^7.23.9", - "@mui/utils": "^5.15.9", + "@mui/utils": "^5.15.11", "prop-types": "^15.8.1" }, "engines": { @@ -1269,9 +1270,9 @@ } }, "node_modules/@mui/styled-engine": { - "version": "5.15.9", - "resolved": "https://registry.npmjs.org/@mui/styled-engine/-/styled-engine-5.15.9.tgz", - "integrity": "sha512-NRKtYkL5PZDH7dEmaLEIiipd3mxNnQSO+Yo8rFNBNptY8wzQnQ+VjayTq39qH7Sast5cwHKYFusUrQyD+SS4Og==", + "version": "5.15.11", + "resolved": "https://registry.npmjs.org/@mui/styled-engine/-/styled-engine-5.15.11.tgz", + "integrity": "sha512-So21AhAngqo07ces4S/JpX5UaMU2RHXpEA6hNzI6IQjd/1usMPxpgK8wkGgTe3JKmC2KDmH8cvoycq5H3Ii7/w==", "dependencies": { "@babel/runtime": "^7.23.9", "@emotion/cache": "^11.11.0", @@ -1300,15 +1301,15 @@ } }, "node_modules/@mui/system": { - "version": "5.15.9", - "resolved": "https://registry.npmjs.org/@mui/system/-/system-5.15.9.tgz", - "integrity": "sha512-SxkaaZ8jsnIJ77bBXttfG//LUf6nTfOcaOuIgItqfHv60ZCQy/Hu7moaob35kBb+guxVJnoSZ+7vQJrA/E7pKg==", + "version": "5.15.11", + "resolved": "https://registry.npmjs.org/@mui/system/-/system-5.15.11.tgz", + "integrity": "sha512-9j35suLFq+MgJo5ktVSHPbkjDLRMBCV17NMBdEQurh6oWyGnLM4uhU4QGZZQ75o0vuhjJghOCA1jkO3+79wKsA==", "dependencies": { "@babel/runtime": 
"^7.23.9", - "@mui/private-theming": "^5.15.9", - "@mui/styled-engine": "^5.15.9", + "@mui/private-theming": "^5.15.11", + "@mui/styled-engine": "^5.15.11", "@mui/types": "^7.2.13", - "@mui/utils": "^5.15.9", + "@mui/utils": "^5.15.11", "clsx": "^2.1.0", "csstype": "^3.1.3", "prop-types": "^15.8.1" @@ -1352,9 +1353,9 @@ } }, "node_modules/@mui/utils": { - "version": "5.15.9", - "resolved": "https://registry.npmjs.org/@mui/utils/-/utils-5.15.9.tgz", - "integrity": "sha512-yDYfr61bCYUz1QtwvpqYy/3687Z8/nS4zv7lv/ih/6ZFGMl1iolEvxRmR84v2lOYxlds+kq1IVYbXxDKh8Z9sg==", + "version": "5.15.11", + "resolved": "https://registry.npmjs.org/@mui/utils/-/utils-5.15.11.tgz", + "integrity": "sha512-D6bwqprUa9Stf8ft0dcMqWyWDKEo7D+6pB1k8WajbqlYIRA8J8Kw9Ra7PSZKKePGBGWO+/xxrX1U8HpG/aXQCw==", "dependencies": { "@babel/runtime": "^7.23.9", "@types/prop-types": "^15.7.11", @@ -1535,6 +1536,18 @@ "node": ">= 10" } }, + "node_modules/@next/third-parties": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/@next/third-parties/-/third-parties-14.1.0.tgz", + "integrity": "sha512-f55SdvQ1WWxi4mb5QqtYQh5wRzbm1XaeP7s39DPn4ks3re+n9VlFccbMxBRHqkE62zAyIKmvkUB2cByT/gugGA==", + "dependencies": { + "third-party-capital": "1.0.20" + }, + "peerDependencies": { + "next": "^13.0.0 || ^14.0.0", + "react": "^18.2.0" + } + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -1595,9 +1608,9 @@ } }, "node_modules/@prisma/client": { - "version": "5.10.1", - "resolved": "https://registry.npmjs.org/@prisma/client/-/client-5.10.1.tgz", - "integrity": "sha512-4R8Vp6sSwVJSnOxw8WU1WSLqE/G3WJy1xA05XvW87cINoB1hEY7endw5Ppy6TrIBCCtHQim2lqfHkbPvv+i7bQ==", + "version": "5.10.2", + "resolved": "https://registry.npmjs.org/@prisma/client/-/client-5.10.2.tgz", + "integrity": "sha512-ef49hzB2yJZCvM5gFHMxSFL9KYrIP9udpT5rYo0CsHD4P9IKj473MbhU1gjKKftiwWBTIyrt9jukprzZXazyag==", "hasInstallScript": true, "engines": { "node": 
">=16.13" @@ -1612,22 +1625,22 @@ } }, "node_modules/@prisma/debug": { - "version": "5.10.1", - "resolved": "https://registry.npmjs.org/@prisma/debug/-/debug-5.10.1.tgz", - "integrity": "sha512-Ipo9y/lCMzedXMtEBe4YCdvVVivSy6MdG7aYTM15t86g4CRzwdlEsw8Czxnw20w9Qgzdx0MX2iLsCCIG4JoHbA==", + "version": "5.10.2", + "resolved": "https://registry.npmjs.org/@prisma/debug/-/debug-5.10.2.tgz", + "integrity": "sha512-bkBOmH9dpEBbMKFJj8V+Zp8IZHIBjy3fSyhLhxj4FmKGb/UBSt9doyfA6k1UeUREsMJft7xgPYBbHSOYBr8XCA==", "devOptional": true }, "node_modules/@prisma/engines": { - "version": "5.10.1", - "resolved": "https://registry.npmjs.org/@prisma/engines/-/engines-5.10.1.tgz", - "integrity": "sha512-75oJa900Pw+GAXjPJmKZqsD7bgSgQbpeGLxCwchrbgPIM70y3h0FbjIsiSAjuhwIGUCCNWzctUNv67rvSmoQAQ==", + "version": "5.10.2", + "resolved": "https://registry.npmjs.org/@prisma/engines/-/engines-5.10.2.tgz", + "integrity": "sha512-HkSJvix6PW8YqEEt3zHfCYYJY69CXsNdhU+wna+4Y7EZ+AwzeupMnUThmvaDA7uqswiHkgm5/SZ6/4CStjaGmw==", "devOptional": true, "hasInstallScript": true, "dependencies": { - "@prisma/debug": "5.10.1", + "@prisma/debug": "5.10.2", "@prisma/engines-version": "5.10.0-34.5a9203d0590c951969e85a7d07215503f4672eb9", - "@prisma/fetch-engine": "5.10.1", - "@prisma/get-platform": "5.10.1" + "@prisma/fetch-engine": "5.10.2", + "@prisma/get-platform": "5.10.2" } }, "node_modules/@prisma/engines-version": { @@ -1637,23 +1650,23 @@ "devOptional": true }, "node_modules/@prisma/fetch-engine": { - "version": "5.10.1", - "resolved": "https://registry.npmjs.org/@prisma/fetch-engine/-/fetch-engine-5.10.1.tgz", - "integrity": "sha512-xg3I3RM/qENykZNGBna+14gBkkZL2TVkyX3OX2GWI8MV23Meq5jYdqvlgBrZne25ZxiulctSEA2D6Y5fq1eLog==", + "version": "5.10.2", + "resolved": "https://registry.npmjs.org/@prisma/fetch-engine/-/fetch-engine-5.10.2.tgz", + "integrity": "sha512-dSmXcqSt6DpTmMaLQ9K8ZKzVAMH3qwGCmYEZr/uVnzVhxRJ1EbT/w2MMwIdBNq1zT69Rvh0h75WMIi0mrIw7Hg==", "devOptional": true, "dependencies": { - "@prisma/debug": "5.10.1", + 
"@prisma/debug": "5.10.2", "@prisma/engines-version": "5.10.0-34.5a9203d0590c951969e85a7d07215503f4672eb9", - "@prisma/get-platform": "5.10.1" + "@prisma/get-platform": "5.10.2" } }, "node_modules/@prisma/get-platform": { - "version": "5.10.1", - "resolved": "https://registry.npmjs.org/@prisma/get-platform/-/get-platform-5.10.1.tgz", - "integrity": "sha512-0rE8lSE3y+Ua3LaOcXlWADz21+kGkf9NWmGNuh8n9I6uaCq90LQxM002l4NSYg6ELtiJXyDgJ4nRhM0x0OXjDQ==", + "version": "5.10.2", + "resolved": "https://registry.npmjs.org/@prisma/get-platform/-/get-platform-5.10.2.tgz", + "integrity": "sha512-nqXP6vHiY2PIsebBAuDeWiUYg8h8mfjBckHh6Jezuwej0QJNnjDiOq30uesmg+JXxGk99nqyG3B7wpcOODzXvg==", "devOptional": true, "dependencies": { - "@prisma/debug": "5.10.1" + "@prisma/debug": "5.10.2" } }, "node_modules/@rushstack/eslint-patch": { @@ -1815,9 +1828,9 @@ "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==" }, "node_modules/@types/estree-jsx": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.4.tgz", - "integrity": "sha512-5idy3hvI9lAMqsyilBM+N+boaCf1MgoefbDxN6KEO5aK17TOHwFAYT9sjxzeKAiIWRUBgLxmZ9mPcnzZXtTcRQ==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz", + "integrity": "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==", "dependencies": { "@types/estree": "*" } @@ -1859,9 +1872,9 @@ "integrity": "sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==" }, "node_modules/@types/node": { - "version": "20.11.19", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.19.tgz", - "integrity": "sha512-7xMnVEcZFu0DikYjWOlRq7NTPETrm7teqUT2WkQjrTIkEgUyyGdWsj/Zg8bEJt5TNklzbPD1X3fqfsHw3SpapQ==", + "version": "20.11.20", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.20.tgz", + "integrity": 
"sha512-7/rR21OS+fq8IyHTgtLkDK949uzsa6n8BkziAKtPVpugIkO6D+/ooXMvzXxDnZrmtXVfjb1bKQafYpb8s89LOg==", "dev": true, "dependencies": { "undici-types": "~5.26.4" @@ -1899,9 +1912,9 @@ "integrity": "sha512-ga8y9v9uyeiLdpKddhxYQkxNDrfvuPrlFb0N1qnZZByvcElJaXthF1UhvCh9TLWJBEHeNtdnbysW7Y6Uq8CVng==" }, "node_modules/@types/react": { - "version": "18.2.57", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.57.tgz", - "integrity": "sha512-ZvQsktJgSYrQiMirAN60y4O/LRevIV8hUzSOSNB6gfR3/o3wCBFQx3sPwIYtuDMeiVgsSS3UzCV26tEzgnfvQw==", + "version": "18.2.59", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.59.tgz", + "integrity": "sha512-DE+F6BYEC8VtajY85Qr7mmhTd/79rJKIHCg99MU9SWPB4xvLb6D1za2vYflgZfmPqQVEr6UqJTnLXEwzpVPuOg==", "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", @@ -2624,9 +2637,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001588", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001588.tgz", - "integrity": "sha512-+hVY9jE44uKLkH0SrUTqxjxqNTOWHsbnQDIKjwkZ3lNTzUUVdBLBGXtj/q5Mp5u98r3droaZAewQuEDzjQdZlQ==", + "version": "1.0.30001589", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001589.tgz", + "integrity": "sha512-vNQWS6kI+q6sBlHbh71IIeC+sRwK2N3EDySc/updIGhIee2x5z00J4c1242/5/d6EpEMdOnk/m+6tuk4/tcsqg==", "funding": [ { "type": "opencollective", @@ -3301,16 +3314,16 @@ } }, "node_modules/eslint": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.56.0.tgz", - "integrity": "sha512-Go19xM6T9puCOWntie1/P997aXxFsOi37JIHRWI514Hc6ZnaHGKY9xFhrU65RT6CcBEzZoGG1e6Nq+DT04ZtZQ==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", + "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.4", 
- "@eslint/js": "8.56.0", - "@humanwhocodes/config-array": "^0.11.13", + "@eslint/js": "8.57.0", + "@humanwhocodes/config-array": "^0.11.14", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "@ungap/structured-clone": "^1.2.0", @@ -3856,9 +3869,9 @@ } }, "node_modules/flatted": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.0.tgz", - "integrity": "sha512-noqGuLw158+DuD9UPRKHpJ2hGxpFyDlYYrfM0mWt4XhT4n0lwzTLh70Tkdyy4kyTmyTT9Bv7bWAJqw7cgkEXDg==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz", + "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==", "dev": true }, "node_modules/for-each": { @@ -4760,12 +4773,15 @@ } }, "node_modules/is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", + "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2" + "call-bind": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -6578,13 +6594,13 @@ } }, "node_modules/prisma": { - "version": "5.10.1", - "resolved": "https://registry.npmjs.org/prisma/-/prisma-5.10.1.tgz", - "integrity": "sha512-2VLQ8dMXZGd5qS6XIDyAzK2W4xdPJDt/E3IBcjByn64Fnp33llqq/HBxx0EroZO7ezKiw1gS3BOrkpyzPfeFhA==", + "version": "5.10.2", + "resolved": "https://registry.npmjs.org/prisma/-/prisma-5.10.2.tgz", + "integrity": "sha512-hqb/JMz9/kymRE25pMWCxkdyhbnIWrq+h7S6WysJpdnCvhstbJSNP/S6mScEcqiB8Qv2F+0R3yG+osRaWqZacQ==", "devOptional": true, "hasInstallScript": 
true, "dependencies": { - "@prisma/engines": "5.10.1" + "@prisma/engines": "5.10.2" }, "bin": { "prisma": "build/index.js" @@ -6812,9 +6828,9 @@ "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==" }, "node_modules/react-resizable-panels": { - "version": "2.0.9", - "resolved": "https://registry.npmjs.org/react-resizable-panels/-/react-resizable-panels-2.0.9.tgz", - "integrity": "sha512-ZylBvs7oG7Y/INWw3oYGolqgpFvoPW8MPeg9l1fURDeKpxrmUuCHBUmPj47BdZ11MODImu3kZYXG85rbySab7w==", + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/react-resizable-panels/-/react-resizable-panels-2.0.11.tgz", + "integrity": "sha512-tA3OvGFEK/U9rKuEg6TpXcr+i+cN5X8B4UIvs7jqr5lby629pDTGvqRjo1EJLhBpRZfkg0Zz1INJlSYigaS99g==", "peerDependencies": { "react": "^16.14.0 || ^17.0.0 || ^18.0.0", "react-dom": "^16.14.0 || ^17.0.0 || ^18.0.0" @@ -7712,9 +7728,9 @@ } }, "node_modules/tesseract.js": { - "version": "5.0.4", - "resolved": "https://registry.npmjs.org/tesseract.js/-/tesseract.js-5.0.4.tgz", - "integrity": "sha512-GCIoSQMZlvTP2AaHrjUOH29/oyO7ZyHVe+BhTexEcO7/nDClRVDRjl2sYJLOWSSNbTDrm5q2m1+gfaf3lUrZ5Q==", + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/tesseract.js/-/tesseract.js-5.0.5.tgz", + "integrity": "sha512-xtTfec4IynE63sl6kAFkGl1mejlNxr9qQXzVGAUHd7IPdQXveopjGO9Eph6xkSuW5sUCC9AT6VdBmODh8ZymGg==", "hasInstallScript": true, "dependencies": { "bmp-js": "^0.1.0", @@ -7745,6 +7761,11 @@ "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", "dev": true }, + "node_modules/third-party-capital": { + "version": "1.0.20", + "resolved": "https://registry.npmjs.org/third-party-capital/-/third-party-capital-1.0.20.tgz", + "integrity": "sha512-oB7yIimd8SuGptespDAZnNkzIz+NWaJCu2RMsbs4Wmp9zSDUM8Nhi3s2OOcqYuv3mN4hitXc8DVx+LyUmbUDiA==" + }, "node_modules/through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", @@ 
-7765,9 +7786,9 @@ "integrity": "sha512-JaL9ZnvTbGFMDIBeGdVkLt4qWTeCPw+n7Ock+wceAGRenuHA6nOOvMJFliNDyXsjg2osGKJWsXtO2xc74VxyDw==" }, "node_modules/tiny-invariant": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.1.tgz", - "integrity": "sha512-AD5ih2NlSssTCwsMznbvwMZpJ1cbhkGd2uueNxzv2jDlEeZdU04JQfRnggJQ8DrcVBGjAsCKwFBbDlVNtEMlzw==" + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", + "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==" }, "node_modules/to-fast-properties": { "version": "2.0.0", @@ -7888,15 +7909,16 @@ } }, "node_modules/typed-array-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz", - "integrity": "sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", + "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "has-proto": "^1.0.1", - "is-typed-array": "^1.1.10" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" diff --git a/package.json b/package.json index 75be23bcf..af6f419cc 100644 --- a/package.json +++ b/package.json @@ -22,10 +22,11 @@ "@emotion/react": "^11.11.3", "@emotion/server": "^11.11.0", "@emotion/styled": "^11.11.0", - "@mui/icons-material": "^5.15.10", - "@mui/joy": "^5.0.0-beta.28", + "@mui/icons-material": "^5.15.11", + "@mui/joy": "^5.0.0-beta.29", "@next/bundle-analyzer": "^14.1.0", - "@prisma/client": "^5.10.1", + "@next/third-parties": "^14.1.0", + "@prisma/client": "^5.10.2", 
"@sanity/diff-match-patch": "^3.1.1", "@t3-oss/env-nextjs": "^0.9.2", "@tanstack/react-query": "~4.36.1", @@ -50,12 +51,12 @@ "react-katex": "^3.0.1", "react-markdown": "^9.0.1", "react-player": "^2.14.1", - "react-resizable-panels": "^2.0.9", + "react-resizable-panels": "^2.0.11", "react-timeago": "^7.2.0", "remark-gfm": "^4.0.0", "sharp": "^0.33.2", "superjson": "^2.2.1", - "tesseract.js": "^5.0.4", + "tesseract.js": "^5.0.5", "tiktoken": "^1.0.13", "uuid": "^9.0.1", "zod": "^3.22.4", @@ -63,21 +64,21 @@ }, "devDependencies": { "@cloudflare/puppeteer": "^0.0.5", - "@types/node": "^20.11.19", + "@types/node": "^20.11.20", "@types/nprogress": "^0.2.3", "@types/plantuml-encoder": "^1.4.2", "@types/prismjs": "^1.26.3", - "@types/react": "^18.2.57", + "@types/react": "^18.2.59", "@types/react-beautiful-dnd": "^13.1.8", "@types/react-csv": "^1.1.10", "@types/react-dom": "^18.2.19", "@types/react-katex": "^3.0.4", "@types/react-timeago": "^4.1.7", "@types/uuid": "^9.0.8", - "eslint": "^8.56.0", + "eslint": "^8.57.0", "eslint-config-next": "^14.1.0", "prettier": "^3.2.5", - "prisma": "^5.10.1", + "prisma": "^5.10.2", "typescript": "^5.3.3" }, "engines": { diff --git a/pages/_app.tsx b/pages/_app.tsx index 09d98e41f..e06aafebf 100644 --- a/pages/_app.tsx +++ b/pages/_app.tsx @@ -4,7 +4,6 @@ import { MyAppProps } from 'next/app'; import { Analytics as VercelAnalytics } from '@vercel/analytics/next'; import { SpeedInsights as VercelSpeedInsights } from '@vercel/speed-insights/next'; - import { Brand } from '~/common/app.config'; import { apiQuery } from '~/common/util/trpc.client'; @@ -20,6 +19,7 @@ import { ProviderSingleTab } from '~/common/providers/ProviderSingleTab'; import { ProviderSnacks } from '~/common/providers/ProviderSnacks'; import { ProviderTRPCQueryClient } from '~/common/providers/ProviderTRPCQueryClient'; import { ProviderTheming } from '~/common/providers/ProviderTheming'; +import { hasGoogleAnalytics, OptionalGoogleAnalytics } from 
'~/common/components/GoogleAnalytics'; import { isVercelFromFrontend } from '~/common/util/pwaUtils'; @@ -47,6 +47,7 @@ const MyApp = ({ Component, emotionCache, pageProps }: MyAppProps) => {isVercelFromFrontend && } {isVercelFromFrontend && } + {hasGoogleAnalytics && } ; diff --git a/pages/info/debug.tsx b/pages/info/debug.tsx index 3c8c06bfb..56bcc86e8 100644 --- a/pages/info/debug.tsx +++ b/pages/info/debug.tsx @@ -7,6 +7,7 @@ import DownloadIcon from '@mui/icons-material/Download'; import { AppPlaceholder } from '../../src/apps/AppPlaceholder'; import { backendCaps } from '~/modules/backend/state-backend'; +import { getPlantUmlServerUrl } from '~/modules/blocks/code/RenderCode'; import { withLayout } from '~/common/layout/withLayout'; @@ -29,7 +30,8 @@ import { useFolderStore } from '~/common/state/store-folders'; import { useUXLabsStore } from '~/common/state/store-ux-labs'; // utils access -import { clientHostName, isChromeDesktop, isFirefox, isIPhoneUser, isMacUser, isPwa, isVercelFromBackend, isVercelFromFrontend } from '~/common/util/pwaUtils'; +import { clientHostName, isChromeDesktop, isFirefox, isIPhoneUser, isMacUser, isPwa, isVercelFromFrontend } from '~/common/util/pwaUtils'; +import { getGA4MeasurementId } from '~/common/components/GoogleAnalytics'; import { supportsClipboardRead } from '~/common/util/clipboardUtils'; import { supportsScreenCapture } from '~/common/util/screenCaptureUtils'; @@ -114,8 +116,9 @@ function AppDebug() { deployment: { home: Brand.URIs.Home, hostName: clientHostName(), - isVercelFromBackend, isVercelFromFrontend, + measurementId: getGA4MeasurementId(), + plantUmlServerUrl: getPlantUmlServerUrl(), routeIndex: ROUTE_INDEX, routeChat: ROUTE_APP_CHAT, }, diff --git a/src/apps/call/AppCall.tsx b/src/apps/call/AppCall.tsx index 5d250e1b6..43f3851c5 100644 --- a/src/apps/call/AppCall.tsx +++ b/src/apps/call/AppCall.tsx @@ -65,6 +65,8 @@ export function AppCall() { display: 'flex', flexDirection: 'column', alignItems: 'center', 
justifyContent: hasIntent ? 'space-evenly' : undefined, gap: hasIntent ? 1 : undefined, + // shall force the contacts or telephone to stay within the container + overflowY: hasIntent ? 'hidden' : undefined, }}> {!hasIntent ? ( diff --git a/src/apps/call/CallWizard.tsx b/src/apps/call/CallWizard.tsx index ea4e6f5f4..7b80596cd 100644 --- a/src/apps/call/CallWizard.tsx +++ b/src/apps/call/CallWizard.tsx @@ -4,10 +4,10 @@ import { Box, Button, Card, CardContent, IconButton, ListItemDecorator, Typograp import ArrowForwardIcon from '@mui/icons-material/ArrowForward'; import ChatIcon from '@mui/icons-material/Chat'; import CheckIcon from '@mui/icons-material/Check'; -import CloseIcon from '@mui/icons-material/Close'; +import CloseRoundedIcon from '@mui/icons-material/CloseRounded'; import MicIcon from '@mui/icons-material/Mic'; import RecordVoiceOverIcon from '@mui/icons-material/RecordVoiceOver'; -import WarningIcon from '@mui/icons-material/Warning'; +import WarningRoundedIcon from '@mui/icons-material/WarningRounded'; import { PreferencesTab, useOptimaLayout } from '~/common/layout/optima/useOptimaLayout'; import { cssRainbowColorKeyframes } from '~/common/app.theme'; @@ -67,7 +67,7 @@ function StatusCard(props: { icon: React.JSX.Element, hasIssue: boolean, text: s {props.button} - {props.hasIssue ? : } + {props.hasIssue ? : } @@ -122,7 +122,7 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n - + Welcome to
your first call @@ -208,7 +208,7 @@ export function CallWizard(props: { strict?: boolean, conversationId: string | n // boxShadow: allGood ? 'md' : 'none', }} > - {allGood ? : } + {allGood ? : } diff --git a/src/apps/call/Telephone.tsx b/src/apps/call/Telephone.tsx index e964ab8c0..a762f5bf0 100644 --- a/src/apps/call/Telephone.tsx +++ b/src/apps/call/Telephone.tsx @@ -224,8 +224,9 @@ export function Telephone(props: { responseAbortController.current = new AbortController(); let finalText = ''; let error: any | null = null; - llmStreamingChatGenerate(chatLLMId, callPrompt, null, null, responseAbortController.current.signal, (updatedMessage: Partial) => { - const text = updatedMessage.text?.trim(); + setPersonaTextInterim('💭...'); + llmStreamingChatGenerate(chatLLMId, callPrompt, null, null, responseAbortController.current.signal, ({ textSoFar }) => { + const text = textSoFar?.trim(); if (text) { finalText = text; setPersonaTextInterim(text); @@ -354,7 +355,8 @@ export function Telephone(props: { text={message.text} variant={message.role === 'assistant' ? 'solid' : 'soft'} color={message.role === 'assistant' ? 'neutral' : 'primary'} - role={message.role} />, + role={message.role} + />, )} {/* Persona streaming text... */} diff --git a/src/apps/call/components/CallMessage.tsx b/src/apps/call/components/CallMessage.tsx index cb7a13ae5..1b3bdcbde 100644 --- a/src/apps/call/components/CallMessage.tsx +++ b/src/apps/call/components/CallMessage.tsx @@ -12,13 +12,19 @@ export function CallMessage(props: { role: VChatMessageIn['role'], sx?: SxProps, }) { + const isUserMessage = props.role === 'user'; return ( chatPanes.map(pane => { + return pane.conversationId ? 
ConversationManager.getHandler(pane.conversationId) : null; + }), [chatPanes]); + const setFocusedConversationId = React.useCallback((conversationId: DConversationId | null) => { conversationId && openConversationInFocusedPane(conversationId); }, [openConversationInFocusedPane]); @@ -148,6 +153,7 @@ export function AppChat() { } }, [focusedChatNumber, focusedChatTitle]); + // Execution const _handleExecute = React.useCallback(async (chatModeId: ChatModeId, conversationId: DConversationId, history: DMessage[]): Promise => { @@ -160,9 +166,12 @@ export function AppChat() { const chatCommand = extractChatCommand(lastMessage.text)[0]; if (chatCommand && chatCommand.type === 'cmd') { switch (chatCommand.providerId) { + case 'ass-beam': + return ConversationManager.getHandler(conversationId).beamStore.create(history); + case 'ass-browse': setMessages(conversationId, history); - return await runBrowseUpdatingState(conversationId, chatCommand.params!); + return await runBrowseGetPageUpdatingState(conversationId, chatCommand.params!); case 'ass-t2i': setMessages(conversationId, history); @@ -194,15 +203,26 @@ export function AppChat() { const helpMessage = createDMessage('assistant', 'Available Chat Commands:\n' + chatCommandsText); helpMessage.originLLM = Brand.Title.Base; return setMessages(conversationId, [...history, helpMessage]); + + default: + return setMessages(conversationId, [...history, createDMessage('assistant', 'This command is not supported.')]); } } } + // get the focused system purpose (note: we don't react to it, or it would invalidate half UI components..) 
+ const conversationSystemPurposeId = getConversationSystemPurposeId(conversationId); + if (!conversationSystemPurposeId) + return setMessages(conversationId, [...history, createDMessage('assistant', 'No persona selected.')]); + // synchronous long-duration tasks, which update the state as they go - if (chatLLMId && focusedSystemPurposeId) { + if (chatLLMId) { switch (chatModeId) { case 'generate-text': - return await runAssistantUpdatingState(conversationId, history, chatLLMId, focusedSystemPurposeId, getUXLabsHighPerformance() ? 0 : getInstantAppChatPanesCount()); + return await runAssistantUpdatingState(conversationId, history, chatLLMId, conversationSystemPurposeId, getUXLabsHighPerformance() ? 0 : getInstantAppChatPanesCount()); + + case 'generate-text-beam': + return ConversationManager.getHandler(conversationId).beamStore.create(history); case 'append-user': return setMessages(conversationId, history); @@ -228,7 +248,7 @@ export function AppChat() { // ISSUE: if we're here, it means we couldn't do the job, at least sync the history console.log('handleExecuteConversation: issue running', chatModeId, conversationId, lastMessage); setMessages(conversationId, history); - }, [focusedSystemPurposeId, setMessages]); + }, [setMessages]); const handleComposerAction = React.useCallback((chatModeId: ChatModeId, conversationId: DConversationId, multiPartMessage: ComposerOutputMultiPart): boolean => { // validate inputs @@ -263,8 +283,8 @@ export function AppChat() { return enqueued; }, [chatPanes, willMulticast, _handleExecute]); - const handleConversationExecuteHistory = React.useCallback(async (conversationId: DConversationId, history: DMessage[], effectBestOf: boolean): Promise => { - await _handleExecute(effectBestOf ? 
'generate-best-of' : 'generate-text', conversationId, history); + const handleConversationExecuteHistory = React.useCallback(async (conversationId: DConversationId, history: DMessage[], chatEffectBeam: boolean): Promise => { + await _handleExecute(!chatEffectBeam ? 'generate-text' : 'generate-text-beam', conversationId, history); }, [_handleExecute]); const handleMessageRegenerateLast = React.useCallback(async () => { @@ -295,6 +315,7 @@ export function AppChat() { await speakText(text); }, []); + // Chat actions const handleConversationNew = React.useCallback((forceNoRecycle?: boolean) => { @@ -302,7 +323,7 @@ export function AppChat() { // activate an existing new conversation if present, or create another const conversationId = (newConversationId && !forceNoRecycle) ? newConversationId - : prependNewConversation(focusedSystemPurposeId ?? undefined); + : prependNewConversation(getConversationSystemPurposeId(focusedConversationId) ?? undefined); setFocusedConversationId(conversationId); // if a folder is active, add the new conversation to the folder @@ -312,7 +333,7 @@ export function AppChat() { // focus the composer composerTextAreaRef.current?.focus(); - }, [activeFolderId, focusedSystemPurposeId, newConversationId, prependNewConversation, setFocusedConversationId]); + }, [activeFolderId, focusedConversationId, newConversationId, prependNewConversation, setFocusedConversationId]); const handleConversationImportDialog = React.useCallback(() => setTradeConfig({ dir: 'import' }), []); @@ -365,6 +386,7 @@ export function AppChat() { !!deleteConversationIds?.length && handleDeleteConversations(deleteConversationIds, true); }, [deleteConversationIds, handleDeleteConversations]); + // Shortcuts const handleOpenChatLlmOptions = React.useCallback(() => { @@ -387,7 +409,8 @@ export function AppChat() { ], [focusedConversationId, handleConversationBranch, handleConversationClear, handleConversationNew, handleDeleteConversations, handleMessageRegenerateLast, 
handleNavigateHistory, handleOpenChatLlmOptions, isFocusedChatEmpty]); useGlobalShortcuts(shortcuts); - // Pluggable ApplicationBar components + + // Pluggable Optima components const barAltTitle = showAltTitleBar ? focusedChatTitle ?? 'No Chat' : null; @@ -443,6 +466,7 @@ export function AppChat() { {chatPanes.map((pane, idx) => { const _paneConversationId = pane.conversationId; + const _paneChatHandler = chatHandlers[idx] ?? null; const _panesCount = chatPanes.length; const _keyAndId = `chat-pane-${idx}-${_paneConversationId}`; const _sepId = `sep-pane-${idx}-${_paneConversationId}`; @@ -499,10 +523,11 @@ export function AppChat() { - + {/**/} {/* Visibility and actions are handled via Context */} + + + {/* Best-Of Mode */} + + {/* Panel Separators & Resizers */} @@ -549,7 +589,7 @@ export function AppChat() { conversationId={focusedConversationId} capabilityHasT2I={capabilityHasT2I} isMulticast={!isMultiConversationId ? null : isComposerMulticast} - isDeveloperMode={focusedSystemPurposeId === 'Developer'} + isDeveloperMode={isFocusedChatDeveloper} onAction={handleComposerAction} onTextImagine={handleTextImagine} setIsMulticast={setIsComposerMulticast} diff --git a/src/apps/chat/commands/CommandsBeam.tsx b/src/apps/chat/commands/CommandsBeam.tsx new file mode 100644 index 000000000..4d82f26ea --- /dev/null +++ b/src/apps/chat/commands/CommandsBeam.tsx @@ -0,0 +1,17 @@ +import { ChatBeamIcon } from '~/common/components/icons/ChatBeamIcon'; +import { getUXLabsChatBeam } from '~/common/state/store-ux-labs'; + +import type { ICommandsProvider } from './ICommandsProvider'; + +export const CommandsBeam: ICommandsProvider = { + id: 'ass-beam', + rank: 9, + + getCommands: () => getUXLabsChatBeam() ? 
[{ + primary: '/beam', + arguments: ['prompt'], + description: 'Best of multiple replies', + Icon: ChatBeamIcon, + }] : [], + +}; diff --git a/src/apps/chat/commands/commands.registry.ts b/src/apps/chat/commands/commands.registry.ts index 2831881bd..76d27c538 100644 --- a/src/apps/chat/commands/commands.registry.ts +++ b/src/apps/chat/commands/commands.registry.ts @@ -1,13 +1,14 @@ import { ChatCommand, ICommandsProvider } from './ICommandsProvider'; import { CommandsAlter } from './CommandsAlter'; +import { CommandsBeam } from './CommandsBeam'; import { CommandsBrowse } from './CommandsBrowse'; import { CommandsDraw } from './CommandsDraw'; import { CommandsHelp } from './CommandsHelp'; import { CommandsReact } from './CommandsReact'; -export type CommandsProviderId = 'ass-browse' | 'ass-t2i' | 'ass-react' | 'chat-alter' | 'cmd-help'; +export type CommandsProviderId = 'ass-beam' | 'ass-browse' | 'ass-t2i' | 'ass-react' | 'chat-alter' | 'cmd-help'; type TextCommandPiece = | { type: 'text'; value: string; } @@ -15,6 +16,7 @@ type TextCommandPiece = const ChatCommandsProviders: Record = { + 'ass-beam': CommandsBeam, 'ass-browse': CommandsBrowse, 'ass-react': CommandsReact, 'ass-t2i': CommandsDraw, diff --git a/src/apps/chat/components/ChatDrawer.tsx b/src/apps/chat/components/ChatDrawer.tsx index 8f5ba93d4..fda21ea22 100644 --- a/src/apps/chat/components/ChatDrawer.tsx +++ b/src/apps/chat/components/ChatDrawer.tsx @@ -19,6 +19,7 @@ import { FoldersToggleOff } from '~/common/components/icons/FoldersToggleOff'; import { FoldersToggleOn } from '~/common/components/icons/FoldersToggleOn'; import { PageDrawerHeader } from '~/common/layout/optima/components/PageDrawerHeader'; import { PageDrawerList } from '~/common/layout/optima/components/PageDrawerList'; +import { capitalizeFirstLetter } from '~/common/util/textUtils'; import { themeScalingMap, themeZIndexOverMobileDrawer } from '~/common/app.theme'; import { useOptimaDrawers } from 
'~/common/layout/optima/useOptimaDrawers'; import { useUIPreferencesStore } from '~/common/state/store-ui'; @@ -27,6 +28,7 @@ import { ChatDrawerItemMemo, FolderChangeRequest } from './ChatDrawerItem'; import { ChatFolderList } from './folders/ChatFolderList'; import { ChatNavGrouping, useChatNavRenderItems } from './useChatNavRenderItems'; import { ClearFolderText } from './folders/useFolderDropdown'; +import { useChatShowRelativeSize } from '../store-app-chat'; // this is here to make shallow comparisons work on the next hook @@ -77,9 +79,10 @@ function ChatDrawer(props: { // external state const { closeDrawer, closeDrawerOnMobile } = useOptimaDrawers(); + const { showRelativeSize, toggleRelativeSize } = useChatShowRelativeSize(); const { activeFolder, allFolders, enableFolders, toggleEnableFolders } = useFolders(props.activeFolderId); const { filteredChatsCount, filteredChatIDs, filteredChatsAreEmpty, filteredChatsBarBasis, filteredChatsIncludeActive, renderNavItems } = useChatNavRenderItems( - props.activeConversationId, props.chatPanesConversationIds, debouncedSearchQuery, activeFolder, allFolders, navGrouping, + props.activeConversationId, props.chatPanesConversationIds, debouncedSearchQuery, activeFolder, allFolders, navGrouping, showRelativeSize, ); const { contentScaling, showSymbols } = useUIPreferencesStore(state => ({ contentScaling: state.contentScaling, @@ -140,22 +143,38 @@ function ChatDrawer(props: { const groupingComponent = React.useMemo(() => ( - + + + Group By + {(['date', 'persona'] as const).map(_gName => ( - setNavGrouping(grouping => grouping === _gName ? false : _gName)}> + setNavGrouping(grouping => grouping === _gName ? 
false : _gName)} + > {navGrouping === _gName && } - Group by {_gName} + {capitalizeFirstLetter(_gName)} ))} + + + Show + + + {showRelativeSize && } + Relative Size + - ), [navGrouping]); + ), [navGrouping, showRelativeSize, toggleRelativeSize]); return <> @@ -256,7 +275,7 @@ function ChatDrawer(props: { key={'nav-chat-' + item.conversationId} item={item} showSymbols={showSymbols} - bottomBarBasis={showSymbols ? filteredChatsBarBasis : 0} + bottomBarBasis={filteredChatsBarBasis} onConversationActivate={handleConversationActivate} onConversationBranch={onConversationBranch} onConversationDelete={handleConversationDeleteNoConfirmation} diff --git a/src/apps/chat/components/ChatDrawerItem.tsx b/src/apps/chat/components/ChatDrawerItem.tsx index 7e34e74ba..fed881895 100644 --- a/src/apps/chat/components/ChatDrawerItem.tsx +++ b/src/apps/chat/components/ChatDrawerItem.tsx @@ -2,7 +2,7 @@ import * as React from 'react'; import { Avatar, Box, IconButton, ListItem, ListItemButton, ListItemDecorator, Sheet, styled, Tooltip, Typography } from '@mui/joy'; import AutoFixHighIcon from '@mui/icons-material/AutoFixHigh'; -import CloseIcon from '@mui/icons-material/Close'; +import CloseRoundedIcon from '@mui/icons-material/CloseRounded'; import DeleteForeverIcon from '@mui/icons-material/DeleteForever'; import DeleteOutlineIcon from '@mui/icons-material/DeleteOutline'; import EditIcon from '@mui/icons-material/Edit'; @@ -168,8 +168,7 @@ function ChatDrawerItem(props: { const textSymbol = SystemPurposes[systemPurposeId]?.symbol || '❓'; - const progress = props.bottomBarBasis ? 100 * (searchFrequency ?? messageCount) / props.bottomBarBasis : 0; - + const progress = props.bottomBarBasis ? 
100 * (searchFrequency || messageCount) / props.bottomBarBasis : 0; const titleRowComponent = React.useMemo(() => <> @@ -234,7 +233,7 @@ function ChatDrawerItem(props: { const progressBarFixedComponent = React.useMemo(() => progress > 0 && ( ), [progress]); @@ -312,17 +311,17 @@ function ChatDrawerItem(props: { - - - - - - + + + + + + } } @@ -342,7 +341,7 @@ function ChatDrawerItem(props: { - {deleteArmed ? : } + {deleteArmed ? : } {/*}*/} diff --git a/src/apps/chat/components/ChatMessageList.tsx b/src/apps/chat/components/ChatMessageList.tsx index bbbafae99..f32dbfcfb 100644 --- a/src/apps/chat/components/ChatMessageList.tsx +++ b/src/apps/chat/components/ChatMessageList.tsx @@ -6,14 +6,18 @@ import { SxProps } from '@mui/joy/styles/types'; import type { DiagramConfig } from '~/modules/aifn/digrams/DiagramsModal'; +import type { ConversationHandler } from '~/common/chats/ConversationHandler'; import { InlineError } from '~/common/components/InlineError'; import { PreferencesTab, useOptimaLayout } from '~/common/layout/optima/useOptimaLayout'; import { ShortcutKeyName, useGlobalShortcut } from '~/common/components/useGlobalShortcut'; import { createDMessage, DConversationId, DMessage, getConversation, useChatStore } from '~/common/state/store-chats'; +import { useBrowserTranslationWarning } from '~/common/components/useIsBrowserTranslating'; import { useCapabilityElevenLabs } from '~/common/components/useCapabilities'; +import { useEphemerals } from '~/common/chats/EphemeralsStore'; import { ChatMessage, ChatMessageMemo } from './message/ChatMessage'; import { CleanerMessage, MessagesSelectionHeader } from './message/CleanerMessage'; +import { Ephemerals } from './Ephemerals'; import { PersonaSelector } from './persona-selector/PersonaSelector'; import { useChatShowSystemMessages } from '../store-app-chat'; import { useScrollToBottom } from './scroll-to-bottom/useScrollToBottom'; @@ -24,12 +28,13 @@ import { useScrollToBottom } from 
'./scroll-to-bottom/useScrollToBottom'; */ export function ChatMessageList(props: { conversationId: DConversationId | null, + conversationHandler: ConversationHandler | null, capabilityHasT2I: boolean, chatLLMContextTokens: number | null, + fitScreen: boolean, isMessageSelectionMode: boolean, - isMobile: boolean, onConversationBranch: (conversationId: DConversationId, messageId: string) => void, - onConversationExecuteHistory: (conversationId: DConversationId, history: DMessage[], chatEffectBestOf: boolean) => Promise, + onConversationExecuteHistory: (conversationId: DConversationId, history: DMessage[], chatEffectBeam: boolean) => Promise, onTextDiagram: (diagramConfig: DiagramConfig | null) => void, onTextImagine: (conversationId: DConversationId, selectedText: string) => Promise, onTextSpeak: (selectedText: string) => Promise, @@ -46,6 +51,7 @@ export function ChatMessageList(props: { const { notifyBooting } = useScrollToBottom(); const { openPreferencesTab } = useOptimaLayout(); const [showSystemMessages] = useChatShowSystemMessages(); + const optionalTranslationWarning = useBrowserTranslationWarning(); const { conversationMessages, historyTokenCount, editMessage, deleteMessage, setMessages } = useChatStore(state => { const conversation = state.conversations.find(conversation => conversation.id === props.conversationId); return { @@ -56,6 +62,7 @@ export function ChatMessageList(props: { setMessages: state.setMessages, }; }, shallow); + const ephemerals = useEphemerals(props.conversationHandler); const { mayWork: isSpeakable } = useCapabilityElevenLabs(); // derived state @@ -75,11 +82,11 @@ export function ChatMessageList(props: { conversationId && onConversationBranch(conversationId, messageId); }, [conversationId, onConversationBranch]); - const handleConversationRestartFrom = React.useCallback(async (messageId: string, offset: number, chatEffectBestOf: boolean) => { + const handleConversationRestartFrom = React.useCallback(async (messageId: string, offset: 
number, chatEffectBeam: boolean) => { const messages = getConversation(conversationId)?.messages; if (messages) { const truncatedHistory = messages.slice(0, messages.findIndex(m => m.id === messageId) + offset + 1); - conversationId && await onConversationExecuteHistory(conversationId, truncatedHistory, chatEffectBestOf); + conversationId && await onConversationExecuteHistory(conversationId, truncatedHistory, chatEffectBeam); } }, [conversationId, onConversationExecuteHistory]); @@ -196,6 +203,8 @@ export function ChatMessageList(props: { // marginBottom: '-1px', }}> + {optionalTranslationWarning} + {props.isMessageSelectionMode && ( 0} @@ -226,9 +235,9 @@ export function ChatMessageList(props: { key={'msg-' + message.id} message={message} diffPreviousText={message === diffTargetMessage ? diffPrevText : undefined} + fitScreen={props.fitScreen} isBottom={idx === count - 1} isImagining={isImagining} - isMobile={props.isMobile} isSpeaking={isSpeaking} onConversationBranch={handleConversationBranch} onConversationRestartFrom={handleConversationRestartFrom} @@ -244,6 +253,18 @@ export function ChatMessageList(props: { }, )} + {!!ephemerals.length && ( + + )} + ); } \ No newline at end of file diff --git a/src/apps/chat/components/ChatPageMenuItems.tsx b/src/apps/chat/components/ChatPageMenuItems.tsx index e7413f08c..87d24da18 100644 --- a/src/apps/chat/components/ChatPageMenuItems.tsx +++ b/src/apps/chat/components/ChatPageMenuItems.tsx @@ -127,11 +127,9 @@ export function ChatPageMenuItems(props: { - + {props.isMessageSelectionMode ? : } - - Cleanup ... - + Cleanup ... 
diff --git a/src/apps/chat/components/Ephemerals.tsx b/src/apps/chat/components/Ephemerals.tsx index d1a324856..d7c2ca563 100644 --- a/src/apps/chat/components/Ephemerals.tsx +++ b/src/apps/chat/components/Ephemerals.tsx @@ -1,11 +1,12 @@ import * as React from 'react'; -import { shallow } from 'zustand/shallow'; import { Box, Grid, IconButton, Sheet, styled, Typography } from '@mui/joy'; import { SxProps } from '@mui/joy/styles/types'; -import CloseIcon from '@mui/icons-material/Close'; +import CloseRoundedIcon from '@mui/icons-material/CloseRounded'; -import { DConversationId, DEphemeral, useChatStore } from '~/common/state/store-chats'; +import { ConversationManager } from '~/common/chats/ConversationHandler'; +import { DConversationId } from '~/common/state/store-chats'; +import { DEphemeral } from '~/common/chats/EphemeralsStore'; import { lineHeightChatTextMd } from '~/common/app.theme'; @@ -75,6 +76,11 @@ function StateRenderer(props: { state: object }) { function EphemeralItem({ conversationId, ephemeral }: { conversationId: string, ephemeral: DEphemeral }) { + + const handleDelete = React.useCallback(() => { + ConversationManager.getHandler(conversationId).ephemeralsStore.delete(ephemeral.id); + }, [conversationId, ephemeral.id]); + return useChatStore.getState().deleteEphemeral(conversationId, ephemeral.id)} + onClick={handleDelete} sx={{ position: 'absolute', top: 8, right: 8, opacity: { xs: 1, sm: 0.5 }, transition: 'opacity 0.3s', }}> - + ; @@ -130,19 +136,22 @@ function EphemeralItem({ conversationId, ephemeral }: { conversationId: string, // `); -export function Ephemerals(props: { conversationId: DConversationId | null, sx?: SxProps }) { +export function Ephemerals(props: { ephemerals: DEphemeral[], conversationId: DConversationId | null, sx?: SxProps }) { // global state - const ephemerals = useChatStore(state => { - const conversation = state.conversations.find(conversation => conversation.id === props.conversationId); - return conversation ? 
conversation.ephemerals : []; - }, shallow); + // const ephemerals = useChatStore(state => { + // const conversation = state.conversations.find(conversation => conversation.id === props.conversationId); + // return conversation ? conversation.ephemerals : []; + // }, shallow); - if (!ephemerals?.length) return null; + const ephemerals = props.ephemerals; + // if (!ephemerals?.length) return null; return ( + + {/* Issues */} + {!!config.configError && ( + + {config.configError} + + )} + + {/* Models, [x] all same, */} + + + {allChatLlmComponent} + + + {!!lastMessage && ( + + {lastMessage.text} + + // + )} + + + {/* Grid */} + + + b + + + a + + + a + + + a + + + + {/* Auto-Gatherer: All-in-one, Best-Of */} + + Gatherer + + + + + + {/*{JSON.stringify(config, null, 2)}*/} + + + + + + + + + a + + + + + ); +} \ No newline at end of file diff --git a/src/apps/chat/components/composer/ChatModeMenu.tsx b/src/apps/chat/components/composer/ChatModeMenu.tsx index dae63f7db..b1d21fdb5 100644 --- a/src/apps/chat/components/composer/ChatModeMenu.tsx +++ b/src/apps/chat/components/composer/ChatModeMenu.tsx @@ -7,6 +7,7 @@ import { KeyStroke } from '~/common/components/KeyStroke'; import { useUIPreferencesStore } from '~/common/state/store-ui'; import { ChatModeId } from '../../AppChat'; +import { useUXLabsStore } from '~/common/state/store-ux-labs'; interface ChatModeDescription { @@ -31,7 +32,7 @@ const ChatModeItems: { [key in ChatModeId]: ChatModeDescription } = { description: 'AI Image Generation', requiresTTI: true, }, - 'generate-best-of': { + 'generate-text-beam': { label: 'Best-Of', // Best of, Auto-Prime, Top Pick, Select Best description: 'Smarter: best of multiple replies', }, @@ -55,6 +56,7 @@ export function ChatModeMenu(props: { }) { // external state + const labsChatBeam = useUXLabsStore(state => state.labsChatBeam); const enterIsNewline = useUIPreferencesStore(state => state.enterIsNewline); return ( @@ -72,6 +74,7 @@ export function ChatModeMenu(props: { {/* 
ChatMode items */} {Object.entries(ChatModeItems) + .filter(([key, data]) => key !== 'generate-text-beam' || labsChatBeam) .map(([key, data]) => props.onSetChatModeId(key as ChatModeId)}> diff --git a/src/apps/chat/components/composer/Composer.tsx b/src/apps/chat/components/composer/Composer.tsx index ad11e546a..063189c0a 100644 --- a/src/apps/chat/components/composer/Composer.tsx +++ b/src/apps/chat/components/composer/Composer.tsx @@ -23,7 +23,7 @@ import type { DLLM } from '~/modules/llms/store-llms'; import type { LLMOptionsOpenAI } from '~/modules/llms/vendors/openai/openai.vendor'; import { useBrowseCapability } from '~/modules/browse/store-module-browsing'; -import { ChatBestOfIcon } from '~/common/components/icons/ChatBestOfIcon'; +import { ChatBeamIcon } from '~/common/components/icons/ChatBeamIcon'; import { DConversationId, useChatStore } from '~/common/state/store-chats'; import { PreferencesTab, useOptimaLayout } from '~/common/layout/optima/useOptimaLayout'; import { SpeechResult, useSpeechRecognition } from '~/common/components/useSpeechRecognition'; @@ -464,8 +464,8 @@ export function Composer(props: { const isText = chatModeId === 'generate-text'; + const isTextBeam = chatModeId === 'generate-text-beam'; const isAppend = chatModeId === 'append-user'; - const isBestOf = chatModeId === 'generate-best-of'; const isReAct = chatModeId === 'generate-react'; const isDraw = chatModeId === 'generate-image'; @@ -474,14 +474,14 @@ export function Composer(props: { const buttonColor: ColorPaletteProp = assistantAbortible ? 'warning' : isReAct ? 'success' - : isBestOf ? 'success' + : isTextBeam ? 'success' : isDraw ? 'warning' : 'primary'; const buttonText = isAppend ? 'Write' : isReAct ? 'ReAct' - : isBestOf ? 'Best-Of' + : isTextBeam ? 'Best-Of' : isDraw ? 'Draw' : 'Chat'; @@ -489,14 +489,14 @@ export function Composer(props: { micContinuation ? : isAppend ? : isReAct ? - : isBestOf ? /* */ + : isTextBeam ? /* */ : isDraw ? 
: ; let textPlaceholder: string = isDraw ? 'Describe an idea or a drawing...' : isReAct ? 'Multi-step reasoning question...' - : isBestOf ? 'Multi-chat with this persona...' + : isTextBeam ? 'Multi-chat with this persona...' : props.isDeveloperMode ? 'Chat with me' + (isDesktop ? ' · drop source' : '') + ' · attach code...' : props.capabilityHasT2I ? 'Chat · /react · /draw · drop files...' : 'Chat · /react · drop files...'; diff --git a/src/apps/chat/components/composer/actile/ActilePopup.tsx b/src/apps/chat/components/composer/actile/ActilePopup.tsx index b68a14005..c871d673a 100644 --- a/src/apps/chat/components/composer/actile/ActilePopup.tsx +++ b/src/apps/chat/components/composer/actile/ActilePopup.tsx @@ -64,7 +64,7 @@ export function ActilePopup(props: { - {labelBold}{labelNormal} + {labelBold}{labelNormal} {item.argument && {item.argument} diff --git a/src/apps/chat/components/composer/attachments/AttachmentItem.tsx b/src/apps/chat/components/composer/attachments/AttachmentItem.tsx index cd3779e9a..cd54ff8dd 100644 --- a/src/apps/chat/components/composer/attachments/AttachmentItem.tsx +++ b/src/apps/chat/components/composer/attachments/AttachmentItem.tsx @@ -184,7 +184,6 @@ export function AttachmentItem(props: { border: variant === 'soft' ? '1px solid' : undefined, borderColor: variant === 'soft' ? 
`${color}.solidBg` : undefined, borderRadius: 'sm', - fontWeight: 'normal', ...ATTACHMENT_MIN_STYLE, px: 1, py: 0.5, display: 'flex', flexDirection: 'row', gap: 1, diff --git a/src/apps/chat/components/folders/AddFolderButton.tsx b/src/apps/chat/components/folders/AddFolderButton.tsx index 1bd0d2046..bcdf2ec36 100644 --- a/src/apps/chat/components/folders/AddFolderButton.tsx +++ b/src/apps/chat/components/folders/AddFolderButton.tsx @@ -43,7 +43,7 @@ export function AddFolderButton() { sx={{ ml: -1.5, mr: -0.5, flexGrow: 1, minWidth: 100 }} /> {/**/} - {/* */} + {/* */} {/**/} ) : ( @@ -51,8 +51,9 @@ export function AddFolderButton() { diff --git a/src/apps/chat/components/folders/FolderListItem.tsx b/src/apps/chat/components/folders/FolderListItem.tsx index fedc2e412..c88591e75 100644 --- a/src/apps/chat/components/folders/FolderListItem.tsx +++ b/src/apps/chat/components/folders/FolderListItem.tsx @@ -1,8 +1,8 @@ import React, { useState } from 'react'; import type { DraggableProvided, DraggableStateSnapshot, DraggingStyle, NotDraggingStyle } from 'react-beautiful-dnd'; -import { FormLabel, IconButton, ListItem, ListItemButton, ListItemContent, ListItemDecorator, MenuItem, Radio, radioClasses, RadioGroup, Sheet, Typography } from '@mui/joy'; -import CloseIcon from '@mui/icons-material/Close'; +import { FormLabel, IconButton, ListItem, ListItemButton, ListItemContent, ListItemDecorator, MenuItem, Radio, radioClasses, RadioGroup, Sheet } from '@mui/joy'; +import CloseRoundedIcon from '@mui/icons-material/CloseRounded'; import DeleteOutlineIcon from '@mui/icons-material/DeleteOutline'; import Done from '@mui/icons-material/Done'; import EditIcon from '@mui/icons-material/Edit'; @@ -193,7 +193,7 @@ export function FolderListItem(props: { onClick={handleMenuOpen} sx={{ visibility: 'hidden', - my: '-0.25rem' /* absorb the button padding */ + my: '-0.25rem', /* absorb the button padding */ }} > @@ -230,7 +230,7 @@ export function FolderListItem(props: { <> - + Cancel @@ 
-257,7 +257,7 @@ export function FolderListItem(props: { sx={{ mb: 1.5, fontSize: 'xs', - fontWeight: 'xl', + fontWeight: 'xl', /* 700: this COLOR labels stands out positively */ letterSpacing: '0.1em', textTransform: 'uppercase', }} diff --git a/src/apps/chat/components/message/ChatMessage.tsx b/src/apps/chat/components/message/ChatMessage.tsx index 69bc48104..2657bc40a 100644 --- a/src/apps/chat/components/message/ChatMessage.tsx +++ b/src/apps/chat/components/message/ChatMessage.tsx @@ -1,6 +1,7 @@ import * as React from 'react'; import { shallow } from 'zustand/shallow'; +import type { SxProps } from '@mui/joy/styles/types'; import { Avatar, Box, CircularProgress, IconButton, ListDivider, ListItem, ListItemDecorator, MenuItem, Switch, Tooltip, Typography } from '@mui/joy'; import AccountTreeIcon from '@mui/icons-material/AccountTree'; import ClearIcon from '@mui/icons-material/Clear'; @@ -23,7 +24,7 @@ import { SystemPurposeId, SystemPurposes } from '../../../../data'; import { BlocksRenderer, editBlocksSx } from '~/modules/blocks/BlocksRenderer'; import { useSanityTextDiffs } from '~/modules/blocks/RenderTextDiff'; -import { ChatBestOfIcon } from '~/common/components/icons/ChatBestOfIcon'; +import { ChatBeamIcon } from '~/common/components/icons/ChatBeamIcon'; import { CloseableMenu } from '~/common/components/CloseableMenu'; import { DMessage } from '~/common/state/store-chats'; import { InlineTextarea } from '~/common/components/InlineTextarea'; @@ -33,6 +34,7 @@ import { copyToClipboard } from '~/common/util/clipboardUtils'; import { cssRainbowColorKeyframes, themeScalingMap } from '~/common/app.theme'; import { prettyBaseModel } from '~/common/util/modelUtils'; import { useUIPreferencesStore } from '~/common/state/store-ui'; +import { useUXLabsStore } from '~/common/state/store-ux-labs'; import { useChatShowTextDiff } from '../../store-app-chat'; @@ -51,7 +53,7 @@ export function messageBackground(messageRole: DMessage['role'] | string, wasEdi case 
'assistant': return unknownAssistantIssue ? 'danger.softBg' : 'background.surface'; case 'system': - return wasEdited ? 'warning.softHoverBg' : 'background.surface'; + return wasEdited ? 'warning.softHoverBg' : 'neutral.softBg'; default: return '#ff0000'; } @@ -183,19 +185,20 @@ export const ChatMessageMemo = React.memo(ChatMessage); export function ChatMessage(props: { message: DMessage, diffPreviousText?: string, + fitScreen: boolean, isBottom?: boolean, - isMobile: boolean, isImagining?: boolean, isSpeaking?: boolean, blocksShowDate?: boolean, onConversationBranch?: (messageId: string) => void, - onConversationRestartFrom?: (messageId: string, offset: number, chatEffectBestOf: boolean) => Promise, + onConversationRestartFrom?: (messageId: string, offset: number, chatEffectBeam: boolean) => Promise, onConversationTruncate?: (messageId: string) => void, onMessageDelete?: (messageId: string) => void, onMessageEdit?: (messageId: string, text: string) => void, onTextDiagram?: (messageId: string, text: string) => Promise onTextImagine?: (text: string) => Promise onTextSpeak?: (text: string) => Promise + sx?: SxProps, }) { // state @@ -206,6 +209,7 @@ export function ChatMessage(props: { const [isEditing, setIsEditing] = React.useState(false); // external state + const labsChatBeam = useUXLabsStore(state => state.labsChatBeam); const { cleanerLooks, contentScaling, doubleClickToEdit, renderMarkdown } = useUIPreferencesStore(state => ({ cleanerLooks: state.zenMode === 'cleaner', contentScaling: state.contentScaling, @@ -280,10 +284,10 @@ export function ChatMessage(props: { props.onConversationRestartFrom && await props.onConversationRestartFrom(messageId, fromAssistant ? 
-1 : 0, false); }; - const handleOpsConversationRestartBestOf = async (e: React.MouseEvent) => { + const handleOpsConversationRestartFromBeam = async (e: React.MouseEvent) => { e.stopPropagation(); closeOpsMenu(); - props.onConversationRestartFrom && await props.onConversationRestartFrom(messageId, fromAssistant ? -1 : 0, true); + props.onConversationRestartFrom && labsChatBeam && await props.onConversationRestartFrom(messageId, fromAssistant ? -1 : 0, true); }; const handleOpsToggleShowDiff = () => setShowDiff(!showDiff); @@ -412,6 +416,7 @@ export function ChatMessage(props: { borderBottomColor: 'divider', ...(ENABLE_COPY_MESSAGE_OVERLAY && { position: 'relative' }), '&:hover > button': { opacity: 1 }, + ...props.sx, }} > @@ -430,7 +435,7 @@ export function ChatMessage(props: { > {isHovering ? ( - + ) : ( @@ -468,8 +473,8 @@ export function ChatMessage(props: { fromRole={messageRole} contentScaling={contentScaling} errorMessage={errorMessage} + fitScreen={props.fitScreen} isBottom={props.isBottom} - isMobile={props.isMobile} renderTextAsMarkdown={renderMarkdown} renderTextDiff={textDiffs || undefined} showDate={props.blocksShowDate === true ? 
messageUpdated || messageCreated || undefined : undefined} @@ -504,6 +509,15 @@ export function ChatMessage(props: { open anchorEl={opsMenuAnchor} onClose={closeOpsMenu} sx={{ minWidth: 280 }} > + + {fromSystem && ( + + + System message + + + )} + {/* Edit / Copy */} {!!props.onMessageEdit && ( @@ -585,16 +599,18 @@ export function ChatMessage(props: { Retry } - - - {/**/} - - + {labsChatBeam && ( + + + {/**/} + + + )} )} diff --git a/src/apps/chat/components/persona-selector/PersonaSelector.tsx b/src/apps/chat/components/persona-selector/PersonaSelector.tsx index c9a577789..84adbde16 100644 --- a/src/apps/chat/components/persona-selector/PersonaSelector.tsx +++ b/src/apps/chat/components/persona-selector/PersonaSelector.tsx @@ -51,7 +51,7 @@ function Tile(props: { sx={{ aspectRatio: 1, height: `${tileSize}rem`, - fontWeight: 500, + fontWeight: 'md', ...((props.isEditMode || !props.isActive) ? { boxShadow: props.isHighlighted ? '0 2px 8px -2px rgb(var(--joy-palette-primary-mainChannel) / 50%)' : 'sm', backgroundColor: props.isHighlighted ? undefined : 'background.surface', @@ -59,6 +59,9 @@ function Tile(props: { backgroundImage: `linear-gradient(rgba(255 255 255 /0.85), rgba(255 255 255 /1)), url(${props.imageUrl})`, backgroundPosition: 'center', backgroundSize: 'cover', + '&:hover': { + backgroundImage: 'none', + }, }), } : {}), flexDirection: 'column', gap: 1, @@ -126,6 +129,8 @@ export function PersonaSelector(props: { conversationId: DConversationId, runExa // derived state + const isCustomPurpose = systemPurposeId === 'Custom'; + const { selectedPurpose, fourExamples } = React.useMemo(() => { const selectedPurpose: SystemPurposeData | null = systemPurposeId ? (SystemPurposes[systemPurposeId] ?? 
null) : null; // const selectedExample = selectedPurpose?.examples?.length @@ -306,13 +311,15 @@ export function PersonaSelector(props: { conversationId: DConversationId, runExa : selectedPurpose?.description || 'No description available'} {/* Examples Toggle */} + {/**/} {fourExamples && showExamplescomponent} - {showPromptComponent} + {!isCustomPurpose && showPromptComponent} + {/**/} {/* [row -3] Example incipits */} {systemPurposeId !== 'Custom' && ( - + {showExamples && ( )} - {showPrompt && ( + {(!isCustomPurpose && showPrompt) && ( diff --git a/src/apps/chat/components/persona-selector/store-purposes.ts b/src/apps/chat/components/persona-selector/store-purposes.ts index d36e12c03..271c67ca4 100644 --- a/src/apps/chat/components/persona-selector/store-purposes.ts +++ b/src/apps/chat/components/persona-selector/store-purposes.ts @@ -18,7 +18,7 @@ export const usePurposeStore = create()( (set) => ({ // default state - hiddenPurposeIDs: ['Designer'], + hiddenPurposeIDs: ['Developer', 'Designer'], toggleHiddenPurposeId: (purposeId: string) => { set(state => { @@ -34,5 +34,18 @@ export const usePurposeStore = create()( }), { name: 'app-purpose', + + /* versioning: + * 1: hide 'Developer' as 'DeveloperPreview' is best + */ + version: 1, + + migrate: (state: any, fromVersion: number): PurposeStore => { + // 0 -> 1: rename 'enterToSend' to 'enterIsNewline' (flip the meaning) + if (state && fromVersion === 0) + if (!state.hiddenPurposeIDs.includes('Developer')) + state.hiddenPurposeIDs.push('Developer'); + return state; + }, }), ); \ No newline at end of file diff --git a/src/apps/chat/components/useChatNavRenderItems.tsx b/src/apps/chat/components/useChatNavRenderItems.tsx index edb2f798d..87d0d9225 100644 --- a/src/apps/chat/components/useChatNavRenderItems.tsx +++ b/src/apps/chat/components/useChatNavRenderItems.tsx @@ -7,7 +7,6 @@ import type { ChatNavigationItemData } from './ChatDrawerItem'; // configuration -const AUTO_UNDERLINE_COUNT = 40; const 
SEARCH_MIN_CHARS = 3; @@ -79,6 +78,7 @@ export function useChatNavRenderItems( activeFolder: DFolder | null, allFolders: DFolder[], grouping: ChatNavGrouping, + showRelativeSize: boolean, ): { renderNavItems: ChatRenderItemData[], filteredChatIDs: DConversationId[], @@ -185,7 +185,7 @@ export function useChatNavRenderItems( const filteredChatIDs = chatNavItems.map(_c => _c.conversationId); const filteredChatsCount = chatNavItems.length; const filteredChatsAreEmpty = !filteredChatsCount || (filteredChatsCount === 1 && chatNavItems[0].isEmpty); - const filteredChatsBarBasis = (filteredChatsCount >= AUTO_UNDERLINE_COUNT || isSearching) + const filteredChatsBarBasis = ((showRelativeSize && filteredChatsCount >= 2) || isSearching) ? chatNavItems.reduce((longest, _c) => Math.max(longest, isSearching ? _c.searchFrequency : _c.messageCount), 1) : 0; @@ -202,7 +202,9 @@ export function useChatNavRenderItems( // we only compare the renderNavItems array, which shall be changed if the rest changes return a.renderNavItems.length === b.renderNavItems.length && a.renderNavItems.every((_a, i) => shallow(_a, b.renderNavItems[i])) - && shallow(a.filteredChatIDs, b.filteredChatIDs); + && shallow(a.filteredChatIDs, b.filteredChatIDs) + // we also compare this, as it changes with a parameter + && a.filteredChatsBarBasis === b.filteredChatsBarBasis; }, ); } \ No newline at end of file diff --git a/src/apps/chat/components/usePersonaDropdown.tsx b/src/apps/chat/components/usePersonaDropdown.tsx index b3394900d..dfee583a5 100644 --- a/src/apps/chat/components/usePersonaDropdown.tsx +++ b/src/apps/chat/components/usePersonaDropdown.tsx @@ -7,6 +7,8 @@ import { DConversationId, useChatStore } from '~/common/state/store-chats'; import { PageBarDropdownMemo } from '~/common/layout/optima/components/PageBarDropdown'; import { useUIPreferencesStore } from '~/common/state/store-ui'; +import { usePurposeStore } from './persona-selector/store-purposes'; + function PersonaDropdown(props: { 
systemPurposeId: SystemPurposeId | null, @@ -14,11 +16,23 @@ function PersonaDropdown(props: { }) { // external state + const hiddenPurposeIDs = usePurposeStore(state => state.hiddenPurposeIDs); const { zenMode } = useUIPreferencesStore(state => ({ zenMode: state.zenMode, }), shallow); + // filter by key in the object - must be missing the system purpose ids hidden by the user, or be the currently active one + const visibleSystemPurposes = React.useMemo(() => { + return Object.keys(SystemPurposes) + .filter(key => !hiddenPurposeIDs.includes(key as SystemPurposeId) || key === props.systemPurposeId) + .reduce((obj, key) => { + obj[key as SystemPurposeId] = SystemPurposes[key as SystemPurposeId]; + return obj; + }, {} as typeof SystemPurposes); + }, [hiddenPurposeIDs, props.systemPurposeId]); + + const { setSystemPurposeId } = props; const handleSystemPurposeChange = React.useCallback((value: string | null) => { @@ -28,7 +42,7 @@ function PersonaDropdown(props: { return ( { + const cHandler = ConversationManager.getHandler(conversationId); -export const runBrowseUpdatingState = async (conversationId: string, url: string) => { - - const { editMessage } = useChatStore.getState(); - - // create a blank and 'typing' message for the assistant - to be filled when we're done - // const assistantModelStr = 'react-' + assistantModelId.slice(4, 7); // HACK: this is used to change the Avatar animation // noinspection HttpUrlsUsage const shortUrl = url.replace('https://www.', '').replace('https://', '').replace('http://', '').replace('www.', ''); - const assistantMessageId = createAssistantTypingMessage(conversationId, 'web', undefined, `Loading page at ${shortUrl}...`); - const updateAssistantMessage = (update: Partial) => editMessage(conversationId, assistantMessageId, update, false); + const assistantMessageId = cHandler.messageAppendAssistant(`Loading page at ${shortUrl}...`, 'web', undefined); try { - const page = await callBrowseFetchPage(url); - if (!page.content) { - // 
noinspection ExceptionCaughtLocallyJS - throw new Error('No text found.'); - } - updateAssistantMessage({ - text: page.content, - typing: false, - }); - + cHandler.messageEdit(assistantMessageId, { text: page.content || 'Issue: page load did not produce an answer: no text found', typing: false }, true); } catch (error: any) { console.error(error); - updateAssistantMessage({ - text: 'Issue: browse did not produce an answer (error: ' + (error?.message || error?.toString() || 'unknown') + ').', - typing: false, - }); + cHandler.messageEdit(assistantMessageId, { text: 'Issue: browse did not produce an answer (error: ' + (error?.message || error?.toString() || 'unknown') + ').', typing: false }, true); } }; \ No newline at end of file diff --git a/src/apps/chat/editors/chat-stream.ts b/src/apps/chat/editors/chat-stream.ts index 3501f4647..689597bea 100644 --- a/src/apps/chat/editors/chat-stream.ts +++ b/src/apps/chat/editors/chat-stream.ts @@ -1,34 +1,35 @@ -import { DLLMId } from '~/modules/llms/store-llms'; +import type { DLLMId } from '~/modules/llms/store-llms'; +import type { StreamingClientUpdate } from '~/modules/llms/vendors/unifiedStreamingClient'; import { SystemPurposeId } from '../../../data'; import { autoSuggestions } from '~/modules/aifn/autosuggestions/autoSuggestions'; import { conversationAutoTitle } from '~/modules/aifn/autotitle/autoTitle'; import { llmStreamingChatGenerate } from '~/modules/llms/llm.client'; import { speakText } from '~/modules/elevenlabs/elevenlabs.client'; -import { DMessage, useChatStore } from '~/common/state/store-chats'; +import type { DMessage } from '~/common/state/store-chats'; +import { ConversationManager } from '~/common/chats/ConversationHandler'; import { ChatAutoSpeakType, getChatAutoAI } from '../store-app-chat'; -import { createAssistantTypingMessage, updatePurposeInHistory } from './editors'; /** * The main "chat" function. TODO: this is here so we can soon move it to the data model. 
*/ export async function runAssistantUpdatingState(conversationId: string, history: DMessage[], assistantLlmId: DLLMId, systemPurpose: SystemPurposeId, parallelViewCount: number) { + const cHandler = ConversationManager.getHandler(conversationId); // ai follow-up operations (fire/forget) const { autoSpeak, autoSuggestDiagrams, autoSuggestQuestions, autoTitleChat } = getChatAutoAI(); // update the system message from the active Purpose, if not manually edited - history = updatePurposeInHistory(conversationId, history, assistantLlmId, systemPurpose); + history = cHandler.resyncPurposeInHistory(history, assistantLlmId, systemPurpose); // create a blank and 'typing' message for the assistant - const assistantMessageId = createAssistantTypingMessage(conversationId, assistantLlmId, history[0].purposeId, '...'); + const assistantMessageId = cHandler.messageAppendAssistant('...', assistantLlmId, history[0].purposeId); // when an abort controller is set, the UI switches to the "stop" mode - const controller = new AbortController(); - const { startTyping, editMessage } = useChatStore.getState(); - startTyping(conversationId, controller); + const abortController = new AbortController(); + cHandler.setAbortController(abortController); // stream the assistant's messages await streamAssistantMessage( @@ -36,12 +37,12 @@ export async function runAssistantUpdatingState(conversationId: string, history: history, parallelViewCount, autoSpeak, - (updatedMessage) => editMessage(conversationId, assistantMessageId, updatedMessage, false), - controller.signal, + (update) => cHandler.messageEdit(assistantMessageId, update, false), + abortController.signal, ); // clear to send, again - startTyping(conversationId, null); + cHandler.setAbortController(null); if (autoTitleChat) { // fire/forget, this will only set the title if it's not already set @@ -58,7 +59,7 @@ async function streamAssistantMessage( history: DMessage[], throttleUnits: number, // 0: disable, 1: default throttle (12Hz), 2+ 
reduce the message frequency with the square root autoSpeak: ChatAutoSpeakType, - editMessage: (updatedMessage: Partial) => void, + editMessage: (update: Partial) => void, abortSignal: AbortSignal, ) { @@ -67,7 +68,6 @@ async function streamAssistantMessage( const messages = history.map(({ role, text }) => ({ role, content: text })); - const incrementalAnswer: Partial = { text: '' }; // Throttling setup let lastCallTime = 0; @@ -83,33 +83,34 @@ async function streamAssistantMessage( } } + const incrementalAnswer: Partial = { text: '' }; + try { - await llmStreamingChatGenerate(llmId, messages, null, null, abortSignal, - ({ originLLM, textSoFar, typing }) => { - - // grow the incremental message - if (originLLM) incrementalAnswer.originLLM = originLLM; - if (textSoFar) incrementalAnswer.text = textSoFar; - if (typing !== undefined) incrementalAnswer.typing = typing; - - // Update the data store, with optional max-frequency throttling (e.g. OpenAI is downsamped 50 -> 12Hz) - // This can be toggled from the settings - throttledEditMessage(incrementalAnswer); - - // 📢 TTS: first-line - if (textSoFar && autoSpeak === 'firstLine' && !spokenLine) { - let cutPoint = textSoFar.lastIndexOf('\n'); - if (cutPoint < 0) - cutPoint = textSoFar.lastIndexOf('. '); - if (cutPoint > 100 && cutPoint < 400) { - spokenLine = true; - const firstParagraph = textSoFar.substring(0, cutPoint); - // fire/forget: we don't want to stall this loop - void speakText(firstParagraph); - } + await llmStreamingChatGenerate(llmId, messages, null, null, abortSignal, (update: StreamingClientUpdate) => { + const textSoFar = update.textSoFar; + + // grow the incremental message + if (update.originLLM) incrementalAnswer.originLLM = update.originLLM; + if (textSoFar) incrementalAnswer.text = textSoFar; + if (update.typing !== undefined) incrementalAnswer.typing = update.typing; + + // Update the data store, with optional max-frequency throttling (e.g. 
OpenAI is downsamped 50 -> 12Hz) + // This can be toggled from the settings + throttledEditMessage(incrementalAnswer); + + // 📢 TTS: first-line + if (textSoFar && autoSpeak === 'firstLine' && !spokenLine) { + let cutPoint = textSoFar.lastIndexOf('\n'); + if (cutPoint < 0) + cutPoint = textSoFar.lastIndexOf('. '); + if (cutPoint > 100 && cutPoint < 400) { + spokenLine = true; + const firstParagraph = textSoFar.substring(0, cutPoint); + // fire/forget: we don't want to stall this loop + void speakText(firstParagraph); } - }, - ); + } + }); } catch (error: any) { if (error?.name !== 'AbortError') { console.error('Fetch request error:', error); diff --git a/src/apps/chat/editors/editors.ts b/src/apps/chat/editors/editors.ts deleted file mode 100644 index d991bae30..000000000 --- a/src/apps/chat/editors/editors.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { DLLMId, getKnowledgeMapCutoff } from '~/modules/llms/store-llms'; -import { SystemPurposeId, SystemPurposes } from '../../../data'; - -import { bareBonesPromptMixer } from '~/modules/persona/pmix/pmix'; - -import { createDMessage, DMessage, useChatStore } from '~/common/state/store-chats'; - - -export function createAssistantTypingMessage(conversationId: string, assistantLlmLabel: DLLMId | string /* 'DALL·E' | 'Prodia' | 'react-...' | 'web' */, assistantPurposeId: SystemPurposeId | undefined, text: string): string { - const assistantMessage: DMessage = createDMessage('assistant', text); - assistantMessage.typing = true; - assistantMessage.purposeId = assistantPurposeId; - assistantMessage.originLLM = assistantLlmLabel; - useChatStore.getState().appendMessage(conversationId, assistantMessage); - return assistantMessage.id; -} - - -export function updatePurposeInHistory(conversationId: string, history: DMessage[], assistantLlmId: DLLMId, purposeId: SystemPurposeId): DMessage[] { - const systemMessageIndex = history.findIndex(m => m.role === 'system'); - const systemMessage: DMessage = systemMessageIndex >= 0 ? 
history.splice(systemMessageIndex, 1)[0] : createDMessage('system', ''); - if (!systemMessage.updated && purposeId && SystemPurposes[purposeId]?.systemMessage) { - systemMessage.purposeId = purposeId; - systemMessage.text = bareBonesPromptMixer(SystemPurposes[purposeId].systemMessage, assistantLlmId); - - // HACK: this is a special case for the "Custom" persona, to set the message in stone (so it doesn't get updated when switching to another persona) - if (purposeId === 'Custom') - systemMessage.updated = Date.now(); - } - history.unshift(systemMessage); - useChatStore.getState().setMessages(conversationId, history); - return history; -} \ No newline at end of file diff --git a/src/apps/chat/editors/image-generate.ts b/src/apps/chat/editors/image-generate.ts index 73e0498d4..33a2fec16 100644 --- a/src/apps/chat/editors/image-generate.ts +++ b/src/apps/chat/editors/image-generate.ts @@ -1,39 +1,42 @@ import { getActiveTextToImageProviderOrThrow, t2iGenerateImageOrThrow } from '~/modules/t2i/t2i.client'; -import { useChatStore } from '~/common/state/store-chats'; - -import { createAssistantTypingMessage } from './editors'; +import { ConversationManager } from '~/common/chats/ConversationHandler'; +import { TextToImageProvider } from '~/common/components/useCapabilities'; /** * Text to image, appended as an 'assistant' message */ export async function runImageGenerationUpdatingState(conversationId: string, imageText: string) { + const handler = ConversationManager.getHandler(conversationId); + + // Acquire the active TextToImageProvider + let t2iProvider: TextToImageProvider | undefined = undefined; + try { + t2iProvider = getActiveTextToImageProviderOrThrow(); + } catch (error: any) { + const assistantErrorMessageId = handler.messageAppendAssistant(`[Issue] Sorry, I can't generate images right now. 
${error?.message || error?.toString() || 'Unknown error'}.`, 'issue', undefined); + handler.messageEdit(assistantErrorMessageId, { typing: false }, true); + return; + } // if the imageText ends with " xN" or " [N]" (where N is a number), then we'll generate N images const match = imageText.match(/\sx(\d+)$|\s\[(\d+)]$/); - const count = match ? parseInt(match[1] || match[2], 10) : 1; - if (count > 1) + const repeat = match ? parseInt(match[1] || match[2], 10) : 1; + if (repeat > 1) imageText = imageText.replace(/x(\d+)$|\[(\d+)]$/, '').trim(); // Remove the "xN" or "[N]" part from the imageText - // create a blank and 'typing' message for the assistant - const assistantMessageId = createAssistantTypingMessage(conversationId, '', undefined, - `Give me a few seconds while I draw ${imageText?.length > 20 ? 'that' : '"' + imageText + '"'}...`); - - // reference the state editing functions - const { editMessage } = useChatStore.getState(); + const assistantMessageId = handler.messageAppendAssistant( + `Give me ${t2iProvider.vendor === 'openai' ? 'a dozen' : 'a few'} seconds while I draw ${imageText?.length > 20 ? 
'that' : '"' + imageText + '"'}...`, + '', undefined, + ); + handler.messageEdit(assistantMessageId, { originLLM: t2iProvider.painter }, false); try { - - const t2iProvider = getActiveTextToImageProviderOrThrow(); - editMessage(conversationId, assistantMessageId, { originLLM: t2iProvider.painter }, false); - - const imageUrls = await t2iGenerateImageOrThrow(t2iProvider, imageText, count); - editMessage(conversationId, assistantMessageId, { text: imageUrls.join('\n'), typing: false }, true); - + const imageUrls = await t2iGenerateImageOrThrow(t2iProvider, imageText, repeat); + handler.messageEdit(assistantMessageId, { text: imageUrls.join('\n'), typing: false }, true); } catch (error: any) { const errorMessage = error?.message || error?.toString() || 'Unknown error'; - if (assistantMessageId) - editMessage(conversationId, assistantMessageId, { text: `[Issue] Sorry, I couldn't create an image for you. ${errorMessage}`, typing: false }, false); + handler.messageEdit(assistantMessageId, { text: `[Issue] Sorry, I couldn't create an image for you. ${errorMessage}`, typing: false }, false); } } \ No newline at end of file diff --git a/src/apps/chat/editors/react-tangent.ts b/src/apps/chat/editors/react-tangent.ts index c7a8d789e..ef470832a 100644 --- a/src/apps/chat/editors/react-tangent.ts +++ b/src/apps/chat/editors/react-tangent.ts @@ -2,37 +2,31 @@ import { Agent } from '~/modules/aifn/react/react'; import { DLLMId } from '~/modules/llms/store-llms'; import { useBrowseStore } from '~/modules/browse/store-module-browsing'; -import { createDEphemeral, DMessage, useChatStore } from '~/common/state/store-chats'; +import { ConversationManager } from '~/common/chats/ConversationHandler'; -import { createAssistantTypingMessage } from './editors'; +const EPHEMERAL_DELETION_DELAY = 5 * 1000; /** * Synchronous ReAct chat function - TODO: event loop, auto-ui, cleanups, etc. 
*/ export async function runReActUpdatingState(conversationId: string, question: string, assistantLlmId: DLLMId) { - - const { enableReactTool: enableBrowse } = useBrowseStore.getState(); - const { appendEphemeral, updateEphemeralText, updateEphemeralState, deleteEphemeral, editMessage } = useChatStore.getState(); + const cHandler = ConversationManager.getHandler(conversationId); // create a blank and 'typing' message for the assistant - to be filled when we're done const assistantModelLabel = 'react-' + assistantLlmId.slice(4, 7); // HACK: this is used to change the Avatar animation - const assistantMessageId = createAssistantTypingMessage(conversationId, assistantModelLabel, undefined, '...'); - const updateAssistantMessage = (update: Partial) => - editMessage(conversationId, assistantMessageId, update, false); - + const assistantMessageId = cHandler.messageAppendAssistant('...', assistantModelLabel, undefined); + const { enableReactTool: enableBrowse } = useBrowseStore.getState(); // create an ephemeral space - const ephemeral = createDEphemeral(`Reason+Act`, 'Initializing ReAct..'); - appendEphemeral(conversationId, ephemeral); - + const eHandler = cHandler.createEphemeral(`Reason+Act`, 'Initializing ReAct..'); let ephemeralText = ''; const logToEphemeral = (text: string) => { console.log(text); ephemeralText += (text.length > 300 ? text.slice(0, 300) + '...' 
: text) + '\n'; - updateEphemeralText(conversationId, ephemeral.id, ephemeralText); + eHandler.updateText(ephemeralText); }; - const showStateInEphemeral = (state: object) => updateEphemeralState(conversationId, ephemeral.id, state); + const showStateInEphemeral = (state: object) => eHandler.updateState(state); try { @@ -40,12 +34,12 @@ export async function runReActUpdatingState(conversationId: string, question: st const agent = new Agent(); const reactResult = await agent.reAct(question, assistantLlmId, 5, enableBrowse, logToEphemeral, showStateInEphemeral); - setTimeout(() => deleteEphemeral(conversationId, ephemeral.id), 4 * 1000); - updateAssistantMessage({ text: reactResult, typing: false }); + cHandler.messageEdit(assistantMessageId, { text: reactResult, typing: false }, false); + setTimeout(() => eHandler.delete(), EPHEMERAL_DELETION_DELAY); } catch (error: any) { console.error(error); logToEphemeral(ephemeralText + `\nIssue: ${error || 'unknown'}`); - updateAssistantMessage({ text: 'Issue: ReAct did not produce an answer.', typing: false }); + cHandler.messageEdit(assistantMessageId, { text: 'Issue: ReAct did not produce an answer.', typing: false }, false); } } \ No newline at end of file diff --git a/src/apps/chat/store-app-chat.ts b/src/apps/chat/store-app-chat.ts index 3d85309ee..4ea350a54 100644 --- a/src/apps/chat/store-app-chat.ts +++ b/src/apps/chat/store-app-chat.ts @@ -10,6 +10,8 @@ export type ChatAutoSpeakType = 'off' | 'firstLine' | 'all'; interface AppChatStore { + // chat AI + autoSpeak: ChatAutoSpeakType; setAutoSpeak: (autoSpeak: ChatAutoSpeakType) => void; @@ -22,9 +24,14 @@ interface AppChatStore { autoTitleChat: boolean; setAutoTitleChat: (autoTitleChat: boolean) => void; + // chat UI + micTimeoutMs: number; setMicTimeoutMs: (micTimeoutMs: number) => void; + showRelativeSize: boolean; + setShowRelativeSize: (showRelativeSize: boolean) => void; + showTextDiff: boolean; setShowTextDiff: (showTextDiff: boolean) => void; @@ -52,6 +59,9 @@ 
const useAppChatStore = create()(persist( micTimeoutMs: 2000, setMicTimeoutMs: (micTimeoutMs: number) => _set({ micTimeoutMs }), + showRelativeSize: false, + setShowRelativeSize: (showRelativeSize: boolean) => _set({ showRelativeSize }), + showTextDiff: false, setShowTextDiff: (showTextDiff: boolean) => _set({ showTextDiff }), @@ -103,6 +113,12 @@ export const useChatMicTimeoutMsValue = (): number => export const useChatMicTimeoutMs = (): [number, (micTimeoutMs: number) => void] => useAppChatStore(state => [state.micTimeoutMs, state.setMicTimeoutMs], shallow); +export const useChatShowRelativeSize = (): { showRelativeSize: boolean, toggleRelativeSize: () => void } => { + const showRelativeSize = useAppChatStore(state => state.showRelativeSize); + const toggleRelativeSize = () => useAppChatStore.getState().setShowRelativeSize(!showRelativeSize); + return { showRelativeSize, toggleRelativeSize }; +}; + export const useChatShowTextDiff = (): [boolean, (showDiff: boolean) => void] => useAppChatStore(state => [state.showTextDiff, state.setShowTextDiff], shallow); diff --git a/src/apps/draw/components/PromptDesigner.tsx b/src/apps/draw/components/PromptDesigner.tsx index 4627251bc..49e88ed40 100644 --- a/src/apps/draw/components/PromptDesigner.tsx +++ b/src/apps/draw/components/PromptDesigner.tsx @@ -184,8 +184,6 @@ export function PromptDesigner(props: { px: 0, minWidth: '3rem', pointerEvents: 'none', - fontSize: 'xs', - fontWeight: 600, }}> {tempCount > 1 ? `1 / ${tempCount}` : '1'} @@ -339,7 +337,7 @@ export function PromptDesigner(props: { key={n} variant={tempRepeat === n ? 'soft' : 'plain'} color='neutral' onClick={() => setTempRepeat(n)} - sx={{ fontWeight: tempRepeat === n ? 'xl' : 'sm' }} + sx={{ fontWeight: tempRepeat === n ? 
'xl' : 400 /* reset, from 600 */ }} > {`x${n}`} diff --git a/src/apps/link-chat/LinkChatViewer.tsx b/src/apps/link-chat/LinkChatViewer.tsx index 336e9e299..3bec42776 100644 --- a/src/apps/link-chat/LinkChatViewer.tsx +++ b/src/apps/link-chat/LinkChatViewer.tsx @@ -141,7 +141,7 @@ export function LinkChatViewer(props: { conversation: DConversation, storedAt: D message.text = text} />, diff --git a/src/apps/news/AppNews.tsx b/src/apps/news/AppNews.tsx index f27fcecfb..85f7cae2f 100644 --- a/src/apps/news/AppNews.tsx +++ b/src/apps/news/AppNews.tsx @@ -93,12 +93,12 @@ export function AppNews() { - {ni.text ? ni.text : ni.versionName ? <>{ni.versionCode} · : `Version ${ni.versionCode}:`} + {ni.text ? ni.text : ni.versionName ? <>{ni.versionCode} · : `Version ${ni.versionCode}:`} diff --git a/src/apps/news/news.data.tsx b/src/apps/news/news.data.tsx index 8e3bbd4d8..b0f4aa5d7 100644 --- a/src/apps/news/news.data.tsx +++ b/src/apps/news/news.data.tsx @@ -39,7 +39,7 @@ function B(props: { : props.code ? 
`${Brand.URIs.OpenRepo}/blob/main/${props.code}` : props.href; const boldText = ( - + {props.children} ); diff --git a/src/apps/personas/creator/Creator.tsx b/src/apps/personas/creator/Creator.tsx index dc1c14363..2f5330b0f 100644 --- a/src/apps/personas/creator/Creator.tsx +++ b/src/apps/personas/creator/Creator.tsx @@ -220,7 +220,7 @@ export function Creator(props: { display: boolean }) { - + {chainStepName} diff --git a/src/apps/personas/creator/CreatorDrawer.tsx b/src/apps/personas/creator/CreatorDrawer.tsx index 9ba88c57a..bc8454083 100644 --- a/src/apps/personas/creator/CreatorDrawer.tsx +++ b/src/apps/personas/creator/CreatorDrawer.tsx @@ -143,7 +143,7 @@ export function CreatorDrawer(props: { - + Create diff --git a/src/apps/personas/creator/CreatorDrawerItem.tsx b/src/apps/personas/creator/CreatorDrawerItem.tsx index 40ef5f3c3..e0001f0dc 100644 --- a/src/apps/personas/creator/CreatorDrawerItem.tsx +++ b/src/apps/personas/creator/CreatorDrawerItem.tsx @@ -2,7 +2,7 @@ import * as React from 'react'; import TimeAgo from 'react-timeago'; import { Box, Checkbox, IconButton, ListItemButton, ListItemDecorator, Typography } from '@mui/joy'; -import CloseIcon from '@mui/icons-material/Close'; +import CloseRoundedIcon from '@mui/icons-material/CloseRounded'; import DeleteOutlineIcon from '@mui/icons-material/DeleteOutline'; import TextFieldsIcon from '@mui/icons-material/TextFields'; import YouTubeIcon from '@mui/icons-material/YouTube'; @@ -91,7 +91,7 @@ export function CreatorDrawerItem(props: { setDeleteArmed(false)}> - + } diff --git a/src/apps/personas/creator/FromYouTube.tsx b/src/apps/personas/creator/FromYouTube.tsx index 0b36ad488..a052d2fb8 100644 --- a/src/apps/personas/creator/FromYouTube.tsx +++ b/src/apps/personas/creator/FromYouTube.tsx @@ -2,7 +2,7 @@ import * as React from 'react'; import type { SxProps } from '@mui/joy/styles/types'; import { Box, Button, Card, IconButton, Input, Typography } from '@mui/joy'; -import CloseIcon from 
'@mui/icons-material/Close'; +import CloseRoundedIcon from '@mui/icons-material/CloseRounded'; import YouTubeIcon from '@mui/icons-material/YouTube'; import { useYouTubeTranscript, YTVideoTranscript } from '~/modules/youtube/useYouTubeTranscript'; @@ -60,7 +60,7 @@ function YouTubeVideoTranscriptCard(props: { transcript: YTVideoTranscript, onCl position: 'absolute', top: -8, right: -8, borderRadius: 'md', }}> - + diff --git a/src/apps/settings-modal/SettingsModal.tsx b/src/apps/settings-modal/SettingsModal.tsx index 2f96d8d3c..379567a7c 100644 --- a/src/apps/settings-modal/SettingsModal.tsx +++ b/src/apps/settings-modal/SettingsModal.tsx @@ -128,22 +128,22 @@ export function SettingsModal(props: { diff --git a/src/apps/settings-modal/ShortcutsModal.tsx b/src/apps/settings-modal/ShortcutsModal.tsx index 8cddc1976..881944bee 100644 --- a/src/apps/settings-modal/ShortcutsModal.tsx +++ b/src/apps/settings-modal/ShortcutsModal.tsx @@ -47,7 +47,7 @@ export function ShortcutsModal(props: { onClose: () => void }) { text={shortcutsMd} fromRole='assistant' contentScaling='sm' - isMobile={isMobile} + fitScreen={isMobile} renderTextAsMarkdown /> diff --git a/src/apps/settings-modal/UxLabsSettings.tsx b/src/apps/settings-modal/UxLabsSettings.tsx index 6e01d2b9b..159dc3554 100644 --- a/src/apps/settings-modal/UxLabsSettings.tsx +++ b/src/apps/settings-modal/UxLabsSettings.tsx @@ -6,6 +6,7 @@ import ScreenshotMonitorIcon from '@mui/icons-material/ScreenshotMonitor'; import SpeedIcon from '@mui/icons-material/Speed'; import TitleIcon from '@mui/icons-material/Title'; +import { ChatBeamIcon } from '~/common/components/icons/ChatBeamIcon'; import { FormLabelStart } from '~/common/components/forms/FormLabelStart'; import { FormSwitchControl } from '~/common/components/forms/FormSwitchControl'; import { Link } from '~/common/components/Link'; @@ -14,7 +15,7 @@ import { useUXLabsStore } from '~/common/state/store-ux-labs'; // uncomment for more settings -// const DEV_MODE_SETTINGS = 
false; +const DEV_MODE_SETTINGS = false; export function UxLabsSettings() { @@ -25,21 +26,27 @@ export function UxLabsSettings() { labsAttachScreenCapture, setLabsAttachScreenCapture, labsCameraDesktop, setLabsCameraDesktop, labsChatBarAlt, setLabsChatBarAlt, + labsChatBeam, setLabsChatBeam, labsHighPerformance, setLabsHighPerformance, } = useUXLabsStore(); return <> - Performance} description={'v1.14 · ' + (labsHighPerformance ? 'Unlocked' : 'Default')} - checked={labsHighPerformance} onChange={setLabsHighPerformance} - /> + {DEV_MODE_SETTINGS && Chat Beam} description={'v1.14 · ' + (labsChatBeam ? 'Active' : 'Off')} + checked={labsChatBeam} onChange={setLabsChatBeam} + />} Chat Title} description={'v1.14 · ' + (labsChatBarAlt === 'title' ? 'Show Title' : 'Show Options')} checked={labsChatBarAlt === 'title'} onChange={(on) => setLabsChatBarAlt(on ? 'title' : false)} /> + Performance} description={'v1.14 · ' + (labsHighPerformance ? 'Unlocked' : 'Default')} + checked={labsHighPerformance} onChange={setLabsHighPerformance} + /> + {!isMobile && Screen Capture} description={'v1.13 · ' + (labsAttachScreenCapture ? 'Enabled' : 'Disabled')} checked={labsAttachScreenCapture} onChange={setLabsAttachScreenCapture} diff --git a/src/apps/settings-modal/settings-ui/SettingContentScaling.tsx b/src/apps/settings-modal/settings-ui/SettingContentScaling.tsx index 66c930dfc..7a8f3f52a 100644 --- a/src/apps/settings-modal/settings-ui/SettingContentScaling.tsx +++ b/src/apps/settings-modal/settings-ui/SettingContentScaling.tsx @@ -16,13 +16,14 @@ export function SettingContentScaling(props: { noLabel?: boolean }) { return ( {!props.noLabel && ( - + )} @@ -40,6 +41,10 @@ export function SettingContentScaling(props: { noLabel?: boolean }) { sx={{ // style fontSize: sizeKey, + // 400 would be more representative because it's the default, but being in a button we're 500 (md) instead of 400. 
+ // However it's good to have that extra confidence when choosing a lower font size, as then while reading text + // the 400 makes lots of sense. + // fontWeight: ...400?, // borderRadius: !isActive ? '50%' : undefined, borderRadius: '50%', width: '1rem', diff --git a/src/common/app.nav.ts b/src/common/app.nav.ts index a8097b74b..eba7891df 100644 --- a/src/common/app.nav.ts +++ b/src/common/app.nav.ts @@ -113,7 +113,7 @@ export const navItems: { route: '/draw', // hideOnMobile: true, hideDrawer: true, - // _delete: true, + _delete: true, }, { name: 'Cortex', @@ -139,7 +139,7 @@ export const navItems: { route: '/workspace', hideDrawer: true, hideOnMobile: true, - // _delete: true, + _delete: true, }, // <-- divider here --> { diff --git a/src/common/app.theme.ts b/src/common/app.theme.ts index db95526cf..6d6b62a7a 100644 --- a/src/common/app.theme.ts +++ b/src/common/app.theme.ts @@ -16,7 +16,7 @@ export const formLabelStartWidth = 140; // Theme & Fonts const inter = Inter({ - weight: ['400', '500', '600', '700'], + weight: [ /* '300', sm */ '400' /* (undefined, default) */, '500' /* md */, '600' /* lg */, '700' /* xl */], subsets: ['latin'], display: 'swap', fallback: ['Helvetica', 'Arial', 'sans-serif'], @@ -153,6 +153,7 @@ interface ContentScalingOptions { // BlocksRenderer blockCodeFontSize: string; blockFontSize: string; + blockImageGap: number; blockLineHeight: string | number; // ChatMessage chatMessagePadding: number; @@ -165,6 +166,7 @@ export const themeScalingMap: Record = { xs: { blockCodeFontSize: '0.75rem', blockFontSize: 'xs', + blockImageGap: 1, blockLineHeight: 1.666667, chatMessagePadding: 1.25, chatDrawerItemSx: { '--ListItem-minHeight': '2.25rem', fontSize: 'sm' }, // 36px @@ -173,6 +175,7 @@ export const themeScalingMap: Record = { sm: { blockCodeFontSize: '0.75rem', blockFontSize: 'sm', + blockImageGap: 1.5, blockLineHeight: 1.714286, chatMessagePadding: 1.5, chatDrawerItemSx: { '--ListItem-minHeight': '2.25rem', fontSize: 'sm' }, @@ -181,6 
+184,7 @@ export const themeScalingMap: Record = { md: { blockCodeFontSize: '0.875rem', blockFontSize: 'md', + blockImageGap: 2, blockLineHeight: 1.75, chatMessagePadding: 2, chatDrawerItemSx: { '--ListItem-minHeight': '2.5rem', fontSize: 'md' }, // 40px diff --git a/src/common/chats/BeamStore.ts b/src/common/chats/BeamStore.ts new file mode 100644 index 000000000..66d189fe5 --- /dev/null +++ b/src/common/chats/BeamStore.ts @@ -0,0 +1,99 @@ +import * as React from 'react'; +import { v4 as uuidv4 } from 'uuid'; + +import { DMessage } from '~/common/state/store-chats'; + +import type { ConversationHandler } from './ConversationHandler'; + + +export type BeamConfig = { + history: DMessage[]; + lastMessage: string; + configError?: string; +}; + +function createConfig(history: DMessage[]): BeamConfig { + return { history, lastMessage: history.slice(-1)[0]?.text || '' }; +} + +export interface BeamCandidate { + id: string; + text: string; + placeholder: string; +} + +function createCandidate(): BeamCandidate { + return { + id: uuidv4(), + text: '', + placeholder: '...', + }; +} + +export class BeamStore extends EventTarget { + private config: BeamConfig | null = null; + private readonly candidates: BeamCandidate[] = []; + + constructor() { + super(); + } + + get(): { config: BeamConfig | null, candidates: BeamCandidate[] } { + return { config: this.config, candidates: this.candidates }; + } + + create(history: DMessage[]) { + if (this.config) { + this.config.configError = 'Warning: config already exists. Skipping...'; + } else { + this.config = createConfig([...history]); + } + if (history.length < 1) + this.config.configError = 'Warning: empty history. 
Skipping...'; + this.dispatchEvent(new CustomEvent('stateChanged', { detail: { config: this.config } })); + } + + appendCandidate(candidate: BeamCandidate): void { + this.candidates.push(candidate); + this.dispatchEvent(new CustomEvent('stateChanged', { detail: { candidates: this.candidates } })); + } + + deleteCandidate(candidateId: BeamCandidate['id']): void { + const index = this.candidates.findIndex(e => e.id === candidateId); + if (index >= 0) { + this.candidates.splice(index, 1); + this.dispatchEvent(new CustomEvent('stateChanged', { detail: { candidates: this.candidates } })); + } + } + + updateCandidate(candidateId: BeamCandidate['id'], update: Partial): void { + const candidate = this.candidates.find(c => c.id === candidateId); + if (candidate) { + Object.assign(candidate, update); + this.dispatchEvent(new CustomEvent('stateChanged', { detail: { candidates: this.candidates } })); + } + } +} + + +export function useBeam(conversationHandler: ConversationHandler | null): { config: BeamConfig | null, candidates: BeamCandidate[] } { + + // state + const [beamState, setBeamState] = React.useState<{ config: BeamConfig | null, candidates: BeamCandidate[] }>(() => { + return conversationHandler ? 
conversationHandler.beamStore.get() : { config: null, candidates: [] }; + }); + + // [effect] subscribe to events + React.useEffect(() => { + if (!conversationHandler) return; + const handleStateChanged = (event: Event) => { + setBeamState(state => ({ ...state, ...(event as CustomEvent<{ config?: BeamConfig, candidates?: BeamCandidate[] }>).detail })); + }; + conversationHandler.beamStore.addEventListener('stateChanged', handleStateChanged); + return () => { + conversationHandler.beamStore.removeEventListener('stateChanged', handleStateChanged); + }; + }, [conversationHandler]); + + return beamState; +} diff --git a/src/common/chats/ConversationHandler.ts b/src/common/chats/ConversationHandler.ts new file mode 100644 index 000000000..372c6a6c4 --- /dev/null +++ b/src/common/chats/ConversationHandler.ts @@ -0,0 +1,101 @@ +import type { DLLMId } from '~/modules/llms/store-llms'; +import { bareBonesPromptMixer } from '~/modules/persona/pmix/pmix'; + +import { SystemPurposeId, SystemPurposes } from '../../data'; + +import { ChatActions, createDMessage, DConversationId, DMessage, useChatStore } from '../state/store-chats'; + +import { BeamStore } from './BeamStore'; +import { EphemeralHandler, EphemeralsStore } from './EphemeralsStore'; + + +export class ConversationHandler { + private readonly chatActions: ChatActions; + private readonly conversationId: DConversationId; + + readonly beamStore: BeamStore = new BeamStore(); + readonly ephemeralsStore: EphemeralsStore = new EphemeralsStore(); + + + constructor(conversationId: DConversationId) { + this.chatActions = useChatStore.getState(); + this.conversationId = conversationId; + } + + + // Conversation Management + + resyncPurposeInHistory(history: DMessage[], assistantLlmId: DLLMId, purposeId: SystemPurposeId): DMessage[] { + const systemMessageIndex = history.findIndex(m => m.role === 'system'); + const systemMessage: DMessage = systemMessageIndex >= 0 ? 
history.splice(systemMessageIndex, 1)[0] : createDMessage('system', ''); + if (!systemMessage.updated && purposeId && SystemPurposes[purposeId]?.systemMessage) { + systemMessage.purposeId = purposeId; + systemMessage.text = bareBonesPromptMixer(SystemPurposes[purposeId].systemMessage, assistantLlmId); + + // HACK: this is a special case for the 'Custom' persona, to set the message in stone (so it doesn't get updated when switching to another persona) + if (purposeId === 'Custom') + systemMessage.updated = Date.now(); + } + history.unshift(systemMessage); + this.chatActions.setMessages(this.conversationId, history); + return history; + } + + setAbortController(abortController: AbortController | null): void { + this.chatActions.setAbortController(this.conversationId, abortController); + } + + + // Message Management + + messageAppendAssistant(text: string, llmLabel: DLLMId | string /* 'DALL·E' | 'Prodia' | 'react-...' | 'web'*/, purposeId?: SystemPurposeId): string { + const assistantMessage: DMessage = createDMessage('assistant', text); + assistantMessage.typing = true; + assistantMessage.purposeId = purposeId; + assistantMessage.originLLM = llmLabel; + this.chatActions.appendMessage(this.conversationId, assistantMessage); + return assistantMessage.id; + } + + messageEdit(messageId: string, update: Partial, touch: boolean): void { + this.chatActions.editMessage(this.conversationId, messageId, update, touch); + } + + + // Ephemerals + + createEphemeral(title: string, initialText: string): EphemeralHandler { + return new EphemeralHandler(title, initialText, this.ephemeralsStore); + } + +} + + +// Singleton to get a global instance relate to a conversationId. Note we don't have reference counting, and mainly because we cannot +// do comprehensive lifecycle tracking. 
+export class ConversationManager { + private static _instance: ConversationManager; + private readonly handlers: Map = new Map(); + + static getHandler(conversationId: DConversationId): ConversationHandler { + const instance = ConversationManager._instance || (ConversationManager._instance = new ConversationManager()); + let handler = instance.handlers.get(conversationId); + if (!handler) { + handler = new ConversationHandler(conversationId); + instance.handlers.set(conversationId, handler); + } + return handler; + } + + // Acquires a ConversationHandler, ensuring automatic release when done, with debug location. + // enable in 2025, after support from https://github.com/tc39/proposal-explicit-resource-management + /*usingHandler(conversationId: DConversationId, debugLocation: string) { + const handler = this.getHandler(conversationId, debugLocation); + return { + handler, + [Symbol.dispose]: () => { + this.releaseHandler(handler, debugLocation); + }, + }; + }*/ +} \ No newline at end of file diff --git a/src/common/chats/EphemeralsStore.ts b/src/common/chats/EphemeralsStore.ts new file mode 100644 index 000000000..276158096 --- /dev/null +++ b/src/common/chats/EphemeralsStore.ts @@ -0,0 +1,103 @@ +import * as React from 'react'; +import { v4 as uuidv4 } from 'uuid'; + +import type { ConversationHandler } from './ConversationHandler'; + + +/** + * DEphemeral: For ReAct sidebars, displayed under the chat + */ +export interface DEphemeral { + id: string; + title: string; + text: string; + state: object; +} + +function createDEphemeral(title: string, initialText: string): DEphemeral { + return { + id: uuidv4(), + title: title, + text: initialText, + state: {}, + }; +} + +/** + * [store]: diy reactive store for a list of ephemerals + */ +export class EphemeralsStore extends EventTarget { + private readonly ephemerals: DEphemeral[] = []; + + constructor() { + super(); + } + + find(): DEphemeral[] { + return this.ephemerals; + } + + append(ephemeral: DEphemeral): void { 
+ this.ephemerals.push(ephemeral); + this.dispatchEvent(new CustomEvent('ephemeralsChanged', { detail: { ephemerals: this.ephemerals } })); + } + + delete(ephemeralId: string): void { + const index = this.ephemerals.findIndex(e => e.id === ephemeralId); + console.log('EphemeralsStore: delete', index); + if (index >= 0) { + this.ephemerals.splice(index, 1); + this.dispatchEvent(new CustomEvent('ephemeralsChanged', { detail: { ephemerals: this.ephemerals } })); + } + } + + update(ephemeralId: string, update: Partial): void { + const ephemeral = this.ephemerals.find(e => e.id === ephemeralId); + if (ephemeral) { + Object.assign(ephemeral, update); + this.dispatchEvent(new CustomEvent('ephemeralsChanged', { detail: { ephemerals: this.ephemerals } })); + } + } +} + +export class EphemeralHandler { + private readonly ephemeralId: string; + + constructor(title: string, initialText: string, readonly ephemeralsStore: EphemeralsStore) { + const dEphemeral = createDEphemeral(title, initialText); + this.ephemeralId = dEphemeral.id; + this.ephemeralsStore.append(dEphemeral); + } + + updateText(text: string): void { + this.ephemeralsStore.update(this.ephemeralId, { text }); + } + + updateState(state: object): void { + this.ephemeralsStore.update(this.ephemeralId, { state }); + } + + delete(): void { + this.ephemeralsStore.delete(this.ephemeralId); + } +} + + +export function useEphemerals(conversationHandler: ConversationHandler | null): DEphemeral[] { + + // state + const [ephemerals, setEphemerals] = React.useState( + () => conversationHandler ? 
conversationHandler.ephemeralsStore.find() : []); + + React.useEffect(() => { + if (!conversationHandler) return; + const handleEphemeralsChanged = (event: Event) => { + const customEvent = event as CustomEvent<{ ephemerals: DEphemeral[] }>; + setEphemerals([...customEvent.detail.ephemerals]); + }; + conversationHandler.ephemeralsStore.addEventListener('ephemeralsChanged', handleEphemeralsChanged); + return () => conversationHandler.ephemeralsStore.removeEventListener('ephemeralsChanged', handleEphemeralsChanged); + }, [conversationHandler]); + + return ephemerals; +} diff --git a/src/common/components/GoogleAnalytics.tsx b/src/common/components/GoogleAnalytics.tsx new file mode 100644 index 000000000..2796e5d42 --- /dev/null +++ b/src/common/components/GoogleAnalytics.tsx @@ -0,0 +1,21 @@ +import * as React from 'react'; +import { GoogleAnalytics as NextGoogleAnalytics } from '@next/third-parties/google'; + + +export const hasGoogleAnalytics = !!process.env.NEXT_PUBLIC_GA4_MEASUREMENT_ID; + +export function getGA4MeasurementId(): string | null { + return process.env.NEXT_PUBLIC_GA4_MEASUREMENT_ID || null; +} + +/** + * Note: we are using this third-party component from Vercel which is very experimental + * and has just been launched weeks back (at the time of writing this code). There could + * be issues. + * + * Note: this causes a 2.8kb increase in the bundle size. + */ +export function OptionalGoogleAnalytics() { + const gaId = getGA4MeasurementId(); + return gaId ? 
: null; +} \ No newline at end of file diff --git a/src/common/components/KeyStroke.tsx b/src/common/components/KeyStroke.tsx index 7344512bc..9e74001df 100644 --- a/src/common/components/KeyStroke.tsx +++ b/src/common/components/KeyStroke.tsx @@ -27,7 +27,7 @@ export function KeyStroke(props: { combo: string, dark?: boolean, sx?: SxProps } // // // {props.combo} diff --git a/src/common/components/forms/useLLMSelect.tsx b/src/common/components/forms/useLLMSelect.tsx index 2197cc8d4..ffa659050 100644 --- a/src/common/components/forms/useLLMSelect.tsx +++ b/src/common/components/forms/useLLMSelect.tsx @@ -60,7 +60,7 @@ export function useLLMSelect(localState: boolean = true, label: string = 'Model' {addSeparator && } + ); }; \ No newline at end of file diff --git a/src/modules/blocks/code/RenderCode.tsx b/src/modules/blocks/code/RenderCode.tsx index 820ad3837..ba5867c3a 100644 --- a/src/modules/blocks/code/RenderCode.tsx +++ b/src/modules/blocks/code/RenderCode.tsx @@ -19,8 +19,17 @@ import { ButtonStackBlitz, isStackBlitzSupported } from './ButtonStackBlitz'; import { heuristicIsHtml, IFrameComponent } from '../RenderHtml'; import { patchSvgString, RenderCodeMermaid } from './RenderCodeMermaid'; +export function getPlantUmlServerUrl(): string { + // set at nextjs build time + return process.env.NEXT_PUBLIC_PLANTUML_SERVER_URL || 'https://www.plantuml.com/plantuml/svg/'; +} async function fetchPlantUmlSvg(plantUmlCode: string): Promise { + // Get the PlantUML server from inline env var + let plantUmlServerUrl = getPlantUmlServerUrl(); + if (!plantUmlServerUrl.endsWith('/')) + plantUmlServerUrl += '/'; + // fetch the PlantUML SVG let text: string = ''; try { @@ -29,23 +38,27 @@ async function fetchPlantUmlSvg(plantUmlCode: string): Promise { // retrieve and manually adapt the SVG, to remove the background const encodedPlantUML: string = plantUmlEncode(plantUmlCode); - const response = await frontendSideFetch(`https://www.plantuml.com/plantuml/svg/${encodedPlantUML}`); 
+ const response = await frontendSideFetch(`${plantUmlServerUrl}${encodedPlantUML}`); text = await response.text(); - } catch (e) { + } catch (error) { + console.error('Error rendering PlantUML on server:', plantUmlServerUrl, error); return null; } + // validate/extract the SVG const start = text.indexOf(''); if (start < 0 || end <= start) throw new Error('Could not render PlantUML'); + + // remove the background color const svg = text .slice(start, end + 6) // - .replace('background:#FFFFFF;', ''); // transparent background + .replace('background:#FFFFFF;', ''); // check for syntax errors if (svg.includes('>Syntax Error?')) - throw new Error('syntax issue (it happens!). Please regenerate or change generator model.'); + throw new Error('llm syntax issue (it happens!). Please regenerate or change the language model.'); return svg; } @@ -54,7 +67,7 @@ async function fetchPlantUmlSvg(plantUmlCode: string): Promise { export const overlayButtonsSx: SxProps = { position: 'absolute', top: 0, right: 0, zIndex: 10, display: 'flex', flexDirection: 'row', gap: 1, - opacity: 0, transition: 'opacity 0.2s', + opacity: 0, transition: 'opacity 0.15s', // '& > button': { // backgroundColor: 'background.level2', // backdropFilter: 'blur(12px)', @@ -64,7 +77,7 @@ export const overlayButtonsSx: SxProps = { interface RenderCodeBaseProps { codeBlock: CodeBlock, - isMobile?: boolean, + fitScreen?: boolean, noCopyButton?: boolean, optimizeLightweight?: boolean, sx?: SxProps, @@ -78,7 +91,7 @@ interface RenderCodeImplProps extends RenderCodeBaseProps { function RenderCodeImpl(props: RenderCodeImplProps) { // state - const [fitScreen, setFitScreen] = React.useState(!!props.isMobile); + const [fitScreen, setFitScreen] = React.useState(!!props.fitScreen); const [showHTML, setShowHTML] = React.useState(false); const [showMermaid, setShowMermaid] = React.useState(true); const [showPlantUML, setShowPlantUML] = React.useState(true); @@ -148,7 +161,7 @@ function RenderCodeImpl(props: 
RenderCodeImplProps) { component='code' className={`language-${inferredCodeLanguage || 'unknown'}`} sx={{ - fontWeight: 500, whiteSpace: 'pre', // was 'break-spaces' before we implemented per-block scrolling + whiteSpace: 'pre', // was 'break-spaces' before we implemented per-block scrolling mx: 0, p: 1.5, // this block gets a thicker border display: 'block', overflowX: 'auto', @@ -217,7 +230,7 @@ function RenderCodeImpl(props: RenderCodeImplProps) { )} - {((isMermaid && showMermaid) || (isPlantUML && showPlantUML) || (isSVG && showSVG && canScaleSVG)) && ( + {((isMermaid && showMermaid) || (isPlantUML && showPlantUML && !plantUmlError) || (isSVG && showSVG && canScaleSVG)) && ( setFitScreen(on => !on)}> diff --git a/src/modules/llms/models-modal/ModelsList.tsx b/src/modules/llms/models-modal/ModelsList.tsx index 5defd9651..379c8fa42 100644 --- a/src/modules/llms/models-modal/ModelsList.tsx +++ b/src/modules/llms/models-modal/ModelsList.tsx @@ -2,9 +2,11 @@ import * as React from 'react'; import { shallow } from 'zustand/shallow'; import type { SxProps } from '@mui/joy/styles/types'; -import { Box, Chip, IconButton, List, ListItem, ListItemButton, Typography } from '@mui/joy'; +import { Chip, IconButton, List, ListItem, ListItemButton, Typography } from '@mui/joy'; import SettingsOutlinedIcon from '@mui/icons-material/SettingsOutlined'; import VisibilityOffOutlinedIcon from '@mui/icons-material/VisibilityOffOutlined'; +import VisibilityOutlinedIcon from '@mui/icons-material/VisibilityOutlined'; + import { GoodTooltip } from '~/common/components/GoodTooltip'; @@ -13,6 +15,8 @@ import { IModelVendor } from '../vendors/IModelVendor'; import { findVendorById } from '../vendors/vendors.registry'; +const absorbListPadding: SxProps = { my: 'calc(var(--ListItem-paddingY) / -2)' }; + function ModelItem(props: { llm: DLLM, vendor: IModelVendor, @@ -26,12 +30,17 @@ function ModelItem(props: { // derived const { llm, onModelClicked, onModelSetHidden } = props; - const 
handleClick = React.useCallback((event: React.MouseEvent) => { + const handleLLMConfigure = React.useCallback((event: React.MouseEvent) => { event.stopPropagation(); onModelClicked(llm.id); }, [llm.id, onModelClicked]); - const handleUnhide = React.useCallback((event: React.MouseEvent) => { + const handleLLMHide = React.useCallback((event: React.MouseEvent) => { + event.stopPropagation(); + onModelSetHidden(llm.id, true); + }, [llm.id, onModelSetHidden]); + + const handleLLMUnhide = React.useCallback((event: React.MouseEvent) => { event.stopPropagation(); onModelSetHidden(llm.id, false); }, [llm.id, onModelSetHidden]); @@ -53,40 +62,42 @@ function ModelItem(props: { return ( {/* Model Name */} - + {label} - {/* --> */} - - + {/* Chips */} {props.chipChat && chat} - {props.chipFast && fast} - {props.chipFunc && 𝑓n} - {llm.hidden && ( - - + + + {llm.hidden ? : } - )} + - - - + + + + + @@ -154,13 +165,7 @@ export function ModelsList(props: { } return ( - + {items.length > 0 ? items : ( diff --git a/src/modules/llms/models-modal/ModelsModal.tsx b/src/modules/llms/models-modal/ModelsModal.tsx index 1d4566d5d..40d49d0a0 100644 --- a/src/modules/llms/models-modal/ModelsModal.tsx +++ b/src/modules/llms/models-modal/ModelsModal.tsx @@ -103,6 +103,18 @@ export function ModelsModal(props: { suspendAutoModelsSetup?: boolean }) { // works in tandem with the parent (GoodModal > Dialog) overflow: 'auto' minHeight: '6rem', overflowY: 'auto', + + // style (list variant=outlined) + '--ListItem-paddingY': '0rem', + '--ListItem-paddingRight': '0.5rem', // instead of 0.75 + backgroundColor: 'rgb(var(--joy-palette-neutral-lightChannel) / 20%)', + borderRadius: 'md', + + // [mobile] a bit less padding + '@media (max-width: 900px)': { + '--ListItem-paddingLeft': '0.5rem', + '--ListItem-paddingRight': '0.25rem', + }, }} /> )} diff --git a/src/modules/llms/server/gemini/gemini.wiretypes.ts b/src/modules/llms/server/gemini/gemini.wiretypes.ts index c7e4f9a3e..c879f7746 100644 --- 
a/src/modules/llms/server/gemini/gemini.wiretypes.ts +++ b/src/modules/llms/server/gemini/gemini.wiretypes.ts @@ -22,6 +22,7 @@ export const geminiModelsListOutputSchema = z.object({ 'countMessageTokens', 'countTextTokens', 'countTokens', + 'createTunedModel', 'createTunedTextModel', 'embedContent', 'embedText', @@ -168,7 +169,7 @@ export const geminiGeneratedContentResponseSchema = z.object({ // no candidates are returned only if there was something wrong with the prompt (see promptFeedback) candidates: z.array(z.object({ index: z.number(), - content: geminiContentSchema, + content: geminiContentSchema.optional(), // this can be missing if the finishReason is not 'MAX_TOKENS' finishReason: geminiFinishReasonSchema.optional(), safetyRatings: z.array(geminiSafetyRatingSchema), citationMetadata: z.object({ diff --git a/src/modules/llms/server/llm.server.streaming.ts b/src/modules/llms/server/llm.server.streaming.ts index a073169b2..12e958e2e 100644 --- a/src/modules/llms/server/llm.server.streaming.ts +++ b/src/modules/llms/server/llm.server.streaming.ts @@ -272,7 +272,14 @@ function createStreamParserGemini(modelName: string): AIStreamParser { // parse the JSON chunk const wireGenerationChunk = JSON.parse(data); - const generationChunk = geminiGeneratedContentResponseSchema.parse(wireGenerationChunk); + let generationChunk: ReturnType; + try { + generationChunk = geminiGeneratedContentResponseSchema.parse(wireGenerationChunk); + } catch (error: any) { + // log the malformed data to the console, and rethrow to transmit as 'error' + console.log(`/api/llms/stream: Gemini parsing issue: ${error?.message || error}`, wireGenerationChunk); + throw error; + } // Prompt Safety Errors: pass through errors from Gemini if (generationChunk.promptFeedback?.blockReason) { @@ -282,12 +289,19 @@ function createStreamParserGemini(modelName: string): AIStreamParser { // expect a single completion const singleCandidate = generationChunk.candidates?.[0] ?? 
null; - if (!singleCandidate || !singleCandidate.content?.parts.length) + if (!singleCandidate) throw new Error(`Gemini: expected 1 completion, got ${generationChunk.candidates?.length}`); + // no contents: could be an expected or unexpected condition + if (!singleCandidate.content) { + if (singleCandidate.finishReason === 'MAX_TOKENS') + return { text: ' 🧱', close: true }; + throw new Error('Gemini: server response missing content'); + } + // expect a single part - if (singleCandidate.content.parts.length !== 1 || !('text' in singleCandidate.content.parts[0])) - throw new Error(`Gemini: expected 1 text part, got ${singleCandidate.content.parts.length}`); + if (singleCandidate.content.parts?.length !== 1 || !('text' in singleCandidate.content.parts[0])) + throw new Error(`Gemini: expected 1 text part, got ${singleCandidate.content.parts?.length}`); // expect a single text in the part let text = singleCandidate.content.parts[0].text || ''; diff --git a/src/modules/llms/server/openai/localai.wiretypes.ts b/src/modules/llms/server/openai/localai.wiretypes.ts index e81a09dcd..9bd873c35 100644 --- a/src/modules/llms/server/openai/localai.wiretypes.ts +++ b/src/modules/llms/server/openai/localai.wiretypes.ts @@ -4,7 +4,7 @@ import { z } from 'zod'; export const wireLocalAIModelsAvailableOutputSchema = z.array(z.object({ name: z.string(), // (e.g.) tinydream url: z.string(), // (e.g.) github:go-skynet/model-gallery/tinydream.yaml - license: z.string(), // (e.g.) other + license: z.string().optional(), // (e.g.) other gallery: z.object({ url: z.string(), // (e.g.) github:go-skynet/model-gallery/index.yaml name: z.string(), // (e.g.) 
model-gallery diff --git a/src/modules/llms/server/openai/models.data.ts b/src/modules/llms/server/openai/models.data.ts index 56d2283fa..13663fa80 100644 --- a/src/modules/llms/server/openai/models.data.ts +++ b/src/modules/llms/server/openai/models.data.ts @@ -286,19 +286,115 @@ export function localAIModelToModelDescription(modelId: string): ModelDescriptio // [Mistral] const _knownMistralChatModels: ManualMappings = [ + // Large + { + idPrefix: 'mistral-large-2402', + label: 'Mistral Large (2402)', + description: 'Top-tier reasoning for high-complexity tasks.', + contextWindow: 32768, + interfaces: [LLM_IF_OAI_Chat, LLM_IF_OAI_Fn], + isLatest: true, + }, + { + idPrefix: 'mistral-large-latest', + label: 'Mistral Large (latest)', + symLink: 'mistral-large-2402', + hidden: true, + // copied + description: 'Top-tier reasoning for high-complexity tasks.', + contextWindow: 32768, + interfaces: [LLM_IF_OAI_Chat, LLM_IF_OAI_Fn], + }, + { + idPrefix: 'mistral-large', + label: 'Mistral Large (?)', + description: 'Flagship model, with top-tier reasoning capabilities and language support (English, French, German, Italian, Spanish, and Code)', + contextWindow: 32768, + interfaces: [LLM_IF_OAI_Chat], + hidden: true, + }, + + // Medium - not updated on 2024-02-26 + { + idPrefix: 'mistral-medium-2312', + label: 'Mistral Medium (2312)', + description: 'Mistral internal prototype model.', + contextWindow: 32768, + interfaces: [LLM_IF_OAI_Chat], + }, + { + idPrefix: 'mistral-medium-latest', + label: 'Mistral Medium (latest)', + symLink: 'mistral-medium-2312', + hidden: true, + // copied + description: 'Mistral internal prototype model.', + contextWindow: 32768, + interfaces: [LLM_IF_OAI_Chat], + }, { idPrefix: 'mistral-medium', label: 'Mistral Medium', description: 'Mistral internal prototype model.', contextWindow: 32768, interfaces: [LLM_IF_OAI_Chat], + hidden: true, + }, + + // Small (8x7B) + { + idPrefix: 'mistral-small-2402', + label: 'Mistral Small (2402)', + description: 
'Optimized endpoint. Cost-efficient reasoning for low-latency workloads. Mistral Small outperforms Mixtral 8x7B and has lower latency', + contextWindow: 32768, + interfaces: [LLM_IF_OAI_Chat, LLM_IF_OAI_Fn], + isLatest: true, + }, + { + idPrefix: 'mistral-small-2312', + label: 'Mistral Small (2312)', + description: 'Aka open-mixtral-8x7b. Cost-efficient reasoning for low-latency workloads. Mistral Small outperforms Mixtral 8x7B and has lower latency', + contextWindow: 32768, + interfaces: [LLM_IF_OAI_Chat], + hidden: true, + }, + { + idPrefix: 'mistral-small-latest', + label: 'Mistral Small (latest)', + symLink: 'mistral-small-2402', + hidden: true, + // copied + description: 'Cost-efficient reasoning for low-latency workloads. Mistral Small outperforms Mixtral 8x7B and has lower latency', + contextWindow: 32768, + interfaces: [LLM_IF_OAI_Chat, LLM_IF_OAI_Fn], }, { idPrefix: 'mistral-small', label: 'Mistral Small', - description: 'Higher reasoning capabilities and more capabilities (English, French, German, Italian, Spanish, and Code)', + description: 'Cost-efficient reasoning for low-latency workloads.', contextWindow: 32768, interfaces: [LLM_IF_OAI_Chat], + hidden: true, + }, + // Open Mixtral (8x7B) + { + idPrefix: 'open-mixtral-8x7b', + label: 'Open Mixtral (8x7B)', + description: 'Mixtral 8x7B model, aka mistral-small-2312', + // symLink: 'mistral-small-2312', + // copied + contextWindow: 32768, + interfaces: [LLM_IF_OAI_Chat], + }, + + // Tiny (7B) + { + idPrefix: 'mistral-tiny-2312', + label: 'Mistral Tiny (2312)', + description: 'Aka open-mistral-7b. 
Used for large batch processing tasks where cost is a significant factor but reasoning capabilities are not crucial', + contextWindow: 32768, + interfaces: [LLM_IF_OAI_Chat], + hidden: true, }, { idPrefix: 'mistral-tiny', @@ -306,11 +402,24 @@ const _knownMistralChatModels: ManualMappings = [ description: 'Used for large batch processing tasks where cost is a significant factor but reasoning capabilities are not crucial', contextWindow: 32768, interfaces: [LLM_IF_OAI_Chat], + hidden: true, }, + // Open Mistral (7B) + { + idPrefix: 'open-mistral-7b', + label: 'Open Mistral (7B)', + description: 'Mistral 7B model, aka mistral-tiny-2312', + // symLink: 'mistral-tiny-2312', + // copied + contextWindow: 32768, + interfaces: [LLM_IF_OAI_Chat], + }, + + { idPrefix: 'mistral-embed', label: 'Mistral Embed', - description: 'Mistral Medium on Mistral', + description: 'State-of-the-art semantic for extracting representation of text extracts.', // output: 1024 dimensions maxCompletionTokens: 1024, // HACK - it's 1024 dimensions, but those are not 'completion tokens' contextWindow: 32768, // actually unknown, assumed from the other models @@ -319,6 +428,11 @@ const _knownMistralChatModels: ManualMappings = [ }, ]; + +const mistralModelFamilyOrder = [ + 'mistral-large', 'mistral-medium', 'mistral-small', 'open-mixtral-8x7b', 'mistral-tiny', 'open-mistral-7b', 'mistral-embed', '🔗', +]; + export function mistralModelToModelDescription(_model: unknown): ModelDescriptionSchema { const model = wireMistralModelsListOutputSchema.parse(_model); return fromManualMapping(_knownMistralChatModels, model.id, model.created, undefined, { @@ -332,11 +446,16 @@ export function mistralModelToModelDescription(_model: unknown): ModelDescriptio } export function mistralModelsSort(a: ModelDescriptionSchema, b: ModelDescriptionSchema): number { - if (a.hidden && !b.hidden) - return 1; - if (!a.hidden && b.hidden) - return -1; - return a.id.localeCompare(b.id); + const aPrefixIndex = 
mistralModelFamilyOrder.findIndex(prefix => a.id.startsWith(prefix)); + const bPrefixIndex = mistralModelFamilyOrder.findIndex(prefix => b.id.startsWith(prefix)); + if (aPrefixIndex !== -1 && bPrefixIndex !== -1) { + if (aPrefixIndex !== bPrefixIndex) + return aPrefixIndex - bPrefixIndex; + if (a.label.startsWith('🔗') && !b.label.startsWith('🔗')) return 1; + if (!a.label.startsWith('🔗') && b.label.startsWith('🔗')) return -1; + return b.label.localeCompare(a.label); + } + return aPrefixIndex !== -1 ? 1 : -1; } @@ -524,10 +643,11 @@ export function togetherAIModelsToModelDescriptions(wireModels: unknown): ModelD const _knownPerplexityChatModels: ModelDescriptionSchema[] = [ { id: 'codellama-34b-instruct', - label: 'Codellama 34B Instruct', - description: 'Code Llama is a collection of pretrained and fine-tuned generative text models. This model is designed for general code synthesis and understanding.', + label: 'Codellama 34B Instruct (deprecated)', + description: 'Will be removed on March 15th, 2024. Try Codellama 70B Instruct as a replacement.', contextWindow: 16384, interfaces: [LLM_IF_OAI_Chat], + hidden: true, }, { id: 'codellama-70b-instruct', @@ -538,67 +658,106 @@ const _knownPerplexityChatModels: ModelDescriptionSchema[] = [ }, { id: 'llama-2-70b-chat', - label: 'Llama 2 70B Chat', - description: 'Llama 2 is a collection of pretrained and fine-tuned generative text models.', + label: 'Llama 2 70B Chat (deprecated)', + description: 'Will be removed on March 15th, 2024. 
Try mixtral-8x7b-instruct as a replacement.', contextWindow: 4096, interfaces: [LLM_IF_OAI_Chat], + hidden: true, }, { id: 'mistral-7b-instruct', label: 'Mistral 7B Instruct', description: 'The Mistral-7B-Instruct-v0.1 Large Language Model (LLM) is a instruct fine-tuned version of the Mistral-7B-v0.1 generative text model using a variety of publicly available conversation datasets.', - contextWindow: 4096, + contextWindow: 16384, interfaces: [LLM_IF_OAI_Chat], }, { id: 'mixtral-8x7b-instruct', label: 'Mixtral 8x7B Instruct', description: 'The Mixtral-8x7B Large Language Model (LLM) is a pretrained generative Sparse Mixture of Experts.', - contextWindow: 4096, + contextWindow: 16384, interfaces: [LLM_IF_OAI_Chat], }, { id: 'pplx-7b-online', - label: 'Perplexity 7B Online', - description: 'Perplexity 7B Online', + label: 'Perplexity 7B Online (deprecated)', + description: 'Will be removed on March 15th, 2024. Try Sonar Small Online as a replacement.', contextWindow: 4096, interfaces: [LLM_IF_OAI_Chat], + hidden: true, }, { id: 'pplx-70b-online', - label: 'Perplexity 70B Online', - description: 'Perplexity 70B Online', + label: 'Perplexity 70B Online (deprecated)', + description: 'Will be removed on March 15th, 2024. Try Sonar Medium Online as a replacement.', contextWindow: 4096, interfaces: [LLM_IF_OAI_Chat], + hidden: true, }, { id: 'pplx-8x7b-online', - label: 'Perplexity 8x7B Online', - description: 'Perplexity 8x7B Online', + label: 'Perplexity 8x7B Online (deprecated)', + description: 'Will be removed on March 15th, 2024. Try Sonar Medium Online as a replacement.', contextWindow: 4096, interfaces: [LLM_IF_OAI_Chat], + hidden: true, }, { id: 'pplx-7b-chat', - label: 'Perplexity 7B Chat', - description: 'Perplexity 7B Chat', + label: 'Perplexity 7B Chat (deprecated)', + description: 'Will be removed on March 15th, 2024. 
Try Sonar Small Chat as a replacement.', contextWindow: 8192, interfaces: [LLM_IF_OAI_Chat], + hidden: true, }, { id: 'pplx-70b-chat', - label: 'Perplexity 70B Chat', - description: 'Perplexity 70B Chat', + label: 'Perplexity 70B Chat (deprecated)', + description: 'Will be removed on March 15th, 2024. Try Sonar Medium Chat as a replacement.', contextWindow: 4096, interfaces: [LLM_IF_OAI_Chat], + hidden: true, }, { id: 'pplx-8x7b-chat', - label: 'Perplexity 8x7B Chat', - description: 'Perplexity 8x7B Chat', + label: 'Perplexity 8x7B Chat (deprecated)', + description: 'Will be removed on March 15th, 2024. Try Sonar Medium Chat as a replacement.', + contextWindow: 4096, + interfaces: [LLM_IF_OAI_Chat], + hidden: true, + }, + { + id: 'sonar-small-chat', + label: 'Sonar Small Chat', + description: 'Sonar Small Chat', + contextWindow: 16384, + interfaces: [LLM_IF_OAI_Chat], + }, + { + id: 'sonar-medium-chat', + label: 'Sonar Medium Chat', + description: 'Sonar Medium Chat', + contextWindow: 16384, + interfaces: [LLM_IF_OAI_Chat], + }, + { + id: 'sonar-small-online', + label: 'Sonar Small Online 🌐', + description: 'Sonar Small Online', contextWindow: 4096, interfaces: [LLM_IF_OAI_Chat], }, + { + id: 'sonar-medium-online', + label: 'Sonar Medium Online 🌐', + description: 'Sonar Medium Online', + contextWindow: 4096, + interfaces: [LLM_IF_OAI_Chat], + }, +]; + +const perplexityAIModelFamilyOrder = [ + 'sonar-medium', 'sonar-small', 'mixtral', 'mistral', 'codellama', 'llama-2', '', ]; export function perplexityAIModelDescriptions() { @@ -606,6 +765,17 @@ export function perplexityAIModelDescriptions() { return _knownPerplexityChatModels; } +export function perplexityAIModelSort(a: ModelDescriptionSchema, b: ModelDescriptionSchema): number { + const aPrefixIndex = perplexityAIModelFamilyOrder.findIndex(prefix => a.id.startsWith(prefix)); + const bPrefixIndex = perplexityAIModelFamilyOrder.findIndex(prefix => b.id.startsWith(prefix)); + // sort by family + if (aPrefixIndex !== 
-1 && bPrefixIndex !== -1) + if (aPrefixIndex !== bPrefixIndex) + return aPrefixIndex - bPrefixIndex; + // then by reverse label + return b.label.localeCompare(a.label); +} + // Helpers diff --git a/src/modules/llms/server/openai/openai.router.ts b/src/modules/llms/server/openai/openai.router.ts index ff1db1399..2b46e5d35 100644 --- a/src/modules/llms/server/openai/openai.router.ts +++ b/src/modules/llms/server/openai/openai.router.ts @@ -11,7 +11,7 @@ import { Brand } from '~/common/app.config'; import { fixupHost } from '~/common/util/urlUtils'; import { OpenAIWire, WireOpenAICreateImageOutput, wireOpenAICreateImageOutputSchema, WireOpenAICreateImageRequest } from './openai.wiretypes'; -import { azureModelToModelDescription, lmStudioModelToModelDescription, localAIModelToModelDescription, mistralModelsSort, mistralModelToModelDescription, oobaboogaModelToModelDescription, openAIModelToModelDescription, openRouterModelFamilySortFn, openRouterModelToModelDescription, perplexityAIModelDescriptions, togetherAIModelsToModelDescriptions } from './models.data'; +import { azureModelToModelDescription, lmStudioModelToModelDescription, localAIModelToModelDescription, mistralModelsSort, mistralModelToModelDescription, oobaboogaModelToModelDescription, openAIModelToModelDescription, openRouterModelFamilySortFn, openRouterModelToModelDescription, perplexityAIModelDescriptions, perplexityAIModelSort, togetherAIModelsToModelDescriptions } from './models.data'; import { llmsChatGenerateWithFunctionsOutputSchema, llmsListModelsOutputSchema, ModelDescriptionSchema } from '../llm.server.types'; import { wilreLocalAIModelsApplyOutputSchema, wireLocalAIModelsAvailableOutputSchema, wireLocalAIModelsListOutputSchema } from './localai.wiretypes'; @@ -135,7 +135,7 @@ export const llmOpenAIRouter = createTRPCRouter({ // [Perplexity]: there's no API for models listing (upstream: https://docs.perplexity.ai/discuss/65cf7fd19ac9a5002e8f1341) if (access.dialect === 'perplexity') - return { 
models: perplexityAIModelDescriptions() }; + return { models: perplexityAIModelDescriptions().sort(perplexityAIModelSort) }; // [non-Azure]: fetch openAI-style for all but Azure (will be then used in each dialect) @@ -370,6 +370,7 @@ export const llmOpenAIRouter = createTRPCRouter({ const DEFAULT_HELICONE_OPENAI_HOST = 'oai.hconeai.com'; +const DEFAULT_LOCALAI_HOST = 'http://127.0.0.1:8080'; const DEFAULT_MISTRAL_HOST = 'https://api.mistral.ai'; const DEFAULT_OPENAI_HOST = 'api.openai.com'; const DEFAULT_OPENROUTER_HOST = 'https://openrouter.ai/api'; @@ -405,7 +406,6 @@ export function openAIAccess(access: OpenAIAccessSchema, modelRefId: string | nu case 'lmstudio': - case 'localai': case 'oobabooga': case 'openai': const oaiKey = access.oaiKey || env.OPENAI_API_KEY || ''; @@ -459,6 +459,18 @@ export function openAIAccess(access: OpenAIAccessSchema, modelRefId: string | nu }; + case 'localai': + const localAIKey = access.oaiKey || env.LOCALAI_API_KEY || ''; + let localAIHost = fixupHost(access.oaiHost || env.LOCALAI_API_HOST || DEFAULT_LOCALAI_HOST, apiPath); + return { + headers: { + 'Content-Type': 'application/json', + ...(localAIKey && { Authorization: `Bearer ${localAIKey}` }), + }, + url: localAIHost + apiPath, + }; + + case 'mistral': // https://docs.mistral.ai/platform/client const mistralKey = access.oaiKey || env.MISTRAL_API_KEY || ''; diff --git a/src/modules/llms/vendors/IModelVendor.ts b/src/modules/llms/vendors/IModelVendor.ts index d0ddb1110..d1f4dc35f 100644 --- a/src/modules/llms/vendors/IModelVendor.ts +++ b/src/modules/llms/vendors/IModelVendor.ts @@ -15,7 +15,7 @@ export interface IModelVendor boolean; + readonly hasBackendCap?: () => boolean; // used to show a 'geen checkmark' in the list of vendors when adding sources // components readonly Icon: React.ComponentType | string; @@ -26,7 +26,7 @@ export interface IModelVendor): TAccess; diff --git a/src/modules/llms/vendors/localai/LocalAIAdmin.tsx 
b/src/modules/llms/vendors/localai/LocalAIAdmin.tsx index 412c80a05..7d0e3c524 100644 --- a/src/modules/llms/vendors/localai/LocalAIAdmin.tsx +++ b/src/modules/llms/vendors/localai/LocalAIAdmin.tsx @@ -1,7 +1,7 @@ import * as React from 'react'; import { Alert, Box, Button, Card, CircularProgress, IconButton, LinearProgress, List, ListItem, Switch, Typography } from '@mui/joy'; -import CloseIcon from '@mui/icons-material/Close'; +import CloseRoundedIcon from '@mui/icons-material/CloseRounded'; import { ExpanderAccordion } from '~/common/components/ExpanderAccordion'; import { GoodModal } from '~/common/components/GoodModal'; @@ -102,7 +102,7 @@ function ModelInstallPanel(props: { access: OpenAIAccessSchema, modelName: strin Installing {props.modelName} from the {props.galleryName} setHideSelf(true)} sx={{ ml: 'auto' }}> - + diff --git a/src/modules/llms/vendors/localai/LocalAISourceSetup.tsx b/src/modules/llms/vendors/localai/LocalAISourceSetup.tsx index 2457d9ade..a8a96c45a 100644 --- a/src/modules/llms/vendors/localai/LocalAISourceSetup.tsx +++ b/src/modules/llms/vendors/localai/LocalAISourceSetup.tsx @@ -4,6 +4,8 @@ import { z } from 'zod'; import { Button, Typography } from '@mui/joy'; import CheckBoxOutlinedIcon from '@mui/icons-material/CheckBoxOutlined'; +import { backendCaps } from '~/modules/backend/state-backend'; + import { ExpanderAccordion } from '~/common/components/ExpanderAccordion'; import { FormInputKey } from '~/common/components/forms/FormInputKey'; import { InlineError } from '~/common/components/InlineError'; @@ -18,22 +20,27 @@ import { LocalAIAdmin } from './LocalAIAdmin'; import { ModelVendorLocalAI } from './localai.vendor'; +const localAIHostSchema = z.string().url().startsWith('http'); + + export function LocalAISourceSetup(props: { sourceId: DModelSourceId }) { // state const [adminOpen, setAdminOpen] = React.useState(false); // external state + const { hasLlmLocalAIHost: backendHasHost, hasLlmLocalAIKey: backendHasKey } = 
backendCaps(); const { source, access, updateSetup } = useSourceSetup(props.sourceId, ModelVendorLocalAI); // derived state - const { oaiHost } = access; + const { oaiHost: localAIHost, oaiKey: localAIKey } = access; - // validate if url is a well formed proper url with zod - const urlSchema = z.string().url().startsWith('http'); - const { success: isValidHost } = urlSchema.safeParse(oaiHost); - const shallFetchSucceed = isValidHost; + // host validation + const userHostRequired = !backendHasHost; + const userHostValid = localAIHost.length >= 6 && localAIHostSchema.safeParse(localAIHost).success; + const userHostError = !!localAIHost && !userHostValid; + const shallFetchSucceed = localAIHost ? userHostValid : backendHasHost; // fetch models - the OpenAI way const { isFetching, refetch, isError, error } = @@ -69,11 +76,21 @@ export function LocalAISourceSetup(props: { sourceId: DModelSourceId }) { Learn more} + id='localai-host' label='LocalAI URL' placeholder='e.g., http://127.0.0.1:8080' - value={oaiHost} onChange={value => updateSetup({ oaiHost: value })} + noKey + required={userHostRequired} + isError={userHostError} + rightLabel={backendHasHost ? 
'✔️ already set in server' : Learn more} + value={localAIHost} onChange={value => updateSetup({ localAIHost: value })} + /> + + updateSetup({ localAIKey: value })} /> = { @@ -19,6 +21,10 @@ export const ModelVendorLocalAI: IModelVendor { + const { hasLlmLocalAIHost, hasLlmLocalAIKey } = backendCaps(); + return hasLlmLocalAIHost || hasLlmLocalAIKey; + }, // components Icon: LocalAIIcon, @@ -27,13 +33,14 @@ export const ModelVendorLocalAI: IModelVendor ({ - oaiHost: 'http://localhost:8080', + localAIHost: '', + localAIKey: '', }), getTransportAccess: (partialSetup) => ({ dialect: 'localai', - oaiKey: '', + oaiKey: partialSetup?.localAIKey || '', oaiOrg: '', - oaiHost: partialSetup?.oaiHost || '', + oaiHost: partialSetup?.localAIHost || '', heliKey: '', moderationCheck: false, }), diff --git a/src/modules/llms/vendors/mistral/MistralSourceSetup.tsx b/src/modules/llms/vendors/mistral/MistralSourceSetup.tsx index 27d07f396..73ac8e140 100644 --- a/src/modules/llms/vendors/mistral/MistralSourceSetup.tsx +++ b/src/modules/llms/vendors/mistral/MistralSourceSetup.tsx @@ -1,5 +1,7 @@ import * as React from 'react'; +import { Typography } from '@mui/joy'; + import { FormInputKey } from '~/common/components/forms/FormInputKey'; import { InlineError } from '~/common/components/InlineError'; import { Link } from '~/common/components/Link'; @@ -45,6 +47,11 @@ export function MistralSourceSetup(props: { sourceId: DModelSourceId }) { placeholder='...' /> + + In order of capabilities we have Large, Medium, Small (Open 8x7B = Small 2312) and Tiny (Open 7B = Tiny 2312) models. + Note the elegance of the numbers, representing the Year and Month or release (YYMM). 
+ + {isError && } diff --git a/src/modules/llms/vendors/perplexity/PerplexitySourceSetup.tsx b/src/modules/llms/vendors/perplexity/PerplexitySourceSetup.tsx index 82b60e4d0..804f53d88 100644 --- a/src/modules/llms/vendors/perplexity/PerplexitySourceSetup.tsx +++ b/src/modules/llms/vendors/perplexity/PerplexitySourceSetup.tsx @@ -27,7 +27,7 @@ export function PerplexitySourceSetup(props: { sourceId: DModelSourceId }) { // derived state const { oaiKey: perplexityKey } = access; - // validate if url is a well formed proper url with zod + // key validation const needsUserKey = !ModelVendorPerplexity.hasBackendCap?.(); const shallFetchSucceed = !needsUserKey || (!!perplexityKey && sourceSetupValid); const showKeyError = !!perplexityKey && !sourceSetupValid; @@ -53,6 +53,7 @@ export function PerplexitySourceSetup(props: { sourceId: DModelSourceId }) { The Perplexity API offers inference as a service for a variety of models. See the Perplexity AI website for more information. + 🌐 Online models are quite unique as they can make use of internet data. 
diff --git a/src/modules/persona/pmix/pmix.ts b/src/modules/persona/pmix/pmix.ts index e8e29c2fd..416fd5c57 100644 --- a/src/modules/persona/pmix/pmix.ts +++ b/src/modules/persona/pmix/pmix.ts @@ -3,6 +3,7 @@ import { DLLMId, getKnowledgeMapCutoff } from '~/modules/llms/store-llms'; /*type Variables = | '{{Today}}' | '{{Cutoff}}' + | '{{PreferTables}}' | '{{RenderMermaid}}' | '{{RenderPlantUML}}' | '{{RenderSVG}}' @@ -22,6 +23,7 @@ const variableResolvers: { [key in Variables]: (context: VariableResolverContext return getKnowledgeMapCutoff(context.assistantLlmId) || ''; }, + '{{PreferTables}}': () => 'Data presentation: prefer tables (auto-columns)', '{{RenderMermaid}}': () => 'Mermaid rendering: Enabled', '{{RenderPlantUML}}': () => 'PlantUML rendering: Enabled', '{{RenderSVG}}': () => 'SVG rendering: Enabled', @@ -63,6 +65,9 @@ export function bareBonesPromptMixer(_template: string, assistantLlmId: DLLMId | mixed = mixed.replaceAll('{{LocaleNow}}', formattedDateTime /*`${formattedDateTime} (${userTimezone})`*/); } + // {{Prefer...}} + mixed = mixed.replace('{{PreferTables}}', 'Data presentation: prefer tables (auto-columns)'); + // {{Render...}} mixed = mixed.replace('{{RenderMermaid}}', 'Mermaid rendering: Enabled'); mixed = mixed.replace('{{RenderPlantUML}}', 'PlantUML rendering: Enabled'); diff --git a/src/modules/t2i/dalle/DallESettings.tsx b/src/modules/t2i/dalle/DallESettings.tsx index 6021c5b05..b7db1d823 100644 --- a/src/modules/t2i/dalle/DallESettings.tsx +++ b/src/modules/t2i/dalle/DallESettings.tsx @@ -2,7 +2,7 @@ import * as React from 'react'; import { shallow } from 'zustand/shallow'; import { FormControl, Option, Select, Switch, Typography } from '@mui/joy'; -import WarningIcon from '@mui/icons-material/Warning'; +import WarningRoundedIcon from '@mui/icons-material/WarningRounded'; import { FormLabelStart } from '~/common/components/forms/FormLabelStart'; import { FormRadioControl } from '~/common/components/forms/FormRadioControl'; @@ -75,7 +75,7 
@@ export function DallESettings() {