Commit

Merge branch 'master' into 2670-feat-can-the-font-size-of-the-chat-input-box-be-increased
timothycarambat authored Nov 21, 2024
2 parents 48a78b9 + 304796e commit 6af16df
Showing 6 changed files with 69 additions and 4 deletions.
3 changes: 2 additions & 1 deletion docker/.env.example
@@ -164,6 +164,7 @@ GID='1000'
# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192
# EMBEDDING_BASE_PATH='http://127.0.0.1:4000'
# GENERIC_OPEN_AI_EMBEDDING_API_KEY='sk-123abc'
# GENERIC_OPEN_AI_EMBEDDING_MAX_CONCURRENT_CHUNKS=500

###########################################
######## Vector Database Selection ########
@@ -299,4 +300,4 @@ GID='1000'

# Enable simple SSO passthrough to pre-authenticate users from a third party service.
# See https://docs.anythingllm.com/configuration#simple-sso-passthrough for more information.
# SIMPLE_SSO_ENABLED=1
# SIMPLE_SSO_ENABLED=1
44 changes: 44 additions & 0 deletions frontend GenericOpenAiEmbeddingOptions component
@@ -1,4 +1,8 @@
import React, { useState } from "react";
import { CaretDown, CaretUp } from "@phosphor-icons/react";

export default function GenericOpenAiEmbeddingOptions({ settings }) {
  const [showAdvancedControls, setShowAdvancedControls] = useState(false);
  return (
    <div className="w-full flex flex-col gap-y-7">
      <div className="w-full flex items-center gap-[36px] mt-1.5 flex-wrap">
@@ -69,6 +73,46 @@ export default function GenericOpenAiEmbeddingOptions({ settings }) {
          />
        </div>
      </div>
      <div className="flex justify-start mt-4">
        <button
          onClick={(e) => {
            e.preventDefault();
            setShowAdvancedControls(!showAdvancedControls);
          }}
          className="text-white hover:text-white/70 flex items-center text-sm"
        >
          {showAdvancedControls ? "Hide" : "Show"} advanced settings
          {showAdvancedControls ? (
            <CaretUp size={14} className="ml-1" />
          ) : (
            <CaretDown size={14} className="ml-1" />
          )}
        </button>
      </div>
      <div hidden={!showAdvancedControls}>
        <div className="w-full flex items-start gap-4">
          <div className="flex flex-col w-60">
            <div className="flex flex-col gap-y-1 mb-4">
              <label className="text-white text-sm font-semibold flex items-center gap-x-2">
                Max concurrent Chunks
                <p className="!text-xs !italic !font-thin">optional</p>
              </label>
            </div>
            <input
              type="number"
              name="GenericOpenAiEmbeddingMaxConcurrentChunks"
              className="bg-theme-settings-input-bg text-white placeholder:text-theme-settings-input-placeholder text-sm rounded-lg focus:outline-primary-button active:outline-primary-button outline-none block w-full p-2.5"
              placeholder="500"
              min={1}
              onScroll={(e) => e.target.blur()}
              defaultValue={settings?.GenericOpenAiEmbeddingMaxConcurrentChunks}
              required={false}
              autoComplete="off"
              spellCheck={false}
            />
          </div>
        </div>
      </div>
    </div>
  );
}
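
The new field is optional and keyed by its name attribute, so it rides along with the rest of the embedder preferences when the settings form is saved. As a rough sketch only (the submit handler itself is not part of this commit, and buildEmbeddingSettingsPayload plus the FormData-based collection are assumptions), the value would be read from the form and coerced to a number before being sent to the server:

// Hypothetical helper: collect the optional max-concurrent-chunks value from the
// settings form. The field name matches the input's `name` attribute above and the
// env key mapping added to updateENV.js later in this commit.
function buildEmbeddingSettingsPayload(formElement) {
  const form = new FormData(formElement);
  const raw = form.get("GenericOpenAiEmbeddingMaxConcurrentChunks");

  const payload = {};
  // The input is optional; only forward it when the user actually typed a value.
  if (raw !== null && raw !== "") {
    payload.GenericOpenAiEmbeddingMaxConcurrentChunks = Number(raw);
  }
  return payload;
}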
1 change: 1 addition & 0 deletions server/.env.example
@@ -154,6 +154,7 @@ SIG_SALT='salt' # Please generate random string at least 32 chars long.
# EMBEDDING_MODEL_MAX_CHUNK_LENGTH=8192
# EMBEDDING_BASE_PATH='http://127.0.0.1:4000'
# GENERIC_OPEN_AI_EMBEDDING_API_KEY='sk-123abc'
# GENERIC_OPEN_AI_EMBEDDING_MAX_CONCURRENT_CHUNKS=500

###########################################
######## Vector Database Selection ########
2 changes: 2 additions & 0 deletions server/models/systemSettings.js
@@ -193,6 +193,8 @@ const SystemSettings = {
        process.env.EMBEDDING_MODEL_MAX_CHUNK_LENGTH,
      GenericOpenAiEmbeddingApiKey:
        !!process.env.GENERIC_OPEN_AI_EMBEDDING_API_KEY,
      GenericOpenAiEmbeddingMaxConcurrentChunks:
        process.env.GENERIC_OPEN_AI_EMBEDDING_MAX_CONCURRENT_CHUNKS || 500,

      // --------------------------------------------------------
      // VectorDB Provider Selection Settings & Configs
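
For display purposes the settings model simply echoes the env value back to the UI, falling back to 500 when nothing is configured. A small standalone illustration of that || fallback — note that environment variables always arrive as strings, while the default is a number:

// Env vars are strings when set; the || fallback only kicks in when unset or empty.
process.env.GENERIC_OPEN_AI_EMBEDDING_MAX_CONCURRENT_CHUNKS = "250";
console.log(process.env.GENERIC_OPEN_AI_EMBEDDING_MAX_CONCURRENT_CHUNKS || 500); // "250" (string)

delete process.env.GENERIC_OPEN_AI_EMBEDDING_MAX_CONCURRENT_CHUNKS;
console.log(process.env.GENERIC_OPEN_AI_EMBEDDING_MAX_CONCURRENT_CHUNKS || 500); // 500 (number)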
19 changes: 16 additions & 3 deletions server/utils/EmbeddingEngines/genericOpenAi/index.js
@@ -14,13 +14,26 @@ class GenericOpenAiEmbedder {
    });
    this.model = process.env.EMBEDDING_MODEL_PREF ?? null;

    // Limit of how many strings we can process in a single pass to stay with resource or network limits
    this.maxConcurrentChunks = 500;

    // this.maxConcurrentChunks is delegated to the getter below.
    // Refer to your specific model and provider you use this class with to determine a valid maxChunkLength
    this.embeddingMaxChunkLength = 8_191;
  }

  /**
   * returns the `GENERIC_OPEN_AI_EMBEDDING_MAX_CONCURRENT_CHUNKS` env variable as a number
   * or 500 if the env variable is not set or is not a number.
   * @returns {number}
   */
  get maxConcurrentChunks() {
    if (!process.env.GENERIC_OPEN_AI_EMBEDDING_MAX_CONCURRENT_CHUNKS)
      return 500;
    if (
      isNaN(Number(process.env.GENERIC_OPEN_AI_EMBEDDING_MAX_CONCURRENT_CHUNKS))
    )
      return 500;
    return Number(process.env.GENERIC_OPEN_AI_EMBEDDING_MAX_CONCURRENT_CHUNKS);
  }

  async embedTextInput(textInput) {
    const result = await this.embedChunks(
      Array.isArray(textInput) ? textInput : [textInput]
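
Turning the hard-coded limit into a getter means operators can raise or lower the batch size per provider without touching code. How the limit is consumed is not visible in this hunk; as a hypothetical sketch (requestEmbeddings stands in for the real OpenAI-compatible embeddings call, and toBatches is an illustrative helper), embedChunks-style code would slice the input into groups of at most maxConcurrentChunks:

// Illustrative only: split an array into batches of at most `size` items.
function toBatches(items, size) {
  const batches = [];
  for (let i = 0; i < items.length; i += size) {
    batches.push(items.slice(i, i + size));
  }
  return batches;
}

// Each request carries no more than maxConcurrentChunks strings, keeping a single
// call within the provider's resource or network limits.
async function embedWithLimit(textChunks, maxConcurrentChunks, requestEmbeddings) {
  const vectors = [];
  for (const batch of toBatches(textChunks, maxConcurrentChunks)) {
    vectors.push(...(await requestEmbeddings(batch)));
  }
  return vectors;
}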
4 changes: 4 additions & 0 deletions server/utils/helpers/updateENV.js
@@ -267,6 +267,10 @@ const KEY_MAPPING = {
    envKey: "GENERIC_OPEN_AI_EMBEDDING_API_KEY",
    checks: [],
  },
  GenericOpenAiEmbeddingMaxConcurrentChunks: {
    envKey: "GENERIC_OPEN_AI_EMBEDDING_MAX_CONCURRENT_CHUNKS",
    checks: [nonZero],
  },

  // Vector Database Selection Settings
  VectorDB: {
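
The checks array gates what is allowed to reach the env file; nonZero rejects values that would leave the embedder with an unusable batch size. The real nonZero helper is defined elsewhere in updateENV.js and is not part of this hunk, so the version below is only an assumed illustration of the pattern: each check returns an error string (or null), and the value is written to envKey only when every check passes.

// Assumed shape of a check: return an error message, or null when the value is fine.
const nonZero = (value) =>
  Number(value) > 0 ? null : "Value must be greater than zero.";

// Hypothetical application of a KEY_MAPPING entry like the one added above.
function validateSetting(keyMapping, key, value) {
  const entry = keyMapping[key];
  if (!entry) return { error: `${key} is not a supported setting.` };

  for (const check of entry.checks) {
    const error = check(value);
    if (error) return { error };
  }
  // All checks passed; the caller would persist `value` under entry.envKey.
  return { envKey: entry.envKey, value };
}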
