fix: update llamaindex, use 127.0.0.1 for ollama as default
marcusschiesser committed May 22, 2024
1 parent 0950cb9 commit f5da662
Showing 7 changed files with 6 additions and 8 deletions.
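All three template changes below apply the same fallback pattern: read OLLAMA_BASE_URL from the environment and otherwise default to the IPv4 loopback address instead of localhost. A minimal TypeScript sketch of the pattern; the motivation in the comment is an inference, not stated in the commit:

// The pattern this commit applies across the templates. Likely motivation
// (an assumption): newer Node.js versions can resolve "localhost" to the
// IPv6 address ::1, while Ollama serves on the IPv4 loopback 127.0.0.1:11434
// by default, so the IPv4 address is the safer fallback.
const ollamaBaseUrl: string =
  process.env.OLLAMA_BASE_URL ?? "http://127.0.0.1:11434";

console.log(`Connecting to Ollama at ${ollamaBaseUrl}`);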
2 changes: 1 addition & 1 deletion helpers/env-variables.ts
@@ -224,7 +224,7 @@ const getModelEnvs = (modelConfig: ModelConfig): EnvVar[] => {
           {
             name: "OLLAMA_BASE_URL",
             description:
-              "The base URL for the Ollama API. Eg: http://localhost:11434",
+              "The base URL for the Ollama API. Eg: http://127.0.0.1:11434",
           },
         ]
       : []),
2 changes: 0 additions & 2 deletions helpers/proxy.ts
@@ -4,7 +4,5 @@ export async function initializeGlobalAgent() {
     /* Dynamically import global-agent/bootstrap */
     await import("global-agent/bootstrap");
     console.log("Proxy enabled via global-agent.");
-  } else {
-    console.log("No proxy configuration found. Continuing without proxy.");
   }
 }
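For context, global-agent/bootstrap installs a process-wide proxy driven by environment variables. A hedged usage sketch: GLOBAL_AGENT_HTTP_PROXY is global-agent's own convention, the proxy URL is a placeholder, and the condition guarding the import is truncated in the hunk above:

import { initializeGlobalAgent } from "./helpers/proxy";

// Hypothetical setup: global-agent reads GLOBAL_AGENT_HTTP_PROXY when its
// bootstrap module is imported inside initializeGlobalAgent().
process.env.GLOBAL_AGENT_HTTP_PROXY = "http://proxy.example.com:8080";

await initializeGlobalAgent(); // logs "Proxy enabled via global-agent." when a proxy is configured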
2 changes: 1 addition & 1 deletion templates/types/streaming/express/package.json
@@ -14,7 +14,7 @@
     "cors": "^2.8.5",
     "dotenv": "^16.3.1",
     "express": "^4.18.2",
-    "llamaindex": "0.3.9",
+    "llamaindex": "0.3.13",
     "pdf2json": "3.0.5",
     "ajv": "^8.12.0"
   },
2 changes: 1 addition & 1 deletion
@@ -57,7 +57,7 @@ function initOpenAI() {
 
 function initOllama() {
   const config = {
-    host: process.env.OLLAMA_BASE_URL ?? "http://localhost:11434",
+    host: process.env.OLLAMA_BASE_URL ?? "http://127.0.0.1:11434",
   };
 
   Settings.llm = new Ollama({
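The option object passed to Ollama is truncated in the hunk above; a hedged reconstruction of initOllama, assuming the truncated lines pass config through to the constructor (the model option mirrors the sibling template hunk further down):

import { Ollama, Settings } from "llamaindex";

// Hedged reconstruction; the `config` pass-through beyond what the diff
// shows is an assumption. MODEL is expected to name an installed Ollama model.
function initOllama() {
  const config = {
    host: process.env.OLLAMA_BASE_URL ?? "http://127.0.0.1:11434",
  };

  Settings.llm = new Ollama({
    model: process.env.MODEL ?? "",
    config,
  });
}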
2 changes: 1 addition & 1 deletion templates/types/streaming/fastapi/app/settings.py
@@ -23,7 +23,7 @@ def init_ollama():
     from llama_index.llms.ollama import Ollama
     from llama_index.embeddings.ollama import OllamaEmbedding
 
-    base_url = os.getenv("OLLAMA_BASE_URL") or "http://localhost:11434"
+    base_url = os.getenv("OLLAMA_BASE_URL") or "http://127.0.0.1:11434"
     Settings.embed_model = OllamaEmbedding(
         base_url=base_url,
         model_name=os.getenv("EMBEDDING_MODEL"),
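The Python template updates both the LLM and the embedding model. The embedding counterpart for the TypeScript templates is not shown in the hunks here; a hedged sketch, assuming llamaindex's OllamaEmbedding accepts the same config shape:

import { OllamaEmbedding, Settings } from "llamaindex";

// Hedged sketch; the OllamaEmbedding option shape and the EMBEDDING_MODEL
// env var (taken from the Python hunk above) are assumptions for TypeScript.
const config = {
  host: process.env.OLLAMA_BASE_URL ?? "http://127.0.0.1:11434",
};
Settings.embedModel = new OllamaEmbedding({
  model: process.env.EMBEDDING_MODEL ?? "",
  config,
});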
2 changes: 1 addition & 1 deletion
@@ -57,7 +57,7 @@ function initOpenAI() {
 
 function initOllama() {
   const config = {
-    host: process.env.OLLAMA_BASE_URL ?? "http://localhost:11434",
+    host: process.env.OLLAMA_BASE_URL ?? "http://127.0.0.1:11434",
   };
   Settings.llm = new Ollama({
     model: process.env.MODEL ?? "",
2 changes: 1 addition & 1 deletion templates/types/streaming/nextjs/package.json
@@ -18,7 +18,7 @@
     "class-variance-authority": "^0.7.0",
     "clsx": "^2.1.1",
     "dotenv": "^16.3.1",
-    "llamaindex": "0.3.8",
+    "llamaindex": "0.3.13",
     "lucide-react": "^0.294.0",
     "next": "^14.0.3",
     "pdf2json": "3.0.5",
