Skip to content

Commit

Permalink
Fix: unexpected keyword argument error.
Browse files Browse the repository at this point in the history
  • Loading branch information
LarFii committed Dec 5, 2024
1 parent c352eb6 commit c31a265
Show file tree
Hide file tree
Showing 8 changed files with 8 additions and 8 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ print(rag.query("What are the top themes in this story?", param=QueryParam(mode=
* LightRAG also supports OpenAI-like chat/embeddings APIs:
```python
async def llm_model_func(
prompt, system_prompt=None, history_messages=[], **kwargs
prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
return await openai_complete_if_cache(
"solar-mini",
Expand Down
2 changes: 1 addition & 1 deletion examples/lightrag_api_openai_compatible_demo.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@


async def llm_model_func(
prompt, system_prompt=None, history_messages=[], **kwargs
prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
return await openai_complete_if_cache(
LLM_MODEL,
Expand Down
2 changes: 1 addition & 1 deletion examples/lightrag_api_oracle_demo.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@


async def llm_model_func(
prompt, system_prompt=None, history_messages=[], **kwargs
prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
return await openai_complete_if_cache(
LLM_MODEL,
Expand Down
2 changes: 1 addition & 1 deletion examples/lightrag_azure_openai_demo.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@


async def llm_model_func(
prompt, system_prompt=None, history_messages=[], **kwargs
prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
client = AzureOpenAI(
api_key=AZURE_OPENAI_API_KEY,
Expand Down
2 changes: 1 addition & 1 deletion examples/lightrag_lmdeploy_demo.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@


async def lmdeploy_model_complete(
prompt=None, system_prompt=None, history_messages=[], **kwargs
prompt=None, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
model_name = kwargs["hashing_kv"].global_config["llm_model_name"]
return await lmdeploy_model_if_cache(
Expand Down
2 changes: 1 addition & 1 deletion examples/lightrag_openai_compatible_demo.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@


async def llm_model_func(
prompt, system_prompt=None, history_messages=[], **kwargs
prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
return await openai_complete_if_cache(
"solar-mini",
Expand Down
2 changes: 1 addition & 1 deletion examples/lightrag_oracle_demo.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@


async def llm_model_func(
prompt, system_prompt=None, history_messages=[], **kwargs
prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
return await openai_complete_if_cache(
CHATMODEL,
Expand Down
2 changes: 1 addition & 1 deletion examples/lightrag_siliconcloud_demo.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@


async def llm_model_func(
prompt, system_prompt=None, history_messages=[], **kwargs
prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
return await openai_complete_if_cache(
"Qwen/Qwen2.5-7B-Instruct",
Expand Down

0 comments on commit c31a265

Please sign in to comment.