Skip to content

Commit

Permalink
fix: 修复豆包智能体对话报错的缺陷
Browse files Browse the repository at this point in the history
--bug=1047570 --user=王孝刚 【github#1374】【大模型】豆包模型对接企业微信,可以正常询问问题。对接豆包智能体大模型,在企业微信询问问题,后台日志报错 https://www.tapd.cn/57709429/s/1595490
  • Loading branch information
wxg0103 committed Oct 22, 2024
1 parent 723b8b5 commit 60cb13d
Showing 1 changed file with 17 additions and 4 deletions.
21 changes: 17 additions & 4 deletions apps/setting/models_provider/impl/base_chat_open_ai.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
# coding=utf-8

from typing import List, Dict, Optional, Any, Iterator, Type, cast
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import LanguageModelInput
from langchain_core.messages import BaseMessage, AIMessageChunk, BaseMessageChunk
from langchain_core.messages import BaseMessage, get_buffer_string
from langchain_core.outputs import ChatGenerationChunk, ChatGeneration
from langchain_core.runnables import RunnableConfig, ensure_config
from langchain_openai import ChatOpenAI
from langchain_openai.chat_models.base import _convert_delta_to_message_chunk

from common.config.tokenizer_manage_config import TokenizerManage


class BaseChatOpenAI(ChatOpenAI):
Expand All @@ -17,9 +17,21 @@ def get_last_generation_info(self) -> Optional[Dict[str, Any]]:
return self.usage_metadata

def get_num_tokens_from_messages(self, messages: List[BaseMessage]) -> int:
    """Return the number of input (prompt) tokens for *messages*.

    Prefers the exact count the provider reported in ``usage_metadata``
    (populated after an ``invoke``/stream call). When no usage has been
    recorded yet, falls back to the OpenAI tiktoken-based counter; if that
    raises — e.g. Doubao agent endpoints expose model names tiktoken cannot
    map — a local tokenizer gives a best-effort estimate instead of erroring.

    :param messages: chat messages whose combined token count is wanted.
    :return: token count (exact when provider-reported, otherwise estimated).
    """
    if self.usage_metadata is None or self.usage_metadata == {}:
        try:
            # tiktoken-backed count; may raise for unknown model names.
            return super().get_num_tokens_from_messages(messages)
        except Exception:
            # Deliberately broad: any failure here must degrade to an
            # estimate rather than break the chat flow (bug #1374).
            tokenizer = TokenizerManage.get_tokenizer()
            return sum(len(tokenizer.encode(get_buffer_string([m]))) for m in messages)
    return self.usage_metadata.get('input_tokens', 0)

def get_num_tokens(self, text: str) -> int:
    """Return the number of output (completion) tokens for *text*.

    Prefers the provider-reported ``output_tokens`` from the last generation
    when usage metadata is available. Otherwise falls back to the OpenAI
    tiktoken-based counter, and — if that fails for a model name tiktoken
    does not recognize (e.g. Doubao agents) — to a local tokenizer estimate.

    :param text: generated text whose token count is wanted.
    :return: token count (exact when provider-reported, otherwise estimated).
    """
    if self.usage_metadata is None or self.usage_metadata == {}:
        try:
            # tiktoken-backed count; may raise for unknown model names.
            return super().get_num_tokens(text)
        except Exception:
            # Broad on purpose: counting must never break the request
            # (same fallback policy as get_num_tokens_from_messages).
            tokenizer = TokenizerManage.get_tokenizer()
            return len(tokenizer.encode(text))
    return self.get_last_generation_info().get('output_tokens', 0)

def _stream(
Expand Down Expand Up @@ -54,5 +66,6 @@ def invoke(
**kwargs,
).generations[0][0],
).message
self.usage_metadata = chat_result.response_metadata['token_usage'] if 'token_usage' in chat_result.response_metadata else chat_result.usage_metadata
self.usage_metadata = chat_result.response_metadata[
'token_usage'] if 'token_usage' in chat_result.response_metadata else chat_result.usage_metadata
return chat_result

0 comments on commit 60cb13d

Please sign in to comment.