
Commit

fay evolution
1. Added comments to stream_manager.py;
2. Updated the gevent package version; #146
3. Fixed the circular import between booter and core; #148
4. The Fay GUI now supports streaming display, including the content of <think> tags;
5. Fixed QA replies not being synthesized into speech in gpt_stream mode;
6. Improved how streaming deepseek, the Fay GUI, and TTS handle think content;
7. Refactored part of the interaction logic in fay_core.py;
8. Improved the robot expression display logic;
9. Renamed the digital-human driver interface identifier "Unreal" to "human" to avoid confusion.
xszyou committed Feb 27, 2025
1 parent d70547d commit 7a51270
Showing 9 changed files with 245 additions and 111 deletions.
4 changes: 2 additions & 2 deletions asr/ali_nls.py
@@ -95,15 +95,15 @@ def on_message(self, ws, message):
if wsa_server.get_web_instance().is_connected(self.username):
wsa_server.get_web_instance().add_cmd({"panelMsg": self.finalResults, "Username" : self.username})
if wsa_server.get_instance().is_connected(self.username):
content = {'Topic': 'Unreal', 'Data': {'Key': 'log', 'Value': self.finalResults}, 'Username' : self.username}
content = {'Topic': 'human', 'Data': {'Key': 'log', 'Value': self.finalResults}, 'Username' : self.username}
wsa_server.get_instance().add_cmd(content)
ws.close()#TODO
elif name == 'TranscriptionResultChanged':
self.finalResults = data['payload']['result']
if wsa_server.get_web_instance().is_connected(self.username):
wsa_server.get_web_instance().add_cmd({"panelMsg": self.finalResults, "Username" : self.username})
if wsa_server.get_instance().is_connected(self.username):
content = {'Topic': 'Unreal', 'Data': {'Key': 'log', 'Value': self.finalResults}, 'Username' : self.username}
content = {'Topic': 'human', 'Data': {'Key': 'log', 'Value': self.finalResults}, 'Username' : self.username}
wsa_server.get_instance().add_cmd(content)

except Exception as e:
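
The rename recurs throughout this commit: these call sites all push the same kind of envelope to the digital-human WebSocket channel, and only the Topic value changes from 'Unreal' to 'human'. A minimal sketch of that envelope, with the field layout taken from the hunks and placeholder values for the text and username:

# Envelope pushed to the digital-human channel (sketch; text and username are placeholders)
content = {
    'Topic': 'human',                                    # renamed from 'Unreal' in this commit
    'Data': {'Key': 'log', 'Value': 'recognized text'},  # ASR result forwarded as a log entry
    'Username': 'User'
}
# Some call sites attach extra keys, e.g. a 'robot' image URL (see core/recorder.py below).
# Sent only when that user's digital-human client is connected:
# if wsa_server.get_instance().is_connected(username):
#     wsa_server.get_instance().add_cmd(content)
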
2 changes: 1 addition & 1 deletion asr/funasr.py
@@ -38,7 +38,7 @@ def on_message(self, ws, message):
if wsa_server.get_web_instance().is_connected(self.username):
wsa_server.get_web_instance().add_cmd({"panelMsg": self.finalResults, "Username" : self.username})
if wsa_server.get_instance().is_connected(self.username):
content = {'Topic': 'Unreal', 'Data': {'Key': 'log', 'Value': self.finalResults}, 'Username' : self.username}
content = {'Topic': 'human', 'Data': {'Key': 'log', 'Value': self.finalResults}, 'Username' : self.username}
wsa_server.get_instance().add_cmd(content)

except Exception as e:
201 changes: 140 additions & 61 deletions core/fay_core.py

Large diffs are not rendered by default.

12 changes: 6 additions & 6 deletions core/recorder.py
@@ -122,7 +122,7 @@ def __waitingResult(self, iat: asrclient, audio_data):
if wsa_server.get_web_instance().is_connected(self.username):
wsa_server.get_web_instance().add_cmd({"panelMsg": "唤醒成功!", "Username" : self.username , 'robot': f'http://{cfg.fay_url}:5000/robot/Listening.jpg'})
if wsa_server.get_instance().is_connected(self.username):
content = {'Topic': 'Unreal', 'Data': {'Key': 'log', 'Value': "唤醒成功!"}, 'Username' : self.username, 'robot': f'http://{cfg.fay_url}:5000/robot/Listening.jpg'}
content = {'Topic': 'human', 'Data': {'Key': 'log', 'Value': "唤醒成功!"}, 'Username' : self.username, 'robot': f'http://{cfg.fay_url}:5000/robot/Listening.jpg'}
wsa_server.get_instance().add_cmd(content)
self.wakeup_matched = True # 唤醒成功
with fay_core.auto_play_lock:
@@ -138,7 +138,7 @@ def __waitingResult(self, iat: asrclient, audio_data):
if wsa_server.get_web_instance().is_connected(self.username):
wsa_server.get_web_instance().add_cmd({"panelMsg": "[!] 待唤醒!", "Username" : self.username , 'robot': f'http://{cfg.fay_url}:5000/robot/Normal.jpg'})
if wsa_server.get_instance().is_connected(self.username):
content = {'Topic': 'Unreal', 'Data': {'Key': 'log', 'Value': "[!] 待唤醒!"}, 'Username' : self.username, 'robot': f'http://{cfg.fay_url}:5000/robot/Normal.jpg'}
content = {'Topic': 'human', 'Data': {'Key': 'log', 'Value': "[!] 待唤醒!"}, 'Username' : self.username, 'robot': f'http://{cfg.fay_url}:5000/robot/Normal.jpg'}
wsa_server.get_instance().add_cmd(content)
else:
self.on_speaking(text)
@@ -162,7 +162,7 @@ def __waitingResult(self, iat: asrclient, audio_data):
if wsa_server.get_web_instance().is_connected(self.username):
wsa_server.get_web_instance().add_cmd({"panelMsg": "唤醒成功!", "Username" : self.username , 'robot': f'http://{cfg.fay_url}:5000/robot/Listening.jpg'})
if wsa_server.get_instance().is_connected(self.username):
content = {'Topic': 'Unreal', 'Data': {'Key': 'log', 'Value': "唤醒成功!"}, 'Username' : self.username, 'robot': f'http://{cfg.fay_url}:5000/robot/Listening.jpg'}
content = {'Topic': 'human', 'Data': {'Key': 'log', 'Value': "唤醒成功!"}, 'Username' : self.username, 'robot': f'http://{cfg.fay_url}:5000/robot/Listening.jpg'}
wsa_server.get_instance().add_cmd(content)
#去除唤醒词后语句
question = text#[len(wake_up_word):].lstrip()
@@ -175,7 +175,7 @@ def __waitingResult(self, iat: asrclient, audio_data):
if wsa_server.get_web_instance().is_connected(self.username):
wsa_server.get_web_instance().add_cmd({"panelMsg": "[!] 待唤醒!", "Username" : self.username , 'robot': f'http://{cfg.fay_url}:5000/robot/Normal.jpg'})
if wsa_server.get_instance().is_connected(self.username):
content = {'Topic': 'Unreal', 'Data': {'Key': 'log', 'Value': "[!] 待唤醒!"}, 'Username' : self.username, 'robot': f'http://{cfg.fay_url}:5000/robot/Normal.jpg'}
content = {'Topic': 'human', 'Data': {'Key': 'log', 'Value': "[!] 待唤醒!"}, 'Username' : self.username, 'robot': f'http://{cfg.fay_url}:5000/robot/Normal.jpg'}
wsa_server.get_instance().add_cmd(content)

#非唤醒模式
@@ -192,7 +192,7 @@ def __waitingResult(self, iat: asrclient, audio_data):
if wsa_server.get_web_instance().is_connected(self.username):
wsa_server.get_web_instance().add_cmd({"panelMsg": "", 'Username' : self.username, 'robot': f'http://{cfg.fay_url}:5000/robot/Normal.jpg'})
if wsa_server.get_instance().is_connected(self.username):
content = {'Topic': 'Unreal', 'Data': {'Key': 'log', 'Value': ""}, 'Username' : self.username, 'robot': f'http://{cfg.fay_url}:5000/robot/Normal.jpg'}
content = {'Topic': 'human', 'Data': {'Key': 'log', 'Value': ""}, 'Username' : self.username, 'robot': f'http://{cfg.fay_url}:5000/robot/Normal.jpg'}
wsa_server.get_instance().add_cmd(content)

def __record(self):
@@ -317,7 +317,7 @@ def send_ws_notification():
})
if wsa_server.get_instance().is_connected(self.username):
content = {
'Topic': 'Unreal',
'Topic': 'human',
'Data': {'Key': 'log', 'Value': "聆听中..."},
'Username': self.username,
'robot': f'http://{cfg.fay_url}:5000/robot/Listening.jpg'
67 changes: 53 additions & 14 deletions core/stream_manager.py
@@ -6,29 +6,48 @@
from core import member_db
from core.interact import Interact

# 全局变量,用于存储StreamManager的单例实例
__streams = None
# 线程锁,用于保护全局变量的访问
__streams_lock = threading.Lock()

def new_instance(max_sentences=1024):
"""
创建并返回StreamManager的单例实例
:param max_sentences: 最大句子缓存数量
:return: StreamManager实例
"""
global __streams
with __streams_lock:
if __streams is None:
__streams = StreamManager(max_sentences)
return __streams

class StreamManager:
"""
流管理器类,用于管理和处理文本流数据
"""
def __init__(self, max_sentences=3):
"""
初始化StreamManager
:param max_sentences: 每个流的最大句子缓存数量
"""
if hasattr(self, '_initialized') and self._initialized:
return
self.lock = threading.Lock()
self.streams = {}
self.max_sentences = max_sentences
self.listener_threads = {}
self.running = True
self._initialized = True
self.msgid = ""
self.lock = threading.Lock() # 线程锁,用于保护streams字典的访问
self.streams = {} # 存储用户ID到句子缓存的映射
self.max_sentences = max_sentences # 最大句子缓存数量
self.listener_threads = {} # 存储用户ID到监听线程的映射
self.running = True # 控制监听线程的运行状态
self._initialized = True # 标记是否已初始化
self.msgid = "" # 消息ID

def get_Stream(self, uid):
"""
获取指定用户ID的文本流,如果不存在则创建新的
:param uid: 用户ID
:return: 对应的句子缓存对象
"""
need_start_thread = False
with self.lock:
if uid not in self.streams:
@@ -42,39 +61,59 @@ def get_Stream(self, uid):
return self.streams[uid]

def write_sentence(self, uid, sentence):
"""
写入句子到指定用户的文本流
:param uid: 用户ID
:param sentence: 要写入的句子
:return: 写入是否成功
"""
if sentence.endswith('_<isfirst>'):
self.clear_Stream(uid)
Stream = self.get_Stream(uid)
success = Stream.write(sentence)
return success

def clear_Stream(self, uid):
"""
清除指定用户ID的文本流数据
:param uid: 用户ID
"""
with self.lock:
if uid in self.streams:
self.streams[uid].clear()


def listen(self, uid):
"""
监听指定用户的文本流,并处理接收到的句子
:param uid: 用户ID
"""
Stream = self.streams[uid]
username = member_db.new_instance().find_username_by_uid(uid)
while self.running:
sentence = Stream.read()
sentence = Stream.read() # 读取文本流中的句子
if sentence:
self.execute(username, sentence)
self.execute(username, sentence) # 执行句子处理
else:
time.sleep(0.1)
time.sleep(0.1) # 无数据时短暂休眠

def execute(self, username, sentence):
"""
执行句子处理逻辑
:param username: 用户名
:param sentence: 要处理的句子
"""
fay_core = fay_booter.feiFei
# 处理打招呼类型的消息
if sentence.endswith('_<hello>'):
sentence = sentence[:-len('_<hello>')]
interact = Interact("hello", 1, {'user': username, 'msg': sentence})
else:
# 处理普通消息,区分是否是会话的第一句
if sentence.endswith('_<isfirst>'):
sentence = sentence[:-len('_<isfirst>')]
interact = Interact("stream", 1, {'user': username, 'msg': sentence, 'isfirst': True})
else:
interact = Interact("stream", 1, {'user': username, 'msg': sentence, 'isfirst': False})
fay_core.say(interact, sentence)
# MyThread(target=fay_core.say, args=[interact, sentence]).start()
time.sleep(0.1)
fay_core.say(interact, sentence) # 调用核心处理模块进行响应
# MyThread(target=fay_core.say, args=[interact, sentence]).start() # 异步处理方式(已注释)
time.sleep(0.1) # 短暂休眠以控制处理频率
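
For context on the newly documented API, a minimal usage sketch of the singleton follows. Assumptions not shown in the hunk: the import path follows the repository layout, the code runs inside a Fay process where fay_booter.feiFei and member_db are already initialized, and '1' is an existing uid.

from core import stream_manager

sm = stream_manager.new_instance()                # process-wide singleton, created once and then reused
sm.write_sentence('1', 'Hello._<isfirst>')        # _<isfirst> clears the old buffer and marks the reply's first sentence
sm.write_sentence('1', 'Nice to see you again.')  # later chunks are buffered and consumed by the uid's listener thread
sm.write_sentence('1', 'Welcome back._<hello>')   # _<hello> makes execute() build an Interact("hello", ...) instead
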
2 changes: 1 addition & 1 deletion docker/requirements.txt
@@ -14,7 +14,7 @@ azure-cognitiveservices-speech
aliyun-python-sdk-core
simhash
pytz
gevent~=22.10.1
gevent
edge_tts~=6.1.3
eyed3
ultralytics~=8.0.2
24 changes: 14 additions & 10 deletions fay_booter.py
@@ -9,7 +9,6 @@
import requests
from core.interact import Interact
from core.recorder import Recorder
from core import fay_core
from scheduler.thread_manager import MyThread
from utils import util, config_util, stream_util
from core.wsa_server import MyServer
@@ -18,14 +17,19 @@
from llm.agent import agent_service
import subprocess

feiFei: fay_core.FeiFei = None
recorderListener: Recorder = None
# 全局变量声明
feiFei = None
recorderListener = None
__running = False
deviceSocketServer = None
DeviceInputListenerDict = {}
ngrok = None
socket_service_instance = None

# 延迟导入fay_core
def get_fay_core():
from core import fay_core
return fay_core

#启动状态
def is_running():
@@ -247,9 +251,9 @@ def start_auto_play_service(): #TODO 评估一下有无优化的空间
util.printInfo(1, user, '60s后重连自动播放服务器')
time.sleep(60)
# 请求自动播放服务器
with fay_core.auto_play_lock:
if config_util.config['source']['automatic_player_status'] and config_util.config['source']['automatic_player_url'] is not None and fay_core.can_auto_play == True and (config_util.config["interact"]["playSound"] or wsa_server.get_instance().is_connected(user)):
fay_core.can_auto_play = False
with get_fay_core().auto_play_lock:
if config_util.config['source']['automatic_player_status'] and config_util.config['source']['automatic_player_url'] is not None and get_fay_core().can_auto_play == True and (config_util.config["interact"]["playSound"] or wsa_server.get_instance().is_connected(user)):
get_fay_core().can_auto_play = False
post_data = {"user": user}
try:
response = requests.post(url, json=post_data, timeout=5)
Expand All @@ -268,11 +272,11 @@ def start_auto_play_service(): #TODO 评估一下有无优化的空间
feiFei.on_interact(interact)
else:
is_auto_server_error = True
fay_core.can_auto_play = True
get_fay_core().can_auto_play = True
util.printInfo(1, user, '请求自动播放服务器失败,错误代码是:{}'.format(response.status_code))
except requests.exceptions.RequestException as e:
is_auto_server_error = True
fay_core.can_auto_play = True
get_fay_core().can_auto_play = True
util.printInfo(1, user, '请求自动播放服务器失败,错误信息是:{}'.format(e))
time.sleep(0.01)

@@ -332,7 +336,7 @@ def start():

#开启核心服务
util.log(1, '开启核心服务...')
feiFei = fay_core.FeiFei()
feiFei = get_fay_core().FeiFei()
feiFei.start()

#加载本地知识库
@@ -371,6 +375,6 @@

if __name__ == '__main__':
ws_server: MyServer = None
feiFei: fay_core.FeiFei = None
feiFei: get_fay_core().FeiFei = None
recorderListener: Recorder = None
start()
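
The circular-import fix (change 3) defers one side of the cycle: fay_booter.py drops its module-level "from core import fay_core" and resolves it on demand through get_fay_core(), so core modules that use fay_booter (as core/stream_manager.py does above via fay_booter.feiFei) no longer re-enter a half-initialized module. A generic two-module sketch of the pattern, using hypothetical names booter.py, core_mod.py, and package pkg:

# pkg/booter.py (hypothetical) -- no module-level import of core_mod
def get_core():
    from pkg import core_mod    # deferred: resolved on first call, not while booter is being imported
    return core_mod

def start():
    return get_core().FeiFei()  # works even though core_mod imports booter

# pkg/core_mod.py (hypothetical) -- the reverse import is now safe
from pkg import booter          # booter no longer pulls core_mod in at import time

class FeiFei:
    pass
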
42 changes: 27 additions & 15 deletions gui/static/js/index.js
@@ -194,32 +194,44 @@ class FayInterface {
vueInstance.$set(vueInstance, 'robot', data.robot);
}
if (data.panelReply !== undefined) {
vueInstance.panelReply = data.panelReply.content;
console.log('收到消息:', data.panelReply);
vueInstance.panelReply = data.panelReply.content;

// 更新用户列表
const userExists = vueInstance.userList.some(user => user[1] === data.panelReply.username);
if (!userExists) {
vueInstance.userList.push([data.panelReply.uid, data.panelReply.username]);
}

if (vueInstance.selectedUser && data.panelReply.username === vueInstance.selectedUser[1]) {
if ('is_adopted' in data.panelReply && data.panelReply.is_adopted === true) {
vueInstance.messages.push({
id: data.panelReply.id,
username: data.panelReply.username,
content: data.panelReply.content,
type: data.panelReply.type,
timetext: this.getTime(),
is_adopted: 1
});
} else {
// 处理content_id为0的消息
if (data.panelReply.id === 0) {
// 查找最后一条消息
const lastIndex = vueInstance.messages.length - 1;
if (lastIndex >= 0) {
const lastMessage = vueInstance.messages[lastIndex];
// 如果最后一条消息也是content_id为0,则更新它
if (lastMessage.id === 0) {
lastMessage.content = lastMessage.content + data.panelReply.content;
lastMessage.timetext = this.getTime();
// 强制更新视图
vueInstance.$forceUpdate();
return;
}
}
}

// 添加新消息
vueInstance.messages.push({
id: data.panelReply.id,
username: data.panelReply.username,
content: data.panelReply.content,
type: data.panelReply.type,
timetext: this.getTime(),
is_adopted: 0
});
}
is_adopted: data.panelReply.is_adopted ? 1 : 0
});

// 滚动到底部
vueInstance.$nextTick(() => {
const chatContainer = vueInstance.$el.querySelector('.chatmessage');
if (chatContainer) {
@@ -261,7 +273,7 @@ new Vue({
play_sound_enabled: false,
source_record_enabled: false,
userListTimer: null,
thinkPanelExpanded: false,
thinkPanelExpanded: true,
thinkContent: '',
isThinkPanelMinimized: false,
};
2 changes: 1 addition & 1 deletion utils/util.py
@@ -36,7 +36,7 @@ def printInfo(level, sender, text, send_time=-1):
if wsa_server.get_web_instance().is_connected(sender):
wsa_server.get_web_instance().add_cmd({"panelMsg": text} if sender == "系统" else {"panelMsg": text, "Username" : sender})
if wsa_server.get_instance().is_connected(sender):
content = {'Topic': 'Unreal', 'Data': {'Key': 'log', 'Value': text}} if sender == "系统" else {'Topic': 'Unreal', 'Data': {'Key': 'log', 'Value': text}, "Username" : sender}
content = {'Topic': 'human', 'Data': {'Key': 'log', 'Value': text}} if sender == "系统" else {'Topic': 'human', 'Data': {'Key': 'log', 'Value': text}, "Username" : sender}
wsa_server.get_instance().add_cmd(content)
MyThread(target=__write_to_file, args=[logStr]).start()

