From c23d114094561c499be4f289a3bf3aa1d35074c6 Mon Sep 17 00:00:00 2001
From: LINSTCL
Date: Fri, 3 Mar 2023 15:20:42 +0800
Subject: [PATCH] proxy backward compatibility, fix some errors
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 pkg/openai/manager.py  | 2 +-
 pkg/openai/modelmgr.py | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/pkg/openai/manager.py b/pkg/openai/manager.py
index cfafbd84..e5cef33d 100644
--- a/pkg/openai/manager.py
+++ b/pkg/openai/manager.py
@@ -39,7 +39,7 @@ def request_completion(self, prompts):
         ai: ModelRequest = create_openai_model_request(
             config.completion_api_params['model'],
             'user',
-            config.openai_config["http_proxy"]
+            config.openai_config["http_proxy"] if "http_proxy" in config.openai_config else None
         )
         ai.request(
             prompts,
diff --git a/pkg/openai/modelmgr.py b/pkg/openai/modelmgr.py
index 4bffc7b4..64d427a1 100644
--- a/pkg/openai/modelmgr.py
+++ b/pkg/openai/modelmgr.py
@@ -45,8 +45,9 @@ async def __a_request__(self, **kwargs):
     def request(self, **kwargs):
         if self.proxy != None: #异步请求
+            loop = asyncio.new_event_loop()
             self.runtime = threading.Thread(
-                target=asyncio.run,
+                target=loop.run_until_complete,
                 args=(self.__a_request__(**kwargs),)
             )
             self.runtime.start()
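
Note on the modelmgr.py hunk: the thread target changes from asyncio.run to
run_until_complete on an explicitly created event loop. Below is a minimal
sketch of that pattern, not the project's actual code; the names _a_request
and make_request are simplified stand-ins for ModelRequest.__a_request__ and
ModelRequest.request.

    import asyncio
    import threading

    async def _a_request(payload):
        # Stand-in (assumed) for ModelRequest.__a_request__, which performs
        # the proxied OpenAI call in the real code.
        await asyncio.sleep(0.1)
        return payload

    def make_request(payload):
        # Create a dedicated event loop and drive it from a worker thread,
        # mirroring the patched request() path taken when a proxy is set.
        loop = asyncio.new_event_loop()
        runtime = threading.Thread(
            target=loop.run_until_complete,
            args=(_a_request(payload),),
        )
        runtime.start()
        return runtime

    if __name__ == "__main__":
        make_request("prompt").join()

The manager.py hunk guards the "http_proxy" lookup so configuration files
written before the proxy option existed keep working;
config.openai_config.get("http_proxy") would behave the same way, returning
None when the key is absent.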