diff --git a/code/config.example.yaml b/code/config.example.yaml
index 14744be7..e81a4748 100644
--- a/code/config.example.yaml
+++ b/code/config.example.yaml
@@ -13,4 +13,6 @@ HTTPS_PORT: 9001
 USE_HTTPS: false
 CERT_FILE: cert.pem
 KEY_FILE: key.pem
+# OpenAI API base URL; usually no need to change this unless you run your own reverse proxy
+API_URL: https://api.openai.com
 
diff --git a/code/initialization/config.go b/code/initialization/config.go
index 5a8ac829..bb18876f 100644
--- a/code/initialization/config.go
+++ b/code/initialization/config.go
@@ -21,6 +21,7 @@ type Config struct {
 	UseHttps     bool
 	CertFile     string
 	KeyFile      string
+	OpenaiApiUrl string
 }
 
 func LoadConfig(cfg string) *Config {
@@ -45,6 +46,7 @@ func LoadConfig(cfg string) *Config {
 		UseHttps:     getViperBoolValue("USE_HTTPS", false),
 		CertFile:     getViperStringValue("CERT_FILE", "cert.pem"),
 		KeyFile:      getViperStringValue("KEY_FILE", "key.pem"),
+		OpenaiApiUrl: getViperStringValue("API_URL", "https://api.openai.com"),
 	}
 
 	return config
diff --git a/code/main.go b/code/main.go
index f448fc10..bcfcbe34 100644
--- a/code/main.go
+++ b/code/main.go
@@ -26,7 +26,7 @@ func main() {
 	pflag.Parse()
 	config := initialization.LoadConfig(*cfg)
 	initialization.LoadLarkClient(*config)
-	gpt := services.NewChatGPT(config.OpenaiApiKeys)
+	gpt := services.NewChatGPT(config.OpenaiApiKeys, config.OpenaiApiUrl)
 	handlers.InitHandlers(gpt, *config)
 
 	eventHandler := dispatcher.NewEventDispatcher(
diff --git a/code/services/gpt3.go b/code/services/gpt3.go
index 549426ae..112f7ff5 100644
--- a/code/services/gpt3.go
+++ b/code/services/gpt3.go
@@ -13,7 +13,6 @@ import (
 )
 
 const (
-	BASEURL     = "https://api.openai.com/v1/"
 	maxTokens   = 2000
 	temperature = 0.7
 	engine      = "gpt-3.5-turbo"
@@ -52,6 +51,7 @@ type ChatGPTRequestBody struct {
 type ChatGPT struct {
 	Lb     *loadbalancer.LoadBalancer
 	ApiKey []string
+	ApiUrl string
 }
 
 type ImageGenerationRequestBody struct {
@@ -125,7 +125,7 @@ func (gpt ChatGPT) Completions(msg []Messages) (resp Messages, err error) {
 		PresencePenalty:  0,
 	}
 	gptResponseBody := &ChatGPTResponseBody{}
-	err = gpt.sendRequest(BASEURL+"chat/completions", "POST",
+	err = gpt.sendRequest(gpt.ApiUrl+"/v1/chat/completions", "POST",
 		requestBody, gptResponseBody)
 
 	if err == nil {
@@ -143,7 +143,8 @@ func (gpt ChatGPT) GenerateImage(prompt string, size string, n int) ([]string, e
 	}
 
 	imageResponseBody := &ImageGenerationResponseBody{}
-	err := gpt.sendRequest(BASEURL+"images/generations", "POST", requestBody, imageResponseBody)
+	err := gpt.sendRequest(gpt.ApiUrl+"/v1/images/generations",
+		"POST", requestBody, imageResponseBody)
 
 	if err != nil {
 		return nil, err
@@ -164,10 +165,11 @@ func (gpt ChatGPT) GenerateOneImage(prompt string, size string) (string, error)
 	return b64s[0], nil
 }
 
-func NewChatGPT(apiKeys []string) *ChatGPT {
+func NewChatGPT(apiKeys []string, apiUrl string) *ChatGPT {
 	lb := loadbalancer.NewLoadBalancer(apiKeys)
 	return &ChatGPT{
 		Lb:     lb,
 		ApiKey: apiKeys,
+		ApiUrl: apiUrl,
 	}
 }
diff --git a/code/services/gpt3_test.go b/code/services/gpt3_test.go
index f9cb20b1..31b9bb02 100644
--- a/code/services/gpt3_test.go
+++ b/code/services/gpt3_test.go
@@ -14,7 +14,7 @@ func TestCompletions(t *testing.T) {
 		{Role: "user", Content: "翻译这段话: The assistant messages help store prior responses. They can also be written by a developer to help give examples of desired behavior."},
 	}
 
-	gpt := NewChatGPT(config.OpenaiApiKeys)
+	gpt := NewChatGPT(config.OpenaiApiKeys, config.OpenaiApiUrl)
 	resp, err := gpt.Completions(msgs)
 	if err != nil {
@@ -27,7 +27,7 @@ func TestCompletions(t *testing.T) {
 
 func TestGenerateOneImage(t *testing.T) {
 	config := initialization.LoadConfig("../config.yaml")
-	gpt := NewChatGPT(config.OpenaiApiKeys)
+	gpt := NewChatGPT(config.OpenaiApiKeys, config.OpenaiApiUrl)
 	prompt := "a red apple"
 	size := "256x256"
diff --git a/entrypoint.sh b/entrypoint.sh
index cf79e69e..6a912fb0 100755
--- a/entrypoint.sh
+++ b/entrypoint.sh
@@ -16,6 +16,7 @@ HTTPS_PORT=${HTTPS_PORT:-""}
 USE_HTTPS=${USE_HTTPS:-""}
 CERT_FILE=${CERT_FILE:-""}
 KEY_FILE=${KEY_FILE:-""}
+API_URL=${API_URL:-""}
 
 CONFIG_PATH=${CONFIG_PATH:-"config.yaml"}
 
@@ -79,6 +80,10 @@ if [ "$KEY_FILE" != "" ] ; then
     sed -i "15c KEY_FILE: $KEY_FILE" $CONFIG_PATH
 fi
 
+if [ "$API_URL" != "" ] ; then
+    sed -i "17c API_URL: $API_URL" $CONFIG_PATH
+fi
+
 echo -e "\033[32m[Success] Configuration file has been generated!\033[0m"
 
 /dist/feishu_chatgpt
diff --git a/readme.md b/readme.md
index c35086d9..b9885083 100644
--- a/readme.md
+++ b/readme.md
@@ -207,6 +207,7 @@ docker run -d --name feishu-chatgpt -p 9000:9000 \
 --env APP_VERIFICATION_TOKEN=xxx \
 --env BOT_NAME=chatGpt \
 --env OPENAI_KEY="sk-xxx1,sk-xxx2,sk-xxx3" \
+--env API_URL=https://api.openai.com \
 feishu-chatgpt:latest
 ```
 
@@ -224,6 +225,7 @@ docker run -d --restart=always --name feishu-chatgpt2 -p 9000:9000 -v /etc/local
 --env APP_VERIFICATION_TOKEN=xxx \
 --env BOT_NAME=chatGpt \
 --env OPENAI_KEY="sk-xxx1,sk-xxx2,sk-xxx3" \
+--env API_URL=https://api.openai.com \
 dockerproxy.com/leizhenpeng/feishu-chatgpt:latest
 ```
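For reference, a minimal sketch of how the new setting is meant to be used, based only on the changes above: `sendRequest` now builds request URLs as `<API_URL>/v1/...`, so pointing `API_URL` at a reverse proxy reroutes all OpenAI traffic. The proxy hostname below is a placeholder, not part of this commit:

```yaml
# config.yaml sketch: override the default https://api.openai.com endpoint with a
# self-hosted reverse proxy (openai-proxy.example.com is hypothetical).
# Requests then go to https://openai-proxy.example.com/v1/chat/completions and
# https://openai-proxy.example.com/v1/images/generations.
API_URL: https://openai-proxy.example.com
```

The same value can also be injected at container start with `--env API_URL=...`, which entrypoint.sh writes into the generated config.yaml; when it is omitted, the default https://api.openai.com is used.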