From ce94c6e62d3883f936da36b4414bd005b4786de2 Mon Sep 17 00:00:00 2001 From: kai2321 <70182551+kai2321@users.noreply.github.com> Date: Tue, 21 Jan 2025 16:04:15 +0800 Subject: [PATCH] =?UTF-8?q?feat:=E6=8E=A5=E5=85=A5dify=20(#1664)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- plugins/wasm-go/extensions/ai-proxy/README.md | 504 ++++++++++-------- .../extensions/ai-proxy/provider/dify.go | 307 +++++++++++ .../extensions/ai-proxy/provider/provider.go | 14 + 3 files changed, 604 insertions(+), 221 deletions(-) create mode 100644 plugins/wasm-go/extensions/ai-proxy/provider/dify.go diff --git a/plugins/wasm-go/extensions/ai-proxy/README.md b/plugins/wasm-go/extensions/ai-proxy/README.md index 0153abb822..8f2ae49a5d 100644 --- a/plugins/wasm-go/extensions/ai-proxy/README.md +++ b/plugins/wasm-go/extensions/ai-proxy/README.md @@ -247,6 +247,17 @@ Cohere 所对应的 `type` 为 `cohere`。它并无特有的配置字段。 #### Together-AI Together-AI 所对应的 `type` 为 `together-ai`。它并无特有的配置字段。 +#### Dify +Dify 所对应的 `type` 为 `dify`。它特有的配置字段如下: + +| 名称 | 数据类型 | 填写要求 | 默认值 | 描述 | +| -- | -------- |------| ------ | ---------------------------- | +| `difyApiUrl` | string | 非必填 | - | dify私有化部署的url | +| `botType` | string | 非必填 | - | dify的应用类型,Chat/Completion/Agent/Workflow | +| `inputVariable` | string | 非必填 | - | dify中应用类型为workflow时需要设置输入变量,当botType为workflow时一起使用 | +| `outputVariable` | string | 非必填 | - | dify中应用类型为workflow时需要设置输出变量,当botType为workflow时一起使用 | + + ## 用法示例 ### 使用 OpenAI 协议代理 Azure OpenAI 服务 @@ -421,25 +432,25 @@ URL: http://your-domain/v1/chat/completions ```json { - "model": "gpt-4o", - "messages": [ + "model": "gpt-4o", + "messages": [ + { + "role": "user", + "content": [ { - "role": "user", - "content": [ - { - "type": "image_url", - "image_url": { - "url": "https://dashscope.oss-cn-beijing.aliyuncs.com/images/dog_and_girl.jpeg" - } - }, - { - "type": "text", - "text": "这个图片是哪里?" - } - ] + "type": "image_url", + "image_url": { + "url": "https://dashscope.oss-cn-beijing.aliyuncs.com/images/dog_and_girl.jpeg" + } + }, + { + "type": "text", + "text": "这个图片是哪里?" 
} - ], - "temperature": 0.3 + ] + } + ], + "temperature": 0.3 } ``` @@ -447,28 +458,28 @@ URL: http://your-domain/v1/chat/completions ```json { - "id": "17c5955d-af9c-9f28-bbde-293a9c9a3515", - "choices": [ - { - "index": 0, - "message": { - "role": "assistant", - "content": [ - { - "text": "这张照片显示的是一位女士和一只狗在海滩上。由于我无法获取具体的地理位置信息,所以不能确定这是哪个地方的海滩。但是从视觉内容来看,它可能是一个位于沿海地区的沙滩海岸线,并且有海浪拍打着岸边。这样的场景在全球许多美丽的海滨地区都可以找到。如果您需要更精确的信息,请提供更多的背景或细节描述。" - } - ] - }, - "finish_reason": "stop" - } - ], - "created": 1723949230, - "model": "qwen-vl-plus", - "object": "chat.completion", - "usage": { - "prompt_tokens": 1279, - "completion_tokens": 78 + "id": "17c5955d-af9c-9f28-bbde-293a9c9a3515", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": [ + { + "text": "这张照片显示的是一位女士和一只狗在海滩上。由于我无法获取具体的地理位置信息,所以不能确定这是哪个地方的海滩。但是从视觉内容来看,它可能是一个位于沿海地区的沙滩海岸线,并且有海浪拍打着岸边。这样的场景在全球许多美丽的海滨地区都可以找到。如果您需要更精确的信息,请提供更多的背景或细节描述。" + } + ] + }, + "finish_reason": "stop" } + ], + "created": 1723949230, + "model": "qwen-vl-plus", + "object": "chat.completion", + "usage": { + "prompt_tokens": 1279, + "completion_tokens": 78 + } } ``` @@ -591,8 +602,8 @@ provider: modelMapping: "*": "qwen-long" # 通义千问的文件上下文只能在 qwen-long 模型下使用 qwenFileIds: - - "file-fe-xxx" - - "file-fe-yyy" + - "file-fe-xxx" + - "file-fe-yyy" ``` **请求示例** @@ -650,7 +661,7 @@ provider: ```json { "input": { - "prompt": "介绍一下Dubbo" + "prompt": "介绍一下Dubbo" }, "parameters": {}, "debug": {} @@ -661,21 +672,21 @@ provider: ```json { - "output": { - "finish_reason": "stop", - "session_id": "677e7e8fbb874e1b84792b65042e1599", - "text": "Apache Dubbo 是一个..." - }, - "usage": { - "models": [ - { - "output_tokens": 449, - "model_id": "qwen-max", - "input_tokens": 282 - } - ] - }, - "request_id": "b59e45e3-5af4-91df-b7c6-9d746fd3297c" + "output": { + "finish_reason": "stop", + "session_id": "677e7e8fbb874e1b84792b65042e1599", + "text": "Apache Dubbo 是一个..." + }, + "usage": { + "models": [ + { + "output_tokens": 449, + "model_id": "qwen-max", + "input_tokens": 282 + } + ] + }, + "request_id": "b59e45e3-5af4-91df-b7c6-9d746fd3297c" } ``` @@ -918,25 +929,25 @@ curl --location 'http:///v1/chat/completions' \ ```json { - "id": "fd140c3e-0b69-4b19-849b-d354d32a6162", - "choices": [ - { - "index": 0, - "delta": { - "role": "assistant", - "content": "你好!我是一名专业的开发人员。" - }, - "finish_reason": "stop" - } - ], - "created": 1717493117, - "model": "hunyuan-lite", - "object": "chat.completion", - "usage": { - "prompt_tokens": 15, - "completion_tokens": 9, - "total_tokens": 24 + "id": "fd140c3e-0b69-4b19-849b-d354d32a6162", + "choices": [ + { + "index": 0, + "delta": { + "role": "assistant", + "content": "你好!我是一名专业的开发人员。" + }, + "finish_reason": "stop" } + ], + "created": 1717493117, + "model": "hunyuan-lite", + "object": "chat.completion", + "usage": { + "prompt_tokens": 15, + "completion_tokens": 9, + "total_tokens": 24 + } } ``` @@ -958,14 +969,14 @@ provider: ```json { - "model": "gpt-4-turbo", - "messages": [ - { - "role": "user", - "content": "你好,你是谁?" - } - ], - "stream": false + "model": "gpt-4-turbo", + "messages": [ + { + "role": "user", + "content": "你好,你是谁?" 
+ } + ], + "stream": false } ``` @@ -973,25 +984,25 @@ provider: ```json { - "id": "as-e90yfg1pk1", - "choices": [ - { - "index": 0, - "message": { - "role": "assistant", - "content": "你好,我是文心一言,英文名是ERNIE Bot。我能够与人对话互动,回答问题,协助创作,高效便捷地帮助人们获取信息、知识和灵感。" - }, - "finish_reason": "stop" - } - ], - "created": 1717251488, - "model": "ERNIE-4.0", - "object": "chat.completion", - "usage": { - "prompt_tokens": 4, - "completion_tokens": 33, - "total_tokens": 37 + "id": "as-e90yfg1pk1", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "你好,我是文心一言,英文名是ERNIE Bot。我能够与人对话互动,回答问题,协助创作,高效便捷地帮助人们获取信息、知识和灵感。" + }, + "finish_reason": "stop" } + ], + "created": 1717251488, + "model": "ERNIE-4.0", + "object": "chat.completion", + "usage": { + "prompt_tokens": 4, + "completion_tokens": 33, + "total_tokens": 37 + } } ``` @@ -1014,14 +1025,14 @@ provider: ```json { - "model": "gpt-3", - "messages": [ - { - "role": "user", - "content": "你好,你是谁?" - } - ], - "stream": false + "model": "gpt-3", + "messages": [ + { + "role": "user", + "content": "你好,你是谁?" + } + ], + "stream": false } ``` @@ -1029,37 +1040,37 @@ provider: ```json { - "id": "03ac4fcfe1c6cc9c6a60f9d12046e2b4", - "choices": [ - { - "finish_reason": "stop", - "index": 0, - "message": { - "content": "你好,我是一个由MiniMax公司研发的大型语言模型,名为MM智能助理。我可以帮助回答问题、提供信息、进行对话和执行多种语言处理任务。如果你有任何问题或需要帮助,请随时告诉我!", - "role": "assistant", - "name": "MM智能助理", - "audio_content": "" - } - } - ], - "created": 1734155471, - "model": "abab6.5s-chat", - "object": "chat.completion", - "usage": { - "total_tokens": 116, - "total_characters": 0, - "prompt_tokens": 70, - "completion_tokens": 46 - }, - "input_sensitive": false, - "output_sensitive": false, - "input_sensitive_type": 0, - "output_sensitive_type": 0, - "output_sensitive_int": 0, - "base_resp": { - "status_code": 0, - "status_msg": "" + "id": "03ac4fcfe1c6cc9c6a60f9d12046e2b4", + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "message": { + "content": "你好,我是一个由MiniMax公司研发的大型语言模型,名为MM智能助理。我可以帮助回答问题、提供信息、进行对话和执行多种语言处理任务。如果你有任何问题或需要帮助,请随时告诉我!", + "role": "assistant", + "name": "MM智能助理", + "audio_content": "" + } } + ], + "created": 1734155471, + "model": "abab6.5s-chat", + "object": "chat.completion", + "usage": { + "total_tokens": 116, + "total_characters": 0, + "prompt_tokens": 70, + "completion_tokens": 46 + }, + "input_sensitive": false, + "output_sensitive": false, + "input_sensitive_type": 0, + "output_sensitive_type": 0, + "output_sensitive_int": 0, + "base_resp": { + "status_code": 0, + "status_msg": "" + } } ``` @@ -1348,18 +1359,18 @@ provider: ```json { - "model": "gpt-4o", - "messages": [ - { - "role": "system", - "content": "你是一名专业的开发人员!" - }, - { - "role": "user", - "content": "你好,你是谁?" - } - ], - "stream": false + "model": "gpt-4o", + "messages": [ + { + "role": "system", + "content": "你是一名专业的开发人员!" + }, + { + "role": "user", + "content": "你好,你是谁?" + } + ], + "stream": false } ``` @@ -1367,24 +1378,24 @@ provider: ```json { - "id": "cha000c23c6@dx190ef0b4b96b8f2532", - "choices": [ - { - "index": 0, - "message": { - "role": "assistant", - "content": "你好!我是一名专业的开发人员,擅长编程和解决技术问题。有什么我可以帮助你的吗?" - } - } - ], - "created": 1721997415, - "model": "generalv3.5", - "object": "chat.completion", - "usage": { - "prompt_tokens": 10, - "completion_tokens": 19, - "total_tokens": 29 + "id": "cha000c23c6@dx190ef0b4b96b8f2532", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "你好!我是一名专业的开发人员,擅长编程和解决技术问题。有什么我可以帮助你的吗?" 
+ } } + ], + "created": 1721997415, + "model": "generalv3.5", + "object": "chat.completion", + "usage": { + "prompt_tokens": 10, + "completion_tokens": 19, + "total_tokens": 29 + } } ``` @@ -1410,14 +1421,14 @@ provider: ```json { - "model": "gpt-3.5", - "messages": [ - { - "role": "user", - "content": "Who are you?" - } - ], - "stream": false + "model": "gpt-3.5", + "messages": [ + { + "role": "user", + "content": "Who are you?" + } + ], + "stream": false } ``` @@ -1425,25 +1436,25 @@ provider: ```json { - "id": "chatcmpl-b010867c-0d3f-40ba-95fd-4e8030551aeb", - "choices": [ - { - "index": 0, - "message": { - "role": "assistant", - "content": "I am a large multi-modal model, trained by Google. I am designed to provide information and answer questions to the best of my abilities." - }, - "finish_reason": "stop" - } - ], - "created": 1722756984, - "model": "gemini-pro", - "object": "chat.completion", - "usage": { - "prompt_tokens": 5, - "completion_tokens": 29, - "total_tokens": 34 + "id": "chatcmpl-b010867c-0d3f-40ba-95fd-4e8030551aeb", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "I am a large multi-modal model, trained by Google. I am designed to provide information and answer questions to the best of my abilities." + }, + "finish_reason": "stop" } + ], + "created": 1722756984, + "model": "gemini-pro", + "object": "chat.completion", + "usage": { + "prompt_tokens": 5, + "completion_tokens": 29, + "total_tokens": 34 + } } ``` @@ -1515,13 +1526,13 @@ provider: **请求示例** ```json { - "model": "Qwen/Qwen2.5-72B-Instruct-Turbo", - "messages": [ - { - "role": "user", - "content": "Who are you?" - } - ] + "model": "Qwen/Qwen2.5-72B-Instruct-Turbo", + "messages": [ + { + "role": "user", + "content": "Who are you?" + } + ] } ``` @@ -1554,6 +1565,57 @@ provider: } ``` +### 使用 OpenAI 协议代理 Dify 服务 + +**配置信息** +```yaml +provider: + type: dify + apiTokens: + - "YOUR_DIFY_API_TOKEN" + modelMapping: + "*": "dify" +``` + +**请求示例** +```json +{ + "model": "gpt-4-turbo", + "messages": [ + { + "role": "user", + "content": "你好,你是谁?" + } + ], + "stream": false +} +``` + +**响应示例** +```json +{ + "id": "e33fc636-f9e8-4fae-8d5e-fbd0acb09401", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "你好!我是ChatGPT,由OpenAI开发的人工智能语言模型。我可以帮助回答问题、提供建议或进行各种对话。如果你有任何需要,随时告诉我哦!" 
+ }, + "finish_reason": "stop" + } + ], + "created": 1736657752, + "model": "dify", + "object": "chat.completion", + "usage": { + "prompt_tokens": 16, + "completion_tokens": 243, + "total_tokens": 259 + } +} +``` + ## 完整配置示例 @@ -1569,13 +1631,13 @@ metadata: namespace: higress-system spec: matchRules: - - config: - provider: - type: groq - apiTokens: - - "YOUR_API_TOKEN" - ingress: - - groq + - config: + provider: + type: groq + apiTokens: + - "YOUR_API_TOKEN" + ingress: + - groq url: oci://higress-registry.cn-hangzhou.cr.aliyuncs.com/plugins/ai-proxy:1.0.0 --- apiVersion: networking.k8s.io/v1 @@ -1593,16 +1655,16 @@ metadata: spec: ingressClassName: higress rules: - - host: - http: - paths: - - backend: - resource: - apiGroup: networking.higress.io - kind: McpBridge - name: default - path: / - pathType: Prefix + - host: + http: + paths: + - backend: + resource: + apiGroup: networking.higress.io + kind: McpBridge + name: default + path: / + pathType: Prefix --- apiVersion: networking.higress.io/v1 kind: McpBridge @@ -1611,10 +1673,10 @@ metadata: namespace: higress-system spec: registries: - - domain: api.groq.com - name: groq - port: 443 - type: dns + - domain: api.groq.com + name: groq + port: 443 + type: dns ``` 访问示例: diff --git a/plugins/wasm-go/extensions/ai-proxy/provider/dify.go b/plugins/wasm-go/extensions/ai-proxy/provider/dify.go new file mode 100644 index 0000000000..28b6dec794 --- /dev/null +++ b/plugins/wasm-go/extensions/ai-proxy/provider/dify.go @@ -0,0 +1,307 @@ +package provider + +import ( + "encoding/json" + "errors" + "fmt" + "github.com/alibaba/higress/plugins/wasm-go/extensions/ai-proxy/util" + "github.com/alibaba/higress/plugins/wasm-go/pkg/wrapper" + "github.com/higress-group/proxy-wasm-go-sdk/proxywasm" + "github.com/higress-group/proxy-wasm-go-sdk/proxywasm/types" + "net/http" + "strings" + "time" +) + +const ( + difyDomain = "api.dify.ai" + difyChatPath = "/v1/chat-messages" + difyCompletionPath = "/v1/completion-messages" + difyWorkflowPath = "/v1/workflows/run" + BotTypeChat = "Chat" + BotTypeCompletion = "Completion" + BotTypeWorkflow = "Workflow" + BotTypeAgent = "Agent" +) + +type difyProviderInitializer struct{} + +func (d *difyProviderInitializer) ValidateConfig(config *ProviderConfig) error { + if config.apiTokens == nil || len(config.apiTokens) == 0 { + return errors.New("no apiToken found in provider config") + } + return nil +} + +func (d *difyProviderInitializer) CreateProvider(config ProviderConfig) (Provider, error) { + return &difyProvider{ + config: config, + contextCache: createContextCache(&config), + }, nil +} + +type difyProvider struct { + config ProviderConfig + contextCache *contextCache +} + +func (d *difyProvider) GetProviderType() string { + return providerTypeDify +} + +func (d *difyProvider) OnRequestHeaders(ctx wrapper.HttpContext, apiName ApiName, log wrapper.Log) error { + if apiName != ApiNameChatCompletion { + return errUnsupportedApiName + } + d.config.handleRequestHeaders(d, ctx, apiName, log) + return nil +} + +func (d *difyProvider) TransformRequestHeaders(ctx wrapper.HttpContext, apiName ApiName, headers http.Header, log wrapper.Log) { + if d.config.difyApiUrl != "" { + log.Debugf("use local host: %s", d.config.difyApiUrl) + util.OverwriteRequestHostHeader(headers, d.config.difyApiUrl) + } else { + util.OverwriteRequestHostHeader(headers, difyDomain) + } + switch d.config.botType { + case BotTypeChat, BotTypeAgent: + util.OverwriteRequestPathHeader(headers, difyChatPath) + case BotTypeCompletion: + 
util.OverwriteRequestPathHeader(headers, difyCompletionPath) + case BotTypeWorkflow: + util.OverwriteRequestPathHeader(headers, difyWorkflowPath) + } + util.OverwriteRequestAuthorizationHeader(headers, "Bearer "+d.config.GetApiTokenInUse(ctx)) +} + +func (d *difyProvider) OnRequestBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) (types.Action, error) { + if apiName != ApiNameChatCompletion { + return types.ActionContinue, errUnsupportedApiName + } + return d.config.handleRequestBody(d, d.contextCache, ctx, apiName, body, log) +} + +func (d *difyProvider) TransformRequestBodyHeaders(ctx wrapper.HttpContext, apiName ApiName, body []byte, headers http.Header, log wrapper.Log) ([]byte, error) { + request := &chatCompletionRequest{} + err := d.config.parseRequestAndMapModel(ctx, request, body, log) + if err != nil { + return nil, err + } + + difyRequest := d.difyChatGenRequest(request) + + return json.Marshal(difyRequest) +} + +func (d *difyProvider) TransformResponseBody(ctx wrapper.HttpContext, apiName ApiName, body []byte, log wrapper.Log) ([]byte, error) { + difyResponse := &DifyChatResponse{} + if err := json.Unmarshal(body, difyResponse); err != nil { + return nil, fmt.Errorf("unable to unmarshal dify response: %v", err) + } + response := d.responseDify2OpenAI(ctx, difyResponse) + return json.Marshal(response) +} + +func (d *difyProvider) responseDify2OpenAI(ctx wrapper.HttpContext, response *DifyChatResponse) *chatCompletionResponse { + var choice chatCompletionChoice + var id string + switch d.config.botType { + case BotTypeChat, BotTypeAgent: + choice = chatCompletionChoice{ + Index: 0, + Message: &chatMessage{Role: roleAssistant, Content: response.Answer}, + FinishReason: finishReasonStop, + } + //response header中增加conversationId字段 + _ = proxywasm.ReplaceHttpResponseHeader("ConversationId", response.ConversationId) + id = response.ConversationId + case BotTypeCompletion: + choice = chatCompletionChoice{ + Index: 0, + Message: &chatMessage{Role: roleAssistant, Content: response.Answer}, + FinishReason: finishReasonStop, + } + id = response.MessageId + case BotTypeWorkflow: + choice = chatCompletionChoice{ + Index: 0, + Message: &chatMessage{Role: roleAssistant, Content: response.Data.Outputs[d.config.outputVariable]}, + FinishReason: finishReasonStop, + } + id = response.Data.WorkflowId + } + return &chatCompletionResponse{ + Id: id, + Created: time.Now().UnixMilli() / 1000, + Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""), + SystemFingerprint: "", + Object: objectChatCompletion, + Choices: []chatCompletionChoice{choice}, + Usage: response.MetaData.Usage, + } +} + +func (d *difyProvider) OnStreamingResponseBody(ctx wrapper.HttpContext, name ApiName, chunk []byte, isLastChunk bool, log wrapper.Log) ([]byte, error) { + if isLastChunk || len(chunk) == 0 { + return nil, nil + } + // sample event response: + // data: {"event": "agent_thought", "id": "8dcf3648-fbad-407a-85dd-73a6f43aeb9f", "task_id": "9cf1ddd7-f94b-459b-b942-b77b26c59e9b", "message_id": "1fb10045-55fd-4040-99e6-d048d07cbad3", "position": 1, "thought": "", "observation": "", "tool": "", "tool_input": "", "created_at": 1705639511, "message_files": [], "conversation_id": "c216c595-2d89-438c-b33c-aae5ddddd142"} + + // sample end event response: + // data: {"event": "message_end", "id": "5e52ce04-874b-4d27-9045-b3bc80def685", "conversation_id": "45701982-8118-4bc5-8e9b-64562b4555f2", "metadata": {"usage": {"prompt_tokens": 1033, "prompt_unit_price": "0.001", "prompt_price_unit": "0.001", 
"prompt_price": "0.0010330", "completion_tokens": 135, "completion_unit_price": "0.002", "completion_price_unit": "0.001", "completion_price": "0.0002700", "total_tokens": 1168, "total_price": "0.0013030", "currency": "USD", "latency": 1.381760165997548}, "retriever_resources": [{"position": 1, "dataset_id": "101b4c97-fc2e-463c-90b1-5261a4cdcafb", "dataset_name": "iPhone", "document_id": "8dd1ad74-0b5f-4175-b735-7d98bbbb4e00", "document_name": "iPhone List", "segment_id": "ed599c7f-2766-4294-9d1d-e5235a61270a", "score": 0.98457545, "content": "\"Model\",\"Release Date\",\"Display Size\",\"Resolution\",\"Processor\",\"RAM\",\"Storage\",\"Camera\",\"Battery\",\"Operating System\"\n\"iPhone 13 Pro Max\",\"September 24, 2021\",\"6.7 inch\",\"1284 x 2778\",\"Hexa-core (2x3.23 GHz Avalanche + 4x1.82 GHz Blizzard)\",\"6 GB\",\"128, 256, 512 GB, 1TB\",\"12 MP\",\"4352 mAh\",\"iOS 15\""}]}} + responseBuilder := &strings.Builder{} + lines := strings.Split(string(chunk), "\n") + for _, data := range lines { + if len(data) < 6 { + // ignore blank line or wrong format + continue + } + data = data[6:] + var difyResponse DifyChunkChatResponse + if err := json.Unmarshal([]byte(data), &difyResponse); err != nil { + log.Errorf("unable to unmarshal dify response: %v", err) + continue + } + response := d.streamResponseDify2OpenAI(ctx, &difyResponse) + responseBody, err := json.Marshal(response) + if err != nil { + log.Errorf("unable to marshal response: %v", err) + return nil, err + } + d.appendResponse(responseBuilder, string(responseBody)) + } + modifiedResponseChunk := responseBuilder.String() + log.Debugf("=== modified response chunk: %s", modifiedResponseChunk) + return []byte(modifiedResponseChunk), nil +} + +func (d *difyProvider) streamResponseDify2OpenAI(ctx wrapper.HttpContext, response *DifyChunkChatResponse) *chatCompletionResponse { + var choice chatCompletionChoice + var id string + switch d.config.botType { + case BotTypeChat, BotTypeAgent: + choice = chatCompletionChoice{ + Index: 0, + Delta: &chatMessage{Role: roleAssistant, Content: response.Answer}, + } + id = response.ConversationId + _ = proxywasm.ReplaceHttpResponseHeader("ConversationId", response.ConversationId) + case BotTypeCompletion: + choice = chatCompletionChoice{ + Index: 0, + Delta: &chatMessage{Role: roleAssistant, Content: response.Answer}, + } + id = response.MessageId + case BotTypeWorkflow: + choice = chatCompletionChoice{ + Index: 0, + Delta: &chatMessage{Role: roleAssistant, Content: response.Data.Outputs[d.config.outputVariable]}, + } + id = response.Data.WorkflowId + } + if response.Event == "message_end" || response.Event == "workflow_finished" { + choice.FinishReason = finishReasonStop + } + return &chatCompletionResponse{ + Id: id, + Created: time.Now().UnixMilli() / 1000, + Model: ctx.GetStringContext(ctxKeyFinalRequestModel, ""), + SystemFingerprint: "", + Object: objectChatCompletionChunk, + Choices: []chatCompletionChoice{choice}, + } +} + +func (d *difyProvider) appendResponse(responseBuilder *strings.Builder, responseBody string) { + responseBuilder.WriteString(fmt.Sprintf("%s %s\n\n", streamDataItemKey, responseBody)) +} + +func (d *difyProvider) difyChatGenRequest(request *chatCompletionRequest) *DifyChatRequest { + content := "" + for _, message := range request.Messages { + if message.Role == "system" { + content += "SYSTEM: \n" + message.StringContent() + "\n" + } else if message.Role == "assistant" { + content += "ASSISTANT: \n" + message.StringContent() + "\n" + } else { + content += "USER: \n" + 
message.StringContent() + "\n" + } + } + mode := "blocking" + if request.Stream { + mode = "streaming" + } + user := request.User + if user == "" { + user = "api-user" + } + switch d.config.botType { + case BotTypeChat, BotTypeAgent: + conversationId, _ := proxywasm.GetHttpRequestHeader("ConversationId") + return &DifyChatRequest{ + Inputs: make(map[string]interface{}), + Query: content, + ResponseMode: mode, + User: user, + AutoGenerateName: false, + ConversationId: conversationId, + } + case BotTypeCompletion: + return &DifyChatRequest{ + Inputs: map[string]interface{}{ + "query": content, + }, + ResponseMode: mode, + User: user, + } + case BotTypeWorkflow: + return &DifyChatRequest{ + Inputs: map[string]interface{}{ + d.config.inputVariable: content, + }, + ResponseMode: mode, + User: user, + } + default: + return &DifyChatRequest{} + } +} + +type DifyChatRequest struct { + Inputs map[string]interface{} `json:"inputs"` + Query string `json:"query"` + ResponseMode string `json:"response_mode"` + User string `json:"user"` + AutoGenerateName bool `json:"auto_generate_name"` + ConversationId string `json:"conversation_id"` +} + +type DifyMetaData struct { + Usage usage `json:"usage"` +} + +type DifyData struct { + WorkflowId string `json:"workflow_id"` + Id string `json:"id"` + Outputs map[string]interface{} `json:"outputs"` +} + +type DifyChatResponse struct { + ConversationId string `json:"conversation_id"` + MessageId string `json:"message_id"` + Answer string `json:"answer"` + CreateAt int64 `json:"create_at"` + Data DifyData `json:"data"` + MetaData DifyMetaData `json:"metadata"` +} + +type DifyChunkChatResponse struct { + Event string `json:"event"` + ConversationId string `json:"conversation_id"` + MessageId string `json:"message_id"` + Answer string `json:"answer"` + Data DifyData `json:"data"` + MetaData DifyMetaData `json:"metadata"` +} diff --git a/plugins/wasm-go/extensions/ai-proxy/provider/provider.go b/plugins/wasm-go/extensions/ai-proxy/provider/provider.go index 6b6239828f..c59787e6da 100644 --- a/plugins/wasm-go/extensions/ai-proxy/provider/provider.go +++ b/plugins/wasm-go/extensions/ai-proxy/provider/provider.go @@ -47,6 +47,7 @@ const ( providerTypeDoubao = "doubao" providerTypeCoze = "coze" providerTypeTogetherAI = "together-ai" + providerTypeDify = "dify" protocolOpenAI = "openai" protocolOriginal = "original" @@ -110,6 +111,7 @@ var ( providerTypeDoubao: &doubaoProviderInitializer{}, providerTypeCoze: &cozeProviderInitializer{}, providerTypeTogetherAI: &togetherAIProviderInitializer{}, + providerTypeDify: &difyProviderInitializer{}, } ) @@ -240,6 +242,14 @@ type ProviderConfig struct { // @Title zh-CN 自定义大模型参数配置 // @Description zh-CN 用于填充或者覆盖大模型调用时的参数 customSettings []CustomSetting + // @Title zh-CN dify私有化部署的url + difyApiUrl string `required:"false" yaml:"difyApiUrl" json:"difyApiUrl"` + // @Title zh-CN dify的应用类型,Chat/Completion/Agent/Workflow + botType string `required:"false" yaml:"botType" json:"botType"` + // @Title zh-CN dify中应用类型为workflow时需要设置输入变量,当botType为workflow时一起使用 + inputVariable string `required:"false" yaml:"inputVariable" json:"inputVariable"` + // @Title zh-CN dify中应用类型为workflow时需要设置输出变量,当botType为workflow时一起使用 + outputVariable string `required:"false" yaml:"outputVariable" json:"outputVariable"` } func (c *ProviderConfig) GetId() string { @@ -347,6 +357,10 @@ func (c *ProviderConfig) FromJson(json gjson.Result) { if retryOnFailureJson.Exists() { c.retryOnFailure.FromJson(retryOnFailureJson) } + c.difyApiUrl = json.Get("difyApiUrl").String() + 
c.botType = json.Get("botType").String() + c.inputVariable = json.Get("inputVariable").String() + c.outputVariable = json.Get("outputVariable").String() } func (c *ProviderConfig) Validate() error {
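
The README table added in this patch introduces `difyApiUrl`, `botType`, `inputVariable` and `outputVariable`, but the usage example only covers the default Chat case. Below is a minimal sketch of a configuration for a self-hosted Dify Workflow app; the deployment address and the workflow variable names (`query` / `text`) are illustrative assumptions and must match your own Dify deployment and workflow definition:

```yaml
provider:
  type: dify
  apiTokens:
    - "YOUR_DIFY_API_TOKEN"
  modelMapping:
    "*": "dify"
  difyApiUrl: "dify.internal.example.com"   # assumed self-hosted Dify address, used to rewrite the Host header
  botType: "Workflow"                       # Workflow apps are routed to /v1/workflows/run
  inputVariable: "query"                    # hypothetical workflow input variable name
  outputVariable: "text"                    # hypothetical workflow output variable name
```

With `botType: "Workflow"`, the provider places the concatenated chat messages into `inputs[inputVariable]` of the Dify workflow request and reads the reply from `data.outputs[outputVariable]` of the workflow response; when `difyApiUrl` is left empty, requests go to `api.dify.ai` instead.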