From 5bfd9791affaa7b6b819183f544d9e1b21600a15 Mon Sep 17 00:00:00 2001
From: Daishan Peng
Date: Thu, 23 Jan 2025 15:45:03 -0700
Subject: [PATCH] Chore: Add openai o1 support

Signed-off-by: Daishan Peng
---
 openai-model-provider/go.mod         |  2 ++
 openai-model-provider/go.sum         |  2 ++
 openai-model-provider/proxy/proxy.go | 40 ++++++++++++++++++++++++++++
 3 files changed, 44 insertions(+)

diff --git a/openai-model-provider/go.mod b/openai-model-provider/go.mod
index d477dca4..4435c182 100644
--- a/openai-model-provider/go.mod
+++ b/openai-model-provider/go.mod
@@ -1,3 +1,5 @@
 module github.com/obot-platform/tools/openai-model-provider
 
 go 1.23.4
+
+require github.com/gptscript-ai/chat-completion-client v0.0.0-20250123123106-c86554320789
diff --git a/openai-model-provider/go.sum b/openai-model-provider/go.sum
index e69de29b..35cd45d7 100644
--- a/openai-model-provider/go.sum
+++ b/openai-model-provider/go.sum
@@ -0,0 +1,2 @@
+github.com/gptscript-ai/chat-completion-client v0.0.0-20250123123106-c86554320789 h1:rfriXe+FFqZ5fZ+wGzLUivrq7Fyj2xfRdZjDsHf6Ps0=
+github.com/gptscript-ai/chat-completion-client v0.0.0-20250123123106-c86554320789/go.mod h1:7P/o6/IWa1KqsntVf68hSnLKuu3+xuqm6lYhch1w4jo=
diff --git a/openai-model-provider/proxy/proxy.go b/openai-model-provider/proxy/proxy.go
index 1af40c6a..11cb45f0 100644
--- a/openai-model-provider/proxy/proxy.go
+++ b/openai-model-provider/proxy/proxy.go
@@ -1,11 +1,16 @@
 package proxy
 
 import (
+	"bytes"
+	"encoding/json"
 	"errors"
 	"fmt"
+	"io"
 	"net/http"
 	"net/http/httputil"
 	"strings"
+
+	openai "github.com/gptscript-ai/chat-completion-client"
 )
 
 type Config struct {
@@ -96,6 +101,41 @@ func (s *server) proxyDirector(req *http.Request) {
 	if s.cfg.PathPrefix != "" && !strings.HasPrefix(req.URL.Path, s.cfg.PathPrefix) {
 		req.URL.Path = s.cfg.PathPrefix + req.URL.Path
 	}
+
+	if req.Body == nil || req.Method != http.MethodPost {
+		return
+	}
+
+	bodyBytes, err := io.ReadAll(req.Body)
+	if err != nil {
+		fmt.Println("failed to read request body, error: ", err.Error())
+		return
+	}
+
+	var reqBody openai.ChatCompletionRequest
+	// ignore errors here, because the request can be something other than ChatCompletionRequest
+	if err := json.Unmarshal(bodyBytes, &reqBody); err == nil && reqBody.Model == "o1" {
+		modifyRequestBodyForO1(req, &reqBody)
+	} else {
+		req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
+	}
+}
+
+func modifyRequestBodyForO1(req *http.Request, reqBody *openai.ChatCompletionRequest) {
+	reqBody.Stream = false
+	reqBody.Temperature = nil
+	for i, msg := range reqBody.Messages {
+		if msg.Role == "system" {
+			reqBody.Messages[i].Role = "developer"
+		}
+	}
+	modifiedBodyBytes, err := json.Marshal(reqBody)
+	if err == nil {
+		req.Body = io.NopCloser(bytes.NewBuffer(modifiedBodyBytes))
+		req.ContentLength = int64(len(modifiedBodyBytes))
+	} else {
+		fmt.Println("failed to marshal request body after modification and skipping, error: ", err.Error())
+	}
 }
 
 func Validate(cfg *Config) error {
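
For reference, below is a minimal standalone sketch of the body rewrite that the new proxyDirector applies to "o1" requests. It is illustrative only: it uses plain maps instead of the chat-completion-client types, and the JSON field names ("model", "stream", "temperature", "messages", "role") are assumed from the OpenAI chat-completions wire format rather than taken from this repository:

// Sketch of the request-body rewrite applied for "o1" requests.
// It mirrors modifyRequestBodyForO1 using plain maps; field names are
// assumptions based on the OpenAI chat-completions wire format.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	body := map[string]any{
		"model":       "o1",
		"stream":      true,
		"temperature": 0.7,
		"messages": []map[string]any{
			{"role": "system", "content": "You are a helpful assistant."},
			{"role": "user", "content": "Hello"},
		},
	}

	// Same transformation as the patch: force non-streaming, drop the
	// temperature, and rename the "system" role to "developer".
	body["stream"] = false
	delete(body, "temperature")
	for _, msg := range body["messages"].([]map[string]any) {
		if msg["role"] == "system" {
			msg["role"] = "developer"
		}
	}

	out, err := json.MarshalIndent(body, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}

The rewrite reflects the constraints of the o1 API at the time of this patch: streaming and the temperature parameter are not accepted, and the "developer" role replaces "system", so the proxy normalizes incoming requests before forwarding them.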