feat(ai-proxy): add AWS Bedrock Converse-API Driver (#13354) (#9824)
Supersedes #13054, which was completely broken.

Adds AWS Bedrock "Converse API" support to Kong AI Gateway.

AG-14

Co-authored-by: Jack Tysoe <[email protected]>
fffonion and tysoekong authored Aug 2, 2024
1 parent 0058c8e commit 61fc9ea
Showing 23 changed files with 1,167 additions and 165 deletions.
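Before the file-by-file diff, a rough idea of what the new provider looks like from the user side. The Lua sketch below is illustrative only: the "bedrock" provider value and the aws_* / options.bedrock field names come from this commit's compatibility lists, while the route_type, model name, and the test-style plugin table layout are placeholders rather than anything this diff defines.

-- Illustrative ai-proxy plugin entry using the new provider. Only the
-- "bedrock" provider value and the aws_* / options.bedrock field names come
-- from this commit; everything else is a placeholder.
local ai_proxy_bedrock = {
  name = "ai-proxy",
  config = {
    route_type = "llm/v1/chat",                -- placeholder route type
    auth = {
      aws_access_key_id = "<AWS_ACCESS_KEY_ID>",
      aws_secret_access_key = "<AWS_SECRET_ACCESS_KEY>",
    },
    model = {
      provider = "bedrock",
      name = "<bedrock-converse-model-id>",    -- placeholder model identifier
      options = {
        bedrock = {},                          -- provider-specific options (see the new driver)
      },
    },
  },
}

return ai_proxy_bedrock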
5 changes: 5 additions & 0 deletions changelog/unreleased/kong/ai-proxy-aws-bedrock.yml
@@ -0,0 +1,5 @@
message: |
  Kong AI Gateway (AI Proxy and associated plugin family) now supports
  all AWS Bedrock "Converse API" models.
type: feature
scope: Plugin
7 changes: 4 additions & 3 deletions kong-3.8.0-0.rockspec
@@ -324,6 +324,8 @@ build = {
["kong.tools.public.rate-limiting.strategies.redis"] = "kong/tools/public/rate-limiting/strategies/redis.lua",
["kong.tools.json-schema.draft4"] = "kong/tools/json-schema/draft4/init.lua",

["kong.tools.aws_stream"] = "kong/tools/aws_stream.lua",

-- XXX merge - files added or modified by enterprise, all of which no longer exist
-- upstream (in 0.15.0)
["kong.enterprise_edition.db.migrations.enterprise"] = "kong/enterprise_edition/db/migrations/enterprise/init.lua",
@@ -353,7 +355,6 @@ build = {
["kong.enterprise_edition.db.migrations.enterprise.020_3600_to_3700"] = "kong/enterprise_edition/db/migrations/enterprise/020_3600_to_3700.lua",
["kong.enterprise_edition.db.migrations.enterprise.021_3700_to_3800"] = "kong/enterprise_edition/db/migrations/enterprise/021_3700_to_3800.lua",


["kong.runloop.handler"] = "kong/runloop/handler.lua",
["kong.runloop.events"] = "kong/runloop/events.lua",
["kong.runloop.log_level"] = "kong/runloop/log_level.lua",
@@ -923,13 +924,13 @@ build = {
["kong.llm.drivers.anthropic"] = "kong/llm/drivers/anthropic.lua",
["kong.llm.drivers.mistral"] = "kong/llm/drivers/mistral.lua",
["kong.llm.drivers.llama2"] = "kong/llm/drivers/llama2.lua",
["kong.llm.drivers.gemini"] = "kong/llm/drivers/gemini.lua",
["kong.llm.drivers.bedrock"] = "kong/llm/drivers/bedrock.lua",
["kong.llm.vectordb"] = "kong/llm/vectordb/init.lua",
["kong.llm.vectordb.strategies.redis"] = "kong/llm/vectordb/strategies/redis/init.lua",
["kong.llm.vectordb.strategies.redis.utils"] = "kong/llm/vectordb/strategies/redis/utils.lua",
["kong.llm.proxy.handler"] = "kong/llm/proxy/handler.lua",

["kong.llm.drivers.gemini"] = "kong/llm/drivers/gemini.lua",

["kong.plugins.ai-prompt-template.handler"] = "kong/plugins/ai-prompt-template/handler.lua",
["kong.plugins.ai-prompt-template.schema"] = "kong/plugins/ai-prompt-template/schema.lua",
["kong.plugins.ai-prompt-template.templater"] = "kong/plugins/ai-prompt-template/templater.lua",
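The rockspec change above only registers the new source files with LuaRocks; once the rock is built, the modules resolve by the names listed there. A trivial sketch, assuming a Kong source tree or installed rock on package.path:

-- The module names below are exactly the ones added to the rockspec above.
local bedrock_driver = require("kong.llm.drivers.bedrock")  -- kong/llm/drivers/bedrock.lua
local aws_stream     = require("kong.tools.aws_stream")     -- kong/tools/aws_stream.lua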
56 changes: 39 additions & 17 deletions kong/clustering/compat/checkers.lua
@@ -21,7 +21,7 @@ local ngx_WARN = ngx.WARN
local _log_prefix = "[clustering] "


local log_warn_message
local log_warn_message, _AI_PROVIDER_INCOMPATIBLE
do
local fmt = string.format

@@ -34,8 +34,24 @@ do
KONG_VERSION, hint, dp_version, action)
ngx_log(ngx_WARN, _log_prefix, msg, log_suffix)
end
end

  local _AI_PROVIDERS_ADDED = {
    [3008000000] = {
      "gemini",
      "bedrock",
    },
  }

  _AI_PROVIDER_INCOMPATIBLE = function(provider, ver)
    for _, v in ipairs(_AI_PROVIDERS_ADDED[ver]) do
      if v == provider then
        return true
      end
    end

    return false
  end
end

local compatible_checkers = {
{ 3008000000, --[[ 3.8.0.0 ]]
@@ -116,37 +132,43 @@ local compatible_checkers = {

        if plugin.name == 'ai-proxy' then
          local config = plugin.config
          if config.model.provider == "gemini" then
          if _AI_PROVIDER_INCOMPATIBLE(config.model.provider, 3008000000) then
            log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
                ' "openai preserve mode", because ' .. config.model.provider .. ' provider ' ..
                ' is not supported in this release',
                dp_version, log_suffix)

            config.model.provider = "openai"
            config.route_type = "preserve"
            log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
                ' "openai preserve mode", because gemini' ..
                ' provider is not supported in this release',
                dp_version, log_suffix)

            has_update = true
          end
        end

        if plugin.name == 'ai-request-transformer' then
          local config = plugin.config
          if config.llm.model.provider == "gemini" then
          config.llm.model.provider = "openai"
          if _AI_PROVIDER_INCOMPATIBLE(config.llm.model.provider, 3008000000) then
            log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
                ' "openai preserve mode", because gemini' ..
                ' provider is not supported in this release',
                dp_version, log_suffix)
                ' "openai preserve mode", because ' .. config.llm.model.provider .. ' provider ' ..
                ' is not supported in this release',
                dp_version, log_suffix)

            config.llm.model.provider = "openai"

            has_update = true
          end
        end

        if plugin.name == 'ai-response-transformer' then
          local config = plugin.config
          if config.llm.model.provider == "gemini" then
          config.llm.model.provider = "openai"
          if _AI_PROVIDER_INCOMPATIBLE(config.llm.model.provider, 3008000000) then
            log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
                ' "openai preserve mode", because gemini' ..
                ' provider is not supported in this release',
                dp_version, log_suffix)
                ' "openai preserve mode", because ' .. config.llm.model.provider .. ' provider ' ..
                ' is not supported in this release',
                dp_version, log_suffix)

            config.llm.model.provider = "openai"

            has_update = true
          end
        end
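In short, the checker above generalises the earlier Gemini-only downgrade: any provider first shipped in 3.8.0.0 is rewritten to "openai" with route_type "preserve" before the config reaches an older data plane. A standalone, slightly simplified sketch of that rule (not the exact Kong source):

-- Providers added per control-plane version; older data planes cannot accept them.
local AI_PROVIDERS_ADDED = {
  [3008000000] = { "gemini", "bedrock" },
}

local function ai_provider_incompatible(provider, ver)
  for _, v in ipairs(AI_PROVIDERS_ADDED[ver] or {}) do  -- "or {}" is a defensive addition
    if v == provider then
      return true
    end
  end
  return false
end

-- Example downgrade for a hypothetical ai-proxy config bound for a 3.7 data plane:
local config = { model = { provider = "bedrock" }, route_type = "llm/v1/chat" }
if ai_provider_incompatible(config.model.provider, 3008000000) then
  config.model.provider = "openai"
  config.route_type = "preserve"
end
print(config.model.provider, config.route_type)  --> openai  preserve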
9 changes: 9 additions & 0 deletions kong/clustering/compat/removed_fields.lua
@@ -348,6 +348,9 @@ return {
"model.options.gemini",
"auth.gcp_use_service_account",
"auth.gcp_service_account_json",
"model.options.bedrock",
"auth.aws_access_key_id",
"auth.aws_secret_access_key",
},
ai_prompt_decorator = {
"max_request_body_size",
@@ -364,12 +367,18 @@ return {
"llm.model.options.gemini",
"llm.auth.gcp_use_service_account",
"llm.auth.gcp_service_account_json",
"llm.model.options.bedrock",
"llm.auth.aws_access_key_id",
"llm.auth.aws_secret_access_key",
},
ai_response_transformer = {
"max_request_body_size",
"llm.model.options.gemini",
"llm.auth.gcp_use_service_account",
"llm.auth.gcp_service_account_json",
"llm.model.options.bedrock",
"llm.auth.aws_access_key_id",
"llm.auth.aws_secret_access_key",
},
-- Enterprise plugins
openid_connect = {
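Roughly speaking, each dotted path listed here is stripped from the plugin config before it is pushed to a data plane older than 3.8.0.0, so pre-Bedrock nodes never see fields their schema would reject. A minimal sketch of that idea; the helper below is made up for illustration and is not Kong's actual removal code:

-- Hypothetical helper illustrating dotted-path removal, e.g. stripping the new
-- Bedrock auth fields from an ai-request-transformer config bound for a 3.7 DP.
local function remove_field(config, dotted_path)
  local node = config
  local keys = {}
  for key in dotted_path:gmatch("[^%.]+") do
    keys[#keys + 1] = key
  end
  for i = 1, #keys - 1 do
    node = node and node[keys[i]]
    if type(node) ~= "table" then
      return
    end
  end
  node[keys[#keys]] = nil
end

local config = {
  llm = {
    auth = { aws_access_key_id = "AKIA...", aws_secret_access_key = "..." },
    model = { provider = "bedrock", options = { bedrock = {} } },
  },
}

remove_field(config, "llm.auth.aws_access_key_id")
remove_field(config, "llm.auth.aws_secret_access_key")
remove_field(config, "llm.model.options.bedrock")
-- the three new paths are now gone; what remains is what a pre-3.8 data plane understands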
2 changes: 1 addition & 1 deletion kong/llm/drivers/anthropic.lua
@@ -232,7 +232,7 @@ local function handle_stream_event(event_t, model_info, route_type)
    return delta_to_event(event_data, model_info)

  elseif event_id == "message_stop" then
    return "[DONE]", nil, nil
    return ai_shared._CONST.SSE_TERMINATOR, nil, nil

  elseif event_id == "ping" then
    return nil, nil, nil
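The one-line change above replaces a hard-coded "[DONE]" with the shared ai_shared._CONST.SSE_TERMINATOR, so every driver that converts upstream stream events into OpenAI-style SSE frames agrees on one sentinel, presumably including the new Bedrock streaming path. A rough sketch of the pattern, with names local to this example rather than Kong's:

-- Shared sentinel: drivers return this constant instead of a scattered literal,
-- and the common framing code emits it as the final SSE frame.
local _CONST = {
  SSE_TERMINATOR = "[DONE]",  -- assumed value; it is the literal being replaced above
}

local function frame_sse_event(event)
  return "data: " .. event .. "\n\n"
end

io.write(frame_sse_event(_CONST.SSE_TERMINATOR))  --> prints "data: [DONE]"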
(Diffs for the remaining 18 changed files are not shown.)
