Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(ai-proxy): add AWS Bedrock Converse-API Driver #13354

Merged
merged 24 commits into from
Jul 30, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
25c59b9
bedrock changelog
tysoekong Jul 9, 2024
bdbbf9b
add aws_stream library
tysoekong Jul 9, 2024
cb0642b
add bedrock shared-scaffolding
tysoekong Jul 9, 2024
c43d300
change stream parser framework to run by-provider; add bedrock stream…
tysoekong Jul 9, 2024
119e1e8
return nil on empty response json from all LLM
tysoekong Jul 9, 2024
2d04d68
add complex provider signing support to ai-transformer plugin functions
tysoekong Jul 9, 2024
e21d531
add bedrock provider options to shared schema
tysoekong Jul 9, 2024
14e6fba
add aws provider to keybastion loose table
tysoekong Jul 9, 2024
1942f0a
adjust sdk to use new streaming parser format
tysoekong Jul 9, 2024
810891e
add bedrock format test scaffolding
tysoekong Jul 9, 2024
c037480
add bedrock driver class itself
tysoekong Jul 9, 2024
0cb31f2
add bedrock drivers to rockspec
tysoekong Jul 9, 2024
9252142
fix(ai-proxy)(general): body_filter should not run on failure
tysoekong Jul 9, 2024
7275453
add aws streaming format parser test
tysoekong Jul 10, 2024
152b98c
feat(ai-proxy): bedrock unit tests
tysoekong Jul 24, 2024
d044735
fix(ai-proxy): fix gemini streaming; fix gemini analytics
tysoekong Jul 24, 2024
154d31d
fix(ai-proxy): bedrock: cluster compat
tysoekong Jul 25, 2024
5e00426
Update bedrock.lua
tysoekong Jul 25, 2024
da4fcab
fix(clustering): faster comparisons for unsupported AI providers
tysoekong Jul 25, 2024
6826842
fix(clustering): faster comparisons for unsupported AI providers
tysoekong Jul 25, 2024
dcd1d21
fix(ai-proxy): bedrock: refactor auth chain
tysoekong Jul 26, 2024
8d4ded7
lint
tysoekong Jul 26, 2024
334df7f
fix(ai-proxy): (bedrock, gemini): comments rollup
tysoekong Jul 26, 2024
8184ee0
Update kong/plugins/ai-proxy/handler.lua
tysoekong Jul 29, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions changelog/unreleased/kong/ai-proxy-aws-bedrock.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
message: |
Kong AI Gateway (AI Proxy and associated plugin family) now supports
all AWS Bedrock "Converse API" models.
type: feature
scope: Plugin
3 changes: 2 additions & 1 deletion kong-3.8.0-0.rockspec
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,7 @@ build = {
["kong.tools.cjson"] = "kong/tools/cjson.lua",
["kong.tools.emmy_debugger"] = "kong/tools/emmy_debugger.lua",
["kong.tools.redis.schema"] = "kong/tools/redis/schema.lua",
["kong.tools.aws_stream"] = "kong/tools/aws_stream.lua",

["kong.runloop.handler"] = "kong/runloop/handler.lua",
["kong.runloop.events"] = "kong/runloop/events.lua",
Expand Down Expand Up @@ -612,8 +613,8 @@ build = {
["kong.llm.drivers.anthropic"] = "kong/llm/drivers/anthropic.lua",
["kong.llm.drivers.mistral"] = "kong/llm/drivers/mistral.lua",
["kong.llm.drivers.llama2"] = "kong/llm/drivers/llama2.lua",

["kong.llm.drivers.gemini"] = "kong/llm/drivers/gemini.lua",
["kong.llm.drivers.bedrock"] = "kong/llm/drivers/bedrock.lua",

["kong.plugins.ai-prompt-decorator.handler"] = "kong/plugins/ai-prompt-decorator/handler.lua",
["kong.plugins.ai-prompt-decorator.schema"] = "kong/plugins/ai-prompt-decorator/schema.lua",
Expand Down
56 changes: 39 additions & 17 deletions kong/clustering/compat/checkers.lua
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ local ipairs = ipairs
local type = type


local log_warn_message
local log_warn_message, _AI_PROVIDER_INCOMPATIBLE
do
local ngx_log = ngx.log
local ngx_WARN = ngx.WARN
Expand All @@ -19,8 +19,24 @@ do
KONG_VERSION, hint, dp_version, action)
ngx_log(ngx_WARN, _log_prefix, msg, log_suffix)
end
end

-- Map of config-version number -> list of AI provider names that were first
-- introduced in that version (and are therefore unknown to older data planes).
local _AI_PROVIDERS_ADDED = {
  [3008000000] = {
    "gemini",
    "bedrock",
  },
}

--- Returns true when `provider` was first added in version `ver`, i.e. a
-- data plane older than `ver` cannot understand it and needs a compat rewrite.
-- @tparam string provider the configured AI provider name (e.g. "bedrock")
-- @tparam number ver the version key to look up in _AI_PROVIDERS_ADDED
-- @treturn boolean true when the provider is incompatible with pre-`ver` DPs
_AI_PROVIDER_INCOMPATIBLE = function(provider, ver)
  -- Guard: indexing an unknown version key directly would raise
  -- "attempt to index a nil value"; treat it as "nothing incompatible".
  local added = _AI_PROVIDERS_ADDED[ver]
  if not added then
    return false
  end

  for _, candidate in ipairs(added) do
    if candidate == provider then
      return true
    end
  end

  return false
end
end

local compatible_checkers = {
{ 3008000000, --[[ 3.8.0.0 ]]
Expand All @@ -40,37 +56,43 @@ local compatible_checkers = {

if plugin.name == 'ai-proxy' then
local config = plugin.config
if config.model.provider == "gemini" then
if _AI_PROVIDER_INCOMPATIBLE(config.model.provider, 3008000000) then
log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
' "openai preserve mode", because ' .. config.model.provider .. ' provider ' ..
' is not supported in this release',
dp_version, log_suffix)

config.model.provider = "openai"
config.route_type = "preserve"
log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
' "openai preserve mode", because gemini' ..
' provider is not supported in this release',
dp_version, log_suffix)

has_update = true
end
end

if plugin.name == 'ai-request-transformer' then
local config = plugin.config
if config.llm.model.provider == "gemini" then
config.llm.model.provider = "openai"
if _AI_PROVIDER_INCOMPATIBLE(config.llm.model.provider, 3008000000) then
log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
' "openai preserve mode", because gemini' ..
' provider is not supported in this release',
dp_version, log_suffix)
' "openai preserve mode", because ' .. config.llm.model.provider .. ' provider ' ..
' is not supported in this release',
dp_version, log_suffix)

config.llm.model.provider = "openai"

has_update = true
end
end

if plugin.name == 'ai-response-transformer' then
local config = plugin.config
if config.llm.model.provider == "gemini" then
config.llm.model.provider = "openai"
if _AI_PROVIDER_INCOMPATIBLE(config.llm.model.provider, 3008000000) then
log_warn_message('configures ' .. plugin.name .. ' plugin with' ..
' "openai preserve mode", because gemini' ..
' provider is not supported in this release',
dp_version, log_suffix)
' "openai preserve mode", because ' .. config.llm.model.provider .. ' provider ' ..
' is not supported in this release',
dp_version, log_suffix)

config.llm.model.provider = "openai"

has_update = true
end
end
Expand Down
9 changes: 9 additions & 0 deletions kong/clustering/compat/removed_fields.lua
Original file line number Diff line number Diff line change
Expand Up @@ -172,6 +172,9 @@ return {
"model.options.gemini",
"auth.gcp_use_service_account",
"auth.gcp_service_account_json",
"model.options.bedrock",
"auth.aws_access_key_id",
"auth.aws_secret_access_key",
},
ai_prompt_decorator = {
"max_request_body_size",
Expand All @@ -188,12 +191,18 @@ return {
"llm.model.options.gemini",
"llm.auth.gcp_use_service_account",
"llm.auth.gcp_service_account_json",
"llm.model.options.bedrock",
"llm.auth.aws_access_key_id",
"llm.auth.aws_secret_access_key",
},
ai_response_transformer = {
"max_request_body_size",
"llm.model.options.gemini",
"llm.auth.gcp_use_service_account",
"llm.auth.gcp_service_account_json",
"llm.model.options.bedrock",
"llm.auth.aws_access_key_id",
"llm.auth.aws_secret_access_key",
},
prometheus = {
"ai_metrics",
Expand Down
2 changes: 1 addition & 1 deletion kong/llm/drivers/anthropic.lua
Original file line number Diff line number Diff line change
Expand Up @@ -225,7 +225,7 @@ local function handle_stream_event(event_t, model_info, route_type)
return delta_to_event(event_data, model_info)

elseif event_id == "message_stop" then
return "[DONE]", nil, nil
return ai_shared._CONST.SSE_TERMINATOR, nil, nil

elseif event_id == "ping" then
return nil, nil, nil
Expand Down
Loading
Loading