diff --git a/.bazelignore b/.bazelignore
new file mode 100644
index 000000000000..d40366529ee2
--- /dev/null
+++ b/.bazelignore
@@ -0,0 +1,37 @@
+# NB: semantics here are not the same as .gitignore
+# see https://github.com/bazelbuild/bazel/issues/8106
+# Ignore backup files.
+*~
+# Ignore Vim swap files.
+.*.swp
+# Ignore files generated by IDEs.
+/.aswb/
+/.cache/
+/.classpath
+/.clwb/
+/.factorypath
+/.idea/
+/.ijwb/
+/.project
+/.settings
+/.vscode/
+/bazel.iml
+# Ignore all bazel-* symlinks. There is no full list since this can change
+# based on the name of the directory bazel is cloned into.
+/bazel-*
+# Ignore outputs generated during Bazel bootstrapping.
+/output/
+# Ignore jekyll build output.
+/production
+/.sass-cache
+# Bazelisk version file
+.bazelversion
+# User-specific .bazelrc
+user.bazelrc
+
+/t/
+/spec/
+/spec-ee/
+/servroot/
+/autodoc/
+/.github/
diff --git a/.bazelrc b/.bazelrc
new file mode 100644
index 000000000000..50990b609368
--- /dev/null
+++ b/.bazelrc
@@ -0,0 +1,51 @@
+# Bazel doesn't need more than 200MB of memory for a local build, based on memory profiling:
+# https://docs.bazel.build/versions/master/skylark/performance.html#memory-profiling
+# The default JVM max heap size is 1/4 of physical memory, up to 32GB, which could be large
+# enough to consume all of the memory allowed by the cgroup on a large host.
+# Limiting the JVM heap size here makes it GC more aggressively as it approaches the limit,
+# leaving room for the compiler/linker.
+# The number 3G is chosen heuristically to support both large VMs and small VMs with RBE.
+# Startup options cannot be selected via config.
+startup --host_jvm_args=-Xmx512m
+
+run --color=yes
+
+common --color=yes
+common --curses=auto
+
+build --experimental_ui_max_stdouterr_bytes=10485760
+
+build --show_progress_rate_limit=0
+build --show_timestamps
+build --worker_verbose
+
+build --incompatible_strict_action_env
+
+# Enable --platforms API based cpu,compiler,crosstool_top selection; remove this in 7.0.0 as it's enabled by default
+build --incompatible_enable_cc_toolchain_resolution
+
+# Pass PATH, CC, CXX variables from the environment.
+build --action_env=CC --host_action_env=CC
+build --action_env=CXX --host_action_env=CXX
+build --action_env=PATH --host_action_env=PATH
+
+build --action_env=BAZEL_BUILD=1
+
+# temporary fix for https://github.com/bazelbuild/bazel/issues/12905 on macOS
+build --features=-debug_prefix_map_pwd_is_dot
+
+# Build flags.
+build --action_env=BUILD_NAME=kong-dev
+build --action_env=INSTALL_DESTDIR=MANAGED
+build --strip=never
+
+# Release flags
+build:release --//:debug=false
+build:release --action_env=BUILD_NAME=kong-dev
+build:release --action_env=INSTALL_DESTDIR=/usr/local
+build:release --copt="-g"
+build:release --strip=never
+
+build --spawn_strategy=local
+
+build --action_env=GITHUB_TOKEN --host_action_env=GITHUB_TOKEN
diff --git a/.bazelversion b/.bazelversion
new file mode 100644
index 000000000000..dfda3e0b4f01
--- /dev/null
+++ b/.bazelversion
@@ -0,0 +1 @@
+6.1.0
diff --git a/.ci/luacov-stats-aggregator.lua b/.ci/luacov-stats-aggregator.lua
new file mode 100644
index 000000000000..f64e4f9a779b
--- /dev/null
+++ b/.ci/luacov-stats-aggregator.lua
@@ -0,0 +1,62 @@
+-- Aggregates stats from multiple luacov stat files.
+-- Example stats for a 12-line file `my/file.lua`
+-- that received hits on lines 3, 4, 9:
+--
+-- ["my/file.lua"] = {
+-- [3] = 1,
+-- [4] = 3,
+-- [9] = 2,
+-- max = 12,
+-- max_hits = 3
+-- }
+--
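+-- Typical invocation (as used by the aggregator job in the Build & Test workflow;
+-- the third argument is optional):
+--   lua .ci/luacov-stats-aggregator.lua "luacov-stats-out-" "luacov.stats.out" <strip_prefix>
+-- This scans the working directory for folders matching the given prefix, merges
+-- their stats files, and writes the combined luacov report.
+--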
+
+local luacov_stats = require "luacov.stats"
+local luacov_reporter = require "luacov.reporter"
+local luacov_runner = require "luacov.runner"
+local lfs = require "lfs"
+
+
+-- load parameters
+local params = {...}
+local stats_folders_prefix = params[1] or "luacov-stats-out-"
+local file_name = params[2] or "luacov.stats.out"
+local strip_prefix = params[3] or ""
+local base_path = "."
+
+
+-- load stats from different folders named using the format:
+-- luacov-stats-out-${timestamp}
+local loaded_stats = {}
+for folder in lfs.dir(base_path) do
+ if folder:find(stats_folders_prefix, 1, true) then
+ local stats_file = folder .. "/" .. file_name
+ local loaded = luacov_stats.load(stats_file)
+ if loaded then
+ loaded_stats[#loaded_stats + 1] = loaded
+ print("loading file: " .. stats_file)
+ end
+ end
+end
+
+
+-- aggregate
+luacov_runner.load_config()
+for _, stat_data in ipairs(loaded_stats) do
+ -- make all paths relative to ensure file keys have the same format
+ -- and avoid having separate counters for the same file
+ local rel_stat_data = {}
+ for f_name, data in pairs(stat_data) do
+ if f_name:sub(0, #strip_prefix) == strip_prefix then
+ f_name = f_name:sub(#strip_prefix + 1)
+ end
+ rel_stat_data[f_name] = data
+ end
+
+ luacov_runner.data = rel_stat_data
+ luacov_runner.save_stats()
+end
+
+
+-- generate report
+luacov_reporter.report()
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 50512dad137b..9f7cd2201e2f 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -16,7 +16,7 @@
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [8000, 8001, "db:5432"],
- "postCreateCommand": "make dev",
+ "postCreateCommand": "make venv-dev",
// Set *default* container specific settings.json values on container create.
// "settings": {},
diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml
index 0b69da3395dc..48e238d3f9ca 100644
--- a/.devcontainer/docker-compose.yml
+++ b/.devcontainer/docker-compose.yml
@@ -19,8 +19,8 @@ services:
- ..:/workspace:cached
# Uncomment the next line to use Docker from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker-compose for details.
- - /var/run/docker.sock:/var/run/docker.sock
-
+ - /var/run/docker.sock:/var/run/docker.sock
+
# Uncomment the next four lines if you will use a ptrace-based debugger like C++, Go, and Rust.
cap_add:
- SYS_PTRACE
@@ -37,12 +37,12 @@ services:
CRYPTO_DIR: /usr/local/kong
# Overrides default command so things don't shut down after the process ends.
- command: /bin/sh -c "while sleep 1000; do :; done"
+ command: /bin/sh -c "while sleep 1000; do :; done"
# Runs app on the same network as the service container, allows "forwardPorts" in devcontainer.json function.
network_mode: service:db
- # Use "forwardPorts" in **devcontainer.json** to forward an app port locally.
+ # Use "forwardPorts" in **devcontainer.json** to forward an app port locally.
# (Adding the "ports" property to this file will not forward from a Codespace.)
# Uncomment the next line to use a non-root user for all processes - See https://aka.ms/vscode-remote/containers/non-root for details.
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yaml b/.github/ISSUE_TEMPLATE/bug_report.yaml
index fd76c149b9a2..969c944eb5f2 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yaml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yaml
@@ -4,7 +4,7 @@ body:
- type: checkboxes
attributes:
label: Is there an existing issue for this?
- description: Please search to see if an issue already exists for the bug you encountered. Make sure you upgrade to the latest version of Kong.
+ description: Please search to see if an issue already exists for the bug you encountered. Make sure you are also using the latest version of Kong.
options:
- label: I have searched the existing issues
required: true
@@ -12,7 +12,7 @@ body:
attributes:
label: Kong version (`$ kong version`)
description: 'example: Kong 2.5'
- placeholder: 'Please put the Kong Gateway version here.'
+ placeholder: 'Please provide the current Kong Gateway version you are using here.'
validations:
required: true
- type: textarea
@@ -40,7 +40,6 @@ body:
2. With this config...
3. Run '...'
4. See error...
- render: markdown
validations:
required: false
- type: textarea
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 3dc15d9267cc..87022f12a3ba 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -1,5 +1,7 @@
blank_issues_enabled: true
contact_links:
+ - name: Kong Gateway Open Source Community Pledge
+ url: https://github.com/Kong/kong/blob/master/COMMUNITY_PLEDGE.md
- name: Feature Request
url: https://github.com/Kong/kong/discussions/categories/ideas
about: Propose your cool ideas and feature requests at the Kong discussion forum
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index d83e17e39ad4..ba036d070436 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -2,17 +2,25 @@
NOTE: Please read the CONTRIBUTING.md guidelines before submitting your patch,
and ensure you followed them all:
https://github.com/Kong/kong/blob/master/CONTRIBUTING.md#contributing
+
+Refer to the Kong Gateway Community Pledge to understand how we work
+with the open source community:
+https://github.com/Kong/kong/blob/master/COMMUNITY_PLEDGE.md
-->
### Summary
+### Checklist
+
+- [ ] The Pull Request has tests
+- [ ] A changelog file has been created under `changelog/unreleased/kong`, or the `skip-changelog` label has been added to the PR if a changelog is unnecessary. See [README.md](https://github.com/Kong/gateway-changelog/README.md).
+- [ ] There is a user-facing docs PR against https://github.com/Kong/docs.konghq.com - PUT DOCS PR HERE
+
### Full changelog
* [Implement ...]
-* [Add related tests]
-* ...
### Issue reference
diff --git a/.github/actions/build-cache-key/action.yml b/.github/actions/build-cache-key/action.yml
new file mode 100644
index 000000000000..3edde92a3429
--- /dev/null
+++ b/.github/actions/build-cache-key/action.yml
@@ -0,0 +1,62 @@
+name: Build Cache Key
+
+description: >
+ Generates a cache key suitable for save/restore of Kong builds.
+
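+# Example of how this composite action is consumed elsewhere in this repository
+# (see .github/workflows/build.yml and perf.yml); the cache path shown is the
+# caller's choice, not something this action dictates:
+#
+#   - name: Generate cache key
+#     id: cache-key
+#     uses: ./.github/actions/build-cache-key
+#     with:
+#       prefix: build
+#
+#   - name: Lookup build cache
+#     uses: actions/cache@v3
+#     with:
+#       path: ${{ env.BUILD_ROOT }}
+#       key: ${{ steps.cache-key.outputs.cache-key }}
+#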
+inputs:
+ prefix:
+ description: 'String prefix applied to the build cache key'
+ required: false
+ default: 'build'
+ extra:
+ description: 'Additional values/file hashes to use in the cache key'
+ required: false
+
+outputs:
+ cache-key:
+ description: 'The generated cache key'
+ value: ${{ steps.cache-key.outputs.CACHE_KEY }}
+
+runs:
+ using: composite
+ steps:
+ - name: Generate cache key
+ id: cache-key
+ shell: bash
+ env:
+ PREFIX: ${{ inputs.prefix }}
+ EXTRA: ${{ inputs.extra }}
+ run: |
+ # please keep these sorted
+ FILE_HASHES=(
+ ${{ hashFiles('.bazelignore') }}
+ ${{ hashFiles('.bazelrc') }}
+ ${{ hashFiles('.bazelversion') }}
+ ${{ hashFiles('.github/actions/build-cache-key/**') }}
+ ${{ hashFiles('.github/workflows/build.yml') }}
+ ${{ hashFiles('.requirements') }}
+ ${{ hashFiles('BUILD.bazel') }}
+ ${{ hashFiles('WORKSPACE') }}
+ ${{ hashFiles('bin/kong') }}
+ ${{ hashFiles('bin/kong-health') }}
+ ${{ hashFiles('build/**') }}
+ ${{ hashFiles('kong-*.rockspec') }}
+ ${{ hashFiles('kong.conf.default') }}
+ )
+
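+        # `extra` is a newline-separated list of additional values/hashes;
+        # readarray -O appends them to the end of the FILE_HASHES array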
+ if [[ -n ${EXTRA:-} ]]; then
+ readarray \
+ -O "${#FILE_HASHES[@]}" \
+ -t \
+ FILE_HASHES \
+ <<< "$EXTRA"
+ fi
+
+ HASH=$(printf '%s\n' "${FILE_HASHES[@]}" \
+ | grep -vE '^$' \
+ | sort --stable --unique \
+ | sha256sum - \
+ | awk '{print $1}'
+ )
+
+ echo "CACHE_KEY=${PREFIX}::${HASH}" | tee -a $GITHUB_OUTPUT
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 5737055179c7..dfd0e3086189 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -6,5 +6,5 @@ updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
- # Check for updates to GitHub Actions every weekday
- interval: "daily"
+ # Check for updates to GitHub Actions every week
+ interval: "weekly"
diff --git a/.github/labeler.yml b/.github/labeler.yml
index f401408e9cf1..7f6b10e3093e 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -20,10 +20,14 @@ core/db/migrations:
core/db:
- any: ['kong/db/**/*', '!kong/db/migrations/**/*']
+changelog:
+- CHANGELOG.md
+
core/docs:
-- kong/autodoc/**/*
-- ./**/*.md
-- ./*.md
+- any: ['**/*.md', '!CHANGELOG.md']
+
+autodoc:
+- 'autodoc/**/*'
core/language/go:
- kong/runloop/plugin_servers/*
@@ -49,6 +53,10 @@ core/router:
core/templates:
- kong/templates/*
+core/tracing:
+- kong/tracing/**/*
+- kong/pdk/tracing.lua
+
chore:
- .github/**/*
- .devcontainer/**/*
@@ -107,21 +115,12 @@ plugins/key-auth:
plugins/ldap-auth:
- kong/plugins/ldap-auth/**/*
-plugins/log-serializers:
-- kong/plugins/log-serializers/**/*
-
plugins/loggly:
- kong/plugins/loggly/**/*
plugins/oauth2:
- kong/plugins/oauth2/**/*
-plugins/post-function:
-- kong/plugins/post-function/**/*
-
-plugins/pre-function:
-- kong/plugins/pre-function/**/*
-
plugins/prometheus:
- kong/plugins/prometheus/**/*
@@ -149,6 +148,10 @@ plugins/response-transformer:
plugins/session:
- kong/plugins/session/**/*
+plugins/serverless-functions:
+- kong/plugins/post-function/**/*
+- kong/plugins/pre-function/**/*
+
plugins/statsd:
- kong/plugins/statsd/**/*
@@ -164,4 +167,30 @@ plugins/udp-log:
plugins/zipkin:
- kong/plugins/zipkin/**/*
+plugins/opentelemetry:
+- kong/plugins/opentelemetry/**/*
+
+schema-change-noteworthy:
+- kong/db/schema/**/*.lua
+- kong/**/schema.lua
+- kong/plugins/**/daos.lua
+- plugins-ee/**/daos.lua
+- plugins-ee/**/schema.lua
+- kong/db/dao/*.lua
+- kong/enterprise_edition/redis/init.lua
+
+build/bazel:
+- '**/*.bazel'
+- '**/*.bzl'
+- build/**/*
+- WORKSPACE
+- .bazelignore
+- .bazelrc
+- .bazelversion
+- scripts/build-*.sh
+
+plugins/base_plugin.lua:
+- kong/plugins/base_plugin.lua/**/*
+plugins/log-serializers:
+- kong/plugins/log-serializers/**/*
diff --git a/.github/matrix-commitly.yml b/.github/matrix-commitly.yml
new file mode 100644
index 000000000000..7685340597c3
--- /dev/null
+++ b/.github/matrix-commitly.yml
@@ -0,0 +1,24 @@
+# please see matrix-full.yml for the meaning of each field
+build-packages:
+- label: ubuntu-22.04
+ os: ubuntu-22.04
+ package: deb
+ check-manifest-suite: ubuntu-22.04-amd64
+
+build-images:
+- label: ubuntu
+ base-image: ubuntu:22.04
+ package: deb
+ artifact-from: ubuntu-22.04
+
+smoke-tests:
+- label: ubuntu
+
+scan-vulnerabilities:
+- label: ubuntu
+
+release-packages:
+
+release-images:
+- label: ubuntu
+ package: deb
diff --git a/.github/matrix-full.yml b/.github/matrix-full.yml
new file mode 100644
index 000000000000..8c957abb5f9d
--- /dev/null
+++ b/.github/matrix-full.yml
@@ -0,0 +1,209 @@
+build-packages:
+# label: used to distinguish artifacts for later use
+# image: docker image name if the build is running inside a container
+# package: package type
+# package-type: the nfpm packaging target, //:kong_{package} target; only used when package is rpm
+# bazel-args: additional bazel build flags
+# check-manifest-suite: the check manifest suite as defined in scripts/explain_manifest/config.py
+
+# Ubuntu
+- label: ubuntu-20.04
+ image: ubuntu:20.04
+ package: deb
+ check-manifest-suite: ubuntu-20.04-amd64
+- label: ubuntu-22.04
+ package: deb
+ check-manifest-suite: ubuntu-22.04-amd64
+
+# Debian
+- label: debian-10
+ image: debian:10
+ package: deb
+ check-manifest-suite: debian-10-amd64
+- label: debian-11
+ image: debian:11
+ package: deb
+ check-manifest-suite: debian-11-amd64
+
+# Alpine
+- label: alpine
+ os: vars.RELEASE_RUNS_ON
+ package: apk
+ bazel-args: --platforms=//:alpine-crossbuild-x86_64
+ check-manifest-suite: alpine-amd64
+
+# CentOS
+- label: centos-7
+ os: vars.RELEASE_RUNS_ON
+ image: centos:7
+ package: rpm
+ package-type: el7
+ check-manifest-suite: el7-amd64
+
+# RHEL
+- label: rhel-7
+ image: centos:7
+ package: rpm
+ package-type: el7
+ check-manifest-suite: el7-amd64
+- label: rhel-8
+ image: rockylinux:8
+ package: rpm
+ package-type: el8
+ check-manifest-suite: el8-amd64
+
+# Amazon Linux
+- label: amazonlinux-2
+ image: amazonlinux:2
+ package: rpm
+ package-type: aws2
+ check-manifest-suite: amazonlinux-2-amd64
+- label: amazonlinux-2023
+ image: amazonlinux:2023
+ package: rpm
+ package-type: aws2023
+ check-manifest-suite: amazonlinux-2023-amd64
+
+build-images:
+# Only build images for the latest version of each major release.
+
+# label: used as compose docker image label ${github.sha}-${label}
+# base-image: docker image to use as base
+# package: package type
+# artifact-from: label of build-packages to use
+# artifact-from-alt: another label of build-packages to use for downloading package (to build multi-arch image)
+# docker-platforms: comma separated list of docker buildx platforms to build for
+
+# Ubuntu
+- label: ubuntu
+ base-image: ubuntu:22.04
+ package: deb
+ artifact-from: ubuntu-22.04
+ docker-platforms: linux/amd64
+
+# Centos
+- label: centos7
+ base-image: centos:7
+ package: rpm
+ package-distro: el7
+ artifact-from: centos-7
+
+- label: rhel7
+ base-image: centos:7
+ package: rpm
+ package-distro: el7
+ artifact-from: rhel-7
+
+# Alpine
+- label: alpine
+ base-image: alpine:3.16
+ package: apk
+ artifact-from: alpine
+# Debian
+- label: debian
+ base-image: debian:11-slim
+ package: deb
+ artifact-from: debian-11
+
+# RHEL
+- label: rhel
+ base-image: centos:7
+ package: rpm
+ rpm_platform: el7
+ artifact-from: rhel-7
+ docker-platforms: linux/amd64
+
+smoke-tests:
+- label: ubuntu
+- label: debian
+- label: rhel
+- label: alpine
+
+scan-vulnerabilities:
+- label: ubuntu
+- label: debian
+- label: rhel
+- label: alpine
+
+release-packages:
+# Ubuntu
+- label: ubuntu-20.04
+ package: deb
+ artifact-from: ubuntu-20.04
+ artifact-version: 20.04
+ artifact-type: ubuntu
+ artifact: kong.amd64.deb
+- label: ubuntu-22.04
+ package: deb
+ artifact-from: ubuntu-22.04
+ artifact-version: 22.04
+ artifact-type: ubuntu
+ artifact: kong.amd64.deb
+
+# Debian
+- label: debian-10
+ package: deb
+ artifact-from: debian-10
+ artifact-version: 10
+ artifact-type: debian
+ artifact: kong.amd64.deb
+- label: debian-11
+ package: deb
+ artifact-from: debian-11
+ artifact-version: 11
+ artifact-type: debian
+ artifact: kong.amd64.deb
+
+# CentOS
+- label: centos-7
+ package: rpm
+ artifact-from: centos-7
+ artifact-version: 7
+ artifact-type: centos
+ artifact: kong.el7.amd64.rpm
+
+# RHEL
+- label: rhel-7
+ package: rpm
+ artifact-from: rhel-7
+ artifact-version: 7
+ artifact-type: rhel
+ artifact: kong.el7.amd64.rpm
+- label: rhel-8
+ package: rpm
+ artifact-from: rhel-8
+ artifact-version: 8
+ artifact-type: rhel
+ artifact: kong.el8.amd64.rpm
+
+# Amazon Linux
+- label: amazonlinux-2
+ package: rpm
+ artifact-from: amazonlinux-2
+ artifact-version: 2
+ artifact-type: amazonlinux
+ artifact: kong.aws2.amd64.rpm
+- label: amazonlinux-2023
+ package: rpm
+ artifact-from: amazonlinux-2023
+ artifact-version: 2023
+ artifact-type: amazonlinux
+ artifact: kong.aws2023.amd64.rpm
+
+# Alpine
+- label: alpine
+ package: apk
+ artifact-from: alpine
+ artifact-type: alpine
+ artifact: kong.amd64.apk.tar.gz
+
+release-images:
+- label: centos7
+ package: rpm
+- label: rhel7
+ package: rpm
+- label: alpine
+ package: apk
+- label: ubuntu
+- label: debian
+- label: rhel
diff --git a/.github/stale.yml b/.github/stale.yml
deleted file mode 100644
index db9908d6de1d..000000000000
--- a/.github/stale.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-# Configuration for probot-stale - https://github.com/probot/stale
-
-# Number of days of inactivity before an Issue or Pull Request becomes stale
-daysUntilStale: 14
-
-# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
-daysUntilClose: 7
-
-onlyLabels:
- - "pending author feedback"
-
-# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable
-exemptLabels:
- - pinned
- - security
-
-# Label to use when marking as stale
-staleLabel: stale
-
-# Comment to post when marking as stale. Set to `false` to disable
-markComment: >
- This issue has been automatically marked as stale because it has not had
- recent activity. It will be closed if no further activity occurs. Thank you
- for your contributions.
diff --git a/.github/workflows/auto-assignee.yml b/.github/workflows/auto-assignee.yml
index 12fa2933c44a..dcd8f1c4c34d 100644
--- a/.github/workflows/auto-assignee.yml
+++ b/.github/workflows/auto-assignee.yml
@@ -8,5 +8,8 @@ jobs:
assign-author:
runs-on: ubuntu-latest
steps:
- - uses: toshimaru/auto-author-assign@2daaeb2988aef24bf37e636fe733f365c046aba0
+ - name: assign-author
+        # ignore pull requests opened from forks because the token is not correct
+ if: github.event.pull_request.head.repo.full_name == github.repository && github.actor != 'dependabot[bot]'
+ uses: toshimaru/auto-author-assign@c1ffd6f64e20f8f5f61f4620a1e5f0b0908790ef
diff --git a/.github/workflows/autodocs.yml b/.github/workflows/autodocs.yml
index 6a511ebfc3d9..453a7d6c0b94 100644
--- a/.github/workflows/autodocs.yml
+++ b/.github/workflows/autodocs.yml
@@ -19,7 +19,7 @@ on:
jobs:
build:
name: Build dependencies
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
env:
DOWNLOAD_ROOT: $HOME/download-root
@@ -32,10 +32,10 @@ jobs:
echo "LD_LIBRARY_PATH=$INSTALL_ROOT/openssl/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV
- name: Checkout Kong source code
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
- name: Lookup build cache
- uses: actions/cache@v2
+ uses: actions/cache@v3
id: cache-deps
with:
path: ${{ env.INSTALL_ROOT }}
@@ -43,7 +43,7 @@ jobs:
- name: Checkout kong-build-tools
if: steps.cache-deps.outputs.cache-hit != 'true' || github.event.inputs.force_build == 'true'
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
with:
repository: Kong/kong-build-tools
path: kong-build-tools
@@ -51,7 +51,7 @@ jobs:
- name: Checkout go-pluginserver
if: steps.cache-deps.outputs.cache-hit != 'true' || github.event.inputs.force_build == 'true'
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
with:
repository: Kong/go-pluginserver
path: go-pluginserver
@@ -68,9 +68,9 @@ jobs:
if: steps.cache-deps.outputs.cache-hit != 'true' || github.event.inputs.force_build == 'true'
run: |
source .ci/setup_env_github.sh
- make dev
+ make venv-dev
autodoc:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
needs: [build]
steps:
- name: Set environment variables
@@ -80,13 +80,13 @@ jobs:
echo "LD_LIBRARY_PATH=$INSTALL_ROOT/openssl/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV
- name: Checkout Kong source code
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
with:
path: kong
ref: ${{ github.event.inputs.source_branch }}
- name: Checkout Kong Docs
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
with:
repository: kong/docs.konghq.com
path: docs.konghq.com
@@ -94,7 +94,7 @@ jobs:
ref: ${{ github.event.inputs.target_branch }}
- name: Lookup build cache
- uses: actions/cache@v2
+ uses: actions/cache@v3
id: cache-deps
with:
path: ${{ env.INSTALL_ROOT }}
@@ -114,7 +114,7 @@ jobs:
run: |
cd kong
output="$(git branch --show-current)"
- echo "::set-output name=name::$output"
+ echo "name=$output" >> $GITHUB_OUTPUT
- name: Show Docs status
run: |
@@ -123,7 +123,7 @@ jobs:
git checkout -b "autodocs-${{ steps.kong-branch.outputs.name }}"
- name: Commit autodoc changes
- uses: stefanzweifel/git-auto-commit-action@v4
+ uses: stefanzweifel/git-auto-commit-action@v5
with:
repository: "./docs.konghq.com"
commit_message: "Autodocs update"
diff --git a/.github/workflows/backport-fail-bot.yml b/.github/workflows/backport-fail-bot.yml
new file mode 100644
index 000000000000..f8393da03522
--- /dev/null
+++ b/.github/workflows/backport-fail-bot.yml
@@ -0,0 +1,37 @@
+name: Forward failed backport alert to Slack
+
+on:
+ issue_comment:
+ types: [created]
+
+jobs:
+ check_comment:
+ runs-on: ubuntu-latest
+ if: github.event.issue.pull_request != null && contains(github.event.comment.body, 'To backport manually, run these commands in your terminal')
+ steps:
+ - name: Generate Slack Payload
+ id: generate-payload
+ uses: actions/github-script@v7
+ with:
+ script: |
+ const slack_mapping = JSON.parse(process.env.SLACK_MAPPING);
+ const pr_url = "${{ github.event.issue.pull_request.html_url}}";
+ const pr_author_github_id = "${{ github.event.issue.user.login }}"
+ const pr_author_slack_id = slack_mapping[pr_author_github_id];
+ const author = (pr_author_slack_id ? `<@${pr_author_slack_id}>` : pr_author_github_id);
+ const payload = {
+ text: `Backport failed in PR: ${pr_url}. Please check it ${author}.`,
+ channel: process.env.SLACK_CHANNEL,
+ };
+ return JSON.stringify(payload);
+ result-encoding: string
+ env:
+ SLACK_CHANNEL: gateway-notifications
+ SLACK_MAPPING: "${{ vars.GH_ID_2_SLACK_ID_MAPPING }}"
+
+ - name: Send Slack Message
+ uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0
+ with:
+ payload: ${{ steps.generate-payload.outputs.result }}
+ env:
+ SLACK_WEBHOOK_URL: ${{ secrets.SLACK_GATEWAY_NOTIFICATIONS_WEBHOOK }}
diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml
new file mode 100644
index 000000000000..7cc4b9c134a3
--- /dev/null
+++ b/.github/workflows/backport.yml
@@ -0,0 +1,24 @@
+name: Backport
+on:
+ pull_request_target:
+ types:
+ - closed
+ - labeled
+
+jobs:
+ backport:
+ name: Backport
+ runs-on: ubuntu-latest
+ if: >
+ github.event.pull_request.merged
+ && (
+ github.event.action == 'closed'
+ || (
+ github.event.action == 'labeled'
+ && contains(github.event.label.name, 'backport')
+ )
+ )
+ steps:
+ - uses: tibdex/backport@9565281eda0731b1d20c4025c43339fb0a23812e # v2.0.4
+ with:
+ github_token: ${{ secrets.PAT }}
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 000000000000..456d72809134
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,81 @@
+name: Build
+on:
+ workflow_call:
+ inputs:
+ relative-build-root:
+ required: true
+ type: string
+ outputs:
+ cache-key:
+ description: 'Computed cache key, used for restoring cache in other workflows'
+ value: ${{ jobs.build.outputs.cache-key }}
+
+env:
+ BUILD_ROOT: ${{ github.workspace }}/${{ inputs.relative-build-root }}
+
+jobs:
+ build:
+ name: Build dependencies
+ runs-on: ubuntu-22.04
+
+ outputs:
+ cache-key: ${{ steps.cache-key.outputs.cache-key }}
+
+ steps:
+ - name: Checkout Kong source code
+ uses: actions/checkout@v4
+
+ - name: Generate cache key
+ id: cache-key
+ uses: ./.github/actions/build-cache-key
+
+ - name: Lookup build cache
+ id: cache-deps
+ uses: actions/cache@v3
+ with:
+ path: ${{ env.BUILD_ROOT }}
+ key: ${{ steps.cache-key.outputs.cache-key }}
+
+ - name: Install packages
+ if: steps.cache-deps.outputs.cache-hit != 'true'
+ run: sudo apt update && sudo apt install libyaml-dev valgrind libprotobuf-dev
+
+ - name: Build Kong
+ if: steps.cache-deps.outputs.cache-hit != 'true'
+ env:
+ GH_TOKEN: ${{ github.token }}
+ run: |
+ make build-kong
+ chmod +rw -R "$BUILD_ROOT/kong-dev"
+
+ - name: Update PATH
+ run: |
+ echo "$BUILD_ROOT/kong-dev/bin" >> $GITHUB_PATH
+ echo "$BUILD_ROOT/kong-dev/openresty/nginx/sbin" >> $GITHUB_PATH
+ echo "$BUILD_ROOT/kong-dev/openresty/bin" >> $GITHUB_PATH
+
+ - name: Debug (nginx)
+ run: |
+ echo nginx: $(which nginx)
+ nginx -V 2>&1 | sed -re 's/ --/\n--/g'
+ ldd $(which nginx)
+
+ - name: Debug (luarocks)
+ run: |
+ echo luarocks: $(which luarocks)
+ luarocks --version
+ luarocks config
+
+ - name: Bazel Outputs
+ uses: actions/upload-artifact@v3
+ if: failure()
+ with:
+ name: bazel-outputs
+ path: |
+ bazel-out/_tmp/actions
+ retention-days: 3
+
+ - name: Build Dev Kong dependencies
+ if: steps.cache-deps.outputs.cache-hit != 'true'
+ run: |
+ make install-dev-rocks
diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index 1a7137a62eb1..614d26d8c48a 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -1,68 +1,50 @@
name: Build & Test
-on: [push, pull_request]
+on:
+ pull_request:
+ paths-ignore:
+ # ignore markdown files (CHANGELOG.md, README.md, etc.)
+ - '**/*.md'
+ - '.github/workflows/release.yml'
+ - 'changelog/**'
+ - 'kong.conf.default'
+ push:
+ paths-ignore:
+ # ignore markdown files (CHANGELOG.md, README.md, etc.)
+ - '**/*.md'
+ # ignore PRs for the generated COPYRIGHT file
+ - 'COPYRIGHT'
+ branches:
+ - master
+ - release/*
+ - test-please/*
+ workflow_dispatch:
+ inputs:
+ coverage:
+ description: 'Coverage enabled'
+ required: false
+ type: boolean
+ default: false
+
+# cancel previous runs if new commits are pushed to the PR, but run for each commit on master
+concurrency:
+ group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+ cancel-in-progress: true
+
+env:
+ BUILD_ROOT: ${{ github.workspace }}/bazel-bin/build
+ KONG_TEST_COVERAGE: ${{ inputs.coverage == true || github.event_name == 'schedule' }}
jobs:
build:
- name: Build dependencies
- runs-on: ubuntu-20.04
-
- env:
- DOWNLOAD_ROOT: $HOME/download-root
-
- steps:
- - name: Set environment variables
- run: |
- echo "INSTALL_ROOT=$HOME/install-root" >> $GITHUB_ENV
- echo "DOWNLOAD_ROOT=$HOME/download-root" >> $GITHUB_ENV
- echo "LD_LIBRARY_PATH=$INSTALL_ROOT/openssl/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV
- - name: Checkout Kong source code
- uses: actions/checkout@v2
-
- - name: Lookup build cache
- uses: actions/cache@v2
- id: cache-deps
- with:
- path: ${{ env.INSTALL_ROOT }}
- key: ${{ hashFiles('.ci/setup_env_github.sh') }}-${{ hashFiles('.github/workflows/build_and_test.yml') }}-${{ hashFiles('.requirements') }}-${{ hashFiles('kong-*.rockspec') }}
-
- - name: Checkout kong-build-tools
- if: steps.cache-deps.outputs.cache-hit != 'true'
- uses: actions/checkout@v2
- with:
- repository: Kong/kong-build-tools
- path: kong-build-tools
- ref: master
-
- - name: Checkout go-pluginserver
- if: steps.cache-deps.outputs.cache-hit != 'true'
- uses: actions/checkout@v2
- with:
- repository: Kong/go-pluginserver
- path: go-pluginserver
-
- - name: Add to Path
- if: steps.cache-deps.outputs.cache-hit != 'true'
- run: echo "$INSTALL_ROOT/openssl/bin:$INSTALL_ROOT/openresty/nginx/sbin:$INSTALL_ROOT/openresty/bin:$INSTALL_ROOT/luarocks/bin:$GITHUB_WORKSPACE/kong-build-tools/openresty-build-tools" >> $GITHUB_PATH
-
- - name: Install packages
- if: steps.cache-deps.outputs.cache-hit != 'true'
- run: sudo apt update && sudo apt install libyaml-dev valgrind libprotobuf-dev
-
- - name: Build Kong dependencies
- if: steps.cache-deps.outputs.cache-hit != 'true'
- run: |
- source .ci/setup_env_github.sh
- make dev
+ uses: ./.github/workflows/build.yml
+ with:
+ relative-build-root: bazel-bin/build
lint-doc-and-unit-tests:
name: Lint, Doc and Unit tests
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
needs: build
- env:
- KONG_TEST_PG_DATABASE: kong
- KONG_TEST_PG_USER: kong
-
services:
postgres:
image: postgres:13
@@ -75,62 +57,78 @@ jobs:
options: --health-cmd pg_isready --health-interval 5s --health-timeout 5s --health-retries 8
steps:
- - name: Set environment variables
- run: |
- echo "INSTALL_ROOT=$HOME/install-root" >> $GITHUB_ENV
- echo "DOWNLOAD_ROOT=$HOME/download-root" >> $GITHUB_ENV
- echo "LD_LIBRARY_PATH=$INSTALL_ROOT/openssl/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV
-
- name: Checkout Kong source code
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
- name: Lookup build cache
- uses: actions/cache@v2
id: cache-deps
+ uses: actions/cache@v3
with:
- path: ${{ env.INSTALL_ROOT }}
- key: ${{ hashFiles('.ci/setup_env_github.sh') }}-${{ hashFiles('.github/workflows/build_and_test.yml') }}-${{ hashFiles('.requirements') }}-${{ hashFiles('kong-*.rockspec') }}
+ path: ${{ env.BUILD_ROOT }}
+ key: ${{ needs.build.outputs.cache-key }}
- - name: Add to Path
- run: echo "$INSTALL_ROOT/openssl/bin:$INSTALL_ROOT/openresty/nginx/sbin:$INSTALL_ROOT/openresty/bin:$INSTALL_ROOT/luarocks/bin:$GITHUB_WORKSPACE/kong-build-tools/openresty-build-tools" >> $GITHUB_PATH
+ - name: Check test-helpers doc generation
+ run: |
+ source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh
+ pushd ./spec && ldoc .
- name: Check autodoc generation
run: |
- eval `luarocks path`
+ source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh
scripts/autodoc
- - name: Check Admin API definition generation
+ - name: Lint Lua code
run: |
- eval `luarocks path`
- scripts/gen-admin-api-def.sh
+ source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh
+ make lint
- - name: Lint Lua code
+ - name: Validate rockspec file
run: |
- eval `luarocks path`
- luacheck -q .
+ source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh
+ scripts/validate-rockspec
+
+ - name: Check spec file misspelling
+ run: |
+ scripts/check_spec_files_spelling.sh
+
+ - name: Check labeler configuration
+ run: scripts/check-labeler.pl .github/labeler.yml
- name: Unit tests
+ env:
+ KONG_TEST_PG_DATABASE: kong
+ KONG_TEST_PG_USER: kong
run: |
- eval `luarocks path`
- make dev
- bin/busted -v -o htest spec/01-unit
+ source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh
+ TEST_CMD="bin/busted -v -o htest spec/01-unit"
+ if [[ $KONG_TEST_COVERAGE = true ]]; then
+ TEST_CMD="$TEST_CMD --coverage"
+ fi
+ $TEST_CMD
+
+ - name: Archive coverage stats file
+ uses: actions/upload-artifact@v3
+ if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }}
+ with:
+ name: luacov-stats-out-${{ github.job }}-${{ github.run_id }}
+ retention-days: 1
+ path: |
+ luacov.stats.out
+
+ - name: Get kernel message
+ if: failure()
+ run: |
+ sudo dmesg -T
integration-tests-postgres:
name: Postgres ${{ matrix.suite }} - ${{ matrix.split }} tests
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
needs: build
-
strategy:
+ fail-fast: false
matrix:
suite: [integration, plugins]
- split: [first (01-04), second (>= 05)]
-
- env:
- KONG_TEST_PG_DATABASE: kong
- KONG_TEST_PG_USER: kong
- KONG_TEST_DATABASE: postgres
- TEST_SUITE: ${{ matrix.suite }}
- TEST_SPLIT: ${{ matrix.split }}
+ split: [first, second]
services:
postgres:
@@ -144,7 +142,7 @@ jobs:
options: --health-cmd pg_isready --health-interval 5s --health-timeout 5s --health-retries 8
grpcbin:
- image: moul/grpcbin
+ image: kong/grpcbin
ports:
- 15002:9000
- 15003:9001
@@ -153,210 +151,237 @@ jobs:
image: redis
ports:
- 6379:6379
- options: --entrypoint redis-server
+ - 6380:6380
+ options: >-
+ --name kong_redis
zipkin:
- image: openzipkin/zipkin:2.19
+ image: openzipkin/zipkin:2
ports:
- 9411:9411
steps:
- - name: Set environment variables
- run: |
- echo "INSTALL_ROOT=$HOME/install-root" >> $GITHUB_ENV
- echo "DOWNLOAD_ROOT=$HOME/download-root" >> $GITHUB_ENV
- echo "LD_LIBRARY_PATH=$INSTALL_ROOT/openssl/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV
-
- name: Checkout Kong source code
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
- name: Lookup build cache
- uses: actions/cache@v2
id: cache-deps
+ uses: actions/cache@v3
with:
- path: ${{ env.INSTALL_ROOT }}
- key: ${{ hashFiles('.ci/setup_env_github.sh') }}-${{ hashFiles('.github/workflows/build_and_test.yml') }}-${{ hashFiles('.requirements') }}-${{ hashFiles('kong-*.rockspec') }}
-
- - name: Add to Path
- run: echo "$INSTALL_ROOT/openssl/bin:$INSTALL_ROOT/openresty/nginx/sbin:$INSTALL_ROOT/openresty/bin:$INSTALL_ROOT/luarocks/bin:$GITHUB_WORKSPACE/kong-build-tools/openresty-build-tools:$INSTALL_ROOT/go-pluginserver" >> $GITHUB_PATH
+ path: ${{ env.BUILD_ROOT }}
+ key: ${{ needs.build.outputs.cache-key }}
- name: Add gRPC test host names
run: |
echo "127.0.0.1 grpcs_1.test" | sudo tee -a /etc/hosts
echo "127.0.0.1 grpcs_2.test" | sudo tee -a /etc/hosts
- - name: Tests
+ - name: Install AWS SAM cli tool
+ if: ${{ matrix.suite == 'plugins' }}
run: |
- eval `luarocks path`
- make dev
- .ci/run_tests.sh
-
- integration-tests-dbless:
- name: DB-less integration tests
- runs-on: ubuntu-20.04
- needs: build
-
- env:
- KONG_TEST_PG_DATABASE: kong
- KONG_TEST_PG_USER: kong
- KONG_TEST_DATABASE: 'off'
- TEST_SUITE: dbless
+ curl -L -s -o /tmp/aws-sam-cli.zip https://github.com/aws/aws-sam-cli/releases/latest/download/aws-sam-cli-linux-x86_64.zip
+ unzip -o /tmp/aws-sam-cli.zip -d /tmp/aws-sam-cli
+ sudo /tmp/aws-sam-cli/install --update
- services:
- grpcbin:
- image: moul/grpcbin
- ports:
- - 15002:9000
- - 15003:9001
-
- steps:
- - name: Set environment variables
+ - name: Update PATH
run: |
- echo "INSTALL_ROOT=$HOME/install-root" >> $GITHUB_ENV
- echo "DOWNLOAD_ROOT=$HOME/download-root" >> $GITHUB_ENV
- echo "LD_LIBRARY_PATH=$INSTALL_ROOT/openssl/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV
-
- - name: Checkout Kong source code
- uses: actions/checkout@v2
+ echo "$BUILD_ROOT/kong-dev/bin" >> $GITHUB_PATH
+ echo "$BUILD_ROOT/kong-dev/openresty/nginx/sbin" >> $GITHUB_PATH
+ echo "$BUILD_ROOT/kong-dev/openresty/bin" >> $GITHUB_PATH
- - name: Lookup build cache
- uses: actions/cache@v2
- id: cache-deps
- with:
- path: ${{ env.INSTALL_ROOT }}
- key: ${{ hashFiles('.ci/setup_env_github.sh') }}-${{ hashFiles('.github/workflows/build_and_test.yml') }}-${{ hashFiles('.requirements') }}-${{ hashFiles('kong-*.rockspec') }}
+ - name: Debug (nginx)
+ run: |
+ echo nginx: $(which nginx)
+ nginx -V 2>&1 | sed -re 's/ --/\n--/g'
+ ldd $(which nginx)
- - name: Add to Path
- run: echo "$INSTALL_ROOT/openssl/bin:$INSTALL_ROOT/openresty/nginx/sbin:$INSTALL_ROOT/openresty/bin:$INSTALL_ROOT/luarocks/bin:$GITHUB_WORKSPACE/kong-build-tools/openresty-build-tools:$INSTALL_ROOT/go-pluginserver" >> $GITHUB_PATH
+ - name: Debug (luarocks)
+ run: |
+ echo luarocks: $(which luarocks)
+ luarocks --version
+ luarocks config
- - name: Add gRPC test host names
+ - name: Tune up postgres max_connections
run: |
- echo "127.0.0.1 grpcs_1.test" | sudo tee -a /etc/hosts
- echo "127.0.0.1 grpcs_2.test" | sudo tee -a /etc/hosts
+ # arm64 runners may use more connections due to more worker cores
+ psql -hlocalhost -Ukong kong -tAc 'alter system set max_connections = 5000;'
- name: Tests
+ env:
+ KONG_TEST_PG_DATABASE: kong
+ KONG_TEST_PG_USER: kong
+ KONG_TEST_DATABASE: postgres
+ KONG_SPEC_TEST_GRPCBIN_PORT: "15002"
+ KONG_SPEC_TEST_GRPCBIN_SSL_PORT: "15003"
+ KONG_SPEC_TEST_OTELCOL_FILE_EXPORTER_PATH: ${{ github.workspace }}/tmp/otel/file_exporter.json
+ TEST_SUITE: ${{ matrix.suite }}
+ TEST_SPLIT: ${{ matrix.split }}
run: |
- eval `luarocks path`
- make dev
+ make venv-dev # required to install other dependencies like bin/grpcurl
+ source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh
.ci/run_tests.sh
- integration-tests-cassandra:
- name: C* ${{ matrix.cassandra_version }} ${{ matrix.suite }} - ${{ matrix.split }} tests
- runs-on: ubuntu-20.04
- needs: build
+ - name: Archive coverage stats file
+ uses: actions/upload-artifact@v3
+ if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }}
+ with:
+ name: luacov-stats-out-${{ github.job }}-${{ github.run_id }}-${{ matrix.suite }}-${{ contains(matrix.split, 'first') && '1' || '2' }}
+ retention-days: 1
+ path: |
+ luacov.stats.out
- strategy:
- matrix:
- suite: [integration, plugins]
- cassandra_version: [3]
- split: [first (01-04), second (>= 05)]
+ - name: Get kernel message
+ if: failure()
+ run: |
+ sudo dmesg -T
- env:
- KONG_TEST_DATABASE: cassandra
- TEST_SUITE: ${{ matrix.suite }}
- TEST_SPLIT: ${{ matrix.split }}
+ integration-tests-dbless:
+ name: DB-less integration tests
+ runs-on: ubuntu-22.04
+ needs: build
services:
- cassandra:
- image: cassandra:${{ matrix.cassandra_version }}
- ports:
- - 7199:7199
- - 7000:7000
- - 9160:9160
- - 9042:9042
- options: --health-cmd "cqlsh -e 'describe cluster'" --health-interval 5s --health-timeout 5s --health-retries 8
-
grpcbin:
- image: moul/grpcbin
+ image: kong/grpcbin
ports:
- 15002:9000
- 15003:9001
- redis:
- image: redis
- ports:
- - 6379:6379
- options: --entrypoint redis-server
-
- zipkin:
- image: openzipkin/zipkin:2.19
- ports:
- - 9411:9411
-
steps:
- - name: Set environment variables
- run: |
- echo "INSTALL_ROOT=$HOME/install-root" >> $GITHUB_ENV
- echo "DOWNLOAD_ROOT=$HOME/download-root" >> $GITHUB_ENV
- echo "LD_LIBRARY_PATH=$INSTALL_ROOT/openssl/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV
-
- name: Checkout Kong source code
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
- name: Lookup build cache
- uses: actions/cache@v2
id: cache-deps
+ uses: actions/cache@v3
with:
- path: ${{ env.INSTALL_ROOT }}
- key: ${{ hashFiles('.ci/setup_env_github.sh') }}-${{ hashFiles('.github/workflows/build_and_test.yml') }}-${{ hashFiles('.requirements') }}-${{ hashFiles('kong-*.rockspec') }}
-
- - name: Add to Path
- run: echo "$INSTALL_ROOT/openssl/bin:$INSTALL_ROOT/openresty/nginx/sbin:$INSTALL_ROOT/openresty/bin:$INSTALL_ROOT/luarocks/bin:$GITHUB_WORKSPACE/kong-build-tools/openresty-build-tools:$INSTALL_ROOT/go-pluginserver" >> $GITHUB_PATH
+ path: ${{ env.BUILD_ROOT }}
+ key: ${{ needs.build.outputs.cache-key }}
- name: Add gRPC test host names
run: |
echo "127.0.0.1 grpcs_1.test" | sudo tee -a /etc/hosts
echo "127.0.0.1 grpcs_2.test" | sudo tee -a /etc/hosts
+ - name: Run OpenTelemetry Collector
+ run: |
+ mkdir -p ${{ github.workspace }}/tmp/otel
+ touch ${{ github.workspace }}/tmp/otel/file_exporter.json
+ sudo chmod 777 -R ${{ github.workspace }}/tmp/otel
+ docker run -p 4317:4317 -p 4318:4318 -p 55679:55679 \
+ -v ${{ github.workspace }}/spec/fixtures/opentelemetry/otelcol.yaml:/etc/otel-collector-config.yaml \
+ -v ${{ github.workspace }}/tmp/otel:/etc/otel \
+ --name opentelemetry-collector -d \
+ otel/opentelemetry-collector-contrib:0.52.0 \
+ --config=/etc/otel-collector-config.yaml
+ sleep 2
+ docker logs opentelemetry-collector
+
- name: Tests
+ env:
+ KONG_TEST_PG_DATABASE: kong
+ KONG_TEST_PG_USER: kong
+ KONG_TEST_DATABASE: 'off'
+ KONG_SPEC_TEST_GRPCBIN_PORT: "15002"
+ KONG_SPEC_TEST_GRPCBIN_SSL_PORT: "15003"
+ KONG_SPEC_TEST_OTELCOL_FILE_EXPORTER_PATH: ${{ github.workspace }}/tmp/otel/file_exporter.json
+ TEST_SUITE: dbless
run: |
- eval `luarocks path`
- make dev
+ make venv-dev # required to install other dependencies like bin/grpcurl
+ source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh
.ci/run_tests.sh
+ - name: Archive coverage stats file
+ uses: actions/upload-artifact@v3
+ if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }}
+ with:
+ name: luacov-stats-out-${{ github.job }}-${{ github.run_id }}
+ retention-days: 1
+ path: |
+ luacov.stats.out
+
+ - name: Get kernel message
+ if: failure()
+ run: |
+ sudo dmesg -T
+
pdk-tests:
name: PDK tests
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
needs: build
- env:
- TEST_SUITE: pdk
-
steps:
- - name: Set environment variables
- run: |
- echo "INSTALL_ROOT=$HOME/install-root" >> $GITHUB_ENV
- echo "DOWNLOAD_ROOT=$HOME/download-root" >> $GITHUB_ENV
- echo "LD_LIBRARY_PATH=$INSTALL_ROOT/openssl/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV
-
- name: Checkout Kong source code
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
- name: Lookup build cache
- uses: actions/cache@v2
id: cache-deps
+ uses: actions/cache@v3
with:
- path: ${{ env.INSTALL_ROOT }}
- key: ${{ hashFiles('.ci/setup_env_github.sh') }}-${{ hashFiles('.github/workflows/build_and_test.yml') }}-${{ hashFiles('.requirements') }}-${{ hashFiles('kong-*.rockspec') }}
-
- - name: Add to Path
- run: echo "$INSTALL_ROOT/openssl/bin:$INSTALL_ROOT/openresty/nginx/sbin:$INSTALL_ROOT/openresty/bin:$INSTALL_ROOT/luarocks/bin:$GITHUB_WORKSPACE/kong-build-tools/openresty-build-tools:$DOWNLOAD_ROOT/cpanm" >> $GITHUB_PATH
+ path: ${{ env.BUILD_ROOT }}
+ key: ${{ needs.build.outputs.cache-key }}
- name: Install Test::Nginx
run: |
- CPAN_DOWNLOAD=$DOWNLOAD_ROOT/cpanm
+ CPAN_DOWNLOAD=./cpanm
mkdir -p $CPAN_DOWNLOAD
curl -o $CPAN_DOWNLOAD/cpanm https://cpanmin.us
chmod +x $CPAN_DOWNLOAD/cpanm
echo "Installing CPAN dependencies..."
- cpanm --notest --local-lib=$HOME/perl5 local::lib && eval $(perl -I $HOME/perl5/lib/perl5/ -Mlocal::lib)
- cpanm --notest Test::Nginx
+ $CPAN_DOWNLOAD/cpanm --notest --local-lib=$HOME/perl5 local::lib && eval $(perl -I $HOME/perl5/lib/perl5/ -Mlocal::lib)
+ $CPAN_DOWNLOAD/cpanm --notest Test::Nginx
- name: Tests
+ env:
+ TEST_SUITE: pdk
run: |
- eval `luarocks path`
- make dev
-
+ source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh
+ if [[ $KONG_TEST_COVERAGE = true ]]; then
+ export PDK_LUACOV=1
+ fi
eval $(perl -I $HOME/perl5/lib/perl5/ -Mlocal::lib)
.ci/run_tests.sh
+
+ - name: Archive coverage stats file
+ uses: actions/upload-artifact@v3
+ if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }}
+ with:
+ name: luacov-stats-out-${{ github.job }}-${{ github.run_id }}
+ retention-days: 1
+ path: |
+ luacov.stats.out
+
+ - name: Get kernel message
+ if: failure()
+ run: |
+ sudo dmesg -T
+
+ aggregator:
+ needs: [lint-doc-and-unit-tests,pdk-tests,integration-tests-postgres,integration-tests-dbless]
+ name: Luacov stats aggregator
+ if: ${{ always() && (inputs.coverage == true || github.event_name == 'schedule') }}
+ runs-on: ubuntu-22.04
+
+ steps:
+ - name: Checkout source code
+ uses: actions/checkout@v4
+
+ - name: Install requirements
+ run: |
+ sudo apt-get update && sudo apt-get install -y luarocks
+ sudo luarocks install luacov
+ sudo luarocks install luafilesystem
+
+ # Download all archived coverage stats files
+ - uses: actions/download-artifact@v3
+
+ - name: Stats aggregation
+ shell: bash
+ run: |
+ lua .ci/luacov-stats-aggregator.lua "luacov-stats-out-" "luacov.stats.out" ${{ github.workspace }}/
+ # The following prints a report with each file sorted by coverage percentage, and the total coverage
+ printf "\n\nCoverage File\n\n"
+ awk -v RS='Coverage\n-+\n' 'NR>1{print $0}' luacov.report.out | grep -vE "^-|^$" > summary.out
+ cat summary.out | grep -v "^Total" | awk '{printf "%7d%% %s\n", $4, $1}' | sort -n
+ cat summary.out | grep "^Total" | awk '{printf "%7d%% %s\n", $4, $1}'
diff --git a/.github/workflows/buildifier.yml b/.github/workflows/buildifier.yml
new file mode 100644
index 000000000000..85d3aaab0c24
--- /dev/null
+++ b/.github/workflows/buildifier.yml
@@ -0,0 +1,55 @@
+name: Buildifier
+
+on:
+ pull_request:
+ paths:
+ - '**/*.bzl'
+ - '**/*.bazel'
+ - 'BUILD*'
+ - 'WORKSPACE*'
+ push:
+ paths:
+ - '**/*.bzl'
+ - '**/*.bazel'
+ - 'BUILD*'
+ - 'WORKSPACE*'
+ branches:
+ - master
+ - release/*
+
+jobs:
+
+ autoformat:
+ name: Auto-format and Check
+ runs-on: ubuntu-22.04
+
+ steps:
+ - name: Check out code
+ uses: actions/checkout@v4
+
+ - name: Install Dependencies
+ run: |
+ sudo wget -O /bin/buildifier https://github.com/bazelbuild/buildtools/releases/download/5.1.0/buildifier-linux-amd64
+ sudo chmod +x /bin/buildifier
+
+ - name: Run buildifier
+ run: |
+ buildifier -mode=fix $(find . -name 'BUILD*' -o -name 'WORKSPACE*' -o -name '*.bzl' -o -name '*.bazel' -type f)
+
+ - name: Verify buildifier
+ shell: bash
+ run: |
+ # From: https://backreference.org/2009/12/23/how-to-match-newlines-in-sed/
+ # This is to leverage this workaround:
+ # https://github.com/actions/toolkit/issues/193#issuecomment-605394935
+ function urlencode() {
+ sed ':begin;$!N;s/\n/%0A/;tbegin'
+ }
+ if [[ $(git diff-index --name-only HEAD --) ]]; then
+ for x in $(git diff-index --name-only HEAD --); do
+ echo "::error file=$x::Please run buildifier.%0A$(git diff $x | urlencode)"
+ done
+ echo "${{ github.repository }} is out of style. Please run buildifier."
+ exit 1
+ fi
+ echo "${{ github.repository }} is formatted correctly."
diff --git a/.github/workflows/cherry-picks.yml b/.github/workflows/cherry-picks.yml
new file mode 100644
index 000000000000..6383c1d5fd6a
--- /dev/null
+++ b/.github/workflows/cherry-picks.yml
@@ -0,0 +1,41 @@
+name: Cherry Pick to remote repository
+on:
+ pull_request_target:
+ types: [closed, labeled]
+ issue_comment:
+ types: [created]
+jobs:
+ cross-repo-cherrypick:
+ name: Cherry pick to remote repository
+ runs-on: ubuntu-latest
+ # Only run when pull request is merged, or labeled
+ # or when a comment containing `/cherry-pick` is created
+ # and the author is a member, collaborator or owner
+ if: >
+ (
+ github.event_name == 'pull_request_target' &&
+ github.event.pull_request.merged
+ ) || (
+ github.event_name == 'issue_comment' &&
+ github.event.issue.pull_request &&
+ contains(fromJSON('["MEMBER", "COLLABORATOR", "OWNER"]'), github.event.comment.author_association) &&
+ contains(github.event.comment.body, '/cherry-pick')
+ )
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ token: ${{ secrets.CHERRY_PICK_TOKEN }}
+ - name: Create backport pull requests
+ uses: jschmid1/cross-repo-cherrypick-action@2366f50fd85e8966aa024a4dd6fbf70e7019d7e1
+ with:
+ token: ${{ secrets.CHERRY_PICK_TOKEN }}
+ pull_title: '[cherry-pick -> ${target_branch}] ${pull_title}'
+ merge_commits: 'skip'
+ trigger_label: 'cherry-pick kong-ee' # trigger based on this label
+ pull_description: |-
+ Automated cherry-pick to `${target_branch}`, triggered by a label in https://github.com/${owner}/${repo}/pull/${pull_number} :robot:.
+ upstream_repo: 'kong/kong-ee'
+ branch_map: |-
+ {
+ "master": "master"
+ }
diff --git a/.github/workflows/community-stale.yml b/.github/workflows/community-stale.yml
new file mode 100644
index 000000000000..395aa82978ea
--- /dev/null
+++ b/.github/workflows/community-stale.yml
@@ -0,0 +1,53 @@
+name: Close inactive issues
+on:
+ schedule:
+ - cron: "30 1 * * *"
+
+jobs:
+ close-issues:
+ runs-on: ubuntu-latest
+ permissions:
+ issues: write
+ pull-requests: write
+ steps:
+ - uses: actions/stale@v8
+ with:
+ days-before-stale: 14
+ days-before-close: 7
+ only-labels: "pending author feedback"
+ exempt-pr-labels: "pinned,security"
+ exempt-issue-labels: "pinned,security"
+ stale-issue-label: "stale"
+ stale-issue-message: "This issue is marked as stale because it has been open for 14 days with no activity."
+ close-issue-message: |
+ Dear contributor,
+
+ We are automatically closing this issue because it has not seen any activity for three weeks.
+ We're sorry that your issue could not be resolved. If any new information comes up that could
+ help resolving it, please feel free to reopen it.
+
+ Your contribution is greatly appreciated!
+
+          Please have a look at
+ [our pledge to the community](https://github.com/Kong/kong/blob/master/COMMUNITY_PLEDGE.md)
+ for more information.
+
+ Sincerely,
+ Your Kong Gateway team
+ stale-pr-message: "This PR is marked as stale because it has been open for 14 days with no activity."
+ close-pr-message: |
+ Dear contributor,
+
+ We are automatically closing this pull request because it has not seen any activity for three weeks.
+          We're sorry that we could not merge it. If you still want to pursue your patch, please feel free to
+ reopen it and address any remaining issues.
+
+ Your contribution is greatly appreciated!
+
+          Please have a look at
+ [our pledge to the community](https://github.com/Kong/kong/blob/master/COMMUNITY_PLEDGE.md)
+ for more information.
+
+ Sincerely,
+ Your Kong Gateway team
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/label-check.yml b/.github/workflows/label-check.yml
new file mode 100644
index 000000000000..bfa8b67a7981
--- /dev/null
+++ b/.github/workflows/label-check.yml
@@ -0,0 +1,16 @@
+name: Pull Request Label Checker
+on:
+ pull_request:
+ types: [opened, edited, synchronize, labeled, unlabeled]
+jobs:
+ check-labels:
+ name: prevent merge labels
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: do-not-merge label found
+ run: echo "do-not-merge label found, this PR will not be merged"; exit 1
+ if: ${{ contains(github.event.*.labels.*.name, 'pr/do not merge') || contains(github.event.*.labels.*.name, 'DO NOT MERGE') }}
+ - name: backport master label found
+      run: echo "Please do not backport into master; instead, create a PR targeting master and backport from it."; exit 1
+ if: ${{ contains(github.event.*.labels.*.name, 'backport master') }}
diff --git a/.github/workflows/label-community-pr.yml b/.github/workflows/label-community-pr.yml
new file mode 100644
index 000000000000..b1eb9d1fdda1
--- /dev/null
+++ b/.github/workflows/label-community-pr.yml
@@ -0,0 +1,34 @@
+name: Label community PRs
+
+on:
+ schedule:
+ - cron: '*/30 * * * *'
+
+permissions:
+ pull-requests: write
+
+jobs:
+ check_author:
+ runs-on: ubuntu-latest
+ defaults:
+ run:
+ shell: bash
+ steps:
+ - uses: actions/checkout@v4
+ - name: Label Community PR
+ env:
+ GH_TOKEN: ${{ secrets.COMMUNITY_PRS_TOKEN }}
+ LABEL: "author/community"
+ BOTS: "team-gateway-bot app/dependabot"
+ run: |
+ set +e
+ for id in `gh pr list -S 'draft:false' -s 'open'|awk '{print $1}'`
+ do
+ name=`gh pr view $id --json author -q '.author.login'`
+ ret=`gh api orgs/Kong/members --paginate -q '.[].login'|grep "^${name}$"`
+ if [[ -z $ret && ! "${BOTS[@]}" =~ $name ]]; then
+ gh pr edit $id --add-label "${{ env.LABEL }}"
+ else
+ gh pr edit $id --remove-label "${{ env.LABEL }}"
+ fi
+ done
diff --git a/.github/workflows/label-schema.yml b/.github/workflows/label-schema.yml
new file mode 100644
index 000000000000..38af629d9aa4
--- /dev/null
+++ b/.github/workflows/label-schema.yml
@@ -0,0 +1,14 @@
+name: Pull Request Schema Labeler
+on:
+ pull_request:
+ types: [opened, edited, synchronize, labeled, unlabeled]
+jobs:
+ schema-change-labels:
+ if: "${{ contains(github.event.*.labels.*.name, 'schema-change-noteworthy') }}"
+ runs-on: ubuntu-latest
+ steps:
+ - name: Schema change label found
+ uses: rtCamp/action-slack-notify@v2
+ continue-on-error: true
+ env:
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_SCHEMA_CHANGE }}
diff --git a/.github/workflows/label.yml b/.github/workflows/label.yml
index d102b8c96e41..4613569074b3 100644
--- a/.github/workflows/label.yml
+++ b/.github/workflows/label.yml
@@ -17,6 +17,6 @@ jobs:
pull-requests: write
steps:
- - uses: actions/labeler@v3.0.2
+ - uses: actions/labeler@v4
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
diff --git a/.github/workflows/package.yml b/.github/workflows/package.yml
deleted file mode 100644
index 7a75989bf673..000000000000
--- a/.github/workflows/package.yml
+++ /dev/null
@@ -1,128 +0,0 @@
-name: Package & Smoke Test
-
-on: # yamllint disable-line rule:truthy
- pull_request:
- push:
- branches:
- - master
- - next/*
- - release/*
-
-env:
- DOCKER_REPOSITORY: kong/kong-build-tools
-
-jobs:
- package-and-test:
- if: github.event_name == 'pull_request'
- name: Build & Smoke Test Packages
- runs-on: ubuntu-22.04
-
- steps:
- - name: Swap git with https
- run: git config --global url."https://github".insteadOf git://github
-
- - name: Setup some environment variables
- run: |
- echo "KONG_SOURCE_LOCATION=$GITHUB_WORKSPACE/kong-src" >> $GITHUB_ENV
- echo "KONG_BUILD_TOOLS_LOCATION=$GITHUB_WORKSPACE/kong-build-tools" >> $GITHUB_ENV
-
- - name: Checkout Kong source code
- uses: actions/checkout@v3
- with:
- path: ${{ env.KONG_SOURCE_LOCATION }}
- submodules: recursive
- token: ${{ secrets.GHA_KONG_BOT_READ_TOKEN }}
-
- - name: Setup kong-build-tools
- run: |
- pushd ${{ env.KONG_SOURCE_LOCATION }}
- make setup-kong-build-tools
-
- - name: Setup package naming environment variables
- run: |
- grep -v '^#' ${{ env.KONG_SOURCE_LOCATION}}/.requirements >> $GITHUB_ENV
-
- - name: Package & Test
- env:
- GITHUB_TOKEN: ${{ secrets.GHA_KONG_BOT_READ_TOKEN }}
- run: |
- pushd ${{ env.KONG_SOURCE_LOCATION }}
- make package/test/deb
-
- package-test-and-unofficial-release:
- if: github.event_name == 'push'
- name: Build & Smoke & Unofficial Release Packages
- runs-on: ubuntu-22.04
- strategy:
- matrix:
- package_type: [deb, rpm, apk]
-
- steps:
- - name: Login to Docker Hub
- uses: docker/login-action@v2
- with:
- username: ${{ secrets.GHA_DOCKERHUB_PUSH_USER }}
- password: ${{ secrets.GHA_KONG_ORG_DOCKERHUB_PUSH_TOKEN }}
-
- - name: Swap git with https
- run: git config --global url."https://github".insteadOf git://github
-
- - name: Setup directory environment variables
- run: |
- echo "KONG_SOURCE_LOCATION=$GITHUB_WORKSPACE/kong-src" >> $GITHUB_ENV
- echo "KONG_BUILD_TOOLS_LOCATION=$GITHUB_WORKSPACE/kong-build-tools" >> $GITHUB_ENV
-
- - name: Checkout Kong source code
- uses: actions/checkout@v3
- with:
- path: ${{ env.KONG_SOURCE_LOCATION }}
- submodules: recursive
- token: ${{ secrets.GHA_KONG_BOT_READ_TOKEN }}
-
- - name: Setup kong-build-tools
- run: |
- pushd ${{ env.KONG_SOURCE_LOCATION }}
- make setup-kong-build-tools
-
- - name: Setup package naming environment variables
- run: |
- grep -v '^#' ${{ env.KONG_SOURCE_LOCATION}}/.requirements >> $GITHUB_ENV
- echo "DOCKER_RELEASE_REPOSITORY=kong/kong" >> $GITHUB_ENV
- echo "KONG_TEST_CONTAINER_TAG=${GITHUB_REF_NAME##*/}-${{ matrix.package_type }}" >> $GITHUB_ENV
- if [[ ${{matrix.package_type }} == "apk" ]]; then
- echo "ADDITIONAL_TAG_LIST=${GITHUB_REF_NAME##*/}-alpine" >> $GITHUB_ENV
- fi
- if [[ ${{matrix.package_type }} == "deb" ]]; then
- echo "ADDITIONAL_TAG_LIST=${GITHUB_REF_NAME##*/}-debian ${GITHUB_REF_NAME##*/} $GITHUB_SHA" >> $GITHUB_ENV
- fi
-
- - name: Package & Test
- env:
- GITHUB_TOKEN: ${{ secrets.GHA_KONG_BOT_READ_TOKEN }}
- run: |
- pushd ${{ env.KONG_SOURCE_LOCATION }}
- make package/test/${{ matrix.package_type }}
-
- - name: Push Docker Image
- continue-on-error: true
- env:
- SKIP_TESTS: true
- run: |
- pushd ${{ env.KONG_SOURCE_LOCATION }}
- make release/docker/${{ matrix.package_type }}
-
- - name: Store the package artifacts
- continue-on-error: true
- uses: actions/upload-artifact@v3
- with:
- name: ${{ matrix.package_type }}
- path: ${{ env.KONG_BUILD_TOOLS_LOCATION }}/output/*
-
- - name: Comment on commit
- continue-on-error: true
- uses: peter-evans/commit-comment@v2
- with:
- token: ${{ secrets.GHA_COMMENT_TOKEN }}
- body: |
- Docker image avaialble ${{ env.DOCKER_RELEASE_REPOSITORY }}:${{ env.KONG_TEST_CONTAINER_TAG }}
- Artifacts availabe https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
diff --git a/.github/workflows/perf.yml b/.github/workflows/perf.yml
index f681199a397d..f30b1b95a136 100644
--- a/.github/workflows/perf.yml
+++ b/.github/workflows/perf.yml
@@ -2,18 +2,91 @@ name: Performance Test
on:
pull_request:
- issue_comment:
schedule:
# don't know the timezone but it's daily at least
- cron: '0 7 * * *'
env:
- terraform_version: '1.1.2'
+ terraform_version: '1.2.4'
+ HAS_ACCESS_TO_GITHUB_TOKEN: ${{ github.event_name != 'pull_request' || (github.event.pull_request.head.repo.full_name == github.repository && github.actor != 'dependabot[bot]') }}
+ BUILD_ROOT: ${{ github.workspace }}/bazel-bin/build
+
+ # only for pr
+ GHA_CACHE: ${{ github.event_name == 'pull_request' }}
jobs:
+ build-packages:
+ name: Build dependencies
+ runs-on: ubuntu-22.04
+ if: |
+ github.event_name == 'schedule' ||
+ (github.event_name == 'pull_request' && startsWith(github.event.pull_request.title, 'perf(')) ||
+ (github.event_name == 'issue_comment' && github.event.action == 'created' &&
+ github.event.issue.pull_request &&
+ contains('["OWNER", "COLLABORATOR", "MEMBER"]', github.event.comment.author_association) &&
+ (startsWith(github.event.comment.body, '/perf') || startsWith(github.event.comment.body, '/flamegraph'))
+ )
+
+ outputs:
+ cache-key: ${{ steps.cache-key.outputs.cache-key }}
+
+ steps:
+ - name: Checkout Kong source code
+ uses: actions/checkout@v4
+
+ - name: Generate cache key
+ id: cache-key
+ uses: ./.github/actions/build-cache-key
+ with:
+ prefix: perf
+
+ - name: Lookup build cache
+ id: cache-deps
+ uses: actions/cache@v3
+ with:
+ path: ${{ env.BUILD_ROOT }}
+ key: ${{ steps.cache-key.outputs.cache-key }}
+
+ - name: Install packages
+ if: steps.cache-deps.outputs.cache-hit != 'true'
+ run: sudo apt update && sudo apt install libyaml-dev valgrind libprotobuf-dev
+
+ - name: Build Kong
+ if: steps.cache-deps.outputs.cache-hit != 'true'
+ env:
+ GH_TOKEN: ${{ github.token }}
+ run: |
+ make build-kong
+ BUILD_PREFIX=$BUILD_ROOT/kong-dev
+ export PATH="$BUILD_PREFIX/bin:$BUILD_PREFIX/openresty/nginx/sbin:$BUILD_PREFIX/openresty/bin:$PATH"
+ chmod +rw -R $BUILD_PREFIX
+ nginx -V
+ ldd $(which nginx)
+ luarocks
+
+ - name: Bazel Outputs
+ uses: actions/upload-artifact@v3
+ if: failure()
+ with:
+ name: bazel-outputs
+ path: |
+ bazel-out/_tmp/actions
+ retention-days: 3
+
+ - name: Build Dev Kong dependencies
+ if: steps.cache-deps.outputs.cache-hit != 'true'
+ run: |
+ make install-dev-rocks
+
perf:
name: RPS, latency and flamegraphs
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
+ needs: build-packages
+
+ permissions:
+ # required to send comments with graphs and results in the PR
+ pull-requests: write
+
if: |
github.event_name == 'schedule' ||
(github.event_name == 'pull_request' && startsWith(github.event.pull_request.title, 'perf(')) ||
@@ -23,45 +96,37 @@ jobs:
(startsWith(github.event.comment.body, '/perf') || startsWith(github.event.comment.body, '/flamegraph'))
)
+ # perf test can only run one at a time per repo for now
+ concurrency:
+ group: perf-ce
+
steps:
+ # set up mutex across CE and EE to avoid resource race
+ - name: Set up mutex
+ uses: ben-z/gh-action-mutex@9709ba4d8596ad4f9f8bbe8e0f626ae249b1b3ac # v1.0-alpha-6
+ with:
+ repository: "Kong/kong-perf-mutex-lock"
+ branch: "gh-mutex"
+ repo-token: ${{ secrets.PAT }}
+
- name: Checkout Kong source code
- uses: actions/checkout@v2
- # Fetch all history for all tags and branches
+ uses: actions/checkout@v4
with:
+ # Fetch all history for all tags and branches
fetch-depth: 0
-
- - name: Install OpenResty
- run: |
- openresty_version=$(cat .requirements | grep RESTY_VERSION= | cut -d= -f2)
- sudo apt-get -y install --no-install-recommends wget gnupg ca-certificates
- wget -O - https://openresty.org/package/pubkey.gpg | sudo apt-key add -
- echo "deb http://openresty.org/package/ubuntu $(lsb_release -sc) main" | \
- sudo tee /etc/apt/sources.list.d/openresty.list
- sudo apt-get update
- sudo apt-get install "openresty=${openresty_version}*" "openresty-resty=${openresty_version}*" -y
- sudo apt-mark hold openresty
+ - name: Load Cached Packages
+ id: cache-deps
+ if: env.GHA_CACHE == 'true'
+ uses: actions/cache@v3
+ with:
+ path: ${{ env.BUILD_ROOT }}
+ key: ${{ needs.build-packages.outputs.cache-key }}
- - name: Install Dependencies
+ - name: Install performance test Dependencies
run: |
- wget https://luarocks.org/releases/luarocks-3.7.0.tar.gz -O - |tar zxvf -
- pushd luarocks-*/
- ./configure --with-lua=/usr/local/openresty/luajit/ \
- --lua-suffix=jit \
- --with-lua-include=/usr/local/openresty/luajit/include/luajit-2.1
- sudo make install
- popd
-
- # just need the lua files to let all imports happy
- # the CI won't actually run Kong locally
- git clone https://github.com/kong/lua-kong-nginx-module /tmp/lua-kong-nginx-module
- pushd /tmp/lua-kong-nginx-module
- sudo make LUA_LIB_DIR=/usr/local/share/lua/5.1/ install
- popd
-
# in Kong repository
- sudo apt install libyaml-dev -y
- sudo make dev
+ sudo apt update && sudo apt install inkscape -y
# terraform!
wget https://releases.hashicorp.com/terraform/${{ env.terraform_version }}/terraform_${{ env.terraform_version }}_linux_amd64.zip
@@ -71,8 +136,8 @@ jobs:
- name: Choose perf suites
id: choose_perf
run: |
- suites=$(echo "${{ github.event.comment.body }}" | awk '{print $1}')
- tags=$(echo "${{ github.event.comment.body }}" | awk '{print $2}')
+ suites="$(printf '%s' "${{ github.event.comment.body }}" | awk '{print $1}')"
+ tags="$(printf '%s' "${{ github.event.comment.body }}" | awk '{print $2}')"
if [[ $suite == "/flamegraph" ]]; then
suites="02-flamegraph"
@@ -92,63 +157,137 @@ jobs:
fi
fi
- echo ::set-output name=suites::"$suites"
- echo ::set-output name=tags::"$tags"
+ echo "suites=$suites" >> $GITHUB_OUTPUT
+ echo "tags=$tags" >> $GITHUB_OUTPUT
+
+ - uses: xt0rted/pull-request-comment-branch@d97294d304604fa98a2600a6e2f916a84b596dc7 # v1.4.1
+ id: comment-branch
+ if: github.event_name == 'issue_comment' && github.event.action == 'created'
+
+ - name: Find compared versions
+ id: compare_versions
+ run: |
+ pr_ref=$(echo "${{ github.event.pull_request.base.ref }}")
+ custom_vers="$(printf '%s' "${{ github.event.comment.body }}" | awk '{print $3}')"
+
+ if [[ ! -z "${pr_ref}" ]]; then
+ vers="git:${{ github.head_ref }},git:${pr_ref}"
+ elif [[ ! -z "${custom_vers}" ]]; then
+ vers="${custom_vers}"
+ elif [[ ! -z "${{ github.event.comment.body }}" ]]; then
+ vers="git:${{ steps.comment-branch.outputs.head_ref}},git:${{ steps.comment-branch.outputs.base_ref}}"
+ else # is cron job/on master
+ vers="git:master,git:origin/master~10,git:origin/master~50"
+ fi
+
+ echo $vers
+
+ echo "vers=$vers" >> $GITHUB_OUTPUT
+
- name: Run Tests
env:
- PERF_TEST_VERSIONS: git:${{ github.sha }},git:master
- PERF_TEST_METAL_PROJECT_ID: ${{ secrets.PERF_TEST_PACKET_PROJECT_ID }}
- PERF_TEST_METAL_AUTH_TOKEN: ${{ secrets.PERF_TEST_PACKET_AUTH_TOKEN }}
+ PERF_TEST_VERSIONS: ${{ steps.compare_versions.outputs.vers }}
PERF_TEST_DRIVER: terraform
- timeout-minutes: 60
+ PERF_TEST_TERRAFORM_PROVIDER: bring-your-own
+ PERF_TEST_BYO_KONG_IP: ${{ secrets.PERF_TEST_BYO_KONG_IP }}
+ PERF_TEST_BYO_WORKER_IP: ${{ secrets.PERF_TEST_BYO_WORKER_IP }}
+ PERF_TEST_BYO_SSH_USER: gha
+ PERF_TEST_USE_DAILY_IMAGE: true
+ PERF_TEST_DISABLE_EXEC_OUTPUT: true
+ timeout-minutes: 180
run: |
+ export PERF_TEST_BYO_SSH_KEY_PATH=$(pwd)/ssh_key
+ echo "${{ secrets.PERF_TEST_BYO_SSH_KEY }}" > ${PERF_TEST_BYO_SSH_KEY_PATH}
+
+ chmod 600 ${PERF_TEST_BYO_SSH_KEY_PATH}
+ # setup tunnel for psql and admin port
+ ssh -o StrictHostKeyChecking=no -o TCPKeepAlive=yes -o ServerAliveInterval=10 \
+ -o ExitOnForwardFailure=yes -o ConnectTimeout=5 \
+ -L 15432:localhost:5432 -L 39001:localhost:39001 \
+ -i ${PERF_TEST_BYO_SSH_KEY_PATH} \
+ ${PERF_TEST_BYO_SSH_USER}@${PERF_TEST_BYO_KONG_IP} tail -f /dev/null &
+ sleep 5
+
+ sudo iptables -t nat -I OUTPUT -p tcp --dport 5432 -d ${PERF_TEST_BYO_KONG_IP} -j DNAT --to 127.0.0.1:15432
+ sudo iptables -t nat -I OUTPUT -p tcp --dport 39001 -d ${PERF_TEST_BYO_KONG_IP} -j DNAT --to 127.0.0.1:39001
+
+ make venv-dev # required to install other dependencies like bin/grpcurl
+ source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh
for suite in ${{ steps.choose_perf.outputs.suites }}; do
# Run each test individually, ngx.pipe doesn't like to be imported twice
# maybe bin/busted --no-auto-insulate
for f in $(find "spec/04-perf/$suite/" -type f); do
- bin/busted -o gtest "$f" \
+ bin/busted "$f" \
-t "${{ steps.choose_perf.outputs.tags }}"
done
done
-
+
- name: Teardown
# Note: by default each job has if: ${{ success() }}
if: always()
env:
- PERF_TEST_VERSIONS: git:${{ github.sha }},git:master
- PERF_TEST_METAL_PROJECT_ID: ${{ secrets.PERF_TEST_PACKET_PROJECT_ID }}
- PERF_TEST_METAL_AUTH_TOKEN: ${{ secrets.PERF_TEST_PACKET_AUTH_TOKEN }}
+ PERF_TEST_VERSIONS: git:${{ github.sha }}
PERF_TEST_DRIVER: terraform
- PERF_TEST_TEARDOWN_ALL: "true"
+ PERF_TEST_TERRAFORM_PROVIDER: bring-your-own
+ PERF_TEST_BYO_KONG_IP: ${{ secrets.PERF_TEST_BYO_KONG_IP }}
+ PERF_TEST_BYO_WORKER_IP: ${{ secrets.PERF_TEST_BYO_WORKER_IP }}
+ PERF_TEST_BYO_SSH_USER: gha
+ PERF_TEST_TEARDOWN_ALL: true
run: |
- bin/busted -o gtest spec/04-perf/99-teardown/
+ export PERF_TEST_BYO_SSH_KEY_PATH=$(pwd)/ssh_key
+ echo "${{ secrets.PERF_TEST_BYO_SSH_KEY }}" > ${PERF_TEST_BYO_SSH_KEY_PATH}
- - name: Save results
- uses: actions/upload-artifact@v2
+ make venv-dev # required to install other dependencies like bin/grpcurl
+ source ${{ env.BUILD_ROOT }}/kong-dev-venv.sh
+ bin/busted spec/04-perf/99-teardown/
+
+ rm -f ${PERF_TEST_BYO_SSH_KEY_PATH}
+
+ - name: Generate high DPI graphs
+ if: always()
+ run: |
+ for i in $(ls output/*.svg); do
+ inkscape --export-area-drawing --export-png="${i%.*}.png" --export-dpi=300 -b FFFFFF $i
+ done
+
+ - uses: actions/setup-python@v4
with:
- name: rps-and-latency
- path: |
- output/result.txt
- retention-days: 31
+ python-version: '3.10'
+ cache: 'pip'
- - name: Save flamegrpahs
- uses: actions/upload-artifact@v2
+ - name: Generate plots
+ if: always()
+ run: |
+ cwd=$(pwd)
+ cd spec/helpers/perf/charts/
+ pip install -r requirements.txt
+ for i in $(ls ${cwd}/output/*.data.json); do
+ python ./charts.py $i -o "${cwd}/output/"
+ done
+
+ - name: Save results
+ uses: actions/upload-artifact@v3
+ if: always()
with:
- name: flamegraphs
+ name: perf-results
path: |
- output/*.svg
+ output/
+ !output/**/*.log
+
retention-days: 31
- name: Save error logs
- uses: actions/upload-artifact@v2
+ uses: actions/upload-artifact@v3
+ if: always()
with:
name: error_logs
path: |
- output/*.log
+ output/**/*.log
retention-days: 31
- name: Output
+ if: always()
id: output
run: |
if [[ "${{ steps.choose_perf.outputs.suites }}" =~ "02-flamegraph" ]]; then
@@ -164,13 +303,22 @@ jobs:
result="${result//$'\n'/'%0A'}"
result="${result//$'\r'/'%0D'}"
- echo ::set-output name=result::"$result"
-
+ echo "result=$result" >> $GITHUB_OUTPUT
+
+ - name: Upload charts
+ if: always()
+ id: charts
+ uses: devicons/public-upload-to-imgur@352cf5f2805c692539a96cfe49a09669e6fca88e # v2.2.2
+ continue-on-error: true
+ with:
+ path: output/*.png
+ client_id: ${{ secrets.PERF_TEST_IMGUR_CLIENT_ID }}
+
- name: Comment
if: |
github.event_name == 'pull_request' ||
(github.event_name == 'issue_comment' && github.event.issue.pull_request)
- uses: actions-ecosystem/action-create-comment@v1
+ uses: actions-ecosystem/action-create-comment@e23bc59fbff7aac7f9044bd66c2dc0fe1286f80b # v1.0.0
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
body: |
@@ -178,6 +326,8 @@ jobs:
**Test Suite**: ${{ steps.choose_perf.outputs.suites }} (${{ steps.choose_perf.outputs.tags }})
+ ${{ join(fromJSON(steps.charts.outputs.markdown_urls), ' ') }}
+
Click to expand
```
@@ -188,4 +338,4 @@ jobs:
- [Download Artifacts](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}#artifacts)
+ [Download Artifacts](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}#artifacts) for detailed results and interactive SVG flamegraphs.
diff --git a/.github/workflows/release-and-tests-fail-bot.yml b/.github/workflows/release-and-tests-fail-bot.yml
new file mode 100644
index 000000000000..d651bef52903
--- /dev/null
+++ b/.github/workflows/release-and-tests-fail-bot.yml
@@ -0,0 +1,47 @@
+name: Notify Slack user on workflow failure
+
+on:
+ workflow_run:
+ workflows: ["Package & Release", "Build & Test"]
+ types:
+ - completed
+ branches:
+ - master
+ - release/*
+ - next/*
+
+jobs:
+ notify_failure:
+ runs-on: ubuntu-latest
+ if: ${{ github.event.workflow_run.conclusion == 'failure' && github.event.workflow_run.event != 'schedule' }}
+ steps:
+ - name: Generate Slack Payload
+ id: generate-payload
+ env:
+ SLACK_CHANNEL: gateway-notifications
+ SLACK_MAPPING: "${{ vars.GH_ID_2_SLACK_ID_MAPPING }}"
+ uses: actions/github-script@v7
+ with:
+ script: |
+ const slack_mapping = JSON.parse(process.env.SLACK_MAPPING);
+ const repo_name = "${{ github.event.workflow_run.repository.full_name }}";
+ const run_id = ${{ github.event.workflow_run.id }};
+ const run_url = `https://github.com/${repo_name}/actions/runs/${run_id}`;
+ const workflow_name = "${{ github.event.workflow_run.name }}";
+ const branch_name = "${{ github.event.workflow_run.head_branch }}";
+ const actor_github_id = "${{ github.event.workflow_run.actor.login }}";
+ const actor_slack_id = slack_mapping[actor_github_id];
+ const actor = actor_slack_id ? `<@${actor_slack_id}>` : actor_github_id;
+ const payload = {
+ text: `Workflow “${workflow_name}” failed in repo: "${repo_name}", branch: "${branch_name}". Run URL: ${run_url}. Please check it ${actor} .`,
+ channel: process.env.SLACK_CHANNEL,
+ };
+ return JSON.stringify(payload);
+ result-encoding: string
+
+ - name: Send Slack Message
+ uses: slackapi/slack-github-action@e28cf165c92ffef168d23c5c9000cffc8a25e117 # v1.24.0
+ with:
+ payload: ${{ steps.generate-payload.outputs.result }}
+ env:
+ SLACK_WEBHOOK_URL: ${{ secrets.SLACK_GATEWAY_NOTIFICATIONS_WEBHOOK }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 000000000000..517eb7e1b03b
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,694 @@
+name: Package & Release
+
+# The workflow to build and release official Kong packages and images.
+#
+# TODO:
+# Do not bump the version of actions/checkout to v4 before dropping rhel7 and amazonlinux2.
+
+on: # yamllint disable-line rule:truthy
+ pull_request:
+ paths-ignore:
+ - '**/*.md'
+ - '.github/workflows/build_and_test.yml'
+ - 'changelog/**'
+ - 'kong.conf.default'
+ schedule:
+ - cron: '0 0 * * *'
+ push:
+ branches:
+ - master
+ workflow_dispatch:
+ inputs:
+ official:
+ description: 'Official release?'
+ required: true
+ type: boolean
+ default: false
+ version:
+ description: 'Release version, e.g. `3.0.0.0-beta.2`'
+ required: true
+ type: string
+
+# `commit-ly` is a flag that indicates whether the build should be run per commit.
+
+env:
+ # official release repo
+ DOCKER_REPOSITORY: kong/kong
+ PRERELEASE_DOCKER_REPOSITORY: kong/kong
+ FULL_RELEASE: ${{ github.event_name == 'schedule' || github.event_name == 'workflow_dispatch' }}
+
+ # only for pr
+ GHA_CACHE: ${{ github.event_name == 'pull_request' }}
+
+ HAS_ACCESS_TO_GITHUB_TOKEN: ${{ github.event_name != 'pull_request' || (github.event.pull_request.head.repo.full_name == github.repository && github.actor != 'dependabot[bot]') }}
+
+
+jobs:
+ metadata:
+ name: Metadata
+ runs-on: ubuntu-22.04
+ outputs:
+ kong-version: ${{ steps.build-info.outputs.kong-version }}
+ prerelease-docker-repository: ${{ env.PRERELEASE_DOCKER_REPOSITORY }}
+ docker-repository: ${{ steps.build-info.outputs.docker-repository }}
+ release-desc: ${{ steps.build-info.outputs.release-desc }}
+ release-label: ${{ steps.build-info.outputs.release-label || '' }}
+ deploy-environment: ${{ steps.build-info.outputs.deploy-environment }}
+ matrix: ${{ steps.build-info.outputs.matrix }}
+ arch: ${{ steps.build-info.outputs.arch }}
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Build Info
+ id: build-info
+ run: |
+ KONG_VERSION=$(bash scripts/grep-kong-version.sh)
+ echo "kong-version=$KONG_VERSION" >> $GITHUB_OUTPUT
+
+ if [ "${{ github.event_name == 'schedule' }}" == "true" ]; then
+ echo "release-label=$(date -u +'%Y%m%d')" >> $GITHUB_OUTPUT
+ fi
+
+ matrix_file=".github/matrix-commitly.yml"
+ if [ "$FULL_RELEASE" == "true" ]; then
+ matrix_file=".github/matrix-full.yml"
+ fi
+
+ if [ "${{ github.event.inputs.official }}" == "true" ]; then
+ release_desc="$KONG_VERSION (official)"
+ echo "docker-repository=$DOCKER_REPOSITORY" >> $GITHUB_OUTPUT
+ echo "deploy-environment=release" >> $GITHUB_OUTPUT
+ else
+ release_desc="$KONG_VERSION (pre-release)"
+ echo "docker-repository=$PRERELEASE_DOCKER_REPOSITORY" >> $GITHUB_OUTPUT
+ fi
+
+ echo "release-desc=$release_desc" >> $GITHUB_OUTPUT
+
+ echo "matrix=$(yq -I=0 -o=json $matrix_file)" >> $GITHUB_OUTPUT
+
+ cat $GITHUB_OUTPUT
+
+ echo "### :package: Building and packaging for $release_desc" >> $GITHUB_STEP_SUMMARY
+ echo >> $GITHUB_STEP_SUMMARY
+ echo '- event_name: ${{ github.event_name }}' >> $GITHUB_STEP_SUMMARY
+ echo '- ref_name: ${{ github.ref_name }}' >> $GITHUB_STEP_SUMMARY
+ echo '- inputs.version: ${{ github.event.inputs.version }}' >> $GITHUB_STEP_SUMMARY
+ echo >> $GITHUB_STEP_SUMMARY
+ echo '```' >> $GITHUB_STEP_SUMMARY
+ cat $GITHUB_OUTPUT >> $GITHUB_STEP_SUMMARY
+ echo '```' >> $GITHUB_STEP_SUMMARY
+
+ build-packages:
+ needs: metadata
+ name: Build & Package - ${{ matrix.label }}
+ environment: ${{ needs.metadata.outputs.deploy-environment }}
+
+ strategy:
+ fail-fast: false
+ matrix:
+ include: "${{ fromJSON(needs.metadata.outputs.matrix)['build-packages'] }}"
+
+ runs-on: ubuntu-22.04
+ container:
+ image: ${{ matrix.image }}
+ options: --privileged
+
+ steps:
+ - name: Early Rpm Setup
+ if: matrix.package == 'rpm' && matrix.image != ''
+ run: |
+ # tar/gzip is needed to restore git cache (if available)
+ yum install -y tar gzip which file zlib-devel
+
+ - name: Early Deb in Container Setup
+ if: matrix.package == 'deb' && matrix.image != ''
+ run: |
+ # tar/gzip is needed to restore git cache (if available)
+ apt-get update
+ apt-get install -y git tar gzip file sudo
+
+ - name: Cache Git
+ id: cache-git
+ if: (matrix.package == 'rpm' || matrix.image == 'debian:10') && matrix.image != ''
+ uses: actions/cache@v3
+ with:
+ path: /usr/local/git
+ key: ${{ matrix.label }}-git-2.41.0
+
+ # el-7/8, amazonlinux-2/2023, and debian-10 don't have git 2.18+, so we need to install it manually
+ - name: Install newer Git
+ if: (matrix.package == 'rpm' || matrix.image == 'debian:10') && matrix.image != '' && steps.cache-git.outputs.cache-hit != 'true'
+ run: |
+ if which apt 2>/dev/null; then
+ apt update
+ apt install -y wget libz-dev libssl-dev libcurl4-gnutls-dev gettext make gcc autoconf sudo
+ else
+ yum update -y
+ yum groupinstall -y 'Development Tools'
+ yum install -y wget zlib-devel openssl-devel curl-devel expat-devel gettext-devel perl-CPAN perl-devel
+ fi
+ wget https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.41.0.tar.gz
+ tar xf git-2.41.0.tar.gz
+ cd git-2.41.0
+
+ # https://gitlab.com/gitlab-org/omnibus-gitlab/-/merge_requests/5948/diffs
+ if [[ ${{ matrix.image }} == "centos:7" ]]; then
+ echo 'CFLAGS=-std=gnu99' >> config.mak
+ fi
+
+ make configure
+ ./configure --prefix=/usr/local/git
+ make -j$(nproc)
+ make install
+
+ - name: Add Git to PATH
+ if: (matrix.package == 'rpm' || matrix.image == 'debian:10') && matrix.image != ''
+ run: |
+ echo "/usr/local/git/bin" >> $GITHUB_PATH
+
+ - name: Debian Git dependencies
+ if: matrix.image == 'debian:10'
+ run: |
+ apt update
+ # dependencies for git
+ apt install -y wget libz-dev libssl-dev libcurl4-gnutls-dev sudo
+
+ - name: Checkout Kong source code
+ uses: actions/checkout@v3
+
+ - name: Swap git with https
+ run: git config --global url."https://github".insteadOf git://github
+
+ - name: Generate build cache key
+ id: cache-key
+ if: env.GHA_CACHE == 'true'
+ uses: ./.github/actions/build-cache-key
+ with:
+ prefix: ${{ matrix.label }}-build
+ extra: |
+ ${{ hashFiles('kong/**') }}
+
+ - name: Cache Packages
+ id: cache-deps
+ if: env.GHA_CACHE == 'true'
+ uses: actions/cache@v3
+ with:
+ path: bazel-bin/pkg
+ key: ${{ steps.cache-key.outputs.cache-key }}
+
+ - name: Set .requirements into environment variables
+ run: |
+ grep -v '^#' .requirements >> $GITHUB_ENV
+
+ - name: Setup Bazel
+ uses: bazelbuild/setup-bazelisk@95c9bf48d0c570bb3e28e57108f3450cd67c1a44 # v2.0.0
+
+ - name: Install Deb Dependencies
+ if: matrix.package == 'deb' && steps.cache-deps.outputs.cache-hit != 'true'
+ run: |
+ sudo apt-get update && sudo DEBIAN_FRONTEND=noninteractive apt-get install -y \
+ automake \
+ build-essential \
+ curl \
+ file \
+ libyaml-dev \
+ m4 \
+ perl \
+ pkg-config \
+ unzip \
+ zlib1g-dev
+
+ - name: Install Rpm Dependencies
+ if: matrix.package == 'rpm' && matrix.image != ''
+ run: |
+ yum groupinstall -y 'Development Tools'
+ dnf config-manager --set-enabled powertools || true # enable devel packages on rockylinux:8
+ dnf config-manager --set-enabled crb || true # enable devel packages on rockylinux:9
+ yum install -y libyaml-devel zlib-devel
+ yum install -y cpanminus || (yum install -y perl && curl -L https://raw.githubusercontent.com/miyagawa/cpanminus/master/cpanm | perl - App::cpanminus) # amazonlinux2023 removed cpanminus
+ # required for openssl 3.x config
+ cpanm IPC/Cmd.pm
+
+ - name: Build Kong dependencies
+ if: steps.cache-deps.outputs.cache-hit != 'true'
+ env:
+ GH_TOKEN: ${{ github.token }}
+ run: |
+ bazel build --config release //build:kong --verbose_failures ${{ matrix.bazel-args }}
+
+ - name: Package Kong - ${{ matrix.package }}
+ if: matrix.package != 'rpm' && steps.cache-deps.outputs.cache-hit != 'true'
+ run: |
+ bazel build --config release :kong_${{ matrix.package }} --verbose_failures ${{ matrix.bazel-args }}
+
+ - name: Package Kong - rpm
+ if: matrix.package == 'rpm' && steps.cache-deps.outputs.cache-hit != 'true'
+ env:
+ RELEASE_SIGNING_GPG_KEY: ${{ secrets.RELEASE_SIGNING_GPG_KEY }}
+ NFPM_RPM_PASSPHRASE: ${{ secrets.RELEASE_SIGNING_GPG_KEY_PASSPHRASE }}
+ run: |
+ if [ -n "${RELEASE_SIGNING_GPG_KEY:-}" ]; then
+ RPM_SIGNING_KEY_FILE=$(mktemp)
+ echo "$RELEASE_SIGNING_GPG_KEY" > $RPM_SIGNING_KEY_FILE
+ export RPM_SIGNING_KEY_FILE=$RPM_SIGNING_KEY_FILE
+ fi
+
+ bazel build --config release :kong_${{ matrix.package-type }} --action_env=RPM_SIGNING_KEY_FILE --action_env=NFPM_RPM_PASSPHRASE ${{ matrix.bazel-args }}
+
+ - name: Bazel Debug Outputs
+ if: failure()
+ run: |
+ cat bazel-out/_tmp/actions/stderr-*
+ sudo dmesg || true
+ tail -n500 bazel-out/**/*/CMake.log || true
+
+ - name: Upload artifact
+ uses: actions/upload-artifact@v3
+ with:
+ name: ${{ matrix.label }}-packages
+ path: bazel-bin/pkg
+ retention-days: 3
+
+ verify-manifest-packages:
+ needs: [metadata, build-packages]
+ name: Verify Manifest - Package ${{ matrix.label }}
+ runs-on: ubuntu-22.04
+
+ strategy:
+ fail-fast: false
+ matrix:
+ include: "${{ fromJSON(needs.metadata.outputs.matrix)['build-packages'] }}"
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Download artifact
+ uses: actions/download-artifact@v3
+ with:
+ name: ${{ matrix.label }}-packages
+ path: bazel-bin/pkg
+
+ - name: Install Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.11'
+ cache: 'pip' # caching pip dependencies
+
+ - name: Verify
+ run: |
+ cd scripts/explain_manifest
+ pip install -r requirements.txt
+ pkg=$(ls ../../bazel-bin/pkg/kong* |head -n1)
+ python ./main.py -f filelist.txt -p $pkg -o test.txt -s ${{ matrix.check-manifest-suite }}
+
+ build-images:
+ name: Build Images - ${{ matrix.label }}
+ needs: [metadata, build-packages]
+ runs-on: ubuntu-22.04
+
+ permissions:
+ # creating comments on commits for docker images requires the `write` permission
+ contents: write
+
+ strategy:
+ fail-fast: false
+ matrix:
+ include: "${{ fromJSON(needs.metadata.outputs.matrix)['build-images'] }}"
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Download artifact
+ uses: actions/download-artifact@v3
+ with:
+ name: ${{ matrix.artifact-from }}-packages
+ path: bazel-bin/pkg
+
+ - name: Download artifact (alt)
+ if: matrix.artifact-from-alt != ''
+ uses: actions/download-artifact@v3
+ with:
+ name: ${{ matrix.artifact-from-alt }}-packages
+ path: bazel-bin/pkg
+
+ - name: Login to Docker Hub
+ if: ${{ env.HAS_ACCESS_TO_GITHUB_TOKEN == 'true' }}
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v2.1.0
+ with:
+ username: ${{ secrets.GHA_DOCKERHUB_PUSH_USER }}
+ password: ${{ secrets.GHA_KONG_ORG_DOCKERHUB_PUSH_TOKEN }}
+
+ - name: Docker meta
+ id: meta
+ uses: docker/metadata-action@v5
+ with:
+ images: ${{ needs.metadata.outputs.prerelease-docker-repository }}
+ tags: |
+ type=raw,${{ github.sha }}-${{ matrix.label }}
+ type=raw,enable=${{ matrix.label == 'ubuntu' }},${{ github.sha }}
+
+ - name: Set up QEMU
+ if: matrix.docker-platforms != ''
+ uses: docker/setup-qemu-action@v2
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2
+
+ - name: Set platforms
+ id: docker_platforms_arg
+ run: |
+ platforms="${{ matrix.docker-platforms }}"
+ if [[ -z "$platforms" ]]; then
+ platforms="linux/amd64"
+ fi
+
+ echo "platforms=$platforms"
+ echo "platforms=$platforms" >> $GITHUB_OUTPUT
+
+ - name: Set rpm platform
+ id: docker_rpm_platform_arg
+ if: matrix.package == 'rpm'
+ run: |
+ rpm_platform="${{ matrix.rpm_platform }}"
+ if [[ -z "$rpm_platform" ]]; then
+ rpm_platform="el9"
+ fi
+
+ echo "rpm_platform=$rpm_platform"
+ echo "rpm_platform=$rpm_platform" >> $GITHUB_OUTPUT
+
+ - name: Build Docker Image
+ uses: docker/build-push-action@v5
+ with:
+ file: build/dockerfiles/${{ matrix.package }}.Dockerfile
+ context: .
+ push: ${{ env.HAS_ACCESS_TO_GITHUB_TOKEN == 'true' }}
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
+ platforms: ${{ steps.docker_platforms_arg.outputs.platforms }}
+ build-args: |
+ KONG_BASE_IMAGE=${{ matrix.base-image }}
+ KONG_ARTIFACT_PATH=bazel-bin/pkg/
+ RPM_PLATFORM=${{ steps.docker_rpm_platform_arg.outputs.rpm_platform }}
+ EE_PORTS=8002 8445 8003 8446 8004 8447
+
+ - name: Comment on commit
+ if: github.event_name == 'push' && matrix.label == 'ubuntu'
+ uses: peter-evans/commit-comment@5a6f8285b8f2e8376e41fe1b563db48e6cf78c09 # v3.0.0
+ continue-on-error: true # TODO: temporary fix until the token is back
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+ body: |
+ ### Bazel Build
+ Docker image available `${{ needs.metadata.outputs.prerelease-docker-repository }}:${{ github.sha }}`
+ Artifacts available https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
+
+ verify-manifest-images:
+ needs: [metadata, build-images]
+ name: Verify Manifest - Image ${{ matrix.label }}
+ runs-on: ubuntu-22.04
+ if: github.event_name != 'pull_request' || (github.event.pull_request.head.repo.full_name == github.repository && github.actor != 'dependabot[bot]')
+
+ strategy:
+ fail-fast: false
+ matrix:
+ include: "${{ fromJSON(needs.metadata.outputs.matrix)['build-images'] }}"
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Install Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.11'
+ cache: 'pip' # caching pip dependencies
+
+ - name: Verify
+ run: |
+ cd scripts/explain_manifest
+ # docker image verify requires sudo to set correct permissions, so we
+ # also install deps for root
+ sudo -E pip install -r requirements.txt
+ IMAGE=${{ env.PRERELEASE_DOCKER_REPOSITORY }}:${{ github.sha }}-${{ matrix.label }}
+
+ sudo -E python ./main.py --image $IMAGE -f docker_image_filelist.txt -s docker-image
+
+ scan-images:
+ name: Scan Images - ${{ matrix.label }}
+ needs: [metadata, build-images]
+ runs-on: ubuntu-22.04
+ if: |-
+ always()
+ && fromJSON(needs.metadata.outputs.matrix)['scan-vulnerabilities'] != ''
+ && needs.build-images.result == 'success'
+ && (github.event_name != 'pull_request' || (github.event.pull_request.head.repo.full_name == github.repository && github.actor != 'dependabot[bot]'))
+ strategy:
+ fail-fast: false
+ matrix:
+ include: "${{ fromJSON(needs.metadata.outputs.matrix)['scan-vulnerabilities'] }}"
+ env:
+ IMAGE: ${{ needs.metadata.outputs.prerelease-docker-repository }}:${{ github.sha }}-${{ matrix.label }}
+ steps:
+ - name: Install regctl
+ uses: regclient/actions/regctl-installer@main
+
+ - name: Login to Docker Hub
+ if: ${{ env.HAS_ACCESS_TO_GITHUB_TOKEN == 'true' }}
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v2.1.0
+ with:
+ username: ${{ secrets.GHA_DOCKERHUB_PUSH_USER }}
+ password: ${{ secrets.GHA_KONG_ORG_DOCKERHUB_PUSH_TOKEN }}
+
+ # TODO: Refactor matrix file to support and parse platforms specific to distro
+ # Workaround: Look for specific hardcoded amd64 architectures
+ - name: Parse Architecture Specific Image Manifest Digests
+ id: image_manifest_metadata
+ run: |
+ manifest_list_exists="$(
+ if regctl manifest get "${IMAGE}" --format raw-body --require-list -v panic &> /dev/null; then
+ echo true
+ else
+ echo false
+ fi
+ )"
+ echo "manifest_list_exists=$manifest_list_exists"
+ echo "manifest_list_exists=$manifest_list_exists" >> $GITHUB_OUTPUT
+
+ amd64_sha="$(regctl image digest "${IMAGE}" --platform linux/amd64 || echo '')"
+ echo "amd64_sha=$amd64_sha"
+ echo "amd64_sha=$amd64_sha" >> $GITHUB_OUTPUT
+
+ - name: Scan AMD64 Image digest
+ id: sbom_action_amd64
+ if: steps.image_manifest_metadata.outputs.amd64_sha != ''
+ uses: Kong/public-shared-actions/security-actions/scan-docker-image@v1
+ with:
+ asset_prefix: kong-${{ github.sha }}-${{ matrix.label }}-linux-amd64
+ image: ${{ needs.metadata.outputs.prerelease-docker-repository }}:${{ github.sha }}-${{ matrix.label }}
+
+ smoke-tests:
+ name: Smoke Tests - ${{ matrix.label }}
+ needs: [metadata, build-images]
+ runs-on: ubuntu-22.04
+ if: |-
+ fromJSON(needs.metadata.outputs.matrix)['smoke-tests'] != ''
+ && (github.event_name != 'pull_request' || (github.event.pull_request.head.repo.full_name == github.repository && github.actor != 'dependabot[bot]'))
+
+ # TODO: test packages
+ strategy:
+ fail-fast: false
+ matrix:
+ include: "${{ fromJSON(needs.metadata.outputs.matrix)['smoke-tests'] }}"
+
+ services:
+ postgres:
+ image: postgres:13
+ env:
+ POSTGRES_USER: kong
+ POSTGRES_DB: kong
+ POSTGRES_PASSWORD: kong
+ ports:
+ - "5432:5432"
+ options: --health-cmd pg_isready --health-interval 5s --health-timeout 5s --health-retries 8
+
+ env:
+ KONG_ADMIN_URI: http://localhost:8001
+ KONG_ADMIN_HTTP2_URI: https://localhost:8444
+ KONG_PROXY_URI: http://localhost:8000
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v2.1.0
+ with:
+ username: ${{ secrets.GHA_DOCKERHUB_PUSH_USER }}
+ password: ${{ secrets.GHA_KONG_ORG_DOCKERHUB_PUSH_TOKEN }}
+
+ - name: Setup Kong instance
+ # always pull the latest image to ensure we're testing the latest version.
+ run: |
+ docker run \
+ -p 8000:8000 -p 8001:8001 -p 8444:8444\
+ -e KONG_PG_PASSWORD=kong \
+ -e KONG_ADMIN_LISTEN="0.0.0.0:8001, 0.0.0.0:8444 ssl http2" \
+ -e KONG_ANONYMOUS_REPORTS=off \
+ --name kong \
+ --restart always \
+ --network=host -d \
+ --pull always \
+ ${{ env.PRERELEASE_DOCKER_REPOSITORY }}:${{ github.sha }}-${{ matrix.label }} \
+ sh -c "kong migrations bootstrap && kong start"
+ sleep 3
+ docker logs kong
+
+ - name: Smoke Tests - Version Test
+ run: |
+ workflow_version="$(
+ echo '${{ needs.metadata.outputs.kong-version }}' \
+ | sed -e 's@\.@\\\.@g'
+ )"
+
+ # confirm workflow's version and built container version match with
+ # dots escaped, and end-line delimited
+ if ! docker exec kong kong version | grep -E "${workflow_version}$"; then
+ echo "Built container's 'kong version' didn't match workflow's."
+ echo "Ensure that versions in the meta.lua files are as expected."
+ exit 1
+ fi
+
+ - name: Smoke Tests - Base Tests
+ env:
+ VERBOSE: ${{ runner.debug == '1' && '1' || '' }}
+ run: build/tests/01-base.sh
+
+ - name: Smoke Tests - Admin API
+ env:
+ VERBOSE: ${{ runner.debug == '1' && '1' || '' }}
+ run: build/tests/02-admin-api.sh
+
+ - name: Smoke Tests - HTTP2 Admin API
+ env:
+ VERBOSE: ${{ runner.debug == '1' && '1' || '' }}
+ run: build/tests/03-http2-admin-api.sh
+
+ release-packages:
+ name: Release Packages - ${{ matrix.label }} - ${{ needs.metadata.outputs.release-desc }}
+ needs: [metadata, build-packages, build-images, smoke-tests]
+ runs-on: ubuntu-22.04
+ if: fromJSON(needs.metadata.outputs.matrix)['release-packages'] != ''
+ timeout-minutes: 5 # PULP takes a while to publish
+ environment: release
+
+ strategy:
+ # limit to 3 jobs at a time
+ max-parallel: 3
+ fail-fast: false
+ matrix:
+ include: "${{ fromJSON(needs.metadata.outputs.matrix)['release-packages'] }}"
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Download artifact
+ uses: actions/download-artifact@v3
+ with:
+ name: ${{ matrix.artifact-from }}-packages
+ path: bazel-bin/pkg
+
+ - name: Set package architecture
+ id: pkg-arch
+ run: |
+ arch='amd64'
+ echo "arch=$arch"
+ echo "arch=$arch" >> $GITHUB_OUTPUT
+
+ - name: Upload Packages
+ env:
+ ARCHITECTURE: ${{ steps.pkg-arch.outputs.arch }}
+ OFFICIAL_RELEASE: ${{ github.event.inputs.official }}
+ ARTIFACT_VERSION: ${{ matrix.artifact-version }}
+ ARTIFACT_TYPE: ${{ matrix.artifact-type }}
+ ARTIFACT: ${{ matrix.artifact }}
+ INPUT_VERSION: ${{ github.event.inputs.version }}
+ PACKAGE_TYPE: ${{ matrix.package }}
+ KONG_RELEASE_LABEL: ${{ needs.metadata.outputs.release-label }}
+ VERBOSE: ${{ runner.debug == '1' && '1' || '' }}
+ CLOUDSMITH_API_KEY: ${{ secrets.CLOUDSMITH_API_KEY }}
+ CLOUDSMITH_DRY_RUN: ''
+ IGNORE_CLOUDSMITH_FAILURES: ${{ vars.IGNORE_CLOUDSMITH_FAILURES }}
+ USE_CLOUDSMITH: ${{ vars.USE_CLOUDSMITH }}
+ run: |
+ sha256sum bazel-bin/pkg/*
+
+ # set the version input as tags passed to release-scripts
+ # note: release-scripts rejects user tags if missing internal flag
+ #
+ # this can be a comma-separated list of tags to apply
+ if [[ "$OFFICIAL_RELEASE" == 'false' ]]; then
+ if echo "$INPUT_VERSION" | grep -qs -E 'rc|alpha|beta|nightly'; then
+ PACKAGE_TAGS="$INPUT_VERSION"
+ export PACKAGE_TAGS
+ fi
+ fi
+
+ scripts/release-kong.sh
+
+ release-images:
+ name: Release Images - ${{ matrix.label }} - ${{ needs.metadata.outputs.release-desc }}
+ needs: [metadata, build-images, smoke-tests]
+ runs-on: ubuntu-22.04
+ if: github.repository_owner == 'Kong' && fromJSON(needs.metadata.outputs.matrix)['release-images'] != ''
+
+ strategy:
+ # limit to 3 jobs at a time
+ max-parallel: 3
+ fail-fast: false
+ matrix:
+ include: "${{ fromJSON(needs.metadata.outputs.matrix)['release-images'] }}"
+
+ steps:
+ - name: Login to Docker Hub
+ uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v2.1.0
+ with:
+ username: ${{ secrets.GHA_DOCKERHUB_PUSH_USER }}
+ password: ${{ secrets.GHA_KONG_ORG_DOCKERHUB_PUSH_TOKEN }}
+
+ - name: Docker meta
+ id: meta
+ uses: docker/metadata-action@v5
+ with:
+ images: ${{ needs.metadata.outputs.docker-repository }}
+ sep-tags: " "
+ tags: |
+ type=raw,value=latest,enable=${{ matrix.label == 'ubuntu' }}
+ type=match,enable=${{ github.event_name == 'workflow_dispatch' }},pattern=\d.\d,value=${{ github.event.inputs.version }}
+ type=match,enable=${{ github.event_name == 'workflow_dispatch' && matrix.label == 'ubuntu' }},pattern=\d.\d,value=${{ github.event.inputs.version }},suffix=
+ type=raw,enable=${{ github.event_name == 'workflow_dispatch' }},${{ github.event.inputs.version }}
+ type=raw,enable=${{ github.event_name == 'workflow_dispatch' && matrix.label == 'ubuntu' }},${{ github.event.inputs.version }},suffix=
+ type=ref,event=branch
+ type=ref,enable=${{ matrix.label == 'ubuntu' }},event=branch,suffix=
+ type=ref,event=tag
+ type=ref,enable=${{ matrix.label == 'ubuntu' }},event=tag,suffix=
+ type=ref,event=pr
+ type=schedule,pattern=nightly
+ type=schedule,enable=${{ matrix.label == 'ubuntu' }},pattern=nightly,suffix=
+ type=schedule,pattern={{date 'YYYYMMDD'}}
+ type=schedule,enable=${{ matrix.label == 'ubuntu' }},pattern={{date 'YYYYMMDD'}},suffix=
+ flavor: |
+ latest=false
+ suffix=-${{ matrix.label }}
+
+ - name: Install regctl
+ uses: regclient/actions/regctl-installer@b6614f5f56245066b533343a85f4109bdc38c8cc
+
+ - name: Push Images
+ env:
+ TAGS: "${{ steps.meta.outputs.tags }}"
+ run: |
+ PRERELEASE_IMAGE=${{ env.PRERELEASE_DOCKER_REPOSITORY }}:${{ github.sha }}-${{ matrix.label }}
+ docker pull $PRERELEASE_IMAGE
+ for tag in $TAGS; do
+ regctl -v debug image copy $PRERELEASE_IMAGE $tag
+ done
diff --git a/.gitignore b/.gitignore
index 8ecd58061979..e5d2a13a8e93 100644
--- a/.gitignore
+++ b/.gitignore
@@ -26,3 +26,7 @@ bin/grpcurl
*.so
*.bak
+
+bazel-*
+bin/bazel
+bin/h2client
diff --git a/.requirements b/.requirements
index 2dacfc1c3b5a..3625afb2c7b0 100644
--- a/.requirements
+++ b/.requirements
@@ -10,3 +10,9 @@ LIBYAML_VERSION=0.2.5
KONG_GO_PLUGINSERVER_VERSION=v0.6.1
KONG_BUILD_TOOLS_VERSION=4.40.1
KONG_NGINX_MODULE_BRANCH=0.2.1
+
+PCRE=8.45
+OPENSSL=1.1.1o
+OPENRESTY=1.19.9.1
+LUAROCKS=3.8.0
+LUA_KONG_NGINX_MODULE=6b2fa308e091e2daed2407dc38d54fbcd8fae768 # 0.2.1-sr1
diff --git a/.travis.yml b/.travis.yml
index d5a0c1afe65f..eba13869155d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -48,7 +48,7 @@ env:
install:
- source .ci/setup_env.sh
- - make dev
+ - make venv-dev
cache:
apt: true
diff --git a/BUILD.bazel b/BUILD.bazel
new file mode 100644
index 000000000000..632194b18c17
--- /dev/null
+++ b/BUILD.bazel
@@ -0,0 +1,239 @@
+load("@bazel_skylib//rules:common_settings.bzl", "bool_flag")
+load("@bazel_skylib//lib:selects.bzl", "selects")
+load("//build/nfpm:rules.bzl", "nfpm_pkg")
+load("//build/toolchain:managed_toolchain.bzl", "aarch64_glibc_distros")
+
+filegroup(
+ name = "srcs",
+ srcs = glob(["**"]),
+ visibility = ["//visibility:public"],
+)
+
+filegroup(
+ name = "rockspec_srcs",
+ srcs = glob(["*.rockspec"]),
+ visibility = ["//visibility:public"],
+)
+
+nfpm_env = {
+ "KONG_NAME": "kong",
+ "KONG_REPLACES_1": "kong-community-edition",
+ "KONG_REPLACES_2": "kong-enterprise-edition-fips",
+ "KONG_CONFLICTS_1": "kong-community-edition",
+ "KONG_CONFLICTS_2": "kong-enterprise-edition-fips",
+}
+
+nfpm_pkg(
+ name = "kong_deb",
+ config = "//build:package/nfpm.yaml",
+ env = nfpm_env,
+ packager = "deb",
+ pkg_name = "kong",
+ visibility = ["//visibility:public"],
+)
+
+nfpm_pkg(
+ name = "kong_apk",
+ config = "//build:package/nfpm.yaml",
+ env = nfpm_env,
+ packager = "apk",
+ pkg_name = "kong",
+ visibility = ["//visibility:public"],
+)
+
+nfpm_pkg(
+ name = "kong_el8",
+ config = "//build:package/nfpm.yaml",
+ env = nfpm_env,
+ packager = "rpm",
+ pkg_name = "kong.el8",
+ visibility = ["//visibility:public"],
+)
+
+nfpm_pkg(
+ name = "kong_el7",
+ config = "//build:package/nfpm.yaml",
+ env = nfpm_env,
+ extra_env = {
+ "RPM_EXTRA_DEPS": "hostname",
+ },
+ packager = "rpm",
+ pkg_name = "kong.el7",
+ visibility = ["//visibility:public"],
+)
+
+nfpm_pkg(
+ name = "kong_aws2",
+ config = "//build:package/nfpm.yaml",
+ env = nfpm_env,
+ extra_env = {
+ "RPM_EXTRA_DEPS": "/usr/sbin/useradd",
+ "RPM_EXTRA_DEPS_2": "/usr/sbin/groupadd",
+ },
+ packager = "rpm",
+ pkg_name = "kong.aws2",
+ visibility = ["//visibility:public"],
+)
+
+nfpm_pkg(
+ name = "kong_aws2023",
+ config = "//build:package/nfpm.yaml",
+ env = nfpm_env,
+ extra_env = {
+ "RPM_EXTRA_DEPS": "/usr/sbin/useradd",
+ "RPM_EXTRA_DEPS_2": "/usr/sbin/groupadd",
+ "RPM_EXTRA_DEPS_3": "libxcrypt-compat",
+ },
+ packager = "rpm",
+ pkg_name = "kong.aws2023",
+ visibility = ["//visibility:public"],
+)
+
+###### flags
+
+# --//:debug=true
+bool_flag(
+ name = "debug",
+ build_setting_default = True,
+)
+
+config_setting(
+ name = "debug_flag",
+ flag_values = {
+ ":debug": "true",
+ },
+ visibility = ["//visibility:public"],
+)
+
+config_setting(
+ name = "debug_linux_flag",
+ constraint_values = [
+ "@platforms//os:linux",
+ ],
+ flag_values = {
+ ":debug": "true",
+ },
+ visibility = ["//visibility:public"],
+)
+
+# --//:skip_webui=false
+bool_flag(
+ name = "skip_webui",
+ build_setting_default = False,
+)
+
+config_setting(
+ name = "skip_webui_flags",
+ flag_values = {
+ ":skip_webui": "true",
+ },
+ visibility = ["//visibility:public"],
+)
+
+##### constraints, platforms and config_settings for cross-compile
+
+constraint_setting(name = "cross_build_setting")
+
+constraint_value(
+ name = "cross_build",
+ constraint_setting = ":cross_build_setting",
+)
+
+# platform sets the constraint values based on user input (--platforms=//:PLATFORM)
+platform(
+ name = "generic-crossbuild-x86_64",
+ constraint_values = [
+ "@platforms//os:linux",
+ "@platforms//cpu:x86_64",
+ "//build/platforms/distro:generic",
+ ":cross_build",
+ ],
+)
+
+platform(
+ name = "generic-crossbuild-aarch64",
+ constraint_values = [
+ "@platforms//os:linux",
+ "@platforms//cpu:aarch64",
+ "//build/platforms/distro:generic",
+ ":cross_build",
+ ],
+)
+
+# backward compatibility
+alias(
+ name = "ubuntu-22.04-arm64",
+ actual = ":generic-crossbuild-aarch64",
+)
+
+platform(
+ name = "alpine-crossbuild-x86_64",
+ constraint_values = [
+ "@platforms//os:linux",
+ "@platforms//cpu:x86_64",
+ "//build/platforms/distro:alpine",
+ ":cross_build",
+ ],
+)
+
+# backward compatibility
+alias(
+ name = "alpine-x86_64",
+ actual = ":alpine-crossbuild-x86_64",
+)
+
+platform(
+ name = "alpine-crossbuild-aarch64",
+ constraint_values = [
+ "@platforms//os:linux",
+ "@platforms//cpu:aarch64",
+ "//build/platforms/distro:alpine",
+ ":cross_build",
+ ],
+)
+
+[
+ platform(
+ name = vendor + "-crossbuild-aarch64",
+ constraint_values = [
+ "@platforms//os:linux",
+ "@platforms//cpu:aarch64",
+ "//build/platforms/distro:" + vendor,
+ ":cross_build",
+ ],
+ )
+ for vendor in aarch64_glibc_distros
+]
+
+# config_settings define a select() condition based on user-set constraint_values
+# see https://bazel.build/docs/configurable-attributes
+config_setting(
+ name = "aarch64-linux-anylibc-cross",
+ constraint_values = [
+ "@platforms//os:linux",
+ "@platforms//cpu:aarch64",
+ ":cross_build",
+ ],
+ visibility = ["//visibility:public"],
+)
+
+config_setting(
+ name = "x86_64-linux-musl-cross",
+ constraint_values = [
+ "@platforms//os:linux",
+ "@platforms//cpu:x86_64",
+ "//build/platforms/distro:alpine",
+ ":cross_build",
+ ],
+ visibility = ["//visibility:public"],
+)
+
+selects.config_setting_group(
+ # matches all cross build platforms
+ name = "any-cross",
+ match_any = [
+ ":aarch64-linux-anylibc-cross",
+ ":x86_64-linux-musl-cross",
+ ],
+ visibility = ["//visibility:public"],
+)
diff --git a/Makefile b/Makefile
index f9a23c8bb9da..27832cfea241 100644
--- a/Makefile
+++ b/Makefile
@@ -6,6 +6,9 @@ WIN_SCRIPTS = "bin/busted" "bin/kong"
BUSTED_ARGS ?= -v
TEST_CMD ?= bin/busted $(BUSTED_ARGS)
+BUILD_NAME ?= kong-dev
+BAZEL_ARGS ?= --verbose_failures --action_env=BUILD_NAME=$(BUILD_NAME) --//:skip_webui=true
+
ifeq ($(OS), darwin)
OPENSSL_DIR ?= /usr/local/opt/openssl
GRPCURL_OS ?= osx
@@ -16,15 +19,26 @@ endif
ifeq ($(MACHINE), aarch64)
GRPCURL_MACHINE ?= arm64
+H2CLIENT_MACHINE ?= arm64
else
GRPCURL_MACHINE ?= $(MACHINE)
+H2CLIENT_MACHINE ?= $(MACHINE)
+endif
+
+ifeq ($(MACHINE), aarch64)
+BAZELISK_MACHINE ?= arm64
+else ifeq ($(MACHINE), x86_64)
+BAZELISK_MACHINE ?= amd64
+else
+BAZELISK_MACHINE ?= $(MACHINE)
endif
.PHONY: install dependencies dev remove grpcurl \
setup-ci setup-kong-build-tools \
lint test test-integration test-plugins test-all \
pdk-phase-check functional-tests \
- fix-windows release
+ fix-windows release \
+ nightly-release release
ROOT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
KONG_SOURCE_LOCATION ?= $(ROOT_DIR)
@@ -35,8 +49,18 @@ RESTY_OPENSSL_VERSION ?= `grep RESTY_OPENSSL_VERSION $(KONG_SOURCE_LOCATION)/.re
RESTY_PCRE_VERSION ?= `grep RESTY_PCRE_VERSION $(KONG_SOURCE_LOCATION)/.requirements | awk -F"=" '{print $$2}'`
KONG_BUILD_TOOLS ?= `grep KONG_BUILD_TOOLS_VERSION $(KONG_SOURCE_LOCATION)/.requirements | awk -F"=" '{print $$2}'`
GRPCURL_VERSION ?= 1.8.5
+BAZLISK_VERSION ?= 1.18.0
OPENRESTY_PATCHES_BRANCH ?= master
KONG_NGINX_MODULE_BRANCH ?= master
+BAZEL := $(shell command -v bazel 2> /dev/null)
+VENV = /dev/null # backward compatibility when no venv is built
+
+# Use x86_64 grpcurl v1.8.5 for Apple silicon chips
+ifeq ($(GRPCURL_OS)_$(MACHINE)_$(GRPCURL_VERSION), osx_arm64_1.8.5)
+GRPCURL_MACHINE = x86_64
+endif
+
+H2CLIENT_VERSION ?= 0.4.0
PACKAGE_TYPE ?= deb
REPOSITORY_NAME ?= kong-${PACKAGE_TYPE}
@@ -82,6 +106,21 @@ release-docker-images:
KONG_SOURCE_LOCATION=${KONG_SOURCE_LOCATION} \
release-kong-docker-images
+bin/bazel:
+ @curl -s -S -L \
+ https://github.com/bazelbuild/bazelisk/releases/download/v$(BAZLISK_VERSION)/bazelisk-$(OS)-$(BAZELISK_MACHINE) -o bin/bazel
+ @chmod +x bin/bazel
+
+bin/grpcurl:
+ @curl -s -S -L \
+ https://github.com/fullstorydev/grpcurl/releases/download/v$(GRPCURL_VERSION)/grpcurl_$(GRPCURL_VERSION)_$(GRPCURL_OS)_$(GRPCURL_MACHINE).tar.gz | tar xz -C bin;
+ @$(RM) bin/LICENSE
+
+bin/h2client:
+ @curl -s -S -L \
+ https://github.com/Kong/h2client/releases/download/v$(H2CLIENT_VERSION)/h2client_$(H2CLIENT_VERSION)_$(OS)_$(H2CLIENT_MACHINE).tar.gz | tar xz -C bin;
+ @$(RM) bin/README.md
+
release:
ifeq ($(ISTAG),false)
sed -i -e '/return string\.format/,/\"\")/c\return "$(KONG_VERSION)\"' kong/meta.lua
@@ -188,15 +227,44 @@ dependencies: bin/grpcurl
fi \
done;
-bin/grpcurl:
- @curl -s -S -L \
- https://github.com/fullstorydev/grpcurl/releases/download/v$(GRPCURL_VERSION)/grpcurl_$(GRPCURL_VERSION)_$(GRPCURL_OS)_$(GRPCURL_MACHINE).tar.gz | tar xz -C bin;
- @rm bin/LICENSE
+build-kong: check-bazel
+ $(BAZEL) build //build:kong --verbose_failures --action_env=BUILD_NAME=$(BUILD_NAME)
+
+build-venv: check-bazel
+ $(eval VENV := bazel-bin/build/$(BUILD_NAME)-venv.sh)
+
+ @if [ ! -e bazel-bin/build/$(BUILD_NAME)-venv.sh ]; then \
+ $(BAZEL) build //build:venv $(BAZEL_ARGS); \
+ fi
+
+install-dev-rocks: build-venv
+ @. $(VENV) ;\
+ for rock in $(DEV_ROCKS) ; do \
+ if luarocks list --porcelain $$rock | grep -q "installed" ; then \
+ echo $$rock already installed, skipping ; \
+ else \
+ echo $$rock not found, installing via luarocks... ; \
+ LIBRARY_PREFIX=$$(pwd)/bazel-bin/build/$(BUILD_NAME)/kong ; \
+ luarocks install $$rock OPENSSL_DIR=$$LIBRARY_PREFIX CRYPTO_DIR=$$LIBRARY_PREFIX YAML_DIR=$(YAML_DIR) || exit 1; \
+ fi \
+ done;
dev: remove install dependencies
+venv-dev: build-venv install-dev-rocks bin/grpcurl bin/h2client
+
+check-bazel: bin/bazel
+ifndef BAZEL
+ $(eval BAZEL := bin/bazel)
+endif
+
+clean: check-bazel
+ $(BAZEL) clean
+ $(RM) bin/bazel bin/grpcurl bin/h2client
+
+
lint:
- @luacheck -q .
+ @luacheck -q . --exclude-files=bazel-*
@!(grep -R -E -I -n -w '#only|#o' spec && echo "#only or #o tag detected") >&2
@!(grep -R -E -I -n -- '---\s+ONLY' t && echo "--- ONLY block detected") >&2
diff --git a/WORKSPACE b/WORKSPACE
new file mode 100644
index 000000000000..d2f1c9610302
--- /dev/null
+++ b/WORKSPACE
@@ -0,0 +1,53 @@
+workspace(name = "kong")
+
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+http_archive(
+ name = "bazel_skylib",
+ sha256 = "74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506",
+ urls = [
+ "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
+ "https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
+ ],
+)
+
+load("//build:kong_bindings.bzl", "load_bindings")
+
+load_bindings(name = "kong_bindings")
+
+http_archive(
+ name = "rules_foreign_cc",
+ sha256 = "2a4d07cd64b0719b39a7c12218a3e507672b82a97b98c6a89d38565894cf7c51",
+ strip_prefix = "rules_foreign_cc-0.9.0",
+ url = "https://github.com/bazelbuild/rules_foreign_cc/archive/refs/tags/0.9.0.tar.gz",
+)
+
+load("@rules_foreign_cc//foreign_cc:repositories.bzl", "rules_foreign_cc_dependencies")
+
+# This sets up some common toolchains for building targets. For more details, please see
+# https://bazelbuild.github.io/rules_foreign_cc/0.9.0/flatten.html#rules_foreign_cc_dependencies
+rules_foreign_cc_dependencies(
+ register_built_tools = False, # don't build toolchains like make
+ register_default_tools = True, # register cmake and ninja that are managed by bazel
+ register_preinstalled_tools = True, # use preinstalled toolchains like make
+)
+
+load("//build/openresty:repositories.bzl", "openresty_repositories")
+
+openresty_repositories()
+
+load("//build/nfpm:repositories.bzl", "nfpm_repositories")
+
+nfpm_repositories()
+
+load("//build:repositories.bzl", "build_repositories")
+
+build_repositories()
+
+load("//build/toolchain:repositories.bzl", "toolchain_repositories")
+
+toolchain_repositories()
+
+load("//build/toolchain:managed_toolchain.bzl", "register_all_toolchains")
+
+register_all_toolchains()
diff --git a/bin/busted b/bin/busted
index 3aa7fa44710c..dfc41fec1230 100755
--- a/bin/busted
+++ b/bin/busted
@@ -1,6 +1,12 @@
#!/usr/bin/env resty
-local DEFAULT_RESTY_FLAGS="-c 4096"
+setmetatable(_G, nil)
+
+local pl_path = require("pl.path")
+
+local cert_path = pl_path.abspath("spec/fixtures/kong_spec.crt")
+
+local DEFAULT_RESTY_FLAGS=string.format(" -c 4096 --http-conf 'lua_ssl_trusted_certificate %s;' ", cert_path)
if not os.getenv("KONG_BUSTED_RESPAWNED") then
-- initial run, so go update the environment
@@ -24,8 +30,9 @@ if not os.getenv("KONG_BUSTED_RESPAWNED") then
-- rebuild the invoked commandline, while inserting extra resty-flags
local resty_flags = DEFAULT_RESTY_FLAGS
- local cmd = { "exec" }
- for i = -1, #arg do
+ local cmd = { "exec", "/usr/bin/env", "resty" }
+ local cmd_prefix_count = #cmd
+ for i = 0, #arg do
if arg[i]:sub(1, 12) == "RESTY_FLAGS=" then
resty_flags = arg[i]:sub(13, -1)
@@ -35,7 +42,7 @@ if not os.getenv("KONG_BUSTED_RESPAWNED") then
end
if resty_flags then
- table.insert(cmd, 3, resty_flags)
+ table.insert(cmd, cmd_prefix_count+1, resty_flags)
end
table.insert(script, table.concat(cmd, " "))
@@ -45,8 +52,6 @@ if not os.getenv("KONG_BUSTED_RESPAWNED") then
os.exit(rc)
end
-setmetatable(_G, nil)
-
pcall(require, "luarocks.loader")
require("kong.globalpatches")({
@@ -56,3 +61,5 @@ require("kong.globalpatches")({
-- Busted command-line runner
require 'busted.runner'({ standalone = false })
+
+-- vim: set ft=lua ts=2 sw=2 sts=2 et :
diff --git a/build/BUILD.bazel b/build/BUILD.bazel
new file mode 100644
index 000000000000..4d7a467a1760
--- /dev/null
+++ b/build/BUILD.bazel
@@ -0,0 +1,176 @@
+load("@kong_bindings//:variables.bzl", "KONG_VAR")
+load("//build:build_system.bzl", "kong_directory_genrule", "kong_rules_group", "kong_template_file")
+
+exports_files([
+ "package/nfpm.yaml",
+ "package/nfpm.enterprise.yaml",
+])
+
+lib_deps = []
+
+install_lib_deps_cmd = "\n".join([
+ """
+ DEP=${WORKSPACE_PATH}/$(echo $(locations %s) | awk '{print $1}')
+ # use tar magic to exclude files and create with correct permission
+ copy_with_filter ${DEP} ${BUILD_DESTDIR}/kong
+""" % dep
+ for dep in lib_deps
+])
+
+lualib_deps = [
+ "@lua-kong-nginx-module//:all_srcs",
+]
+
+install_lualib_deps_cmd = "\n".join([
+ """
+ DEP=$(pwd)/external/%s
+ INSTALL=/usr/bin/install make --silent -C ${DEP} LUA_LIB_DIR=${BUILD_DESTDIR}/openresty/lualib install
+""" % dep.lstrip("@").split("/")[0]
+ for dep in lualib_deps
+])
+
+install_webui_cmd = select({
+ "//conditions:default": """
+ """,
+ "@kong//:skip_webui_flags": "\n",
+})
+
+kong_directory_genrule(
+ name = "kong",
+ srcs = [
+ "@openresty//:openresty",
+ "@openresty//:luajit",
+ "@luarocks//:luarocks_make",
+ "@luarocks//:luarocks_target",
+ "@protoc//:all_srcs",
+ "@openssl",
+ ] + select({
+ "@kong//:skip_webui_flags": [],
+ "//conditions:default": [],
+ }) + lib_deps + lualib_deps,
+ cmd = """ set -e
+ function copy_with_filter {
+ mkdir -p $2
+ tar -cC $1 --exclude="*.a" --exclude="*.la" \
+ --exclude="*/share/*" --exclude="*/bin/*" \
+ --exclude="*.log" . | tar -xC $2/.
+ chmod -R "+rw" $2
+ }
+ function LN {
+ if [[ "$OSTYPE" == "darwin"* ]]; then
+ # TODO: support relative path links once we start to cross compile on macOS
+ ln -sf $@
+ else
+ ln -srf $@
+ fi
+ }
+ rm -rf ${BUILD_DESTDIR}
+ mkdir -p ${BUILD_DESTDIR}/kong/lib ${BUILD_DESTDIR}/openresty ${BUILD_DESTDIR}/bin
+
+ if [[ "$OSTYPE" == "darwin"* ]]; then
+ libext="dylib"
+ else # assume linux
+ libext="so"
+ fi
+
+ OPENRESTY=${WORKSPACE_PATH}/$(echo '$(locations @openresty//:openresty)' | awk '{print $1}')
+ cp -r ${OPENRESTY}/. ${BUILD_DESTDIR}/openresty/.
+ LN ${BUILD_DESTDIR}/openresty/bin/resty ${BUILD_DESTDIR}/bin/resty
+ chmod -R "+rw" ${BUILD_DESTDIR}/openresty
+
+ LUAJIT=${WORKSPACE_PATH}/$(echo '$(locations @openresty//:luajit)' | awk '{print $1}')
+ copy_with_filter ${LUAJIT} ${BUILD_DESTDIR}/openresty/luajit
+ cp ${LUAJIT}/bin/luajit ${BUILD_DESTDIR}/openresty/luajit/bin/luajit
+ tar -cC ${LUAJIT}/share . | tar -xC ${BUILD_DESTDIR}/openresty/luajit/share
+ chmod -R "+rw" ${BUILD_DESTDIR}/openresty/luajit
+
+ LUAROCKS=${WORKSPACE_PATH}/$(dirname '$(location @luarocks//:luarocks_make)')/luarocks_tree
+ cp -r ${LUAROCKS}/. ${BUILD_DESTDIR}/.
+ rm ${BUILD_DESTDIR}/bin/lapis ${BUILD_DESTDIR}/bin/luarocks-admin
+
+ cp -r $(locations @protoc//:all_srcs) ${BUILD_DESTDIR}/kong/.
+
+ OPENSSL=${WORKSPACE_PATH}/$(echo $(locations @openssl) | awk '{print $1}')
+ # use tar magic to exclude files and create with correct permission
+ copy_with_filter $OPENSSL ${BUILD_DESTDIR}/kong
+
+
+ """ + install_lib_deps_cmd + install_lualib_deps_cmd + install_webui_cmd +
+ """
+ mkdir -p ${BUILD_DESTDIR}/etc/kong
+ cp kong.conf.default ${BUILD_DESTDIR}/etc/kong/kong.conf.default
+
+ # housecleaning
+ mv ${BUILD_DESTDIR}/kong/*.${libext}* ${BUILD_DESTDIR}/kong/lib 2>/dev/null || true
+ if [[ -d ${BUILD_DESTDIR}/kong/lib64 ]]; then
+ copy_with_filter ${BUILD_DESTDIR}/kong/lib64 ${BUILD_DESTDIR}/kong/lib
+ rm -rf ${BUILD_DESTDIR}/kong/lib64
+ fi
+
+ # remove pkgconfig since they are invalid anyway
+ find ${BUILD_DESTDIR} -name "*.pc" -delete
+
+ # clean empty directory
+ find ${BUILD_DESTDIR} -empty -type d -delete
+
+ # the foreign_cc rule dereferences symlinks, so we dedup them here
+ # TODO: patch https://github.com/bazelbuild/rules_foreign_cc/blob/main/foreign_cc/private/framework.bzl#L450 to not remove symlink
+ for f in $(find ${BUILD_DESTDIR}/kong/lib ${BUILD_DESTDIR}/openresty/luajit/lib -type f -name "*.${libext}*" ); do
+ if [[ -L "$f" ]]; then continue; fi # already a symlink
+ target=$(ls -r1 $f.* 2>/dev/null | head -n1)
+ if [[ ! -z "$target" && "$f" != "$target" ]]; then
+ LN "$target" "$f"
+ fi
+ done
+
+ cp kong/pluginsocket.proto ${BUILD_DESTDIR}/kong/include/pluginsocket.proto
+
+ LN ${BUILD_DESTDIR}/openresty/nginx/sbin/nginx ${BUILD_DESTDIR}/openresty/bin/openresty
+ """,
+ # XXX: bazel forces 0555 as artifact permission, which is not correct for packaging
+ # here we deliberately use a different directory so file permissions are preserved
+ # see also https://github.com/bazelbuild/bazel/issues/5588
+ output_dir = KONG_VAR["BUILD_NAME"] + ".nop",
+ visibility = ["//visibility:public"],
+)
+
+kong_template_file(
+ name = "venv.sh",
+ output = "%s-venv.sh" % KONG_VAR["BUILD_NAME"],
+ substitutions = {
+ "{{build_name}}": KONG_VAR["BUILD_NAME"],
+ "{{workspace_path}}": KONG_VAR["WORKSPACE_PATH"],
+ },
+ template = "//build:templates/venv.sh",
+)
+
+kong_template_file(
+ name = "venv.fish",
+ output = "%s-venv.fish" % KONG_VAR["BUILD_NAME"],
+ substitutions = {
+ "{{build_name}}": KONG_VAR["BUILD_NAME"],
+ "{{workspace_path}}": KONG_VAR["WORKSPACE_PATH"],
+ },
+ template = "//build:templates/venv.fish",
+)
+
+kong_template_file(
+ name = "venv-commons",
+ is_executable = True,
+ output = "%s-venv/lib/venv-commons" % KONG_VAR["BUILD_NAME"],
+ substitutions = {
+ "{{workspace_path}}": KONG_VAR["WORKSPACE_PATH"],
+ },
+ template = "//build:templates/venv-commons",
+)
+
+kong_rules_group(
+ name = "venv",
+ propagates = [
+ ":kong",
+ ":venv.sh",
+ ":venv.fish",
+ ":venv-commons",
+ ],
+ visibility = ["//visibility:public"],
+)
diff --git a/build/README.md b/build/README.md
new file mode 100644
index 000000000000..7af1d2b7f04f
--- /dev/null
+++ b/build/README.md
@@ -0,0 +1,137 @@
+# Build
+
+This directory contains the build system for the project.
+The build system is designed to be used with [Bazel](https://bazel.build/).
+It is designed to run on Linux without root privileges, and no virtualization technology is required.
+
+The build system is tested on Linux (x86_64 and aarch64) and macOS (Intel and Apple Silicon).
+
+## Prerequisites
+
+The build system requires the following tools to be installed:
+
+- [Bazel/Bazelisk](https://bazel.build/install/bazelisk), Bazelisk is recommended to ensure the correct version of Bazel is used.
+- [Build Dependencies](https://github.com/Kong/kong/blob/master/DEVELOPER.md#prerequisites), the build system requires the same dependencies as Kong itself.
+
+## Building
+
+To build Kong and all its dependencies, run the following command:
+
+Bash/Zsh:
+
+```bash
+git submodule update --init
+GITHUB_TOKEN=token bazel build //build:kong --verbose_failures
+```
+
+The build output is in `bazel-bin/build/kong-dev`.
+
+To use the build as a virtual development environment, run:
+
+```bash
+bazel build //build:venv --verbose_failures
+. ./bazel-bin/build/kong-dev-venv.sh
+```
+
+Some other targets one might find useful for debugging are:
+
+- `@openresty//:openresty`: builds OpenResty
+- `@luarocks//:luarocks_make`: installs Kong's rock dependencies with LuaRocks
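+
+For example, to build only OpenResty while debugging the build:
+
+```bash
+bazel build @openresty//:openresty --verbose_failures
+```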
+
+### Build Options
+
+The following build options can be used to enable specific features (a combined example is shown after the list):
+
+- **--//:debug=true** turns on debug options for OpenResty and LuaJIT; defaults to `true`.
+- **--action_env=BUILD_NAME=** sets the `build_name`; multiple builds can exist at the same time, allowing you
+to switch between different Kong versions or branches. Defaults to `kong-dev`; don't set this when you are
+building a binary package.
+- **--action_env=INSTALL_DESTDIR=** sets the directory where the build is intended to be installed. Bazel won't
+actually install files into this directory, but it makes sure certain hard-coded paths and RPATHs are
+set correctly when building a package. Defaults to `bazel-bin/build/`.
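+
+For example, a development build with debug disabled and a custom install prefix could be invoked like this
+(the values shown are illustrative):
+
+```bash
+bazel build //build:kong --verbose_failures \
+    --//:debug=false \
+    --action_env=BUILD_NAME=kong-dev \
+    --action_env=INSTALL_DESTDIR=/usr/local
+```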
+
+
+### Official build
+
+`--config release` selects the build configuration for releases; it sets the following build options:
+
+```
+build:release --//:debug=false
+build:release --action_env=BUILD_NAME=kong-dev
+build:release --action_env=INSTALL_DESTDIR=/usr/local
+```
+
+To build an official release, use:
+
+```bash
+GITHUB_TOKEN=token bazel build --config release //build:kong --verbose_failures
+```
+
+Supported build targets for binary packages:
+- `:kong_deb`
+- `:kong_el7`
+- `:kong_el8`
+- `:kong_aws2`
+- `:kong_aws2023`
+- `:kong_apk`
+
+For example, to build the deb package:
+
+```bash
+bazel build --verbose_failures --config release :kong_deb
+```
+
+Run `bazel clean` to clean the bazel build cache.
+
+#### GPG Signing
+
+GPG signing is supported for the RPM packages (`el*` and `aws*`).
+
+```bash
+bazel build //:kong_el8 --action_env=RPM_SIGNING_KEY_FILE --action_env=NFPM_RPM_PASSPHRASE
+```
+
+## Cross compiling
+
+Cross compiling is currently only tested on Ubuntu 22.04 x86_64 with the following target platforms:
+
+- **//:generic-crossbuild-aarch64** Use the system installed aarch64 toolchain.
+ - Requires user to manually install `crossbuild-essential-arm64` on Debian/Ubuntu.
+- **//:alpine-crossbuild-x86_64** Alpine Linux x86_64; bazel manages the build toolchain.
+- **//:alpine-crossbuild-aarch64** Alpine Linux aarch64; bazel manages the build toolchain.
+
+Make sure the platform is selected both when building Kong and when packaging Kong:
+
+```bash
+bazel build --config release //build:kong --platforms=//:generic-crossbuild-aarch64
+bazel build --config release :kong_deb --platforms=//:generic-crossbuild-aarch64
+```
+
+## Troubleshooting
+
+Run `bazel build` with `--sandbox_debug --verbose_failures` to get more information about the error.
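+
+For example, when the Kong build fails:
+
+```bash
+bazel build //build:kong --sandbox_debug --verbose_failures
+```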
+
+The `.log` files in `bazel-bin` contain the build logs.
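+
+A quick way to locate them (a minimal sketch):
+
+```bash
+find bazel-bin/ -name '*.log'
+```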
+
+## FAQ
+
+### Caching
+
+Bazel utilizes a cache to speed up the build process. You might want to clear the cache actively
+if you recently changed `BUILD_NAME` or `INSTALL_DESTDIR`.
+
+To completely remove the entire working tree created by a Bazel instance, run:
+
+```shell
+bazel clean --expunge
+```
+
+### Cleanup
+
+In some cases where the build fails or is interrupted, the build system may leave behind temporary files. To clean them up, run the following command or simply rerun the build:
+
+```shell
+bazel clean
+```
+
diff --git a/build/build_system.bzl b/build/build_system.bzl
new file mode 100644
index 000000000000..5b2feeb1fc3a
--- /dev/null
+++ b/build/build_system.bzl
@@ -0,0 +1,209 @@
+"""
+Load this file for all Kong-specific build macros
+and rules that you'd like to use in your BUILD files.
+"""
+
+load("@bazel_skylib//lib:dicts.bzl", "dicts")
+load("@kong_bindings//:variables.bzl", "KONG_VAR")
+
+# A genrule variant that can output a directory.
+def _kong_directory_genrule_impl(ctx):
+ tree = ctx.actions.declare_directory(ctx.attr.output_dir)
+ env = dicts.add(KONG_VAR, ctx.configuration.default_shell_env, {
+ "GENRULE_OUTPUT_DIR": tree.path,
+ })
+
+    # XXX: remove OPENRESTY_PATCHES from KONG_VAR since env values must be strings, not lists
+ env["OPENRESTY_PATCHES"] = ""
+
+ ctx.actions.run_shell(
+ inputs = ctx.files.srcs,
+ tools = ctx.files.tools,
+ outputs = [tree],
+ command = "mkdir -p " + tree.path + " && " + ctx.expand_location(ctx.attr.cmd),
+ env = env,
+ )
+ return [DefaultInfo(files = depset([tree]))]
+
+kong_directory_genrule = rule(
+ implementation = _kong_directory_genrule_impl,
+ attrs = {
+ "srcs": attr.label_list(),
+ "cmd": attr.string(),
+ "tools": attr.label_list(),
+ "output_dir": attr.string(),
+ },
+)
+
+# A meta rule that groups and propagates the outputs of multiple other rules
+def _kong_rules_group_impl(ctx):
+ return [DefaultInfo(files = depset(ctx.files.propagates))]
+
+kong_rules_group = rule(
+ implementation = _kong_rules_group_impl,
+ attrs = {
+ "propagates": attr.label_list(),
+ },
+)
+
+_kong_template_attrs = {
+ "template": attr.label(
+ mandatory = True,
+ allow_single_file = True,
+ ),
+ "output": attr.output(
+ mandatory = True,
+ ),
+ "substitutions": attr.string_dict(),
+ "srcs": attr.label_list(allow_files = True, doc = "List of locations to expand the template, in target configuration"),
+ "tools": attr.label_list(allow_files = True, cfg = "exec", doc = "List of locations to expand the template, in exec configuration"),
+ "is_executable": attr.bool(default = False),
+ # hidden attributes
+ "_cc_toolchain": attr.label(
+ default = "@bazel_tools//tools/cpp:current_cc_toolchain",
+ ),
+}
+
+def _render_template(ctx, output):
+ substitutions = dict(ctx.attr.substitutions)
+ for l in ctx.attr.srcs + ctx.attr.tools:
+ files = l.files.to_list()
+ if len(files) == 1:
+ p = files[0].path
+ else:
+ p = "/".join(files[0].path.split("/")[:-1]) # get the directory
+ substitutions["{{%s}}" % l.label] = p
+
+ substitutions["{{CC}}"] = ctx.attr._cc_toolchain[cc_common.CcToolchainInfo].compiler_executable
+
+ # yes, not a typo, use gcc for linker
+ substitutions["{{LD}}"] = substitutions["{{CC}}"]
+
+ ctx.actions.expand_template(
+ template = ctx.file.template,
+ output = output,
+ substitutions = substitutions,
+ is_executable = ctx.attr.is_executable,
+ )
+
+def _kong_template_file_impl(ctx):
+ _render_template(ctx, ctx.outputs.output)
+
+ return [
+ DefaultInfo(files = depset([ctx.outputs.output])),
+ ]
+
+kong_template_file = rule(
+ implementation = _kong_template_file_impl,
+ attrs = _kong_template_attrs,
+)
+
+def _kong_template_genrule_impl(ctx):
+ f = ctx.actions.declare_file(ctx.attr.name + ".rendered.sh")
+ _render_template(ctx, f)
+
+ ctx.actions.run_shell(
+ outputs = [ctx.outputs.output],
+ inputs = ctx.files.srcs + ctx.files.tools + [f],
+ command = "{} {}".format(f.path, ctx.outputs.output.path),
+ progress_message = ctx.attr.progress_message,
+ )
+
+ return [
+ # don't list f as files/real output
+ DefaultInfo(files = depset([ctx.outputs.output])),
+ ]
+
+kong_template_genrule = rule(
+ implementation = _kong_template_genrule_impl,
+ attrs = _kong_template_attrs | {
+ "progress_message": attr.string(doc = "Message to display when running the command"),
+ },
+)
+
+def _copyright_header(ctx):
+ paths = ctx.execute(["find", ctx.path("."), "-type", "f"]).stdout.split("\n")
+
+ copyright_content = ctx.read(ctx.path(Label("@kong//:distribution/COPYRIGHT-HEADER"))).replace("--", " ")
+ copyright_content_js = "/*\n" + copyright_content + "*/\n\n"
+    copyright_content_html = "<!--\n" + copyright_content + "-->\n\n"
+ for path in paths:
+ if path.endswith(".js") or path.endswith(".map") or path.endswith(".css"):
+ content = ctx.read(path)
+ if not content.startswith(copyright_content_js):
+ # the default enabled |legacy_utf8| leads to a double-encoded utf-8
+ # while writing utf-8 content read by |ctx.read|, let's disable it
+ ctx.file(path, copyright_content_js + content, legacy_utf8 = False)
+
+ elif path.endswith(".html"):
+ content = ctx.read(path)
+ if not content.startswith(copyright_content_html):
+ # the default enabled |legacy_utf8| leads to a double-encoded utf-8
+ # while writing utf-8 content read by |ctx.read|, let's disable it
+ ctx.file(path, copyright_content_html + content, legacy_utf8 = False)
+
+def _github_release_impl(ctx):
+ ctx.file("WORKSPACE", "workspace(name = \"%s\")\n" % ctx.name)
+
+ if ctx.attr.build_file:
+ ctx.file("BUILD.bazel", ctx.read(ctx.attr.build_file))
+ elif ctx.attr.build_file_content:
+ ctx.file("BUILD.bazel", ctx.attr.build_file_content)
+
+ os_name = ctx.os.name
+ os_arch = ctx.os.arch
+
+ if os_arch == "aarch64":
+ os_arch = "arm64"
+ elif os_arch == "x86_64":
+ os_arch = "amd64"
+ elif os_arch != "amd64":
+ fail("Unsupported arch %s" % os_arch)
+
+ if os_name == "mac os x":
+ os_name = "macOS"
+ elif os_name != "linux":
+ fail("Unsupported OS %s" % os_name)
+
+ gh_bin = "%s" % ctx.path(Label("@gh_%s_%s//:bin/gh" % (os_name, os_arch)))
+ args = [gh_bin, "release", "download", ctx.attr.tag, "-R", ctx.attr.repo]
+ downloaded_file = None
+ if ctx.attr.pattern:
+ if "/" in ctx.attr.pattern or ".." in ctx.attr.pattern:
+ fail("/ and .. are not allowed in pattern")
+ downloaded_file = ctx.attr.pattern.replace("*", "_")
+ args += ["-p", ctx.attr.pattern]
+ elif ctx.attr.archive:
+ args.append("--archive=" + ctx.attr.archive)
+ downloaded_file = "gh-release." + ctx.attr.archive.split(".")[-1]
+ else:
+ fail("at least one of pattern or archive must be set")
+
+ args += ["-O", downloaded_file]
+
+ ret = ctx.execute(args)
+
+ if ret.return_code != 0:
+ gh_token_set = "GITHUB_TOKEN is set, is it valid?"
+ if not ctx.os.environ.get("GITHUB_TOKEN", ""):
+ gh_token_set = "GITHUB_TOKEN is not set, is this a private repo?"
+ fail("Failed to download release (%s): %s, exit: %d" % (gh_token_set, ret.stderr, ret.return_code))
+
+ ctx.extract(downloaded_file, stripPrefix = ctx.attr.strip_prefix)
+
+ if not ctx.attr.skip_add_copyright_header:
+ _copyright_header(ctx)
+
+github_release = repository_rule(
+ implementation = _github_release_impl,
+ attrs = {
+ "tag": attr.string(mandatory = True),
+ "pattern": attr.string(mandatory = False),
+ "archive": attr.string(mandatory = False, values = ["zip", "tar.gz"]),
+ "strip_prefix": attr.string(default = "", doc = "Strip prefix from downloaded files"),
+ "repo": attr.string(mandatory = True),
+ "build_file": attr.label(allow_single_file = True),
+ "build_file_content": attr.string(),
+ "skip_add_copyright_header": attr.bool(default = False, doc = "Whether to inject COPYRIGHT-HEADER into downloaded files, only required for webuis"),
+ },
+)
diff --git a/build/cross_deps/BUILD.bazel b/build/cross_deps/BUILD.bazel
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/build/cross_deps/libxcrypt/BUILD.bazel b/build/cross_deps/libxcrypt/BUILD.bazel
new file mode 100644
index 000000000000..d7862e9d016e
--- /dev/null
+++ b/build/cross_deps/libxcrypt/BUILD.bazel
@@ -0,0 +1,6 @@
+exports_files(
+ [
+ "BUILD.libxcrypt.bazel",
+ ],
+ visibility = ["//visibility:public"],
+)
diff --git a/build/cross_deps/libxcrypt/BUILD.libxcrypt.bazel b/build/cross_deps/libxcrypt/BUILD.libxcrypt.bazel
new file mode 100644
index 000000000000..933172eec78f
--- /dev/null
+++ b/build/cross_deps/libxcrypt/BUILD.libxcrypt.bazel
@@ -0,0 +1,60 @@
+load("@rules_foreign_cc//foreign_cc:defs.bzl", "configure_make")
+load("@bazel_skylib//lib:selects.bzl", "selects")
+load("@kong_bindings//:variables.bzl", "KONG_VAR")
+
+filegroup(
+ name = "all_srcs",
+ srcs = glob(
+ include = ["**"],
+ exclude = ["*.bazel"],
+ ),
+)
+
+selects.config_setting_group(
+ name = "disable-obsolete-api",
+    # looks like RHEL is aggressive in migrating to libxcrypt;
+    # set this option if a distro looks for "libcrypt.so.2"
+    # instead of "libcrypt.so.1" (i.e. "error while loading shared libraries: libcrypt.so.1")
+ match_any = [
+ "@kong//build/platforms/distro:rhel9",
+ "@kong//build/platforms/distro:aws2023",
+ ],
+)
+
+configure_make(
+ name = "libxcrypt",
+ configure_command = "configure",
+ configure_in_place = True,
+ configure_options = select({
+ "@kong//:aarch64-linux-anylibc-cross": [
+ "--host=aarch64-linux",
+ ],
+ "@kong//:x86_64-linux-musl-cross": [
+ "--host=x86_64-linux-musl",
+ ],
+ "//conditions:default": [],
+ }) + select({
+ ":disable-obsolete-api": [
+ "--enable-obsolete-api=no",
+ ],
+ "//conditions:default": [],
+ }),
+ lib_source = ":all_srcs",
+ # out_lib_dir = "lib",
+ out_shared_libs = select({
+ "@platforms//os:macos": [
+ "libcrypt.1.dylib",
+ ],
+ ":disable-obsolete-api": [
+ "libcrypt.so.2",
+ ],
+ "//conditions:default": [
+ "libcrypt.so.1",
+ ],
+ }),
+ targets = [
+ "-j" + KONG_VAR["NPROC"],
+ "install -j" + KONG_VAR["NPROC"],
+ ],
+ visibility = ["//visibility:public"],
+)
diff --git a/build/cross_deps/libxcrypt/repositories.bzl b/build/cross_deps/libxcrypt/repositories.bzl
new file mode 100644
index 000000000000..f6c28d022445
--- /dev/null
+++ b/build/cross_deps/libxcrypt/repositories.bzl
@@ -0,0 +1,18 @@
+"""A module defining the third party dependency OpenResty"""
+
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+def libxcrypt_repositories():
+    """Defines the libxcrypt repository"""
+
+    # many distros have started replacing glibc's libcrypt with libxcrypt,
+    # thus crypt.h and libcrypt.so.1 are missing from the cross toolchain
+ # ubuntu2004: 4.4.10
+ # ubuntu2204: 4.4.27
+ http_archive(
+ name = "cross_deps_libxcrypt",
+ url = "https://github.com/besser82/libxcrypt/releases/download/v4.4.27/libxcrypt-4.4.27.tar.xz",
+ sha256 = "500898e80dc0d027ddaadb5637fa2bf1baffb9ccd73cd3ab51d92ef5b8a1f420",
+ strip_prefix = "libxcrypt-4.4.27",
+ build_file = "//build/cross_deps/libxcrypt:BUILD.libxcrypt.bazel",
+ )
diff --git a/build/cross_deps/libyaml/BUILD.bazel b/build/cross_deps/libyaml/BUILD.bazel
new file mode 100644
index 000000000000..588b8759be7f
--- /dev/null
+++ b/build/cross_deps/libyaml/BUILD.bazel
@@ -0,0 +1,16 @@
+load("@bazel_skylib//rules:build_test.bzl", "build_test")
+
+exports_files(
+ [
+ "BUILD.libyaml.bazel",
+ ],
+ visibility = ["//visibility:public"],
+)
+
+build_test(
+ name = "build",
+ targets = [
+ "@cross_deps_libyaml//:libyaml",
+ ],
+ visibility = ["//:__pkg__"],
+)
diff --git a/build/cross_deps/libyaml/BUILD.libyaml.bazel b/build/cross_deps/libyaml/BUILD.libyaml.bazel
new file mode 100644
index 000000000000..ad4e48560df3
--- /dev/null
+++ b/build/cross_deps/libyaml/BUILD.libyaml.bazel
@@ -0,0 +1,40 @@
+load("@rules_foreign_cc//foreign_cc:defs.bzl", "configure_make")
+load("@kong_bindings//:variables.bzl", "KONG_VAR")
+
+filegroup(
+ name = "all_srcs",
+ srcs = glob(
+ include = ["**"],
+ exclude = ["*.bazel"],
+ ),
+)
+
+configure_make(
+ name = "libyaml",
+ configure_command = "configure",
+ configure_in_place = True,
+ configure_options = select({
+ "@kong//:aarch64-linux-anylibc-cross": [
+ "--host=aarch64-linux",
+ ],
+ "@kong//:x86_64-linux-musl-cross": [
+ "--host=x86_64-linux-musl",
+ ],
+ "//conditions:default": [],
+ }),
+ lib_source = ":all_srcs",
+ # out_lib_dir = "lib",
+ out_shared_libs = select({
+ "@platforms//os:macos": [
+ "libyaml-0.2.dylib",
+ ],
+ "//conditions:default": [
+ "libyaml-0.so.2",
+ ],
+ }),
+ targets = [
+ "-j" + KONG_VAR["NPROC"],
+ "install -j" + KONG_VAR["NPROC"],
+ ],
+ visibility = ["//visibility:public"],
+)
diff --git a/build/cross_deps/libyaml/repositories.bzl b/build/cross_deps/libyaml/repositories.bzl
new file mode 100644
index 000000000000..b7b2800cf965
--- /dev/null
+++ b/build/cross_deps/libyaml/repositories.bzl
@@ -0,0 +1,15 @@
+"""A module defining the third party dependency OpenResty"""
+
+load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+def libyaml_repositories():
+ """Defines the libyaml repository"""
+
+ http_archive(
+ name = "cross_deps_libyaml",
+ url = "https://pyyaml.org/download/libyaml/yaml-0.2.5.tar.gz",
+ sha256 = "c642ae9b75fee120b2d96c712538bd2cf283228d2337df2cf2988e3c02678ef4",
+ strip_prefix = "yaml-0.2.5",
+ build_file = "//build/cross_deps/libyaml:BUILD.libyaml.bazel",
+ )
diff --git a/build/cross_deps/repositories.bzl b/build/cross_deps/repositories.bzl
new file mode 100644
index 000000000000..a2afddfc9e9d
--- /dev/null
+++ b/build/cross_deps/repositories.bzl
@@ -0,0 +1,8 @@
+load("//build/cross_deps/zlib:repositories.bzl", "zlib_repositories")
+load("//build/cross_deps/libyaml:repositories.bzl", "libyaml_repositories")
+load("//build/cross_deps/libxcrypt:repositories.bzl", "libxcrypt_repositories")
+
+def cross_deps_repositories():
+ zlib_repositories()
+ libyaml_repositories()
+ libxcrypt_repositories()
diff --git a/build/cross_deps/zlib/BUILD.bazel b/build/cross_deps/zlib/BUILD.bazel
new file mode 100644
index 000000000000..d650c675249b
--- /dev/null
+++ b/build/cross_deps/zlib/BUILD.bazel
@@ -0,0 +1,16 @@
+load("@bazel_skylib//rules:build_test.bzl", "build_test")
+
+exports_files(
+ [
+ "BUILD.zlib.bazel",
+ ],
+ visibility = ["//visibility:public"],
+)
+
+build_test(
+ name = "build",
+ targets = [
+ "@cross_deps_zlib//:zlib",
+ ],
+ visibility = ["//:__pkg__"],
+)
diff --git a/build/cross_deps/zlib/BUILD.zlib.bazel b/build/cross_deps/zlib/BUILD.zlib.bazel
new file mode 100644
index 000000000000..a82ac6977817
--- /dev/null
+++ b/build/cross_deps/zlib/BUILD.zlib.bazel
@@ -0,0 +1,49 @@
+load("@rules_foreign_cc//foreign_cc:defs.bzl", "cmake")
+load("@kong_bindings//:variables.bzl", "KONG_VAR")
+
+filegroup(
+ name = "all_srcs",
+ srcs = glob(
+ include = ["**"],
+ exclude = ["*.bazel"],
+ ),
+)
+
+cmake(
+ name = "zlib",
+ build_args = [
+ "--", # <- Pass remaining options to the native tool.
+ "-j" + KONG_VAR["NPROC"],
+ ],
+ # partially from https://github.com/envoyproxy/envoy/blob/main/bazel/foreign_cc/BUILD#L546
+ cache_entries = {
+ "CMAKE_CXX_COMPILER_FORCED": "on",
+ "CMAKE_C_COMPILER_FORCED": "on",
+ "SKIP_BUILD_EXAMPLES": "on",
+ "BUILD_SHARED_LIBS": "ON",
+
+ # The following entries are for zlib-ng. Since zlib and zlib-ng are compatible source
+ # codes and CMake ignores unknown cache entries, it is fine to combine it into one
+ # dictionary.
+ #
+ # Reference: https://github.com/zlib-ng/zlib-ng#build-options.
+ "ZLIB_COMPAT": "on",
+ "ZLIB_ENABLE_TESTS": "off",
+
+ # Warning: Turning WITH_OPTIM to "on" doesn't pass ZlibCompressorImplTest.CallingChecksum.
+ "WITH_OPTIM": "on",
+ # However turning off SSE4 fixes it.
+ "WITH_SSE4": "off",
+
+ # Warning: Turning WITH_NEW_STRATEGIES to "on" doesn't pass gzip compressor fuzz test.
+ # Turning this off means falling into NO_QUICK_STRATEGY route.
+ "WITH_NEW_STRATEGIES": "off",
+
+ # Only allow aligned address.
+ # Reference: https://github.com/zlib-ng/zlib-ng#advanced-build-options.
+ "UNALIGNED_OK": "off",
+ },
+ lib_source = ":all_srcs",
+ out_shared_libs = ["libz.so.1"],
+ visibility = ["//visibility:public"],
+)
diff --git a/build/cross_deps/zlib/repositories.bzl b/build/cross_deps/zlib/repositories.bzl
new file mode 100644
index 000000000000..3185b65222a8
--- /dev/null
+++ b/build/cross_deps/zlib/repositories.bzl
@@ -0,0 +1,18 @@
+"""A module defining the third party dependency OpenResty"""
+
+load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+def zlib_repositories():
+ """Defines the zlib repository"""
+
+ http_archive(
+ name = "cross_deps_zlib",
+ urls = [
+ "https://zlib.net/zlib-1.2.13.tar.gz",
+ "https://zlib.net/fossils/zlib-1.2.13.tar.gz",
+ ],
+ sha256 = "b3a24de97a8fdbc835b9833169501030b8977031bcb54b3b3ac13740f846ab30",
+ strip_prefix = "zlib-1.2.13",
+ build_file = "//build/cross_deps/zlib:BUILD.zlib.bazel",
+ )
diff --git a/build/dockerfiles/apk.Dockerfile b/build/dockerfiles/apk.Dockerfile
new file mode 100644
index 000000000000..5dfcb2f37bc1
--- /dev/null
+++ b/build/dockerfiles/apk.Dockerfile
@@ -0,0 +1,56 @@
+ARG KONG_BASE_IMAGE=alpine:3.16
+FROM --platform=$TARGETPLATFORM $KONG_BASE_IMAGE
+
+LABEL maintainer="Kong Docker Maintainers (@team-gateway-bot)"
+
+ARG KONG_VERSION
+ENV KONG_VERSION $KONG_VERSION
+
+ARG KONG_PREFIX=/usr/local/kong
+ENV KONG_PREFIX $KONG_PREFIX
+
+ARG EE_PORTS
+
+ARG TARGETARCH
+
+ARG KONG_ARTIFACT=kong.${TARGETARCH}.apk.tar.gz
+ARG KONG_ARTIFACT_PATH=
+COPY ${KONG_ARTIFACT_PATH}${KONG_ARTIFACT} /tmp/kong.apk.tar.gz
+
+RUN apk upgrade --update-cache \
+ && apk add --virtual .build-deps tar gzip \
+ && tar -C / -xzf /tmp/kong.apk.tar.gz \
+ && apk add --no-cache libstdc++ libgcc pcre perl tzdata libcap zlib zlib-dev bash yaml \
+ && adduser -u 1000 -S kong \
+ && addgroup -g 1000 -S kong \
+ && mkdir -p "${KONG_PREFIX}" \
+ && chown -R kong:0 ${KONG_PREFIX} \
+ && chown kong:0 /usr/local/bin/kong \
+ && chmod -R g=u ${KONG_PREFIX} \
+ && chown -R kong:kong /usr/local/bin/luarocks \
+ && chown -R kong:kong /usr/local/lib/lua \
+ && chown -R kong:kong /usr/local/lib/luarocks \
+ && chown -R kong:kong /usr/local/openresty \
+ && chown -R kong:kong /usr/local/etc/luarocks \
+ && chown -R kong:kong /usr/local/share/lua \
+ && rm -rf /tmp/kong.apk.tar.gz \
+ && ln -sf /usr/local/openresty/bin/resty /usr/local/bin/resty \
+ && ln -sf /usr/local/openresty/luajit/bin/luajit /usr/local/bin/luajit \
+ && ln -sf /usr/local/openresty/luajit/bin/luajit /usr/local/bin/lua \
+ && ln -sf /usr/local/openresty/nginx/sbin/nginx /usr/local/bin/nginx \
+ && apk del .build-deps \
+ && kong version
+
+COPY build/dockerfiles/entrypoint.sh /entrypoint.sh
+
+USER kong
+
+ENTRYPOINT ["/entrypoint.sh"]
+
+EXPOSE 8000 8443 8001 8444 $EE_PORTS
+
+STOPSIGNAL SIGQUIT
+
+HEALTHCHECK --interval=60s --timeout=10s --retries=10 CMD kong health
+
+CMD ["kong", "docker-start"]
diff --git a/build/dockerfiles/deb.Dockerfile b/build/dockerfiles/deb.Dockerfile
new file mode 100644
index 000000000000..4a44abd0cd4b
--- /dev/null
+++ b/build/dockerfiles/deb.Dockerfile
@@ -0,0 +1,47 @@
+ARG KONG_BASE_IMAGE=debian:bullseye-slim
+FROM --platform=$TARGETPLATFORM $KONG_BASE_IMAGE
+
+LABEL maintainer="Kong Docker Maintainers (@team-gateway-bot)"
+
+ARG KONG_VERSION
+ENV KONG_VERSION $KONG_VERSION
+
+ARG KONG_PREFIX=/usr/local/kong
+ENV KONG_PREFIX $KONG_PREFIX
+
+ARG EE_PORTS
+
+ARG TARGETARCH
+
+ARG KONG_ARTIFACT=kong.${TARGETARCH}.deb
+ARG KONG_ARTIFACT_PATH=
+COPY ${KONG_ARTIFACT_PATH}${KONG_ARTIFACT} /tmp/kong.deb
+
+RUN apt-get update \
+ && apt-get -y upgrade \
+ && apt-get -y autoremove \
+ && apt-get install -y --no-install-recommends /tmp/kong.deb \
+ && rm -rf /var/lib/apt/lists/* \
+ && rm -rf /tmp/kong.deb \
+ && chown kong:0 /usr/local/bin/kong \
+ && chown -R kong:0 ${KONG_PREFIX} \
+ && ln -sf /usr/local/openresty/bin/resty /usr/local/bin/resty \
+ && ln -sf /usr/local/openresty/luajit/bin/luajit /usr/local/bin/luajit \
+ && ln -sf /usr/local/openresty/luajit/bin/luajit /usr/local/bin/lua \
+ && ln -sf /usr/local/openresty/nginx/sbin/nginx /usr/local/bin/nginx \
+ && ln -sf "$(find / -name "libz.*" 2>/dev/null | head -n 1)" /usr/lib/libz.so \
+ && kong version
+
+COPY build/dockerfiles/entrypoint.sh /entrypoint.sh
+
+USER kong
+
+ENTRYPOINT ["/entrypoint.sh"]
+
+EXPOSE 8000 8443 8001 8444 $EE_PORTS
+
+STOPSIGNAL SIGQUIT
+
+HEALTHCHECK --interval=60s --timeout=10s --retries=10 CMD kong health
+
+CMD ["kong", "docker-start"]
diff --git a/build/dockerfiles/entrypoint.sh b/build/dockerfiles/entrypoint.sh
new file mode 100755
index 000000000000..f4f2a499b777
--- /dev/null
+++ b/build/dockerfiles/entrypoint.sh
@@ -0,0 +1,57 @@
+#!/usr/bin/env bash
+set -Eeo pipefail
+
+# usage: file_env VAR [DEFAULT]
+# ie: file_env 'XYZ_DB_PASSWORD' 'example'
+# (will allow for "$XYZ_DB_PASSWORD_FILE" to fill in the value of
+# "$XYZ_DB_PASSWORD" from a file, especially for Docker's secrets feature)
+file_env() {
+ local var="$1"
+ local fileVar="${var}_FILE"
+ local def="${2:-}"
+ # Do not continue if _FILE env is not set
+ if ! [ "${!fileVar:-}" ]; then
+ return
+ elif [ "${!var:-}" ] && [ "${!fileVar:-}" ]; then
+ echo >&2 "error: both $var and $fileVar are set (but are exclusive)"
+ exit 1
+ fi
+ local val="$def"
+ if [ "${!var:-}" ]; then
+ val="${!var}"
+ elif [ "${!fileVar:-}" ]; then
+ val="$(< "${!fileVar}")"
+ fi
+ export "$var"="$val"
+ unset "$fileVar"
+}
+
+export KONG_NGINX_DAEMON=${KONG_NGINX_DAEMON:=off}
+
+if [[ "$1" == "kong" ]]; then
+
+ all_kong_options="/usr/local/share/lua/5.1/kong/templates/kong_defaults.lua"
+ set +Eeo pipefail
+ while IFS='' read -r LINE || [ -n "${LINE}" ]; do
+ opt=$(echo "$LINE" | grep "=" | sed "s/=.*$//" | sed "s/ //" | tr '[:lower:]' '[:upper:]')
+ file_env "KONG_$opt"
+ done < $all_kong_options
+ set -Eeo pipefail
+
+ file_env KONG_PASSWORD
+ PREFIX=${KONG_PREFIX:=/usr/local/kong}
+
+ if [[ "$2" == "docker-start" ]]; then
+ kong prepare -p "$PREFIX" "$@"
+
+ ln -sfn /dev/stdout $PREFIX/logs/access.log
+ ln -sfn /dev/stdout $PREFIX/logs/admin_access.log
+ ln -sfn /dev/stderr $PREFIX/logs/error.log
+
+ exec /usr/local/openresty/nginx/sbin/nginx \
+ -p "$PREFIX" \
+ -c nginx.conf
+ fi
+fi
+
+exec "$@"
diff --git a/build/dockerfiles/rpm.Dockerfile b/build/dockerfiles/rpm.Dockerfile
new file mode 100644
index 000000000000..51067e2aa781
--- /dev/null
+++ b/build/dockerfiles/rpm.Dockerfile
@@ -0,0 +1,60 @@
+ARG KONG_BASE_IMAGE=redhat/ubi8
+FROM --platform=$TARGETPLATFORM $KONG_BASE_IMAGE
+
+LABEL maintainer="Kong Docker Maintainers (@team-gateway-bot)"
+
+ARG KONG_VERSION
+ENV KONG_VERSION $KONG_VERSION
+
+# RedHat required labels
+LABEL name="Kong" \
+ vendor="Kong" \
+ version="$KONG_VERSION" \
+ release="1" \
+ url="https://konghq.com" \
+ summary="Next-Generation API Platform for Modern Architectures" \
+ description="Next-Generation API Platform for Modern Architectures"
+
+# RedHat required LICENSE file approved path
+COPY LICENSE /licenses/
+
+ARG PACKAGE_DISTRO=el7
+
+ARG KONG_PREFIX=/usr/local/kong
+ENV KONG_PREFIX $KONG_PREFIX
+
+ARG EE_PORTS
+
+ARG TARGETARCH
+
+ARG KONG_ARTIFACT=kong.${PACKAGE_DISTRO}.${TARGETARCH}.rpm
+ARG KONG_ARTIFACT_PATH=
+COPY ${KONG_ARTIFACT_PATH}${KONG_ARTIFACT} /tmp/kong.rpm
+
+# hadolint ignore=DL3015
+RUN yum update -y \
+ && yum install -y /tmp/kong.rpm \
+ && rm /tmp/kong.rpm \
+ && chown kong:0 /usr/local/bin/kong \
+ && chown -R kong:0 /usr/local/kong \
+ && ln -sf /usr/local/openresty/bin/resty /usr/local/bin/resty \
+ && ln -sf /usr/local/openresty/luajit/bin/luajit /usr/local/bin/luajit \
+ && ln -sf /usr/local/openresty/luajit/bin/luajit /usr/local/bin/lua \
+ && ln -sf /usr/local/openresty/nginx/sbin/nginx /usr/local/bin/nginx \
+ && ln -sf "$(find / -name "libz.*" 2>/dev/null | head -n 1)" /usr/lib/libz.so \
+ && ln -sf "$(find / -name "libz.*" 2>/dev/null | head -n 1)" /usr/lib64/libz.so \
+ && kong version
+
+COPY build/dockerfiles/entrypoint.sh /entrypoint.sh
+
+USER kong
+
+ENTRYPOINT ["/entrypoint.sh"]
+
+EXPOSE 8000 8443 8001 8444 $EE_PORTS
+
+STOPSIGNAL SIGQUIT
+
+HEALTHCHECK --interval=60s --timeout=10s --retries=10 CMD kong health
+
+CMD ["kong", "docker-start"]
diff --git a/build/kong_bindings.bzl b/build/kong_bindings.bzl
new file mode 100644
index 000000000000..f81b22301021
--- /dev/null
+++ b/build/kong_bindings.bzl
@@ -0,0 +1,108 @@
+"""
+Global variables
+"""
+
+def _load_vars(ctx):
+ # Read env from .requirements
+ requirements = ctx.read(Label("@kong//:.requirements"))
+ content = ctx.execute(["bash", "-c", "echo '%s' | " % requirements +
+ """grep -E '^(\\w*)=(.+)$' | sed -E 's/^(.*)=([^# ]+).*$/"\\1": "\\2",/'"""]).stdout
+ content = content.replace('""', '"')
+
+ # Workspace path
+ workspace_path = "%s" % ctx.path(Label("@//:WORKSPACE")).dirname
+ content += '"WORKSPACE_PATH": "%s",\n' % workspace_path
+
+ # Local env
+    # Temporary fix for https://github.com/bazelbuild/bazel/issues/14693#issuecomment-1079006291
+ for key in [
+ "GITHUB_TOKEN",
+ "RPM_SIGNING_KEY_FILE",
+ "NFPM_RPM_PASSPHRASE",
+ ]:
+ value = ctx.os.environ.get(key, "")
+ if value:
+ content += '"%s": "%s",\n' % (key, value)
+
+ build_name = ctx.os.environ.get("BUILD_NAME", "")
+ content += '"BUILD_NAME": "%s",\n' % build_name
+
+ build_destdir = workspace_path + "/bazel-bin/build/" + build_name
+ content += '"BUILD_DESTDIR": "%s",\n' % build_destdir
+
+ install_destdir = ctx.os.environ.get("INSTALL_DESTDIR", "MANAGED")
+ if install_destdir == "MANAGED":
+ install_destdir = build_destdir
+ content += '"INSTALL_DESTDIR": "%s",\n' % install_destdir
+
+ # Kong Version
+ # TODO: this may not change after a bazel clean if cache exists
+ kong_version = ctx.execute(["bash", "scripts/grep-kong-version.sh"], working_directory = workspace_path).stdout
+ content += '"KONG_VERSION": "%s",' % kong_version.strip()
+
+ if ctx.os.name == "mac os x":
+ nproc = ctx.execute(["sysctl", "-n", "hw.ncpu"]).stdout.strip()
+ else: # assume linux
+ nproc = ctx.execute(["nproc"]).stdout.strip()
+
+ content += '"%s": "%s",' % ("NPROC", nproc)
+
+ macos_target = ""
+ if ctx.os.name == "mac os x":
+ macos_target = ctx.execute(["sw_vers", "-productVersion"]).stdout.strip()
+ content += '"MACOSX_DEPLOYMENT_TARGET": "%s",' % macos_target
+
+ # convert them into a list of labels relative to the workspace root
+ # TODO: this may not change after a bazel clean if cache exists
+ patches = sorted([
+ '"@kong//:%s"' % str(p).replace(workspace_path, "").lstrip("/")
+ for p in ctx.path(workspace_path + "/build/openresty/patches").readdir()
+ ])
+
+ content += '"OPENRESTY_PATCHES": [%s],' % (", ".join(patches))
+
+ ctx.file("BUILD.bazel", "")
+ ctx.file("variables.bzl", "KONG_VAR = {\n" + content + "\n}")
+
+def _check_sanity(ctx):
+ if ctx.os.name == "mac os x":
+ xcode_prefix = ctx.execute(["xcode-select", "-p"]).stdout.strip()
+ if "CommandLineTools" in xcode_prefix:
+ fail("Command Line Tools is not supported, please install Xcode from App Store.\n" +
+ "If you recently installed Xcode, please run `sudo xcode-select -s /Applications/Xcode.app/Contents/Developer` to switch to Xcode,\n" +
+ "then do a `bazel clean --expunge` and try again.\n" +
+ "The following command is useful to check if Xcode is picked up by Bazel:\n" +
+ "eval `find /private/var/tmp/_bazel_*/|grep xcode-locator|head -n1`")
+
+ python = ctx.execute(["which", "python"]).stdout.strip()
+ if not python:
+            fail("rules_foreign_cc hasn't migrated to python3 on macOS yet, and your system doesn't \n" +
+                 "have a `python` binary. Consider creating a symlink to `python3` and including it in PATH:\n" +
+ "ln -s `which python3` /usr/local/bin/python\n" +
+ "export PATH=/usr/local/bin:$PATH bazel build \n")
+
+ user = ctx.os.environ.get("USER", "")
+ if "@" in user:
+ fail("Bazel uses $USER in cache and rule_foreign_cc uses `@` in its sed command.\n" +
+ "However, your username contains a `@` character, which will cause build failure.\n" +
+ "Please rerun this build with:\n" +
+ "export USER=" + user.replace("@", "_") + " bazel build ")
+
+def _load_bindings_impl(ctx):
+ _check_sanity(ctx)
+
+ _load_vars(ctx)
+
+load_bindings = repository_rule(
+ implementation = _load_bindings_impl,
+ # force "fetch"/invalidation of this repository every time it runs
+    # so that environ vars, patches and the Kong version are up to date
+ # see https://blog.bazel.build/2017/02/22/repository-invalidation.html
+ local = True,
+ environ = [
+ "BUILD_NAME",
+ "INSTALL_DESTDIR",
+ "RPM_SIGNING_KEY_FILE",
+ "NFPM_RPM_PASSPHRASE",
+ ],
+)
diff --git a/build/luarocks/BUILD.bazel b/build/luarocks/BUILD.bazel
new file mode 100644
index 000000000000..79168c44d857
--- /dev/null
+++ b/build/luarocks/BUILD.bazel
@@ -0,0 +1,21 @@
+load("//build:build_system.bzl", "kong_rules_group")
+
+exports_files(
+ [
+ "BUILD.luarocks.bazel",
+ "luarocks_wrap_script.lua",
+ "templates/luarocks_exec.sh",
+ "templates/luarocks_make.sh",
+ "templates/luarocks_target.sh",
+ ],
+ visibility = ["//visibility:public"],
+)
+
+kong_rules_group(
+ name = "luarocks",
+ propagates = [
+ "@luarocks//:luarocks_make",
+ "@luarocks//:luarocks_target",
+ ],
+ visibility = ["//:__pkg__"],
+)
diff --git a/build/luarocks/BUILD.luarocks.bazel b/build/luarocks/BUILD.luarocks.bazel
new file mode 100644
index 000000000000..0d7924e3befe
--- /dev/null
+++ b/build/luarocks/BUILD.luarocks.bazel
@@ -0,0 +1,110 @@
+load("@rules_foreign_cc//foreign_cc:defs.bzl", "configure_make")
+load("@kong//build:build_system.bzl", "kong_template_genrule")
+load("@kong_bindings//:variables.bzl", "KONG_VAR")
+
+filegroup(
+ name = "all_srcs",
+ srcs = glob(
+ include = ["**"],
+ exclude = ["*.bazel"],
+ ),
+)
+
+# This rule is used to bootstrap luarocks to install the rocks dependencies.
+# A different rule is used to install luarocks in the release artifact
+# so that we get the correct interpreter path, Lua paths, etc.
+configure_make(
+ name = "luarocks_host",
+ configure_command = "configure",
+ configure_in_place = True,
+ configure_options = [
+ "--lua-suffix=jit",
+ "--with-lua=$$EXT_BUILD_DEPS$$/luajit",
+ "--with-lua-include=$$EXT_BUILD_DEPS$$/luajit/include/luajit-2.1",
+ ],
+ lib_source = ":all_srcs",
+ out_bin_dir = "",
+ out_binaries = ["bin/luarocks"], # fake binary
+ out_data_dirs = ["luarocks"], # mark all files as data
+ targets = [
+ "build",
+ "install",
+ ],
+ visibility = ["//visibility:public"],
+ deps = [
+ "@openresty//:luajit",
+ ],
+)
+
+kong_template_genrule(
+ name = "luarocks_exec",
+ srcs = [
+ "@openssl//:openssl",
+ ] + select({
+ "@kong//:any-cross": ["@cross_deps_libyaml//:libyaml"],
+ "//conditions:default": [
+ "@luarocks//:luarocks_host",
+ "@openresty//:luajit",
+ ],
+ }),
+ is_executable = True,
+ output = "luarocks_exec.sh",
+ substitutions = {
+ "{{lib_rpath}}": "%s/kong/lib" % KONG_VAR["INSTALL_DESTDIR"],
+ },
+ template = "@//build/luarocks:templates/luarocks_exec.sh",
+ tools = select({
+ "@kong//:any-cross": [
+ "@luarocks//:luarocks_host",
+ "@openresty//:luajit",
+ ],
+ "//conditions:default": [],
+ }),
+ visibility = ["//visibility:public"],
+)
+
+kong_template_genrule(
+ name = "luarocks_make",
+ srcs = [
+ "@kong//:rockspec_srcs",
+ "@luarocks//:luarocks_exec",
+ "@luarocks//:luarocks_target", # to avoid concurrency issue, run this after luarocks_target
+ ],
+ is_executable = True,
+ output = "luarocks_make.log",
+ progress_message = "Luarocks: Install Kong rocks dependencies",
+ template = "@//build/luarocks:templates/luarocks_make.sh",
+ visibility = ["//visibility:public"],
+)
+
+# install luarocks itself in target configuration
+kong_template_genrule(
+ name = "luarocks_target",
+ srcs = [":luarocks_exec"] + select({
+ "@kong//:any-cross": [],
+ "//conditions:default": [
+ "@luarocks//:luarocks_host",
+ "@openresty//:luajit",
+ ],
+ }),
+ is_executable = True,
+ output = "luarocks_target.log",
+ progress_message = "Luarocks: Install luarocks on target system",
+ substitutions = {
+ "{{build_destdir}}": KONG_VAR["BUILD_DESTDIR"],
+ "{{install_destdir}}": KONG_VAR["INSTALL_DESTDIR"],
+ "{{luarocks_version}}": KONG_VAR["LUAROCKS"],
+ "{{workspace_path}}": KONG_VAR["WORKSPACE_PATH"],
+ },
+ template = "@//build/luarocks:templates/luarocks_target.sh",
+ tools = [
+ "@//build/luarocks:luarocks_wrap_script.lua",
+ ] + select({
+ "@//:any-cross": [
+ "@luarocks//:luarocks_host",
+ "@openresty//:luajit",
+ ],
+ "//conditions:default": [],
+ }),
+ visibility = ["//visibility:public"],
+)
diff --git a/build/luarocks/luarocks_repositories.bzl b/build/luarocks/luarocks_repositories.bzl
new file mode 100644
index 000000000000..87ad74f85155
--- /dev/null
+++ b/build/luarocks/luarocks_repositories.bzl
@@ -0,0 +1,19 @@
+"""A module defining the third party dependency luarocks"""
+
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
+load("@kong_bindings//:variables.bzl", "KONG_VAR")
+
+def luarocks_repositories():
+ version = KONG_VAR["LUAROCKS"]
+
+ maybe(
+ http_archive,
+ name = "luarocks",
+ build_file = "//build/luarocks:BUILD.luarocks.bazel",
+ strip_prefix = "luarocks-" + version,
+ sha256 = "56ab9b90f5acbc42eb7a94cf482e6c058a63e8a1effdf572b8b2a6323a06d923",
+ urls = [
+ "https://luarocks.org/releases/luarocks-" + version + ".tar.gz",
+ ],
+ )
diff --git a/build/luarocks/luarocks_wrap_script.lua b/build/luarocks/luarocks_wrap_script.lua
new file mode 100644
index 000000000000..049f93e115cf
--- /dev/null
+++ b/build/luarocks/luarocks_wrap_script.lua
@@ -0,0 +1,40 @@
+-- This software is copyright Kong Inc. and its licensors.
+-- Use of the software is subject to the agreement between your organization
+-- and Kong Inc. If there is no such agreement, use is governed by and
+-- subject to the terms of the Kong Master Software License Agreement found
+-- at https://konghq.com/enterprisesoftwarelicense/.
+-- [ END OF LICENSE 0867164ffc95e54f04670b5169c09574bdbd9bba ]
+
+local cfg = require("luarocks.core.cfg")
+assert(cfg.init())
+-- print(require("inspect")(cfg))
+
+local fs = require "luarocks.fs"
+fs.init()
+
+local queries = require("luarocks.queries")
+local search = require("luarocks.search")
+
+local name = arg[1]
+local tree = arg[2]
+local install_dest = arg[3]
+
+local query = queries.new(name, nil, nil, true)
+
+local _, ver = assert(search.pick_installed_rock(query))
+
+if install_dest:sub(-1) ~= "/" then
+ install_dest = install_dest .. "/"
+end
+-- HACK
+cfg.lua_interpreter = "luajit"
+cfg.sysconfdir = install_dest .. "etc/luarocks"
+cfg.variables["LUA_DIR"] = install_dest .. "openresty/luajit"
+cfg.variables["LUA_INCDIR"] = install_dest .. "openresty/luajit/include/luajit-2.1"
+cfg.variables["LUA_BINDIR"] = install_dest .. "openresty/luajit/bin"
+
+local wrap = fs.wrap_script
+
+wrap(
+ string.format("%s/lib/luarocks/rocks-5.1/luarocks/%s/bin/%s", tree, ver, name),
+ string.format("%s/bin/%s", tree, name), "one", name, ver)
diff --git a/build/luarocks/templates/luarocks_exec.sh b/build/luarocks/templates/luarocks_exec.sh
new file mode 100644
index 000000000000..220d92eb6a41
--- /dev/null
+++ b/build/luarocks/templates/luarocks_exec.sh
@@ -0,0 +1,83 @@
+#!/bin/bash -e
+
+# template variables starts
+openssl_path="{{@openssl//:openssl}}"
+luarocks_host_path="{{@luarocks//:luarocks_host}}"
+luajit_path="{{@openresty//:luajit}}"
+cross_deps_libyaml_path="{{@cross_deps_libyaml//:libyaml}}"
+CC={{CC}}
+LD={{LD}}
+LIB_RPATH={{lib_rpath}}
+# template variables ends
+
+root_path=$(pwd)
+
+ROCKS_DIR=$root_path/$(dirname $@)/luarocks_tree
+if [ ! -d $ROCKS_DIR ]; then
+ mkdir -p $ROCKS_DIR
+fi
+# pre create the dir and file so bsd readlink is happy
+mkdir -p "$ROCKS_DIR/../cache"
+CACHE_DIR=$(readlink -f "$ROCKS_DIR/../cache")
+touch "$ROCKS_DIR/../luarocks_config.lua"
+ROCKS_CONFIG=$(readlink -f "$ROCKS_DIR/../luarocks_config.lua")
+
+OPENSSL_DIR=$root_path/$openssl_path
+
+# we use system libyaml on macos
+if [[ "$OSTYPE" == "darwin"* ]]; then
+ YAML_DIR=$(HOME=~$(whoami) PATH=/opt/homebrew/bin:$PATH brew --prefix)/opt/libyaml
+elif [[ -d $cross_deps_libyaml_path ]]; then
+ # TODO: is there a good way to use locations but doesn't break non-cross builds?
+ YAML_DIR=$root_path/$cross_deps_libyaml_path
+else
+ YAML_DIR=/usr
+fi
+
+if [[ $CC != /* ]]; then
+ # point to our relative path of managed toolchain
+ CC=$root_path/$CC
+ LD=$root_path/$LD
+fi
+
+echo "
+rocks_trees = {
+ { name = [[system]], root = [[$ROCKS_DIR]] }
+}
+local_cache = '$CACHE_DIR'
+show_downloads = true
+gcc_rpath = false -- disable default rpath, add our own
+variables = {
+ CC = '$CC',
+ LD = '$LD',
+ LDFLAGS = '-Wl,-rpath,$LIB_RPATH',
+}
+" > $ROCKS_CONFIG
+
+LUAROCKS_HOST=$luarocks_host_path
+
+host_luajit=$root_path/$luajit_path/bin/luajit
+
+cat << EOF > $@
+LIB_RPATH=$LIB_RPATH
+LUAROCKS_HOST=$LUAROCKS_HOST
+ROCKS_DIR=$ROCKS_DIR
+CACHE_DIR=$CACHE_DIR
+ROCKS_CONFIG=$ROCKS_CONFIG
+
+export LUAROCKS_CONFIG=$ROCKS_CONFIG
+export CC=$CC
+export LD=$LD
+export EXT_BUILD_ROOT=$root_path # for musl
+
+# no idea why PATH is not preserved in ctx.actions.run_shell
+export PATH=$PATH
+
+# force the interpreter here instead of invoking luarocks directly;
+# some distros have a BINPRM_BUF_SIZE smaller than the generated shebang,
+# which is usually more than 160 bytes
+$host_luajit $root_path/$LUAROCKS_HOST/bin/luarocks \$private_rocks_args \$@ \\
+ OPENSSL_DIR=$OPENSSL_DIR \\
+ CRYPTO_DIR=$OPENSSL_DIR \\
+ YAML_DIR=$YAML_DIR
+EOF
diff --git a/build/luarocks/templates/luarocks_make.sh b/build/luarocks/templates/luarocks_make.sh
new file mode 100644
index 000000000000..dc5d6105f3c2
--- /dev/null
+++ b/build/luarocks/templates/luarocks_make.sh
@@ -0,0 +1,21 @@
+#!/bin/bash -e
+
+# template variables starts
+luarocks_exec="{{@luarocks//:luarocks_exec}}"
+# template variables ends
+
+if [[ "$OSTYPE" == "darwin"* ]]; then
+ export DEVELOPER_DIR=$(xcode-select -p)
+ export SDKROOT=$(xcrun --sdk macosx --show-sdk-path)
+fi
+mkdir -p $(dirname $@)
+# lyaml needs this and doesn't honor --no-doc;
+# the alternative would be to populate a non-existent HOME
+# env var just to make ldoc happy.
+# Alias the LDOC command to the true(1) command instead.
+export LDOC=true
+
+$luarocks_exec make --no-doc 2>&1 >$@.tmp
+
+# only generate the output when the command succeeds
+mv $@.tmp $@
\ No newline at end of file
diff --git a/build/luarocks/templates/luarocks_target.sh b/build/luarocks/templates/luarocks_target.sh
new file mode 100644
index 000000000000..f84d52dcb4c3
--- /dev/null
+++ b/build/luarocks/templates/luarocks_target.sh
@@ -0,0 +1,59 @@
+#!/bin/bash -e
+
+# template variables starts
+workspace_path="{{workspace_path}}"
+luarocks_version="{{luarocks_version}}"
+install_destdir="{{install_destdir}}"
+build_destdir="{{build_destdir}}"
+
+luarocks_exec="{{@luarocks//:luarocks_exec}}"
+luajit_path="{{@openresty//:luajit}}"
+luarocks_host_path="{{@luarocks//:luarocks_host}}"
+luarocks_wrap_script="{{@//build/luarocks:luarocks_wrap_script.lua}}"
+# template variables ends
+
+mkdir -p $(dirname $@)
+
+
+# install luarocks
+$luarocks_exec install "luarocks $luarocks_version"
+
+# use host configuration to invoke luarocks API to wrap a correct bin/luarocks script
+rocks_tree=$workspace_path/$(dirname $luarocks_exec)/luarocks_tree
+host_luajit=$workspace_path/$luajit_path/bin/luajit
+
+host_luarocks_tree=$luarocks_host_path
+export LUA_PATH="$build_destdir/share/lua/5.1/?.lua;$build_destdir/share/lua/5.1/?/init.lua;$host_luarocks_tree/share/lua/5.1/?.lua;$host_luarocks_tree/share/lua/5.1/?/init.lua;;"
+
+ROCKS_CONFIG="luarocks_make_config.lua"
+cat << EOF > $ROCKS_CONFIG
+rocks_trees = {
+ { name = [[system]], root = [[$rocks_tree]] }
+}
+EOF
+export LUAROCKS_CONFIG=$ROCKS_CONFIG
+
+$host_luajit $luarocks_wrap_script \
+ luarocks $rocks_tree $install_destdir 2>&1 > $@.tmp
+
+# write the luarocks config with host configuration
+mkdir -p $rocks_tree/etc/luarocks
+cat << EOF > $rocks_tree/etc/luarocks/config-5.1.lua
+-- LuaRocks configuration
+rocks_trees = {
+ { name = "user", root = home .. "/.luarocks" };
+ { name = "system", root = "$install_destdir" };
+ }
+ lua_interpreter = "luajit";
+ variables = {
+ LUA_DIR = "$install_destdir/openresty/luajit";
+ LUA_INCDIR = "$install_destdir/openresty/luajit/include/luajit-2.1";
+ LUA_BINDIR = "$install_destdir/openresty/luajit/bin";
+}
+EOF
+
+# TODO: this still doesn't work
+sed -i -e "s|$rocks_tree|$install_destdir|g" $rocks_tree/bin/luarocks
+
+# only generate the output when the command succeeds
+mv $@.tmp $@
\ No newline at end of file
diff --git a/build/nfpm/BUILD.bazel b/build/nfpm/BUILD.bazel
new file mode 100644
index 000000000000..d70ebc0efe10
--- /dev/null
+++ b/build/nfpm/BUILD.bazel
@@ -0,0 +1,5 @@
+filegroup(
+ name = "all_srcs",
+ srcs = glob(["**"]),
+ visibility = ["//visibility:public"],
+)
diff --git a/build/nfpm/repositories.bzl b/build/nfpm/repositories.bzl
new file mode 100644
index 000000000000..3f4f1a4e974d
--- /dev/null
+++ b/build/nfpm/repositories.bzl
@@ -0,0 +1,55 @@
+"""A module defining the third party dependency OpenResty"""
+
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+def _nfpm_release_select_impl(ctx):
+ if ctx.attr.build_file:
+ ctx.file("BUILD.bazel", ctx.read(ctx.attr.build_file))
+ elif ctx.attr.build_file_content:
+ ctx.file("BUILD.bazel", ctx.attr.build_file_content)
+
+ os_name = ctx.os.name
+ os_arch = ctx.os.arch
+
+ if os_arch == "aarch64":
+ os_arch = "arm64"
+ elif os_arch == "amd64":
+ os_arch = "x86_64"
+ else:
+ fail("Unsupported arch %s" % os_arch)
+
+ if os_name == "mac os x":
+ os_name = "Darwin"
+ elif os_name != "linux":
+ fail("Unsupported OS %s" % os_name)
+
+ nfpm_bin = "%s" % ctx.path(Label("@nfpm_%s_%s//:nfpm" % (os_name, os_arch)))
+ ctx.symlink(nfpm_bin, "nfpm")
+
+nfpm_release_select = repository_rule(
+ implementation = _nfpm_release_select_impl,
+ attrs = {
+ "build_file": attr.label(allow_single_file = True),
+ "build_file_content": attr.string(),
+ },
+)
+
+def nfpm_repositories():
+ gh_matrix = [
+ ["linux", "x86_64", "4c63031ddbef198e21c8561c438dde4c93c3457ffdc868d7d28fa670e0cc14e5"],
+ ["linux", "arm64", "2af1717cc9d5dcad5a7e42301dabc538acf5d12ce9ee39956c66f30215311069"],
+ ["Darwin", "x86_64", "fb3b8ab5595117f621c69cc51db71d481fbe733fa3c35500e1b64319dc8fd5b4"],
+ ["Darwin", "arm64", "9ca3ac6e0c4139a9de214f78040d1d11dd221496471696cc8ab5d357850ccc54"],
+ ]
+ for name, arch, sha in gh_matrix:
+ http_archive(
+ name = "nfpm_%s_%s" % (name, arch),
+ url = "https://github.com/goreleaser/nfpm/releases/download/v2.23.0/nfpm_2.23.0_%s_%s.tar.gz" % (name, arch),
+ sha256 = sha,
+ build_file = "//build/nfpm:BUILD.bazel",
+ )
+
+ nfpm_release_select(
+ name = "nfpm",
+ build_file = "//build/nfpm:BUILD.bazel",
+ )
diff --git a/build/nfpm/rules.bzl b/build/nfpm/rules.bzl
new file mode 100644
index 000000000000..23fe413d224d
--- /dev/null
+++ b/build/nfpm/rules.bzl
@@ -0,0 +1,90 @@
+"""
+NFPM package rule.
+"""
+
+load("@bazel_skylib//lib:dicts.bzl", "dicts")
+load("@kong_bindings//:variables.bzl", "KONG_VAR")
+
+def _nfpm_pkg_impl(ctx):
+ env = dicts.add(ctx.attr.env, ctx.attr.extra_env, KONG_VAR, ctx.configuration.default_shell_env)
+
+ target_cpu = ctx.attr._cc_toolchain[cc_common.CcToolchainInfo].cpu
+ if target_cpu == "k8" or target_cpu == "x86_64" or target_cpu == "amd64":
+ target_arch = "amd64"
+ else:
+ fail("Unsupported platform cpu: %s" % target_cpu)
+ env["ARCH"] = target_arch
+
+    # XXX: remove OPENRESTY_PATCHES from KONG_VAR since env values must be strings, not lists
+ env["OPENRESTY_PATCHES"] = ""
+
+ pkg_ext = ctx.attr.packager
+ if pkg_ext == "apk":
+ pkg_ext = "apk.tar.gz"
+
+ # create like kong.amd64.deb
+ out = ctx.actions.declare_file("%s/%s.%s.%s" % (
+ ctx.attr.out_dir,
+ ctx.attr.pkg_name,
+ target_arch,
+ pkg_ext,
+ ))
+
+ nfpm_args = ctx.actions.args()
+ nfpm_args.add("pkg")
+ nfpm_args.add("-f", ctx.file.config.path)
+ nfpm_args.add("-p", ctx.attr.packager)
+ nfpm_args.add("-t", out.path)
+
+ ctx.actions.run_shell(
+ inputs = ctx.files._nfpm_bin,
+ mnemonic = "nFPM",
+ command = "ln -sf %s nfpm-prefix; external/nfpm/nfpm $@" % KONG_VAR["BUILD_DESTDIR"],
+ arguments = [nfpm_args],
+ outputs = [out],
+ env = env,
+ )
+
+ # TODO: fix runfiles so that it can used as a dep
+ return [DefaultInfo(files = depset([out]), runfiles = ctx.runfiles(files = ctx.files.config + ctx.files.deps))]
+
+nfpm_pkg = rule(
+ _nfpm_pkg_impl,
+ attrs = {
+ "config": attr.label(
+ mandatory = True,
+ allow_single_file = True,
+ doc = "nFPM configuration file.",
+ ),
+ "packager": attr.string(
+ mandatory = True,
+ doc = "Packager name.",
+ ),
+ "env": attr.string_dict(
+ doc = "Environment variables to set when running nFPM.",
+ ),
+ "extra_env": attr.string_dict(
+ # https://github.com/bazelbuild/bazel/issues/12457
+ doc = "Additional environment variables to set when running nFPM. This is a workaround since Bazel doesn't support union operator for select yet.",
+ ),
+ "pkg_name": attr.string(
+ mandatory = True,
+ doc = "Output package name.",
+ ),
+ "out_dir": attr.string(
+ doc = "Output directory name.",
+ default = "pkg",
+ ),
+ "deps": attr.label_list(
+ mandatory = False,
+ doc = "extra deps should exist in the package",
+ ),
+ # hidden attributes
+ "_nfpm_bin": attr.label(
+ default = "@nfpm//:all_srcs",
+ ),
+ "_cc_toolchain": attr.label(
+ default = "@bazel_tools//tools/cpp:current_cc_toolchain",
+ ),
+ },
+)
diff --git a/build/openresty/BUILD.bazel b/build/openresty/BUILD.bazel
new file mode 100644
index 000000000000..c527359e1f08
--- /dev/null
+++ b/build/openresty/BUILD.bazel
@@ -0,0 +1,6 @@
+exports_files(
+ [
+ "BUILD.openresty.bazel",
+ ],
+ visibility = ["//visibility:public"],
+)
diff --git a/build/openresty/BUILD.openresty.bazel b/build/openresty/BUILD.openresty.bazel
new file mode 100644
index 000000000000..cc4107cabc7c
--- /dev/null
+++ b/build/openresty/BUILD.openresty.bazel
@@ -0,0 +1,262 @@
+load("@rules_foreign_cc//foreign_cc:defs.bzl", "configure_make", "make")
+load("@bazel_skylib//lib:selects.bzl", "selects")
+load("@kong_bindings//:variables.bzl", "KONG_VAR")
+load("@openresty_binding//:variables.bzl", "LUAJIT_VERSION")
+
+filegroup(
+ name = "luajit_srcs",
+ srcs = glob(
+ include = ["bundle/LuaJIT*/**"],
+ ),
+)
+
+genrule(
+ name = "luajit_xcflags",
+ outs = ["luajit_xcflags.txt"],
+ cmd = "macos=" + select({
+ "@platforms//os:macos": "1",
+ "//conditions:default": "0",
+ }) + "\n" +
+ "aarch64=" + select({
+ "@platforms//cpu:aarch64": "1",
+ "//conditions:default": "0",
+ }) + "\n" +
+ "debug=" + select({
+ "@kong//:debug_flag": "1",
+ "//conditions:default": "0",
+ }) + "\n" +
+ "cross=" + select({
+ "@kong//:any-cross": "1",
+ "//conditions:default": "0",
+ }) +
+ """
+ flags="-DLUAJIT_ENABLE_LUA52COMPAT -DLUAJIT_VERSION=\\\\\\"{luajit_version}\\\\\\""
+ if [[ $$debug -eq 1 ]]; then
+ flags="$$flags -DLUA_USE_ASSERT -DLUA_USE_APICHECK"
+ if [[ $$macos -ne 1 ]]; then
+ if [[ $$cross -ne 1 ]]; then
+ flags="$$flags -DLUA_USE_VALGRIND"
+ fi
+ if [[ $$aarch64 -ne 1 ]]; then
+ flags="$$flags -DLUAJIT_USE_SYSMALLOC"
+ fi
+ fi
+ fi
+
+ if [[ $$macos -eq 1 ]]; then
+ flags="$$flags -fno-stack-check"
+ fi
+
+ echo "$$flags" >$@
+
+ """.format(luajit_version = LUAJIT_VERSION),
+ toolchains = [
+ "@bazel_tools//tools/cpp:current_cc_toolchain",
+ ],
+)
+
+make(
+ name = "luajit",
+ args = [
+ "LDFLAGS=\"-Wl,-rpath,%s/kong/lib\"" % (
+ KONG_VAR["INSTALL_DESTDIR"],
+ ), # make ffi.load happy, even when it's invoked without nginx
+ "XCFLAGS=\"$(cat $$EXT_BUILD_ROOT$$/$(execpath :luajit_xcflags))\"",
+ "LUA_ROOT=%s/openresty/luajit" % KONG_VAR["INSTALL_DESTDIR"].rstrip("/"),
+ "MACOSX_DEPLOYMENT_TARGET=" + KONG_VAR["MACOSX_DEPLOYMENT_TARGET"],
+ ] + select({
+ "@kong//:any-cross": [
+ "HOST_CC=cc",
+ ],
+ "@platforms//os:macos": [
+ "AR=/usr/bin/ar",
+ ],
+ "//conditions:default": [
+ ],
+ }),
+ data = [
+ ":luajit_xcflags",
+ ],
+ lib_source = ":luajit_srcs",
+ out_binaries = [
+ "luajit",
+ ],
+ out_shared_libs = select({
+ "@platforms//os:macos": [
+ "libluajit-5.1.2.dylib",
+ ],
+ "//conditions:default": [
+ "libluajit-5.1.so.2",
+ ],
+ }),
+ targets = [
+ "-j" + KONG_VAR["NPROC"],
+ "install",
+ ],
+ visibility = ["//visibility:public"],
+ deps = [
+ ],
+)
+
+selects.config_setting_group(
+ name = "nogroup-name-as-nobody",
+ match_any = [
+ "@kong//build/platforms/distro:rhel9",
+ "@kong//build/platforms/distro:rhel8",
+ "@kong//build/platforms/distro:aws2023",
+ "@kong//build/platforms/distro:aws2",
+ ],
+)
+
+selects.config_setting_group(
+ name = "needs-xcrypt2",
+ match_any = [
+ "@kong//build/platforms/distro:generic",
+ "@kong//build/platforms/distro:rhel9",
+ "@kong//build/platforms/distro:aws2023",
+ ],
+)
+
+CONFIGURE_OPTIONS = [
+ "--with-pcre-jit",
+ "--with-http_ssl_module",
+ "--with-http_sub_module",
+ "--with-http_realip_module",
+ "--with-http_stub_status_module",
+ "--with-http_v2_module",
+ "--with-stream_realip_module", # >= 1.11.4
+ "--with-stream_ssl_preread_module", # >= 1.11.5
+ "--without-http_encrypted_session_module",
+ "--with-luajit=$$EXT_BUILD_DEPS$$/luajit",
+ "--with-cc-opt=\"-I$$EXT_BUILD_DEPS$$/pcre/include\"",
+ "--with-cc-opt=\"-I$$EXT_BUILD_DEPS$$/luajit/include\"",
+ "--with-ld-opt=\"-L$$EXT_BUILD_DEPS$$/pcre/lib\"",
+ "--with-ld-opt=\"-L$$EXT_BUILD_DEPS$$/luajit/lib\"",
+ "--with-cc-opt=\"-I$$EXT_BUILD_DEPS$$/openssl/include\"",
+ "--with-ld-opt=\"-L$$EXT_BUILD_DEPS$$/openssl/lib\"",
+    # here let's try not having --disable-new-dtags; --disable-new-dtags creates rpath instead of runpath.
+    # note runpath can't handle indirect dependencies (nginx -> luajit -> dlopen("other")), so each indirect
+    # dependency should have its own rpath set (luajit, libxslt etc); on the other hand, rpath is not
+    # overridable by LD_LIBRARY_PATH and may cause trouble when debugging, so we _should_ prefer runpath.
+    # if it doesn't work, then add --disable-new-dtags back
+ "--with-ld-opt=\"-Wl,-rpath,%s/kong/lib\"" % KONG_VAR["INSTALL_DESTDIR"],
+ "-j%s" % KONG_VAR["NPROC"],
+
+    # options from our custom patch
+ "--with-install-prefix=%s" % KONG_VAR["INSTALL_DESTDIR"],
+
+    # Note $$EXT_BUILD_ROOT$$ is a Bazel variable, not an environment variable;
+    # it points to the directory of the current WORKSPACE
+
+ # external modules
+ "--add-module=$$EXT_BUILD_ROOT$$/external/lua-kong-nginx-module",
+ "--add-module=$$EXT_BUILD_ROOT$$/external/lua-kong-nginx-module/stream",
+] + select({
+ "@kong//:aarch64-linux-anylibc-cross": [
+ "--crossbuild=Linux:aarch64",
+ "--with-endian=little",
+ "--with-int=4",
+ "--with-long=8",
+ "--with-long-long=8",
+ "--with-ptr-size=8",
+ "--with-sig-atomic-t=4",
+ "--with-size-t=8",
+ "--with-off-t=8",
+ "--with-time-t=8",
+ "--with-sys-nerr=132",
+ ],
+ "@kong//:x86_64-linux-musl-cross": [
+ "--crossbuild=Linux:x86_64",
+ "--with-endian=little",
+ "--with-int=4",
+ "--with-long=8",
+ "--with-long-long=8",
+ "--with-ptr-size=8",
+ "--with-sig-atomic-t=4",
+ "--with-size-t=8",
+ "--with-off-t=8",
+ "--with-time-t=8",
+ "--with-sys-nerr=132",
+ ],
+ "//conditions:default": [],
+}) + select({
+ "@kong//:any-cross": [
+ "--with-cc-opt=\"-I$$EXT_BUILD_DEPS$$/zlib/include\"",
+ "--with-ld-opt=\"-L$$EXT_BUILD_DEPS$$/zlib/lib\"",
+ ],
+ "//conditions:default": [],
+}) + select({
+    # any cross build that has migrated to libxcrypt needs these flags;
+    # Alpine uses a different libc so it doesn't need them
+ "@kong//:aarch64-linux-anylibc-cross": [
+ "--with-cc-opt=\"-I$$EXT_BUILD_DEPS$$/libxcrypt/include\"",
+ "--with-ld-opt=\"-L$$EXT_BUILD_DEPS$$/libxcrypt/lib\"",
+ ],
+ "//conditions:default": [],
+}) + select({
+ "@kong//:debug_flag": [
+ "--with-debug",
+ "--with-no-pool-patch",
+ "--with-cc-opt=\"-DNGX_LUA_USE_ASSERT -DNGX_LUA_ABORT_AT_PANIC -O0\"",
+ ],
+ "//conditions:default": [],
+}) + select({
+    # some distros name the "nogroup" group "nobody"
+ ":nogroup-name-as-nobody": [
+ "--group=nobody",
+ ],
+ "//conditions:default": [],
+})
+
+# TODO: set prefix to populate pid_path, conf_path, log_path etc
+
+filegroup(
+ name = "all_srcs",
+ srcs = glob(
+ include = [
+ "configure",
+ "bundle/**",
+ ],
+ exclude = [
+ "bundle/LuaJIT*/**",
+ ],
+ ),
+)
+
+configure_make(
+ name = "openresty",
+ build_data = [
+ "@lua-kong-nginx-module//:all_srcs",
+ "@openresty_binding//:all_srcs",
+ ],
+ configure_command = "configure",
+ configure_in_place = True,
+ configure_options = CONFIGURE_OPTIONS,
+ lib_source = ":all_srcs",
+ out_bin_dir = "",
+ out_binaries = [
+ "nginx/sbin/nginx",
+ ],
+ targets = [
+ "-j" + KONG_VAR["NPROC"],
+ "install -j" + KONG_VAR["NPROC"],
+ ],
+ visibility = ["//visibility:public"],
+ deps = [
+ "@pcre",
+ "@openssl",
+ "@openresty//:luajit",
+ ] + select({
+ "@kong//:any-cross": [
+ "@cross_deps_zlib//:zlib",
+ ],
+ "//conditions:default": [],
+ }) + select({
+        # any cross build that has migrated to libxcrypt needs these flags;
+        # Alpine uses a different libc so it doesn't need them
+ ":needs-xcrypt2": [
+ "@cross_deps_libxcrypt//:libxcrypt",
+ ],
+ "//conditions:default": [],
+ }),
+)
diff --git a/build/openresty/atc_router/BUILD.bazel b/build/openresty/atc_router/BUILD.bazel
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/build/openresty/lua-resty-lmdb-cross.patch b/build/openresty/lua-resty-lmdb-cross.patch
new file mode 100644
index 000000000000..d1bf0820f57f
--- /dev/null
+++ b/build/openresty/lua-resty-lmdb-cross.patch
@@ -0,0 +1,51 @@
+lua-resty-lmdb is an external repository; previous build artifacts may carry over,
+thus we always clean here
+
+diff --git a/config b/config
+index 126c78c..1f0b2aa 100644
+--- a/config
++++ b/config
+@@ -5,6 +5,8 @@ ngx_module_incs="$ngx_addon_dir/lmdb/libraries/liblmdb $ngx_addon_dir/src"
+
+ . auto/module
+
++rm -f $ngx_addon_dir/lmdb/libraries/liblmdb/liblmdb.a
++
+ LINK_DEPS="$LINK_DEPS $ngx_addon_dir/lmdb/libraries/liblmdb/liblmdb.a"
+ CORE_LIBS="$CORE_LIBS $ngx_addon_dir/lmdb/libraries/liblmdb/liblmdb.a"
+
+diff --git a/config.make b/config.make
+index 14d8cc2..cf17251 100644
+--- a/config.make
++++ b/config.make
+@@ -3,7 +3,7 @@ cat <<EOF >>$NGX_MAKEFILE
+
+ $ngx_addon_dir/lmdb/libraries/liblmdb/liblmdb.a:
+ echo "Building liblmdb"; \\
+- \$(MAKE) -C $ngx_addon_dir/lmdb/libraries/liblmdb; \\
++ \$(MAKE) -C $ngx_addon_dir/lmdb/libraries/liblmdb CC=\$(CC) AR=\$(AR); \\
+ echo "Finished building liblmdb"
+
+ EOF
+diff --git a/libraries/liblmdb/Makefile b/libraries/liblmdb/Makefile
+index c252b50..1054432 100644
+--- a/lmdb/libraries/liblmdb/Makefile
++++ b/lmdb/libraries/liblmdb/Makefile
+@@ -18,13 +18,13 @@
+ # There may be other macros in mdb.c of interest. You should
+ # read mdb.c before changing any of them.
+ #
+-CC = gcc
+-AR = ar
++CC ?= gcc
++AR ?= ar
+ W = -W -Wall -Wno-unused-parameter -Wbad-function-cast -Wuninitialized
+ THREADS = -pthread
+ OPT = -O2 -g
+-CFLAGS = $(THREADS) $(OPT) $(W) $(XCFLAGS)
+-LDFLAGS = $(THREADS)
++CFLAGS += $(THREADS) $(OPT) $(W) $(XCFLAGS)
++LDFLAGS += $(THREADS)
+ LDLIBS =
+ SOLIBS =
+ SOEXT = .so
diff --git a/build/openresty/openssl/BUILD.bazel b/build/openresty/openssl/BUILD.bazel
new file mode 100644
index 000000000000..5970c67b2335
--- /dev/null
+++ b/build/openresty/openssl/BUILD.bazel
@@ -0,0 +1,5 @@
+load("@kong//build/openresty/openssl:openssl.bzl", "build_openssl")
+
+build_openssl(
+ name = "openssl",
+)
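+
+# build_openssl() is a macro defined in openssl.bzl (added below in this
+# change); it expands to a filegroup of the OpenSSL sources plus a
+# configure_make target with the given name.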
diff --git a/build/openresty/openssl/README.md b/build/openresty/openssl/README.md
new file mode 100644
index 000000000000..8cc90c255fa4
--- /dev/null
+++ b/build/openresty/openssl/README.md
@@ -0,0 +1,10 @@
+This target is modified from https://github.com/bazelbuild/rules_foreign_cc/tree/main/examples/third_party
+with the following changes:
+
+- Read the version from requirements.txt
+- Updated `build_file` to its new path under //build/openresty
+- Removed Windows build support
+- Removed the Bazel mirror, as it is missing the latest versions
+- Removed the runnable test for now, until cross compilation has been sorted out
+- Use the system Perl for now
+- Updated to be reusable
\ No newline at end of file
diff --git a/build/openresty/openssl/openssl.bzl b/build/openresty/openssl/openssl.bzl
new file mode 100644
index 000000000000..62aa72f34dc8
--- /dev/null
+++ b/build/openresty/openssl/openssl.bzl
@@ -0,0 +1,82 @@
+"""An openssl build file based on a snippet found in the github issue:
+https://github.com/bazelbuild/rules_foreign_cc/issues/337
+
+Note that the $(PERL) "make variable" (https://docs.bazel.build/versions/main/be/make-variables.html)
+is populated by the perl toolchain provided by rules_perl.
+"""
+
+load("@rules_foreign_cc//foreign_cc:defs.bzl", "configure_make")
+load("@kong_bindings//:variables.bzl", "KONG_VAR")
+
+# Read https://wiki.openssl.org/index.php/Compilation_and_Installation
+
+CONFIGURE_OPTIONS = select({
+ "@kong//:aarch64-linux-anylibc-cross": [
+ "linux-aarch64",
+ ],
+ # no extra args needed for "@kong//:x86_64-linux-musl-cross" or non-cross builds
+ "//conditions:default": [],
+}) + [
+ "-g",
+ "-O3", # force -O3 even we are using --debug (for example on CI)
+ "shared",
+ "-DPURIFY",
+ "no-threads",
+ "no-tests",
+ "--prefix=%s/kong" % KONG_VAR["INSTALL_DESTDIR"],
+ "--openssldir=%s/kong" % KONG_VAR["INSTALL_DESTDIR"],
+ "--libdir=lib", # force lib instead of lib64 (multilib postfix)
+ "-Wl,-rpath,%s/kong/lib" % KONG_VAR["INSTALL_DESTDIR"],
+] + select({
+ "@kong//:debug_flag": ["--debug"],
+ "//conditions:default": [],
+})
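+
+# For illustration only: on a native x86_64 build, and assuming INSTALL_DESTDIR
+# expands to /usr/local, the options above roughly amount to
+#   ./config -g -O3 shared -DPURIFY no-threads no-tests \
+#     --prefix=/usr/local/kong --openssldir=/usr/local/kong --libdir=lib \
+#     -Wl,-rpath,/usr/local/kong/lib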
+
+def build_openssl(
+ name = "openssl"):
+ extra_make_targets = []
+ extra_configure_options = []
+
+ native.filegroup(
+ name = name + "-all_srcs",
+ srcs = native.glob(
+ include = ["**"],
+ exclude = ["*.bazel"],
+ ),
+ )
+
+ configure_make(
+ name = name,
+ configure_command = "config",
+ configure_in_place = True,
+ configure_options = CONFIGURE_OPTIONS + extra_configure_options,
+ env = select({
+ "@platforms//os:macos": {
+ "AR": "/usr/bin/ar",
+ },
+ "//conditions:default": {},
+ }),
+ lib_source = ":%s-all_srcs" % name,
+ out_binaries = ["openssl"],
+ # Note that for Linux builds, libssl must come before libcrypto on the
+ # linker command line, so libssl is listed first here.
+ out_shared_libs = select({
+ "@platforms//os:macos": [
+ "libssl.1.1.dylib",
+ "libcrypto.1.1.dylib",
+ ],
+ "//conditions:default": [
+ "libssl.so.1.1",
+ "libcrypto.so.1.1",
+ ],
+ }),
+ targets = [
+ "-j" + KONG_VAR["NPROC"],
+ # don't set the prefix via the --prefix switch; only override the install destdir
+ # using INSTALLTOP at install time. This keeps both Bazel and OpenSSL (build-time generated) paths happy.
+ "install_sw INSTALLTOP=$BUILD_TMPDIR/$INSTALL_PREFIX",
+ ] + extra_make_targets,
+ # TODO: uncomment this to let Bazel build a Perl toolchain if one is not installed on the system
+ # toolchains = ["@rules_perl//:current_toolchain"],
+ visibility = ["//visibility:public"],
+ )
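+
+# Minimal usage sketch, mirroring build/openresty/openssl/BUILD.bazel in this
+# change:
+#
+#   load("@kong//build/openresty/openssl:openssl.bzl", "build_openssl")
+#
+#   build_openssl(
+#       name = "openssl",
+#   )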
diff --git a/build/openresty/openssl/openssl_repositories.bzl b/build/openresty/openssl/openssl_repositories.bzl
new file mode 100644
index 000000000000..c549b59fb1be
--- /dev/null
+++ b/build/openresty/openssl/openssl_repositories.bzl
@@ -0,0 +1,21 @@
+"""A module defining the third party dependency OpenSSL"""
+
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
+load("@kong_bindings//:variables.bzl", "KONG_VAR")
+
+def openssl_repositories():
+ version = KONG_VAR["OPENSSL"]
+ version_github = version.replace(".", "_")
+
+ maybe(
+ http_archive,
+ name = "openssl",
+ build_file = "//build/openresty/openssl:BUILD.bazel",
+ sha256 = "9384a2b0570dd80358841464677115df785edb941c71211f75076d72fe6b438f",
+ strip_prefix = "openssl-" + version,
+ urls = [
+ "https://www.openssl.org/source/openssl-" + version + ".tar.gz",
+ "https://github.com/openssl/openssl/archive/OpenSSL_" + version_github + ".tar.gz",
+ ],
+ )
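+
+# maybe() makes this call a no-op if a repository named "openssl" has already
+# been defined, so openssl_repositories() can safely be invoked from the
+# WORKSPACE (or an aggregating repositories macro) more than once.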
diff --git a/build/openresty/patches/LuaJIT-2.1-20210510_01-ffi-arm64-macos-fix-vararg-call-handling.patch b/build/openresty/patches/LuaJIT-2.1-20210510_01-ffi-arm64-macos-fix-vararg-call-handling.patch
new file mode 100644
index 000000000000..9047d7c86d1a
--- /dev/null
+++ b/build/openresty/patches/LuaJIT-2.1-20210510_01-ffi-arm64-macos-fix-vararg-call-handling.patch
@@ -0,0 +1,62 @@
+From 521b367567dc5d91d7f9ae29c257998953e24e53 Mon Sep 17 00:00:00 2001
+From: Mike Pall
+Date: Sun, 2 May 2021 22:11:05 +0200
+Subject: [PATCH] FFI/ARM64/OSX: Fix vararg call handling.
+
+Thanks to Igor Munkin.
+---
+ LuaJIT-2.1-20210510/src/lj_ccall.c | 8 ++++----
+ LuaJIT-2.1-20210510/src/lj_ccallback.c | 2 +-
+ 2 files changed, 5 insertions(+), 5 deletions(-)
+
+diff --git a/LuaJIT-2.1-20210510/src/lj_ccall.c b/LuaJIT-2.1-20210510/src/lj_ccall.c
+index a91ffc7e..3c029823 100644
+--- a/bundle/LuaJIT-2.1-20210510/src/lj_ccall.c
++++ b/bundle/LuaJIT-2.1-20210510/src/lj_ccall.c
+@@ -334,7 +334,7 @@
+ isfp = sz == 2*sizeof(float) ? 2 : 1;
+
+ #define CCALL_HANDLE_REGARG \
+- if (LJ_TARGET_IOS && isva) { \
++ if (LJ_TARGET_OSX && isva) { \
+ /* IOS: All variadic arguments are on the stack. */ \
+ } else if (isfp) { /* Try to pass argument in FPRs. */ \
+ int n2 = ctype_isvector(d->info) ? 1 : \
+@@ -345,10 +345,10 @@
+ goto done; \
+ } else { \
+ nfpr = CCALL_NARG_FPR; /* Prevent reordering. */ \
+- if (LJ_TARGET_IOS && d->size < 8) goto err_nyi; \
++ if (LJ_TARGET_OSX && d->size < 8) goto err_nyi; \
+ } \
+ } else { /* Try to pass argument in GPRs. */ \
+- if (!LJ_TARGET_IOS && (d->info & CTF_ALIGN) > CTALIGN_PTR) \
++ if (!LJ_TARGET_OSX && (d->info & CTF_ALIGN) > CTALIGN_PTR) \
+ ngpr = (ngpr + 1u) & ~1u; /* Align to regpair. */ \
+ if (ngpr + n <= maxgpr) { \
+ dp = &cc->gpr[ngpr]; \
+@@ -356,7 +356,7 @@
+ goto done; \
+ } else { \
+ ngpr = maxgpr; /* Prevent reordering. */ \
+- if (LJ_TARGET_IOS && d->size < 8) goto err_nyi; \
++ if (LJ_TARGET_OSX && d->size < 8) goto err_nyi; \
+ } \
+ }
+
+diff --git a/LuaJIT-2.1-20210510/src/lj_ccallback.c b/LuaJIT-2.1-20210510/src/lj_ccallback.c
+index 8d6cb737..80d738c6 100644
+--- a/bundle/LuaJIT-2.1-20210510/src/lj_ccallback.c
++++ b/bundle/LuaJIT-2.1-20210510/src/lj_ccallback.c
+@@ -460,7 +460,7 @@ void lj_ccallback_mcode_free(CTState *cts)
+ nfpr = CCALL_NARG_FPR; /* Prevent reordering. */ \
+ } \
+ } else { \
+- if (!LJ_TARGET_IOS && n > 1) \
++ if (!LJ_TARGET_OSX && n > 1) \
+ ngpr = (ngpr + 1u) & ~1u; /* Align to regpair. */ \
+ if (ngpr + n <= maxgpr) { \
+ sp = &cts->cb.gpr[ngpr]; \
+--
+2.34.1
+
diff --git a/build/openresty/patches/LuaJIT-2.1-20210510_02-arm64-fix-pcall-error-case.patch b/build/openresty/patches/LuaJIT-2.1-20210510_02-arm64-fix-pcall-error-case.patch
new file mode 100644
index 000000000000..37934cee30d6
--- /dev/null
+++ b/build/openresty/patches/LuaJIT-2.1-20210510_02-arm64-fix-pcall-error-case.patch
@@ -0,0 +1,29 @@
+From b4b2dce9fc3ffaaaede39b36d06415311e2aa516 Mon Sep 17 00:00:00 2001
+From: Mike Pall
+Date: Wed, 27 Oct 2021 21:56:07 +0200
+Subject: [PATCH] ARM64: Fix pcall() error case.
+
+Reported by Alex Orlenko.
+---
+ src/vm_arm64.dasc | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/LuaJIT-2.1-20210510/src/vm_arm64.dasc b/LuaJIT-2.1-20210510/src/vm_arm64.dasc
+index c7090ca3..eb87857f 100644
+--- a/bundle/LuaJIT-2.1-20210510/src/vm_arm64.dasc
++++ b/bundle/LuaJIT-2.1-20210510/src/vm_arm64.dasc
+@@ -1163,9 +1163,10 @@ static void build_subroutines(BuildCtx *ctx)
+ |//-- Base library: catch errors ----------------------------------------
+ |
+ |.ffunc pcall
++ | cmp NARGS8:RC, #8
+ | ldrb TMP0w, GL->hookmask
+- | subs NARGS8:RC, NARGS8:RC, #8
+ | blo ->fff_fallback
++ | sub NARGS8:RC, NARGS8:RC, #8
+ | mov RB, BASE
+ | add BASE, BASE, #16
+ | ubfx TMP0w, TMP0w, #HOOK_ACTIVE_SHIFT, #1
+--
+2.34.1
+
diff --git a/build/openresty/patches/LuaJIT-2.1-20210510_04_pass_cc_env.patch b/build/openresty/patches/LuaJIT-2.1-20210510_04_pass_cc_env.patch
new file mode 100644
index 000000000000..afe165ab78a1
--- /dev/null
+++ b/build/openresty/patches/LuaJIT-2.1-20210510_04_pass_cc_env.patch
@@ -0,0 +1,40 @@
+diff --git a/bundle/LuaJIT-2.1-20210510/src/Makefile b/bundle/LuaJIT-2.1-20210510/src/Makefile
+index 47a21c9..c60b94e 100644
+--- a/bundle/LuaJIT-2.1-20210510/src/Makefile
++++ b/bundle/LuaJIT-2.1-20210510/src/Makefile
+@@ -27,7 +27,8 @@ NODOTABIVER= 51
+ DEFAULT_CC = gcc
+ #
+ # LuaJIT builds as a native 32 or 64 bit binary by default.
+-CC= $(DEFAULT_CC)
++CC?= $(DEFAULT_CC)
++AR?= ar
+ #
+ # Use this if you want to force a 32 bit build on a 64 bit multilib OS.
+ #CC= $(DEFAULT_CC) -m32
+@@ -211,7 +212,7 @@ TARGET_CC= $(STATIC_CC)
+ TARGET_STCC= $(STATIC_CC)
+ TARGET_DYNCC= $(DYNAMIC_CC)
+ TARGET_LD= $(CROSS)$(CC)
+-TARGET_AR= $(CROSS)ar rcus 2>/dev/null
++TARGET_AR= $(CROSS)$(AR) rcus 2>/dev/null
+ TARGET_STRIP= $(CROSS)strip
+
+ TARGET_LIBPATH= $(or $(PREFIX),/usr/local)/$(or $(MULTILIB),lib)
+@@ -291,11 +292,11 @@ TARGET_XCFLAGS+= $(CCOPT_$(TARGET_LJARCH))
+ TARGET_ARCH+= $(patsubst %,-DLUAJIT_TARGET=LUAJIT_ARCH_%,$(TARGET_LJARCH))
+
+ ifneq (,$(PREFIX))
+-ifneq (/usr/local,$(PREFIX))
+- TARGET_XCFLAGS+= -DLUA_ROOT=\"$(PREFIX)\"
+- ifneq (/usr,$(PREFIX))
+- TARGET_DYNXLDOPTS= -Wl,-rpath,$(TARGET_LIBPATH)
+- endif
++ifneq (/usr/local,$(LUA_ROOT))
++ TARGET_XCFLAGS+= -DLUA_ROOT=\"$(LUA_ROOT)\"
++endif
++ifneq (/usr,$(PREFIX))
++ TARGET_DYNXLDOPTS= -Wl,-rpath,$(TARGET_LIBPATH)
+ endif
+ endif
+ ifneq (,$(MULTILIB))
\ No newline at end of file
diff --git a/build/openresty/patches/LuaJIT-2.1-20210510_05_Revert_Detect_SSE4.2_support_dynamically.patch b/build/openresty/patches/LuaJIT-2.1-20210510_05_Revert_Detect_SSE4.2_support_dynamically.patch
new file mode 100644
index 000000000000..20cb556d493c
--- /dev/null
+++ b/build/openresty/patches/LuaJIT-2.1-20210510_05_Revert_Detect_SSE4.2_support_dynamically.patch
@@ -0,0 +1,562 @@
+From db0824835876d11bf88b0c8ad9791019ea969ef7 Mon Sep 17 00:00:00 2001
+From: Zhongwei Yao
+Date: Fri, 31 May 2024 11:39:51 -0700
+Subject: [PATCH] Revert "Detect SSE4.2 support dynamically"
+
+This reverts commit 34b63ba83542cad8675f875c9aa849653ead378d.
+---
+ src/Makefile | 18 +--
+ src/lj_arch.h | 4 -
+ src/lj_init.c | 69 ------------
+ src/lj_jit.h | 1 -
+ src/lj_str.c | 25 ++---
+ src/lj_str.h | 12 --
+ src/ljamalg.c | 1 +
+ .../src/lj_str_hash_x64.h} | 106 +++++++-----------
+ src/x64/test/benchmark.cxx | 13 +--
+ src/x64/test/test.cpp | 10 +-
+ 10 files changed, 64 insertions(+), 195 deletions(-)
+ delete mode 100644 src/lj_init.c
+ rename src/{lj_str_hash.c => x64/src/lj_str_hash_x64.h} (76%)
+
+diff --git a/bundle/LuaJIT-2.1-20210510/src/Makefile b/bundle/LuaJIT-2.1-20210510/src/Makefile
+index 287e4cd2..9fcf9316 100644
+--- a/bundle/LuaJIT-2.1-20210510/src/Makefile
++++ b/bundle/LuaJIT-2.1-20210510/src/Makefile
+@@ -508,16 +508,10 @@
+ lj_ctype.o lj_cdata.o lj_cconv.o lj_ccall.o lj_ccallback.o \
+ lj_carith.o lj_clib.o lj_cparse.o \
+ lj_lib.o lj_alloc.o lib_aux.o \
+- $(LJLIB_O) lib_init.o lj_str_hash.o
+-
+-ifeq (x64,$(TARGET_LJARCH))
+- lj_str_hash-CFLAGS = -msse4.2
+-endif
+-
+-F_CFLAGS = $($(patsubst %.c,%-CFLAGS,$<))
++ $(LJLIB_O) lib_init.o
+
+ LJVMCORE_O= $(LJVM_O) $(LJCORE_O)
+-LJVMCORE_DYNO= $(LJVMCORE_O:.o=_dyn.o) lj_init_dyn.o
++LJVMCORE_DYNO= $(LJVMCORE_O:.o=_dyn.o)
+
+ LIB_VMDEF= jit/vmdef.lua
+ LIB_VMDEFP= $(LIB_VMDEF)
+@@ -539,7 +533,7 @@
+ ##############################################################################
+
+ # Mixed mode defaults.
+-TARGET_O= lj_init.o $(LUAJIT_A)
++TARGET_O= $(LUAJIT_A)
+ TARGET_T= $(LUAJIT_T) $(LUAJIT_SO)
+ TARGET_DEP= $(LIB_VMDEF) $(LUAJIT_SO)
+
+@@ -621,7 +615,7 @@
+ default all: $(TARGET_T)
+
+ amalg:
+- $(MAKE) all "LJCORE_O=ljamalg.o lj_str_hash.o"
++ $(MAKE) all "LJCORE_O=ljamalg.o"
+
+ clean:
+ $(HOST_RM) $(ALL_RM)
+@@ -698,8 +692,8 @@
+
+ %.o: %.c
+ $(E) "CC $@"
+- $(Q)$(TARGET_DYNCC) $(TARGET_ACFLAGS) $(F_CFLAGS) -c -o $(@:.o=_dyn.o) $<
+- $(Q)$(TARGET_CC) $(TARGET_ACFLAGS) $(F_CFLAGS) -c -o $@ $<
++ $(Q)$(TARGET_DYNCC) $(TARGET_ACFLAGS) -c -o $(@:.o=_dyn.o) $<
++ $(Q)$(TARGET_CC) $(TARGET_ACFLAGS) -c -o $@ $<
+
+ %.o: %.S
+ $(E) "ASM $@"
+diff --git a/bundle/LuaJIT-2.1-20210510/src/lj_arch.h b/bundle/LuaJIT-2.1-20210510/src/lj_arch.h
+index 326c7148..1ea68032 100644
+--- a/bundle/LuaJIT-2.1-20210510/src/lj_arch.h
++++ b/bundle/LuaJIT-2.1-20210510/src/lj_arch.h
+@@ -209,10 +209,6 @@
+ #define LJ_TARGET_GC64 1
+ #endif
+
+-#ifdef __GNUC__
+-#define LJ_HAS_OPTIMISED_HASH 1
+-#endif
+-
+ #elif LUAJIT_TARGET == LUAJIT_ARCH_ARM
+
+ #define LJ_ARCH_NAME "arm"
+diff --git a/bundle/LuaJIT-2.1-20210510/src/lj_init.c b/bundle/LuaJIT-2.1-20210510/src/lj_init.c
+deleted file mode 100644
+index a6816e1e..00000000
+--- a/bundle/LuaJIT-2.1-20210510/src/lj_init.c
++++ /dev/null
+@@ -1,69 +0,0 @@
+-#include
+-#include "lj_arch.h"
+-#include "lj_jit.h"
+-#include "lj_vm.h"
+-#include "lj_str.h"
+-
+-#if LJ_TARGET_ARM && LJ_TARGET_LINUX
+-#include
+-#endif
+-
+-#ifdef _MSC_VER
+-/*
+-** Append a function pointer to the static constructor table executed by
+-** the C runtime.
+-** Based on https://stackoverflow.com/questions/1113409/attribute-constructor-equivalent-in-vc
+-** see also https://docs.microsoft.com/en-us/cpp/c-runtime-library/crt-initialization.
+-*/
+-#pragma section(".CRT$XCU",read)
+-#define LJ_INITIALIZER2_(f,p) \
+- static void f(void); \
+- __declspec(allocate(".CRT$XCU")) void (*f##_)(void) = f; \
+- __pragma(comment(linker,"/include:" p #f "_")) \
+- static void f(void)
+-#ifdef _WIN64
+-#define LJ_INITIALIZER(f) LJ_INITIALIZER2_(f,"")
+-#else
+-#define LJ_INITIALIZER(f) LJ_INITIALIZER2_(f,"_")
+-#endif
+-
+-#else
+-#define LJ_INITIALIZER(f) static void __attribute__((constructor)) f(void)
+-#endif
+-
+-
+-#ifdef LJ_HAS_OPTIMISED_HASH
+-static void str_hash_init(uint32_t flags)
+-{
+- if (flags & JIT_F_SSE4_2)
+- str_hash_init_sse42 ();
+-}
+-
+-/* CPU detection for interpreter features such as string hash function
+- selection. We choose to cherry-pick from lj_cpudetect and not have a single
+- initializer to make sure that merges with LuaJIT/LuaJIT remain
+- convenient. */
+-LJ_INITIALIZER(lj_init_cpuflags)
+-{
+- uint32_t flags = 0;
+-#if LJ_TARGET_X86ORX64
+-
+- uint32_t vendor[4];
+- uint32_t features[4];
+- if (lj_vm_cpuid(0, vendor) && lj_vm_cpuid(1, features)) {
+- flags |= ((features[2] >> 0)&1) * JIT_F_SSE3;
+- flags |= ((features[2] >> 19)&1) * JIT_F_SSE4_1;
+- flags |= ((features[2] >> 20)&1) * JIT_F_SSE4_2;
+- if (vendor[0] >= 7) {
+- uint32_t xfeatures[4];
+- lj_vm_cpuid(7, xfeatures);
+- flags |= ((xfeatures[1] >> 8)&1) * JIT_F_BMI2;
+- }
+- }
+-
+-#endif
+-
+- /* The reason why we initialized early: select our string hash functions. */
+- str_hash_init (flags);
+-}
+-#endif
+diff --git a/bundle/LuaJIT-2.1-20210510/src/lj_jit.h b/bundle/LuaJIT-2.1-20210510/src/lj_jit.h
+index 8993354f..c834d80a 100644
+--- a/bundle/LuaJIT-2.1-20210510/src/lj_jit.h
++++ b/bundle/LuaJIT-2.1-20210510/src/lj_jit.h
+@@ -22,7 +22,6 @@
+ #define JIT_F_SSE3 (JIT_F_CPU << 0)
+ #define JIT_F_SSE4_1 (JIT_F_CPU << 1)
+ #define JIT_F_BMI2 (JIT_F_CPU << 2)
+-#define JIT_F_SSE4_2 (JIT_F_CPU << 3)
+
+
+ #define JIT_F_CPUSTRING "\4SSE3\6SSE4.1\4BMI2"
+diff --git a/bundle/LuaJIT-2.1-20210510/src/lj_str.c b/bundle/LuaJIT-2.1-20210510/src/lj_str.c
+index d37f3b22..c0e2dfad 100644
+--- a/bundle/LuaJIT-2.1-20210510/src/lj_str.c
++++ b/bundle/LuaJIT-2.1-20210510/src/lj_str.c
+@@ -12,6 +12,7 @@
+ #include "lj_str.h"
+ #include "lj_char.h"
+ #include "lj_prng.h"
++#include "x64/src/lj_str_hash_x64.h"
+
+ /* -- String helpers ------------------------------------------------------ */
+
+@@ -82,22 +83,9 @@ int lj_str_haspattern(GCstr *s)
+
+ /* -- String hashing ------------------------------------------------------ */
+
+-#ifdef LJ_HAS_OPTIMISED_HASH
+-static StrHash hash_sparse_def (uint64_t, const char *, MSize);
+-str_sparse_hashfn hash_sparse = hash_sparse_def;
+-#if LUAJIT_SECURITY_STRHASH
+-static StrHash hash_dense_def(uint64_t, StrHash, const char *, MSize);
+-str_dense_hashfn hash_dense = hash_dense_def;
+-#endif
+-#else
+-#define hash_sparse hash_sparse_def
+-#if LUAJIT_SECURITY_STRHASH
+-#define hash_dense hash_dense_def
+-#endif
+-#endif
+-
++#ifndef ARCH_HASH_SPARSE
+ /* Keyed sparse ARX string hash. Constant time. */
+-static StrHash hash_sparse_def(uint64_t seed, const char *str, MSize len)
++static StrHash hash_sparse(uint64_t seed, const char *str, MSize len)
+ {
+ /* Constants taken from lookup3 hash by Bob Jenkins. */
+ StrHash a, b, h = len ^ (StrHash)seed;
+@@ -118,11 +106,12 @@ static StrHash hash_sparse_def(uint64_t seed, const char *str, MSize len)
+ h ^= b; h -= lj_rol(b, 16);
+ return h;
+ }
++#endif
+
+-#if LUAJIT_SECURITY_STRHASH
++#if LUAJIT_SECURITY_STRHASH && !defined(ARCH_HASH_DENSE)
+ /* Keyed dense ARX string hash. Linear time. */
+-static LJ_NOINLINE StrHash hash_dense_def(uint64_t seed, StrHash h,
+- const char *str, MSize len)
++static LJ_NOINLINE StrHash hash_dense(uint64_t seed, StrHash h,
++ const char *str, MSize len)
+ {
+ StrHash b = lj_bswap(lj_rol(h ^ (StrHash)(seed >> 32), 4));
+ if (len > 12) {
+diff --git a/bundle/LuaJIT-2.1-20210510/src/lj_str.h b/bundle/LuaJIT-2.1-20210510/src/lj_str.h
+index f7b9234b..28edb5a5 100644
+--- a/bundle/LuaJIT-2.1-20210510/src/lj_str.h
++++ b/bundle/LuaJIT-2.1-20210510/src/lj_str.h
+@@ -28,16 +28,4 @@ LJ_FUNC void LJ_FASTCALL lj_str_init(lua_State *L);
+ #define lj_str_newlit(L, s) (lj_str_new(L, "" s, sizeof(s)-1))
+ #define lj_str_size(len) (sizeof(GCstr) + (((len)+4) & ~(MSize)3))
+
+-#ifdef LJ_HAS_OPTIMISED_HASH
+-typedef StrHash (*str_sparse_hashfn) (uint64_t, const char *, MSize);
+-extern str_sparse_hashfn hash_sparse;
+-
+-#if LUAJIT_SECURITY_STRHASH
+-typedef StrHash (*str_dense_hashfn) (uint64_t, StrHash, const char *, MSize);
+-extern str_dense_hashfn hash_dense;
+-#endif
+-
+-extern void str_hash_init_sse42 (void);
+-#endif
+-
+ #endif
+diff --git a/bundle/LuaJIT-2.1-20210510/src/ljamalg.c b/bundle/LuaJIT-2.1-20210510/src/ljamalg.c
+index 36ad2f6d..34922650 100644
+--- a/bundle/LuaJIT-2.1-20210510/src/ljamalg.c
++++ b/bundle/LuaJIT-2.1-20210510/src/ljamalg.c
+@@ -86,3 +86,4 @@
+ #include "lib_jit.c"
+ #include "lib_ffi.c"
+ #include "lib_init.c"
++
+diff --git a/bundle/LuaJIT-2.1-20210510/src/lj_str_hash.c b/bundle/LuaJIT-2.1-20210510/src/x64/src/lj_str_hash_x64.h
+similarity index 76%
+rename from src/lj_str_hash.c
+rename to src/x64/src/lj_str_hash_x64.h
+index 0ee4b5f6..e6538953 100644
+--- a/bundle/LuaJIT-2.1-20210510/src/lj_str_hash.c
++++ b/bundle/LuaJIT-2.1-20210510/src/x64/src/lj_str_hash_x64.h
+@@ -5,48 +5,23 @@
+ * to 128 bytes of given string.
+ */
+
+-#include "lj_arch.h"
++#ifndef _LJ_STR_HASH_X64_H_
++#define _LJ_STR_HASH_X64_H_
++
++#if defined(__SSE4_2__) && defined(__x86_64) && defined(__GNUC__)
+
+-#if LJ_HAS_OPTIMISED_HASH == 1 || defined(SMOKETEST)
+ #include
+ #include
++#include
+ #include
+ #include
+
+-#if defined(_MSC_VER)
+-#include
+-/* Silence deprecated name warning */
+-#define getpid _getpid
+-#else
+-#include
+-#endif
+-
+-#include "lj_def.h"
+-#include "lj_str.h"
+-#include "lj_jit.h"
+-
+-
+-#if defined(_MSC_VER)
+-/*
+- * MSVC doesn't seem to restrict intrinsics used based on /arch: value set
+- * while clang-cl will error on it.
+- */
+-#if defined(__clang__) && !defined(__SSE4_2__)
+-#error "This file must be built with /arch:AVX1 or higher"
+-#endif
+-#else
+-#if !defined(__SSE4_2__)
+-#error "This file must be built with -msse4.2"
+-#endif
+-#endif
+-
+-#define lj_crc32_u32 _mm_crc32_u32
+-#define lj_crc32_u64 _mm_crc32_u64
++#include "../../lj_def.h"
+
+ #undef LJ_AINLINE
+ #define LJ_AINLINE
+
+-#if defined(__MINGW32__) || defined(_MSC_VER)
++#ifdef __MINGW32__
+ #define random() ((long) rand())
+ #define srandom(seed) srand(seed)
+ #endif
+@@ -74,7 +49,7 @@ static LJ_AINLINE uint32_t hash_sparse_1_4(uint64_t seed, const char* str,
+ v = (v << 8) | str[len >> 1];
+ v = (v << 8) | str[len - 1];
+ v = (v << 8) | len;
+- return lj_crc32_u32(0, v);
++ return _mm_crc32_u32(0, v);
+ #else
+ uint32_t a, b, h = len ^ seed;
+
+@@ -105,9 +80,9 @@ static LJ_AINLINE uint32_t hash_sparse_4_16(uint64_t seed, const char* str,
+ v2 = *cast_uint32p(str + len - 4);
+ }
+
+- h = lj_crc32_u32(0, len ^ seed);
+- h = lj_crc32_u64(h, v1);
+- h = lj_crc32_u64(h, v2);
++ h = _mm_crc32_u32(0, len ^ seed);
++ h = _mm_crc32_u64(h, v1);
++ h = _mm_crc32_u64(h, v2);
+ return h;
+ }
+
+@@ -118,18 +93,18 @@ static uint32_t hash_16_128(uint64_t seed, const char* str,
+ uint64_t h1, h2;
+ uint32_t i;
+
+- h1 = lj_crc32_u32(0, len ^ seed);
++ h1 = _mm_crc32_u32(0, len ^ seed);
+ h2 = 0;
+
+ for (i = 0; i < len - 16; i += 16) {
+- h1 += lj_crc32_u64(h1, *cast_uint64p(str + i));
+- h2 += lj_crc32_u64(h2, *cast_uint64p(str + i + 8));
++ h1 += _mm_crc32_u64(h1, *cast_uint64p(str + i));
++ h2 += _mm_crc32_u64(h2, *cast_uint64p(str + i + 8));
+ };
+
+- h1 = lj_crc32_u64(h1, *cast_uint64p(str + len - 16));
+- h2 = lj_crc32_u64(h2, *cast_uint64p(str + len - 8));
++ h1 = _mm_crc32_u64(h1, *cast_uint64p(str + len - 16));
++ h2 = _mm_crc32_u64(h2, *cast_uint64p(str + len - 8));
+
+- return lj_crc32_u32(h1, h2);
++ return _mm_crc32_u32(h1, h2);
+ }
+
+ /* **************************************************************************
+@@ -172,7 +147,7 @@ static LJ_AINLINE uint32_t log2_floor(uint32_t n)
+ /* This function is to populate `random_pos` such that random_pos[i][*]
+ * contains random value in the range of [2**i, 2**(i+1)).
+ */
+-static void str_hash_init_random(void)
++static void x64_init_random(void)
+ {
+ int i, seed, rml;
+
+@@ -183,8 +158,8 @@ static void str_hash_init_random(void)
+ }
+
+ /* Init seed */
+- seed = lj_crc32_u32(0, getpid());
+- seed = lj_crc32_u32(seed, time(NULL));
++ seed = _mm_crc32_u32(0, getpid());
++ seed = _mm_crc32_u32(seed, time(NULL));
+ srandom(seed);
+
+ /* Now start to populate the random_pos[][]. */
+@@ -213,6 +188,11 @@ static void str_hash_init_random(void)
+ }
+ #undef POW2_MASK
+
++void __attribute__((constructor)) x64_init_random_constructor()
++{
++ x64_init_random();
++}
++
+ /* Return a pre-computed random number in the range of [1**chunk_sz_order,
+ * 1**(chunk_sz_order+1)). It is "unsafe" in the sense that the return value
+ * may be greater than chunk-size; it is up to the caller to make sure
+@@ -239,7 +219,7 @@ static LJ_NOINLINE uint32_t hash_128_above(uint64_t seed, const char* str,
+ pos1 = get_random_pos_unsafe(chunk_sz_log2, 0);
+ pos2 = get_random_pos_unsafe(chunk_sz_log2, 1);
+
+- h1 = lj_crc32_u32(0, len ^ seed);
++ h1 = _mm_crc32_u32(0, len ^ seed);
+ h2 = 0;
+
+ /* loop over 14 chunks, 2 chunks at a time */
+@@ -247,29 +227,29 @@ static LJ_NOINLINE uint32_t hash_128_above(uint64_t seed, const char* str,
+ chunk_ptr += chunk_sz, i++) {
+
+ v = *cast_uint64p(chunk_ptr + pos1);
+- h1 = lj_crc32_u64(h1, v);
++ h1 = _mm_crc32_u64(h1, v);
+
+ v = *cast_uint64p(chunk_ptr + chunk_sz + pos2);
+- h2 = lj_crc32_u64(h2, v);
++ h2 = _mm_crc32_u64(h2, v);
+ }
+
+ /* the last two chunks */
+ v = *cast_uint64p(chunk_ptr + pos1);
+- h1 = lj_crc32_u64(h1, v);
++ h1 = _mm_crc32_u64(h1, v);
+
+ v = *cast_uint64p(chunk_ptr + chunk_sz - 8 - pos2);
+- h2 = lj_crc32_u64(h2, v);
++ h2 = _mm_crc32_u64(h2, v);
+
+ /* process the trailing part */
+- h1 = lj_crc32_u64(h1, *cast_uint64p(str));
+- h2 = lj_crc32_u64(h2, *cast_uint64p(str + len - 8));
++ h1 = _mm_crc32_u64(h1, *cast_uint64p(str));
++ h2 = _mm_crc32_u64(h2, *cast_uint64p(str + len - 8));
+
+- h1 = lj_crc32_u32(h1, h2);
++ h1 = _mm_crc32_u32(h1, h2);
+ return h1;
+ }
+
+ /* NOTE: the "len" should not be zero */
+-static StrHash hash_sparse_sse42(uint64_t seed, const char* str, MSize len)
++static uint32_t hash_sparse(uint64_t seed, const char* str, size_t len)
+ {
+ if (len < 4 || len >= 128)
+ return hash_sparse_1_4(seed, str, len);
+@@ -280,10 +260,11 @@ static StrHash hash_sparse_sse42(uint64_t seed, const char* str, MSize len)
+ /* [4, 16) */
+ return hash_sparse_4_16(seed, str, len);
+ }
++#define ARCH_HASH_SPARSE hash_sparse
+
+ #if LUAJIT_SECURITY_STRHASH
+-static StrHash hash_dense_sse42(uint64_t seed, uint32_t h, const char* str,
+- MSize len)
++static uint32_t hash_dense(uint64_t seed, uint32_t h, const char* str,
++ size_t len)
+ {
+ uint32_t b = lj_bswap(lj_rol(h ^ (uint32_t)(seed >> 32), 4));
+
+@@ -296,14 +277,11 @@ static StrHash hash_dense_sse42(uint64_t seed, uint32_t h, const char* str,
+ /* Otherwise, do the slow crc32 randomization for long strings. */
+ return hash_128_above(b, str, len);
+ }
++#define ARCH_HASH_DENSE hash_dense
+ #endif
+
+-void str_hash_init_sse42(void)
+-{
+- hash_sparse = hash_sparse_sse42;
+-#if LUAJIT_SECURITY_STRHASH
+- hash_dense = hash_dense_sse42;
+-#endif
+- str_hash_init_random();
+-}
++#else
++#undef ARCH_HASH_SPARSE
++#undef ARCH_HASH_DENSE
+ #endif
++#endif /*_LJ_STR_HASH_X64_H_*/
+diff --git a/bundle/LuaJIT-2.1-20210510/src/x64/test/benchmark.cxx b/bundle/LuaJIT-2.1-20210510/src/x64/test/benchmark.cxx
+index 1ea8fb6b..ee247c1c 100644
+--- a/bundle/LuaJIT-2.1-20210510/src/x64/test/benchmark.cxx
++++ b/bundle/LuaJIT-2.1-20210510/src/x64/test/benchmark.cxx
+@@ -1,10 +1,7 @@
+ #include <sys/time.h> // for gettimeofday()
+ extern "C" {
+ #define LUAJIT_SECURITY_STRHASH 1
+-#include "../../lj_str.h"
+-str_sparse_hashfn hash_sparse;
+-str_dense_hashfn hash_dense;
+-#include "../../lj_str_hash.c"
++#include "lj_str_hash_x64.h"
+ }
+ #include
+ #include
+@@ -100,7 +97,7 @@ struct TestFuncWasSparse
+ struct TestFuncIsSparse
+ {
+ uint32_t operator()(uint64_t seed, const char* buf, uint32_t len) {
+- return hash_sparse_sse42(seed, buf, len);
++ return hash_sparse(seed, buf, len);
+ }
+ };
+
+@@ -114,7 +111,7 @@ struct TestFuncWasDense
+ struct TestFuncIsDense
+ {
+ uint32_t operator()(uint64_t seed, const char* buf, uint32_t len) {
+- return hash_dense_sse42(seed, 42, buf, len);
++ return hash_dense(seed, 42, buf, len);
+ }
+ };
+
+@@ -271,9 +268,9 @@ benchmarkConflictHelper(uint64_t seed, uint32_t bucketNum,
+ for (vector::const_iterator i = strs.begin(), e = strs.end();
+ i != e; ++i) {
+ uint32_t h1 = original_hash_sparse(seed, i->c_str(), i->size());
+- uint32_t h2 = hash_sparse_sse42(seed, i->c_str(), i->size());
++ uint32_t h2 = hash_sparse(seed, i->c_str(), i->size());
+ uint32_t h3 = original_hash_dense(seed, h1, i->c_str(), i->size());
+- uint32_t h4 = hash_dense_sse42(seed, h2, i->c_str(), i->size());
++ uint32_t h4 = hash_dense(seed, h2, i->c_str(), i->size());
+
+ conflictWasSparse[h1 & mask]++;
+ conflictIsSparse[h2 & mask]++;
+diff --git a/bundle/LuaJIT-2.1-20210510/src/x64/test/test.cpp b/bundle/LuaJIT-2.1-20210510/src/x64/test/test.cpp
+index 432c7bbb..75f34e9f 100644
+--- a/bundle/LuaJIT-2.1-20210510/src/x64/test/test.cpp
++++ b/bundle/LuaJIT-2.1-20210510/src/x64/test/test.cpp
+@@ -4,14 +4,10 @@
+ #include